How do I control the bandwidth of a WebRTC video call?

I am trying to develop a video calling/conferencing application with WebRTC and node.js. Currently I have no way to control the bandwidth during a video call. Is there a way to control/reduce the bandwidth? (For example, I would like my whole web application to run video conferences at 150 kbps.)

Any suggestions are highly appreciated. Thanks in advance.

Try this demo. You can inject the bandwidth attribute (b=AS) into the session description:

    audioBandwidth = 50;
    videoBandwidth = 256;
    function setBandwidth(sdp) {
        sdp = sdp.replace(/a=mid:audio\r\n/g, 'a=mid:audio\r\nb=AS:' + audioBandwidth + '\r\n');
        sdp = sdp.replace(/a=mid:video\r\n/g, 'a=mid:video\r\nb=AS:' + videoBandwidth + '\r\n');
        return sdp;
    }

    // ----------------------------------------------------------

    peer.createOffer(function (sessionDescription) {
        sessionDescription.sdp = setBandwidth(sessionDescription.sdp);
        peer.setLocalDescription(sessionDescription);
    }, null, constraints);

    peer.createAnswer(function (sessionDescription) {
        sessionDescription.sdp = setBandwidth(sessionDescription.sdp);
        peer.setLocalDescription(sessionDescription);
    }, null, constraints);
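For orientation, this is roughly what the rewritten video m-section ends up looking like after the replace above; the m-line, port, and payload type shown here are placeholders, not output copied from a real offer:

    m=video 9 UDP/TLS/RTP/SAVPF 100
    c=IN IP4 0.0.0.0
    a=mid:video
    b=AS:256
    a=rtpmap:100 VP8/90000

The b=AS value is interpreted in kbps, so b=AS:256 asks the sender to stay around 256 kbps for that media section.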

b=AS is already present for the data m-line in the SDP; its default value is 50.

Updated on September 23, 2015

Here is a library that gives you full control over the bitrates of audio/video tracks:

    // here is how to use it
    var bandwidth = {
        screen: 300, // 300kbits minimum
        audio: 50,   // 50kbits minimum
        video: 256   // 256kbits (both min-max)
    };
    var isScreenSharing = false;

    sdp = BandwidthHandler.setApplicationSpecificBandwidth(sdp, bandwidth, isScreenSharing);
    sdp = BandwidthHandler.setVideoBitrates(sdp, {
        min: bandwidth.video,
        max: bandwidth.video
    });
    sdp = BandwidthHandler.setOpusAttributes(sdp);

Here is the library code. It is quite long, but it works!

    // BandwidthHandler.js
    var BandwidthHandler = (function() {
        function setBAS(sdp, bandwidth, isScreen) {
            if (!!navigator.mozGetUserMedia || !bandwidth) {
                return sdp;
            }

            if (isScreen) {
                if (!bandwidth.screen) {
                    console.warn('It seems that you are not using bandwidth for screen. Screen sharing is expected to fail.');
                } else if (bandwidth.screen < 300) {
                    console.warn('It seems that you are using wrong bandwidth value for screen. Screen sharing is expected to fail.');
                }
            }

            // if screen; must use at least 300kbs
            if (bandwidth.screen && isScreen) {
                sdp = sdp.replace(/b=AS([^\r\n]+\r\n)/g, '');
                sdp = sdp.replace(/a=mid:video\r\n/g, 'a=mid:video\r\nb=AS:' + bandwidth.screen + '\r\n');
            }

            // remove existing bandwidth lines
            if (bandwidth.audio || bandwidth.video || bandwidth.data) {
                sdp = sdp.replace(/b=AS([^\r\n]+\r\n)/g, '');
            }

            if (bandwidth.audio) {
                sdp = sdp.replace(/a=mid:audio\r\n/g, 'a=mid:audio\r\nb=AS:' + bandwidth.audio + '\r\n');
            }

            if (bandwidth.video) {
                sdp = sdp.replace(/a=mid:video\r\n/g, 'a=mid:video\r\nb=AS:' + (isScreen ? bandwidth.screen : bandwidth.video) + '\r\n');
            }

            return sdp;
        }

        // Find the line in sdpLines that starts with |prefix|, and, if specified,
        // contains |substr| (case-insensitive search).
        function findLine(sdpLines, prefix, substr) {
            return findLineInRange(sdpLines, 0, -1, prefix, substr);
        }

        // Find the line in sdpLines[startLine...endLine - 1] that starts with |prefix|
        // and, if specified, contains |substr| (case-insensitive search).
        function findLineInRange(sdpLines, startLine, endLine, prefix, substr) {
            var realEndLine = endLine !== -1 ? endLine : sdpLines.length;
            for (var i = startLine; i < realEndLine; ++i) {
                if (sdpLines[i].indexOf(prefix) === 0) {
                    if (!substr || sdpLines[i].toLowerCase().indexOf(substr.toLowerCase()) !== -1) {
                        return i;
                    }
                }
            }
            return null;
        }

        // Gets the codec payload type from an a=rtpmap:X line.
        function getCodecPayloadType(sdpLine) {
            var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+');
            var result = sdpLine.match(pattern);
            return (result && result.length === 2) ? result[1] : null;
        }

        function setVideoBitrates(sdp, params) {
            params = params || {};
            var xgoogle_min_bitrate = params.min;
            var xgoogle_max_bitrate = params.max;

            var sdpLines = sdp.split('\r\n');

            // VP8
            var vp8Index = findLine(sdpLines, 'a=rtpmap', 'VP8/90000');
            var vp8Payload;
            if (vp8Index) {
                vp8Payload = getCodecPayloadType(sdpLines[vp8Index]);
            }

            if (!vp8Payload) {
                return sdp;
            }

            var rtxIndex = findLine(sdpLines, 'a=rtpmap', 'rtx/90000');
            var rtxPayload;
            if (rtxIndex) {
                rtxPayload = getCodecPayloadType(sdpLines[rtxIndex]);
            }

            if (!rtxIndex) {
                return sdp;
            }

            var rtxFmtpLineIndex = findLine(sdpLines, 'a=fmtp:' + rtxPayload.toString());
            if (rtxFmtpLineIndex !== null) {
                var appendrtxNext = '\r\n';
                appendrtxNext += 'a=fmtp:' + vp8Payload + ' x-google-min-bitrate=' + (xgoogle_min_bitrate || '228') + '; x-google-max-bitrate=' + (xgoogle_max_bitrate || '228');
                sdpLines[rtxFmtpLineIndex] = sdpLines[rtxFmtpLineIndex].concat(appendrtxNext);
                sdp = sdpLines.join('\r\n');
            }

            return sdp;
        }

        function setOpusAttributes(sdp, params) {
            params = params || {};

            var sdpLines = sdp.split('\r\n');

            // Opus
            var opusIndex = findLine(sdpLines, 'a=rtpmap', 'opus/48000');
            var opusPayload;
            if (opusIndex) {
                opusPayload = getCodecPayloadType(sdpLines[opusIndex]);
            }

            if (!opusPayload) {
                return sdp;
            }

            var opusFmtpLineIndex = findLine(sdpLines, 'a=fmtp:' + opusPayload.toString());
            if (opusFmtpLineIndex === null) {
                return sdp;
            }

            var appendOpusNext = '';
            appendOpusNext += '; stereo=' + (typeof params.stereo != 'undefined' ? params.stereo : '1');
            appendOpusNext += '; sprop-stereo=' + (typeof params['sprop-stereo'] != 'undefined' ? params['sprop-stereo'] : '1');

            if (typeof params.maxaveragebitrate != 'undefined') {
                appendOpusNext += '; maxaveragebitrate=' + (params.maxaveragebitrate || 128 * 1024 * 8);
            }

            if (typeof params.maxplaybackrate != 'undefined') {
                appendOpusNext += '; maxplaybackrate=' + (params.maxplaybackrate || 128 * 1024 * 8);
            }

            if (typeof params.cbr != 'undefined') {
                appendOpusNext += '; cbr=' + (typeof params.cbr != 'undefined' ? params.cbr : '1');
            }

            if (typeof params.useinbandfec != 'undefined') {
                appendOpusNext += '; useinbandfec=' + params.useinbandfec;
            }

            if (typeof params.usedtx != 'undefined') {
                appendOpusNext += '; usedtx=' + params.usedtx;
            }

            if (typeof params.maxptime != 'undefined') {
                appendOpusNext += '\r\na=maxptime:' + params.maxptime;
            }

            sdpLines[opusFmtpLineIndex] = sdpLines[opusFmtpLineIndex].concat(appendOpusNext);
            sdp = sdpLines.join('\r\n');
            return sdp;
        }

        return {
            setApplicationSpecificBandwidth: function(sdp, bandwidth, isScreen) {
                return setBAS(sdp, bandwidth, isScreen);
            },
            setVideoBitrates: function(sdp, params) {
                return setVideoBitrates(sdp, params);
            },
            setOpusAttributes: function(sdp, params) {
                return setOpusAttributes(sdp, params);
            }
        };
    })();

Here is how to set advanced Opus bitrate parameters:

    sdp = BandwidthHandler.setOpusAttributes(sdp, {
        'stereo': 0, // to disable stereo (to force mono audio)
        'sprop-stereo': 1,
        'maxaveragebitrate': 500 * 1024 * 8, // 500 kbits
        'maxplaybackrate': 500 * 1024 * 8, // 500 kbits
        'cbr': 0, // disable cbr
        'useinbandfec': 1, // use inband fec
        'usedtx': 1, // use dtx
        'maxptime': 3
    });

Not sure if this helps, but you can limit the video resolution in getUserMedia; see the demo at simpl.info/getusermedia/constraints/.
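As a rough sketch, the capture resolution and frame rate can be capped directly in the getUserMedia constraints (the standard width/height/frameRate form is shown here; the numbers are only illustrative):

    // Minimal sketch: cap capture resolution and frame rate at the source.
    // Lower resolution and frame rate generally mean the encoder needs fewer kbps.
    navigator.mediaDevices.getUserMedia({
        audio: true,
        video: {
            width: { max: 320 },
            height: { max: 240 },
            frameRate: { max: 15 }
        }
    }).then(function(stream) {
        // attach the stream to a <video> element or add its tracks to the peer connection
        document.querySelector('video').srcObject = stream;
    }).catch(function(err) {
        console.error('getUserMedia failed:', err);
    });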

You should also be able to apply bandwidth constraints to the stream (see this demo), but it does not work even in the latest Canary (29.0.1529.3).

The SDP-based approach was discussed on the discuss-webrtc mailing list, which links to WebRTC bug 1846.

WebRTC is designed for peer-to-peer communication; you cannot control the bandwidth of a video call.

In Google Chrome, the video element has these properties:

    webkitVideoDecodedByteCount: 0
    webkitAudioDecodedByteCount: 0

These are useful for knowing how fast the client can decode the video. While the video is playing, you track the delta of these byte counts, which gives you the bytes/second at which the client is processing the video. (SO thread)
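A minimal sketch of that polling approach (webkitVideoDecodedByteCount is a non-standard, Chrome-only property, and the element selector and one-second interval below are just illustrative):

    // Poll a Chrome-specific counter on the <video> element and report
    // decoded video bytes per second. Non-standard; only works where
    // webkitVideoDecodedByteCount is exposed.
    var videoElement = document.querySelector('video'); // assumed remote <video>
    var lastBytes = 0;

    setInterval(function() {
        if (typeof videoElement.webkitVideoDecodedByteCount === 'undefined') {
            return; // property not available in this browser
        }
        var currentBytes = videoElement.webkitVideoDecodedByteCount;
        var bytesPerSecond = currentBytes - lastBytes;
        lastBytes = currentBytes;
        console.log('Decoding ~' + bytesPerSecond + ' bytes/second');
    }, 1000);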

You should use the Network Information API to learn about the available bandwidth (it is still being implemented).
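Where it is exposed, reading it is straightforward; here is a small sketch with feature detection (navigator.connection and its effectiveType/downlink fields are not available in every browser, and the prefixed fallbacks are assumptions for older versions):

    // Feature-detect the (still non-universal) Network Information API and
    // log a rough estimate of the connection quality.
    var connection = navigator.connection || navigator.webkitConnection || navigator.mozConnection;

    if (connection) {
        console.log('Effective connection type:', connection.effectiveType); // e.g. '4g'
        console.log('Estimated downlink (Mbps):', connection.downlink);

        connection.addEventListener('change', function() {
            console.log('Connection changed, new downlink:', connection.downlink);
        });
    } else {
        console.log('Network Information API not available in this browser.');
    }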