WebRTCPedia! the Encyclopedia!

HOME © Muaz Khan . @WebRTCWeb . Github . Latest issues . What's New?

MediaStream.stop is obsolete or removed; how to fix it?

// Resolve the MediaStream constructor, falling back to the prefixed
// webkitMediaStream used by older Chromium builds.
var MediaStream = window.MediaStream;

if (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') {
    MediaStream = webkitMediaStream;
}

// Re-add the removed MediaStream.stop() API: stopping a stream simply
// means stopping each of its tracks.
if (typeof MediaStream !== 'undefined' && !('stop' in MediaStream.prototype)) {
    MediaStream.prototype.stop = function() {
        var tracks = this.getTracks();
        for (var i = 0; i < tracks.length; i++) {
            tracks[i].stop();
        }
    };
}

Now, feel free to use stream.stop:

// Fires when the stream ends (e.g. after the stop() polyfill above runs).
stream.addEventListener('ended', function() {
    alert('Stream is stopped.');
}, false);

// Stops every track via MediaStream.prototype.stop (native or polyfilled).
stream.stop();
via: http://stackoverflow.com/a/11646945/552182

getRemoteStreams alternative?

// Build one MediaStream out of every track the connection is receiving —
// a replacement for the removed peer.getRemoteStreams().
var stream = new MediaStream();
peer.getReceivers().forEach(function(receiver) {
    stream.addTrack(receiver.track);
});
 
video.srcObject = stream;
 
console.log(stream.getTracks()); // check console logs

Or

// Pick the FIRST audio and the FIRST video track the connection receives.
var audioTrack, videoTrack;
peer.getReceivers().forEach(function(receiver) {
    var track = receiver.track;

    if (track.kind === 'audio' && !audioTrack) {
        audioTrack = track;
    }

    if (track.kind === 'video' && !videoTrack) {
        videoTrack = track;
    }
});

// Wrap whichever tracks were found into a fresh stream.
var stream = new MediaStream();

[audioTrack, videoTrack].forEach(function(track) {
    if (track) {
        stream.addTrack(track);
    }
});

video.srcObject = stream;

console.log(stream.getTracks()); // check console logs

Or override "getRemoteStreams" and "getLocalStreams"

// Polyfill getRemoteStreams() on top of the receiver-based API.
if (!peer.getRemoteStreams) {
    peer.getRemoteStreams = function() {
        var stream = new MediaStream();
        peer.getReceivers().forEach(function(receiver) {
            // receiver.track can be null (e.g. inactive/rejected m-line);
            // MediaStream.addTrack throws on null, so guard it.
            if (receiver.track) {
                stream.addTrack(receiver.track);
            }
        });
        return [stream];
    };
}

// Polyfill getLocalStreams() on top of the sender-based API.
if (!peer.getLocalStreams) {
    peer.getLocalStreams = function() {
        var stream = new MediaStream();
        peer.getSenders().forEach(function(sender) {
            // sender.track is null after removeTrack()/replaceTrack(null);
            // addTrack(null) would throw, so skip trackless senders.
            if (sender.track) {
                stream.addTrack(sender.track);
            }
        });
        return [stream];
    };
}

How to detect if screen-sharing stopped or camera stopped (cross browser)?

// "yourScreen" is the MediaStream returned by your screen-capture request.
addStreamStopListener(yourScreen, function() {
    alert('screen sharing is ended.');
});

// Invokes callback exactly once when the stream — or any of its tracks —
// fires 'ended' or 'inactive' (browsers disagree on which one they emit).
function addStreamStopListener(stream, callback) {
    var alreadyFired = false;

    function fireOnce() {
        if (alreadyFired) return;
        alreadyFired = true;
        callback();
    }

    // Listen on the stream itself plus every individual track.
    var targets = [stream].concat(stream.getTracks());
    targets.forEach(function(target) {
        target.addEventListener('ended', fireOnce, false);
        target.addEventListener('inactive', fireOnce, false);
    });
}

Stream end handler using promises:

// Resolves (once) when the stream or any of its tracks ends.
addStreamStopListenerUsingPromises(stream).then(function() {
    alert('screen sharing is ended.');
});

// Promise that settles the first time the stream — or any of its tracks —
// fires 'ended' or 'inactive'; rejects if wiring the listeners throws.
function addStreamStopListenerUsingPromises(stream) {
    return new Promise(function(resolve, reject) {
        try {
            var settled = false;

            var settleOnce = function() {
                if (!settled) {
                    settled = true;
                    resolve();
                }
            };

            // The stream itself plus every individual track.
            [stream].concat(stream.getTracks()).forEach(function(target) {
                target.addEventListener('ended', settleOnce, false);
                target.addEventListener('inactive', settleOnce, false);
            });
        } catch (e) {
            reject(e);
        }
    });
}

How to get mp3 stream?

// Turn a user-selected mp3 file into a live MediaStream, then show the three
// typical consumers: an RTCPeerConnection, a MediaRecorder, or a mixer.
document.querySelector('input[type=file]').onchange = function(event) {
    var file = this.files[0];
    if (file && file.size > 0) {
        getMp3Stream(file, function(mp3Stream) {
            mp3Stream.getTracks().forEach(function(track) {
                rtcPeerConnection.addTrack(track, mp3Stream);
            });

            // or record it
            // (fixed: was "new MediaRecorder(strea)" — undefined variable)
            var recorder = new MediaRecorder(mp3Stream);

            // or merge into microphone (using MultiStreamsMixer.js)
            var audioMixer = new MultiStreamsMixer([microphone, mp3Stream]);
            var mixedStream = audioMixer.getMixedStream();
        });
    }
};

// Decodes an mp3 File/Blob with WebAudio and hands back a live MediaStream
// (plus its duration in milliseconds) via callback(stream, durationMs).
function getMp3Stream(mp3File, callback) {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    var context = new AudioContext();

    // Muted gain node: keeps the graph running without local playback.
    var gainNode = context.createGain();
    gainNode.connect(context.destination);
    gainNode.gain.value = 0; // don't play for self

    var reader = new FileReader();
    reader.onload = function(e) {
        // decodeAudioData provides the PCM data as an AudioBuffer.
        // Pass an error callback too — the original silently swallowed
        // decode failures on corrupt/unsupported files.
        context.decodeAudioData(e.target.result, createSoundSource, function(error) {
            console.error('decodeAudioData failed:', error);
        });
    };
    reader.onerror = function() {
        console.error('FileReader failed:', reader.error);
    };
    reader.readAsArrayBuffer(mp3File);

    function createSoundSource(buffer) {
        var soundSource = context.createBufferSource();
        soundSource.buffer = buffer;
        soundSource.start(0); // was start(0, 0 / 1000) — the 0 offset is implicit
        soundSource.connect(gainNode);

        var destination = context.createMediaStreamDestination();
        soundSource.connect(destination);

        // duration = seconds * 1000 (milliseconds)
        callback(destination.stream, buffer.duration * 1000);
    }
}

How to get mp3 stream using promises?

// Fixed: the original used "await" inside a non-async function (a
// SyntaxError) and invoked the loader twice. The handler is now async and
// the two styles are shown as alternatives.
document.querySelector('input[type=file]').onchange = async function(event) {
    var file = this.files[0];
    if (file && file.size > 0) {
        // Either promise style:
        //   getMp3StreamUsingPromises(file).then(function(mp3Stream) { ... });
        // or async/await style:
        var mp3Stream = await getMp3StreamUsingPromises(file);
    }
};

// Promise flavour of getMp3Stream(): decodes an mp3 File/Blob via WebAudio
// and resolves with a live MediaStream of its audio.
//
// NOTE(review): a promise carries a single value, so the duration that the
// callback version reported as a 2nd argument is not delivered here (the
// original's extra resolve() argument was silently ignored); read
// buffer.duration inside createSoundSource if you need it.
function getMp3StreamUsingPromises(mp3File) {
    return new Promise(function(resolve, reject) {
        try {
            window.AudioContext = window.AudioContext || window.webkitAudioContext;
            var context = new AudioContext();

            // Muted gain node keeps the graph alive without local playback.
            var gainNode = context.createGain();
            gainNode.connect(context.destination);
            gainNode.gain.value = 0; // don't play for self

            function createSoundSource(buffer) {
                var soundSource = context.createBufferSource();
                soundSource.buffer = buffer;
                soundSource.start(0);
                soundSource.connect(gainNode);

                var destination = context.createMediaStreamDestination();
                soundSource.connect(destination);

                resolve(destination.stream);
            }

            var reader = new FileReader();
            reader.onload = function(e) {
                // Reject on corrupt/unsupported files — the original
                // swallowed decode errors silently.
                context.decodeAudioData(e.target.result, createSoundSource, reject);
            };
            reader.onerror = function() {
                reject(reader.error);
            };
            reader.readAsArrayBuffer(mp3File);
        } catch (e) {
            reject(e);
        }
    });
}

How to check if website already has privileges to access camera/microphone?

// link: https://www.webrtc-experiment.com/DetectRTC/checkDevicesSupport.js

// check for microphone/camera support!
// NOTE(review): hasWebcam, hasMicrophone and the two permission flags are
// globals populated by the linked checkDevicesSupport.js script — they are
// not defined in this snippet.
checkDeviceSupport(function() {
    document.write('hasWebCam: ', hasWebcam, '<br>');
    document.write('hasMicrophone: ', hasMicrophone, '<br>');
    document.write('isWebsiteHasWebcamPermissions: ', isWebsiteHasWebcamPermissions, '<br>');
    document.write('isWebsiteHasMicrophonePermissions: ', isWebsiteHasMicrophonePermissions, '<br>');
});
via: http://stackoverflow.com/a/30047627/552182

How to manage audio/video bitrates?

// Link this Library:
// https://www.webrtc-experiment.com/BandwidthHandler.js

// here is how to use it
// Target bitrates in kbit/s; BandwidthHandler rewrites them into the SDP.
var bandwidth = {
    screen: 300, // 300kbits minimum
    audio: 50, // 50kbits  minimum
    video: 256 // 256kbits (both min-max)
};
var isScreenSharing = false;

// Apply session-level bandwidth, then pin video min/max, then tune Opus.
sdp = BandwidthHandler.setApplicationSpecificBandwidth(sdp, bandwidth, isScreenSharing);
sdp = BandwidthHandler.setVideoBitrates(sdp, {
    min: bandwidth.video,
    max: bandwidth.video
});
sdp = BandwidthHandler.setOpusAttributes(sdp);

How to set audio sdp parameters?

// Rewrites the Opus fmtp line in the SDP with the given attributes.
sdp = BandwidthHandler.setOpusAttributes(sdp, {
    'stereo': 0, // to disable stereo (to force mono audio)
    'sprop-stereo': 1, // NOTE(review): 1 still advertises stereo reception; use 0 for full mono
    'maxaveragebitrate': 500 * 1024 * 8, // = 4,096,000 bps — NOTE(review): exceeds the Opus maximum of 510000 bps (RFC 7587); comment said "500 kbits" but the value is ~4 Mbps
    'maxplaybackrate': 500 * 1024 * 8, // NOTE(review): maxplaybackrate is a sample rate in Hz (8000-48000 per RFC 7587), not a bitrate — this value looks wrong; confirm intent
    'cbr': 0, // disable cbr
    'useinbandfec': 1, // use inband fec
    'usedtx': 1, // use dtx
    'maxptime': 3 // milliseconds of audio per packet (3 is the spec minimum)
});
via: http://stackoverflow.com/a/16868123/552182

How to fix Chrome camera/microphone failures?

# open this page
chrome://settings/content#media-stream-mic
via: http://stackoverflow.com/a/14617402/552182

How to detect local or remote stream?

// chrome 48+
// NOTE(review): MediaStreamTrack.remote was an experimental Chrome property
// and has since been removed from the spec; on modern browsers compare the
// track against peer.getReceivers()[..].track instead.
var isRemoteAudioStream = false;
var isRemoteVideoStream = false;
stream.getTracks().forEach(function(track) {
    if (track.remote === true && track.kind === 'audio') {
        isRemoteAudioStream = true;
    }

    if (track.remote === true && track.kind === 'video') {
        isRemoteVideoStream = true;
    }
});

alert('Remote audio stream? ' + isRemoteAudioStream);
alert('Remote video stream? ' + isRemoteVideoStream);

How to capture audio+screen in a single getUserMedia request?

// firefox 38+
// NOTE(review): navigator.mozGetUserMedia and the "mediaSource" constraint
// are legacy Firefox APIs; modern browsers capture screens with
// navigator.mediaDevices.getDisplayMedia() instead.
var screen = {
    mediaSource: 'monitor' // monitor or window
};

var constraints = {
    video: screen,
    audio: true // microphone is captured in the same request
};

navigator.mozGetUserMedia(constraints, successCallback, failureCallback);

How to display HTMLVideoElement poster?

// The poster image is only shown while the element has no playing source,
// so clear the source and halt playback first.
// NOTE(review): assigning null coerces the src to the string "null"; on
// modern browsers prefer removeAttribute('src') followed by load().
htmlVideoElement.src = null;
htmlVideoElement.pause();

// above two lines are mandatory
htmlVideoElement.poster = '/muted.png';

Before sending data over RTC-data-channels

// define below snippet as soon as you invoked "peer.createDataChannel" method

// first step: take a reference to original "send" method
channel.internalSend = channel.send;

// define your own "send" wrapper: queues the message with a 1-second retry
// until the channel opens, and drops it once the connection is dead.
channel.send = function(data) {
    // check for "readyState==open"
    if (channel.readyState !== 'open') {
        // make sure that peer is NOT closed
        if (peer.iceConnectionState.search(/disconnected|closed|failed/gi) !== -1) {
            return;
        }

        // retry after 1-second
        // (fixed: was "channels.send(data)" — a typo that threw a
        // ReferenceError instead of retrying)
        setTimeout(function() {
            channel.send(data);
        }, 1000);

        return;
    }

    // send data using real data-channel object
    channel.internalSend(data);
};

Modify streams without "remaking" getUserMedia request

// supported only in firefox 43+

// NOTE(review): the property name "yourOrigianlStream" is misspelled in the
// original page; kept as-is since it is only a placeholder global.
var originalStream = window.yourOrigianlStream;

// Shared helper: apply the given constraints to every video track of the
// original stream (getVideoTracks() replaces the manual kind === 'video'
// filtering the original repeated three times).
function applyVideoConstraints(videoConstraints) {
    originalStream.getVideoTracks().forEach(function(track) {
        track.applyConstraints(videoConstraints);
    });
}

// change from 360p to 720p
function changeVideoStreamTo720p() {
    applyVideoConstraints({
        width: { min: 1280 },
        height: { min: 720 }
    });
}

// switch to the front-facing (selfie) camera
function showFrontCamera() {
    applyVideoConstraints({
        facingMode: { exact: 'user' }
    });
}

// switch to the rear-facing camera
function showRearCamera() {
    applyVideoConstraints({
        facingMode: { exact: 'environment' }
    });
}

Capture Rear or Front Camera

// supported only in firefox 43+

#1 capture rear camera
// "exact" makes getUserMedia fail rather than fall back to another camera.
var videoConstraints = {
    facingMode: { exact: 'environment' } // rear-facing camera
};
navigator.mozGetUserMedia({ video: videoConstraints }, onSuccessCallback, onFailureCallback);

#2 capture front camera
// "exact" makes getUserMedia fail rather than fall back to another camera.
var videoConstraints = {
    facingMode: { exact: 'user' } // front-facing (selfie) camera
};
navigator.mozGetUserMedia({ video: videoConstraints }, onSuccessCallback, onFailureCallback);

Select Secondary (2nd) Camera

// Captures the SECOND video input device reported by checkDeviceSupport.js
// (videoInputDevices is a global that script populates).
function selectSecondaryCamera() {
    // link: https://www.webrtc-experiment.com/DetectRTC/checkDeviceSupport.js

    // LIVE Demo for this function
    // https://jsfiddle.net/cf90az9q/

    checkDeviceSupport(function() {
        var secondDevice = videoInputDevices[1];
        if(!secondDevice) return alert('Secondary webcam is NOT available.');

        // standard constraint shape
        var videoConstraints = {
            deviceId: secondDevice.deviceId
        };

        // legacy Chrome needs the mandatory/optional shape with sourceId
        if(!!navigator.webkitGetUserMedia) {
            videoConstraints = {
                mandatory: {},
                optional: [{
                    sourceId: secondDevice.deviceId
                }]
            };
        }

        // Fixed: prefer an already-present navigator.getUserMedia; the
        // original unconditionally overwrote it (possibly with undefined
        // when neither prefixed API exists).
        navigator.getUserMedia = navigator.getUserMedia || navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
        navigator.getUserMedia({ video: videoConstraints }, function(stream) {
            // use the captured stream here
        }, function(error) {
            alert(JSON.stringify(error));
        });
    });
}
  1. Maximum peer connections limit is 256.
  2. Opus codec minimum bandwidth is 6kbit/s
  3. Opus codec maximum bandwidth is 510kbits/s
  4. Vp8 codec minimum bandwidth is 100kbits/s
  5. Vp8 codec maximum bandwidth is 2000+ kbits/s
    1. 720p at 30 FPS causes 1.0-to-2.0 Mbps bandwidth usage
    2. 360p at 30 FPS causes 0.5-to-1.0 Mbps bandwidth usage
    3. 180p at 30 FPS causes 0.1-to-0.5 Mbps bandwidth usage
  6. Maximum bandwidth used by each RTP port is 1MB.
  7. Only one media source i.e. "APM" is permitted.
  8. WebRTC currently uses UDP for RTP transmission.
  9. Maximum video bitrate on chrome is about 2Mb/s (i.e. 2000kbits/s).
  10. Minimum video bitrate on chrome is .05Mb/s (i.e. 50kbits/s).
  11. Starting video bitrate on chrome is .3Mb/s (i.e. 300kbits/s).
  12. Each RTP port is using 1 MB bandwidth. It means that 4 MB bandwidth is acquired by each peer.
  13. Maximum number of external video codecs that can be used on Chrome is 8.
  14. Maximum simulcast streams limit is 4.
  15. In a peer-to-server connection, you can capture DTLS/SRTP (i.e. RTP/RTCP) packets as a binary stream.
  16. "peer.removeStream" Removes a stream from the PeerConnection. If the stream parameter is absent, removes the stream that was most recently added to the PeerConnection.
  17. Opus uses both mono and stereo codecs. Mono bitrate for opus on chrome is 32000 and stereo bitrate is 64000.
  18. According to draft "draft-spittka-payload-rtp-opus-03", "Opus bitrate should be in the range between 6000 and 510000", that's why opus min bitrate on chrome is 6000 and max bitrate is 510000.
  19. SCTP packet max size is 1280.
  20. Data max bandwidth is 30720 bps.
  21. You can set following resolutions (min/max width/height):
    1. 1920:1080
    2. 1280:720
    3. 960:720
    4. 640:360
    5. 640:480
    6. 320:240
    7. 320:180
    Maybe:
    1. 1280:800
    2. 1280:720
    3. 960:600
    4. 960:540
    5. 640:400
    6. 640:360
    7. 640:480
    8. 480:300
    9. 480:270
    10. 480:360
    11. 320:200
    12. 320:180
    13. 320:240
    14. 240:150
    15. 240:135
    16. 240:180
    17. 160:100
    18. 160:90
    19. 160:120
  22. Following buffer-sizes are allowed (used in RecordRTC):
    1. 256
    2. 512
    3. 1024
    4. 2048
    5. 4096
    6. 8192
    7. 16384
  23. SampleRate must be in the range 22050 to 96000 (used in RecordRTC).
  24. Possible media m-lines:
    1. m=audio
    2. m=video
    3. m=application
    4. m=data
    5. m=control
    6. m=radius
    7. m=tacacs
    8. m=diameter
    9. m=NAS_L2TP
    10. m=NAS_LOGIN
    11. m=NAS_NONE
    12. m=image

Suggestions

  1. If you're a newcomer or beginner, you're advised to try the RTCMultiConnection.js or DataChannel.js libraries.

Latest Updates

Feedback

Enter your email too; if you want "direct" reply!