From 6e26e7a659fe20a1d961522e7ab0cd81690393ef Mon Sep 17 00:00:00 2001
From: hristoterezov
Date: Fri, 13 Nov 2015 18:23:28 -0600
Subject: [PATCH] Adds device selection support.

---
 JitsiConference.js             |   1 +
 JitsiMeetJS.js                 |   7 +
 doc/API.md                     |  31 +--
 doc/example/example.js         |  31 ++-
 doc/example/index.html         |   5 +
 lib-jitsi-meet.js              | 361 +++++++++++++++++++++++++++------
 modules/RTC/JitsiLocalTrack.js |   7 +-
 modules/RTC/RTC.js             |  20 +-
 modules/RTC/RTCUtils.js        | 152 +++++++++-----
 9 files changed, 473 insertions(+), 142 deletions(-)

diff --git a/JitsiConference.js b/JitsiConference.js
index 528012634..00a643c90 100644
--- a/JitsiConference.js
+++ b/JitsiConference.js
@@ -257,6 +257,7 @@ function setupListeners(conference) {
     });
     conference.rtc.addListener(StreamEventTypes.EVENT_TYPE_LOCAL_ENDED, function (stream) {
         conference.eventEmitter.emit(JitsiConferenceEvents.TRACK_REMOVED, stream);
+        conference.removeTrack(stream);
     });
     conference.rtc.addListener(StreamEventTypes.TRACK_MUTE_CHANGED, function (track) {
         conference.eventEmitter.emit(JitsiConferenceEvents.TRACK_MUTE_CHANGED, track);
diff --git a/JitsiMeetJS.js b/JitsiMeetJS.js
index 1e47474b5..9d5386bea 100644
--- a/JitsiMeetJS.js
+++ b/JitsiMeetJS.js
@@ -31,6 +31,13 @@ var LibJitsiMeet = {
      * Creates the media tracks and returns them trough the callback.
      * @param options Object with properties / settings specifying the tracks which should be created.
      * should be created or some additional configurations about resolution for example.
+     * @param {Array} options.devices the devices that will be requested
+     * @param {string} options.resolution resolution constraints
+     * @param {bool} options.dontCreateJitsiTrack if true, objects with the following structure {stream: the Media Stream,
+     * type: "audio" or "video", videoType: "camera" or "desktop"}
+     * will be returned through the Promise, otherwise JitsiTrack objects will be returned.
+     * @param {string} options.cameraDeviceId
+     * @param {string} options.micDeviceId
      * @returns {Promise.<{Array.}, JitsiConferenceError>} A promise that returns an array of created JitsiTracks if resolved,
      * or a JitsiConferenceError if rejected.
      */
diff --git a/doc/API.md b/doc/API.md
index 4d41e6e1d..9bddf1e52 100644
--- a/doc/API.md
+++ b/doc/API.md
@@ -46,6 +46,20 @@ The ```options``` parameter is JS object with the following properties:
 JitsiMeetJS.setLogLevel(JitsiMeetJS.logLevels.ERROR);
 ```
 
+* ```JitsiMeetJS.createLocalTracks(options)``` - Creates the media tracks and returns them through a ```Promise``` object.
+    - options - JS object with configuration options for the local media tracks. You can change the following properties there:
+        1. devices - array with the devices - "video" and "audio" that will be passed to GUM. If that property is not set, GUM will try to get all available devices.
+        2. resolution - the preferred resolution for the local video.
+        3. cameraDeviceId - the deviceId of the video device that is going to be used
+        4. micDeviceId - the deviceId of the audio device that is going to be used
+
+* ```JitsiMeetJS.enumerateDevices(callback)``` - passes the list of available devices to the callback function. Every device is an object with the following format:
+    - label - the name of the device
+    - kind - "audioinput" or "videoinput"
+    - deviceId - the id of the device
+
+* ```JitsiMeetJS.isDeviceListAvailable()``` - returns true if retrieving the device list is supported and false otherwise.
+
 * ```JitsiMeetJS.events``` - JS object that contains all events used by the API.
You will need that JS object when you try to subscribe for connection or conference events. We have two event types - connection and conference. You can access the events with the following code ```JitsiMeetJS.events..```. For example if you want to use the conference event that is fired when somebody leave conference you can use the following code - ```JitsiMeetJS.events.conference.USER_LEFT```.
@@ -139,11 +153,6 @@ The object represents a conference. We have the following methods to control the
 
 2. leave() - leaves the conference
 
-3. createLocalTracks(options) - Creates the media tracks and returns them trough ```Promise``` object.
-    - options - JS object with configuration options for the local media tracks. You can change the following properties there:
-        1. devices - array with the devices - "video" and "audio" that will be passed to GUM. If that property is not set GUM will try to get all available devices.
-        2. resolution - the prefered resolution for the local video.
-
 4. getLocalTracks() - Returns array with JitsiTrack objects for the local streams.
 
 5. addEventListener(event, listener) - Subscribes the passed listener to the event.
@@ -202,7 +211,10 @@ The object represents a conference. We have the following methods to control the
 16. removeCommandListener(command) - removes the listeners for the specified command
     - command - the name of the command
-
+17. addTrack(track) - Adds a JitsiLocalTrack object to the conference.
+    - track - the JitsiLocalTrack
+18. removeTrack(track) - Removes a JitsiLocalTrack object from the conference.
+    - track - the JitsiLocalTrack
 
 JitsiTrack
 ======
 The object represents single track - video or audio. They can be remote tracks ( from the other participants in the call) or local tracks (from the devices of the local participant).
@@ -229,12 +241,7 @@ Note: This method is implemented only for the local tracks.
 
 Note: This method is implemented only for the local tracks.
 
-7. start() - start sending the track to the other participants in the conference.
-
-Note: This method is implemented only for the local tracks.
-
-8. getId() - returns unique string for the track.
-
+7. getId() - returns unique string for the track.
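Taken together, the new ```createLocalTracks```, ```enumerateDevices```, ```isDeviceListAvailable``` and ```addTrack``` entries above support a simple "pick a camera, then join" flow. A minimal usage sketch (not part of the patch; ```room``` is assumed to be an already-created JitsiConference and ```JitsiMeetJS.init()``` to have been called):

```js
// Sketch: list the available cameras and open local tracks bound to one of them.
if (JitsiMeetJS.isDeviceListAvailable()) {
    JitsiMeetJS.enumerateDevices(function (devices) {
        // Pick the first camera reported by the browser, if any.
        var camera = devices.filter(function (d) {
            return d.kind === "videoinput";
        })[0];

        JitsiMeetJS.createLocalTracks({
            devices: ["audio", "video"],
            resolution: "720",
            cameraDeviceId: camera ? camera.deviceId : undefined
        }).then(function (tracks) {
            // Attach every created JitsiLocalTrack to the conference.
            tracks.forEach(function (track) {
                room.addTrack(track);
            });
        });
    });
}
```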
 
 Getting Started
 
diff --git a/doc/example/example.js b/doc/example/example.js
index 7bdcb9149..b3f70806a 100644
--- a/doc/example/example.js
+++ b/doc/example/example.js
@@ -141,15 +141,34 @@ $(window).bind('unload', unload);
 // JitsiMeetJS.setLogLevel(JitsiMeetJS.logLevels.ERROR);
 JitsiMeetJS.init();
-JitsiMeetJS.createLocalTracks({resolution: "720"}).then(onLocalTracks);
-var connection = new JitsiMeetJS.JitsiConnection(null, null, options);
+JitsiMeetJS.enumerateDevices(function (devices) {
+    for(var i = 0; i < devices.length; i++)
+    {
+        var device = devices[i];
+        if(device.kind === "videoinput")
+            $("#videoDevices").append("<option value='" + device.deviceId + "'>" + device.label + "</option>");
+    }
+});
+
+var connection = null;
 
 var room = null;
 
 var localTracks = [];
 var remoteTracks = {};
 
-connection.addEventListener(JitsiMeetJS.events.connection.CONNECTION_ESTABLISHED, onConnectionSuccess);
-connection.addEventListener(JitsiMeetJS.events.connection.CONNECTION_FAILED, onConnectionFailed);
-connection.addEventListener(JitsiMeetJS.events.connection.CONNECTION_DISCONNECTED, disconnect);
-connection.connect();
+/**
+ * Starts the conference with the selected device
+ */
+function selectDevice() {
+    var videoID = $("#videoDevices").val();
+    JitsiMeetJS.createLocalTracks({resolution: "720", cameraDeviceId: videoID}).then(onLocalTracks);
+    connection = new JitsiMeetJS.JitsiConnection(null, null, options);
+
+    connection.addEventListener(JitsiMeetJS.events.connection.CONNECTION_ESTABLISHED, onConnectionSuccess);
+    connection.addEventListener(JitsiMeetJS.events.connection.CONNECTION_FAILED, onConnectionFailed);
+    connection.addEventListener(JitsiMeetJS.events.connection.CONNECTION_DISCONNECTED, disconnect);
+
+    connection.connect();
+}
diff --git a/doc/example/index.html b/doc/example/index.html
index 71df4a4dc..f9da8d262 100644
--- a/doc/example/index.html
+++ b/doc/example/index.html
@@ -12,6 +12,11 @@
         Change Display Name
+        Select Video device:
+        <select id="videoDevices"></select>
+        <a href="#" onclick="selectDevice()">Select</a>
+
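The example page wires the ```#videoDevices``` select and the Select link to ```selectDevice()``` in example.js. A hedged variation (illustrative only, not part of the patch) that also uses the new ```isDeviceListAvailable()``` check and falls back to the default camera when enumeration is unsupported:

```js
// Illustrative sketch: populate the camera selector only when the browser can
// report devices; otherwise start right away with the default devices.
if (JitsiMeetJS.isDeviceListAvailable()) {
    JitsiMeetJS.enumerateDevices(function (devices) {
        devices.forEach(function (device) {
            if (device.kind === "videoinput") {
                $("#videoDevices").append(
                    $("<option>").val(device.deviceId).text(device.label || "camera"));
            }
        });
    });
} else {
    // No device list available - join with whatever getUserMedia picks.
    selectDevice();
}
```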
diff --git a/lib-jitsi-meet.js b/lib-jitsi-meet.js index 386f5f371..050c4eb9d 100644 --- a/lib-jitsi-meet.js +++ b/lib-jitsi-meet.js @@ -619,15 +619,27 @@ var LibJitsiMeet = { * Creates the media tracks and returns them trough the callback. * @param options Object with properties / settings specifying the tracks which should be created. * should be created or some additional configurations about resolution for example. + * @param {Array} options.devices the devices that will be requested + * @param {string} options.resolution resolution constraints + * @param {bool} options.dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, + * type: "audio" or "video", videoType: "camera" or "desktop"} + * will be returned trough the Promise, otherwise JitsiTrack objects will be returned. + * @param {string} options.cameraDeviceId + * @param {string} options.micDeviceId * @returns {Promise.<{Array.}, JitsiConferenceError>} A promise that returns an array of created JitsiTracks if resolved, * or a JitsiConferenceError if rejected. */ createLocalTracks: function (options) { return RTC.obtainAudioAndVideoPermissions(options || {}); + }, + isDeviceListAvailable: function () { + return RTC.isDeviceListAvailable(); + }, + enumerateDevices: function (callback) { + RTC.enumerateDevices(callback); } }; - //Setups the promise object. window.Promise = window.Promise || require("es6-promise").polyfill(); @@ -1031,9 +1043,10 @@ JitsiLocalTrack.prototype._setMute = function (mute) { } else { var self = this; var RTC = require("./RTCUtils"); - RTC.obtainAudioAndVideoPermissions( - (isAudio ? ["audio"] : ["video"]), - self.resolution, true) + RTC.obtainAudioAndVideoPermissions({ + devices: (isAudio ? ["audio"] : ["video"]), + resolution: self.resolution, + dontCreateJitsiTrack: true}) .then(function (streams) { var stream = null; for(var i = 0; i < streams.length; i++) { @@ -1373,15 +1386,18 @@ function RTC(room, options) { /** * Creates the local MediaStreams. - * @param options object for options (NOTE: currently only list of devices and resolution are supported) - * @param dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, - * type: "audio" or "video", videoType: "camera" or "desktop"} + * @param {Object} [options] optional parameters + * @param {Array} options.devices the devices that will be requested + * @param {string} options.resolution resolution constraints + * @param {bool} options.dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, + * type: "audio" or "video", videoType: "camera" or "desktop"} * will be returned trough the Promise, otherwise JitsiTrack objects will be returned. 
+ * @param {string} options.cameraDeviceId + * @param {string} options.micDeviceId * @returns {*} Promise object that will receive the new JitsiTracks */ -RTC.obtainAudioAndVideoPermissions = function (options, dontCreateJitsiTrack) { - return RTCUtils.obtainAudioAndVideoPermissions( - options.devices, options.resolution, dontCreateJitsiTrack); +RTC.obtainAudioAndVideoPermissions = function (options) { + return RTCUtils.obtainAudioAndVideoPermissions(options); } RTC.prototype.onIncommingCall = function(event) { @@ -1472,11 +1488,10 @@ RTC.getPCConstraints = function () { }; RTC.getUserMediaWithConstraints = function(um, success_callback, - failure_callback, resolution, - bandwidth, fps, desktopStream) + failure_callback, options) { return RTCUtils.getUserMediaWithConstraints(this, um, success_callback, - failure_callback, resolution, bandwidth, fps, desktopStream); + failure_callback, options); }; RTC.attachMediaStream = function (elSelector, stream) { @@ -1491,6 +1506,18 @@ RTC.getVideoSrc = function (element) { return RTCUtils.getVideoSrc(element); }; +RTC.isDeviceListAvailable = function () { + return RTCUtils.isDeviceListAvailable(); +}; + +/** + * Allows to receive list of available cameras/microphones. + * @param {function} callback would receive array of devices as an argument + */ +RTC.enumerateDevices = function (callback) { + RTCUtils.enumerateDevices(callback); +}; + RTC.setVideoSrc = function (element, src) { RTCUtils.setVideoSrc(element, src); }; @@ -1808,7 +1835,7 @@ var eventEmitter = new EventEmitter(); var devices = { audio: true, video: true -}; +} var rtcReady = false; @@ -1858,22 +1885,44 @@ function setResolutionConstraints(constraints, resolution) { constraints.video.mandatory.maxHeight = constraints.video.mandatory.minHeight; } - -function getConstraints(um, resolution, bandwidth, fps, desktopStream) { +/** + * @param {string[]} um required user media types + * + * @param {Object} [options={}] optional parameters + * @param {string} options.resolution + * @param {number} options.bandwidth + * @param {number} options.fps + * @param {string} options.desktopStream + * @param {string} options.cameraDeviceId + * @param {string} options.micDeviceId + * @param {bool} firefox_fake_device + */ +function getConstraints(um, options) { var constraints = {audio: false, video: false}; if (um.indexOf('video') >= 0) { // same behaviour as true constraints.video = { mandatory: {}, optional: [] }; + if (options.cameraDeviceId) { + constraints.video.optional.push({ + sourceId: options.cameraDeviceId + }); + } + constraints.video.optional.push({ googLeakyBucket: true }); - setResolutionConstraints(constraints, resolution); + setResolutionConstraints(constraints, options.resolution); } if (um.indexOf('audio') >= 0) { if (!RTCBrowserType.isFirefox()) { // same behaviour as true constraints.audio = { mandatory: {}, optional: []}; + if (options.micDeviceId) { + constraints.audio.optional.push({ + sourceId: options.micDeviceId + }); + } // if it is good enough for hangouts... 
constraints.audio.optional.push( {googEchoCancellation: true}, @@ -1885,7 +1934,15 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream) { {googAutoGainControl2: true} ); } else { - constraints.audio = true; + if (options.micDeviceId) { + constraints.audio = { + mandatory: {}, + optional: [{ + sourceId: options.micDeviceId + }]}; + } else { + constraints.audio = true; + } } } if (um.indexOf('screen') >= 0) { @@ -1918,7 +1975,7 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream) { constraints.video = { mandatory: { chromeMediaSource: "desktop", - chromeMediaSourceId: desktopStream, + chromeMediaSourceId: options.desktopStream, googLeakyBucket: true, maxWidth: window.screen.width, maxHeight: window.screen.height, @@ -1928,28 +1985,36 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream) { }; } - if (bandwidth) { + if (options.bandwidth) { if (!constraints.video) { //same behaviour as true constraints.video = {mandatory: {}, optional: []}; } - constraints.video.optional.push({bandwidth: bandwidth}); + constraints.video.optional.push({bandwidth: options.bandwidth}); } - if (fps) { + if (options.fps) { // for some cameras it might be necessary to request 30fps // so they choose 30fps mjpg over 10fps yuy2 if (!constraints.video) { // same behaviour as true; constraints.video = {mandatory: {}, optional: []}; } - constraints.video.mandatory.minFrameRate = fps; + constraints.video.mandatory.minFrameRate = options.fps; + } + + // we turn audio for both audio and video tracks, the fake audio & video seems to work + // only when enabled in one getUserMedia call, we cannot get fake audio separate by fake video + // this later can be a problem with some of the tests + if(RTCBrowserType.isFirefox() && options.firefox_fake_device) + { + constraints.audio = true; + constraints.fake = true; } return constraints; } function setAvailableDevices(um, available) { - var devices = {}; if (um.indexOf("video") != -1) { devices.video = available; } @@ -1968,6 +2033,131 @@ function onReady () { eventEmitter.emit(RTCEvents.RTC_READY, true); }; +/** + * Apply function with arguments if function exists. + * Do nothing if function not provided. + * @param {function} [fn] function to apply + * @param {Array} [args=[]] arguments for function + */ +function maybeApply(fn, args) { + if (fn) { + fn.apply(null, args || []); + } +} + +var getUserMediaStatus = { + initialized: false, + callbacks: [] +}; + +/** + * Wrap `getUserMedia` to allow others to know if it was executed at least + * once or not. Wrapper function uses `getUserMediaStatus` object. + * @param {Function} getUserMedia native function + * @returns {Function} wrapped function + */ +function wrapGetUserMedia(getUserMedia) { + return function (constraints, successCallback, errorCallback) { + getUserMedia(constraints, function (stream) { + maybeApply(successCallback, [stream]); + if (!getUserMediaStatus.initialized) { + getUserMediaStatus.initialized = true; + getUserMediaStatus.callbacks.forEach(function (callback) { + callback(); + }); + getUserMediaStatus.callbacks.length = 0; + } + }, function (error) { + maybeApply(errorCallback, [error]); + }); + }; +} + +/** + * Create stub device which equals to auto selected device. 
+ * @param {string} kind if that should be `audio` or `video` device + * @returns {Object} stub device description in `enumerateDevices` format + */ +function createAutoDeviceInfo(kind) { + return { + facing: null, + label: 'Auto', + kind: kind, + deviceId: '', + groupId: null + }; +} + + +/** + * Execute function after getUserMedia was executed at least once. + * @param {Function} callback function to execute after getUserMedia + */ +function afterUserMediaInitialized(callback) { + if (getUserMediaStatus.initialized) { + callback(); + } else { + getUserMediaStatus.callbacks.push(callback); + } +} + +/** + * Wrapper function which makes enumerateDevices to wait + * until someone executes getUserMedia first time. + * @param {Function} enumerateDevices native function + * @returns {Funtion} wrapped function + */ +function wrapEnumerateDevices(enumerateDevices) { + return function (callback) { + // enumerate devices only after initial getUserMedia + afterUserMediaInitialized(function () { + + enumerateDevices().then(function (devices) { + //add auto devices + devices.unshift( + createAutoDeviceInfo('audioinput'), + createAutoDeviceInfo('videoinput') + ); + + callback(devices); + }, function (err) { + console.error('cannot enumerate devices: ', err); + + // return only auto devices + callback([createAutoDeviceInfo('audioInput'), + createAutoDeviceInfo('videoinput')]); + }); + }); + }; +} + +/** + * Use old MediaStreamTrack to get devices list and + * convert it to enumerateDevices format. + * @param {Function} callback function to call when received devices list. + */ +function enumerateDevicesThroughMediaStreamTrack (callback) { + MediaStreamTrack.getSources(function (sources) { + var devices = sources.map(function (source) { + var kind = (source.kind || '').toLowerCase(); + return { + facing: source.facing || null, + label: source.label, + kind: kind ? kind + 'input': null, + deviceId: source.id, + groupId: source.groupId || null + }; + }); + + //add auto devices + devices.unshift( + createAutoDeviceInfo('audioinput'), + createAutoDeviceInfo('videoinput') + ); + callback(devices); + }); +} + //Options parameter is to pass config options. Currently uses only "useIPv6". 
var RTCUtils = { init: function (options) { @@ -1976,7 +2166,10 @@ var RTCUtils = { var FFversion = RTCBrowserType.getFirefoxVersion(); if (FFversion >= 40) { this.peerconnection = mozRTCPeerConnection; - this.getUserMedia = navigator.mozGetUserMedia.bind(navigator); + this.getUserMedia = wrapGetUserMedia(navigator.mozGetUserMedia.bind(navigator)); + this.enumerateDevices = wrapEnumerateDevices( + navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices) + ); this.pc_constraints = {}; this.attachMediaStream = function (element, stream) { // srcObject is being standardized and FF will eventually @@ -2022,7 +2215,16 @@ var RTCUtils = { } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) { this.peerconnection = webkitRTCPeerConnection; - this.getUserMedia = navigator.webkitGetUserMedia.bind(navigator); + var getUserMedia = navigator.webkitGetUserMedia.bind(navigator); + if (navigator.mediaDevices) { + this.getUserMedia = wrapGetUserMedia(getUserMedia); + this.enumerateDevices = wrapEnumerateDevices( + navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices) + ); + } else { + this.getUserMedia = getUserMedia; + this.enumerateDevices = enumerateDevicesThroughMediaStreamTrack; + } this.attachMediaStream = function (element, stream) { element.attr('src', webkitURL.createObjectURL(stream)); }; @@ -2072,6 +2274,7 @@ var RTCUtils = { self.peerconnection = RTCPeerConnection; self.getUserMedia = getUserMedia; + self.enumerateDevices = enumerateDevicesThroughMediaStreamTrack; self.attachMediaStream = function (elSel, stream) { if (stream.id === "dummyAudio" || stream.id === "dummyVideo") { @@ -2126,9 +2329,23 @@ var RTCUtils = { } }, - getUserMediaWithConstraints: function ( um, success_callback, failure_callback, resolution, bandwidth, fps, desktopStream) { + /** + * @param {string[]} um required user media types + * @param {function} success_callback + * @param {Function} failure_callback + * @param {Object} [options] optional parameters + * @param {string} options.resolution + * @param {number} options.bandwidth + * @param {number} options.fps + * @param {string} options.desktopStream + * @param {string} options.cameraDeviceId + * @param {string} options.micDeviceId + **/ + getUserMediaWithConstraints: function ( um, success_callback, failure_callback, options) { + options = options || {}; + resolution = options.resolution; var constraints = getConstraints( - um, resolution, bandwidth, fps, desktopStream); + um, options); logger.info("Get media constraints", constraints); @@ -2157,30 +2374,28 @@ var RTCUtils = { /** * Creates the local MediaStreams. - * @param devices the devices that will be requested - * @param resolution resolution constraints - * @param dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, + * @param {Object} [options] optional parameters + * @param {Array} options.devices the devices that will be requested + * @param {string} options.resolution resolution constraints + * @param {bool} options.dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, * type: "audio" or "video", videoType: "camera" or "desktop"} * will be returned trough the Promise, otherwise JitsiTrack objects will be returned. 
+ * @param {string} options.cameraDeviceId + * @param {string} options.micDeviceId * @returns {*} Promise object that will receive the new JitsiTracks */ - obtainAudioAndVideoPermissions: function (devices, resolution, dontCreateJitsiTracks) { + obtainAudioAndVideoPermissions: function (options) { var self = this; - // Get AV + options = options || {}; return new Promise(function (resolve, reject) { var successCallback = function (stream) { - var streams = self.successCallback(stream, resolution); - resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); + var streams = self.successCallback(stream, options.resolution); + resolve(options.dontCreateJitsiTracks? + streams: self.createLocalTracks(streams)); }; - if (!devices) - devices = ['audio', 'video']; - - if (devices.length === 0) { - successCallback(); - return; - } + options.devices = options.devices || ['audio', 'video']; if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) { @@ -2204,39 +2419,40 @@ var RTCUtils = { function (error, resolution) { logger.error( 'failed to obtain video stream - stop', error); - self.errorCallback(error, resolve, resolution, dontCreateJitsiTracks); + self.errorCallback(error, resolve, options); }, - resolution || '360'); + {resolution: options.resolution || '360', + cameraDeviceId: options.cameraDeviceId}); }; var obtainAudio = function () { self.getUserMediaWithConstraints( ['audio'], function (audioStream) { - if (devices.indexOf('video') !== -1) + (options.devices.indexOf('video') === -1) || obtainVideo(audioStream); }, function (error) { logger.error( 'failed to obtain audio stream - stop', error); - self.errorCallback(error, resolve, null, dontCreateJitsiTracks); - } - ); + self.errorCallback(error, resolve, options); + },{micDeviceId: options.micDeviceId}); }; - if (devices.indexOf('audio') !== -1) { + if((devices.indexOf('audio') === -1)) + obtainVideo(null) + else obtainAudio(); - } else { - obtainVideo(null); - } } else { this.getUserMediaWithConstraints( - devices, + options.devices, function (stream) { successCallback(stream); }, function (error, resolution) { - self.errorCallback(error, resolve, resolution, dontCreateJitsiTracks); + self.errorCallback(error, resolve, options); }, - resolution || '360'); + {resolution: options.resolution || '360', + cameraDeviceId: options.cameraDeviceId, + micDeviceId: options.micDeviceId}); } }.bind(this)); }, @@ -2260,15 +2476,17 @@ var RTCUtils = { * Error callback called from GUM. Retries the GUM call with different resolutions. * @param error the error * @param resolve the resolve funtion that will be called on success. - * @param currentResolution the last resolution used for GUM. + * @param {Object} options with the following properties: + * @param resolution the last resolution used for GUM. * @param dontCreateJitsiTracks if true objects with the following structure {stream: the Media Stream, * type: "audio" or "video", videoType: "camera" or "desktop"} * will be returned trough the Promise, otherwise JitsiTrack objects will be returned. 
*/ - errorCallback: function (error, resolve, currentResolution, dontCreateJitsiTracks) { + errorCallback: function (error, resolve, options) { var self = this; + options = options || {}; logger.error('failed to obtain audio/video stream - trying audio only', error); - var resolution = getPreviousResolution(currentResolution); + var resolution = getPreviousResolution(options.resolution); if (typeof error == "object" && error.constraintName && error.name && (error.name == "ConstraintNotSatisfiedError" || error.name == "OverconstrainedError") && @@ -2278,23 +2496,26 @@ var RTCUtils = { self.getUserMediaWithConstraints(['audio', 'video'], function (stream) { var streams = self.successCallback(stream, resolution); - resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); + resolve(options.dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); }, function (error, resolution) { - return self.errorCallback(error, resolve, resolution, dontCreateJitsiTracks); - }, resolution); + return self.errorCallback(error, resolve, + {resolution: resolution, + dontCreateJitsiTracks: options.dontCreateJitsiTracks}); + }, + {resolution: options.resolution}); } else { self.getUserMediaWithConstraints( ['audio'], function (stream) { var streams = self.successCallback(stream, resolution); - resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); + resolve(options.dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); }, function (error) { logger.error('failed to obtain audio/video stream - stop', error); var streams = self.successCallback(null); - resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); + resolve(options.dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); } ); } @@ -2397,8 +2618,20 @@ var RTCUtils = { eventEmitter.emit(eventType, localStream); } return newStreams; + }, + + /** + * Checks if its possible to enumerate available cameras/micropones. + * @returns {boolean} true if available, false otherwise. + */ + isDeviceListAvailable: function () { + var isEnumerateDevicesAvailable = navigator.mediaDevices && navigator.mediaDevices.enumerateDevices; + if (isEnumerateDevicesAvailable) { + return true; + } + return (MediaStreamTrack && MediaStreamTrack.getSources)? true : false; } -} +}; module.exports = RTCUtils; diff --git a/modules/RTC/JitsiLocalTrack.js b/modules/RTC/JitsiLocalTrack.js index 635396704..2249049f5 100644 --- a/modules/RTC/JitsiLocalTrack.js +++ b/modules/RTC/JitsiLocalTrack.js @@ -66,9 +66,10 @@ JitsiLocalTrack.prototype._setMute = function (mute) { } else { var self = this; var RTC = require("./RTCUtils"); - RTC.obtainAudioAndVideoPermissions( - (isAudio ? ["audio"] : ["video"]), - self.resolution, true) + RTC.obtainAudioAndVideoPermissions({ + devices: (isAudio ? ["audio"] : ["video"]), + resolution: self.resolution, + dontCreateJitsiTrack: true}) .then(function (streams) { var stream = null; for(var i = 0; i < streams.length; i++) { diff --git a/modules/RTC/RTC.js b/modules/RTC/RTC.js index edf5ee8e0..7ace1cc86 100644 --- a/modules/RTC/RTC.js +++ b/modules/RTC/RTC.js @@ -38,15 +38,18 @@ function RTC(room, options) { /** * Creates the local MediaStreams. 
- * @param options object for options (NOTE: currently only list of devices and resolution are supported) - * @param dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, - * type: "audio" or "video", videoType: "camera" or "desktop"} + * @param {Object} [options] optional parameters + * @param {Array} options.devices the devices that will be requested + * @param {string} options.resolution resolution constraints + * @param {bool} options.dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, + * type: "audio" or "video", videoType: "camera" or "desktop"} * will be returned trough the Promise, otherwise JitsiTrack objects will be returned. + * @param {string} options.cameraDeviceId + * @param {string} options.micDeviceId * @returns {*} Promise object that will receive the new JitsiTracks */ -RTC.obtainAudioAndVideoPermissions = function (options, dontCreateJitsiTrack) { - return RTCUtils.obtainAudioAndVideoPermissions( - options.devices, options.resolution, dontCreateJitsiTrack); +RTC.obtainAudioAndVideoPermissions = function (options) { + return RTCUtils.obtainAudioAndVideoPermissions(options); } RTC.prototype.onIncommingCall = function(event) { @@ -137,11 +140,10 @@ RTC.getPCConstraints = function () { }; RTC.getUserMediaWithConstraints = function(um, success_callback, - failure_callback, resolution, - bandwidth, fps, desktopStream) + failure_callback, options) { return RTCUtils.getUserMediaWithConstraints(this, um, success_callback, - failure_callback, resolution, bandwidth, fps, desktopStream); + failure_callback, options); }; RTC.attachMediaStream = function (elSelector, stream) { diff --git a/modules/RTC/RTCUtils.js b/modules/RTC/RTCUtils.js index e5b2162bf..e1954951f 100644 --- a/modules/RTC/RTCUtils.js +++ b/modules/RTC/RTCUtils.js @@ -15,7 +15,7 @@ var eventEmitter = new EventEmitter(); var devices = { audio: true, video: true -}; +} var rtcReady = false; @@ -65,22 +65,44 @@ function setResolutionConstraints(constraints, resolution) { constraints.video.mandatory.maxHeight = constraints.video.mandatory.minHeight; } - -function getConstraints(um, resolution, bandwidth, fps, desktopStream) { +/** + * @param {string[]} um required user media types + * + * @param {Object} [options={}] optional parameters + * @param {string} options.resolution + * @param {number} options.bandwidth + * @param {number} options.fps + * @param {string} options.desktopStream + * @param {string} options.cameraDeviceId + * @param {string} options.micDeviceId + * @param {bool} firefox_fake_device + */ +function getConstraints(um, options) { var constraints = {audio: false, video: false}; if (um.indexOf('video') >= 0) { // same behaviour as true constraints.video = { mandatory: {}, optional: [] }; + if (options.cameraDeviceId) { + constraints.video.optional.push({ + sourceId: options.cameraDeviceId + }); + } + constraints.video.optional.push({ googLeakyBucket: true }); - setResolutionConstraints(constraints, resolution); + setResolutionConstraints(constraints, options.resolution); } if (um.indexOf('audio') >= 0) { if (!RTCBrowserType.isFirefox()) { // same behaviour as true constraints.audio = { mandatory: {}, optional: []}; + if (options.micDeviceId) { + constraints.audio.optional.push({ + sourceId: options.micDeviceId + }); + } // if it is good enough for hangouts... 
constraints.audio.optional.push( {googEchoCancellation: true}, @@ -92,7 +114,15 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream) { {googAutoGainControl2: true} ); } else { - constraints.audio = true; + if (options.micDeviceId) { + constraints.audio = { + mandatory: {}, + optional: [{ + sourceId: options.micDeviceId + }]}; + } else { + constraints.audio = true; + } } } if (um.indexOf('screen') >= 0) { @@ -125,7 +155,7 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream) { constraints.video = { mandatory: { chromeMediaSource: "desktop", - chromeMediaSourceId: desktopStream, + chromeMediaSourceId: options.desktopStream, googLeakyBucket: true, maxWidth: window.screen.width, maxHeight: window.screen.height, @@ -135,28 +165,36 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream) { }; } - if (bandwidth) { + if (options.bandwidth) { if (!constraints.video) { //same behaviour as true constraints.video = {mandatory: {}, optional: []}; } - constraints.video.optional.push({bandwidth: bandwidth}); + constraints.video.optional.push({bandwidth: options.bandwidth}); } - if (fps) { + if (options.fps) { // for some cameras it might be necessary to request 30fps // so they choose 30fps mjpg over 10fps yuy2 if (!constraints.video) { // same behaviour as true; constraints.video = {mandatory: {}, optional: []}; } - constraints.video.mandatory.minFrameRate = fps; + constraints.video.mandatory.minFrameRate = options.fps; + } + + // we turn audio for both audio and video tracks, the fake audio & video seems to work + // only when enabled in one getUserMedia call, we cannot get fake audio separate by fake video + // this later can be a problem with some of the tests + if(RTCBrowserType.isFirefox() && options.firefox_fake_device) + { + constraints.audio = true; + constraints.fake = true; } return constraints; } function setAvailableDevices(um, available) { - var devices = {}; if (um.indexOf("video") != -1) { devices.video = available; } @@ -471,9 +509,23 @@ var RTCUtils = { } }, - getUserMediaWithConstraints: function ( um, success_callback, failure_callback, resolution, bandwidth, fps, desktopStream) { + /** + * @param {string[]} um required user media types + * @param {function} success_callback + * @param {Function} failure_callback + * @param {Object} [options] optional parameters + * @param {string} options.resolution + * @param {number} options.bandwidth + * @param {number} options.fps + * @param {string} options.desktopStream + * @param {string} options.cameraDeviceId + * @param {string} options.micDeviceId + **/ + getUserMediaWithConstraints: function ( um, success_callback, failure_callback, options) { + options = options || {}; + resolution = options.resolution; var constraints = getConstraints( - um, resolution, bandwidth, fps, desktopStream); + um, options); logger.info("Get media constraints", constraints); @@ -502,30 +554,28 @@ var RTCUtils = { /** * Creates the local MediaStreams. 
- * @param devices the devices that will be requested - * @param resolution resolution constraints - * @param dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, + * @param {Object} [options] optional parameters + * @param {Array} options.devices the devices that will be requested + * @param {string} options.resolution resolution constraints + * @param {bool} options.dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, * type: "audio" or "video", videoType: "camera" or "desktop"} * will be returned trough the Promise, otherwise JitsiTrack objects will be returned. + * @param {string} options.cameraDeviceId + * @param {string} options.micDeviceId * @returns {*} Promise object that will receive the new JitsiTracks */ - obtainAudioAndVideoPermissions: function (devices, resolution, dontCreateJitsiTracks) { + obtainAudioAndVideoPermissions: function (options) { var self = this; - // Get AV + options = options || {}; return new Promise(function (resolve, reject) { var successCallback = function (stream) { - var streams = self.successCallback(stream, resolution); - resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); + var streams = self.successCallback(stream, options.resolution); + resolve(options.dontCreateJitsiTracks? + streams: self.createLocalTracks(streams)); }; - if (!devices) - devices = ['audio', 'video']; - - if (devices.length === 0) { - successCallback(); - return; - } + options.devices = options.devices || ['audio', 'video']; if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) { @@ -549,39 +599,40 @@ var RTCUtils = { function (error, resolution) { logger.error( 'failed to obtain video stream - stop', error); - self.errorCallback(error, resolve, resolution, dontCreateJitsiTracks); + self.errorCallback(error, resolve, options); }, - resolution || '360'); + {resolution: options.resolution || '360', + cameraDeviceId: options.cameraDeviceId}); }; var obtainAudio = function () { self.getUserMediaWithConstraints( ['audio'], function (audioStream) { - if (devices.indexOf('video') !== -1) + (options.devices.indexOf('video') === -1) || obtainVideo(audioStream); }, function (error) { logger.error( 'failed to obtain audio stream - stop', error); - self.errorCallback(error, resolve, null, dontCreateJitsiTracks); - } - ); + self.errorCallback(error, resolve, options); + },{micDeviceId: options.micDeviceId}); }; - if (devices.indexOf('audio') !== -1) { + if((devices.indexOf('audio') === -1)) + obtainVideo(null) + else obtainAudio(); - } else { - obtainVideo(null); - } } else { this.getUserMediaWithConstraints( - devices, + options.devices, function (stream) { successCallback(stream); }, function (error, resolution) { - self.errorCallback(error, resolve, resolution, dontCreateJitsiTracks); + self.errorCallback(error, resolve, options); }, - resolution || '360'); + {resolution: options.resolution || '360', + cameraDeviceId: options.cameraDeviceId, + micDeviceId: options.micDeviceId}); } }.bind(this)); }, @@ -605,15 +656,17 @@ var RTCUtils = { * Error callback called from GUM. Retries the GUM call with different resolutions. * @param error the error * @param resolve the resolve funtion that will be called on success. - * @param currentResolution the last resolution used for GUM. + * @param {Object} options with the following properties: + * @param resolution the last resolution used for GUM. 
* @param dontCreateJitsiTracks if true objects with the following structure {stream: the Media Stream, * type: "audio" or "video", videoType: "camera" or "desktop"} * will be returned trough the Promise, otherwise JitsiTrack objects will be returned. */ - errorCallback: function (error, resolve, currentResolution, dontCreateJitsiTracks) { + errorCallback: function (error, resolve, options) { var self = this; + options = options || {}; logger.error('failed to obtain audio/video stream - trying audio only', error); - var resolution = getPreviousResolution(currentResolution); + var resolution = getPreviousResolution(options.resolution); if (typeof error == "object" && error.constraintName && error.name && (error.name == "ConstraintNotSatisfiedError" || error.name == "OverconstrainedError") && @@ -623,23 +676,26 @@ var RTCUtils = { self.getUserMediaWithConstraints(['audio', 'video'], function (stream) { var streams = self.successCallback(stream, resolution); - resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); + resolve(options.dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); }, function (error, resolution) { - return self.errorCallback(error, resolve, resolution, dontCreateJitsiTracks); - }, resolution); + return self.errorCallback(error, resolve, + {resolution: resolution, + dontCreateJitsiTracks: options.dontCreateJitsiTracks}); + }, + {resolution: options.resolution}); } else { self.getUserMediaWithConstraints( ['audio'], function (stream) { var streams = self.successCallback(stream, resolution); - resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); + resolve(options.dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); }, function (error) { logger.error('failed to obtain audio/video stream - stop', error); var streams = self.successCallback(null); - resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); + resolve(options.dontCreateJitsiTracks? streams: self.createLocalTracks(streams)); } ); }
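To make the effect of the new ```cameraDeviceId```/```micDeviceId``` options concrete, here is a rough sketch of the legacy, Chrome-style constraints object that ```getConstraints(['audio', 'video'], options)``` builds when both ids are supplied. The literal ids are placeholders, and the resolution fields plus the remaining goog* audio flags (filled in by the code above) are omitted:

```js
// Approximate shape only - not generated output. The device ids are pushed as
// low-priority "optional" constraints, so an id that no longer exists falls
// back to the default device instead of failing getUserMedia.
var constraints = {
    video: {
        mandatory: { /* min/max width and height derived from options.resolution */ },
        optional: [
            { sourceId: "camera-device-id" },  // from options.cameraDeviceId
            { googLeakyBucket: true }
        ]
    },
    audio: {
        mandatory: {},
        optional: [
            { sourceId: "mic-device-id" },     // from options.micDeviceId
            { googEchoCancellation: true }     // ...plus the other goog* audio flags
        ]
    }
};
```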