From 6de7caa9ce6547af0873d1c383da74a864016c46 Mon Sep 17 00:00:00 2001
From: isymchych
Date: Tue, 8 Dec 2015 16:32:01 +0200
Subject: [PATCH] fix issues in RTCUtils

---
 lib-jitsi-meet.js       | 37 +++++++++++++++++++++----------------
 modules/RTC/RTCUtils.js | 37 +++++++++++++++++++++----------------
 2 files changed, 42 insertions(+), 32 deletions(-)

diff --git a/lib-jitsi-meet.js b/lib-jitsi-meet.js
index e1da6a8aa..7919352d8 100644
--- a/lib-jitsi-meet.js
+++ b/lib-jitsi-meet.js
@@ -2146,7 +2146,12 @@ isAndroid = navigator.userAgent.indexOf('Android') != -1;
 module.exports = RTCBrowserType;
 },{}],18:[function(require,module,exports){
 (function (__filename){
-/* global config, require, attachMediaStream, getUserMedia */
+/* global config, require, attachMediaStream, getUserMedia,
+    RTCPeerConnection, RTCSessionDescription, RTCIceCandidate, MediaStreamTrack,
+    mozRTCPeerConnection, mozRTCSessionDescription, mozRTCIceCandidate,
+    webkitRTCPeerConnection, webkitMediaStream, webkitURL
+*/
+/* jshint -W101 */
 
 var logger = require("jitsi-meet-logger").getLogger(__filename);
 var RTCBrowserType = require("./RTCBrowserType");
@@ -2163,7 +2168,7 @@ var eventEmitter = new EventEmitter();
 var devices = {
     audio: true,
     video: true
-}
+};
 
 var rtcReady = false;
 
@@ -2343,7 +2348,7 @@ function onReady (options, GUM) {
     rtcReady = true;
     eventEmitter.emit(RTCEvents.RTC_READY, true);
     screenObtainer.init(eventEmitter, options, GUM);
-};
+}
 
 /**
  * Apply function with arguments if function exists.
@@ -2471,8 +2476,8 @@ function enumerateDevicesThroughMediaStreamTrack (callback) {
 }
 
 function obtainDevices(options) {
-    if(!options.devices || options.devices.length === 0) {
-        return options.successCallback(streams);
+    if (!options.devices || options.devices.length === 0) {
+        return options.successCallback(options.streams);
     }
 
     var device = options.devices.splice(0, 1);
@@ -2512,8 +2517,8 @@ function handleLocalStream(streams, resolution) {
             var videoTracks = audioVideo.getVideoTracks();
             if(videoTracks.length) {
                 videoStream = new webkitMediaStream();
-                for (i = 0; i < videoTracks.length; i++) {
-                    videoStream.addTrack(videoTracks[i]);
+                for (var j = 0; j < videoTracks.length; j++) {
+                    videoStream.addTrack(videoTracks[j]);
                 }
             }
         }
@@ -2652,7 +2657,7 @@ var RTCUtils = {
 
             //AdapterJS.WebRTCPlugin.setLogLevel(
             //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
-            
+
             var self = this;
             AdapterJS.webRTCReady(function (isPlugin) {
                 self.peerconnection = RTCPeerConnection;
@@ -2710,7 +2715,7 @@ var RTCUtils = {
 
             // Call onReady() if Temasys plugin is not used
             if (!RTCBrowserType.isTemasysPluginUsed()) {
-                onReady(options, self.getUserMediaWithConstraints);
+                onReady(options, this.getUserMediaWithConstraints);
                 resolve();
             }
         }.bind(this));
@@ -2729,9 +2734,8 @@ var RTCUtils = {
      **/
     getUserMediaWithConstraints: function ( um, success_callback, failure_callback, options) {
         options = options || {};
-        resolution = options.resolution;
-        var constraints = getConstraints(
-            um, options);
+        var resolution = options.resolution;
+        var constraints = getConstraints(um, options);
 
         logger.info("Get media constraints", constraints);
 
@@ -2788,12 +2792,12 @@ var RTCUtils = {
                 RTCBrowserType.isTemasysPluginUsed()) {
                 var GUM = function (device, s, e) {
                     this.getUserMediaWithConstraints(device, s, e, options);
-                }
+                };
                 var deviceGUM = {
                     "audio": GUM.bind(self, ["audio"]),
                     "video": GUM.bind(self, ["video"]),
                     "desktop": screenObtainer.obtainStream
-                }
+                };
                 // With FF/IE we can't split the stream into audio and video because FF
                 // doesn't support media stream constructors. So, we need to get the
                 // audio stream separately from the video stream using two distinct GUM
@@ -2804,13 +2808,14 @@ var RTCUtils = {
                 // calls. Not very user friendly :-( Note that we pack those 2 streams
                 // in the same object and pass it to
                 // the successCallback method.
                 obtainDevices({
                     devices: options.devices,
+                    streams: [],
                     successCallback: successCallback,
                     errorCallback: reject,
                     deviceGUM: deviceGUM
                 });
             } else {
-                var hasDesktop = false;
-                if(hasDesktop = options.devices.indexOf("desktop") !== -1) {
+                var hasDesktop = options.devices.indexOf('desktop') > -1;
+                if (hasDesktop) {
                     options.devices.splice(options.devices.indexOf("desktop"), 1);
                 }
                 options.resolution = options.resolution || '360';
diff --git a/modules/RTC/RTCUtils.js b/modules/RTC/RTCUtils.js
index 41d11539e..9a3907db8 100644
--- a/modules/RTC/RTCUtils.js
+++ b/modules/RTC/RTCUtils.js
@@ -1,4 +1,9 @@
-/* global config, require, attachMediaStream, getUserMedia */
+/* global config, require, attachMediaStream, getUserMedia,
+    RTCPeerConnection, RTCSessionDescription, RTCIceCandidate, MediaStreamTrack,
+    mozRTCPeerConnection, mozRTCSessionDescription, mozRTCIceCandidate,
+    webkitRTCPeerConnection, webkitMediaStream, webkitURL
+*/
+/* jshint -W101 */
 
 var logger = require("jitsi-meet-logger").getLogger(__filename);
 var RTCBrowserType = require("./RTCBrowserType");
@@ -15,7 +20,7 @@ var eventEmitter = new EventEmitter();
 var devices = {
     audio: true,
     video: true
-}
+};
 
 var rtcReady = false;
 
@@ -195,7 +200,7 @@ function onReady (options, GUM) {
     rtcReady = true;
     eventEmitter.emit(RTCEvents.RTC_READY, true);
     screenObtainer.init(eventEmitter, options, GUM);
-};
+}
 
 /**
  * Apply function with arguments if function exists.
@@ -323,8 +328,8 @@ function enumerateDevicesThroughMediaStreamTrack (callback) {
 }
 
 function obtainDevices(options) {
-    if(!options.devices || options.devices.length === 0) {
-        return options.successCallback(streams);
+    if (!options.devices || options.devices.length === 0) {
+        return options.successCallback(options.streams);
     }
 
     var device = options.devices.splice(0, 1);
@@ -364,8 +369,8 @@ function handleLocalStream(streams, resolution) {
             var videoTracks = audioVideo.getVideoTracks();
             if(videoTracks.length) {
                 videoStream = new webkitMediaStream();
-                for (i = 0; i < videoTracks.length; i++) {
-                    videoStream.addTrack(videoTracks[i]);
+                for (var j = 0; j < videoTracks.length; j++) {
+                    videoStream.addTrack(videoTracks[j]);
                 }
             }
         }
@@ -504,7 +509,7 @@ var RTCUtils = {
 
             //AdapterJS.WebRTCPlugin.setLogLevel(
             //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
-            
+
             var self = this;
             AdapterJS.webRTCReady(function (isPlugin) {
                 self.peerconnection = RTCPeerConnection;
@@ -562,7 +567,7 @@ var RTCUtils = {
 
             // Call onReady() if Temasys plugin is not used
             if (!RTCBrowserType.isTemasysPluginUsed()) {
-                onReady(options, self.getUserMediaWithConstraints);
+                onReady(options, this.getUserMediaWithConstraints);
                 resolve();
             }
         }.bind(this));
@@ -581,9 +586,8 @@ var RTCUtils = {
      **/
     getUserMediaWithConstraints: function ( um, success_callback, failure_callback, options) {
         options = options || {};
-        resolution = options.resolution;
-        var constraints = getConstraints(
-            um, options);
+        var resolution = options.resolution;
+        var constraints = getConstraints(um, options);
 
         logger.info("Get media constraints", constraints);
 
@@ -640,12 +644,12 @@ var RTCUtils = {
                 RTCBrowserType.isTemasysPluginUsed()) {
                 var GUM = function (device, s, e) {
                     this.getUserMediaWithConstraints(device, s, e, options);
-                }
+                };
                 var deviceGUM = {
                     "audio": GUM.bind(self, ["audio"]),
                     "video": GUM.bind(self, ["video"]),
                     "desktop": screenObtainer.obtainStream
-                }
+                };
                 // With FF/IE we can't split the stream into audio and video because FF
                 // doesn't support media stream constructors. So, we need to get the
                 // audio stream separately from the video stream using two distinct GUM
@@ -656,13 +660,14 @@ var RTCUtils = {
                 // calls. Not very user friendly :-( Note that we pack those 2 streams
                 // in the same object and pass it to
                 // the successCallback method.
                 obtainDevices({
                     devices: options.devices,
+                    streams: [],
                     successCallback: successCallback,
                     errorCallback: reject,
                     deviceGUM: deviceGUM
                 });
             } else {
-                var hasDesktop = false;
-                if(hasDesktop = options.devices.indexOf("desktop") !== -1) {
+                var hasDesktop = options.devices.indexOf('desktop') > -1;
+                if (hasDesktop) {
                     options.devices.splice(options.devices.indexOf("desktop"), 1);
                 }
                 options.resolution = options.resolution || '360';