diff --git a/app.js b/app.js
index 375a266fd..eb10d1340 100644
--- a/app.js
+++ b/app.js
@@ -1,23 +1,7 @@
 /* jshint -W117 */
 /* application specific logic */
 var nickname = null;
-var focusMucJid = null;
 var ssrc2jid = {};
-//TODO: this array must be removed when firefox implement multistream support
-var notReceivedSSRCs = [];
-
-var jid2Ssrc = {};
-
-/**
- * Indicates whether ssrc is camera video or desktop stream.
- * FIXME: remove those maps
- */
-var ssrc2videoType = {};
-/**
- * Currently focused video "src"(displayed in large video).
- * @type {String}
- */
-var focusedVideoInfo = null;
 
 
 function init() {
diff --git a/index.html b/index.html
index df7748e0a..de00b219e 100644
--- a/index.html
+++ b/index.html
@@ -27,13 +27,13 @@
-
-
-
+
+
+
-
-
+
+
diff --git a/libs/modules/RTC.bundle.js b/libs/modules/RTC.bundle.js
index 0a9992545..e25968708 100644
--- a/libs/modules/RTC.bundle.js
+++ b/libs/modules/RTC.bundle.js
@@ -240,11 +240,12 @@ module.exports = DataChannels;
 },{}],2:[function(require,module,exports){
 //var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
 
-function LocalStream(stream, type, eventEmitter)
+function LocalStream(stream, type, eventEmitter, videoType)
 {
     this.stream = stream;
     this.eventEmitter = eventEmitter;
     this.type = type;
+    this.videoType = videoType;
     var self = this;
     if(type == "audio")
     {
@@ -359,6 +360,7 @@ function MediaStream(data, sid, ssrc, browser) {
     this.ssrc = ssrc;
     this.type = (this.stream.getVideoTracks().length > 0)?
         MediaStreamType.VIDEO_TYPE : MediaStreamType.AUDIO_TYPE;
+    this.videoType = null;
     this.muted = false;
     if(browser == RTCBrowserType.RTC_BROWSER_FIREFOX)
     {
@@ -493,6 +495,20 @@ var RTC = {
             function (stream, isUsingScreenStream, callback) {
                 self.changeLocalVideo(stream, isUsingScreenStream, callback);
             }, DesktopSharingEventTypes.NEW_STREAM_CREATED);
+        xmpp.addListener(XMPPEvents.CHANGED_STREAMS, function (jid, changedStreams) {
+            for(var i = 0; i < changedStreams.length; i++) {
+                var type = changedStreams[i].type;
+                if (type != "audio") {
+                    var peerStreams = self.remoteStreams[jid];
+                    if(!peerStreams)
+                        continue;
+                    var videoStream = peerStreams[MediaStreamType.VIDEO_TYPE];
+                    if(!videoStream)
+                        continue;
+                    videoStream.videoType = changedStreams[i].type;
+                }
+            }
+        })
         this.rtcUtils = new RTCUtils(this);
         this.rtcUtils.obtainAudioAndVideoPermissions();
     },
@@ -529,13 +545,39 @@ var RTC = {
     },
     changeLocalVideo: function (stream, isUsingScreenStream, callback) {
         var oldStream = this.localVideo.getOriginalStream();
-        var type = (isUsingScreenStream? "desktop" : "video");
-        RTC.localVideo = this.createLocalStream(stream, type, true);
+        var type = (isUsingScreenStream? "screen" : "video");
+        RTC.localVideo = this.createLocalStream(stream, "video", true, type);
         // Stop the stream to trigger onended event for old stream
         oldStream.stop();
         xmpp.switchStreams(stream, oldStream,callback);
-    }
+    },
+    /**
+     * Checks if video identified by given src is desktop stream.
+     * @param videoSrc eg.
+     *      blob:https%3A//pawel.jitsi.net/9a46e0bd-131e-4d18-9c14-a9264e8db395
+     * @returns {boolean}
+     */
+    isVideoSrcDesktop: function (jid) {
+        if(!jid)
+            return false;
+        var isDesktop = false;
+        var stream = null;
+        if (xmpp.myJid() &&
+            xmpp.myResource() === jid) {
+            // local video
+            stream = this.localVideo;
+        } else {
+            var peerStreams = this.remoteStreams[jid];
+            if(!peerStreams)
+                return false;
+            stream = peerStreams[MediaStreamType.VIDEO_TYPE];
+        }
+        if(stream)
+            isDesktop = (stream.videoType === "screen");
+
+        return isDesktop;
+    }
 };
 
 module.exports = RTC;
 
diff --git a/libs/modules/UI.bundle.js b/libs/modules/UI.bundle.js
index ed15d086d..480ac4817 100644
--- a/libs/modules/UI.bundle.js
+++ b/libs/modules/UI.bundle.js
@@ -59,9 +59,6 @@ function streamHandler(stream) {
         case "stream":
             VideoLayout.changeLocalStream(stream);
             break;
-        case "desktop":
-            VideoLayout.changeLocalVideo(stream);
-            break;
     }
 }
 
@@ -355,12 +352,7 @@ UI.onMucLeft = function (jid) {
             }
         }, 10);
 
-    // Unlock large video
-    if (focusedVideoInfo && focusedVideoInfo.jid === jid)
-    {
-        console.info("Focused video owner has left the conference");
-        focusedVideoInfo = null;
-    }
+    VideoLayout.participantLeft(jid);
 
 };
 
@@ -4402,6 +4394,11 @@ var largeVideoState = {
     updateInProgress: false,
     newSrc: ''
 };
+/**
+ * Currently focused video "src"(displayed in large video).
+ * @type {String}
+ */
+var focusedVideoInfo = null;
 
 /**
  * Indicates if we have muted our audio before the conference has started.
@@ -4479,15 +4476,6 @@ function waitForRemoteVideo(selector, ssrc, stream, jid) {
     if (selector[0].currentTime > 0) {
         var videoStream = simulcast.getReceivingVideoStream(stream);
         RTC.attachMediaStream(selector, videoStream); // FIXME: why do i have to do this for FF?
-
-        // FIXME: add a class that will associate peer Jid, video.src, it's ssrc and video type
-        // in order to get rid of too many maps
-        if (ssrc && jid) {
-            jid2Ssrc[Strophe.getResourceFromJid(jid)] = ssrc;
-        } else {
-            console.warn("No ssrc given for jid", jid);
-        }
-
         videoactive(selector);
     } else {
         setTimeout(function () {
@@ -4879,43 +4867,6 @@ function createModeratorIndicatorElement(parentElement) {
 }
 
 
-/**
- * Checks if video identified by given src is desktop stream.
- * @param videoSrc eg.
- *      blob:https%3A//pawel.jitsi.net/9a46e0bd-131e-4d18-9c14-a9264e8db395
- * @returns {boolean}
- */
-function isVideoSrcDesktop(jid) {
-    // FIXME: fix this mapping mess...
-    // figure out if large video is desktop stream or just a camera
-
-    if(!jid)
-        return false;
-    var isDesktop = false;
-    if (xmpp.myJid() &&
-        xmpp.myResource() === jid) {
-        // local video
-        isDesktop = desktopsharing.isUsingScreenStream();
-    } else {
-        // Do we have associations...
-        var videoSsrc = jid2Ssrc[jid];
-        if (videoSsrc) {
-            var videoType = ssrc2videoType[videoSsrc];
-            if (videoType) {
-                // Finally there...
-                isDesktop = videoType === 'screen';
-            } else {
-                console.error("No video type for ssrc: " + videoSsrc);
-            }
-        } else {
-            console.error("No ssrc for jid: " + jid);
-        }
-    }
-    return isDesktop;
-}
-
-
-
 
 var VideoLayout = (function (my) {
     my.connectionIndicators = {};
@@ -4958,7 +4909,7 @@ var VideoLayout = (function (my) {
 
     my.changeLocalVideo = function(stream) {
         var flipX = true;
-        if(stream.type == "desktop")
+        if(stream.videoType == "screen")
            flipX = false;
        var localVideo = document.createElement('video');
        localVideo.id = 'localVideo_' +
@@ -5141,11 +5092,8 @@ var VideoLayout = (function (my) {
        largeVideoState.newSrc = newSrc;
 
        largeVideoState.isVisible = $('#largeVideo').is(':visible');
-        largeVideoState.isDesktop = isVideoSrcDesktop(resourceJid);
-        if(jid2Ssrc[largeVideoState.userResourceJid] ||
-            (xmpp.myResource() &&
-                largeVideoState.userResourceJid ===
-                    xmpp.myResource())) {
+        largeVideoState.isDesktop = RTC.isVideoSrcDesktop(resourceJid);
+        if(largeVideoState.userResourceJid) {
            largeVideoState.oldResourceJid = largeVideoState.userResourceJid;
        } else {
            largeVideoState.oldResourceJid = null;
@@ -6520,7 +6468,6 @@ var VideoLayout = (function (my) {
        }
 
        var jid = ssrc2jid[primarySSRC];
-        jid2Ssrc[jid] = primarySSRC;
 
        if (updateLargeVideo) {
            VideoLayout.updateLargeVideo(RTC.getVideoSrc(selRemoteVideo[0]), null,
@@ -6618,6 +6565,15 @@ var VideoLayout = (function (my) {
        }
    };
 
+    my.participantLeft = function (jid) {
+        // Unlock large video
+        if (focusedVideoInfo && focusedVideoInfo.jid === jid)
+        {
+            console.info("Focused video owner has left the conference");
+            focusedVideoInfo = null;
+        }
+    }
+
    return my;
 }(VideoLayout || {}));
 
diff --git a/libs/modules/statistics.bundle.js b/libs/modules/statistics.bundle.js
index b34f70565..c768c813d 100644
--- a/libs/modules/statistics.bundle.js
+++ b/libs/modules/statistics.bundle.js
@@ -130,7 +130,7 @@ LocalStatsCollector.prototype.stop = function () {
 module.exports = LocalStatsCollector;
 
 },{}],2:[function(require,module,exports){
-/* global focusMucJid, ssrc2jid */
+/* global ssrc2jid */
 /* jshint -W117 */
 /**
  * Calculates packet lost percent using the number of lost packets and the
diff --git a/libs/modules/xmpp.bundle.js b/libs/modules/xmpp.bundle.js
index 8c478a641..0ee935d3b 100644
--- a/libs/modules/xmpp.bundle.js
+++ b/libs/modules/xmpp.bundle.js
@@ -54,6 +54,9 @@ function JingleSession(me, sid, connection, service) {
     this.videoMuteByUser = false;
 }
 
+//TODO: this array must be removed when firefox implement multistream support
+JingleSession.notReceivedSSRCs = [];
+
 JingleSession.prototype.initiate = function (peerjid, isInitiator) {
     var self = this;
     if (this.state !== null) {
@@ -1355,8 +1358,8 @@ JingleSession.prototype.remoteStreamAdded = function (data) {
     //TODO: this code should be removed when firefox implement multistream support
     if(RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_FIREFOX)
     {
-        if((notReceivedSSRCs.length == 0) ||
-            !ssrc2jid[notReceivedSSRCs[notReceivedSSRCs.length - 1]])
+        if((JingleSession.notReceivedSSRCs.length == 0) ||
+            !ssrc2jid[JingleSession.notReceivedSSRCs[JingleSession.notReceivedSSRCs.length - 1]])
         {
             // TODO(gp) limit wait duration to 1 sec.
             setTimeout(function(d) {
@@ -1367,7 +1370,7 @@ JingleSession.prototype.remoteStreamAdded = function (data) {
             return;
         }
 
-        thessrc = notReceivedSSRCs.pop();
+        thessrc = JingleSession.notReceivedSSRCs.pop();
         if (ssrc2jid[thessrc]) {
             data.peerjid = ssrc2jid[thessrc];
         }
@@ -3079,15 +3082,15 @@ function setRecordingToken(token) {
     recordingToken = token;
 }
 
-function setRecording(state, token, callback) {
+function setRecording(state, token, callback, connection) {
     if (useJirecon){
-        this.setRecordingJirecon(state, token, callback);
+        this.setRecordingJirecon(state, token, callback, connection);
     } else {
-        this.setRecordingColibri(state, token, callback);
+        this.setRecordingColibri(state, token, callback, connection);
     }
 }
 
-function setRecordingJirecon(state, token, callback) {
+function setRecordingJirecon(state, token, callback, connection) {
     if (state == recordingEnabled){
         return;
     }
@@ -3126,8 +3129,8 @@ function setRecordingJirecon(state, token, callback) {
 // Sends a COLIBRI message which enables or disables (according to 'state')
 // the recording on the bridge. Waits for the result IQ and calls 'callback'
 // with the new recording state, according to the IQ.
-function setRecordingColibri(state, token, callback) {
-    var elem = $iq({to: focusMucJid, type: 'set'});
+function setRecordingColibri(state, token, callback, connection) {
+    var elem = $iq({to: connection.emuc.focusMucJid, type: 'set'});
     elem.c('conference', {
         xmlns: 'http://jitsi.org/protocol/colibri'
     });
@@ -3151,7 +3154,7 @@ function setRecordingColibri(state, token, callback) {
 
 var Recording = {
     toggleRecording: function (tokenEmptyCallback,
-                               startingCallback, startedCallback) {
+                               startingCallback, startedCallback, connection) {
         if (!Moderator.isModerator()) {
             console.log(
                 'non-focus, or conference not yet organized:' +
@@ -3199,7 +3202,8 @@ var Recording = {
                 }
 
                 startedCallback(state);
-            }
+            },
+            connection
         );
     }
 
@@ -3215,6 +3219,7 @@ module.exports = Recording;
 var bridgeIsDown = false;
 
 var Moderator = require("./moderator");
+var JingleSession = require("./JingleSession");
 
 module.exports = function(XMPP, eventEmitter) {
     Strophe.addConnectionPlugin('emuc', {
@@ -3228,6 +3233,7 @@ module.exports = function(XMPP, eventEmitter) {
         joined: false,
         isOwner: false,
         role: null,
+        focusMucJid: null,
         init: function (conn) {
             this.connection = conn;
         },
@@ -3400,7 +3406,7 @@ module.exports = function(XMPP, eventEmitter) {
                 this.list_members.push(from);
                 console.log('entered', from, member);
                 if (member.isFocus) {
-                    focusMucJid = from;
+                    this.focusMucJid = from;
                     console.info("Ignore focus: " + from + ", real JID: " +
                         member.jid);
                 } else {
@@ -3753,8 +3759,6 @@ module.exports = function(XMPP, eventEmitter) {
 
             API.triggerEvent("participantLeft", {jid: jid});
 
-            delete jid2Ssrc[jid];
-
             this.connection.jingle.terminateByJid(jid);
 
             if (this.getPrezi(jid)) {
@@ -3777,7 +3781,6 @@ module.exports = function(XMPP, eventEmitter) {
             Object.keys(ssrc2jid).forEach(function (ssrc) {
                 if (ssrc2jid[ssrc] == jid) {
                     delete ssrc2jid[ssrc];
-                    delete ssrc2videoType[ssrc];
                 }
             });
 
@@ -3786,10 +3789,10 @@ module.exports = function(XMPP, eventEmitter) {
                 //console.log(jid, 'assoc ssrc', ssrc.getAttribute('type'), ssrc.getAttribute('ssrc'));
                 var ssrcV = ssrc.getAttribute('ssrc');
                 ssrc2jid[ssrcV] = from;
-                notReceivedSSRCs.push(ssrcV);
+                JingleSession.notReceivedSSRCs.push(ssrcV);
+
                 var type = ssrc.getAttribute('type');
-                ssrc2videoType[ssrcV] = type;
 
                 var direction = ssrc.getAttribute('direction');
 
@@ -3822,7 +3825,7 @@ module.exports = function(XMPP, eventEmitter) {
 };
 
-},{"./moderator":6}],9:[function(require,module,exports){
+},{"./JingleSession":1,"./moderator":6}],9:[function(require,module,exports){
 /* jshint -W117 */
 var JingleSession = require("./JingleSession");
 
 
@@ -4202,7 +4205,7 @@ module.exports = function (XMPP) {
         },
         setMute: function (jid, mute) {
             console.info("set mute", mute);
-            var iqToFocus = $iq({to: focusMucJid, type: 'set'})
+            var iqToFocus = $iq({to: this.connection.emuc.focusMucJid, type: 'set'})
                 .c('mute', {
                     xmlns: 'http://jitsi.org/jitmeet/audio',
                     jid: jid
@@ -4221,7 +4224,7 @@ module.exports = function (XMPP) {
         },
         onMute: function (iq) {
             var from = iq.getAttribute('from');
-            if (from !== focusMucJid) {
+            if (from !== this.connection.emuc.focusMucJid) {
                 console.warn("Ignored mute from non focus peer");
                 return false;
             }
@@ -4264,7 +4267,7 @@ module.exports = function() {
         var req = $iq(
             {
                 type: 'set',
-                to: focusMucJid
+                to: this.connection.emuc.focusMucJid
             }
         );
         req.c('dial',
@@ -4707,7 +4710,7 @@ var XMPP = {
     toggleRecording: function (tokenEmptyCallback,
                                startingCallback, startedCallback) {
         Recording.toggleRecording(tokenEmptyCallback,
-            startingCallback, startedCallback);
+            startingCallback, startedCallback, connection);
     },
     addToPresence: function (name, value, dontSend) {
         switch (name)
@@ -4737,7 +4740,7 @@ var XMPP = {
         connection.emuc.sendPresence();
     },
     sendLogs: function (data) {
-        if(!focusMucJid)
+        if(!connection.emuc.focusMucJid)
             return;
 
         var deflate = true;
@@ -4748,7 +4751,7 @@ var XMPP = {
         }
         content = Base64.encode(content);
         // XEP-0337-ish
-        var message = $msg({to: focusMucJid, type: 'normal'});
+        var message = $msg({to: connection.emuc.focusMucJid, type: 'normal'});
         message.c('log', { xmlns: 'urn:xmpp:eventlog',
             id: 'PeerConnectionStats'});
         message.c('message').t(content).up();
diff --git a/modules/RTC/LocalStream.js b/modules/RTC/LocalStream.js
index fdcfcfd8e..e3717605f 100644
--- a/modules/RTC/LocalStream.js
+++ b/modules/RTC/LocalStream.js
@@ -1,10 +1,11 @@
 //var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
 
-function LocalStream(stream, type, eventEmitter)
+function LocalStream(stream, type, eventEmitter, videoType)
 {
     this.stream = stream;
     this.eventEmitter = eventEmitter;
     this.type = type;
+    this.videoType = videoType;
     var self = this;
     if(type == "audio")
     {
diff --git a/modules/RTC/MediaStream.js b/modules/RTC/MediaStream.js
index 222f013a3..041dbfde5 100644
--- a/modules/RTC/MediaStream.js
+++ b/modules/RTC/MediaStream.js
@@ -32,6 +32,7 @@ function MediaStream(data, sid, ssrc, browser) {
     this.ssrc = ssrc;
     this.type = (this.stream.getVideoTracks().length > 0)?
         MediaStreamType.VIDEO_TYPE : MediaStreamType.AUDIO_TYPE;
+    this.videoType = null;
     this.muted = false;
     if(browser == RTCBrowserType.RTC_BROWSER_FIREFOX)
     {
diff --git a/modules/RTC/RTC.js b/modules/RTC/RTC.js
index bc97c7f1f..afc5f6f2d 100644
--- a/modules/RTC/RTC.js
+++ b/modules/RTC/RTC.js
@@ -106,6 +106,20 @@ var RTC = {
             function (stream, isUsingScreenStream, callback) {
                 self.changeLocalVideo(stream, isUsingScreenStream, callback);
             }, DesktopSharingEventTypes.NEW_STREAM_CREATED);
+        xmpp.addListener(XMPPEvents.CHANGED_STREAMS, function (jid, changedStreams) {
+            for(var i = 0; i < changedStreams.length; i++) {
+                var type = changedStreams[i].type;
+                if (type != "audio") {
+                    var peerStreams = self.remoteStreams[jid];
+                    if(!peerStreams)
+                        continue;
+                    var videoStream = peerStreams[MediaStreamType.VIDEO_TYPE];
+                    if(!videoStream)
+                        continue;
+                    videoStream.videoType = changedStreams[i].type;
+                }
+            }
+        })
         this.rtcUtils = new RTCUtils(this);
         this.rtcUtils.obtainAudioAndVideoPermissions();
     },
@@ -142,13 +156,39 @@ var RTC = {
     },
     changeLocalVideo: function (stream, isUsingScreenStream, callback) {
         var oldStream = this.localVideo.getOriginalStream();
-        var type = (isUsingScreenStream? "desktop" : "video");
-        RTC.localVideo = this.createLocalStream(stream, type, true);
+        var type = (isUsingScreenStream? "screen" : "video");
+        RTC.localVideo = this.createLocalStream(stream, "video", true, type);
         // Stop the stream to trigger onended event for old stream
         oldStream.stop();
         xmpp.switchStreams(stream, oldStream,callback);
-    }
+    },
+    /**
+     * Checks if video identified by given src is desktop stream.
+     * @param videoSrc eg.
+     *      blob:https%3A//pawel.jitsi.net/9a46e0bd-131e-4d18-9c14-a9264e8db395
+     * @returns {boolean}
+     */
+    isVideoSrcDesktop: function (jid) {
+        if(!jid)
+            return false;
+        var isDesktop = false;
+        var stream = null;
+        if (xmpp.myJid() &&
+            xmpp.myResource() === jid) {
+            // local video
+            stream = this.localVideo;
+        } else {
+            var peerStreams = this.remoteStreams[jid];
+            if(!peerStreams)
+                return false;
+            stream = peerStreams[MediaStreamType.VIDEO_TYPE];
+        }
+        if(stream)
+            isDesktop = (stream.videoType === "screen");
+
+        return isDesktop;
+    }
 };
 
 module.exports = RTC;
 
diff --git a/modules/UI/UI.js b/modules/UI/UI.js
index 0fa850941..14bd74b32 100644
--- a/modules/UI/UI.js
+++ b/modules/UI/UI.js
@@ -58,9 +58,6 @@ function streamHandler(stream) {
         case "stream":
             VideoLayout.changeLocalStream(stream);
             break;
-        case "desktop":
-            VideoLayout.changeLocalVideo(stream);
-            break;
     }
 }
 
@@ -354,12 +351,7 @@ UI.onMucLeft = function (jid) {
             }
         }, 10);
 
-    // Unlock large video
-    if (focusedVideoInfo && focusedVideoInfo.jid === jid)
-    {
-        console.info("Focused video owner has left the conference");
-        focusedVideoInfo = null;
-    }
+    VideoLayout.participantLeft(jid);
 
 };
 
diff --git a/modules/UI/videolayout/VideoLayout.js b/modules/UI/videolayout/VideoLayout.js
index d6f658843..f6fef6103 100644
--- a/modules/UI/videolayout/VideoLayout.js
+++ b/modules/UI/videolayout/VideoLayout.js
@@ -15,6 +15,11 @@ var largeVideoState = {
     updateInProgress: false,
     newSrc: ''
 };
+/**
+ * Currently focused video "src"(displayed in large video).
+ * @type {String}
+ */
+var focusedVideoInfo = null;
 
 /**
  * Indicates if we have muted our audio before the conference has started.
@@ -92,15 +97,6 @@ function waitForRemoteVideo(selector, ssrc, stream, jid) {
     if (selector[0].currentTime > 0) {
         var videoStream = simulcast.getReceivingVideoStream(stream);
         RTC.attachMediaStream(selector, videoStream); // FIXME: why do i have to do this for FF?
-
-        // FIXME: add a class that will associate peer Jid, video.src, it's ssrc and video type
-        // in order to get rid of too many maps
-        if (ssrc && jid) {
-            jid2Ssrc[Strophe.getResourceFromJid(jid)] = ssrc;
-        } else {
-            console.warn("No ssrc given for jid", jid);
-        }
-
         videoactive(selector);
     } else {
         setTimeout(function () {
@@ -492,43 +488,6 @@ function createModeratorIndicatorElement(parentElement) {
 }
 
 
-/**
- * Checks if video identified by given src is desktop stream.
- * @param videoSrc eg.
- *      blob:https%3A//pawel.jitsi.net/9a46e0bd-131e-4d18-9c14-a9264e8db395
- * @returns {boolean}
- */
-function isVideoSrcDesktop(jid) {
-    // FIXME: fix this mapping mess...
-    // figure out if large video is desktop stream or just a camera
-
-    if(!jid)
-        return false;
-    var isDesktop = false;
-    if (xmpp.myJid() &&
-        xmpp.myResource() === jid) {
-        // local video
-        isDesktop = desktopsharing.isUsingScreenStream();
-    } else {
-        // Do we have associations...
-        var videoSsrc = jid2Ssrc[jid];
-        if (videoSsrc) {
-            var videoType = ssrc2videoType[videoSsrc];
-            if (videoType) {
-                // Finally there...
-                isDesktop = videoType === 'screen';
-            } else {
-                console.error("No video type for ssrc: " + videoSsrc);
-            }
-        } else {
-            console.error("No ssrc for jid: " + jid);
-        }
-    }
-    return isDesktop;
-}
-
-
-
 
 var VideoLayout = (function (my) {
     my.connectionIndicators = {};
@@ -571,7 +530,7 @@ var VideoLayout = (function (my) {
 
     my.changeLocalVideo = function(stream) {
         var flipX = true;
-        if(stream.type == "desktop")
+        if(stream.videoType == "screen")
            flipX = false;
        var localVideo = document.createElement('video');
        localVideo.id = 'localVideo_' +
@@ -754,11 +713,8 @@ var VideoLayout = (function (my) {
        largeVideoState.newSrc = newSrc;
 
        largeVideoState.isVisible = $('#largeVideo').is(':visible');
-        largeVideoState.isDesktop = isVideoSrcDesktop(resourceJid);
-        if(jid2Ssrc[largeVideoState.userResourceJid] ||
-            (xmpp.myResource() &&
-                largeVideoState.userResourceJid ===
-                    xmpp.myResource())) {
+        largeVideoState.isDesktop = RTC.isVideoSrcDesktop(resourceJid);
+        if(largeVideoState.userResourceJid) {
            largeVideoState.oldResourceJid = largeVideoState.userResourceJid;
        } else {
            largeVideoState.oldResourceJid = null;
@@ -2133,7 +2089,6 @@ var VideoLayout = (function (my) {
        }
 
        var jid = ssrc2jid[primarySSRC];
-        jid2Ssrc[jid] = primarySSRC;
 
        if (updateLargeVideo) {
            VideoLayout.updateLargeVideo(RTC.getVideoSrc(selRemoteVideo[0]), null,
@@ -2231,6 +2186,15 @@ var VideoLayout = (function (my) {
        }
    };
 
+    my.participantLeft = function (jid) {
+        // Unlock large video
+        if (focusedVideoInfo && focusedVideoInfo.jid === jid)
+        {
+            console.info("Focused video owner has left the conference");
+            focusedVideoInfo = null;
+        }
+    }
+
    return my;
 }(VideoLayout || {}));
 
diff --git a/modules/statistics/RTPStatsCollector.js b/modules/statistics/RTPStatsCollector.js
index c7901a23b..bad18d047 100644
--- a/modules/statistics/RTPStatsCollector.js
+++ b/modules/statistics/RTPStatsCollector.js
@@ -1,4 +1,4 @@
-/* global focusMucJid, ssrc2jid */
+/* global ssrc2jid */
 /* jshint -W117 */
 /**
  * Calculates packet lost percent using the number of lost packets and the
diff --git a/modules/xmpp/JingleSession.js b/modules/xmpp/JingleSession.js
index 8ff3d0efa..6c01cf80c 100644
--- a/modules/xmpp/JingleSession.js
+++ b/modules/xmpp/JingleSession.js
@@ -53,6 +53,9 @@ function JingleSession(me, sid, connection, service) {
     this.videoMuteByUser = false;
 }
 
+//TODO: this array must be removed when firefox implement multistream support
+JingleSession.notReceivedSSRCs = [];
+
 JingleSession.prototype.initiate = function (peerjid, isInitiator) {
     var self = this;
     if (this.state !== null) {
@@ -1354,8 +1357,8 @@ JingleSession.prototype.remoteStreamAdded = function (data) {
     //TODO: this code should be removed when firefox implement multistream support
     if(RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_FIREFOX)
     {
-        if((notReceivedSSRCs.length == 0) ||
-            !ssrc2jid[notReceivedSSRCs[notReceivedSSRCs.length - 1]])
+        if((JingleSession.notReceivedSSRCs.length == 0) ||
+            !ssrc2jid[JingleSession.notReceivedSSRCs[JingleSession.notReceivedSSRCs.length - 1]])
         {
             // TODO(gp) limit wait duration to 1 sec.
             setTimeout(function(d) {
@@ -1366,7 +1369,7 @@ JingleSession.prototype.remoteStreamAdded = function (data) {
             return;
         }
 
-        thessrc = notReceivedSSRCs.pop();
+        thessrc = JingleSession.notReceivedSSRCs.pop();
         if (ssrc2jid[thessrc]) {
             data.peerjid = ssrc2jid[thessrc];
         }
diff --git a/modules/xmpp/recording.js b/modules/xmpp/recording.js
index 245260c49..f3dc118fb 100644
--- a/modules/xmpp/recording.js
+++ b/modules/xmpp/recording.js
@@ -23,15 +23,15 @@ function setRecordingToken(token) {
     recordingToken = token;
 }
 
-function setRecording(state, token, callback) {
+function setRecording(state, token, callback, connection) {
     if (useJirecon){
-        this.setRecordingJirecon(state, token, callback);
+        this.setRecordingJirecon(state, token, callback, connection);
     } else {
-        this.setRecordingColibri(state, token, callback);
+        this.setRecordingColibri(state, token, callback, connection);
     }
 }
 
-function setRecordingJirecon(state, token, callback) {
+function setRecordingJirecon(state, token, callback, connection) {
     if (state == recordingEnabled){
         return;
     }
@@ -70,8 +70,8 @@ function setRecordingJirecon(state, token, callback) {
 // Sends a COLIBRI message which enables or disables (according to 'state')
 // the recording on the bridge. Waits for the result IQ and calls 'callback'
 // with the new recording state, according to the IQ.
-function setRecordingColibri(state, token, callback) {
-    var elem = $iq({to: focusMucJid, type: 'set'});
+function setRecordingColibri(state, token, callback, connection) {
+    var elem = $iq({to: connection.emuc.focusMucJid, type: 'set'});
     elem.c('conference', {
         xmlns: 'http://jitsi.org/protocol/colibri'
     });
@@ -95,7 +95,7 @@ function setRecordingColibri(state, token, callback) {
 
 var Recording = {
     toggleRecording: function (tokenEmptyCallback,
-                               startingCallback, startedCallback) {
+                               startingCallback, startedCallback, connection) {
         if (!Moderator.isModerator()) {
             console.log(
                 'non-focus, or conference not yet organized:' +
@@ -143,7 +143,8 @@ var Recording = {
                 }
 
                 startedCallback(state);
-            }
+            },
+            connection
         );
     }
 
diff --git a/modules/xmpp/strophe.emuc.js b/modules/xmpp/strophe.emuc.js
index e2ada347f..989a192f0 100644
--- a/modules/xmpp/strophe.emuc.js
+++ b/modules/xmpp/strophe.emuc.js
@@ -6,6 +6,7 @@
 var bridgeIsDown = false;
 
 var Moderator = require("./moderator");
+var JingleSession = require("./JingleSession");
 
 module.exports = function(XMPP, eventEmitter) {
     Strophe.addConnectionPlugin('emuc', {
@@ -19,6 +20,7 @@ module.exports = function(XMPP, eventEmitter) {
         joined: false,
         isOwner: false,
         role: null,
+        focusMucJid: null,
         init: function (conn) {
             this.connection = conn;
         },
@@ -191,7 +193,7 @@ module.exports = function(XMPP, eventEmitter) {
                 this.list_members.push(from);
                 console.log('entered', from, member);
                 if (member.isFocus) {
-                    focusMucJid = from;
+                    this.focusMucJid = from;
                     console.info("Ignore focus: " + from + ", real JID: " +
                         member.jid);
                 } else {
@@ -544,8 +546,6 @@ module.exports = function(XMPP, eventEmitter) {
 
             API.triggerEvent("participantLeft", {jid: jid});
 
-            delete jid2Ssrc[jid];
-
             this.connection.jingle.terminateByJid(jid);
 
             if (this.getPrezi(jid)) {
@@ -568,7 +568,6 @@ module.exports = function(XMPP, eventEmitter) {
             Object.keys(ssrc2jid).forEach(function (ssrc) {
                 if (ssrc2jid[ssrc] == jid) {
                     delete ssrc2jid[ssrc];
-                    delete ssrc2videoType[ssrc];
                 }
             });
 
@@ -577,10 +576,10 @@ module.exports = function(XMPP, eventEmitter) {
                 //console.log(jid, 'assoc ssrc', ssrc.getAttribute('type'), ssrc.getAttribute('ssrc'));
                 var ssrcV = ssrc.getAttribute('ssrc');
                 ssrc2jid[ssrcV] = from;
-                notReceivedSSRCs.push(ssrcV);
+                JingleSession.notReceivedSSRCs.push(ssrcV);
+
                 var type = ssrc.getAttribute('type');
-                ssrc2videoType[ssrcV] = type;
 
                 var direction = ssrc.getAttribute('direction');
 
diff --git a/modules/xmpp/strophe.moderate.js b/modules/xmpp/strophe.moderate.js
index 64a8bccfa..d78fe8c34 100644
--- a/modules/xmpp/strophe.moderate.js
+++ b/modules/xmpp/strophe.moderate.js
@@ -18,7 +18,7 @@ module.exports = function (XMPP) {
         },
         setMute: function (jid, mute) {
             console.info("set mute", mute);
-            var iqToFocus = $iq({to: focusMucJid, type: 'set'})
+            var iqToFocus = $iq({to: this.connection.emuc.focusMucJid, type: 'set'})
                 .c('mute', {
                     xmlns: 'http://jitsi.org/jitmeet/audio',
                     jid: jid
@@ -37,7 +37,7 @@ module.exports = function (XMPP) {
         },
         onMute: function (iq) {
             var from = iq.getAttribute('from');
-            if (from !== focusMucJid) {
+            if (from !== this.connection.emuc.focusMucJid) {
                 console.warn("Ignored mute from non focus peer");
                 return false;
             }
diff --git a/modules/xmpp/strophe.rayo.js b/modules/xmpp/strophe.rayo.js
index 9d0db5547..283962358 100644
--- a/modules/xmpp/strophe.rayo.js
+++ b/modules/xmpp/strophe.rayo.js
@@ -21,7 +21,7 @@ module.exports = function() {
         var req = $iq(
             {
                 type: 'set',
-                to: focusMucJid
+                to: this.connection.emuc.focusMucJid
             }
         );
         req.c('dial',
diff --git a/modules/xmpp/xmpp.js b/modules/xmpp/xmpp.js
index 8ce622727..2744041cb 100644
--- a/modules/xmpp/xmpp.js
+++ b/modules/xmpp/xmpp.js
@@ -323,7 +323,7 @@ var XMPP = {
     toggleRecording: function (tokenEmptyCallback,
                                startingCallback, startedCallback) {
         Recording.toggleRecording(tokenEmptyCallback,
-            startingCallback, startedCallback);
+            startingCallback, startedCallback, connection);
     },
     addToPresence: function (name, value, dontSend) {
         switch (name)
@@ -353,7 +353,7 @@ var XMPP = {
         connection.emuc.sendPresence();
     },
     sendLogs: function (data) {
-        if(!focusMucJid)
+        if(!connection.emuc.focusMucJid)
             return;
 
         var deflate = true;
@@ -364,7 +364,7 @@ var XMPP = {
         }
         content = Base64.encode(content);
         // XEP-0337-ish
-        var message = $msg({to: focusMucJid, type: 'normal'});
+        var message = $msg({to: connection.emuc.focusMucJid, type: 'normal'});
         message.c('log', { xmlns: 'urn:xmpp:eventlog',
             id: 'PeerConnectionStats'});
         message.c('message').t(content).up();