Removes some global variables. Fixes recording.

hristoterezov 2015-01-20 17:56:00 +02:00
parent 6347730dc7
commit 6c4a5bd2bc
18 changed files with 188 additions and 202 deletions
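
The diffs below move focusMucJid off the app.js globals and onto the emuc connection plugin, replace the ssrc2videoType/jid2Ssrc maps with a videoType field on the stream wrappers (queried through RTC.isVideoSrcDesktop), move notReceivedSSRCs onto JingleSession, and pass the XMPP connection into the recording helpers. A minimal, self-contained sketch of the recording-related part of that pattern; the connection object and onFocusPresence below are mocks for illustration, not the real Strophe plugin:

// Illustrative sketch only -- mirrors the shape of this commit, not the real modules.
var connection = {
    emuc: {
        // focusMucJid is now state of the emuc plugin instead of a page-level global.
        focusMucJid: null,
        onFocusPresence: function (from) {
            this.focusMucJid = from; // was: focusMucJid = from;
        }
    }
};

// Recording helpers now receive the connection explicitly, so they can read
// connection.emuc.focusMucJid instead of the removed global.
function setRecordingColibri(state, token, callback, connection) {
    var to = connection.emuc.focusMucJid;
    // The real code builds and sends a COLIBRI IQ here; this sketch only reports it.
    console.log('send COLIBRI recording IQ to', to, '-> state:', state);
    callback(state);
}

connection.emuc.onFocusPresence('room@conference.example.com/focus');
setRecordingColibri(true, null, function (state) {
    console.log('recording state is now', state);
}, connection);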

app.js

@ -1,23 +1,7 @@
/* jshint -W117 */
/* application specific logic */
var nickname = null;
var focusMucJid = null;
var ssrc2jid = {};
//TODO: this array must be removed when firefox implement multistream support
var notReceivedSSRCs = [];
var jid2Ssrc = {};
/**
* Indicates whether ssrc is camera video or desktop stream.
* FIXME: remove those maps
*/
var ssrc2videoType = {};
/**
* Currently focused video "src"(displayed in large video).
* @type {String}
*/
var focusedVideoInfo = null;
function init() {


@ -27,13 +27,13 @@
<script src="service/desktopsharing/DesktopSharingEventTypes.js?v=1"></script>
<script src="libs/modules/simulcast.bundle.js?v=4"></script>
<script src="libs/modules/connectionquality.bundle.js?v=2"></script>
<script src="libs/modules/UI.bundle.js?v=6"></script>
<script src="libs/modules/statistics.bundle.js?v=2"></script>
<script src="libs/modules/RTC.bundle.js?v=5"></script>
<script src="libs/modules/UI.bundle.js?v=7"></script>
<script src="libs/modules/statistics.bundle.js?v=3"></script>
<script src="libs/modules/RTC.bundle.js?v=6"></script>
<script src="libs/modules/desktopsharing.bundle.js?v=3"></script><!-- desktop sharing -->
<script src="util.js?v=7"></script><!-- utility functions -->
<script src="libs/modules/xmpp.bundle.js?v=1"></script>
<script src="app.js?v=27"></script><!-- application logic -->
<script src="libs/modules/xmpp.bundle.js?v=2"></script>
<script src="app.js?v=28"></script><!-- application logic -->
<script src="libs/modules/API.bundle.js?v=1"></script>
<script src="analytics.js?v=1"></script><!-- google analytics plugin -->


@ -240,11 +240,12 @@ module.exports = DataChannels;
},{}],2:[function(require,module,exports){
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
function LocalStream(stream, type, eventEmitter)
function LocalStream(stream, type, eventEmitter, videoType)
{
this.stream = stream;
this.eventEmitter = eventEmitter;
this.type = type;
this.videoType = videoType;
var self = this;
if(type == "audio")
{
@ -359,6 +360,7 @@ function MediaStream(data, sid, ssrc, browser) {
this.ssrc = ssrc;
this.type = (this.stream.getVideoTracks().length > 0)?
MediaStreamType.VIDEO_TYPE : MediaStreamType.AUDIO_TYPE;
this.videoType = null;
this.muted = false;
if(browser == RTCBrowserType.RTC_BROWSER_FIREFOX)
{
@ -493,6 +495,20 @@ var RTC = {
function (stream, isUsingScreenStream, callback) {
self.changeLocalVideo(stream, isUsingScreenStream, callback);
}, DesktopSharingEventTypes.NEW_STREAM_CREATED);
xmpp.addListener(XMPPEvents.CHANGED_STREAMS, function (jid, changedStreams) {
for(var i = 0; i < changedStreams.length; i++) {
var type = changedStreams[i].type;
if (type != "audio") {
var peerStreams = self.remoteStreams[jid];
if(!peerStreams)
continue;
var videoStream = peerStreams[MediaStreamType.VIDEO_TYPE];
if(!videoStream)
continue;
videoStream.videoType = changedStreams[i].type;
}
}
})
this.rtcUtils = new RTCUtils(this);
this.rtcUtils.obtainAudioAndVideoPermissions();
},
@ -529,13 +545,39 @@ var RTC = {
},
changeLocalVideo: function (stream, isUsingScreenStream, callback) {
var oldStream = this.localVideo.getOriginalStream();
var type = (isUsingScreenStream? "desktop" : "video");
RTC.localVideo = this.createLocalStream(stream, type, true);
var type = (isUsingScreenStream? "screen" : "video");
RTC.localVideo = this.createLocalStream(stream, "video", true, type);
// Stop the stream to trigger onended event for old stream
oldStream.stop();
xmpp.switchStreams(stream, oldStream,callback);
},
/**
* Checks if video identified by given src is desktop stream.
* @param videoSrc eg.
* blob:https%3A//pawel.jitsi.net/9a46e0bd-131e-4d18-9c14-a9264e8db395
* @returns {boolean}
*/
isVideoSrcDesktop: function (jid) {
if(!jid)
return false;
var isDesktop = false;
var stream = null;
if (xmpp.myJid() &&
xmpp.myResource() === jid) {
// local video
stream = this.localVideo;
} else {
var peerStreams = this.remoteStreams[jid];
if(!peerStreams)
return false;
stream = peerStreams[MediaStreamType.VIDEO_TYPE];
}
if(stream)
isDesktop = (stream.videoType === "screen");
return isDesktop;
}
};
module.exports = RTC;


@ -59,9 +59,6 @@ function streamHandler(stream) {
case "stream":
VideoLayout.changeLocalStream(stream);
break;
case "desktop":
VideoLayout.changeLocalVideo(stream);
break;
}
}
@ -355,12 +352,7 @@ UI.onMucLeft = function (jid) {
}
}, 10);
// Unlock large video
if (focusedVideoInfo && focusedVideoInfo.jid === jid)
{
console.info("Focused video owner has left the conference");
focusedVideoInfo = null;
}
VideoLayout.participantLeft(jid);
};
@ -4402,6 +4394,11 @@ var largeVideoState = {
updateInProgress: false,
newSrc: ''
};
/**
* Currently focused video "src"(displayed in large video).
* @type {String}
*/
var focusedVideoInfo = null;
/**
* Indicates if we have muted our audio before the conference has started.
@ -4479,15 +4476,6 @@ function waitForRemoteVideo(selector, ssrc, stream, jid) {
if (selector[0].currentTime > 0) {
var videoStream = simulcast.getReceivingVideoStream(stream);
RTC.attachMediaStream(selector, videoStream); // FIXME: why do i have to do this for FF?
// FIXME: add a class that will associate peer Jid, video.src, it's ssrc and video type
// in order to get rid of too many maps
if (ssrc && jid) {
jid2Ssrc[Strophe.getResourceFromJid(jid)] = ssrc;
} else {
console.warn("No ssrc given for jid", jid);
}
videoactive(selector);
} else {
setTimeout(function () {
@ -4879,43 +4867,6 @@ function createModeratorIndicatorElement(parentElement) {
}
/**
* Checks if video identified by given src is desktop stream.
* @param videoSrc eg.
* blob:https%3A//pawel.jitsi.net/9a46e0bd-131e-4d18-9c14-a9264e8db395
* @returns {boolean}
*/
function isVideoSrcDesktop(jid) {
// FIXME: fix this mapping mess...
// figure out if large video is desktop stream or just a camera
if(!jid)
return false;
var isDesktop = false;
if (xmpp.myJid() &&
xmpp.myResource() === jid) {
// local video
isDesktop = desktopsharing.isUsingScreenStream();
} else {
// Do we have associations...
var videoSsrc = jid2Ssrc[jid];
if (videoSsrc) {
var videoType = ssrc2videoType[videoSsrc];
if (videoType) {
// Finally there...
isDesktop = videoType === 'screen';
} else {
console.error("No video type for ssrc: " + videoSsrc);
}
} else {
console.error("No ssrc for jid: " + jid);
}
}
return isDesktop;
}
var VideoLayout = (function (my) {
my.connectionIndicators = {};
@ -4958,7 +4909,7 @@ var VideoLayout = (function (my) {
my.changeLocalVideo = function(stream) {
var flipX = true;
if(stream.type == "desktop")
if(stream.videoType == "screen")
flipX = false;
var localVideo = document.createElement('video');
localVideo.id = 'localVideo_' +
@ -5141,11 +5092,8 @@ var VideoLayout = (function (my) {
largeVideoState.newSrc = newSrc;
largeVideoState.isVisible = $('#largeVideo').is(':visible');
largeVideoState.isDesktop = isVideoSrcDesktop(resourceJid);
if(jid2Ssrc[largeVideoState.userResourceJid] ||
(xmpp.myResource() &&
largeVideoState.userResourceJid ===
xmpp.myResource())) {
largeVideoState.isDesktop = RTC.isVideoSrcDesktop(resourceJid);
if(largeVideoState.userResourceJid) {
largeVideoState.oldResourceJid = largeVideoState.userResourceJid;
} else {
largeVideoState.oldResourceJid = null;
@ -6520,7 +6468,6 @@ var VideoLayout = (function (my) {
}
var jid = ssrc2jid[primarySSRC];
jid2Ssrc[jid] = primarySSRC;
if (updateLargeVideo) {
VideoLayout.updateLargeVideo(RTC.getVideoSrc(selRemoteVideo[0]), null,
@ -6618,6 +6565,15 @@ var VideoLayout = (function (my) {
}
};
my.participantLeft = function (jid) {
// Unlock large video
if (focusedVideoInfo && focusedVideoInfo.jid === jid)
{
console.info("Focused video owner has left the conference");
focusedVideoInfo = null;
}
}
return my;
}(VideoLayout || {}));


@ -130,7 +130,7 @@ LocalStatsCollector.prototype.stop = function () {
module.exports = LocalStatsCollector;
},{}],2:[function(require,module,exports){
/* global focusMucJid, ssrc2jid */
/* global ssrc2jid */
/* jshint -W117 */
/**
* Calculates packet lost percent using the number of lost packets and the


@ -54,6 +54,9 @@ function JingleSession(me, sid, connection, service) {
this.videoMuteByUser = false;
}
//TODO: this array must be removed when firefox implement multistream support
JingleSession.notReceivedSSRCs = [];
JingleSession.prototype.initiate = function (peerjid, isInitiator) {
var self = this;
if (this.state !== null) {
@ -1355,8 +1358,8 @@ JingleSession.prototype.remoteStreamAdded = function (data) {
//TODO: this code should be removed when firefox implement multistream support
if(RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_FIREFOX)
{
if((notReceivedSSRCs.length == 0) ||
!ssrc2jid[notReceivedSSRCs[notReceivedSSRCs.length - 1]])
if((JingleSession.notReceivedSSRCs.length == 0) ||
!ssrc2jid[JingleSession.notReceivedSSRCs[JingleSession.notReceivedSSRCs.length - 1]])
{
// TODO(gp) limit wait duration to 1 sec.
setTimeout(function(d) {
@ -1367,7 +1370,7 @@ JingleSession.prototype.remoteStreamAdded = function (data) {
return;
}
thessrc = notReceivedSSRCs.pop();
thessrc = JingleSession.notReceivedSSRCs.pop();
if (ssrc2jid[thessrc]) {
data.peerjid = ssrc2jid[thessrc];
}
@ -3079,15 +3082,15 @@ function setRecordingToken(token) {
recordingToken = token;
}
function setRecording(state, token, callback) {
function setRecording(state, token, callback, connection) {
if (useJirecon){
this.setRecordingJirecon(state, token, callback);
this.setRecordingJirecon(state, token, callback, connection);
} else {
this.setRecordingColibri(state, token, callback);
this.setRecordingColibri(state, token, callback, connection);
}
}
function setRecordingJirecon(state, token, callback) {
function setRecordingJirecon(state, token, callback, connection) {
if (state == recordingEnabled){
return;
}
@ -3126,8 +3129,8 @@ function setRecordingJirecon(state, token, callback) {
// Sends a COLIBRI message which enables or disables (according to 'state')
// the recording on the bridge. Waits for the result IQ and calls 'callback'
// with the new recording state, according to the IQ.
function setRecordingColibri(state, token, callback) {
var elem = $iq({to: focusMucJid, type: 'set'});
function setRecordingColibri(state, token, callback, connection) {
var elem = $iq({to: connection.emuc.focusMucJid, type: 'set'});
elem.c('conference', {
xmlns: 'http://jitsi.org/protocol/colibri'
});
@ -3151,7 +3154,7 @@ function setRecordingColibri(state, token, callback) {
var Recording = {
toggleRecording: function (tokenEmptyCallback,
startingCallback, startedCallback) {
startingCallback, startedCallback, connection) {
if (!Moderator.isModerator()) {
console.log(
'non-focus, or conference not yet organized:' +
@ -3199,7 +3202,8 @@ var Recording = {
}
startedCallback(state);
}
},
connection
);
}
@ -3215,6 +3219,7 @@ module.exports = Recording;
var bridgeIsDown = false;
var Moderator = require("./moderator");
var JingleSession = require("./JingleSession");
module.exports = function(XMPP, eventEmitter) {
Strophe.addConnectionPlugin('emuc', {
@ -3228,6 +3233,7 @@ module.exports = function(XMPP, eventEmitter) {
joined: false,
isOwner: false,
role: null,
focusMucJid: null,
init: function (conn) {
this.connection = conn;
},
@ -3400,7 +3406,7 @@ module.exports = function(XMPP, eventEmitter) {
this.list_members.push(from);
console.log('entered', from, member);
if (member.isFocus) {
focusMucJid = from;
this.focusMucJid = from;
console.info("Ignore focus: " + from + ", real JID: " + member.jid);
}
else {
@ -3753,8 +3759,6 @@ module.exports = function(XMPP, eventEmitter) {
API.triggerEvent("participantLeft", {jid: jid});
delete jid2Ssrc[jid];
this.connection.jingle.terminateByJid(jid);
if (this.getPrezi(jid)) {
@ -3777,7 +3781,6 @@ module.exports = function(XMPP, eventEmitter) {
Object.keys(ssrc2jid).forEach(function (ssrc) {
if (ssrc2jid[ssrc] == jid) {
delete ssrc2jid[ssrc];
delete ssrc2videoType[ssrc];
}
});
@ -3786,10 +3789,10 @@ module.exports = function(XMPP, eventEmitter) {
//console.log(jid, 'assoc ssrc', ssrc.getAttribute('type'), ssrc.getAttribute('ssrc'));
var ssrcV = ssrc.getAttribute('ssrc');
ssrc2jid[ssrcV] = from;
notReceivedSSRCs.push(ssrcV);
JingleSession.notReceivedSSRCs.push(ssrcV);
var type = ssrc.getAttribute('type');
ssrc2videoType[ssrcV] = type;
var direction = ssrc.getAttribute('direction');
@ -3822,7 +3825,7 @@ module.exports = function(XMPP, eventEmitter) {
};
},{"./moderator":6}],9:[function(require,module,exports){
},{"./JingleSession":1,"./moderator":6}],9:[function(require,module,exports){
/* jshint -W117 */
var JingleSession = require("./JingleSession");
@ -4202,7 +4205,7 @@ module.exports = function (XMPP) {
},
setMute: function (jid, mute) {
console.info("set mute", mute);
var iqToFocus = $iq({to: focusMucJid, type: 'set'})
var iqToFocus = $iq({to: this.connection.emuc.focusMucJid, type: 'set'})
.c('mute', {
xmlns: 'http://jitsi.org/jitmeet/audio',
jid: jid
@ -4221,7 +4224,7 @@ module.exports = function (XMPP) {
},
onMute: function (iq) {
var from = iq.getAttribute('from');
if (from !== focusMucJid) {
if (from !== this.connection.emuc.focusMucJid) {
console.warn("Ignored mute from non focus peer");
return false;
}
@ -4264,7 +4267,7 @@ module.exports = function() {
var req = $iq(
{
type: 'set',
to: focusMucJid
to: this.connection.emuc.focusMucJid
}
);
req.c('dial',
@ -4707,7 +4710,7 @@ var XMPP = {
toggleRecording: function (tokenEmptyCallback,
startingCallback, startedCallback) {
Recording.toggleRecording(tokenEmptyCallback,
startingCallback, startedCallback);
startingCallback, startedCallback, connection);
},
addToPresence: function (name, value, dontSend) {
switch (name)
@ -4737,7 +4740,7 @@ var XMPP = {
connection.emuc.sendPresence();
},
sendLogs: function (data) {
if(!focusMucJid)
if(!connection.emuc.focusMucJid)
return;
var deflate = true;
@ -4748,7 +4751,7 @@ var XMPP = {
}
content = Base64.encode(content);
// XEP-0337-ish
var message = $msg({to: focusMucJid, type: 'normal'});
var message = $msg({to: connection.emuc.focusMucJid, type: 'normal'});
message.c('log', { xmlns: 'urn:xmpp:eventlog',
id: 'PeerConnectionStats'});
message.c('message').t(content).up();


@ -1,10 +1,11 @@
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
function LocalStream(stream, type, eventEmitter)
function LocalStream(stream, type, eventEmitter, videoType)
{
this.stream = stream;
this.eventEmitter = eventEmitter;
this.type = type;
this.videoType = videoType;
var self = this;
if(type == "audio")
{


@ -32,6 +32,7 @@ function MediaStream(data, sid, ssrc, browser) {
this.ssrc = ssrc;
this.type = (this.stream.getVideoTracks().length > 0)?
MediaStreamType.VIDEO_TYPE : MediaStreamType.AUDIO_TYPE;
this.videoType = null;
this.muted = false;
if(browser == RTCBrowserType.RTC_BROWSER_FIREFOX)
{


@ -106,6 +106,20 @@ var RTC = {
function (stream, isUsingScreenStream, callback) {
self.changeLocalVideo(stream, isUsingScreenStream, callback);
}, DesktopSharingEventTypes.NEW_STREAM_CREATED);
xmpp.addListener(XMPPEvents.CHANGED_STREAMS, function (jid, changedStreams) {
for(var i = 0; i < changedStreams.length; i++) {
var type = changedStreams[i].type;
if (type != "audio") {
var peerStreams = self.remoteStreams[jid];
if(!peerStreams)
continue;
var videoStream = peerStreams[MediaStreamType.VIDEO_TYPE];
if(!videoStream)
continue;
videoStream.videoType = changedStreams[i].type;
}
}
})
this.rtcUtils = new RTCUtils(this);
this.rtcUtils.obtainAudioAndVideoPermissions();
},
@ -142,13 +156,39 @@ var RTC = {
},
changeLocalVideo: function (stream, isUsingScreenStream, callback) {
var oldStream = this.localVideo.getOriginalStream();
var type = (isUsingScreenStream? "desktop" : "video");
RTC.localVideo = this.createLocalStream(stream, type, true);
var type = (isUsingScreenStream? "screen" : "video");
RTC.localVideo = this.createLocalStream(stream, "video", true, type);
// Stop the stream to trigger onended event for old stream
oldStream.stop();
xmpp.switchStreams(stream, oldStream,callback);
},
/**
* Checks if video identified by given src is desktop stream.
* @param videoSrc eg.
* blob:https%3A//pawel.jitsi.net/9a46e0bd-131e-4d18-9c14-a9264e8db395
* @returns {boolean}
*/
isVideoSrcDesktop: function (jid) {
if(!jid)
return false;
var isDesktop = false;
var stream = null;
if (xmpp.myJid() &&
xmpp.myResource() === jid) {
// local video
stream = this.localVideo;
} else {
var peerStreams = this.remoteStreams[jid];
if(!peerStreams)
return false;
stream = peerStreams[MediaStreamType.VIDEO_TYPE];
}
if(stream)
isDesktop = (stream.videoType === "screen");
return isDesktop;
}
};
module.exports = RTC;


@ -58,9 +58,6 @@ function streamHandler(stream) {
case "stream":
VideoLayout.changeLocalStream(stream);
break;
case "desktop":
VideoLayout.changeLocalVideo(stream);
break;
}
}
@ -354,12 +351,7 @@ UI.onMucLeft = function (jid) {
}
}, 10);
// Unlock large video
if (focusedVideoInfo && focusedVideoInfo.jid === jid)
{
console.info("Focused video owner has left the conference");
focusedVideoInfo = null;
}
VideoLayout.participantLeft(jid);
};


@ -15,6 +15,11 @@ var largeVideoState = {
updateInProgress: false,
newSrc: ''
};
/**
* Currently focused video "src"(displayed in large video).
* @type {String}
*/
var focusedVideoInfo = null;
/**
* Indicates if we have muted our audio before the conference has started.
@ -92,15 +97,6 @@ function waitForRemoteVideo(selector, ssrc, stream, jid) {
if (selector[0].currentTime > 0) {
var videoStream = simulcast.getReceivingVideoStream(stream);
RTC.attachMediaStream(selector, videoStream); // FIXME: why do i have to do this for FF?
// FIXME: add a class that will associate peer Jid, video.src, it's ssrc and video type
// in order to get rid of too many maps
if (ssrc && jid) {
jid2Ssrc[Strophe.getResourceFromJid(jid)] = ssrc;
} else {
console.warn("No ssrc given for jid", jid);
}
videoactive(selector);
} else {
setTimeout(function () {
@ -492,43 +488,6 @@ function createModeratorIndicatorElement(parentElement) {
}
/**
* Checks if video identified by given src is desktop stream.
* @param videoSrc eg.
* blob:https%3A//pawel.jitsi.net/9a46e0bd-131e-4d18-9c14-a9264e8db395
* @returns {boolean}
*/
function isVideoSrcDesktop(jid) {
// FIXME: fix this mapping mess...
// figure out if large video is desktop stream or just a camera
if(!jid)
return false;
var isDesktop = false;
if (xmpp.myJid() &&
xmpp.myResource() === jid) {
// local video
isDesktop = desktopsharing.isUsingScreenStream();
} else {
// Do we have associations...
var videoSsrc = jid2Ssrc[jid];
if (videoSsrc) {
var videoType = ssrc2videoType[videoSsrc];
if (videoType) {
// Finally there...
isDesktop = videoType === 'screen';
} else {
console.error("No video type for ssrc: " + videoSsrc);
}
} else {
console.error("No ssrc for jid: " + jid);
}
}
return isDesktop;
}
var VideoLayout = (function (my) {
my.connectionIndicators = {};
@ -571,7 +530,7 @@ var VideoLayout = (function (my) {
my.changeLocalVideo = function(stream) {
var flipX = true;
if(stream.type == "desktop")
if(stream.videoType == "screen")
flipX = false;
var localVideo = document.createElement('video');
localVideo.id = 'localVideo_' +
@ -754,11 +713,8 @@ var VideoLayout = (function (my) {
largeVideoState.newSrc = newSrc;
largeVideoState.isVisible = $('#largeVideo').is(':visible');
largeVideoState.isDesktop = isVideoSrcDesktop(resourceJid);
if(jid2Ssrc[largeVideoState.userResourceJid] ||
(xmpp.myResource() &&
largeVideoState.userResourceJid ===
xmpp.myResource())) {
largeVideoState.isDesktop = RTC.isVideoSrcDesktop(resourceJid);
if(largeVideoState.userResourceJid) {
largeVideoState.oldResourceJid = largeVideoState.userResourceJid;
} else {
largeVideoState.oldResourceJid = null;
@ -2133,7 +2089,6 @@ var VideoLayout = (function (my) {
}
var jid = ssrc2jid[primarySSRC];
jid2Ssrc[jid] = primarySSRC;
if (updateLargeVideo) {
VideoLayout.updateLargeVideo(RTC.getVideoSrc(selRemoteVideo[0]), null,
@ -2231,6 +2186,15 @@ var VideoLayout = (function (my) {
}
};
my.participantLeft = function (jid) {
// Unlock large video
if (focusedVideoInfo && focusedVideoInfo.jid === jid)
{
console.info("Focused video owner has left the conference");
focusedVideoInfo = null;
}
}
return my;
}(VideoLayout || {}));


@ -1,4 +1,4 @@
/* global focusMucJid, ssrc2jid */
/* global ssrc2jid */
/* jshint -W117 */
/**
* Calculates packet lost percent using the number of lost packets and the


@ -53,6 +53,9 @@ function JingleSession(me, sid, connection, service) {
this.videoMuteByUser = false;
}
//TODO: this array must be removed when firefox implement multistream support
JingleSession.notReceivedSSRCs = [];
JingleSession.prototype.initiate = function (peerjid, isInitiator) {
var self = this;
if (this.state !== null) {
@ -1354,8 +1357,8 @@ JingleSession.prototype.remoteStreamAdded = function (data) {
//TODO: this code should be removed when firefox implement multistream support
if(RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_FIREFOX)
{
if((notReceivedSSRCs.length == 0) ||
!ssrc2jid[notReceivedSSRCs[notReceivedSSRCs.length - 1]])
if((JingleSession.notReceivedSSRCs.length == 0) ||
!ssrc2jid[JingleSession.notReceivedSSRCs[JingleSession.notReceivedSSRCs.length - 1]])
{
// TODO(gp) limit wait duration to 1 sec.
setTimeout(function(d) {
@ -1366,7 +1369,7 @@ JingleSession.prototype.remoteStreamAdded = function (data) {
return;
}
thessrc = notReceivedSSRCs.pop();
thessrc = JingleSession.notReceivedSSRCs.pop();
if (ssrc2jid[thessrc]) {
data.peerjid = ssrc2jid[thessrc];
}


@ -23,15 +23,15 @@ function setRecordingToken(token) {
recordingToken = token;
}
function setRecording(state, token, callback) {
function setRecording(state, token, callback, connection) {
if (useJirecon){
this.setRecordingJirecon(state, token, callback);
this.setRecordingJirecon(state, token, callback, connection);
} else {
this.setRecordingColibri(state, token, callback);
this.setRecordingColibri(state, token, callback, connection);
}
}
function setRecordingJirecon(state, token, callback) {
function setRecordingJirecon(state, token, callback, connection) {
if (state == recordingEnabled){
return;
}
@ -70,8 +70,8 @@ function setRecordingJirecon(state, token, callback) {
// Sends a COLIBRI message which enables or disables (according to 'state')
// the recording on the bridge. Waits for the result IQ and calls 'callback'
// with the new recording state, according to the IQ.
function setRecordingColibri(state, token, callback) {
var elem = $iq({to: focusMucJid, type: 'set'});
function setRecordingColibri(state, token, callback, connection) {
var elem = $iq({to: connection.emuc.focusMucJid, type: 'set'});
elem.c('conference', {
xmlns: 'http://jitsi.org/protocol/colibri'
});
@ -95,7 +95,7 @@ function setRecordingColibri(state, token, callback) {
var Recording = {
toggleRecording: function (tokenEmptyCallback,
startingCallback, startedCallback) {
startingCallback, startedCallback, connection) {
if (!Moderator.isModerator()) {
console.log(
'non-focus, or conference not yet organized:' +
@ -143,7 +143,8 @@ var Recording = {
}
startedCallback(state);
}
},
connection
);
}


@ -6,6 +6,7 @@
var bridgeIsDown = false;
var Moderator = require("./moderator");
var JingleSession = require("./JingleSession");
module.exports = function(XMPP, eventEmitter) {
Strophe.addConnectionPlugin('emuc', {
@ -19,6 +20,7 @@ module.exports = function(XMPP, eventEmitter) {
joined: false,
isOwner: false,
role: null,
focusMucJid: null,
init: function (conn) {
this.connection = conn;
},
@ -191,7 +193,7 @@ module.exports = function(XMPP, eventEmitter) {
this.list_members.push(from);
console.log('entered', from, member);
if (member.isFocus) {
focusMucJid = from;
this.focusMucJid = from;
console.info("Ignore focus: " + from + ", real JID: " + member.jid);
}
else {
@ -544,8 +546,6 @@ module.exports = function(XMPP, eventEmitter) {
API.triggerEvent("participantLeft", {jid: jid});
delete jid2Ssrc[jid];
this.connection.jingle.terminateByJid(jid);
if (this.getPrezi(jid)) {
@ -568,7 +568,6 @@ module.exports = function(XMPP, eventEmitter) {
Object.keys(ssrc2jid).forEach(function (ssrc) {
if (ssrc2jid[ssrc] == jid) {
delete ssrc2jid[ssrc];
delete ssrc2videoType[ssrc];
}
});
@ -577,10 +576,10 @@ module.exports = function(XMPP, eventEmitter) {
//console.log(jid, 'assoc ssrc', ssrc.getAttribute('type'), ssrc.getAttribute('ssrc'));
var ssrcV = ssrc.getAttribute('ssrc');
ssrc2jid[ssrcV] = from;
notReceivedSSRCs.push(ssrcV);
JingleSession.notReceivedSSRCs.push(ssrcV);
var type = ssrc.getAttribute('type');
ssrc2videoType[ssrcV] = type;
var direction = ssrc.getAttribute('direction');


@ -18,7 +18,7 @@ module.exports = function (XMPP) {
},
setMute: function (jid, mute) {
console.info("set mute", mute);
var iqToFocus = $iq({to: focusMucJid, type: 'set'})
var iqToFocus = $iq({to: this.connection.emuc.focusMucJid, type: 'set'})
.c('mute', {
xmlns: 'http://jitsi.org/jitmeet/audio',
jid: jid
@ -37,7 +37,7 @@ module.exports = function (XMPP) {
},
onMute: function (iq) {
var from = iq.getAttribute('from');
if (from !== focusMucJid) {
if (from !== this.connection.emuc.focusMucJid) {
console.warn("Ignored mute from non focus peer");
return false;
}


@ -21,7 +21,7 @@ module.exports = function() {
var req = $iq(
{
type: 'set',
to: focusMucJid
to: this.connection.emuc.focusMucJid
}
);
req.c('dial',


@ -323,7 +323,7 @@ var XMPP = {
toggleRecording: function (tokenEmptyCallback,
startingCallback, startedCallback) {
Recording.toggleRecording(tokenEmptyCallback,
startingCallback, startedCallback);
startingCallback, startedCallback, connection);
},
addToPresence: function (name, value, dontSend) {
switch (name)
@ -353,7 +353,7 @@ var XMPP = {
connection.emuc.sendPresence();
},
sendLogs: function (data) {
if(!focusMucJid)
if(!connection.emuc.focusMucJid)
return;
var deflate = true;
@ -364,7 +364,7 @@ var XMPP = {
}
content = Base64.encode(content);
// XEP-0337-ish
var message = $msg({to: focusMucJid, type: 'normal'});
var message = $msg({to: connection.emuc.focusMucJid, type: 'normal'});
message.c('log', { xmlns: 'urn:xmpp:eventlog',
id: 'PeerConnectionStats'});
message.c('message').t(content).up();