Merge branch 'master' into ssfocus

Conflicts:
	config.js
	libs/strophe/strophe.jingle.session.js
	rtp_sts.js
Commit e308025143
app.js (227 changed lines)
|
@ -12,6 +12,10 @@ var roomName = null;
|
|||
var ssrc2jid = {};
|
||||
var mediaStreams = {};
|
||||
var bridgeIsDown = false;
|
||||
//TODO: this array must be removed when firefox implement multistream support
|
||||
var notReceivedSSRCs = [];
|
||||
|
||||
var jid2Ssrc = {};
|
||||
|
||||
/**
|
||||
* The stats collector that process stats data and triggers updates to app.js.
|
||||
|
@ -30,7 +34,6 @@ var localStatsCollector = null;
|
|||
* FIXME: remove those maps
|
||||
*/
|
||||
var ssrc2videoType = {};
|
||||
var videoSrcToSsrc = {};
|
||||
/**
|
||||
* Currently focused video "src"(displayed in large video).
|
||||
* @type {String}
|
||||
|
@ -74,26 +77,43 @@ function init() {
|
|||
if (RTC === null) {
|
||||
window.location.href = 'webrtcrequired.html';
|
||||
return;
|
||||
} else if (RTC.browser !== 'chrome') {
|
||||
} else if (RTC.browser !== 'chrome' &&
|
||||
config.enableFirefoxSupport !== true) {
|
||||
window.location.href = 'chromeonly.html';
|
||||
return;
|
||||
}
|
||||
|
||||
obtainAudioAndVideoPermissions(function (stream) {
|
||||
var audioStream = new webkitMediaStream();
|
||||
var videoStream = new webkitMediaStream();
|
||||
var audioTracks = stream.getAudioTracks();
|
||||
var videoTracks = stream.getVideoTracks();
|
||||
for (var i = 0; i < audioTracks.length; i++) {
|
||||
audioStream.addTrack(audioTracks[i]);
|
||||
}
|
||||
VideoLayout.changeLocalAudio(audioStream);
|
||||
startLocalRtpStatsCollector(audioStream);
|
||||
var audioStream, videoStream;
|
||||
if(window.webkitMediaStream)
|
||||
{
|
||||
var audioStream = new webkitMediaStream();
|
||||
var videoStream = new webkitMediaStream();
|
||||
var audioTracks = stream.getAudioTracks();
|
||||
var videoTracks = stream.getVideoTracks();
|
||||
for (var i = 0; i < audioTracks.length; i++) {
|
||||
audioStream.addTrack(audioTracks[i]);
|
||||
}
|
||||
|
||||
for (i = 0; i < videoTracks.length; i++) {
|
||||
videoStream.addTrack(videoTracks[i]);
|
||||
for (i = 0; i < videoTracks.length; i++) {
|
||||
videoStream.addTrack(videoTracks[i]);
|
||||
}
|
||||
VideoLayout.changeLocalAudio(audioStream);
|
||||
startLocalRtpStatsCollector(audioStream);
|
||||
|
||||
|
||||
VideoLayout.changeLocalVideo(videoStream, true);
|
||||
}
|
||||
VideoLayout.changeLocalVideo(videoStream, true);
|
||||
else
|
||||
{
|
||||
VideoLayout.changeLocalStream(stream);
|
||||
startLocalRtpStatsCollector(stream);
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
maybeDoJoin();
|
||||
});
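A condensed sketch of the branching the hunk above introduces, for reference: on WebKit browsers the combined stream returned by getUserMedia is split into separate audio and video MediaStreams, while other browsers (Firefox) keep the single stream object. The globals used here (webkitMediaStream, VideoLayout, startLocalRtpStatsCollector, maybeDoJoin) are the ones from the diff; the wrapper function name is illustrative only.

    // Sketch, assuming the app.js globals shown above exist.
    function handleLocalStream(stream) {
        if (window.webkitMediaStream) {
            var audioStream = new webkitMediaStream();
            var videoStream = new webkitMediaStream();
            stream.getAudioTracks().forEach(function (track) {
                audioStream.addTrack(track);
            });
            stream.getVideoTracks().forEach(function (track) {
                videoStream.addTrack(track);
            });
            VideoLayout.changeLocalAudio(audioStream);
            startLocalRtpStatsCollector(audioStream);
            VideoLayout.changeLocalVideo(videoStream, true);
        } else {
            // Single-stream browsers: hand audio and video over together.
            VideoLayout.changeLocalStream(stream);
            startLocalRtpStatsCollector(stream);
        }
        maybeDoJoin();
    }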
|
||||
|
||||
|
@ -170,24 +190,33 @@ function connect(jid, password) {
|
|||
*/
|
||||
function obtainAudioAndVideoPermissions(callback) {
|
||||
// Get AV
|
||||
var cb = function (stream) {
|
||||
console.log('got', stream, stream.getAudioTracks().length, stream.getVideoTracks().length);
|
||||
callback(stream);
|
||||
trackUsage('localMedia', {
|
||||
audio: stream.getAudioTracks().length,
|
||||
video: stream.getVideoTracks().length
|
||||
});
|
||||
}
|
||||
getUserMediaWithConstraints(
|
||||
['audio', 'video'],
|
||||
function (avStream) {
|
||||
callback(avStream);
|
||||
trackUsage('localMedia', {
|
||||
audio: avStream.getAudioTracks().length,
|
||||
video: avStream.getVideoTracks().length
|
||||
});
|
||||
},
|
||||
cb,
|
||||
function (error) {
|
||||
console.error('failed to obtain audio/video stream - stop', error);
|
||||
trackUsage('localMediaError', {
|
||||
media: error.media || 'video',
|
||||
name : error.name
|
||||
});
|
||||
messageHandler.showError("Error",
|
||||
"Failed to obtain permissions to use the local microphone" +
|
||||
"and/or camera.");
|
||||
console.error('failed to obtain audio/video stream - trying audio only', error);
|
||||
getUserMediaWithConstraints(
|
||||
['audio'],
|
||||
cb,
|
||||
function (error) {
|
||||
console.error('failed to obtain audio/video stream - stop', error);
|
||||
trackUsage('localMediaError', {
|
||||
media: error.media || 'video',
|
||||
name : error.name
|
||||
});
|
||||
messageHandler.showError("Error",
|
||||
"Failed to obtain permissions to use the local microphone" +
|
||||
"and/or camera.");
|
||||
}
|
||||
);
|
||||
},
|
||||
config.resolution || '360');
|
||||
}
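The fallback pattern in the hunk above, in isolation: audio+video is requested first and, on failure, the request is retried with audio only, reusing a single success callback so the usage tracking stays in one place. getUserMediaWithConstraints, trackUsage, messageHandler and config come from the diff; the wrapper name is hypothetical.

    // Sketch of the audio-only fallback, assuming the helpers above.
    function obtainMediaWithFallback(callback) {
        var onSuccess = function (stream) {
            callback(stream);
            trackUsage('localMedia', {
                audio: stream.getAudioTracks().length,
                video: stream.getVideoTracks().length
            });
        };
        var reportFailure = function (error) {
            trackUsage('localMediaError', { media: error.media || 'video', name: error.name });
            messageHandler.showError("Error",
                "Failed to obtain permissions to use the local microphone and/or camera.");
        };
        getUserMediaWithConstraints(['audio', 'video'], onSuccess,
            function (error) {
                console.error('failed to obtain audio/video stream - trying audio only', error);
                getUserMediaWithConstraints(['audio'], onSuccess, reportFailure);
            },
            config.resolution || '360');
    }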
|
||||
|
@ -264,8 +293,7 @@ function doJoinAfterFocus() {
|
|||
connection.emuc.doJoin(roomjid);
|
||||
}
|
||||
|
||||
function waitForRemoteVideo(selector, ssrc, stream) {
|
||||
|
||||
function waitForRemoteVideo(selector, ssrc, stream, jid) {
|
||||
// XXX(gp) so, every call to this function is *always* preceded by a call
|
||||
// to the RTC.attachMediaStream() function but that call is *not* followed
|
||||
// by an update to the videoSrcToSsrc map!
|
||||
|
@ -297,17 +325,17 @@ function waitForRemoteVideo(selector, ssrc, stream) {
|
|||
|
||||
// FIXME: add a class that will associate peer Jid, video.src, it's ssrc and video type
|
||||
// in order to get rid of too many maps
|
||||
if (ssrc && selector.attr('src')) {
|
||||
videoSrcToSsrc[selector.attr('src')] = ssrc;
|
||||
if (ssrc && jid) {
|
||||
jid2Ssrc[Strophe.getResourceFromJid(jid)] = ssrc;
|
||||
} else {
|
||||
console.warn("No ssrc given for video", selector);
|
||||
messageHandler.showError('Warning', 'No ssrc was given for the video.');
|
||||
console.warn("No ssrc given for jid", jid);
|
||||
// messageHandler.showError('Warning', 'No ssrc was given for the video.');
|
||||
}
|
||||
|
||||
$(document).trigger('videoactive.jingle', [selector]);
|
||||
} else {
|
||||
setTimeout(function () {
|
||||
waitForRemoteVideo(selector, ssrc, stream);
|
||||
waitForRemoteVideo(selector, ssrc, stream, jid);
|
||||
}, 250);
|
||||
}
|
||||
}
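A generic version of the retry used above, purely illustrative: poll a video element until it reports playable data, carrying any per-call context (here the peer jid is what the real function threads through) into each retry. The readiness test and retry cap are assumptions, not what waitForRemoteVideo actually checks.

    // Sketch: poll until a <video> element is playing, then run a callback.
    function pollUntilPlaying(videoElement, onPlaying, attempt) {
        attempt = attempt || 0;
        if (videoElement.currentTime > 0 && videoElement.readyState >= 2) {
            onPlaying(videoElement);
        } else if (attempt < 40) { // give up after ~10 s at 250 ms per try
            setTimeout(function () {
                pollUntilPlaying(videoElement, onPlaying, attempt + 1);
            }, 250);
        }
    }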
|
||||
|
@ -320,16 +348,19 @@ function waitForPresence(data, sid) {
|
|||
var sess = connection.jingle.sessions[sid];
|
||||
|
||||
var thessrc;
|
||||
|
||||
// look up an associated JID for a stream id
|
||||
if (data.stream.id.indexOf('mixedmslabel') === -1) {
|
||||
if (data.stream.id && data.stream.id.indexOf('mixedmslabel') === -1) {
|
||||
// look only at a=ssrc: and _not_ at a=ssrc-group: lines
|
||||
|
||||
var ssrclines
|
||||
= SDPUtil.find_lines(sess.peerconnection.remoteDescription.sdp, 'a=ssrc:');
|
||||
ssrclines = ssrclines.filter(function (line) {
|
||||
// NOTE(gp) previously we filtered on the mslabel, but that property
|
||||
// is not always present.
|
||||
// return line.indexOf('mslabel:' + data.stream.label) !== -1;
|
||||
return line.indexOf('msid:' + data.stream.id) !== -1;
|
||||
|
||||
return ((line.indexOf('msid:' + data.stream.id) !== -1));
|
||||
});
|
||||
if (ssrclines.length) {
|
||||
thessrc = ssrclines[0].substring(7).split(' ')[0];
|
||||
|
@ -359,6 +390,27 @@ function waitForPresence(data, sid) {
|
|||
}
|
||||
}
|
||||
|
||||
//TODO: this code should be removed when firefox implement multistream support
|
||||
if(RTC.browser == "firefox")
|
||||
{
|
||||
if((notReceivedSSRCs.length == 0) ||
|
||||
!ssrc2jid[notReceivedSSRCs[notReceivedSSRCs.length - 1]])
|
||||
{
|
||||
// TODO(gp) limit wait duration to 1 sec.
|
||||
setTimeout(function(d, s) {
|
||||
return function() {
|
||||
waitForPresence(d, s);
|
||||
}
|
||||
}(data, sid), 250);
|
||||
return;
|
||||
}
|
||||
|
||||
thessrc = notReceivedSSRCs.pop();
|
||||
if (ssrc2jid[thessrc]) {
|
||||
data.peerjid = ssrc2jid[thessrc];
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(gp) now that we have simulcast, a media stream can have more than 1
|
||||
// ssrc. We should probably take that into account in our MediaStream
|
||||
// wrapper.
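The Firefox workaround above, restated as a sketch: SSRCs learned from MUC presence are queued in notReceivedSSRCs, and an incoming stream is not bound to a peer until the most recently queued SSRC has a known owner in ssrc2jid. The 250 ms poll mirrors the hunk; the one-second cap is only the TODO's suggestion, added here as an assumption, and the callback shape is illustrative.

    // Sketch, assuming the notReceivedSSRCs / ssrc2jid globals above.
    function resolveQueuedSsrc(onResolved, attempt) {
        attempt = attempt || 0;
        var last = notReceivedSSRCs[notReceivedSSRCs.length - 1];
        if (notReceivedSSRCs.length === 0 || !ssrc2jid[last]) {
            if (attempt < 4) { // roughly 1 second at 250 ms per retry
                setTimeout(function () {
                    resolveQueuedSsrc(onResolved, attempt + 1);
                }, 250);
            }
            return;
        }
        var thessrc = notReceivedSSRCs.pop();
        onResolved(thessrc, ssrc2jid[thessrc]); // the ssrc and its owner's jid
    }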
|
||||
|
@ -404,8 +456,6 @@ function waitForPresence(data, sid) {
|
|||
data.stream,
|
||||
data.peerjid,
|
||||
thessrc);
|
||||
if(isVideo && container.id !== 'mixedstream')
|
||||
videoSrcToSsrc[$(container).find('>video')[0].src] = thessrc;
|
||||
}
|
||||
|
||||
// an attempt to work around https://github.com/jitsi/jitmeet/issues/32
|
||||
|
@ -420,25 +470,6 @@ function waitForPresence(data, sid) {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the JID of the user to whom given <tt>videoSrc</tt> belongs.
|
||||
* @param videoSrc the video "src" identifier.
|
||||
* @returns {null | String} the JID of the user to whom given <tt>videoSrc</tt>
|
||||
* belongs.
|
||||
*/
|
||||
function getJidFromVideoSrc(videoSrc)
|
||||
{
|
||||
if (videoSrc === localVideoSrc)
|
||||
return connection.emuc.myroomjid;
|
||||
|
||||
var ssrc = videoSrcToSsrc[videoSrc];
|
||||
if (!ssrc)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
return ssrc2jid[ssrc];
|
||||
}
|
||||
|
||||
// an attempt to work around https://github.com/jitsi/jitmeet/issues/32
|
||||
function sendKeyframe(pc) {
|
||||
console.log('sendkeyframe', pc.iceConnectionState);
|
||||
|
@ -637,15 +668,27 @@ $(document).bind('setLocalDescription.jingle', function (event, sid) {
|
|||
var media = simulcast.parseMedia(sess.peerconnection.localDescription);
|
||||
media.forEach(function (media) {
|
||||
|
||||
// TODO(gp) maybe exclude FID streams?
|
||||
Object.keys(media.sources).forEach(function(ssrc) {
|
||||
if(Object.keys(media.sources).length > 0) {
|
||||
// TODO(gp) maybe exclude FID streams?
|
||||
Object.keys(media.sources).forEach(function (ssrc) {
|
||||
newssrcs.push({
|
||||
'ssrc': ssrc,
|
||||
'type': media.type,
|
||||
'direction': media.direction
|
||||
});
|
||||
});
|
||||
}
|
||||
else if(sess.localStreamsSSRC && sess.localStreamsSSRC[media.type])
|
||||
{
|
||||
newssrcs.push({
|
||||
'ssrc': ssrc,
|
||||
'ssrc': sess.localStreamsSSRC[media.type],
|
||||
'type': media.type,
|
||||
'direction': media.direction
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
console.log('new ssrcs', newssrcs);
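The loop above in condensed form: SSRCs parsed out of the local description are advertised when present, otherwise the code falls back to the SSRCs the RTC layer collected via getStats and stored on sess.localStreamsSSRC (the Firefox path). The standalone function is only a sketch of that logic.

    // Sketch of the newssrcs collection, assuming simulcast.parseMedia output.
    function collectNewSsrcs(sess, parsedMedia) {
        var newssrcs = [];
        parsedMedia.forEach(function (media) {
            if (Object.keys(media.sources).length > 0) {
                Object.keys(media.sources).forEach(function (ssrc) {
                    newssrcs.push({ ssrc: ssrc, type: media.type, direction: media.direction });
                });
            } else if (sess.localStreamsSSRC && sess.localStreamsSSRC[media.type]) {
                newssrcs.push({
                    ssrc: sess.localStreamsSSRC[media.type],
                    type: media.type,
                    direction: media.direction
                });
            }
        });
        return newssrcs;
    }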
|
||||
|
||||
// Have to clear presence map to get rid of removed streams
|
||||
|
@ -678,20 +721,22 @@ $(document).bind('iceconnectionstatechange.jingle', function (event, sid, sessio
|
|||
var metadata = {};
|
||||
metadata.setupTime = (new Date()).getTime() - session.timeChecking;
|
||||
session.peerconnection.getStats(function (res) {
|
||||
res.result().forEach(function (report) {
|
||||
if (report.type == 'googCandidatePair' && report.stat('googActiveConnection') == 'true') {
|
||||
metadata.localCandidateType = report.stat('googLocalCandidateType');
|
||||
metadata.remoteCandidateType = report.stat('googRemoteCandidateType');
|
||||
if(res && res.result) {
|
||||
res.result().forEach(function (report) {
|
||||
if (report.type == 'googCandidatePair' && report.stat('googActiveConnection') == 'true') {
|
||||
metadata.localCandidateType = report.stat('googLocalCandidateType');
|
||||
metadata.remoteCandidateType = report.stat('googRemoteCandidateType');
|
||||
|
||||
// log pair as well so we can get nice pie charts
|
||||
metadata.candidatePair = report.stat('googLocalCandidateType') + ';' + report.stat('googRemoteCandidateType');
|
||||
// log pair as well so we can get nice pie charts
|
||||
metadata.candidatePair = report.stat('googLocalCandidateType') + ';' + report.stat('googRemoteCandidateType');
|
||||
|
||||
if (report.stat('googRemoteAddress').indexOf('[') === 0) {
|
||||
metadata.ipv6 = true;
|
||||
if (report.stat('googRemoteAddress').indexOf('[') === 0) {
|
||||
metadata.ipv6 = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
trackUsage('iceConnected', metadata);
|
||||
});
|
||||
trackUsage('iceConnected', metadata);
|
||||
}
|
||||
});
|
||||
}
|
||||
break;
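The guard above, isolated: only stats responses that expose the legacy Chrome result() method are inspected for googCandidatePair metadata, so other browsers simply contribute no candidate-type fields to the tracked event. The helper name and the typeof check are illustrative; the original simply tests res && res.result.

    // Sketch: read candidate-pair metadata from a legacy Chrome stats response.
    function extractCandidatePairInfo(res) {
        var metadata = {};
        if (!res || typeof res.result !== 'function') {
            return metadata; // non-Chrome stats shape - nothing to read
        }
        res.result().forEach(function (report) {
            if (report.type === 'googCandidatePair' &&
                report.stat('googActiveConnection') === 'true') {
                metadata.localCandidateType = report.stat('googLocalCandidateType');
                metadata.remoteCandidateType = report.stat('googRemoteCandidateType');
                // log the pair as well, for aggregate charts
                metadata.candidatePair =
                    metadata.localCandidateType + ';' + metadata.remoteCandidateType;
                if (report.stat('googRemoteAddress').indexOf('[') === 0) {
                    metadata.ipv6 = true; // bracketed remote address => IPv6
                }
            }
        });
        return metadata;
    }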
|
||||
|
@ -788,14 +833,13 @@ $(document).bind('left.muc', function (event, jid) {
|
|||
APIConnector.triggerEvent("participantLeft",{jid: jid});
|
||||
}
|
||||
|
||||
delete jid2Ssrc[jid];
|
||||
|
||||
// Unlock large video
|
||||
if (focusedVideoSrc)
|
||||
if (focusedVideoSrc && focusedVideoSrc.jid === jid)
|
||||
{
|
||||
if (getJidFromVideoSrc(focusedVideoSrc) === jid)
|
||||
{
|
||||
console.info("Focused video owner has left the conference");
|
||||
focusedVideoSrc = null;
|
||||
}
|
||||
console.info("Focused video owner has left the conference");
|
||||
focusedVideoSrc = null;
|
||||
}
|
||||
|
||||
connection.jingle.terminateByJid(jid);
|
||||
|
@ -812,8 +856,6 @@ $(document).bind('presence.muc', function (event, jid, info, pres) {
|
|||
Object.keys(ssrc2jid).forEach(function (ssrc) {
|
||||
if (ssrc2jid[ssrc] == jid) {
|
||||
delete ssrc2jid[ssrc];
|
||||
}
|
||||
if (ssrc2videoType[ssrc] == jid) {
|
||||
delete ssrc2videoType[ssrc];
|
||||
}
|
||||
});
|
||||
|
@ -822,6 +864,7 @@ $(document).bind('presence.muc', function (event, jid, info, pres) {
|
|||
//console.log(jid, 'assoc ssrc', ssrc.getAttribute('type'), ssrc.getAttribute('ssrc'));
|
||||
var ssrcV = ssrc.getAttribute('ssrc');
|
||||
ssrc2jid[ssrcV] = jid;
|
||||
notReceivedSSRCs.push(ssrcV);
|
||||
|
||||
var type = ssrc.getAttribute('type');
|
||||
ssrc2videoType[ssrcV] = type;
|
||||
|
@ -949,16 +992,20 @@ $(document).bind('passwordrequired.main', function (event) {
|
|||
* blob:https%3A//pawel.jitsi.net/9a46e0bd-131e-4d18-9c14-a9264e8db395
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isVideoSrcDesktop(videoSrc) {
|
||||
function isVideoSrcDesktop(jid) {
|
||||
// FIXME: fix this mapping mess...
|
||||
// figure out if large video is desktop stream or just a camera
|
||||
|
||||
if(!jid)
|
||||
return false;
|
||||
var isDesktop = false;
|
||||
if (localVideoSrc === videoSrc) {
|
||||
if (connection.emuc.myroomjid &&
|
||||
Strophe.getResourceFromJid(connection.emuc.myroomjid) === jid) {
|
||||
// local video
|
||||
isDesktop = isUsingScreenStream;
|
||||
} else {
|
||||
// Do we have associations...
|
||||
var videoSsrc = videoSrcToSsrc[videoSrc];
|
||||
var videoSsrc = jid2Ssrc[jid];
|
||||
if (videoSsrc) {
|
||||
var videoType = ssrc2videoType[videoSsrc];
|
||||
if (videoType) {
|
||||
|
@ -968,7 +1015,7 @@ function isVideoSrcDesktop(videoSrc) {
|
|||
console.error("No video type for ssrc: " + videoSsrc);
|
||||
}
|
||||
} else {
|
||||
console.error("No ssrc for src: " + videoSrc);
|
||||
console.error("No ssrc for jid: " + jid);
|
||||
}
|
||||
}
|
||||
return isDesktop;
|
||||
|
@ -1311,6 +1358,8 @@ $(document).ready(function () {
|
|||
VideoLayout.positionLarge(currentVideoWidth, currentVideoHeight);
|
||||
});
|
||||
|
||||
document.getElementById('largeVideo').volume = 0;
|
||||
|
||||
if (!$('#settings').is(':visible')) {
|
||||
console.log('init');
|
||||
init();
|
||||
|
@ -1386,10 +1435,10 @@ function disposeConference(onUnload) {
|
|||
// FIXME: probably removing streams is not required and close() should
|
||||
// be enough
|
||||
if (connection.jingle.localAudio) {
|
||||
handler.peerconnection.removeStream(connection.jingle.localAudio);
|
||||
handler.peerconnection.removeStream(connection.jingle.localAudio, onUnload);
|
||||
}
|
||||
if (connection.jingle.localVideo) {
|
||||
handler.peerconnection.removeStream(connection.jingle.localVideo);
|
||||
handler.peerconnection.removeStream(connection.jingle.localVideo, onUnload);
|
||||
}
|
||||
handler.peerconnection.close();
|
||||
}
|
||||
|
@ -1553,7 +1602,7 @@ function onSelectedEndpointChanged(userJid)
|
|||
dataChannel.send(JSON.stringify({
|
||||
'colibriClass': 'SelectedEndpointChangedEvent',
|
||||
'selectedEndpoint': (!userJid || userJid == null)
|
||||
? null : Strophe.getResourceFromJid(userJid)
|
||||
? null : userJid
|
||||
}));
|
||||
|
||||
return true;
|
||||
|
|
|
@ -4,7 +4,7 @@ var config = {
|
|||
//anonymousdomain: 'guest.example.com',
|
||||
muc: 'conference.jitsi-meet.example.com', // FIXME: use XEP-0030
|
||||
bridge: 'jitsi-videobridge.jitsi-meet.example.com', // FIXME: use XEP-0030
|
||||
call_control: 'callcontrol.jitsi-meet.example.com',
|
||||
//call_control: 'callcontrol.jitsi-meet.example.com',
|
||||
focus: 'focus.jitsi-meet.example.com'
|
||||
},
|
||||
// getroomnode: function (path) { return 'someprefixpossiblybasedonpath'; },
|
||||
|
@ -26,8 +26,10 @@ var config = {
|
|||
adaptiveSimulcast: false,
|
||||
useRtcpMux: true,
|
||||
useBundle: true,
|
||||
enableRecording: true,
|
||||
enableWelcomePage: false,
|
||||
enableRecording: false,
|
||||
enableWelcomePage: true,
|
||||
enableSimulcast: false,
|
||||
enableFirefoxSupport: false, //firefox support is still experimental, only one-to-one conferences with chrome focus
|
||||
// will work when simulcast, bundle, mux, lastN and SCTP are disabled.
|
||||
logStats: false // Enable logging of PeerConnection stats via the focus
|
||||
};
|
||||
|
|
|
@ -104,6 +104,11 @@
|
|||
text-align: center;
|
||||
}
|
||||
|
||||
#largeVideo
|
||||
{
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
#presentation,
|
||||
#etherpad,
|
||||
#localVideoWrapper>video,
|
||||
|
|
|
@ -26,8 +26,7 @@ function onDataChannel(event)
|
|||
|
||||
// when the data channel becomes available, tell the bridge about video
|
||||
// selections so that it can do adaptive simulcast,
|
||||
var largeVideoSrc = $('#largeVideo').attr('src');
|
||||
var userJid = getJidFromVideoSrc(largeVideoSrc);
|
||||
var userJid = VideoLayout.getLargeVideoState().userJid;
|
||||
// we want the notification to trigger even if userJid is undefined,
|
||||
// or null.
|
||||
onSelectedEndpointChanged(userJid);
|
||||
|
|
|
@ -453,7 +453,8 @@ ColibriFocus.prototype.createdConference = function (result) {
|
|||
'a=rtpmap:100 VP8/90000\r\n' +
|
||||
'a=rtcp-fb:100 ccm fir\r\n' +
|
||||
'a=rtcp-fb:100 nack\r\n' +
|
||||
'a=rtcp-fb:100 goog-remb\r\n' +
|
||||
'a=rtcp-fb:100 nack pli\r\n' +
|
||||
(config.enableFirefoxSupport? "" : 'a=rtcp-fb:100 goog-remb\r\n') +
|
||||
'a=rtpmap:116 red/90000\r\n' +
|
||||
'a=rtpmap:117 ulpfec/90000\r\n' +
|
||||
(config.useRtcpMux ? 'a=rtcp-mux\r\n' : '') +
|
||||
|
|
|
@ -141,12 +141,28 @@ if (TraceablePeerConnection.prototype.__defineGetter__ !== undefined) {
|
|||
TraceablePeerConnection.prototype.addStream = function (stream) {
|
||||
this.trace('addStream', stream.id);
|
||||
simulcast.resetSender();
|
||||
this.peerconnection.addStream(stream);
|
||||
try
|
||||
{
|
||||
this.peerconnection.addStream(stream);
|
||||
}
|
||||
catch (e)
|
||||
{
|
||||
console.error(e);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
TraceablePeerConnection.prototype.removeStream = function (stream) {
|
||||
TraceablePeerConnection.prototype.removeStream = function (stream, stopStreams) {
|
||||
this.trace('removeStream', stream.id);
|
||||
simulcast.resetSender();
|
||||
if(stopStreams) {
|
||||
stream.getAudioTracks().forEach(function (track) {
|
||||
track.stop();
|
||||
});
|
||||
stream.getVideoTracks().forEach(function (track) {
|
||||
track.stop();
|
||||
});
|
||||
}
|
||||
this.peerconnection.removeStream(stream);
|
||||
};
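A small helper equivalent to the stopStreams branch above; stopping the tracks (rather than only removing the stream from the peer connection) is what releases the camera and microphone indicators. The helper name is hypothetical, and callers opt in explicitly via the new second argument.

    // Sketch: stop every track of a local MediaStream.
    function stopStream(stream) {
        stream.getAudioTracks().forEach(function (track) { track.stop(); });
        stream.getVideoTracks().forEach(function (track) { track.stop(); });
    }

    // Example call site (see disposeConference in app.js above):
    // handler.peerconnection.removeStream(connection.jingle.localVideo, /* stopStreams */ true);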
|
||||
|
||||
|
@ -486,6 +502,11 @@ TraceablePeerConnection.prototype.addIceCandidate = function (candidate, success
|
|||
TraceablePeerConnection.prototype.getStats = function(callback, errback) {
|
||||
if (navigator.mozGetUserMedia) {
|
||||
// ignore for now...
|
||||
if(!errback)
|
||||
errback = function () {
|
||||
|
||||
}
|
||||
this.peerconnection.getStats(null,callback,errback);
|
||||
} else {
|
||||
this.peerconnection.getStats(callback);
|
||||
}
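The dispatch above as a standalone sketch: at the time of this commit, Firefox's getStats took (selector, successCallback, errorCallback) and required the error callback, while the legacy Chrome API took a single callback. A no-op errback is supplied when the caller did not pass one; the wrapper name is illustrative.

    // Sketch: call getStats with whichever signature the browser expects.
    function getStatsCompat(pc, callback, errback) {
        if (navigator.mozGetUserMedia) {
            pc.getStats(null, callback, errback || function () {});
        } else {
            pc.getStats(callback);
        }
    }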
|
||||
|
@ -506,7 +527,40 @@ function setupRTC() {
|
|||
element[0].mozSrcObject = stream;
|
||||
element[0].play();
|
||||
},
|
||||
pc_constraints: {}
|
||||
pc_constraints: {},
|
||||
getLocalSSRC: function (session, callback) {
|
||||
session.peerconnection.getStats(function (s) {
|
||||
var ssrcs = {};
|
||||
s.forEach(function (item) {
|
||||
if (item.type == "outboundrtp" && !item.isRemote)
|
||||
{
|
||||
ssrcs[item.id.split('_')[2]] = item.ssrc;
|
||||
}
|
||||
});
|
||||
session.localStreamsSSRC = {
|
||||
"audio": ssrcs.audio,//for stable 0
|
||||
"video": ssrcs.video// for stable 1
|
||||
};
|
||||
callback(session.localStreamsSSRC);
|
||||
},
|
||||
function () {
|
||||
callback(null);
|
||||
});
|
||||
},
|
||||
getStreamID: function (stream) {
|
||||
var tracks = stream.getVideoTracks();
|
||||
if(!tracks || tracks.length == 0)
|
||||
{
|
||||
tracks = stream.getAudioTracks();
|
||||
}
|
||||
return tracks[0].id.replace(/[\{,\}]/g,"");
|
||||
},
|
||||
getVideoSrc: function (element) {
|
||||
return element.mozSrcObject;
|
||||
},
|
||||
setVideoSrc: function (element, src) {
|
||||
element.mozSrcObject = src;
|
||||
}
|
||||
};
|
||||
if (!MediaStream.prototype.getVideoTracks)
|
||||
MediaStream.prototype.getVideoTracks = function () { return []; };
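What getLocalSSRC above relies on, as a sketch: in the Firefox stats of this era, local outbound RTP entries have type "outboundrtp" with isRemote false, and the media kind is recovered from the report id (hence id.split('_')[2] yielding "audio" or "video"). The exact id format is browser-internal, so treat this as an assumption carried over from the hunk.

    // Sketch: collect local audio/video SSRCs from a Firefox stats report.
    function ssrcsFromStats(statsReport) {
        var ssrcs = {};
        statsReport.forEach(function (item) {
            if (item.type === "outboundrtp" && !item.isRemote) {
                ssrcs[item.id.split('_')[2]] = item.ssrc;
            }
        });
        return ssrcs; // e.g. { audio: "123...", video: "456..." }
    }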
|
||||
|
@ -525,7 +579,19 @@ function setupRTC() {
|
|||
element.attr('src', webkitURL.createObjectURL(stream));
|
||||
},
|
||||
// DTLS should now be enabled by default but..
|
||||
pc_constraints: {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]}
|
||||
pc_constraints: {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]},
|
||||
getLocalSSRC: function (session, callback) {
|
||||
callback(null);
|
||||
},
|
||||
getStreamID: function (stream) {
|
||||
return stream.id;
|
||||
},
|
||||
getVideoSrc: function (element) {
|
||||
return element.getAttribute("src");
|
||||
},
|
||||
setVideoSrc: function (element, src) {
|
||||
element.setAttribute("src", src);
|
||||
}
|
||||
};
|
||||
if (navigator.userAgent.indexOf('Android') != -1) {
|
||||
RTC.pc_constraints = {}; // disable DTLS on Android
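Why the getVideoSrc/setVideoSrc pair is introduced: Chrome-era code attached streams through a blob URL on element.src, while Firefox used element.mozSrcObject, so any code that compares or swaps video sources now has to go through the RTC wrapper instead of touching .src directly. A hypothetical usage (the thumbnail id is made up):

    // Sketch: swap the large video's source in a browser-agnostic way.
    var large = document.getElementById('largeVideo');
    var thumb = document.getElementById('someThumbnail'); // illustrative id
    if (RTC.getVideoSrc(large) !== RTC.getVideoSrc(thumb)) {
        RTC.setVideoSrc(large, RTC.getVideoSrc(thumb));
    }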
|
||||
|
|
|
@ -88,7 +88,9 @@ Strophe.addConnectionPlugin('jingle', {
|
|||
case 'session-initiate':
|
||||
sess = new JingleSession($(iq).attr('to'), $(iq).find('jingle').attr('sid'), this.connection);
|
||||
// configure session
|
||||
if (this.localAudio) {
|
||||
|
||||
//in firefox we have only one stream object
|
||||
if (this.localAudio != this.localVideo) {
|
||||
sess.localStreams.push(this.localAudio);
|
||||
}
|
||||
if (this.localVideo) {
|
||||
|
@ -173,7 +175,9 @@ Strophe.addConnectionPlugin('jingle', {
|
|||
Math.random().toString(36).substr(2, 12), // random string
|
||||
this.connection);
|
||||
// configure session
|
||||
if (this.localAudio) {
|
||||
|
||||
//in firefox we have only one stream
|
||||
if (this.localAudio != this.localVideo) {
|
||||
sess.localStreams.push(this.localAudio);
|
||||
}
|
||||
if (this.localVideo) {
|
||||
|
|
|
@ -194,7 +194,8 @@ SDP.prototype.removeMediaLines = function(mediaindex, prefix) {
|
|||
}
|
||||
|
||||
// add content's to a jingle element
|
||||
SDP.prototype.toJingle = function (elem, thecreator) {
|
||||
SDP.prototype.toJingle = function (elem, thecreator, ssrcs) {
|
||||
// console.log("SSRC" + ssrcs["audio"] + " - " + ssrcs["video"]);
|
||||
var i, j, k, mline, ssrc, rtpmap, tmp, line, lines;
|
||||
var self = this;
|
||||
// new bundle plan
|
||||
|
@ -221,7 +222,12 @@ SDP.prototype.toJingle = function (elem, thecreator) {
|
|||
if (SDPUtil.find_line(this.media[i], 'a=ssrc:')) {
|
||||
ssrc = SDPUtil.find_line(this.media[i], 'a=ssrc:').substring(7).split(' ')[0]; // take the first
|
||||
} else {
|
||||
ssrc = false;
|
||||
if(ssrcs && ssrcs[mline.media])
|
||||
{
|
||||
ssrc = ssrcs[mline.media];
|
||||
}
|
||||
else
|
||||
ssrc = false;
|
||||
}
|
||||
|
||||
elem.c('content', {creator: thecreator, name: mline.media});
|
||||
|
@ -267,25 +273,60 @@ SDP.prototype.toJingle = function (elem, thecreator) {
|
|||
elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||
// FIXME: group by ssrc and support multiple different ssrcs
|
||||
var ssrclines = SDPUtil.find_lines(this.media[i], 'a=ssrc:');
|
||||
ssrclines.forEach(function(line) {
|
||||
idx = line.indexOf(' ');
|
||||
var linessrc = line.substr(0, idx).substr(7);
|
||||
if (linessrc != ssrc) {
|
||||
if(ssrclines.length > 0) {
|
||||
ssrclines.forEach(function (line) {
|
||||
idx = line.indexOf(' ');
|
||||
var linessrc = line.substr(0, idx).substr(7);
|
||||
if (linessrc != ssrc) {
|
||||
elem.up();
|
||||
ssrc = linessrc;
|
||||
elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||
}
|
||||
var kv = line.substr(idx + 1);
|
||||
elem.c('parameter');
|
||||
if (kv.indexOf(':') == -1) {
|
||||
elem.attrs({ name: kv });
|
||||
} else {
|
||||
elem.attrs({ name: kv.split(':', 2)[0] });
|
||||
elem.attrs({ value: kv.split(':', 2)[1] });
|
||||
}
|
||||
elem.up();
|
||||
ssrc = linessrc;
|
||||
elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||
}
|
||||
var kv = line.substr(idx + 1);
|
||||
elem.c('parameter');
|
||||
if (kv.indexOf(':') == -1) {
|
||||
elem.attrs({ name: kv });
|
||||
} else {
|
||||
elem.attrs({ name: kv.split(':', 2)[0] });
|
||||
elem.attrs({ value: kv.split(':', 2)[1] });
|
||||
}
|
||||
});
|
||||
elem.up();
|
||||
});
|
||||
elem.up();
|
||||
}
|
||||
else
|
||||
{
|
||||
elem.up();
|
||||
elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||
elem.c('parameter');
|
||||
elem.attrs({name: "cname", value:Math.random().toString(36).substring(7)});
|
||||
elem.up();
|
||||
var msid = null;
|
||||
if(mline.media == "audio")
|
||||
{
|
||||
msid = connection.jingle.localAudio.getAudioTracks()[0].id;
|
||||
}
|
||||
else
|
||||
{
|
||||
msid = connection.jingle.localVideo.getVideoTracks()[0].id;
|
||||
}
|
||||
if(msid != null)
|
||||
{
|
||||
msid = msid.replace(/[\{,\}]/g,"");
|
||||
elem.c('parameter');
|
||||
elem.attrs({name: "msid", value:msid});
|
||||
elem.up();
|
||||
elem.c('parameter');
|
||||
elem.attrs({name: "mslabel", value:msid});
|
||||
elem.up();
|
||||
elem.c('parameter');
|
||||
elem.attrs({name: "label", value:msid});
|
||||
elem.up();
|
||||
elem.up();
|
||||
}
|
||||
|
||||
|
||||
}
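The fallback branch above, condensed: when the local SDP carries no a=ssrc: lines (Firefox), a single <source> element is synthesized from the SSRC obtained via stats, with a random cname and the local track id reused as msid/mslabel/label. elem is the Strophe builder passed to toJingle; the helper name and the track argument are illustrative.

    // Sketch: append a synthesized ssrc <source> element to the Jingle stanza.
    function appendSyntheticSource(elem, ssrc, track) {
        elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
        elem.c('parameter', { name: 'cname',
                              value: Math.random().toString(36).substring(7) }).up();
        var msid = track.id.replace(/[\{,\}]/g, "");
        ['msid', 'mslabel', 'label'].forEach(function (name) {
            elem.c('parameter', { name: name, value: msid }).up();
        });
        elem.up(); // close <source>
    }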
|
||||
|
||||
// XEP-0339 handle ssrc-group attributes
|
||||
var ssrc_group_lines = SDPUtil.find_lines(this.media[i], 'a=ssrc-group:');
|
||||
|
|
|
@ -36,6 +36,7 @@ function JingleSession(me, sid, connection) {
|
|||
this.reason = null;
|
||||
|
||||
this.wait = true;
|
||||
this.localStreamsSSRC = null;
|
||||
}
|
||||
|
||||
JingleSession.prototype.initiate = function (peerjid, isInitiator) {
|
||||
|
@ -64,6 +65,7 @@ JingleSession.prototype.initiate = function (peerjid, isInitiator) {
|
|||
};
|
||||
this.peerconnection.onaddstream = function (event) {
|
||||
self.remoteStreams.push(event.stream);
|
||||
console.log("REMOTE STREAM ADDED: " + event.stream + " - " + event.stream.id);
|
||||
$(document).trigger('remotestreamadded.jingle', [event, self.sid]);
|
||||
};
|
||||
this.peerconnection.onremovestream = function (event) {
|
||||
|
@ -128,8 +130,7 @@ JingleSession.prototype.accept = function () {
|
|||
initiator: this.initiator,
|
||||
responder: this.responder,
|
||||
sid: this.sid });
|
||||
prsdp.toJingle(accept, this.initiator == this.me ? 'initiator' : 'responder');
|
||||
|
||||
prsdp.toJingle(accept, this.initiator == this.me ? 'initiator' : 'responder', this.localStreamsSSRC);
|
||||
var sdp = this.peerconnection.localDescription.sdp;
|
||||
while (SDPUtil.find_line(sdp, 'a=inactive')) {
|
||||
// FIXME: change any inactive to sendrecv or whatever they were originally
|
||||
|
@ -149,7 +150,7 @@ JingleSession.prototype.accept = function () {
|
|||
function (stanza) {
|
||||
var error = ($(stanza).find('error').length) ? {
|
||||
code: $(stanza).find('error').attr('code'),
|
||||
reason: $(stanza).find('error :first')[0].tagName,
|
||||
reason: $(stanza).find('error :first')[0].tagName
|
||||
}:{};
|
||||
error.source = 'answer';
|
||||
$(document).trigger('error.jingle', [self.sid, error]);
|
||||
|
@ -220,10 +221,10 @@ JingleSession.prototype.sendIceCandidate = function (candidate) {
|
|||
}, 20);
|
||||
|
||||
}
|
||||
this.drip_container.push(event.candidate);
|
||||
this.drip_container.push(candidate);
|
||||
return;
|
||||
} else {
|
||||
self.sendIceCandidate([event.candidate]);
|
||||
self.sendIceCandidate([candidate]);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -237,25 +238,43 @@ JingleSession.prototype.sendIceCandidate = function (candidate) {
|
|||
initiator: this.initiator,
|
||||
sid: this.sid});
|
||||
this.localSDP = new SDP(this.peerconnection.localDescription.sdp);
|
||||
this.localSDP.toJingle(init, this.initiator == this.me ? 'initiator' : 'responder');
|
||||
this.connection.sendIQ(init,
|
||||
function () {
|
||||
//console.log('session initiate ack');
|
||||
var ack = {};
|
||||
ack.source = 'offer';
|
||||
$(document).trigger('ack.jingle', [self.sid, ack]);
|
||||
},
|
||||
function (stanza) {
|
||||
self.state = 'error';
|
||||
self.peerconnection.close();
|
||||
var error = ($(stanza).find('error').length) ? {
|
||||
code: $(stanza).find('error').attr('code'),
|
||||
reason: $(stanza).find('error :first')[0].tagName,
|
||||
}:{};
|
||||
error.source = 'offer';
|
||||
$(document).trigger('error.jingle', [self.sid, error]);
|
||||
},
|
||||
10000);
|
||||
var self = this;
|
||||
var sendJingle = function (ssrc) {
|
||||
if(!ssrc)
|
||||
ssrc = {};
|
||||
self.localSDP.toJingle(init, self.initiator == self.me ? 'initiator' : 'responder', ssrc);
|
||||
self.connection.sendIQ(init,
|
||||
function () {
|
||||
//console.log('session initiate ack');
|
||||
var ack = {};
|
||||
ack.source = 'offer';
|
||||
$(document).trigger('ack.jingle', [self.sid, ack]);
|
||||
},
|
||||
function (stanza) {
|
||||
self.state = 'error';
|
||||
self.peerconnection.close();
|
||||
var error = ($(stanza).find('error').length) ? {
|
||||
code: $(stanza).find('error').attr('code'),
|
||||
reason: $(stanza).find('error :first')[0].tagName,
|
||||
}:{};
|
||||
error.source = 'offer';
|
||||
$(document).trigger('error.jingle', [self.sid, error]);
|
||||
},
|
||||
10000);
|
||||
}
|
||||
|
||||
RTC.getLocalSSRC(this, function (ssrcs) {
|
||||
if(ssrcs)
|
||||
{
|
||||
sendJingle(ssrcs);
|
||||
$(document).trigger("setLocalDescription.jingle", [self.sid]);
|
||||
}
|
||||
else
|
||||
{
|
||||
sendJingle();
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
this.lasticecandidate = true;
|
||||
console.log('Have we encountered any srflx candidates? ' + this.hadstuncandidate);
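The deferred-send pattern above in isolation: the session-initiate IQ is only built once RTC.getLocalSSRC has reported the local SSRCs (needed on Firefox, where the SDP lacks a=ssrc lines), and is sent without them when none are available. sendJingle stands for the closure defined in the hunk; session stands for the JingleSession instance.

    // Sketch of the callback-driven send.
    RTC.getLocalSSRC(session, function (ssrcs) {
        if (ssrcs) {
            sendJingle(ssrcs);
            $(document).trigger('setLocalDescription.jingle', [session.sid]);
        } else {
            sendJingle(); // sendJingle substitutes an empty ssrc map internally
        }
    });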
|
||||
|
@ -276,11 +295,12 @@ JingleSession.prototype.sendIceCandidates = function (candidates) {
|
|||
sid: this.sid});
|
||||
for (var mid = 0; mid < this.localSDP.media.length; mid++) {
|
||||
var cands = candidates.filter(function (el) { return el.sdpMLineIndex == mid; });
|
||||
var mline = SDPUtil.parse_mline(this.localSDP.media[mid].split('\r\n')[0]);
|
||||
if (cands.length > 0) {
|
||||
var ice = SDPUtil.iceparams(this.localSDP.media[mid], this.localSDP.session);
|
||||
ice.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1';
|
||||
cand.c('content', {creator: this.initiator == this.me ? 'initiator' : 'responder',
|
||||
name: cands[0].sdpMid
|
||||
name: (cands[0].sdpMid? cands[0].sdpMid : mline.media)
|
||||
}).c('transport', ice);
|
||||
for (var i = 0; i < cands.length; i++) {
|
||||
cand.c('candidate', SDPUtil.candidateToJingle(cands[i].candidate)).up();
|
||||
|
@ -339,14 +359,14 @@ JingleSession.prototype.createdOffer = function (sdp) {
|
|||
var self = this;
|
||||
this.localSDP = new SDP(sdp.sdp);
|
||||
//this.localSDP.mangle();
|
||||
if (this.usetrickle) {
|
||||
var sendJingle = function () {
|
||||
var init = $iq({to: this.peerjid,
|
||||
type: 'set'})
|
||||
.c('jingle', {xmlns: 'urn:xmpp:jingle:1',
|
||||
action: 'session-initiate',
|
||||
initiator: this.initiator,
|
||||
sid: this.sid});
|
||||
this.localSDP.toJingle(init, this.initiator == this.me ? 'initiator' : 'responder');
|
||||
this.localSDP.toJingle(init, this.initiator == this.me ? 'initiator' : 'responder', this.localStreamsSSRC);
|
||||
this.connection.sendIQ(init,
|
||||
function () {
|
||||
var ack = {};
|
||||
|
@ -368,7 +388,16 @@ JingleSession.prototype.createdOffer = function (sdp) {
|
|||
sdp.sdp = this.localSDP.raw;
|
||||
this.peerconnection.setLocalDescription(sdp,
|
||||
function () {
|
||||
$(document).trigger('setLocalDescription.jingle', [self.sid]);
|
||||
if(this.usetrickle)
|
||||
{
|
||||
RTC.getLocalSSRC(function(ssrc)
|
||||
{
|
||||
sendJingle(ssrc);
|
||||
$(document).trigger('setLocalDescription.jingle', [self.sid]);
|
||||
});
|
||||
}
|
||||
else
|
||||
$(document).trigger('setLocalDescription.jingle', [self.sid]);
|
||||
//console.log('setLocalDescription success');
|
||||
},
|
||||
function (e) {
|
||||
|
@ -557,21 +586,9 @@ JingleSession.prototype.createdAnswer = function (sdp, provisional) {
|
|||
var self = this;
|
||||
this.localSDP = new SDP(sdp.sdp);
|
||||
//this.localSDP.mangle();
|
||||
var accept = null;
|
||||
this.usepranswer = provisional === true;
|
||||
if (this.usetrickle) {
|
||||
if (!this.usepranswer) {
|
||||
accept = $iq({to: this.peerjid,
|
||||
type: 'set'})
|
||||
.c('jingle', {xmlns: 'urn:xmpp:jingle:1',
|
||||
action: 'session-accept',
|
||||
initiator: this.initiator,
|
||||
responder: this.responder,
|
||||
sid: this.sid });
|
||||
var publicLocalDesc = simulcast.reverseTransformLocalDescription(sdp);
|
||||
var publicLocalSDP = new SDP(publicLocalDesc.sdp);
|
||||
publicLocalSDP.toJingle(accept, this.initiator == this.me ? 'initiator' : 'responder');
|
||||
} else {
|
||||
if (this.usepranswer) {
|
||||
sdp.type = 'pranswer';
|
||||
for (var i = 0; i < this.localSDP.media.length; i++) {
|
||||
this.localSDP.media[i] = this.localSDP.media[i].replace('a=sendrecv\r\n', 'a=inactive\r\n');
|
||||
|
@ -579,13 +596,19 @@ JingleSession.prototype.createdAnswer = function (sdp, provisional) {
|
|||
this.localSDP.raw = this.localSDP.session + '\r\n' + this.localSDP.media.join('');
|
||||
}
|
||||
}
|
||||
sdp.sdp = this.localSDP.raw;
|
||||
this.peerconnection.setLocalDescription(sdp,
|
||||
function () {
|
||||
$(document).trigger('setLocalDescription.jingle', [self.sid]);
|
||||
//console.log('setLocalDescription success');
|
||||
if (accept)
|
||||
{
|
||||
var self = this;
|
||||
var sendJingle = function (ssrcs) {
|
||||
|
||||
var accept = $iq({to: self.peerjid,
|
||||
type: 'set'})
|
||||
.c('jingle', {xmlns: 'urn:xmpp:jingle:1',
|
||||
action: 'session-accept',
|
||||
initiator: self.initiator,
|
||||
responder: self.responder,
|
||||
sid: self.sid });
|
||||
var publicLocalDesc = simulcast.reverseTransformLocalDescription(sdp);
|
||||
var publicLocalSDP = new SDP(publicLocalDesc.sdp);
|
||||
publicLocalSDP.toJingle(accept, self.initiator == self.me ? 'initiator' : 'responder', ssrcs);
|
||||
this.connection.sendIQ(accept,
|
||||
function () {
|
||||
var ack = {};
|
||||
|
@ -598,12 +621,23 @@ JingleSession.prototype.createdAnswer = function (sdp, provisional) {
|
|||
reason: $(stanza).find('error :first')[0].tagName,
|
||||
}:{};
|
||||
error.source = 'answer';
|
||||
error.stanza = stanza;
|
||||
|
||||
$(document).trigger('error.jingle', [self.sid, error]);
|
||||
},
|
||||
10000);
|
||||
}
|
||||
sdp.sdp = this.localSDP.raw;
|
||||
this.peerconnection.setLocalDescription(sdp,
|
||||
function () {
|
||||
|
||||
//console.log('setLocalDescription success');
|
||||
if (self.usetrickle && !self.usepranswer) {
|
||||
RTC.getLocalSSRC(self, function (ssrc) {
|
||||
sendJingle(ssrc);
|
||||
$(document).trigger('setLocalDescription.jingle', [self.sid]);
|
||||
});
|
||||
}
|
||||
else
|
||||
$(document).trigger('setLocalDescription.jingle', [self.sid]);
|
||||
},
|
||||
function (e) {
|
||||
console.error('setLocalDescription failed', e);
|
||||
|
|
|
@ -82,14 +82,17 @@ SessionBase.prototype.switchStreams = function (new_stream, oldStream, success_c
|
|||
if(self.peerconnection.localDescription) {
|
||||
oldSdp = new SDP(self.peerconnection.localDescription.sdp);
|
||||
}
|
||||
self.peerconnection.removeStream(oldStream);
|
||||
self.peerconnection.removeStream(oldStream, true);
|
||||
self.peerconnection.addStream(new_stream);
|
||||
}
|
||||
|
||||
self.connection.jingle.localVideo = new_stream;
|
||||
|
||||
self.connection.jingle.localStreams = [];
|
||||
self.connection.jingle.localStreams.push(self.connection.jingle.localAudio);
|
||||
|
||||
//in firefox we have only one stream object
|
||||
if(self.connection.jingle.localAudio != self.connection.jingle.localVideo)
|
||||
self.connection.jingle.localStreams.push(self.connection.jingle.localAudio);
|
||||
self.connection.jingle.localStreams.push(self.connection.jingle.localVideo);
|
||||
|
||||
// Conference is not active
|
||||
|
|
File diff suppressed because one or more lines are too long
muc.js (2 changed lines)
|
@ -112,7 +112,7 @@ Strophe.addConnectionPlugin('emuc', {
|
|||
var create = $iq({type: 'set', to: this.roomjid})
|
||||
.c('query', {xmlns: 'http://jabber.org/protocol/muc#owner'})
|
||||
.c('x', {xmlns: 'jabber:x:data', type: 'submit'});
|
||||
this.connection.send(create); // fire away
|
||||
this.connection.sendIQ(create); // fire away
|
||||
}
|
||||
|
||||
// Parse roles.
|
||||
|
|
rtp_sts.js (207 changed lines)
|
@ -80,7 +80,14 @@ PeerStats.prototype.setSsrcResolution = function (ssrc, resolution)
|
|||
*/
|
||||
PeerStats.prototype.setSsrcBitrate = function (ssrc, bitrate)
|
||||
{
|
||||
this.ssrc2bitrate[ssrc] = bitrate;
|
||||
if(this.ssrc2bitrate[ssrc])
|
||||
{
|
||||
this.ssrc2bitrate[ssrc].download += bitrate.download;
|
||||
this.ssrc2bitrate[ssrc].upload += bitrate.upload;
|
||||
}
|
||||
else {
|
||||
this.ssrc2bitrate[ssrc] = bitrate;
|
||||
}
|
||||
};
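The accumulate-or-initialize logic above, spelled out: per-interval bitrates are summed into ssrc2bitrate until the aggregation pass reads and deletes the entry (see the later hunk in this file), so streams that only produced data in some intervals are not under-reported. This is a restatement of the diff, not an alternative implementation.

    // Sketch of setSsrcBitrate's accumulation.
    PeerStats.prototype.setSsrcBitrate = function (ssrc, bitrate) {
        var current = this.ssrc2bitrate[ssrc];
        if (current) {
            current.download += bitrate.download;
            current.upload += bitrate.upload;
        } else {
            this.ssrc2bitrate[ssrc] = bitrate;
        }
    };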
|
||||
|
||||
/**
|
||||
|
@ -103,6 +110,7 @@ PeerStats.prototype.setSsrcAudioLevel = function (ssrc, audioLevel)
|
|||
*/
|
||||
PeerStats.transport = [];
|
||||
|
||||
|
||||
/**
|
||||
* <tt>StatsCollector</tt> registers for stats updates of given
|
||||
* <tt>peerconnection</tt> in given <tt>interval</tt>. On each update particular
|
||||
|
@ -210,7 +218,15 @@ StatsCollector.prototype.start = function ()
|
|||
self.peerconnection.getStats(
|
||||
function (report)
|
||||
{
|
||||
var results = report.result();
|
||||
var results = null;
|
||||
if(!report || !report.result || typeof report.result != 'function')
|
||||
{
|
||||
results = report;
|
||||
}
|
||||
else
|
||||
{
|
||||
results = report.result();
|
||||
}
|
||||
//console.error("Got interval report", results);
|
||||
self.currentAudioLevelsReport = results;
|
||||
self.processAudioLevelReport();
|
||||
|
@ -229,10 +245,28 @@ StatsCollector.prototype.start = function ()
|
|||
self.peerconnection.getStats(
|
||||
function (report)
|
||||
{
|
||||
var results = report.result();
|
||||
var results = null;
|
||||
if(!report || !report.result || typeof report.result != 'function')
|
||||
{
|
||||
//firefox
|
||||
results = report;
|
||||
}
|
||||
else
|
||||
{
|
||||
//chrome
|
||||
results = report.result();
|
||||
}
|
||||
//console.error("Got interval report", results);
|
||||
self.currentStatsReport = results;
|
||||
self.processStatsReport();
|
||||
try
|
||||
{
|
||||
self.processStatsReport();
|
||||
}
|
||||
catch(e)
|
||||
{
|
||||
console.error("Unsupported key:" + e);
|
||||
}
|
||||
|
||||
self.baselineStatsReport = self.currentStatsReport;
|
||||
},
|
||||
self.errorCallback
|
||||
|
@ -318,6 +352,36 @@ StatsCollector.prototype.logStats = function () {
|
|||
this.statsToBeLogged.stats = {};
|
||||
this.statsToBeLogged.timestamps = [];
|
||||
};
|
||||
var keyMap = {
|
||||
"firefox": {
|
||||
"ssrc": "ssrc",
|
||||
"packetsReceived": "packetsReceived",
|
||||
"packetsLost": "packetsLost",
|
||||
"packetsSent": "packetsSent",
|
||||
"bytesReceived": "bytesReceived",
|
||||
"bytesSent": "bytesSent"
|
||||
},
|
||||
"chrome": {
|
||||
"receiveBandwidth": "googAvailableReceiveBandwidth",
|
||||
"sendBandwidth": "googAvailableSendBandwidth",
|
||||
"remoteAddress": "googRemoteAddress",
|
||||
"transportType": "googTransportType",
|
||||
"localAddress": "googLocalAddress",
|
||||
"activeConnection": "googActiveConnection",
|
||||
"ssrc": "ssrc",
|
||||
"packetsReceived": "packetsReceived",
|
||||
"packetsSent": "packetsSent",
|
||||
"packetsLost": "packetsLost",
|
||||
"bytesReceived": "bytesReceived",
|
||||
"bytesSent": "bytesSent",
|
||||
"googFrameHeightReceived": "googFrameHeightReceived",
|
||||
"googFrameWidthReceived": "googFrameWidthReceived",
|
||||
"googFrameHeightSent": "googFrameHeightSent",
|
||||
"googFrameWidthSent": "googFrameWidthSent",
|
||||
"audioInputLevel": "audioInputLevel",
|
||||
"audioOutputLevel": "audioOutputLevel"
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Stats processing logic.
|
||||
|
@ -329,23 +393,29 @@ StatsCollector.prototype.processStatsReport = function () {
|
|||
|
||||
for (var idx in this.currentStatsReport) {
|
||||
var now = this.currentStatsReport[idx];
|
||||
if (now.stat('googAvailableReceiveBandwidth') ||
|
||||
now.stat('googAvailableSendBandwidth'))
|
||||
{
|
||||
PeerStats.bandwidth = {
|
||||
"download": Math.round(
|
||||
(now.stat('googAvailableReceiveBandwidth')) / 1000),
|
||||
"upload": Math.round(
|
||||
(now.stat('googAvailableSendBandwidth')) / 1000)
|
||||
};
|
||||
try {
|
||||
if (getStatValue(now, 'receiveBandwidth') ||
|
||||
getStatValue(now, 'sendBandwidth')) {
|
||||
PeerStats.bandwidth = {
|
||||
"download": Math.round(
|
||||
(getStatValue(now, 'receiveBandwidth')) / 1000),
|
||||
"upload": Math.round(
|
||||
(getStatValue(now, 'sendBandwidth')) / 1000)
|
||||
};
|
||||
}
|
||||
}
|
||||
catch(e){/*not supported*/}
|
||||
|
||||
if(now.type == 'googCandidatePair')
|
||||
{
|
||||
var ip = now.stat('googRemoteAddress');
|
||||
var type = now.stat("googTransportType");
|
||||
var localIP = now.stat("googLocalAddress");
|
||||
var active = now.stat("googActiveConnection");
|
||||
var ip, type, localIP, active;
|
||||
try {
|
||||
ip = getStatValue(now, 'remoteAddress');
|
||||
type = getStatValue(now, "transportType");
|
||||
localIP = getStatValue(now, "localAddress");
|
||||
active = getStatValue(now, "activeConnection");
|
||||
}
|
||||
catch(e){/*not supported*/}
|
||||
if(!ip || !type || !localIP || active != "true")
|
||||
continue;
|
||||
var addressSaved = false;
|
||||
|
@ -364,17 +434,32 @@ StatsCollector.prototype.processStatsReport = function () {
|
|||
continue;
|
||||
}
|
||||
|
||||
if (now.type != 'ssrc') {
|
||||
if(now.type == "candidatepair")
|
||||
{
|
||||
if(now.state == "succeeded")
|
||||
continue;
|
||||
|
||||
var local = this.currentStatsReport[now.localCandidateId];
|
||||
var remote = this.currentStatsReport[now.remoteCandidateId];
|
||||
PeerStats.transport.push({localip: local.ipAddress + ":" + local.portNumber,
|
||||
ip: remote.ipAddress + ":" + remote.portNumber, type: local.transport});
|
||||
|
||||
}
|
||||
|
||||
if (now.type != 'ssrc' && now.type != "outboundrtp" &&
|
||||
now.type != "inboundrtp") {
|
||||
continue;
|
||||
}
|
||||
|
||||
var before = this.baselineStatsReport[idx];
|
||||
if (!before) {
|
||||
console.warn(now.stat('ssrc') + ' not enough data');
|
||||
console.warn(getStatValue(now, 'ssrc') + ' not enough data');
|
||||
continue;
|
||||
}
|
||||
|
||||
var ssrc = now.stat('ssrc');
|
||||
var ssrc = getStatValue(now, 'ssrc');
|
||||
if(!ssrc)
|
||||
continue;
|
||||
var jid = ssrc2jid[ssrc];
|
||||
if (!jid) {
|
||||
console.warn("No jid for ssrc: " + ssrc);
|
||||
|
@ -390,31 +475,30 @@ StatsCollector.prototype.processStatsReport = function () {
|
|||
|
||||
var isDownloadStream = true;
|
||||
var key = 'packetsReceived';
|
||||
if (!now.stat(key))
|
||||
if (!getStatValue(now, key))
|
||||
{
|
||||
isDownloadStream = false;
|
||||
key = 'packetsSent';
|
||||
if (!now.stat(key))
|
||||
if (!getStatValue(now, key))
|
||||
{
|
||||
console.error("No packetsReceived nor packetSent stat found");
|
||||
this.stop();
|
||||
return;
|
||||
console.warn("No packetsReceived nor packetSent stat found");
|
||||
continue;
|
||||
}
|
||||
}
|
||||
var packetsNow = now.stat(key);
|
||||
var packetsNow = getStatValue(now, key);
|
||||
if(!packetsNow || packetsNow < 0)
|
||||
packetsNow = 0;
|
||||
|
||||
var packetsBefore = before.stat(key);
|
||||
var packetsBefore = getStatValue(before, key);
|
||||
if(!packetsBefore || packetsBefore < 0)
|
||||
packetsBefore = 0;
|
||||
var packetRate = packetsNow - packetsBefore;
|
||||
if(!packetRate || packetRate < 0)
|
||||
packetRate = 0;
|
||||
var currentLoss = now.stat('packetsLost');
|
||||
var currentLoss = getStatValue(now, 'packetsLost');
|
||||
if(!currentLoss || currentLoss < 0)
|
||||
currentLoss = 0;
|
||||
var previousLoss = before.stat('packetsLost');
|
||||
var previousLoss = getStatValue(before, 'packetsLost');
|
||||
if(!previousLoss || previousLoss < 0)
|
||||
previousLoss = 0;
|
||||
var lossRate = currentLoss - previousLoss;
|
||||
|
@ -427,16 +511,18 @@ StatsCollector.prototype.processStatsReport = function () {
|
|||
"packetsLost": lossRate,
|
||||
"isDownloadStream": isDownloadStream});
|
||||
|
||||
|
||||
var bytesReceived = 0, bytesSent = 0;
|
||||
if(now.stat("bytesReceived"))
|
||||
if(getStatValue(now, "bytesReceived"))
|
||||
{
|
||||
bytesReceived = now.stat("bytesReceived") -
|
||||
before.stat("bytesReceived");
|
||||
bytesReceived = getStatValue(now, "bytesReceived") -
|
||||
getStatValue(before, "bytesReceived");
|
||||
}
|
||||
|
||||
if(now.stat("bytesSent"))
|
||||
if(getStatValue(now, "bytesSent"))
|
||||
{
|
||||
bytesSent = now.stat("bytesSent") - before.stat("bytesSent");
|
||||
bytesSent = getStatValue(now, "bytesSent") -
|
||||
getStatValue(before, "bytesSent");
|
||||
}
|
||||
|
||||
var time = Math.round((now.timestamp - before.timestamp) / 1000);
|
||||
|
@ -461,19 +547,21 @@ StatsCollector.prototype.processStatsReport = function () {
|
|||
jidStats.setSsrcBitrate(ssrc, {
|
||||
"download": bytesReceived,
|
||||
"upload": bytesSent});
|
||||
|
||||
var resolution = {height: null, width: null};
|
||||
if(now.stat("googFrameHeightReceived") &&
|
||||
now.stat("googFrameWidthReceived"))
|
||||
{
|
||||
resolution.height = now.stat("googFrameHeightReceived");
|
||||
resolution.width = now.stat("googFrameWidthReceived");
|
||||
}
|
||||
else if(now.stat("googFrameHeightSent") &&
|
||||
now.stat("googFrameWidthSent"))
|
||||
{
|
||||
resolution.height = now.stat("googFrameHeightSent");
|
||||
resolution.width = now.stat("googFrameWidthSent");
|
||||
try {
|
||||
if (getStatValue(now, "googFrameHeightReceived") &&
|
||||
getStatValue(now, "googFrameWidthReceived")) {
|
||||
resolution.height = getStatValue(now, "googFrameHeightReceived");
|
||||
resolution.width = getStatValue(now, "googFrameWidthReceived");
|
||||
}
|
||||
else if (getStatValue(now, "googFrameHeightSent") &&
|
||||
getStatValue(now, "googFrameWidthSent")) {
|
||||
resolution.height = getStatValue(now, "googFrameHeightSent");
|
||||
resolution.width = getStatValue(now, "googFrameWidthSent");
|
||||
}
|
||||
}
|
||||
catch(e){/*not supported*/}
|
||||
|
||||
if(resolution.height && resolution.width)
|
||||
{
|
||||
|
@ -515,6 +603,8 @@ StatsCollector.prototype.processStatsReport = function () {
|
|||
self.jid2stats[jid].ssrc2bitrate[ssrc].download;
|
||||
bitrateUpload +=
|
||||
self.jid2stats[jid].ssrc2bitrate[ssrc].upload;
|
||||
|
||||
delete self.jid2stats[jid].ssrc2bitrate[ssrc];
|
||||
}
|
||||
);
|
||||
resolutions[jid] = self.jid2stats[jid].ssrc2resolution;
|
||||
|
@ -566,11 +656,11 @@ StatsCollector.prototype.processAudioLevelReport = function ()
|
|||
var before = this.baselineAudioLevelsReport[idx];
|
||||
if (!before)
|
||||
{
|
||||
console.warn(now.stat('ssrc') + ' not enough data');
|
||||
console.warn(getStatValue(now, 'ssrc') + ' not enough data');
|
||||
continue;
|
||||
}
|
||||
|
||||
var ssrc = now.stat('ssrc');
|
||||
var ssrc = getStatValue(now, 'ssrc');
|
||||
var jid = ssrc2jid[ssrc];
|
||||
if (!jid)
|
||||
{
|
||||
|
@ -586,9 +676,19 @@ StatsCollector.prototype.processAudioLevelReport = function ()
|
|||
}
|
||||
|
||||
// Audio level
|
||||
var audioLevel = now.stat('audioInputLevel');
|
||||
if (!audioLevel)
|
||||
audioLevel = now.stat('audioOutputLevel');
|
||||
var audioLevel = null;
|
||||
|
||||
try {
|
||||
audioLevel = getStatValue(now, 'audioInputLevel');
|
||||
if (!audioLevel)
|
||||
audioLevel = getStatValue(now, 'audioOutputLevel');
|
||||
}
|
||||
catch(e) {/*not supported*/
|
||||
console.warn("Audio Levels are not available in the statistics.");
|
||||
clearInterval(this.audioLevelsIntervalId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (audioLevel)
|
||||
{
|
||||
// TODO: can't find specs about what this value really is,
|
||||
|
@ -603,3 +703,10 @@ StatsCollector.prototype.processAudioLevelReport = function ()
|
|||
|
||||
|
||||
};
|
||||
|
||||
function getStatValue(item, name) {
|
||||
if(!keyMap[RTC.browser][name])
|
||||
throw "The property isn't supported!";
|
||||
var key = keyMap[RTC.browser][name];
|
||||
return RTC.browser == "chrome"? item.stat(key) : item[key];
|
||||
}
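getStatValue in use: callers name an abstract stat ("bytesReceived", "receiveBandwidth", ...) and keyMap translates it per browser; on Chrome the value is read through report.stat(), on Firefox it is a plain property. Unsupported keys throw, which is why the call sites above wrap these reads in try/catch. The variable names in this call are illustrative.

    // Sketch: read one stat defensively from a single stats entry.
    var received = 0;
    try {
        received = getStatValue(statsEntry, 'bytesReceived');
    } catch (e) {
        // stat not available in this browser - treat it as missing
    }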
|
videolayout.js (247 changed lines)
|
@ -9,6 +9,9 @@ var VideoLayout = (function (my) {
|
|||
updateInProgress: false,
|
||||
newSrc: ''
|
||||
};
|
||||
|
||||
var defaultLocalDisplayName = "Me";
|
||||
|
||||
my.connectionIndicators = {};
|
||||
|
||||
my.isInLastN = function(resource) {
|
||||
|
@ -17,9 +20,13 @@ var VideoLayout = (function (my) {
|
|||
|| (lastNEndpointsCache && lastNEndpointsCache.indexOf(resource) !== -1);
|
||||
};
|
||||
|
||||
my.changeLocalStream = function (stream) {
|
||||
connection.jingle.localAudio = stream;
|
||||
VideoLayout.changeLocalVideo(stream, true);
|
||||
}
|
||||
|
||||
my.changeLocalAudio = function(stream) {
|
||||
connection.jingle.localAudio = stream;
|
||||
|
||||
RTC.attachMediaStream($('#localAudio'), stream);
|
||||
document.getElementById('localAudio').autoplay = true;
|
||||
document.getElementById('localAudio').volume = 0;
|
||||
|
@ -33,7 +40,7 @@ var VideoLayout = (function (my) {
|
|||
connection.jingle.localVideo = stream;
|
||||
|
||||
var localVideo = document.createElement('video');
|
||||
localVideo.id = 'localVideo_' + stream.id;
|
||||
localVideo.id = 'localVideo_' + RTC.getStreamID(stream);
|
||||
localVideo.autoplay = true;
|
||||
localVideo.volume = 0; // is it required if audio is separated ?
|
||||
localVideo.oncontextmenu = function () { return false; };
|
||||
|
@ -55,10 +62,10 @@ var VideoLayout = (function (my) {
|
|||
// Add click handler to both video and video wrapper elements in case
|
||||
// there's no video.
|
||||
localVideoSelector.click(function () {
|
||||
VideoLayout.handleVideoThumbClicked(localVideo.src);
|
||||
VideoLayout.handleVideoThumbClicked(RTC.getVideoSrc(localVideo), false, connection.emuc.myroomjid);
|
||||
});
|
||||
$('#localVideoContainer').click(function () {
|
||||
VideoLayout.handleVideoThumbClicked(localVideo.src);
|
||||
VideoLayout.handleVideoThumbClicked(RTC.getVideoSrc(localVideo), false, connection.emuc.myroomjid);
|
||||
});
|
||||
|
||||
// Add hover handler
|
||||
|
@ -68,14 +75,14 @@ var VideoLayout = (function (my) {
|
|||
},
|
||||
function() {
|
||||
if (!VideoLayout.isLargeVideoVisible()
|
||||
|| localVideo.src !== $('#largeVideo').attr('src'))
|
||||
|| RTC.getVideoSrc(localVideo) !== RTC.getVideoSrc($('#largeVideo')[0]))
|
||||
VideoLayout.showDisplayName('localVideoContainer', false);
|
||||
}
|
||||
);
|
||||
// Add stream ended handler
|
||||
stream.onended = function () {
|
||||
localVideoContainer.removeChild(localVideo);
|
||||
VideoLayout.updateRemovedVideo(localVideo.src);
|
||||
VideoLayout.updateRemovedVideo(RTC.getVideoSrc(localVideo));
|
||||
};
|
||||
// Flip video x axis if needed
|
||||
flipXLocalVideo = flipX;
|
||||
|
@ -86,9 +93,16 @@ var VideoLayout = (function (my) {
|
|||
var videoStream = simulcast.getLocalVideoStream();
|
||||
RTC.attachMediaStream(localVideoSelector, videoStream);
|
||||
|
||||
localVideoSrc = localVideo.src;
|
||||
localVideoSrc = RTC.getVideoSrc(localVideo);
|
||||
|
||||
var myResourceJid = null;
|
||||
if(connection.emuc.myroomjid)
|
||||
{
|
||||
myResourceJid = Strophe.getResourceFromJid(connection.emuc.myroomjid);
|
||||
}
|
||||
VideoLayout.updateLargeVideo(localVideoSrc, 0,
|
||||
myResourceJid);
|
||||
|
||||
VideoLayout.updateLargeVideo(localVideoSrc, 0);
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -97,7 +111,7 @@ var VideoLayout = (function (my) {
|
|||
* @param removedVideoSrc src stream identifier of the video.
|
||||
*/
|
||||
my.updateRemovedVideo = function(removedVideoSrc) {
|
||||
if (removedVideoSrc === $('#largeVideo').attr('src')) {
|
||||
if (removedVideoSrc === RTC.getVideoSrc($('#largeVideo')[0])) {
|
||||
// this is currently displayed as large
|
||||
// pick the last visible video in the row
|
||||
// if nobody else is left, this picks the local video
|
||||
|
@ -109,7 +123,7 @@ var VideoLayout = (function (my) {
|
|||
console.info("Last visible video no longer exists");
|
||||
pick = $('#remoteVideos>span[id!="mixedstream"]>video').get(0);
|
||||
|
||||
if (!pick || !pick.src) {
|
||||
if (!pick || !RTC.getVideoSrc(pick)) {
|
||||
// Try local video
|
||||
console.info("Fallback to local video...");
|
||||
pick = $('#remoteVideos>span>span>video').get(0);
|
||||
|
@ -118,20 +132,38 @@ var VideoLayout = (function (my) {
|
|||
|
||||
// mute if localvideo
|
||||
if (pick) {
|
||||
VideoLayout.updateLargeVideo(pick.src, pick.volume);
|
||||
var container = pick.parentNode;
|
||||
var jid = null;
|
||||
if(container)
|
||||
{
|
||||
if(container.id == "localVideoWrapper")
|
||||
{
|
||||
jid = Strophe.getResourceFromJid(connection.emuc.myroomjid);
|
||||
}
|
||||
else
|
||||
{
|
||||
jid = VideoLayout.getPeerContainerResourceJid(container);
|
||||
}
|
||||
}
|
||||
|
||||
VideoLayout.updateLargeVideo(RTC.getVideoSrc(pick), pick.volume, jid);
|
||||
} else {
|
||||
console.warn("Failed to elect large video");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
my.getLargeVideoState = function () {
|
||||
return largeVideoState;
|
||||
}
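How the new accessor is consumed (see the data_channels hunk earlier in this commit): instead of mapping the large video's src back to a jid via getJidFromVideoSrc, callers read the jid recorded when the large video was last updated. onSelectedEndpointChanged is the app.js function shown above.

    // Sketch of the data channel call site.
    var userJid = VideoLayout.getLargeVideoState().userJid;
    // the handler deliberately notifies even when userJid is undefined or
    // null, so the bridge can clear its selection
    onSelectedEndpointChanged(userJid);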
|
||||
|
||||
/**
|
||||
* Updates the large video with the given new video source.
|
||||
*/
|
||||
my.updateLargeVideo = function(newSrc, vol) {
|
||||
my.updateLargeVideo = function(newSrc, vol, jid) {
|
||||
console.log('hover in', newSrc);
|
||||
|
||||
if ($('#largeVideo').attr('src') != newSrc) {
|
||||
if (RTC.getVideoSrc($('#largeVideo')[0]) != newSrc) {
|
||||
|
||||
$('#activeSpeakerAvatar').css('visibility', 'hidden');
|
||||
// Due to the simulcast the localVideoSrc may have changed when the
|
||||
|
@ -144,15 +176,22 @@ var VideoLayout = (function (my) {
|
|||
|
||||
largeVideoState.newSrc = newSrc;
|
||||
largeVideoState.isVisible = $('#largeVideo').is(':visible');
|
||||
largeVideoState.isDesktop = isVideoSrcDesktop(newSrc);
|
||||
largeVideoState.userJid = getJidFromVideoSrc(newSrc);
|
||||
largeVideoState.isDesktop = isVideoSrcDesktop(jid);
|
||||
if(jid2Ssrc[largeVideoState.userJid] ||
|
||||
(connection && connection.emuc.myroomjid &&
|
||||
largeVideoState.userJid == Strophe.getResourceFromJid(connection.emuc.myroomjid)))
|
||||
{
|
||||
largeVideoState.oldJid = largeVideoState.userJid;
|
||||
}
|
||||
else
|
||||
{
|
||||
largeVideoState.oldJid = null;
|
||||
}
|
||||
largeVideoState.userJid = jid;
|
||||
|
||||
// Screen stream is already rotated
|
||||
largeVideoState.flipX = (newSrc === localVideoSrc) && flipXLocalVideo;
|
||||
|
||||
var oldSrc = $('#largeVideo').attr('src');
|
||||
largeVideoState.oldJid = getJidFromVideoSrc(oldSrc);
|
||||
|
||||
var userChanged = false;
|
||||
if (largeVideoState.oldJid != largeVideoState.userJid) {
|
||||
userChanged = true;
|
||||
|
@ -170,7 +209,8 @@ var VideoLayout = (function (my) {
|
|||
|
||||
if (!userChanged && largeVideoState.preload
|
||||
&& largeVideoState.preload != null
|
||||
&& $(largeVideoState.preload).attr('src') == newSrc) {
|
||||
&& RTC.getVideoSrc($(largeVideoState.preload)[0]) == newSrc)
|
||||
{
|
||||
|
||||
console.info('Switching to preloaded video');
|
||||
var attributes = $('#largeVideo').prop("attributes");
|
||||
|
@ -196,7 +236,7 @@ var VideoLayout = (function (my) {
|
|||
largeVideoState.preload = null;
|
||||
largeVideoState.preload_ssrc = 0;
|
||||
} else {
|
||||
$('#largeVideo').attr('src', largeVideoState.newSrc);
|
||||
RTC.setVideoSrc($('#largeVideo')[0], largeVideoState.newSrc);
|
||||
}
|
||||
|
||||
var videoTransform = document.getElementById('largeVideo')
|
||||
|
@ -224,14 +264,12 @@ var VideoLayout = (function (my) {
|
|||
// Only if the large video is currently visible.
|
||||
// Disable previous dominant speaker video.
|
||||
if (largeVideoState.oldJid) {
|
||||
var oldResourceJid = Strophe.getResourceFromJid(largeVideoState.oldJid);
|
||||
VideoLayout.enableDominantSpeaker(oldResourceJid, false);
|
||||
VideoLayout.enableDominantSpeaker(largeVideoState.oldJid, false);
|
||||
}
|
||||
|
||||
// Enable new dominant speaker in the remote videos section.
|
||||
if (largeVideoState.userJid) {
|
||||
var resourceJid = Strophe.getResourceFromJid(largeVideoState.userJid);
|
||||
VideoLayout.enableDominantSpeaker(resourceJid, true);
|
||||
VideoLayout.enableDominantSpeaker(largeVideoState.userJid, true);
|
||||
}
|
||||
|
||||
if (userChanged && largeVideoState.isVisible) {
|
||||
|
@@ -256,17 +294,20 @@ var VideoLayout = (function (my) {
        }
    };

    my.handleVideoThumbClicked = function(videoSrc, noPinnedEndpointChangedEvent) {
    my.handleVideoThumbClicked = function(videoSrc, noPinnedEndpointChangedEvent, jid) {
        // Restore style for previously focused video
        var focusJid = getJidFromVideoSrc(focusedVideoSrc);
        var oldContainer = getParticipantContainer(focusJid);
        var oldContainer = null;
        if(focusedVideoSrc) {
            var focusJid = focusedVideoSrc.jid;
            oldContainer = getParticipantContainer(focusJid);
        }

        if (oldContainer) {
            oldContainer.removeClass("videoContainerFocused");
        }

        // Unlock current focused.
        if (focusedVideoSrc === videoSrc)
        if (focusedVideoSrc && focusedVideoSrc.src === videoSrc)
        {
            focusedVideoSrc = null;
            var dominantSpeakerVideo = null;
@@ -277,7 +318,7 @@ var VideoLayout = (function (my) {
                .get(0);

            if (dominantSpeakerVideo) {
                VideoLayout.updateLargeVideo(dominantSpeakerVideo.src, 1);
                VideoLayout.updateLargeVideo(RTC.getVideoSrc(dominantSpeakerVideo), 1, currentDominantSpeaker);
            }
        }

@@ -288,17 +329,19 @@ var VideoLayout = (function (my) {
        }

        // Lock new video
        focusedVideoSrc = videoSrc;
        focusedVideoSrc = {
            src: videoSrc,
            jid: jid
        };

        // Update focused/pinned interface.
        var userJid = getJidFromVideoSrc(videoSrc);
        if (userJid)
        if (jid)
        {
            var container = getParticipantContainer(userJid);
            var container = getParticipantContainer(jid);
            container.addClass("videoContainerFocused");

            if (!noPinnedEndpointChangedEvent) {
                $(document).trigger("pinnedendpointchanged", [userJid]);
                $(document).trigger("pinnedendpointchanged", [jid]);
            }
        }

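focusedVideoSrc is no longer a bare URL string but a small record carrying both the element source and the owning jid, so pinning and unpinning no longer needs to reverse-map a URL to a participant. A sketch of the toggle under that assumption (the helper name is hypothetical):

// Hypothetical helper describing the pin/unpin rule used above.
function togglePinnedVideo(currentFocus, videoSrc, jid) {
    // Clicking the thumbnail that is already focused unpins it.
    if (currentFocus && currentFocus.src === videoSrc)
        return null;
    // Otherwise pin the clicked participant.
    return { src: videoSrc, jid: jid };
}

// Usage: focusedVideoSrc = togglePinnedVideo(focusedVideoSrc, videoSrc, jid);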
@@ -310,7 +353,7 @@ var VideoLayout = (function (my) {
            // this isn't a prezi.
            $(document).trigger("video.selected", [false]);

            VideoLayout.updateLargeVideo(videoSrc, 1);
            VideoLayout.updateLargeVideo(videoSrc, 1, Strophe.getResourceFromJid(jid));

            $('audio').each(function (idx, el) {
                if (el.id.indexOf('mixedmslabel') !== -1) {
@@ -356,8 +399,7 @@ var VideoLayout = (function (my) {
     * Shows/hides the large video.
     */
    my.setLargeVideoVisible = function(isVisible) {
        var largeVideoJid = getJidFromVideoSrc($('#largeVideo').attr('src'));
        var resourceJid = Strophe.getResourceFromJid(largeVideoJid);
        var resourceJid = largeVideoState.userJid;

        if (isVisible) {
            $('#largeVideo').css({visibility: 'visible'});
@@ -457,7 +499,7 @@ var VideoLayout = (function (my) {
            ? document.createElement('video')
            : document.createElement('audio');
        var id = (isVideo ? 'remoteVideo_' : 'remoteAudio_')
            + sid + '_' + stream.id;
            + sid + '_' + RTC.getStreamID(stream);

        element.id = id;
        element.autoplay = true;
@@ -487,10 +529,8 @@ var VideoLayout = (function (my) {
            var videoStream = simulcast.getReceivingVideoStream(stream);
            RTC.attachMediaStream(sel, videoStream);

            if (isVideo) {
                waitForRemoteVideo(sel, thessrc, stream);
            }

            if (isVideo)
                waitForRemoteVideo(sel, thessrc, stream, peerJid);
        }

        stream.onended = function () {
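RTC.attachMediaStream and RTC.getStreamID hide the same browser differences when wiring a remote stream into an element and when reading a stream identifier. A rough sketch of what those helpers are assumed to do, again not the actual RTC module code:

// Rough sketch of attachMediaStream/getStreamID; names and branching are assumptions.
var RTCStreamShim = {
    attachMediaStream: function (sel, stream) {
        // Accept either a jQuery selection or a raw element.
        var element = sel.jquery ? sel[0] : sel;
        if (typeof element.mozSrcObject !== 'undefined')
            element.mozSrcObject = stream;                    // Firefox
        else
            element.src = webkitURL.createObjectURL(stream);  // Chrome
    },
    getStreamID: function (stream) {
        // Fall back to the legacy label when an id is not present.
        return stream.id || stream.label;
    }
};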
@@ -512,7 +552,7 @@ var VideoLayout = (function (my) {
            var videoThumb = $('#' + container.id + '>video').get(0);

            if (videoThumb)
                VideoLayout.handleVideoThumbClicked(videoThumb.src);
                VideoLayout.handleVideoThumbClicked(RTC.getVideoSrc(videoThumb), false, peerJid);

            event.preventDefault();
            return false;
@@ -527,13 +567,13 @@ var VideoLayout = (function (my) {
                var videoSrc = null;
                if ($('#' + container.id + '>video')
                    && $('#' + container.id + '>video').length > 0) {
                    videoSrc = $('#' + container.id + '>video').get(0).src;
                    videoSrc = RTC.getVideoSrc($('#' + container.id + '>video').get(0));
                }

                // If the video has been "pinned" by the user we want to
                // keep the display name on place.
                if (!VideoLayout.isLargeVideoVisible()
                    || videoSrc !== $('#largeVideo').attr('src'))
                    || videoSrc !== RTC.getVideoSrc($('#largeVideo')[0]))
                    VideoLayout.showDisplayName(container.id, false);
            }
        );
@@ -558,13 +598,11 @@ var VideoLayout = (function (my) {
        var removedVideoSrc = null;
        if (isVideo) {
            select = $('#' + container.id + '>video');
            removedVideoSrc = select.get(0).src;
            removedVideoSrc = RTC.getVideoSrc(select.get(0));
        }
        else
            select = $('#' + container.id + '>audio');

        // Remove video source from the mapping.
        delete videoSrcToSsrc[removedVideoSrc];

        // Mark video as removed to cancel waiting loop(if video is removed
        // before has started)
@@ -1004,7 +1042,6 @@ var VideoLayout = (function (my) {
        videoSpan = document.getElementById(videoContainerId);

        if (!videoSpan) {
            console.error("No video element for jid", resourceJid);
            return;
        }

@@ -1220,22 +1257,6 @@ var VideoLayout = (function (my) {
        return containerElement.id.substring(i + 12);
    };

    my.getLargeVideoResource = function () {
        var largeVideoJid, largeVideoResource;

        // Another approach could be to compare the srcs of the thumbnails and
        // then call getPeerContainerResourceJid.

        var largeVideoSsrc
            = videoSrcToSsrc[$('#largeVideo').attr('src')];

        if (largeVideoSsrc
            /* variables/state checking to prevent exceptions */
            && (largeVideoJid = ssrc2jid[largeVideoSsrc])
            && (largeVideoResource = Strophe.getResourceFromJid(largeVideoJid)))
            return largeVideoResource;
    };

    /**
     * Adds the remote video menu element for the given <tt>jid</tt> in the
     * given <tt>parentElement</tt>.
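With the displayed participant's jid tracked directly in largeVideoState, the src -> SSRC -> jid chain that getLargeVideoResource walked through videoSrcToSsrc is no longer needed; the lookup collapses to a field read, which is what the later hunks substitute. A sketch of the equivalent accessor, hypothetical and assuming updateLargeVideo stores the resource jid in userJid:

// Hypothetical replacement for the removed getLargeVideoResource().
function getLargeVideoResource() {
    // updateLargeVideo() records the resource jid of the displayed participant here.
    return largeVideoState ? largeVideoState.userJid : null;
}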
@@ -1338,7 +1359,7 @@ var VideoLayout = (function (my) {
                // We have a video src, great! Let's update the large video
                // now.

                VideoLayout.handleVideoThumbClicked(videoThumb.src);
                VideoLayout.handleVideoThumbClicked(videoThumb.src, false, jid);
            } else {

                // If we don't have a video src for jid, there's absolutely
@@ -1474,7 +1495,7 @@ var VideoLayout = (function (my) {
            // Update the large video if the video source is already available,
            // otherwise wait for the "videoactive.jingle" event.
            if (video.length && video[0].currentTime > 0)
                VideoLayout.updateLargeVideo(video[0].src);
                VideoLayout.updateLargeVideo(RTC.getVideoSrc(video[0]), resourceJid);
        }
    });

@@ -1553,7 +1574,7 @@ var VideoLayout = (function (my) {
            // it is no longer being received. If resourceJid was being
            // displayed in the large video we have to switch to another
            // user.
            var largeVideoResource = VideoLayout.getLargeVideoResource();
            var largeVideoResource = largeVideoState.userJid;
            if (!updateLargeVideo && resourceJid === largeVideoResource) {
                updateLargeVideo = true;
            }
@@ -1578,18 +1599,17 @@ var VideoLayout = (function (my) {
                var videoStream = simulcast.getReceivingVideoStream(
                    mediaStream.stream);
                RTC.attachMediaStream(sel, videoStream);
                videoSrcToSsrc[sel.attr('src')] = mediaStream.ssrc;
                if (lastNPickupJid == mediaStream.peerjid) {
                    // Clean up the lastN pickup jid.
                    lastNPickupJid = null;

                    // Don't fire the events again, they've already
                    // been fired in the contact list click handler.
                    VideoLayout.handleVideoThumbClicked($(sel).attr('src'), false);
                    VideoLayout.handleVideoThumbClicked($(sel).attr('src'), false, mediaStream.peerjid);

                    updateLargeVideo = false;
                }
                waitForRemoteVideo(sel, mediaStream.ssrc, mediaStream.stream);
                waitForRemoteVideo(sel, mediaStream.ssrc, mediaStream.stream, resourceJid);
            }
        })
    }
@@ -1646,7 +1666,7 @@ var VideoLayout = (function (my) {
            || (parentResourceJid
                && VideoLayout.getDominantSpeakerResourceJid()
                    === parentResourceJid)) {
            VideoLayout.updateLargeVideo(videoelem.attr('src'), 1);
            VideoLayout.updateLargeVideo(RTC.getVideoSrc(videoelem[0]), 1, parentResourceJid);
        }

        VideoLayout.showModeratorIndicator();
@@ -1657,7 +1677,15 @@ var VideoLayout = (function (my) {
        endpointSimulcastLayers.forEach(function (esl) {

            var resource = esl.endpoint;
            if (lastNCount < 1 || lastNEndpointsCache.indexOf(resource) === -1) {

            // if lastN is enabled *and* the endpoint is *not* in the lastN set,
            // then ignore the event (= do not preload anything).
            //
            // The bridge could probably stop sending this message if it's for
            // an endpoint that's not in lastN.

            if (lastNCount != -1
                && (lastNCount < 1 || lastNEndpointsCache.indexOf(resource) === -1)) {
                return;
            }

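The new guard only drops the event when lastN is actually enabled (lastNCount != -1) and the endpoint is outside the current lastN set; the previous check also fired when lastN was disabled, since a lastNCount of -1 satisfied lastNCount < 1. A small predicate capturing the same rule, with names taken from the surrounding code:

// Sketch of the lastN filtering rule used by both simulcast event handlers.
function isOutsideLastN(lastNCount, lastNEndpointsCache, resource) {
    var lastNEnabled = lastNCount != -1;
    var inLastNSet = lastNCount >= 1
        && lastNEndpointsCache.indexOf(resource) !== -1;
    return lastNEnabled && !inLastNSet;
}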
@@ -1674,12 +1702,8 @@ var VideoLayout = (function (my) {
            console.info([esl, primarySSRC, msid, session, electedStream]);

            var msidParts = msid.split(' ');
            var selRemoteVideo = $(['#', 'remoteVideo_', session.sid, '_', msidParts[0]].join(''));

            // FIXME(gp) here we should use the VideoLayout.getPeerContainerResource
            // and VideoLayout.getLargeVideoResource methods.
            var preload = (ssrc2jid[videoSrcToSsrc[selRemoteVideo.attr('src')]]
                == ssrc2jid[videoSrcToSsrc[largeVideoState.newSrc]]);
            var preload = (Strophe.getResourceFromJid(ssrc2jid[primarySSRC]) == largeVideoState.userJid);

            if (preload) {
                if (largeVideoState.preload)
@@ -1691,9 +1715,7 @@ var VideoLayout = (function (my) {
                // ssrcs are unique in an rtp session
                largeVideoState.preload_ssrc = primarySSRC;

                var electedStreamUrl = webkitURL.createObjectURL(electedStream);
                largeVideoState.preload
                    .attr('src', electedStreamUrl);
                RTC.attachMediaStream(largeVideoState.preload, electedStream)
            }

        } else {
@@ -1709,7 +1731,19 @@ var VideoLayout = (function (my) {
        endpointSimulcastLayers.forEach(function (esl) {

            var resource = esl.endpoint;
            if (lastNCount < 1 || lastNEndpointsCache.indexOf(resource) === -1) {

            // if lastN is enabled *and* the endpoint is *not* in the lastN set,
            // then ignore the event (= do not change large video/thumbnail
            // SRCs).
            //
            // Note that even if we ignore the "changed" event in this event
            // handler, the bridge must continue sending these events because
            // the simulcast code in simulcast.js uses it to know what's going
            // to be streamed by the bridge when/if the endpoint gets back into
            // the lastN set.

            if (lastNCount != -1
                && (lastNCount < 1 || lastNEndpointsCache.indexOf(resource) === -1)) {
                return;
            }

@@ -1729,21 +1763,15 @@ var VideoLayout = (function (my) {
            var msidParts = msid.split(' ');
            var selRemoteVideo = $(['#', 'remoteVideo_', session.sid, '_', msidParts[0]].join(''));

            // FIXME(gp) here we should use the VideoLayout.getPeerContainerResource
            // and VideoLayout.getLargeVideoResource methods.
            var updateLargeVideo = (ssrc2jid[videoSrcToSsrc[selRemoteVideo.attr('src')]]
                == ssrc2jid[videoSrcToSsrc[largeVideoState.newSrc]]);

            // We should only update the focused video src if it's not a
            // falsy value.
            var updateFocusedVideoSrc
                = focusedVideoSrc && focusedVideoSrc !== ''
                    && (selRemoteVideo.attr('src') == focusedVideoSrc);
            var updateLargeVideo = (Strophe.getResourceFromJid(ssrc2jid[primarySSRC])
                == largeVideoState.userJid);
            var updateFocusedVideoSrc = (focusedVideoSrc && focusedVideoSrc.src && focusedVideoSrc.src != '' &&
                (RTC.getVideoSrc(selRemoteVideo[0]) == focusedVideoSrc.src));

            var electedStreamUrl;
            if (largeVideoState.preload_ssrc == primarySSRC)
            {
                electedStreamUrl = $(largeVideoState.preload).attr('src');
                RTC.setVideoSrc(selRemoteVideo[0], RTC.getVideoSrc(largeVideoState.preload[0]));
            }
            else
            {
@@ -1754,18 +1782,19 @@ var VideoLayout = (function (my) {

                largeVideoState.preload_ssrc = 0;

                electedStreamUrl = webkitURL.createObjectURL(electedStream);
                RTC.attachMediaStream(selRemoteVideo, electedStream);
            }

            selRemoteVideo.attr('src', electedStreamUrl);
            videoSrcToSsrc[selRemoteVideo.attr('src')] = primarySSRC + ''; // what we store there is typeof string.
            var jid = ssrc2jid[primarySSRC];
            jid2Ssrc[jid] = primarySSRC;

            if (updateLargeVideo) {
                VideoLayout.updateLargeVideo(electedStreamUrl);
                VideoLayout.updateLargeVideo(RTC.getVideoSrc(selRemoteVideo[0]), null,
                    Strophe.getResourceFromJid(jid));
            }

            if (updateFocusedVideoSrc) {
                focusedVideoSrc = electedStreamUrl;
                focusedVideoSrc.src = RTC.getVideoSrc(selRemoteVideo[0]);
            }

            var videoId;
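After the elected stream is attached, the handler keeps the jid <-> SSRC association up to date instead of the old videoSrcToSsrc URL map, and re-targets the large video and the pinned-video record from the element itself. A compact sketch of just the mapping update, assuming the ssrc2jid/jid2Ssrc globals declared in app.js:

// Compact sketch of the SSRC/jid bookkeeping done above (global maps assumed).
function recordElectedStream(primarySSRC) {
    var jid = ssrc2jid[primarySSRC];
    if (jid)
        jid2Ssrc[jid] = primarySSRC;  // keep the reverse mapping in sync
    return jid;
}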
@@ -1889,19 +1918,25 @@ var VideoLayout = (function (my) {
        if(this.jid==null)
        {
            resolution = "";
            for(var i in this.resolution)
            if(this.resolution == null || !Object.keys(this.resolution)
                || Object.keys(this.resolution).length == 0)
            {
                resolutionValue = this.resolution[i];
                if(resolutionValue)
                resolution = "N/A";
            }
            else
                for(var i in this.resolution)
                {
                    if(resolutionValue.height &&
                        resolutionValue.width)
                    resolutionValue = this.resolution[i];
                    if(resolutionValue)
                    {
                        resolution += (resolution == ""? "" : ", ")
                            + resolutionValue.width + "x" + resolutionValue.height;
                        if(resolutionValue.height &&
                            resolutionValue.width)
                        {
                            resolution += (resolution == ""? "" : ", ")
                                + resolutionValue.width + "x" + resolutionValue.height;
                        }
                    }
                }
        }
        }
        else if(!resolutionValue ||
            !resolutionValue.height ||
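The reworked resolution formatting reduces to: report "N/A" when no per-SSRC resolutions are known, otherwise join every valid entry as WIDTHxHEIGHT, comma-separated. A hedged refactor expressing the same intent (not the code in the commit; the input shape is assumed to match this.resolution):

// Equivalent-intent sketch of the aggregate resolution string built above.
function formatResolutions(resolutionsBySsrc) {
    if (resolutionsBySsrc == null
        || Object.keys(resolutionsBySsrc).length === 0)
        return "N/A";
    var parts = [];
    for (var ssrc in resolutionsBySsrc) {
        var value = resolutionsBySsrc[ssrc];
        if (value && value.width && value.height)
            parts.push(value.width + "x" + value.height);
    }
    return parts.join(", ");
}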