Implements first version of adaptive simulcast.

This commit is contained in:
George Politis 2014-09-12 19:54:40 +02:00
parent 555bdd7af5
commit 36af4da83d
7 changed files with 278 additions and 44 deletions

17
app.js
View File

@ -279,7 +279,10 @@ function waitForPresence(data, sid) {
var ssrclines
= SDPUtil.find_lines(sess.peerconnection.remoteDescription.sdp, 'a=ssrc:');
ssrclines = ssrclines.filter(function (line) {
return line.indexOf('mslabel:' + data.stream.label) !== -1;
// NOTE(gp) previously we filtered on the mslabel, but that property
// is not always present.
// return line.indexOf('mslabel:' + data.stream.label) !== -1;
return line.indexOf('msid:' + data.stream.id) !== -1;
});
if (ssrclines.length) {
thessrc = ssrclines[0].substring(7).split(' ')[0];
@ -292,6 +295,7 @@ function waitForPresence(data, sid) {
// presence to arrive.
if (!ssrc2jid[thessrc]) {
// TODO(gp) limit wait duration to 1 sec.
setTimeout(function(d, s) {
return function() {
waitForPresence(d, s);
@ -1418,6 +1422,17 @@ $(document).bind('fatalError.jingle',
}
);
$(document).bind("video.selected", function (event, isPresentation, userJid) {
    // Ignore prezi selections and do nothing until at least one data
    // channel to the bridge exists.
    if (isPresentation || !_dataChannels || _dataChannels.length == 0) {
        return;
    }
    // Inside this guard isPresentation is always false, so the elected
    // endpoint is null exactly when no JID was supplied.
    // TODO(gp) hmm.. I wonder which one of the Strophe methods to use..
    var selectedEndpoint = userJid ? userJid.split('/')[1] : null;
    _dataChannels[0].send(JSON.stringify({
        'colibriClass': 'SelectedEndpointChangedEvent',
        'selectedEndpoint': selectedEndpoint
    }));
});
function callSipButtonClicked()
{
$.prompt('<h2>Enter SIP number</h2>' +

View File

@ -23,6 +23,10 @@ function onDataChannel(event)
//dataChannel.send("Hello bridge!");
// Sends 12 bytes binary message to the bridge
//dataChannel.send(new ArrayBuffer(12));
// TODO(gp) we are supposed to tell the bridge about video selections
// so that it can do adaptive simulcast. What if a video selection has
// been made while the data channels are down or broken?
};
dataChannel.onerror = function (error)
@ -89,6 +93,16 @@ function onDataChannel(event)
var endpointSimulcastLayers = obj.endpointSimulcastLayers;
$(document).trigger('simulcastlayerschanged', [endpointSimulcastLayers]);
}
else if ("StartSimulcastLayerEvent" === colibriClass)
{
var simulcastLayer = obj.simulcastLayer;
$(document).trigger('startsimulcastlayer', simulcastLayer);
}
else if ("StopSimulcastLayerEvent" === colibriClass)
{
var simulcastLayer = obj.simulcastLayer;
$(document).trigger('stopsimulcastlayer', simulcastLayer);
}
else
{
console.debug("Data channel JSON-formatted message: ", obj);

View File

@ -530,7 +530,7 @@ ColibriFocus.prototype.createdConference = function (result) {
bridgeSDP.raw = bridgeSDP.session + bridgeSDP.media.join('');
var bridgeDesc = new RTCSessionDescription({type: 'offer', sdp: bridgeSDP.raw});
var simulcast = new Simulcast();
var bridgeDesc = simulcast.transformBridgeDescription(bridgeDesc);
var bridgeDesc = simulcast.transformRemoteDescription(bridgeDesc);
this.peerconnection.setRemoteDescription(bridgeDesc,
function () {

View File

@ -130,10 +130,14 @@ if (TraceablePeerConnection.prototype.__defineGetter__ !== undefined) {
TraceablePeerConnection.prototype.__defineGetter__('iceConnectionState', function() { return this.peerconnection.iceConnectionState; });
TraceablePeerConnection.prototype.__defineGetter__('localDescription', function() {
var simulcast = new Simulcast();
var publicLocalDescription = simulcast.makeLocalDescriptionPublic(this.peerconnection.localDescription);
var publicLocalDescription = simulcast.reverseTransformLocalDescription(this.peerconnection.localDescription);
return publicLocalDescription;
});
TraceablePeerConnection.prototype.__defineGetter__('remoteDescription', function() { return this.peerconnection.remoteDescription; });
TraceablePeerConnection.prototype.__defineGetter__('remoteDescription', function() {
var simulcast = new Simulcast();
var publicRemoteDescription = simulcast.reverseTransformRemoteDescription(this.peerconnection.remoteDescription);
return publicRemoteDescription;
});
}
TraceablePeerConnection.prototype.addStream = function (stream) {

View File

@ -120,7 +120,7 @@ JingleSession.prototype.accept = function () {
pranswer.sdp = pranswer.sdp.replace('a=inactive', 'a=sendrecv');
}
var simulcast = new Simulcast();
pranswer = simulcast.makeLocalDescriptionPublic(pranswer);
pranswer = simulcast.reverseTransformLocalDescription(pranswer);
var prsdp = new SDP(pranswer.sdp);
var accept = $iq({to: this.peerjid,
type: 'set'})
@ -568,7 +568,7 @@ JingleSession.prototype.createdAnswer = function (sdp, provisional) {
responder: this.responder,
sid: this.sid });
var simulcast = new Simulcast();
var publicLocalDesc = simulcast.makeLocalDescriptionPublic(sdp);
var publicLocalDesc = simulcast.reverseTransformLocalDescription(sdp);
var publicLocalSDP = new SDP(publicLocalDesc.sdp);
publicLocalSDP.toJingle(accept, this.initiator == this.me ? 'initiator' : 'responder');
this.connection.sendIQ(accept,

View File

@ -15,7 +15,7 @@ function Simulcast() {
"use strict";
// global state for all transformers.
var localExplosionMap = {}, localVideoSourceCache, emptyCompoundIndex,
remoteMaps = {
remoteVideoSourceCache, remoteMaps = {
msid2Quality: {},
ssrc2Msid: {},
receivingVideoStreams: {}
@ -45,6 +45,14 @@ function Simulcast() {
this._replaceVideoSources(lines, localVideoSourceCache);
};
/**
 * Caches the video sources found in the given (remote description) SDP
 * lines, so they can later be restored by _restoreRemoteVideoSources.
 *
 * @param lines the remote description SDP, split into lines.
 * @private
 */
Simulcast.prototype._cacheRemoteVideoSources = function (lines) {
remoteVideoSourceCache = this._getVideoSources(lines);
};
/**
 * Replaces the video sources in the given SDP lines with the ones
 * previously cached by _cacheRemoteVideoSources.
 *
 * @param lines the SDP lines to patch.
 * @private
 */
Simulcast.prototype._restoreRemoteVideoSources = function (lines) {
this._replaceVideoSources(lines, remoteVideoSourceCache);
};
Simulcast.prototype._replaceVideoSources = function (lines, videoSources) {
var i, inVideo = false, index = -1, howMany = 0;
@ -216,6 +224,12 @@ function Simulcast() {
}
};
/**
* Produces a single stream with multiple tracks for local video sources.
*
* @param lines
* @private
*/
Simulcast.prototype._explodeLocalSimulcastSources = function (lines) {
var sb, msid, sid, tid, videoSources, self;
@ -259,6 +273,12 @@ function Simulcast() {
this._replaceVideoSources(lines, sb);
};
/**
* Groups local video sources together in the ssrc-group:SIM group.
*
* @param lines
* @private
*/
Simulcast.prototype._groupLocalVideoSources = function (lines) {
var sb, videoSources, ssrcs = [], ssrc;
@ -401,6 +421,13 @@ function Simulcast() {
return sb;
};
/**
* Ensures that the simulcast group is present in the answer, _if_ native
* simulcast is enabled.
*
* @param desc
* @returns {*}
*/
Simulcast.prototype.transformAnswer = function (desc) {
if (config.enableSimulcast && config.useNativeSimulcast) {
@ -429,11 +456,53 @@ function Simulcast() {
return desc;
};
Simulcast.prototype.makeLocalDescriptionPublic = function (desc) {
/**
 * Puts back into the SDP lines the remote video sources (including
 * their simulcast groups) that were cached via
 * _cacheRemoteVideoSources when the remote description was transformed.
 *
 * @param sb the SDP lines to patch.
 * @private
 */
Simulcast.prototype._restoreSimulcastGroups = function (sb) {
this._restoreRemoteVideoSources(sb);
};
/**
* Restores the simulcast groups of the remote description. In
* transformRemoteDescription we remove those in order for the set remote
* description to succeed. The focus needs to signal the groups to new
* participants.
*
* @param desc
* @returns {*}
*/
Simulcast.prototype.reverseTransformRemoteDescription = function (desc) {
var sb;
if (!desc || desc == null)
if (!desc || desc == null) {
return desc;
}
if (config.enableSimulcast) {
sb = desc.sdp.split('\r\n');
this._restoreSimulcastGroups(sb);
desc = new RTCSessionDescription({
type: desc.type,
sdp: sb.join('\r\n')
});
}
return desc;
};
/**
* Prepares the local description for public usage (i.e. to be signaled
* through Jingle to the focus).
*
* @param desc
* @returns {RTCSessionDescription}
*/
Simulcast.prototype.reverseTransformLocalDescription = function (desc) {
var sb;
if (!desc || desc == null) {
return desc;
}
if (config.enableSimulcast) {
@ -480,30 +549,16 @@ function Simulcast() {
this._replaceVideoSources(lines, sb);
};
/**
 * Transforms the session description that the bridge sends us. Only
 * relevant when native simulcast is in use; otherwise the description
 * passes through untouched.
 *
 * @param desc the bridge's session description.
 * @returns {*} the (possibly transformed) description.
 */
Simulcast.prototype.transformBridgeDescription = function (desc) {
    if (!config.enableSimulcast || !config.useNativeSimulcast) {
        return desc;
    }
    var lines = desc.sdp.split('\r\n');
    this._ensureGoogConference(lines);
    var transformed = new RTCSessionDescription({
        type: desc.type,
        sdp: lines.join('\r\n')
    });
    if (this.debugLvl && this.debugLvl > 1) {
        console.info('Transformed bridge description');
        console.info(transformed.sdp);
    }
    return transformed;
};
Simulcast.prototype._updateRemoteMaps = function (lines) {
var remoteVideoSources = this._parseMedia(lines, ['video'])[0], videoSource, quality;
// (re) initialize the remote maps.
remoteMaps = {
msid2Quality: {},
ssrc2Msid: {},
receivingVideoStreams: {}
};
if (remoteVideoSources.groups && remoteVideoSources.groups.length !== 0) {
remoteVideoSources.groups.forEach(function (group) {
if (group.semantics === 'SIM' && group.ssrcs && group.ssrcs.length !== 0) {
@ -518,6 +573,12 @@ function Simulcast() {
}
};
/**
*
*
* @param desc
* @returns {*}
*/
Simulcast.prototype.transformLocalDescription = function (desc) {
if (config.enableSimulcast && !config.useNativeSimulcast) {
@ -539,14 +600,28 @@ function Simulcast() {
return desc;
};
/**
* Removes the ssrc-group:SIM from the remote description because Chrome
* either gets confused and thinks this is an FID group or, if an FID group
* is already present, it fails to set the remote description.
*
* @param desc
* @returns {*}
*/
Simulcast.prototype.transformRemoteDescription = function (desc) {
if (config.enableSimulcast) {
var sb = desc.sdp.split('\r\n');
this._updateRemoteMaps(sb);
this._removeSimulcastGroup(sb); // NOTE(gp) this needs to be called after updateRemoteMaps!
this._ensureGoogConference(sb);
this._cacheRemoteVideoSources(sb);
this._removeSimulcastGroup(sb); // NOTE(gp) this needs to be called after updateRemoteMaps because we need the simulcast group in the _updateRemoteMaps() method.
if (config.useNativeSimulcast) {
// We don't need the goog conference flag if we're not doing
// native simulcast.
this._ensureGoogConference(sb);
}
desc = new RTCSessionDescription({
type: desc.type,
@ -562,20 +637,28 @@ function Simulcast() {
return desc;
};
Simulcast.prototype.setReceivingVideoStream = function (ssrc) {
/**
 * Records which remote track this endpoint now receives for the given
 * SSRC, keyed by stream id. Called when the bridge signals an
 * EndpointSimulcastLayersChangedEvent.
 *
 * @param ssrc the primary SSRC of the elected simulcast layer.
 * @private
 */
Simulcast.prototype._setReceivingVideoStream = function (ssrc) {
    var receivingTrack = remoteMaps.ssrc2Msid[ssrc];
    if (!receivingTrack) {
        // Unknown SSRC (e.g. the remote description has not been
        // processed yet). Previously this crashed on the undefined
        // .split() below; ignore it instead.
        return;
    }
    // The msid is fully qualified: "<stream.id> <track.id>".
    var msidParts = receivingTrack.split(' ');
    remoteMaps.receivingVideoStreams[msidParts[0]] = msidParts[1];
};
/**
* Returns a stream with single video track, the one currently being
* received by this endpoint.
*
* @param stream the remote simulcast stream.
* @returns {webkitMediaStream}
*/
Simulcast.prototype.getReceivingVideoStream = function (stream) {
var tracks, track, i, electedTrack, msid, quality = 1, receivingTrackId;
var tracks, i, electedTrack, msid, quality = 1, receivingTrackId;
if (config.enableSimulcast) {
if (remoteMaps.receivingVideoStreams[stream.id])
{
// the bridge has signaled us to receive a specific track.
receivingTrackId = remoteMaps.receivingVideoStreams[stream.id];
tracks = stream.getVideoTracks();
for (i = 0; i < tracks.length; i++) {
@ -587,15 +670,18 @@ function Simulcast() {
}
if (!electedTrack) {
// we don't have an elected track, choose by initial quality.
tracks = stream.getVideoTracks();
for (i = 0; i < tracks.length; i++) {
track = tracks[i];
msid = [stream.id, track.id].join(' ');
msid = [stream.id, tracks[i].id].join(' ');
if (remoteMaps.msid2Quality[msid] === quality) {
electedTrack = track;
electedTrack = tracks[i];
break;
}
}
// TODO(gp) if the initialQuality could not be satisfied, lower
// the requirement and try again.
}
}
@ -604,6 +690,15 @@ function Simulcast() {
: stream;
};
var stream;
/**
* GUM for simulcast.
*
* @param constraints
* @param success
* @param err
*/
Simulcast.prototype.getUserMedia = function (constraints, success, err) {
// TODO(gp) what if we request a resolution not supported by the hardware?
@ -620,7 +715,10 @@ function Simulcast() {
if (config.enableSimulcast && !config.useNativeSimulcast) {
// NOTE(gp) if we request the lq stream first webkitGetUserMedia fails randomly. Tested with Chrome 37.
// NOTE(gp) if we request the lq stream first webkitGetUserMedia
// fails randomly. Tested with Chrome 37. As fippo suggested, the
// reason appears to be that Chrome only acquires the cam once and
// then downscales the picture (https://code.google.com/p/chromium/issues/detail?id=346616#c11)
navigator.webkitGetUserMedia(constraints, function (hqStream) {
@ -641,6 +739,7 @@ function Simulcast() {
localMaps.msids.splice(0, 0, lqStream.getVideoTracks()[0].id);
hqStream.addTrack(lqStream.getVideoTracks()[0]);
stream = hqStream;
success(hqStream);
}, err);
}, err);
@ -656,18 +755,95 @@ function Simulcast() {
// add hq stream to local map
localMaps.msids.push(hqStream.getVideoTracks()[0].id);
stream = hqStream;
success(hqStream);
}, err);
}
};
Simulcast.prototype.getRemoteVideoStreamIdBySSRC = function (primarySSRC) {
return remoteMaps.ssrc2Msid[primarySSRC];
/**
* Gets the fully qualified msid (stream.id + track.id) associated to the
* SSRC.
*
* @param ssrc
* @returns {*}
*/
Simulcast.prototype.getRemoteVideoStreamIdBySSRC = function (ssrc) {
// ssrc2Msid values are fully qualified msids of the form
// "<stream.id> <track.id>" (see _setReceivingVideoStream, which splits
// them on the space). Yields undefined for unknown SSRCs.
return remoteMaps.ssrc2Msid[ssrc];
};
/**
 * Convenience wrapper around _parseMedia that accepts a whole session
 * description instead of pre-split SDP lines.
 *
 * @param desc a session description object with an "sdp" attribute.
 * @param mediatypes the media types to extract.
 * @returns {*} the parsed media descriptions.
 */
Simulcast.prototype.parseMedia = function (desc, mediatypes) {
    return this._parseMedia(desc.sdp.split('\r\n'), mediatypes);
};
/**
 * Enables the local video track whose simulcast SSRC matches the given
 * one. Triggered when the bridge sends a StartSimulcastLayerEvent.
 *
 * @param ssrc the primary SSRC of the layer to start.
 * @private
 */
Simulcast.prototype._startLocalVideoStream = function (ssrc) {
    // Map the SSRC back to its local track id. Loose equality is kept
    // from the original code: the SSRC and the map values may differ
    // in type (string vs number).
    var matchedTrackId;
    for (var tid in localMaps.msid2ssrc) {
        if (localMaps.msid2ssrc[tid] == ssrc) {
            matchedTrackId = tid;
            break;
        }
    }
    // Enable the matching local video track, if any.
    var tracks = stream.getVideoTracks();
    for (var i = 0; i < tracks.length; i++) {
        if (tracks[i].id === matchedTrackId) {
            tracks[i].enabled = true;
            break;
        }
    }
};
/**
 * Disables the local video track whose simulcast SSRC matches the given
 * one. Triggered when the bridge sends a StopSimulcastLayerEvent.
 *
 * @param ssrc the primary SSRC of the layer to stop.
 * @private
 */
Simulcast.prototype._stopLocalVideoStream = function (ssrc) {
    // Map the SSRC back to its local track id. Loose equality is kept
    // from the original code: the SSRC and the map values may differ
    // in type (string vs number).
    var matchedTrackId;
    for (var tid in localMaps.msid2ssrc) {
        if (localMaps.msid2ssrc[tid] == ssrc) {
            matchedTrackId = tid;
            break;
        }
    }
    // Disable the matching local video track, if any.
    var tracks = stream.getVideoTracks();
    for (var i = 0; i < tracks.length; i++) {
        if (tracks[i].id === matchedTrackId) {
            tracks[i].enabled = false;
            break;
        }
    }
};
/**
 * Returns a new stream wrapping the currently enabled local simulcast
 * video track.
 *
 * NOTE(review): if no track is enabled, the last inspected track is
 * wrapped instead — this quirk is preserved from the original code.
 *
 * @returns {webkitMediaStream} a single-track stream.
 */
Simulcast.prototype.getLocalVideoStream = function () {
    var selected;
    stream.getVideoTracks().some(function (t) {
        selected = t;
        return t.enabled;
    });
    return new webkitMediaStream([selected]);
};
$(document).bind('simulcastlayerschanged', function (event, endpointSimulcastLayers) {
    // The bridge tells us which simulcast layer it now forwards for
    // each endpoint; record the elected track per endpoint.
    endpointSimulcastLayers.forEach(function (esl) {
        var simulcast = new Simulcast();
        simulcast._setReceivingVideoStream(esl.simulcastLayer.primarySSRC);
    });
});
$(document).bind('startsimulcastlayer', function (event, simulcastLayer) {
    // The bridge asked us to start sending this layer; enable the
    // corresponding local video track.
    new Simulcast()._startLocalVideoStream(simulcastLayer.primarySSRC);
});
$(document).bind('stopsimulcastlayer', function (event, simulcastLayer) {
    // The bridge asked us to stop sending this layer; disable the
    // corresponding local video track.
    new Simulcast()._stopLocalVideoStream(simulcastLayer.primarySSRC);
});
}());

View File

@ -116,6 +116,10 @@ var VideoLayout = (function (my) {
var isVisible = $('#largeVideo').is(':visible');
// we need this here because after the fade the videoSrc may have
// changed.
var isDesktop = isVideoSrcDesktop(newSrc);
$('#largeVideo').fadeOut(300, function () {
var oldSrc = $(this).attr('src');
@ -137,7 +141,7 @@ var VideoLayout = (function (my) {
}
// Change the way we'll be measuring and positioning large video
var isDesktop = isVideoSrcDesktop(newSrc);
getVideoSize = isDesktop
? getDesktopVideoSize
: getCameraVideoSize;
@ -209,7 +213,7 @@ var VideoLayout = (function (my) {
// Triggers a "video.selected" event. The "false" parameter indicates
// this isn't a prezi.
$(document).trigger("video.selected", [false]);
$(document).trigger("video.selected", [false, userJid]);
VideoLayout.updateLargeVideo(videoSrc, 1);
@ -1294,6 +1298,28 @@ var VideoLayout = (function (my) {
}
});
/**
 * Re-attaches the local video element to the currently enabled local
 * simulcast track. Starting and stopping a layer both change which
 * local track is enabled, so the two events share one handler (the
 * original had two byte-identical handler bodies).
 *
 * @param event the jQuery event (unused).
 * @param simulcastLayer the layer payload from the bridge (unused).
 */
function updateLocalSimulcastVideo(event, simulcastLayer) {
    var localVideoSelector = $('#' + 'localVideo_' + connection.jingle.localVideo.id);
    var simulcast = new Simulcast();
    var stream = simulcast.getLocalVideoStream();
    // Attach WebRTC stream
    RTC.attachMediaStream(localVideoSelector, stream);
    localVideoSrc = $(localVideoSelector).attr('src');
}
$(document).bind('startsimulcastlayer', updateLocalSimulcastVideo);
$(document).bind('stopsimulcastlayer', updateLocalSimulcastVideo);
/**
* On simulcast layers changed event.
*/
@ -1302,7 +1328,6 @@ var VideoLayout = (function (my) {
endpointSimulcastLayers.forEach(function (esl) {
var primarySSRC = esl.simulcastLayer.primarySSRC;
simulcast.setReceivingVideoStream(primarySSRC);
var msid = simulcast.getRemoteVideoStreamIdBySSRC(primarySSRC);
// Get session and stream from msid.