Implements RTC module.

hristoterezov 2014-12-19 15:59:08 +02:00
parent 996b1791d5
commit 5b34a66cb6
20 changed files with 2109 additions and 757 deletions
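In short: the WebRTC plumbing that used to live in app.js and the strophe adapter (setupRTC, getUserMediaWithConstraints, the data channel handlers) moves into a bundled RTC module (libs/modules/RTC.bundle.js, built from modules/RTC/), with shared constants under service/RTC/. A minimal sketch, not part of the diff, of how app.js drives media setup after this change, using only the calls visible below:

// app.js registers for local stream creation, then lets the RTC module
// detect the browser and request microphone/camera permissions.
RTC.addStreamListener(maybeDoJoin, StreamEventTypes.EVENT_TYPE_LOCAL_CREATED);
RTC.addStreamListener(VideoLayout.onLocalStreamCreated,
    StreamEventTypes.EVENT_TYPE_LOCAL_CREATED);
RTC.start(); // creates RTCUtils and calls obtainAudioAndVideoPermissions()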

app.js (196 lines changed)

@@ -4,14 +4,12 @@ var connection = null;
var authenticatedUser = false;
var authenticationWindow = null;
var activecall = null;
var RTC = null;
var nickname = null;
var sharedKey = '';
var focusMucJid = null;
var roomUrl = null;
var roomName = null;
var ssrc2jid = {};
var mediaStreams = {};
var bridgeIsDown = false;
//TODO: this array must be removed when firefox implement multistream support
var notReceivedSSRCs = [];
@@ -62,49 +60,10 @@ var sessionTerminated = false;
function init() {
Toolbar.setupButtonsFromConfig();
RTC = setupRTC();
if (RTC === null) {
window.location.href = 'webrtcrequired.html';
return;
} else if (RTC.browser !== 'chrome' &&
config.enableFirefoxSupport !== true) {
window.location.href = 'chromeonly.html';
return;
}
obtainAudioAndVideoPermissions(function (stream) {
var audioStream, videoStream;
if(window.webkitMediaStream)
{
var audioStream = new webkitMediaStream();
var videoStream = new webkitMediaStream();
var audioTracks = stream.getAudioTracks();
var videoTracks = stream.getVideoTracks();
for (var i = 0; i < audioTracks.length; i++) {
audioStream.addTrack(audioTracks[i]);
}
for (i = 0; i < videoTracks.length; i++) {
videoStream.addTrack(videoTracks[i]);
}
VideoLayout.changeLocalAudio(audioStream);
statistics.onStreamCreated(audioStream);
VideoLayout.changeLocalVideo(videoStream, true);
}
else
{
VideoLayout.changeLocalStream(stream);
statistics.onStreamCreated(stream);
}
maybeDoJoin();
});
RTC.addStreamListener(maybeDoJoin, StreamEventTypes.EVENT_TYPE_LOCAL_CREATED);
RTC.addStreamListener(VideoLayout.onLocalStreamCreated, StreamEventTypes.EVENT_TYPE_LOCAL_CREATED);
RTC.start();
var jid = document.getElementById('jid').value || config.hosts.anonymousdomain || config.hosts.domain || window.location.hostname;
connect(jid);
@@ -132,7 +91,7 @@ function connect(jid, password) {
if (connection.disco) {
// for chrome, add multistream cap
}
connection.jingle.pc_constraints = RTC.pc_constraints;
connection.jingle.pc_constraints = RTC.getPCConstraints();
if (config.useIPv6) {
// https://code.google.com/p/webrtc/issues/detail?id=2828
if (!connection.jingle.pc_constraints.optional) connection.jingle.pc_constraints.optional = [];
@@ -175,42 +134,7 @@ function connect(jid, password) {
});
}
/**
* We ask for audio and video combined stream in order to get permissions and
* not to ask twice.
*/
function obtainAudioAndVideoPermissions(callback) {
// Get AV
var cb = function (stream) {
console.log('got', stream, stream.getAudioTracks().length, stream.getVideoTracks().length);
callback(stream);
trackUsage('localMedia', {
audio: stream.getAudioTracks().length,
video: stream.getVideoTracks().length
});
}
getUserMediaWithConstraints(
['audio', 'video'],
cb,
function (error) {
console.error('failed to obtain audio/video stream - trying audio only', error);
getUserMediaWithConstraints(
['audio'],
cb,
function (error) {
console.error('failed to obtain audio/video stream - stop', error);
trackUsage('localMediaError', {
media: error.media || 'video',
name : error.name
});
messageHandler.showError("Error",
"Failed to obtain permissions to use the local microphone" +
"and/or camera.");
}
);
},
config.resolution || '360');
}
function maybeDoJoin() {
if (connection && connection.connected && Strophe.getResourceFromJid(connection.jid) // .connected is true while connecting?
@@ -382,7 +306,7 @@ function waitForPresence(data, sid) {
}
//TODO: this code should be removed when firefox implement multistream support
if(RTC.browser == "firefox")
if(RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_FIREFOX)
{
if((notReceivedSSRCs.length == 0) ||
!ssrc2jid[notReceivedSSRCs[notReceivedSSRCs.length - 1]])
@@ -402,15 +326,7 @@ function waitForPresence(data, sid) {
}
}
// NOTE(gp) now that we have simulcast, a media stream can have more than 1
// ssrc. We should probably take that into account in our MediaStream
// wrapper.
var mediaStream = new MediaStream(data, sid, thessrc);
var jid = data.peerjid || connection.emuc.myroomjid;
if(!mediaStreams[jid]) {
mediaStreams[jid] = {};
}
mediaStreams[jid][mediaStream.type] = mediaStream;
RTC.createRemoteStream(data, sid, thessrc);
var container;
var remotes = document.getElementById('remoteVideos');
@@ -569,13 +485,8 @@ $(document).bind('callincoming.jingle', function (event, sid) {
// TODO: do we check activecall == null?
activecall = sess;
statistics.onConfereceCreated(sess);
// Bind data channel listener in case we're a regular participant
if (config.openSctp)
{
bindDataChannelListener(sess.peerconnection);
}
statistics.onConferenceCreated(sess);
RTC.onConferenceCreated(sess);
// TODO: check affiliation and/or role
console.log('emuc data for', sess.peerjid, connection.emuc.members[sess.peerjid]);
@@ -588,15 +499,7 @@ $(document).bind('callincoming.jingle', function (event, sid) {
$(document).bind('conferenceCreated.jingle', function (event, focus)
{
statistics.onConfereceCreated(getConferenceHandler());
});
$(document).bind('conferenceCreated.jingle', function (event, focus)
{
// Bind data channel listener in case we're the focus
if (config.openSctp)
{
bindDataChannelListener(focus.peerconnection);
}
RTC.onConferenceCreated(focus);
});
$(document).bind('setLocalDescription.jingle', function (event, sid) {
@@ -1622,29 +1525,9 @@ $(document).on('webkitfullscreenchange mozfullscreenchange fullscreenchange',
document.mozFullScreen ||
document.webkitIsFullScreen;
if (isFullScreen) {
setView("fullscreen");
}
else {
setView("default");
}
}
);
/**
* Sets the current view.
*/
function setView(viewName) {
// if (viewName == "fullscreen") {
// document.getElementById('videolayout_fullscreen').disabled = false;
// document.getElementById('videolayout_default').disabled = true;
// }
// else {
// document.getElementById('videolayout_default').disabled = false;
// document.getElementById('videolayout_fullscreen').disabled = true;
// }
}
$(document).bind('error.jingle',
function (event, session, error)
{
@@ -1662,54 +1545,6 @@ $(document).bind('fatalError.jingle',
}
);
function onSelectedEndpointChanged(userJid)
{
console.log('selected endpoint changed: ', userJid);
if (_dataChannels && _dataChannels.length != 0)
{
_dataChannels.some(function (dataChannel) {
if (dataChannel.readyState == 'open')
{
dataChannel.send(JSON.stringify({
'colibriClass': 'SelectedEndpointChangedEvent',
'selectedEndpoint': (!userJid || userJid == null)
? null : userJid
}));
return true;
}
});
}
}
$(document).bind("selectedendpointchanged", function(event, userJid) {
onSelectedEndpointChanged(userJid);
});
function onPinnedEndpointChanged(userJid)
{
console.log('pinned endpoint changed: ', userJid);
if (_dataChannels && _dataChannels.length != 0)
{
_dataChannels.some(function (dataChannel) {
if (dataChannel.readyState == 'open')
{
dataChannel.send(JSON.stringify({
'colibriClass': 'PinnedEndpointChangedEvent',
'pinnedEndpoint': (!userJid || userJid == null)
? null : Strophe.getResourceFromJid(userJid)
}));
return true;
}
});
}
}
$(document).bind("pinnedendpointchanged", function(event, userJid) {
onPinnedEndpointChanged(userJid);
});
function callSipButtonClicked()
{
var defaultNumber
@@ -1768,14 +1603,3 @@ function hangup() {
);
}
$(document).on('videomuted.muc', function(event, jid, value) {
if(mediaStreams[jid] && mediaStreams[jid][MediaStream.VIDEO_TYPE]) {
var stream = mediaStreams[jid][MediaStream.VIDEO_TYPE];
var isMuted = (value === "true");
if (isMuted != stream.muted) {
stream.muted = isMuted;
Avatar.showUserAvatar(jid, isMuted);
}
}
});

avatar.js

@@ -129,10 +129,10 @@ var Avatar = (function(my) {
}
}
if (!mediaStreams[jid] || !mediaStreams[jid][MediaStream.VIDEO_TYPE]) {
if (!RTC.remoteStreams[jid] || !RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE]) {
return null;
}
return mediaStreams[jid][MediaStream.VIDEO_TYPE].muted;
return RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE].muted;
}
function getGravatarUrl(id, size) {

data_channels.js (deleted)

@@ -1,180 +0,0 @@
/* global connection, Strophe, updateLargeVideo, focusedVideoSrc*/
// cache datachannels to avoid garbage collection
// https://code.google.com/p/chromium/issues/detail?id=405545
var _dataChannels = [];
/**
* Callback triggered by PeerConnection when new data channel is opened
* on the bridge.
* @param event the event info object.
*/
function onDataChannel(event)
{
var dataChannel = event.channel;
dataChannel.onopen = function ()
{
console.info("Data channel opened by the Videobridge!", dataChannel);
// Code sample for sending string and/or binary data
// Sends String message to the bridge
//dataChannel.send("Hello bridge!");
// Sends 12 bytes binary message to the bridge
//dataChannel.send(new ArrayBuffer(12));
// when the data channel becomes available, tell the bridge about video
// selections so that it can do adaptive simulcast,
var userJid = VideoLayout.getLargeVideoState().userJid;
// we want the notification to trigger even if userJid is undefined,
// or null.
onSelectedEndpointChanged(userJid);
};
dataChannel.onerror = function (error)
{
console.error("Data Channel Error:", error, dataChannel);
};
dataChannel.onmessage = function (event)
{
var data = event.data;
// JSON
var obj;
try
{
obj = JSON.parse(data);
}
catch (e)
{
console.error(
"Failed to parse data channel message as JSON: ",
data,
dataChannel);
}
if (('undefined' !== typeof(obj)) && (null !== obj))
{
var colibriClass = obj.colibriClass;
if ("DominantSpeakerEndpointChangeEvent" === colibriClass)
{
// Endpoint ID from the Videobridge.
var dominantSpeakerEndpoint = obj.dominantSpeakerEndpoint;
console.info(
"Data channel new dominant speaker event: ",
dominantSpeakerEndpoint);
$(document).trigger(
'dominantspeakerchanged',
[dominantSpeakerEndpoint]);
}
else if ("InLastNChangeEvent" === colibriClass)
{
var oldValue = obj.oldValue;
var newValue = obj.newValue;
// Make sure that oldValue and newValue are of type boolean.
var type;
if ((type = typeof oldValue) !== 'boolean') {
if (type === 'string') {
oldValue = (oldValue == "true");
} else {
oldValue = new Boolean(oldValue).valueOf();
}
}
if ((type = typeof newValue) !== 'boolean') {
if (type === 'string') {
newValue = (newValue == "true");
} else {
newValue = new Boolean(newValue).valueOf();
}
}
$(document).trigger('inlastnchanged', [oldValue, newValue]);
}
else if ("LastNEndpointsChangeEvent" === colibriClass)
{
// The new/latest list of last-n endpoint IDs.
var lastNEndpoints = obj.lastNEndpoints;
// The list of endpoint IDs which are entering the list of
// last-n at this time i.e. were not in the old list of last-n
// endpoint IDs.
var endpointsEnteringLastN = obj.endpointsEnteringLastN;
var stream = obj.stream;
console.log(
"Data channel new last-n event: ",
lastNEndpoints, endpointsEnteringLastN, obj);
$(document).trigger(
'lastnchanged',
[lastNEndpoints, endpointsEnteringLastN, stream]);
}
else if ("SimulcastLayersChangedEvent" === colibriClass)
{
$(document).trigger(
'simulcastlayerschanged',
[obj.endpointSimulcastLayers]);
}
else if ("SimulcastLayersChangingEvent" === colibriClass)
{
$(document).trigger(
'simulcastlayerschanging',
[obj.endpointSimulcastLayers]);
}
else if ("StartSimulcastLayerEvent" === colibriClass)
{
$(document).trigger('startsimulcastlayer', obj.simulcastLayer);
}
else if ("StopSimulcastLayerEvent" === colibriClass)
{
$(document).trigger('stopsimulcastlayer', obj.simulcastLayer);
}
else
{
console.debug("Data channel JSON-formatted message: ", obj);
}
}
};
dataChannel.onclose = function ()
{
console.info("The Data Channel closed", dataChannel);
var idx = _dataChannels.indexOf(dataChannel);
if (idx > -1)
_dataChannels = _dataChannels.splice(idx, 1);
};
_dataChannels.push(dataChannel);
}
/**
* Binds "ondatachannel" event listener to given PeerConnection instance.
* @param peerConnection WebRTC peer connection instance.
*/
function bindDataChannelListener(peerConnection)
{
peerConnection.ondatachannel = onDataChannel;
// Sample code for opening new data channel from Jitsi Meet to the bridge.
// Although it's not a requirement to open separate channels from both bridge
// and peer as single channel can be used for sending and receiving data.
// So either channel opened by the bridge or the one opened here is enough
// for communication with the bridge.
/*
var dataChannelOptions = { reliable: true };
var dataChannel
= peerConnection.createDataChannel("myChannel", dataChannelOptions);
// Can be used only when is in open state
dataChannel.onopen = function ()
{
dataChannel.send("My channel !!!");
};
dataChannel.onmessage = function (event)
{
var msgData = event.data;
console.info("Got My Data Channel Message:", msgData, dataChannel);
};
*/
}

desktopsharing.js

@@ -28,7 +28,7 @@ var _desktopSharingEnabled = null;
* Flag 'chrome://flags/#enable-usermedia-screen-capture' must be enabled.
*/
function obtainWebRTCScreen(streamCallback, failCallback) {
getUserMediaWithConstraints(
RTC.getUserMediaWithConstraints(
['screen'],
streamCallback,
failCallback
@@ -135,7 +135,7 @@ function doGetStreamFromExtension(streamCallback, failCallback) {
}
console.log("Response from extension: " + response);
if (response.streamId) {
getUserMediaWithConstraints(
RTC.getUserMediaWithConstraints(
['desktop'],
function (stream) {
streamCallback(stream);
@@ -303,7 +303,7 @@ function toggleScreenSharing() {
getSwitchStreamFailed);
} else {
// Disable screen stream
getUserMediaWithConstraints(
RTC.getUserMediaWithConstraints(
['video'],
function (stream) {
// We are now using camera stream

index.html

@@ -34,7 +34,6 @@
<script src="muc.js?v=17"></script><!-- simple MUC library -->
<script src="estos_log.js?v=2"></script><!-- simple stanza logger -->
<script src="desktopsharing.js?v=3"></script><!-- desktop sharing -->
<script src="data_channels.js?v=3"></script><!-- data channels -->
<script src="app.js?v=22"></script><!-- application logic -->
<script src="commands.js?v=1"></script><!-- application logic -->
<script src="chat.js?v=15"></script><!-- chat logic -->
@@ -64,7 +63,11 @@
<script src="message_handler.js?v=2"></script>
<script src="api_connector.js?v=2"></script>
<script src="settings_menu.js?v=1"></script>
<script src="libs/modules/statistics.bundle.js"></script>
<script src="service/RTC/RTCBrowserType.js?v=1"></script>
<script src="service/RTC/StreamEventTypes.js?v=1"></script>
<script src="service/RTC/MediaStreamTypes.js?v=1"></script>
<script src="libs/modules/statistics.bundle.js?v=1"></script>
<script src="libs/modules/RTC.bundle.js?v=1"></script>
<script src="avatar.js?v=4"></script><!-- avatars -->
<link rel="stylesheet" href="css/font.css?v=6"/>
<link rel="stylesheet" href="css/toastr.css?v=1">

libs/modules/RTC.bundle.js (new file, 1126 lines)

File diff suppressed because one or more lines are too long

libs/modules/statistics.bundle.js
File diff suppressed because one or more lines are too long


@@ -512,275 +512,3 @@ TraceablePeerConnection.prototype.getStats = function(callback, errback) {
}
};
// mozilla chrome compat layer -- very similar to adapter.js
function setupRTC() {
var RTC = null;
if (navigator.mozGetUserMedia) {
console.log('This appears to be Firefox');
var version = parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
if (version >= 22) {
RTC = {
peerconnection: mozRTCPeerConnection,
browser: 'firefox',
getUserMedia: navigator.mozGetUserMedia.bind(navigator),
attachMediaStream: function (element, stream) {
element[0].mozSrcObject = stream;
element[0].play();
},
pc_constraints: {},
getLocalSSRC: function (session, callback) {
// NOTE(gp) latest FF nightlies seem to provide the local
// SSRCs in their SDP so there's no longer necessary to
// take it from the peer connection stats.
/*session.peerconnection.getStats(function (s) {
var ssrcs = {};
s.forEach(function (item) {
if (item.type == "outboundrtp" && !item.isRemote)
{
ssrcs[item.id.split('_')[2]] = item.ssrc;
}
});
session.localStreamsSSRC = {
"audio": ssrcs.audio,//for stable 0
"video": ssrcs.video// for stable 1
};
callback(session.localStreamsSSRC);
},
function () {
callback(null);
});*/
callback(null);
},
getStreamID: function (stream) {
var tracks = stream.getVideoTracks();
if(!tracks || tracks.length == 0)
{
tracks = stream.getAudioTracks();
}
return tracks[0].id.replace(/[\{,\}]/g,"");
},
getVideoSrc: function (element) {
return element.mozSrcObject;
},
setVideoSrc: function (element, src) {
element.mozSrcObject = src;
}
};
if (!MediaStream.prototype.getVideoTracks)
MediaStream.prototype.getVideoTracks = function () { return []; };
if (!MediaStream.prototype.getAudioTracks)
MediaStream.prototype.getAudioTracks = function () { return []; };
RTCSessionDescription = mozRTCSessionDescription;
RTCIceCandidate = mozRTCIceCandidate;
}
} else if (navigator.webkitGetUserMedia) {
console.log('This appears to be Chrome');
RTC = {
peerconnection: webkitRTCPeerConnection,
browser: 'chrome',
getUserMedia: navigator.webkitGetUserMedia.bind(navigator),
attachMediaStream: function (element, stream) {
element.attr('src', webkitURL.createObjectURL(stream));
},
// DTLS should now be enabled by default but..
pc_constraints: {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]},
getLocalSSRC: function (session, callback) {
callback(null);
},
getStreamID: function (stream) {
// streams from FF endpoints have the characters '{' and '}'
// that make jQuery choke.
return stream.id.replace(/[\{,\}]/g,"");
},
getVideoSrc: function (element) {
return element.getAttribute("src");
},
setVideoSrc: function (element, src) {
element.setAttribute("src", src);
}
};
if (navigator.userAgent.indexOf('Android') != -1) {
RTC.pc_constraints = {}; // disable DTLS on Android
}
if (!webkitMediaStream.prototype.getVideoTracks) {
webkitMediaStream.prototype.getVideoTracks = function () {
return this.videoTracks;
};
}
if (!webkitMediaStream.prototype.getAudioTracks) {
webkitMediaStream.prototype.getAudioTracks = function () {
return this.audioTracks;
};
}
}
if (RTC === null) {
try { console.log('Browser does not appear to be WebRTC-capable'); } catch (e) { }
}
return RTC;
}
function getUserMediaWithConstraints(um, success_callback, failure_callback, resolution, bandwidth, fps, desktopStream) {
var constraints = {audio: false, video: false};
if (um.indexOf('video') >= 0) {
constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
}
if (um.indexOf('audio') >= 0) {
constraints.audio = { mandatory: {}, optional: []};// same behaviour as true
}
if (um.indexOf('screen') >= 0) {
constraints.video = {
mandatory: {
chromeMediaSource: "screen",
googLeakyBucket: true,
maxWidth: window.screen.width,
maxHeight: window.screen.height,
maxFrameRate: 3
},
optional: []
};
}
if (um.indexOf('desktop') >= 0) {
constraints.video = {
mandatory: {
chromeMediaSource: "desktop",
chromeMediaSourceId: desktopStream,
googLeakyBucket: true,
maxWidth: window.screen.width,
maxHeight: window.screen.height,
maxFrameRate: 3
},
optional: []
}
}
if (constraints.audio) {
// if it is good enough for hangouts...
constraints.audio.optional.push(
{googEchoCancellation: true},
{googAutoGainControl: true},
{googNoiseSupression: true},
{googHighpassFilter: true},
{googNoisesuppression2: true},
{googEchoCancellation2: true},
{googAutoGainControl2: true}
);
}
if (constraints.video) {
constraints.video.optional.push(
{googNoiseReduction: false} // chrome 37 workaround for issue 3807, reenable in M38
);
if (um.indexOf('video') >= 0) {
constraints.video.optional.push(
{googLeakyBucket: true}
);
}
}
// Check if we are running on Android device
var isAndroid = navigator.userAgent.indexOf('Android') != -1;
if (resolution && !constraints.video || isAndroid) {
constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
}
// see https://code.google.com/p/chromium/issues/detail?id=143631#c9 for list of supported resolutions
switch (resolution) {
// 16:9 first
case '1080':
case 'fullhd':
constraints.video.mandatory.minWidth = 1920;
constraints.video.mandatory.minHeight = 1080;
break;
case '720':
case 'hd':
constraints.video.mandatory.minWidth = 1280;
constraints.video.mandatory.minHeight = 720;
break;
case '360':
constraints.video.mandatory.minWidth = 640;
constraints.video.mandatory.minHeight = 360;
break;
case '180':
constraints.video.mandatory.minWidth = 320;
constraints.video.mandatory.minHeight = 180;
break;
// 4:3
case '960':
constraints.video.mandatory.minWidth = 960;
constraints.video.mandatory.minHeight = 720;
break;
case '640':
case 'vga':
constraints.video.mandatory.minWidth = 640;
constraints.video.mandatory.minHeight = 480;
break;
case '320':
constraints.video.mandatory.minWidth = 320;
constraints.video.mandatory.minHeight = 240;
break;
default:
if (isAndroid) {
constraints.video.mandatory.minWidth = 320;
constraints.video.mandatory.minHeight = 240;
constraints.video.mandatory.maxFrameRate = 15;
}
break;
}
if (constraints.video.mandatory.minWidth)
constraints.video.mandatory.maxWidth = constraints.video.mandatory.minWidth;
if (constraints.video.mandatory.minHeight)
constraints.video.mandatory.maxHeight = constraints.video.mandatory.minHeight;
if (bandwidth) { // doesn't work currently, see webrtc issue 1846
if (!constraints.video) constraints.video = {mandatory: {}, optional: []};//same behaviour as true
constraints.video.optional.push({bandwidth: bandwidth});
}
if (fps) { // for some cameras it might be necessary to request 30fps
// so they choose 30fps mjpg over 10fps yuy2
if (!constraints.video) constraints.video = {mandatory: {}, optional: []};// same behaviour as true;
constraints.video.mandatory.minFrameRate = fps;
}
var isFF = navigator.userAgent.toLowerCase().indexOf('firefox') > -1;
try {
if (config.enableSimulcast
&& constraints.video
&& constraints.video.chromeMediaSource !== 'screen'
&& constraints.video.chromeMediaSource !== 'desktop'
&& !isAndroid
// We currently do not support FF, as it doesn't have multistream support.
&& !isFF) {
simulcast.getUserMedia(constraints, function (stream) {
console.log('onUserMediaSuccess');
success_callback(stream);
},
function (error) {
console.warn('Failed to get access to local media. Error ', error);
if (failure_callback) {
failure_callback(error);
}
});
} else {
RTC.getUserMedia(constraints,
function (stream) {
console.log('onUserMediaSuccess');
success_callback(stream);
},
function (error) {
console.warn('Failed to get access to local media. Error ', error);
if (failure_callback) {
failure_callback(error);
}
});
}
} catch (e) {
console.error('GUM failed: ', e);
if(failure_callback) {
failure_callback(e);
}
}
}


@@ -262,19 +262,7 @@ JingleSession.prototype.sendIceCandidate = function (candidate) {
},
10000);
}
RTC.getLocalSSRC(this, function (ssrcs) {
if(ssrcs)
{
sendJingle(ssrcs);
$(document).trigger("setLocalDescription.jingle", [self.sid]);
}
else
{
sendJingle();
}
});
sendJingle();
}
this.lasticecandidate = true;
console.log('Have we encountered any srflx candidates? ' + this.hadstuncandidate);
@@ -390,11 +378,8 @@ JingleSession.prototype.createdOffer = function (sdp) {
function () {
if(this.usetrickle)
{
RTC.getLocalSSRC(function(ssrc)
{
sendJingle(ssrc);
$(document).trigger('setLocalDescription.jingle', [self.sid]);
});
sendJingle();
$(document).trigger('setLocalDescription.jingle', [self.sid]);
}
else
$(document).trigger('setLocalDescription.jingle', [self.sid]);
@@ -631,10 +616,8 @@ JingleSession.prototype.createdAnswer = function (sdp, provisional) {
//console.log('setLocalDescription success');
if (self.usetrickle && !self.usepranswer) {
RTC.getLocalSSRC(self, function (ssrc) {
sendJingle(ssrc);
$(document).trigger('setLocalDescription.jingle', [self.sid]);
});
sendJingle();
$(document).trigger('setLocalDescription.jingle', [self.sid]);
}
else
$(document).trigger('setLocalDescription.jingle', [self.sid]);

modules/RTC/DataChannels.js (new file, 235 lines)

@@ -0,0 +1,235 @@
/* global connection, Strophe, updateLargeVideo, focusedVideoSrc*/
// cache datachannels to avoid garbage collection
// https://code.google.com/p/chromium/issues/detail?id=405545
var _dataChannels = [];
var DataChannels =
{
/**
* Callback triggered by PeerConnection when new data channel is opened
* on the bridge.
* @param event the event info object.
*/
onDataChannel: function (event)
{
var dataChannel = event.channel;
dataChannel.onopen = function () {
console.info("Data channel opened by the Videobridge!", dataChannel);
// Code sample for sending string and/or binary data
// Sends String message to the bridge
//dataChannel.send("Hello bridge!");
// Sends 12 bytes binary message to the bridge
//dataChannel.send(new ArrayBuffer(12));
// when the data channel becomes available, tell the bridge about video
// selections so that it can do adaptive simulcast,
var userJid = VideoLayout.getLargeVideoState().userJid;
// we want the notification to trigger even if userJid is undefined,
// or null.
onSelectedEndpointChanged(userJid);
};
dataChannel.onerror = function (error) {
console.error("Data Channel Error:", error, dataChannel);
};
dataChannel.onmessage = function (event) {
var data = event.data;
// JSON
var obj;
try {
obj = JSON.parse(data);
}
catch (e) {
console.error(
"Failed to parse data channel message as JSON: ",
data,
dataChannel);
}
if (('undefined' !== typeof(obj)) && (null !== obj)) {
var colibriClass = obj.colibriClass;
if ("DominantSpeakerEndpointChangeEvent" === colibriClass) {
// Endpoint ID from the Videobridge.
var dominantSpeakerEndpoint = obj.dominantSpeakerEndpoint;
console.info(
"Data channel new dominant speaker event: ",
dominantSpeakerEndpoint);
$(document).trigger(
'dominantspeakerchanged',
[dominantSpeakerEndpoint]);
}
else if ("InLastNChangeEvent" === colibriClass)
{
var oldValue = obj.oldValue;
var newValue = obj.newValue;
// Make sure that oldValue and newValue are of type boolean.
var type;
if ((type = typeof oldValue) !== 'boolean') {
if (type === 'string') {
oldValue = (oldValue == "true");
} else {
oldValue = new Boolean(oldValue).valueOf();
}
}
if ((type = typeof newValue) !== 'boolean') {
if (type === 'string') {
newValue = (newValue == "true");
} else {
newValue = new Boolean(newValue).valueOf();
}
}
$(document).trigger('inlastnchanged', [oldValue, newValue]);
}
else if ("LastNEndpointsChangeEvent" === colibriClass)
{
// The new/latest list of last-n endpoint IDs.
var lastNEndpoints = obj.lastNEndpoints;
// The list of endpoint IDs which are entering the list of
// last-n at this time i.e. were not in the old list of last-n
// endpoint IDs.
var endpointsEnteringLastN = obj.endpointsEnteringLastN;
var stream = obj.stream;
console.log(
"Data channel new last-n event: ",
lastNEndpoints, endpointsEnteringLastN, obj);
$(document).trigger(
'lastnchanged',
[lastNEndpoints, endpointsEnteringLastN, stream]);
}
else if ("SimulcastLayersChangedEvent" === colibriClass)
{
$(document).trigger(
'simulcastlayerschanged',
[obj.endpointSimulcastLayers]);
}
else if ("SimulcastLayersChangingEvent" === colibriClass)
{
$(document).trigger(
'simulcastlayerschanging',
[obj.endpointSimulcastLayers]);
}
else if ("StartSimulcastLayerEvent" === colibriClass)
{
$(document).trigger('startsimulcastlayer', obj.simulcastLayer);
}
else if ("StopSimulcastLayerEvent" === colibriClass)
{
$(document).trigger('stopsimulcastlayer', obj.simulcastLayer);
}
else
{
console.debug("Data channel JSON-formatted message: ", obj);
}
}
};
dataChannel.onclose = function ()
{
console.info("The Data Channel closed", dataChannel);
var idx = _dataChannels.indexOf(dataChannel);
if (idx > -1)
_dataChannels.splice(idx, 1);
};
_dataChannels.push(dataChannel);
},
/**
* Binds "ondatachannel" event listener to given PeerConnection instance.
* @param peerConnection WebRTC peer connection instance.
*/
bindDataChannelListener: function (peerConnection) {
if(!config.openSctp)
return;
peerConnection.ondatachannel = this.onDataChannel;
// Sample code for opening new data channel from Jitsi Meet to the bridge.
// Although it's not a requirement to open separate channels from both bridge
// and peer as single channel can be used for sending and receiving data.
// So either channel opened by the bridge or the one opened here is enough
// for communication with the bridge.
/*var dataChannelOptions =
{
reliable: true
};
var dataChannel
= peerConnection.createDataChannel("myChannel", dataChannelOptions);
// Can be used only when is in open state
dataChannel.onopen = function ()
{
dataChannel.send("My channel !!!");
};
dataChannel.onmessage = function (event)
{
var msgData = event.data;
console.info("Got My Data Channel Message:", msgData, dataChannel);
};*/
}
}
function onSelectedEndpointChanged(userJid)
{
console.log('selected endpoint changed: ', userJid);
if (_dataChannels && _dataChannels.length != 0)
{
_dataChannels.some(function (dataChannel) {
if (dataChannel.readyState == 'open')
{
dataChannel.send(JSON.stringify({
'colibriClass': 'SelectedEndpointChangedEvent',
'selectedEndpoint': (!userJid || userJid == null)
? null : userJid
}));
return true;
}
});
}
}
$(document).bind("selectedendpointchanged", function(event, userJid) {
onSelectedEndpointChanged(userJid);
});
function onPinnedEndpointChanged(userJid)
{
console.log('pinned endpoint changed: ', userJid);
if (_dataChannels && _dataChannels.length != 0)
{
_dataChannels.some(function (dataChannel) {
if (dataChannel.readyState == 'open')
{
dataChannel.send(JSON.stringify({
'colibriClass': 'PinnedEndpointChangedEvent',
'pinnedEndpoint': (!userJid || userJid == null)
? null : Strophe.getResourceFromJid(userJid)
}));
return true;
}
});
}
}
$(document).bind("pinnedendpointchanged", function(event, userJid) {
onPinnedEndpointChanged(userJid);
});
module.exports = DataChannels;
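For context, a hedged sketch of how this module gets attached: the only call site added in this commit is RTC.onConferenceCreated (modules/RTC/RTC.js below), which passes the session's peer connection, and bindDataChannelListener is a no-op unless config.openSctp is set. Assuming sess is a JingleSession with a live peerconnection:

var DataChannels = require("./DataChannels.js");
// Channels opened by the Videobridge then arrive in onDataChannel, which
// dispatches the colibri events ('dominantspeakerchanged', 'lastnchanged', ...).
DataChannels.bindDataChannelListener(sess.peerconnection);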

modules/RTC/LocalStream.js (new file)

@@ -0,0 +1,66 @@
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
function LocalStream(stream, type, eventEmitter)
{
this.stream = stream;
this.eventEmitter = eventEmitter;
this.type = type;
var self = this;
this.stream.onended = function()
{
self.streamEnded();
};
}
LocalStream.prototype.streamEnded = function () {
this.eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_ENDED, this);
}
LocalStream.prototype.getOriginalStream = function()
{
return this.stream;
}
LocalStream.prototype.isAudioStream = function () {
return (this.stream.getAudioTracks() && this.stream.getAudioTracks().length > 0);
}
LocalStream.prototype.mute = function()
{
var ismuted = false;
var tracks = [];
if(this.type === "audio")
{
tracks = this.stream.getAudioTracks();
}
else
{
tracks = this.stream.getVideoTracks();
}
for (var idx = 0; idx < tracks.length; idx++) {
ismuted = !tracks[idx].enabled;
tracks[idx].enabled = !tracks[idx].enabled;
}
return ismuted;
}
LocalStream.prototype.isMuted = function () {
var tracks = [];
if(this.type === "audio")
{
tracks = this.stream.getAudioTracks();
}
else
{
tracks = this.stream.getVideoTracks();
}
for (var idx = 0; idx < tracks.length; idx++) {
if(tracks[idx].enabled)
return false;
}
return true;
}
module.exports = LocalStream;
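A short usage sketch (localAudio is a hypothetical wrapper obtained from RTC.createLocalStream(stream, "audio")): mute() flips every track's enabled flag and returns the pre-toggle muted state.

var wasMuted = localAudio.mute(); // toggle all audio tracks
console.log('was muted:', wasMuted, 'muted now:', localAudio.isMuted());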

modules/RTC/MediaStream.js (new file)

@@ -0,0 +1,49 @@
var RTC = require("./RTC.js");
////These lines should be uncommented when require works in app.js
//var RTCBrowserType = require("../../service/RTC/RTCBrowserType.js");
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
//var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
/**
* Creates a MediaStream object for the given data, session id and ssrc.
* It is a wrapper class for the MediaStream.
*
* @param data the data object from which we obtain the stream,
* the peerjid, etc.
* @param sid the session id
* @param ssrc the ssrc corresponding to this MediaStream
*
* @constructor
*/
function MediaStream(data, sid, ssrc, eventEmitter) {
this.sid = sid;
this.stream = data.stream;
this.peerjid = data.peerjid;
this.ssrc = ssrc;
this.type = (this.stream.getVideoTracks().length > 0)?
MediaStreamType.VIDEO_TYPE : MediaStreamType.AUDIO_TYPE;
this.muted = false;
eventEmitter.emit(StreamEventTypes.EVENT_TYPE_REMOTE_CREATED, this);
}
if(RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_FIREFOX)
{
if (!MediaStream.prototype.getVideoTracks)
MediaStream.prototype.getVideoTracks = function () { return []; };
if (!MediaStream.prototype.getAudioTracks)
MediaStream.prototype.getAudioTracks = function () { return []; };
}
MediaStream.prototype.getOriginalStream = function()
{
return this.stream;
}
MediaStream.prototype.setMute = function (value)
{
this.stream.muted = value;
this.muted = value;
}
module.exports = MediaStream;
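Since the constructor emits StreamEventTypes.EVENT_TYPE_REMOTE_CREATED on the shared emitter, consumers can subscribe through the RTC facade; a minimal sketch using only names from this diff:

RTC.addStreamListener(function (mediaStream) {
    // mediaStream.type is MediaStreamType.VIDEO_TYPE or AUDIO_TYPE
    console.log('remote', mediaStream.type, 'stream from', mediaStream.peerjid);
}, StreamEventTypes.EVENT_TYPE_REMOTE_CREATED);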

modules/RTC/RTC.js (new file, 122 lines)

@@ -0,0 +1,122 @@
var EventEmitter = require("events");
var RTCUtils = require("./RTCUtils.js");
//These lines should be uncommented when require works in app.js
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
//var XMPPEvents = require("../service/xmpp/XMPPEvents");
var eventEmitter = new EventEmitter();
var RTC = {
rtcUtils: null,
localStreams: [],
remoteStreams: {},
localAudio: null,
localVideo: null,
addStreamListener: function (listener, eventType) {
eventEmitter.on(eventType, listener);
},
removeStreamListener: function (listener, eventType) {
if(typeof eventType !== "string")
throw "Illegal argument";
eventEmitter.removeListener(eventType, listener);
},
createLocalStream: function (stream, type) {
var LocalStream = require("./LocalStream.js");
var localStream = new LocalStream(stream, type, eventEmitter);
this.localStreams.push(localStream);
if(type == "audio")
{
this.localAudio = localStream;
}
else
{
this.localVideo = localStream;
}
eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_CREATED,
localStream);
return localStream;
},
removeLocalStream: function (stream) {
for(var i = 0; i < this.localStreams.length; i++)
{
if(this.localStreams[i].getOriginalStream() === stream) {
delete this.localStreams[i];
return;
}
}
},
createRemoteStream: function (data, sid, thessrc) {
var MediaStream = require("./MediaStream.js");
var remoteStream = new MediaStream(data, sid, thessrc, eventEmitter);
var jid = data.peerjid || connection.emuc.myroomjid;
if(!this.remoteStreams[jid]) {
this.remoteStreams[jid] = {};
}
this.remoteStreams[jid][remoteStream.type]= remoteStream;
return remoteStream;
},
getBrowserType: function () {
return this.rtcUtils.browser;
},
getPCConstraints: function () {
return this.rtcUtils.pc_constraints;
},
getUserMediaWithConstraints:function(um, success_callback,
failure_callback, resolution,
bandwidth, fps, desktopStream)
{
return this.rtcUtils.getUserMediaWithConstraints(um, success_callback,
failure_callback, resolution, bandwidth, fps, desktopStream);
},
attachMediaStream: function (element, stream) {
this.rtcUtils.attachMediaStream(element, stream);
},
getStreamID: function (stream) {
return this.rtcUtils.getStreamID(stream);
},
getVideoSrc: function (element) {
return this.rtcUtils.getVideoSrc(element);
},
setVideoSrc: function (element, src) {
this.rtcUtils.setVideoSrc(element, src);
},
dispose: function() {
if (this.rtcUtils) {
this.rtcUtils = null;
}
},
stop: function () {
this.dispose();
},
start: function () {
this.rtcUtils = new RTCUtils(this);
this.rtcUtils.obtainAudioAndVideoPermissions();
},
onConferenceCreated: function(event) {
var DataChannels = require("./DataChannels.js");
DataChannels.bindDataChannelListener(event.peerconnection);
},
muteRemoteVideoStream: function (jid, value) {
var stream;
if(this.remoteStreams[jid] &&
this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE])
{
stream = this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE];
}
if(!stream)
return false;
var isMuted = (value === "true");
if (isMuted != stream.muted) {
stream.setMute(isMuted);
return true;
}
return false;
}
};
module.exports = RTC;
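One concrete consumer of the new facade is the remote video mute path: muteRemoteVideoStream returns true only when the muted flag actually changed, letting callers skip redundant UI updates. A hedged sketch mirroring the videolayout.js change below (jid and value come from a 'videomuted.muc' event):

if (RTC.muteRemoteVideoStream(jid, value)) { // value is "true" / "false"
    Avatar.showUserAvatar(jid, value === "true");
}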

modules/RTC/RTCUtils.js (new file, 338 lines)

@@ -0,0 +1,338 @@
//This should be uncommented when app.js supports require
//var RTCBrowserType = require("../../service/RTC/RTCBrowserType.js");
var constraints = {audio: false, video: false};
function setResolutionConstraints(resolution, isAndroid)
{
if (resolution && !constraints.video || isAndroid) {
constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
}
// see https://code.google.com/p/chromium/issues/detail?id=143631#c9 for list of supported resolutions
switch (resolution) {
// 16:9 first
case '1080':
case 'fullhd':
constraints.video.mandatory.minWidth = 1920;
constraints.video.mandatory.minHeight = 1080;
break;
case '720':
case 'hd':
constraints.video.mandatory.minWidth = 1280;
constraints.video.mandatory.minHeight = 720;
break;
case '360':
constraints.video.mandatory.minWidth = 640;
constraints.video.mandatory.minHeight = 360;
break;
case '180':
constraints.video.mandatory.minWidth = 320;
constraints.video.mandatory.minHeight = 180;
break;
// 4:3
case '960':
constraints.video.mandatory.minWidth = 960;
constraints.video.mandatory.minHeight = 720;
break;
case '640':
case 'vga':
constraints.video.mandatory.minWidth = 640;
constraints.video.mandatory.minHeight = 480;
break;
case '320':
constraints.video.mandatory.minWidth = 320;
constraints.video.mandatory.minHeight = 240;
break;
default:
if (isAndroid) {
constraints.video.mandatory.minWidth = 320;
constraints.video.mandatory.minHeight = 240;
constraints.video.mandatory.maxFrameRate = 15;
}
break;
}
if (constraints.video.mandatory.minWidth)
constraints.video.mandatory.maxWidth = constraints.video.mandatory.minWidth;
if (constraints.video.mandatory.minHeight)
constraints.video.mandatory.maxHeight = constraints.video.mandatory.minHeight;
}
function setConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid)
{
if (um.indexOf('video') >= 0) {
constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
}
if (um.indexOf('audio') >= 0) {
constraints.audio = { mandatory: {}, optional: []};// same behaviour as true
}
if (um.indexOf('screen') >= 0) {
constraints.video = {
mandatory: {
chromeMediaSource: "screen",
googLeakyBucket: true,
maxWidth: window.screen.width,
maxHeight: window.screen.height,
maxFrameRate: 3
},
optional: []
};
}
if (um.indexOf('desktop') >= 0) {
constraints.video = {
mandatory: {
chromeMediaSource: "desktop",
chromeMediaSourceId: desktopStream,
googLeakyBucket: true,
maxWidth: window.screen.width,
maxHeight: window.screen.height,
maxFrameRate: 3
},
optional: []
};
}
if (constraints.audio) {
// if it is good enough for hangouts...
constraints.audio.optional.push(
{googEchoCancellation: true},
{googAutoGainControl: true},
{googNoiseSupression: true},
{googHighpassFilter: true},
{googNoisesuppression2: true},
{googEchoCancellation2: true},
{googAutoGainControl2: true}
);
}
if (constraints.video) {
constraints.video.optional.push(
{googNoiseReduction: false} // chrome 37 workaround for issue 3807, reenable in M38
);
if (um.indexOf('video') >= 0) {
constraints.video.optional.push(
{googLeakyBucket: true}
);
}
}
setResolutionConstraints(resolution, isAndroid);
if (bandwidth) { // doesn't work currently, see webrtc issue 1846
if (!constraints.video) constraints.video = {mandatory: {}, optional: []};//same behaviour as true
constraints.video.optional.push({bandwidth: bandwidth});
}
if (fps) { // for some cameras it might be necessary to request 30fps
// so they choose 30fps mjpg over 10fps yuy2
if (!constraints.video) constraints.video = {mandatory: {}, optional: []};// same behaviour as true;
constraints.video.mandatory.minFrameRate = fps;
}
}
function RTCUtils(RTCService)
{
this.service = RTCService;
if (navigator.mozGetUserMedia) {
console.log('This appears to be Firefox');
var version = parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
if (version >= 22) {
this.peerconnection = mozRTCPeerConnection;
this.browser = RTCBrowserType.RTC_BROWSER_FIREFOX;
this.getUserMedia = navigator.mozGetUserMedia.bind(navigator);
this.pc_constraints = {};
this.attachMediaStream = function (element, stream) {
element[0].mozSrcObject = stream;
element[0].play();
};
this.getStreamID = function (stream) {
var tracks = stream.getVideoTracks();
if(!tracks || tracks.length == 0)
{
tracks = stream.getAudioTracks();
}
return tracks[0].id.replace(/[\{,\}]/g,"");
};
this.getVideoSrc = function (element) {
return element.mozSrcObject;
};
this.setVideoSrc = function (element, src) {
element.mozSrcObject = src;
};
RTCSessionDescription = mozRTCSessionDescription;
RTCIceCandidate = mozRTCIceCandidate;
}
} else if (navigator.webkitGetUserMedia) {
console.log('This appears to be Chrome');
this.peerconnection = webkitRTCPeerConnection;
this.browser = RTCBrowserType.RTC_BROWSER_CHROME;
this.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
this.attachMediaStream = function (element, stream) {
element.attr('src', webkitURL.createObjectURL(stream));
};
this.getStreamID = function (stream) {
// streams from FF endpoints have the characters '{' and '}'
// that make jQuery choke.
return stream.id.replace(/[\{,\}]/g,"");
};
this.getVideoSrc = function (element) {
return element.getAttribute("src");
};
this.setVideoSrc = function (element, src) {
element.setAttribute("src", src);
};
// DTLS should now be enabled by default but..
this.pc_constraints = {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]};
if (navigator.userAgent.indexOf('Android') != -1) {
this.pc_constraints = {}; // disable DTLS on Android
}
if (!webkitMediaStream.prototype.getVideoTracks) {
webkitMediaStream.prototype.getVideoTracks = function () {
return this.videoTracks;
};
}
if (!webkitMediaStream.prototype.getAudioTracks) {
webkitMediaStream.prototype.getAudioTracks = function () {
return this.audioTracks;
};
}
}
else
{
try { console.log('Browser does not appear to be WebRTC-capable'); } catch (e) { }
window.location.href = 'webrtcrequired.html';
return;
}
if (this.browser !== RTCBrowserType.RTC_BROWSER_CHROME &&
config.enableFirefoxSupport !== true) {
window.location.href = 'chromeonly.html';
return;
}
}
RTCUtils.prototype.getUserMediaWithConstraints = function(
um, success_callback, failure_callback, resolution,bandwidth, fps,
desktopStream)
{
// Check if we are running on Android device
var isAndroid = navigator.userAgent.indexOf('Android') != -1;
setConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid);
var isFF = navigator.userAgent.toLowerCase().indexOf('firefox') > -1;
try {
if (config.enableSimulcast
&& constraints.video
&& constraints.video.chromeMediaSource !== 'screen'
&& constraints.video.chromeMediaSource !== 'desktop'
&& !isAndroid
// We currently do not support FF, as it doesn't have multistream support.
&& !isFF) {
simulcast.getUserMedia(constraints, function (stream) {
console.log('onUserMediaSuccess');
success_callback(stream);
},
function (error) {
console.warn('Failed to get access to local media. Error ', error);
if (failure_callback) {
failure_callback(error);
}
});
} else {
this.getUserMedia(constraints,
function (stream) {
console.log('onUserMediaSuccess');
success_callback(stream);
},
function (error) {
console.warn('Failed to get access to local media. Error ', error);
if (failure_callback) {
failure_callback(error);
}
});
}
} catch (e) {
console.error('GUM failed: ', e);
if(failure_callback) {
failure_callback(e);
}
}
};
/**
* We ask for audio and video combined stream in order to get permissions and
* not to ask twice.
*/
RTCUtils.prototype.obtainAudioAndVideoPermissions = function() {
var self = this;
// Get AV
var cb = function (stream) {
console.log('got', stream, stream.getAudioTracks().length, stream.getVideoTracks().length);
self.handleLocalStream(stream);
trackUsage('localMedia', {
audio: stream.getAudioTracks().length,
video: stream.getVideoTracks().length
});
};
this.getUserMediaWithConstraints(
['audio', 'video'],
cb,
function (error) {
console.error('failed to obtain audio/video stream - trying audio only', error);
self.getUserMediaWithConstraints(
['audio'],
cb,
function (error) {
console.error('failed to obtain audio/video stream - stop', error);
trackUsage('localMediaError', {
media: error.media || 'video',
name : error.name
});
messageHandler.showError("Error",
"Failed to obtain permissions to use the local microphone" +
"and/or camera.");
}
);
},
config.resolution || '360');
}
RTCUtils.prototype.handleLocalStream = function(stream)
{
if(window.webkitMediaStream)
{
var audioStream = new webkitMediaStream();
var videoStream = new webkitMediaStream();
var audioTracks = stream.getAudioTracks();
var videoTracks = stream.getVideoTracks();
for (var i = 0; i < audioTracks.length; i++) {
audioStream.addTrack(audioTracks[i]);
}
this.service.createLocalStream(audioStream, "audio");
for (i = 0; i < videoTracks.length; i++) {
videoStream.addTrack(videoTracks[i]);
}
this.service.createLocalStream(videoStream, "video");
}
else
{//firefox
this.service.createLocalStream(stream, "stream");
}
};
module.exports = RTCUtils;
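The resolution argument maps to pinned min/max dimensions in setResolutionConstraints (e.g. '720' yields 1280x720 for both min and max). A usage sketch, assuming rtcUtils is the instance created by RTC.start(), on Chrome with simulcast disabled:

rtcUtils.getUserMediaWithConstraints(
    ['audio', 'video'],
    function (stream) { console.log('got stream', stream.id); },
    function (error) { console.error('gUM failed', error); },
    '720');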

RTPStatsCollector.js

@@ -14,10 +14,10 @@ function calculatePacketLoss(lostPackets, totalPackets) {
}
function getStatValue(item, name) {
if(!keyMap[RTC.browser][name])
if(!keyMap[RTC.getBrowserType()][name])
throw "The property isn't supported!";
var key = keyMap[RTC.browser][name];
return RTC.browser == "chrome"? item.stat(key) : item[key];
var key = keyMap[RTC.getBrowserType()][name];
return RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_CHROME? item.stat(key) : item[key];
}
/**
@@ -357,36 +357,36 @@ StatsCollector.prototype.logStats = function () {
this.statsToBeLogged.stats = {};
this.statsToBeLogged.timestamps = [];
};
var keyMap = {
"firefox": {
"ssrc": "ssrc",
"packetsReceived": "packetsReceived",
"packetsLost": "packetsLost",
"packetsSent": "packetsSent",
"bytesReceived": "bytesReceived",
"bytesSent": "bytesSent"
},
"chrome": {
"receiveBandwidth": "googAvailableReceiveBandwidth",
"sendBandwidth": "googAvailableSendBandwidth",
"remoteAddress": "googRemoteAddress",
"transportType": "googTransportType",
"localAddress": "googLocalAddress",
"activeConnection": "googActiveConnection",
"ssrc": "ssrc",
"packetsReceived": "packetsReceived",
"packetsSent": "packetsSent",
"packetsLost": "packetsLost",
"bytesReceived": "bytesReceived",
"bytesSent": "bytesSent",
"googFrameHeightReceived": "googFrameHeightReceived",
"googFrameWidthReceived": "googFrameWidthReceived",
"googFrameHeightSent": "googFrameHeightSent",
"googFrameWidthSent": "googFrameWidthSent",
"audioInputLevel": "audioInputLevel",
"audioOutputLevel": "audioOutputLevel"
}
var keyMap = {};
keyMap[RTCBrowserType.RTC_BROWSER_FIREFOX] = {
"ssrc": "ssrc",
"packetsReceived": "packetsReceived",
"packetsLost": "packetsLost",
"packetsSent": "packetsSent",
"bytesReceived": "bytesReceived",
"bytesSent": "bytesSent"
};
keyMap[RTCBrowserType.RTC_BROWSER_CHROME] = {
"receiveBandwidth": "googAvailableReceiveBandwidth",
"sendBandwidth": "googAvailableSendBandwidth",
"remoteAddress": "googRemoteAddress",
"transportType": "googTransportType",
"localAddress": "googLocalAddress",
"activeConnection": "googActiveConnection",
"ssrc": "ssrc",
"packetsReceived": "packetsReceived",
"packetsSent": "packetsSent",
"packetsLost": "packetsLost",
"bytesReceived": "bytesReceived",
"bytesSent": "bytesSent",
"googFrameHeightReceived": "googFrameHeightReceived",
"googFrameWidthReceived": "googFrameWidthReceived",
"googFrameHeightSent": "googFrameHeightSent",
"googFrameWidthSent": "googFrameWidthSent",
"audioInputLevel": "audioInputLevel",
"audioOutputLevel": "audioOutputLevel"
};
/**
* Stats processing logic.

statistics.js

@@ -4,7 +4,9 @@
var LocalStats = require("./LocalStatsCollector.js");
var RTPStats = require("./RTPStatsCollector.js");
var EventEmitter = require("events");
//var StreamEventTypes = require("../service/RTC/StreamEventTypes.js");
//These lines should be uncommented when require works in app.js
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
//var RTCBrowserType = require("../../service/RTC/RTCBrowserType");
//var XMPPEvents = require("../service/xmpp/XMPPEvents");
var eventEmitter = new EventEmitter();
@@ -13,8 +15,6 @@ var localStats = null;
var rtpStats = null;
var RTCService = null;
function stopLocal()
{
if(localStats)
@@ -49,6 +49,16 @@ function startRemoteStats (peerconnection) {
}
function onStreamCreated(stream)
{
if(stream.getAudioTracks().length === 0)
return;
localStats = new LocalStats(stream, 100, this,
eventEmitter);
localStats.start();
}
var statistics =
{
@@ -103,7 +113,7 @@ var statistics =
stopRemote();
},
onConfereceCreated: function (event) {
onConferenceCreated: function (event) {
startRemoteStats(event.peerconnection);
},
@@ -115,15 +125,11 @@ var statistics =
}
},
onStreamCreated: function(stream)
{
if(stream.getAudioTracks().length === 0)
return;
localStats = new LocalStats(stream, 100, this,
eventEmitter);
localStats.start();
start: function () {
RTC.addStreamListener(onStreamCreated,
StreamEventTypes.EVENT_TYPE_LOCAL_CREATED);
}
};
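With this change the statistics module subscribes itself to local stream creation instead of being fed streams from app.js; a one-line wiring sketch:

statistics.start(); // registers onStreamCreated for EVENT_TYPE_LOCAL_CREATED;
                    // streams carrying audio tracks then start a LocalStats collector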

service/RTC/MediaStreamTypes.js (new file)

@@ -0,0 +1,7 @@
var MediaStreamType = {
VIDEO_TYPE: "Video",
AUDIO_TYPE: "Audio"
};
////These lines should be uncommented when require works in app.js
//module.exports = MediaStreamType;

service/RTC/RTCBrowserType.js (new file)

@@ -0,0 +1,7 @@
var RTCBrowserType = {
RTC_BROWSER_CHROME: "rtc_browser.chrome",
RTC_BROWSER_FIREFOX: "rtc_browser.firefox"
};
//module.exports = RTCBrowserType;

service/RTC/StreamEventTypes.js (new file)

@@ -0,0 +1,12 @@
var StreamEventTypes = {
EVENT_TYPE_LOCAL_CREATED: "stream.local_created",
EVENT_TYPE_LOCAL_ENDED: "stream.local_ended",
EVENT_TYPE_REMOTE_CREATED: "stream.remote_created",
EVENT_TYPE_REMOTE_ENDED: "stream.remote_ended"
};
//These lines should be uncommented when require works in app.js
//module.exports = StreamEventTypes;

videolayout.js

@@ -20,6 +20,21 @@ var VideoLayout = (function (my) {
|| (lastNEndpointsCache && lastNEndpointsCache.indexOf(resource) !== -1);
};
my.onLocalStreamCreated = function (stream) {
switch(stream.type)
{
case "audio":
VideoLayout.changeLocalAudio(stream.getOriginalStream());
break;
case "video":
VideoLayout.changeLocalVideo(stream.getOriginalStream(), true);
break;
case "stream":
VideoLayout.changeLocalStream(stream.getOriginalStream());
break;
}
};
my.changeLocalStream = function (stream) {
connection.jingle.localAudio = stream;
VideoLayout.changeLocalVideo(stream, true);
@@ -1495,6 +1510,10 @@ var VideoLayout = (function (my) {
* On video muted event.
*/
$(document).bind('videomuted.muc', function (event, jid, isMuted) {
if(!RTC.muteRemoteVideoStream(jid, isMuted))
return;
Avatar.showUserAvatar(jid, isMuted);
var videoSpanId = null;
if (jid === connection.emuc.myroomjid) {
videoSpanId = 'localVideoContainer';
@@ -1674,7 +1693,7 @@ var VideoLayout = (function (my) {
console.log("Add to last N", resourceJid);
var jid = connection.emuc.findJidFromResource(resourceJid);
var mediaStream = mediaStreams[jid][MediaStream.VIDEO_TYPE];
var mediaStream = RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE];
var sel = $('#participant_' + resourceJid + '>video');
var videoStream = simulcast.getReceivingVideoStream(