More formatting fixes.
parent fd5a739f3c
commit 2c790f86ad
@@ -1,4 +1,4 @@
-/* global Strophe, focusedVideoSrc*/
+/* global config, APP, Strophe */
 
 // cache datachannels to avoid garbage collection
 // https://code.google.com/p/chromium/issues/detail?id=405545
@@ -8,19 +8,13 @@ var _dataChannels = [];
 var eventEmitter = null;
 
-
-
-
-var DataChannels =
-{
+var DataChannels = {
     /**
      * Callback triggered by PeerConnection when new data channel is opened
      * on the bridge.
     * @param event the event info object.
     */
-
-    onDataChannel: function (event)
-    {
+    onDataChannel: function (event) {
        var dataChannel = event.channel;
 
        dataChannel.onopen = function () {
@@ -65,8 +59,7 @@ var DataChannels =
                        dominantSpeakerEndpoint);
                    eventEmitter.emit(RTCEvents.DOMINANTSPEAKER_CHANGED, dominantSpeakerEndpoint);
                }
-               else if ("InLastNChangeEvent" === colibriClass)
-               {
+               else if ("InLastNChangeEvent" === colibriClass) {
                    var oldValue = obj.oldValue;
                    var newValue = obj.newValue;
                    // Make sure that oldValue and newValue are of type boolean.
@@ -89,15 +82,13 @@ var DataChannels =
 
                    eventEmitter.emit(RTCEvents.LASTN_CHANGED, oldValue, newValue);
                }
-               else if ("LastNEndpointsChangeEvent" === colibriClass)
-               {
-
+               else if ("LastNEndpointsChangeEvent" === colibriClass) {
                    // The new/latest list of last-n endpoint IDs.
                    var lastNEndpoints = obj.lastNEndpoints;
                    // The list of endpoint IDs which are entering the list of
                    // last-n at this time i.e. were not in the old list of last-n
                    // endpoint IDs.
                    var endpointsEnteringLastN = obj.endpointsEnteringLastN;
                    var stream = obj.stream;
 
                    console.log(
                        "Data channel new last-n event: ",
@@ -105,15 +96,13 @@ var DataChannels =
                    eventEmitter.emit(RTCEvents.LASTN_ENDPOINT_CHANGED,
                        lastNEndpoints, endpointsEnteringLastN, obj);
                }
-               else
-               {
+               else {
                    console.debug("Data channel JSON-formatted message: ", obj);
                }
            }
        };
 
-       dataChannel.onclose = function ()
-       {
+       dataChannel.onclose = function () {
            console.info("The Data Channel closed", dataChannel);
            var idx = _dataChannels.indexOf(dataChannel);
            if (idx > -1)
@@ -158,19 +147,15 @@ var DataChannels =
    },
    handleSelectedEndpointEvent: onSelectedEndpointChanged,
    handlePinnedEndpointEvent: onPinnedEndpointChanged
-
 };
 
-function onSelectedEndpointChanged(userResource)
-{
+function onSelectedEndpointChanged(userResource) {
    console.log('selected endpoint changed: ', userResource);
-   if (_dataChannels && _dataChannels.length != 0)
-   {
+   if (_dataChannels && _dataChannels.length != 0) {
        _dataChannels.some(function (dataChannel) {
-           if (dataChannel.readyState == 'open')
-           {
-               console.log('sending selected endpoint changed '
-                   + 'notification to the bridge: ', userResource);
+           if (dataChannel.readyState == 'open') {
+               console.log('sending selected endpoint changed ' +
+                   'notification to the bridge: ', userResource);
                dataChannel.send(JSON.stringify({
                    'colibriClass': 'SelectedEndpointChangedEvent',
                    'selectedEndpoint':
@@ -184,14 +169,11 @@ function onSelectedEndpointChanged(userResource)
    }
 }
 
-function onPinnedEndpointChanged(userResource)
-{
+function onPinnedEndpointChanged(userResource) {
    console.log('pinned endpoint changed: ', userResource);
-   if (_dataChannels && _dataChannels.length != 0)
-   {
+   if (_dataChannels && _dataChannels.length != 0) {
        _dataChannels.some(function (dataChannel) {
-           if (dataChannel.readyState == 'open')
-           {
+           if (dataChannel.readyState == 'open') {
                dataChannel.send(JSON.stringify({
                    'colibriClass': 'PinnedEndpointChangedEvent',
                    'pinnedEndpoint':

@@ -1,9 +1,8 @@
 /* global APP */
 var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
 var RTCEvents = require("../../service/RTC/RTCEvents");
 
 
-function LocalStream(stream, type, eventEmitter, videoType, isGUMStream)
-{
+function LocalStream(stream, type, eventEmitter, videoType, isGUMStream) {
    this.stream = stream;
    this.eventEmitter = eventEmitter;
    this.type = type;
@@ -12,33 +11,29 @@ function LocalStream(stream, type, eventEmitter, videoType, isGUMStream)
    if(isGUMStream === false)
        this.isGUMStream = isGUMStream;
    var self = this;
-   if(type == "audio")
-   {
+   if(type == "audio") {
        this.getTracks = function () {
            return self.stream.getAudioTracks();
        };
-   }
-   else
-   {
+   } else {
        this.getTracks = function () {
            return self.stream.getVideoTracks();
        };
    }
 
-   this.stream.onended = function()
-   {
+   this.stream.onended = function() {
        self.streamEnded();
    };
 }
 
 LocalStream.prototype.streamEnded = function () {
    this.eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_ENDED, this);
-}
+};
 
 LocalStream.prototype.getOriginalStream = function()
 {
    return this.stream;
-}
+};
 
 LocalStream.prototype.isAudioStream = function () {
    return this.type === "audio";
@@ -50,37 +45,29 @@ LocalStream.prototype.setMute = function (mute)
    var eventType = isAudio ? RTCEvents.AUDIO_MUTE : RTCEvents.VIDEO_MUTE;
 
    if ((window.location.protocol != "https:" && this.isGUMStream) ||
-       (isAudio && this.isGUMStream) || this.videoType === "screen")
-   {
+       (isAudio && this.isGUMStream) || this.videoType === "screen") {
        var tracks = this.getTracks();
 
        for (var idx = 0; idx < tracks.length; idx++) {
            tracks[idx].enabled = !mute;
        }
        this.eventEmitter.emit(eventType, mute);
-   }
-   else
-   {
+   } else {
        if (mute) {
            APP.xmpp.removeStream(this.stream);
            this.stream.stop();
            this.eventEmitter.emit(eventType, true);
-       }
-       else
-       {
+       } else {
            var self = this;
            APP.RTC.rtcUtils.obtainAudioAndVideoPermissions(
                (this.isAudioStream() ? ["audio"] : ["video"]),
                function (stream) {
-                   if (isAudio)
-                   {
+                   if (isAudio) {
                        APP.RTC.changeLocalAudio(stream,
                            function () {
                                self.eventEmitter.emit(eventType, false);
                            });
-                   }
-                   else
-                   {
+                   } else {
                        APP.RTC.changeLocalVideo(stream, false,
                            function () {
                                self.eventEmitter.emit(eventType, false);
@@ -109,6 +96,6 @@ LocalStream.prototype.isMuted = function () {
 
 LocalStream.prototype.getId = function () {
    return this.stream.getTracks()[0].id;
-}
+};
 
 module.exports = LocalStream;

@@ -1,6 +1,4 @@
-////These lines should be uncommented when require works in app.js
 var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
 var StreamEventType = require("../../service/RTC/StreamEventTypes");
-
 /**
  * Creates a MediaStream object for the given data, session id and ssrc.
@@ -37,13 +35,11 @@ function MediaStream(data, sid, ssrc, browser, eventEmitter) {
 }
 
 
-MediaStream.prototype.getOriginalStream = function()
-{
+MediaStream.prototype.getOriginalStream = function() {
    return this.stream;
 };
 
-MediaStream.prototype.setMute = function (value)
-{
+MediaStream.prototype.setMute = function (value) {
    this.stream.muted = value;
    this.muted = value;
 };

@@ -1,3 +1,4 @@
+/* global APP */
 var EventEmitter = require("events");
 var RTCBrowserType = require("./RTCBrowserType");
 var RTCUtils = require("./RTCUtils.js");
@@ -87,8 +88,7 @@ var RTC = {
        return localStream;
    },
    removeLocalStream: function (stream) {
-       for(var i = 0; i < this.localStreams.length; i++)
-       {
+       for(var i = 0; i < this.localStreams.length; i++) {
            if(this.localStreams[i].getOriginalStream() === stream) {
                delete this.localStreams[i];
                return;
@@ -173,8 +173,7 @@ var RTC = {
        var stream;
 
        if(this.remoteStreams[jid] &&
-           this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE])
-       {
+           this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE]) {
            stream = this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE];
        }
 
@@ -201,8 +200,7 @@ var RTC = {
        var oldStream = this.localVideo.getOriginalStream();
        var type = (isUsingScreenStream? "screen" : "video");
        var localCallback = callback;
-       if(this.localVideo.isMuted() && this.localVideo.videoType !== type)
-       {
+       if(this.localVideo.isMuted() && this.localVideo.videoType !== type) {
            localCallback = function() {
                APP.xmpp.setVideoMute(false, function(mute) {
                    eventEmitter.emit(RTCEvents.VIDEO_MUTE, mute);
@@ -236,10 +234,9 @@ var RTC = {
        if (jid === APP.xmpp.myJid()) {
            var localVideo = APP.RTC.localVideo;
            return (!localVideo || localVideo.isMuted());
-       }
-       else
-       {
-           if (!APP.RTC.remoteStreams[jid] || !APP.RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE]) {
+       } else {
+           if (!APP.RTC.remoteStreams[jid] ||
+               !APP.RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE]) {
                return null;
            }
            return APP.RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE].muted;

@@ -49,8 +49,8 @@ var RTCBrowserType = {
    },
 
    usesPlanB: function() {
-       return RTCBrowserType.isChrome() || RTCBrowserType.isOpera()
-           || RTCBrowserType.isTemasysPluginUsed();
+       return RTCBrowserType.isChrome() || RTCBrowserType.isOpera() ||
+           RTCBrowserType.isTemasysPluginUsed();
    },
 
    usesUnifiedPlan: function() {

@@ -1,3 +1,4 @@
+/* global config, require, attachMediaStream, getUserMedia */
 var RTCBrowserType = require("./RTCBrowserType");
 var Resolutions = require("../../service/RTC/Resolutions");
 var AdapterJS = require("./adapter.screenshare");
@@ -11,11 +12,9 @@ function getPreviousResolution(resolution) {
    var order = Resolutions[resolution].order;
    var res = null;
    var resName = null;
-   for(var i in Resolutions)
-   {
+   for(var i in Resolutions) {
        var tmp = Resolutions[i];
-       if(res == null || (res.order < tmp.order && tmp.order < order))
-       {
+       if(res == null || (res.order < tmp.order && tmp.order < order)) {
            resName = i;
            res = tmp;
        }
@@ -23,19 +22,17 @@ function getPreviousResolution(resolution) {
    return resName;
 }
 
-function setResolutionConstraints(constraints, resolution, isAndroid)
-{
+function setResolutionConstraints(constraints, resolution, isAndroid) {
    if (resolution && !constraints.video || isAndroid) {
-       constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
+       // same behaviour as true
+       constraints.video = { mandatory: {}, optional: [] };
    }
 
-   if(Resolutions[resolution])
-   {
+   if(Resolutions[resolution]) {
        constraints.video.mandatory.minWidth = Resolutions[resolution].width;
        constraints.video.mandatory.minHeight = Resolutions[resolution].height;
    }
-   else
-   {
+   else {
        if (isAndroid) {
            constraints.video.mandatory.minWidth = 320;
            constraints.video.mandatory.minHeight = 240;
@@ -44,9 +41,11 @@ function setResolutionConstraints(constraints, resolution, isAndroid)
    }
 
    if (constraints.video.mandatory.minWidth)
-       constraints.video.mandatory.maxWidth = constraints.video.mandatory.minWidth;
+       constraints.video.mandatory.maxWidth =
+           constraints.video.mandatory.minWidth;
    if (constraints.video.mandatory.minHeight)
-       constraints.video.mandatory.maxHeight = constraints.video.mandatory.minHeight;
+       constraints.video.mandatory.maxHeight =
+           constraints.video.mandatory.minHeight;
 }
 
 function getConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid)
@@ -54,10 +53,12 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid
    var constraints = {audio: false, video: false};
 
    if (um.indexOf('video') >= 0) {
-       constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
+       // same behaviour as true
+       constraints.video = { mandatory: {}, optional: [] };
    }
    if (um.indexOf('audio') >= 0) {
-       constraints.audio = { mandatory: {}, optional: []};// same behaviour as true
+       // same behaviour as true
+       constraints.audio = { mandatory: {}, optional: []};
    }
    if (um.indexOf('screen') >= 0) {
        if (RTCBrowserType.isChrome()) {
@@ -126,13 +127,20 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid
        setResolutionConstraints(constraints, resolution, isAndroid);
    }
 
-   if (bandwidth) { // doesn't work currently, see webrtc issue 1846
-       if (!constraints.video) constraints.video = {mandatory: {}, optional: []};//same behaviour as true
+   if (bandwidth) {
+       if (!constraints.video) {
+           //same behaviour as true
+           constraints.video = {mandatory: {}, optional: []};
+       }
        constraints.video.optional.push({bandwidth: bandwidth});
    }
-   if (fps) { // for some cameras it might be necessary to request 30fps
+   if (fps) {
+       // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
-       if (!constraints.video) constraints.video = {mandatory: {}, optional: []};// same behaviour as true;
+       if (!constraints.video) {
+           // same behaviour as true;
+           constraints.video = {mandatory: {}, optional: []};
+       }
        constraints.video.mandatory.minFrameRate = fps;
    }
 
@@ -167,8 +175,7 @@ function RTCUtils(RTCService, onTemasysPluginReady)
        var id = stream.id;
        if (!id) {
            var tracks = stream.getVideoTracks();
-           if (!tracks || tracks.length === 0)
-           {
+           if (!tracks || tracks.length === 0) {
                tracks = stream.getAudioTracks();
            }
            id = tracks[0].id;
@@ -258,7 +265,6 @@ function RTCUtils(RTCService, onTemasysPluginReady)
            console.warn("Attempt to get video SRC of null element");
            return null;
        }
-       var src = null;
        var children = element.children;
        for (var i = 0; i !== children.length; ++i) {
            if (children[i].name === 'streamId') {
@@ -293,8 +299,7 @@ function RTCUtils(RTCService, onTemasysPluginReady)
 
 RTCUtils.prototype.getUserMediaWithConstraints = function(
    um, success_callback, failure_callback, resolution,bandwidth, fps,
-   desktopStream)
-{
+   desktopStream) {
    currentResolution = resolution;
    // Check if we are running on Android device
    var isAndroid = navigator.userAgent.indexOf('Android') != -1;
@@ -331,16 +336,14 @@ RTCUtils.prototype.getUserMediaWithConstraints = function(
 
 RTCUtils.prototype.setAvailableDevices = function (um, available) {
    var devices = {};
-   if(um.indexOf("video") != -1)
-   {
+   if(um.indexOf("video") != -1) {
        devices.video = available;
    }
-   if(um.indexOf("audio") != -1)
-   {
+   if(um.indexOf("audio") != -1) {
        devices.audio = available;
    }
    this.service.setDeviceAvailability(devices);
-}
+};
 
 /**
  * We ask for audio and video combined stream in order to get permissions and
@@ -366,8 +369,7 @@ RTCUtils.prototype.obtainAudioAndVideoPermissions =
 
 
    if(usageOptions)
-       for(var i = 0; i < devices.length; i++)
-       {
+       for(var i = 0; i < devices.length; i++) {
            var device = devices[i];
            if(usageOptions[device] === true)
                newDevices.push(device);
@@ -375,8 +377,7 @@ RTCUtils.prototype.obtainAudioAndVideoPermissions =
    else
        newDevices = devices;
 
-   if(newDevices.length === 0)
-   {
+   if(newDevices.length === 0) {
        successCallback();
        return;
    }
@@ -437,7 +438,6 @@ RTCUtils.prototype.obtainAudioAndVideoPermissions =
            },
            config.resolution || '360');
    }
-
 };
 
 RTCUtils.prototype.successCallback = function (stream, usageOptions) {
@@ -467,8 +467,7 @@ RTCUtils.prototype.errorCallback = function (error) {
                return self.errorCallback(error);
            }, resolution);
    }
-   else
-   {
+   else {
        self.getUserMediaWithConstraints(
            ['audio'],
            function (stream) {
@@ -481,11 +480,9 @@ RTCUtils.prototype.errorCallback = function (error) {
            }
        );
    }
-}
-
+};
 
-RTCUtils.prototype.handleLocalStream = function(stream, usageOptions)
-{
+RTCUtils.prototype.handleLocalStream = function(stream, usageOptions) {
    // If this is FF, the stream parameter is *not* a MediaStream object, it's
    // an object with two properties: audioStream, videoStream.
    var audioStream, videoStream;
@@ -538,8 +535,8 @@ function DummyMediaStream(id) {
    this.id = id;
    this.label = id;
    this.stop = function() { };
-   this.getAudioTracks = function() { return []; }
-   this.getVideoTracks = function() { return []; }
+   this.getAudioTracks = function() { return []; };
+   this.getVideoTracks = function() { return []; };
 }
 
 RTCUtils.prototype.createStream = function(stream, isVideo) {
@@ -549,7 +546,7 @@ RTCUtils.prototype.createStream = function(stream, isVideo) {
    if (newStream) {
        var tracks = (isVideo ? stream.getVideoTracks() : stream.getAudioTracks());
 
-       for (i = 0; i < tracks.length; i++) {
+       for (var i = 0; i < tracks.length; i++) {
            newStream.addTrack(tracks[i]);
        }
    }
@@ -560,7 +557,8 @@ RTCUtils.prototype.createStream = function(stream, isVideo) {
        if (stream) {
            newStream = stream;
        } else {
-           newStream = new DummyMediaStream(isVideo ? "dummyVideo" : "dummyAudio");
+           newStream =
+               new DummyMediaStream(isVideo ? "dummyVideo" : "dummyAudio");
        }
    }