var RTCBrowserType = require("../../service/RTC/RTCBrowserType.js");
var Resolutions = require("../../service/RTC/Resolutions");

var currentResolution = null;

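/**
 * Looks up the resolution one step below the given one in the Resolutions
 * table, i.e. the next lower resolution to fall back to when getUserMedia
 * rejects the requested one. Returns null when the resolution is unknown or
 * nothing lower exists.
 *
 * Illustrative example (actual names and order values come from
 * service/RTC/Resolutions):
 *   getPreviousResolution('720'); // -> e.g. '360'
 */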
function getPreviousResolution(resolution) {
    if (!Resolutions[resolution])
        return null;
    var order = Resolutions[resolution].order;
    var res = null;
    var resName = null;
    for (var i in Resolutions)
    {
        var tmp = Resolutions[i];
        // Keep the highest-order resolution that is still below the
        // requested one.
        if ((res == null || res.order < tmp.order) && tmp.order < order)
        {
            resName = i;
            res = tmp;
        }
    }
    return resName;
}

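/**
 * Fills in the video resolution part of the getUserMedia constraints. For a
 * known resolution the min dimensions are taken from the Resolutions table
 * and then mirrored into the max dimensions; on Android a 320x240, 15 fps
 * fallback is used when the resolution is unknown.
 *
 * Illustrative result, assuming Resolutions['360'] is 640x360:
 *   setResolutionConstraints(constraints, '360', false);
 *   // constraints.video.mandatory ->
 *   //   { minWidth: 640, minHeight: 360, maxWidth: 640, maxHeight: 360 }
 */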
function setResolutionConstraints(constraints, resolution, isAndroid)
{
    if ((resolution && !constraints.video) || isAndroid) {
        constraints.video = { mandatory: {}, optional: [] }; // same behaviour as true
    }

    if (Resolutions[resolution])
    {
        constraints.video.mandatory.minWidth = Resolutions[resolution].width;
        constraints.video.mandatory.minHeight = Resolutions[resolution].height;
    }
    else
    {
        if (isAndroid) {
            constraints.video.mandatory.minWidth = 320;
            constraints.video.mandatory.minHeight = 240;
            constraints.video.mandatory.maxFrameRate = 15;
        }
    }

    if (constraints.video.mandatory.minWidth)
        constraints.video.mandatory.maxWidth = constraints.video.mandatory.minWidth;
    if (constraints.video.mandatory.minHeight)
        constraints.video.mandatory.maxHeight = constraints.video.mandatory.minHeight;
}

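/**
 * Builds the getUserMedia constraints object for the requested media types
 * ('audio', 'video', 'screen', 'desktop'), resolution, bandwidth and frame
 * rate. Screen/desktop capture is requested through the legacy
 * chromeMediaSource constraints.
 *
 * Illustrative output, assuming Resolutions['360'] maps to 640x360:
 *   getConstraints(['audio', 'video'], '360', null, null, null, false);
 *   // -> {
 *   //   audio: { mandatory: {}, optional: [{googEchoCancellation: true}, ...] },
 *   //   video: {
 *   //     mandatory: { minWidth: 640, minHeight: 360,
 *   //                  maxWidth: 640, maxHeight: 360 },
 *   //     optional: [{googNoiseReduction: false}, {googLeakyBucket: true}]
 *   //   }
 *   // }
 */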
function getConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid)
{
    var constraints = {audio: false, video: false};

    if (um.indexOf('video') >= 0) {
        constraints.video = { mandatory: {}, optional: [] }; // same behaviour as true
    }
    if (um.indexOf('audio') >= 0) {
        constraints.audio = { mandatory: {}, optional: [] }; // same behaviour as true
    }
    if (um.indexOf('screen') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "screen",
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }
    if (um.indexOf('desktop') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: desktopStream,
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }

    if (constraints.audio) {
        // if it is good enough for hangouts...
        constraints.audio.optional.push(
            {googEchoCancellation: true},
            {googAutoGainControl: true},
            {googNoiseSuppression: true},
            {googHighpassFilter: true},
            {googNoiseSuppression2: true},
            {googEchoCancellation2: true},
            {googAutoGainControl2: true}
        );
    }
    if (constraints.video) {
        constraints.video.optional.push(
            {googNoiseReduction: false} // chrome 37 workaround for issue 3807, reenable in M38
        );
        if (um.indexOf('video') >= 0) {
            constraints.video.optional.push(
                {googLeakyBucket: true}
            );
        }
    }

    if (um.indexOf('video') >= 0) {
        setResolutionConstraints(constraints, resolution, isAndroid);
    }

    if (bandwidth) { // doesn't work currently, see webrtc issue 1846
        if (!constraints.video) constraints.video = {mandatory: {}, optional: []}; // same behaviour as true
        constraints.video.optional.push({bandwidth: bandwidth});
    }
    if (fps) { // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
        if (!constraints.video) constraints.video = {mandatory: {}, optional: []}; // same behaviour as true
        constraints.video.mandatory.minFrameRate = fps;
    }

    return constraints;
}

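/**
 * Browser abstraction layer for WebRTC. Detects Firefox (mozGetUserMedia) or
 * Chrome (webkitGetUserMedia) and installs the matching peerconnection,
 * getUserMedia and stream helpers on this instance. Firefox is only used
 * when its version is >= 40 and simulcast is disabled with bundle and
 * rtcp-mux enabled; unsupported browsers are redirected to
 * unsupported_browser.html.
 */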
function RTCUtils(RTCService)
{
    this.service = RTCService;
    if (navigator.mozGetUserMedia) {
        console.log('This appears to be Firefox');
        var version = parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
        if (version >= 40
            && !config.enableSimulcast && config.useBundle && config.useRtcpMux) {
            this.peerconnection = mozRTCPeerConnection;
            this.browser = RTCBrowserType.RTC_BROWSER_FIREFOX;
            this.getUserMedia = navigator.mozGetUserMedia.bind(navigator);
            this.pc_constraints = {};
            this.attachMediaStream = function (element, stream) {
                // srcObject is being standardized and FF will eventually
                // support that unprefixed. FF also supports the
                // "element.src = URL.createObjectURL(...)" combo, but that
                // will be deprecated in favour of srcObject.
                //
                // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
                // https://github.com/webrtc/samples/issues/302
                if (!element[0])
                    return;
                element[0].mozSrcObject = stream;
                element[0].play();
            };
            this.getStreamID = function (stream) {
                var tracks = stream.getVideoTracks();
                if (!tracks || tracks.length == 0)
                {
                    tracks = stream.getAudioTracks();
                }
                return tracks[0].id.replace(/[\{,\}]/g, "");
            };
            this.getVideoSrc = function (element) {
                if (!element)
                    return null;
                return element.mozSrcObject;
            };
            this.setVideoSrc = function (element, src) {
                if (element)
                    element.mozSrcObject = src;
            };
            RTCSessionDescription = mozRTCSessionDescription;
            RTCIceCandidate = mozRTCIceCandidate;
        } else {
            window.location.href = 'unsupported_browser.html';
            return;
        }

    } else if (navigator.webkitGetUserMedia) {
        console.log('This appears to be Chrome');
        this.peerconnection = webkitRTCPeerConnection;
        this.browser = RTCBrowserType.RTC_BROWSER_CHROME;
        this.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
        this.attachMediaStream = function (element, stream) {
            element.attr('src', webkitURL.createObjectURL(stream));
        };
        this.getStreamID = function (stream) {
            // streams from FF endpoints have the characters '{' and '}'
            // that make jQuery choke.
            return stream.id.replace(/[\{,\}]/g, "");
        };
        this.getVideoSrc = function (element) {
            if (!element)
                return null;
            return element.getAttribute("src");
        };
        this.setVideoSrc = function (element, src) {
            if (element)
                element.setAttribute("src", src);
        };
        // DTLS should now be enabled by default but..
        this.pc_constraints = {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]};
        if (navigator.userAgent.indexOf('Android') != -1) {
            this.pc_constraints = {}; // disable DTLS on Android
        }
        if (!webkitMediaStream.prototype.getVideoTracks) {
            webkitMediaStream.prototype.getVideoTracks = function () {
                return this.videoTracks;
            };
        }
        if (!webkitMediaStream.prototype.getAudioTracks) {
            webkitMediaStream.prototype.getAudioTracks = function () {
                return this.audioTracks;
            };
        }
    }
    else
    {
        try { console.log('Browser does not appear to be WebRTC-capable'); } catch (e) { }

        window.location.href = 'unsupported_browser.html';
        return;
    }
}

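/**
 * Wraps getUserMedia: builds constraints for the requested media types and,
 * depending on config.enableSimulcast, routes the request either through
 * APP.simulcast.getUserMedia or the native getUserMedia.
 *
 * Illustrative call (rtcUtils stands for an RTCUtils instance; the callbacks
 * are placeholders):
 *   rtcUtils.getUserMediaWithConstraints(
 *       ['audio', 'video'],
 *       function (stream) { console.log('got stream', stream); },
 *       function (error) { console.error(error); },
 *       '360');
 */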
RTCUtils.prototype.getUserMediaWithConstraints = function(
    um, success_callback, failure_callback, resolution, bandwidth, fps,
    desktopStream)
{
    currentResolution = resolution;
    // Check if we are running on Android device
    var isAndroid = navigator.userAgent.indexOf('Android') != -1;

    var constraints = getConstraints(
        um, resolution, bandwidth, fps, desktopStream, isAndroid);

    var isFF = navigator.userAgent.toLowerCase().indexOf('firefox') > -1;

    var self = this;

    try {
        // chromeMediaSource is set under constraints.video.mandatory (see
        // getConstraints), so screen/desktop capture is checked there.
        if (config.enableSimulcast
            && constraints.video
            && constraints.video.mandatory.chromeMediaSource !== 'screen'
            && constraints.video.mandatory.chromeMediaSource !== 'desktop'
            && !isAndroid

            // We currently do not support FF, as it doesn't have multistream support.
            && !isFF) {
            APP.simulcast.getUserMedia(constraints, function (stream) {
                    console.log('onUserMediaSuccess');
                    self.setAvailableDevices(um, true);
                    success_callback(stream);
                },
                function (error) {
                    console.warn('Failed to get access to local media. Error ', error);
                    self.setAvailableDevices(um, false);
                    if (failure_callback) {
                        failure_callback(error);
                    }
                });
        } else {

            this.getUserMedia(constraints,
                function (stream) {
                    console.log('onUserMediaSuccess');
                    self.setAvailableDevices(um, true);
                    success_callback(stream);
                },
                function (error) {
                    self.setAvailableDevices(um, false);
                    console.warn('Failed to get access to local media. Error ',
                        error, constraints);
                    if (failure_callback) {
                        failure_callback(error);
                    }
                });

        }
    } catch (e) {
        console.error('GUM failed: ', e);
        if (failure_callback) {
            failure_callback(e);
        }
    }
};

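/**
 * Reports to the RTC service whether the requested device kinds ('audio',
 * 'video') could actually be acquired, so that missing camera or microphone
 * can be reflected in the UI.
 */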
RTCUtils.prototype.setAvailableDevices = function (um, available) {
    var devices = {};
    if (um.indexOf("video") != -1)
    {
        devices.video = available;
    }
    if (um.indexOf("audio") != -1)
    {
        devices.audio = available;
    }
    this.service.setDeviceAvailability(devices);
};

/**
 * Requests a combined audio and video stream so that the user is asked for
 * device permissions only once.
 */
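// Illustrative call (rtcUtils stands for an RTCUtils instance; devices and
// usageOptions are optional, and with no callback the stream is routed
// through successCallback/handleLocalStream):
//   rtcUtils.obtainAudioAndVideoPermissions(
//       ['audio', 'video'], null, { audio: true, video: true });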
RTCUtils.prototype.obtainAudioAndVideoPermissions =
    function(devices, callback, usageOptions)
{
    var self = this;
    // Get AV

    var successCallback = function (stream) {
        if (callback)
            callback(stream, usageOptions);
        else
            self.successCallback(stream, usageOptions);
    };

    if (!devices)
        devices = ['audio', 'video'];

    var newDevices = [];

    if (usageOptions) {
        for (var i = 0; i < devices.length; i++)
        {
            var device = devices[i];
            if (usageOptions[device] === true)
                newDevices.push(device);
        }
    }
    else {
        newDevices = devices;
    }

    if (newDevices.length === 0)
    {
        successCallback();
        return;
    }

    if (navigator.mozGetUserMedia) {

        // With FF we can't split the stream into audio and video because FF
        // doesn't support media stream constructors. So, we need to get the
        // audio stream separately from the video stream using two distinct GUM
        // calls. Not very user friendly :-( but we don't have many other
        // options either.
        //
        // Note that we pack those 2 streams in a single object and pass it to
        // the successCallback method.

        self.getUserMediaWithConstraints(
            ['audio'],
            function (audioStream) {
                self.getUserMediaWithConstraints(
                    ['video'],
                    function (videoStream) {
                        return self.successCallback({
                            audioStream: audioStream,
                            videoStream: videoStream
                        });
                    },
                    function (error) {
                        console.error('failed to obtain video stream - stop',
                            error);
                        return self.successCallback(null);
                    },
                    config.resolution || '360');
            },
            function (error) {
                console.error('failed to obtain audio stream - stop',
                    error);
                return self.successCallback(null);
            }
        );
    } else {
        this.getUserMediaWithConstraints(
            newDevices,
            function (stream) {
                successCallback(stream);
            },
            function (error) {
                self.errorCallback(error);
            },
            config.resolution || '360');
    }

};

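/**
 * Default handler for a successful getUserMedia call. Note that on Firefox
 * the stream parameter is an {audioStream, videoStream} pair rather than a
 * single MediaStream (see obtainAudioAndVideoPermissions).
 */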
RTCUtils.prototype.successCallback = function (stream, usageOptions) {
    // If this is FF, the stream parameter is *not* a MediaStream object, it's
    // an object with two properties: audioStream, videoStream.
    if (stream && !navigator.mozGetUserMedia)
        console.log('got', stream, stream.getAudioTracks().length,
            stream.getVideoTracks().length);
    this.handleLocalStream(stream, usageOptions);
};

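/**
 * Handles getUserMedia failures. If the error is a resolution-related
 * constraint error, the request is retried with the next lower resolution
 * (see getPreviousResolution); otherwise it falls back to an audio-only
 * request and, finally, to a null stream.
 */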
RTCUtils.prototype.errorCallback = function (error) {
    var self = this;
    console.error('failed to obtain audio/video stream - trying audio only', error);
    var resolution = getPreviousResolution(currentResolution);
    if (typeof error == "object" && error.constraintName && error.name
        && (error.name == "ConstraintNotSatisfiedError" ||
            error.name == "OverconstrainedError") &&
        (error.constraintName == "minWidth" || error.constraintName == "maxWidth" ||
            error.constraintName == "minHeight" || error.constraintName == "maxHeight")
        && resolution != null)
    {
        self.getUserMediaWithConstraints(['audio', 'video'],
            function (stream) {
                return self.successCallback(stream);
            }, function (error) {
                return self.errorCallback(error);
            }, resolution);
    }
    else
    {
        self.getUserMediaWithConstraints(
            ['audio'],
            function (stream) {
                return self.successCallback(stream);
            },
            function (error) {
                console.error('failed to obtain audio/video stream - stop',
                    error);
                return self.successCallback(null);
            }
        );
    }

};

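/**
 * Splits the acquired media into separate audio and video streams and hands
 * them to the RTC service as local streams, together with the muted/GUM
 * flags derived from usageOptions. On Firefox the input is already an
 * {audioStream, videoStream} pair.
 */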
RTCUtils.prototype.handleLocalStream = function(stream, usageOptions)
{
    // If this is FF, the stream parameter is *not* a MediaStream object, it's
    // an object with two properties: audioStream, videoStream.
    var audioStream, videoStream;
    if (window.webkitMediaStream)
    {
        audioStream = new webkitMediaStream();
        videoStream = new webkitMediaStream();
        if (stream) {
            var audioTracks = stream.getAudioTracks();

            for (var i = 0; i < audioTracks.length; i++) {
                audioStream.addTrack(audioTracks[i]);
            }

            var videoTracks = stream.getVideoTracks();

            for (i = 0; i < videoTracks.length; i++) {
                videoStream.addTrack(videoTracks[i]);
            }
        }
    }
    else
    { // firefox
        // stream may be null when GUM failed (successCallback(null)); guard
        // against a TypeError here.
        if (stream) {
            audioStream = stream.audioStream;
            videoStream = stream.videoStream;
        }
    }

    var audioMuted = (usageOptions && usageOptions.audio === false),
        videoMuted = (usageOptions && usageOptions.video === false);

    var audioGUM = (!usageOptions || usageOptions.audio !== false),
        videoGUM = (!usageOptions || usageOptions.video !== false);

    this.service.createLocalStream(audioStream, "audio", null, null,
        audioMuted, audioGUM);

    this.service.createLocalStream(videoStream, "video", null, null,
        videoMuted, videoGUM);
};

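/**
 * Extracts either the video or the audio tracks of the given stream into a
 * new webkitMediaStream (Chrome); on other browsers the original stream is
 * returned as-is.
 *
 * Illustrative use (rtcUtils and localStream are placeholders):
 *   var videoOnly = rtcUtils.createStream(localStream, true);
 */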
RTCUtils.prototype.createStream = function(stream, isVideo)
{
    var newStream = null;
    if (window.webkitMediaStream)
    {
        newStream = new webkitMediaStream();
        if (newStream)
        {
            var tracks = (isVideo ? stream.getVideoTracks() : stream.getAudioTracks());

            for (var i = 0; i < tracks.length; i++) {
                newStream.addTrack(tracks[i]);
            }
        }

    }
    else
        newStream = stream;

    return newStream;
};

module.exports = RTCUtils;