// jitsi-meet/modules/RTC/RTCUtils.js


/* global APP, config, require, attachMediaStream, getUserMedia,
RTCPeerConnection, webkitMediaStream, webkitURL, webkitRTCPeerConnection,
mozRTCIceCandidate, mozRTCSessionDescription, mozRTCPeerConnection */
/* jshint -W101 */
var RTCBrowserType = require("./RTCBrowserType");
var Resolutions = require("../../service/RTC/Resolutions");
var AdapterJS = require("./adapter.screenshare");
var currentResolution = null;
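/**
 * Returns the name of the resolution from the Resolutions map with the
 * highest order that is still lower than the order of the given resolution,
 * or null if the given resolution is unknown or no lower resolution exists.
 */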
function getPreviousResolution(resolution) {
    if (!Resolutions[resolution])
        return null;
    var order = Resolutions[resolution].order;
    var res = null;
    var resName = null;
    for (var i in Resolutions) {
        var tmp = Resolutions[i];
        // Pick the resolution with the highest order that is still lower than
        // the requested one.
        if ((!res || res.order < tmp.order) && tmp.order < order) {
            resName = i;
            res = tmp;
        }
    }
    return resName;
}
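/**
 * Adds minimum/maximum width and height (and, on Android, a frame rate cap)
 * to the video part of the given getUserMedia constraints, based on the
 * requested resolution name from the Resolutions map.
 */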
function setResolutionConstraints(constraints, resolution) {
    var isAndroid = RTCBrowserType.isAndroid();
    if (Resolutions[resolution]) {
        constraints.video.mandatory.minWidth = Resolutions[resolution].width;
        constraints.video.mandatory.minHeight = Resolutions[resolution].height;
    }
    else if (isAndroid) {
        // FIXME it is unclear whether the purpose of this was to always
        // request low resolution on Android; if so, it should be moved up
        // front.
        constraints.video.mandatory.minWidth = 320;
        constraints.video.mandatory.minHeight = 240;
        constraints.video.mandatory.maxFrameRate = 15;
    }

    if (constraints.video.mandatory.minWidth)
        constraints.video.mandatory.maxWidth =
            constraints.video.mandatory.minWidth;

    if (constraints.video.mandatory.minHeight)
        constraints.video.mandatory.maxHeight =
            constraints.video.mandatory.minHeight;
}
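/**
 * Builds the getUserMedia constraints object for the requested media types
 * ('audio', 'video', 'screen' and/or 'desktop'). resolution, bandwidth and
 * fps are optional hints; desktopStream is the chromeMediaSourceId used for
 * desktop sharing in Chrome.
 */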
function getConstraints(um, resolution, bandwidth, fps, desktopStream) {
var constraints = {audio: false, video: false};
if (um.indexOf('video') >= 0) {
// same behaviour as true
constraints.video = { mandatory: {}, optional: [] };
constraints.video.optional.push({ googLeakyBucket: true });
setResolutionConstraints(constraints, resolution);
}
if (um.indexOf('audio') >= 0) {
if (!RTCBrowserType.isFirefox()) {
// same behaviour as true
constraints.audio = { mandatory: {}, optional: []};
// if it is good enough for hangouts...
constraints.audio.optional.push(
{googEchoCancellation: true},
{googAutoGainControl: true},
                {googNoiseSuppression: true},
{googHighpassFilter: true},
                {googNoiseSuppression2: true},
{googEchoCancellation2: true},
{googAutoGainControl2: true}
);
} else {
constraints.audio = true;
}
}
if (um.indexOf('screen') >= 0) {
if (RTCBrowserType.isChrome()) {
constraints.video = {
mandatory: {
chromeMediaSource: "screen",
googLeakyBucket: true,
maxWidth: window.screen.width,
maxHeight: window.screen.height,
maxFrameRate: 3
},
optional: []
};
} else if (RTCBrowserType.isTemasysPluginUsed()) {
constraints.video = {
optional: [
{
sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
}
]
};
} else if (RTCBrowserType.isFirefox()) {
constraints.video = {
mozMediaSource: "window",
mediaSource: "window"
};
} else {
            console.error(
                "'screen' WebRTC media source is only supported in Chrome," +
                " Firefox and with the Temasys plugin");
}
}
if (um.indexOf('desktop') >= 0) {
constraints.video = {
mandatory: {
chromeMediaSource: "desktop",
chromeMediaSourceId: desktopStream,
googLeakyBucket: true,
maxWidth: window.screen.width,
maxHeight: window.screen.height,
maxFrameRate: 3
},
optional: []
};
}
if (bandwidth) {
if (!constraints.video) {
//same behaviour as true
constraints.video = {mandatory: {}, optional: []};
}
constraints.video.optional.push({bandwidth: bandwidth});
}
if (fps) {
// for some cameras it might be necessary to request 30fps
// so they choose 30fps mjpg over 10fps yuy2
if (!constraints.video) {
// same behaviour as true;
constraints.video = {mandatory: {}, optional: []};
}
constraints.video.mandatory.minFrameRate = fps;
}
    // We request fake audio together with video: the fake audio and video
    // devices seem to work only when both are enabled in a single
    // getUserMedia call; fake audio cannot be obtained separately from fake
    // video. This may later become a problem for some of the tests.
if(RTCBrowserType.isFirefox() && config.firefox_fake_device)
{
constraints.audio = true;
constraints.fake = true;
}
return constraints;
}
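/**
 * Sets up the WebRTC glue (getUserMedia, RTCPeerConnection, stream attach
 * helpers) for the detected browser: Firefox >= 40, Chrome/Opera, or
 * IE/Safari via the Temasys plugin. Redirects to unsupported_browser.html
 * for unsupported browsers.
 */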
function RTCUtils(RTCService, onTemasysPluginReady)
{
var self = this;
this.service = RTCService;
if (RTCBrowserType.isFirefox()) {
var FFversion = RTCBrowserType.getFirefoxVersion();
if (FFversion >= 40) {
this.peerconnection = mozRTCPeerConnection;
this.getUserMedia = navigator.mozGetUserMedia.bind(navigator);
this.pc_constraints = {};
this.attachMediaStream = function (element, stream) {
// srcObject is being standardized and FF will eventually
// support that unprefixed. FF also supports the
// "element.src = URL.createObjectURL(...)" combo, but that
// will be deprecated in favour of srcObject.
//
// https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
// https://github.com/webrtc/samples/issues/302
if(!element[0])
return;
element[0].mozSrcObject = stream;
element[0].play();
};
this.getStreamID = function (stream) {
var id = stream.id;
if (!id) {
var tracks = stream.getVideoTracks();
if (!tracks || tracks.length === 0) {
tracks = stream.getAudioTracks();
}
id = tracks[0].id;
}
return APP.xmpp.filter_special_chars(id);
};
this.getVideoSrc = function (element) {
if(!element)
return null;
return element.mozSrcObject;
};
this.setVideoSrc = function (element, src) {
if(element)
element.mozSrcObject = src;
};
window.RTCSessionDescription = mozRTCSessionDescription;
window.RTCIceCandidate = mozRTCIceCandidate;
} else {
console.error(
"Firefox version too old: " + FFversion + ". Required >= 40.");
window.location.href = 'unsupported_browser.html';
return;
}
} else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) {
this.peerconnection = webkitRTCPeerConnection;
this.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
this.attachMediaStream = function (element, stream) {
element.attr('src', webkitURL.createObjectURL(stream));
};
this.getStreamID = function (stream) {
// streams from FF endpoints have the characters '{' and '}'
// that make jQuery choke.
return APP.xmpp.filter_special_chars(stream.id);
};
this.getVideoSrc = function (element) {
if(!element)
return null;
return element.getAttribute("src");
};
this.setVideoSrc = function (element, src) {
if(element)
element.setAttribute("src", src);
};
// DTLS should now be enabled by default but..
this.pc_constraints = {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]};
if (RTCBrowserType.isAndroid()) {
this.pc_constraints = {}; // disable DTLS on Android
}
if (!webkitMediaStream.prototype.getVideoTracks) {
webkitMediaStream.prototype.getVideoTracks = function () {
return this.videoTracks;
};
}
if (!webkitMediaStream.prototype.getAudioTracks) {
webkitMediaStream.prototype.getAudioTracks = function () {
return this.audioTracks;
};
}
}
// Detect IE/Safari
else if (RTCBrowserType.isTemasysPluginUsed()) {
//AdapterJS.WebRTCPlugin.setLogLevel(
// AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
AdapterJS.webRTCReady(function (isPlugin) {
self.peerconnection = RTCPeerConnection;
self.getUserMedia = getUserMedia;
self.attachMediaStream = function (elSel, stream) {
if (stream.id === "dummyAudio" || stream.id === "dummyVideo") {
return;
}
attachMediaStream(elSel[0], stream);
};
self.getStreamID = function (stream) {
var id = APP.xmpp.filter_special_chars(stream.label);
return id;
};
self.getVideoSrc = function (element) {
if (!element) {
console.warn("Attempt to get video SRC of null element");
return null;
}
var children = element.children;
for (var i = 0; i !== children.length; ++i) {
if (children[i].name === 'streamId') {
return children[i].value;
}
}
//console.info(element.id + " SRC: " + src);
return null;
};
self.setVideoSrc = function (element, src) {
//console.info("Set video src: ", element, src);
if (!src) {
console.warn("Not attaching video stream, 'src' is null");
return;
}
AdapterJS.WebRTCPlugin.WaitForPluginReady();
var stream = AdapterJS.WebRTCPlugin.plugin
.getStreamWithId(AdapterJS.WebRTCPlugin.pageId, src);
attachMediaStream(element, stream);
};
onTemasysPluginReady(isPlugin);
});
} else {
try {
console.log('Browser does not appear to be WebRTC-capable');
} catch (e) { }
window.location.href = 'unsupported_browser.html';
}
}
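/**
 * Wraps getUserMedia with the constraints produced by getConstraints(),
 * remembers the requested resolution so errorCallback can fall back to a
 * lower one, and records device availability before invoking the
 * success/failure callbacks.
 */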
RTCUtils.prototype.getUserMediaWithConstraints = function(
    um, success_callback, failure_callback, resolution, bandwidth, fps,
desktopStream) {
currentResolution = resolution;
var constraints = getConstraints(
um, resolution, bandwidth, fps, desktopStream);
console.info("Get media constraints", constraints);
var self = this;
try {
this.getUserMedia(constraints,
function (stream) {
console.log('onUserMediaSuccess');
self.setAvailableDevices(um, true);
success_callback(stream);
},
function (error) {
self.setAvailableDevices(um, false);
console.warn('Failed to get access to local media. Error ',
error, constraints);
if (failure_callback) {
failure_callback(error);
}
});
} catch (e) {
console.error('GUM failed: ', e);
if(failure_callback) {
failure_callback(e);
}
}
};
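/**
 * Reports to the RTC service which of the requested device types ('audio',
 * 'video') turned out to be available.
 */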
RTCUtils.prototype.setAvailableDevices = function (um, available) {
var devices = {};
if(um.indexOf("video") != -1) {
devices.video = available;
}
if(um.indexOf("audio") != -1) {
devices.audio = available;
}
this.service.setDeviceAvailability(devices);
};
/**
* We ask for audio and video combined stream in order to get permissions and
* not to ask twice.
*/
RTCUtils.prototype.obtainAudioAndVideoPermissions =
function(devices, callback, usageOptions)
{
var self = this;
// Get AV
var successCallback = function (stream) {
if(callback)
callback(stream, usageOptions);
else
self.successCallback(stream, usageOptions);
};
if(!devices)
devices = ['audio', 'video'];
var newDevices = [];
if(usageOptions)
for(var i = 0; i < devices.length; i++) {
var device = devices[i];
if(usageOptions[device] === true)
newDevices.push(device);
}
else
newDevices = devices;
if(newDevices.length === 0) {
successCallback();
return;
}
if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) {
// With FF/IE we can't split the stream into audio and video because FF
// doesn't support media stream constructors. So, we need to get the
// audio stream separately from the video stream using two distinct GUM
// calls. Not very user friendly :-( but we don't have many other
// options neither.
//
// Note that we pack those 2 streams in a single object and pass it to
// the successCallback method.
var obtainVideo = function (audioStream) {
self.getUserMediaWithConstraints(
['video'],
function (videoStream) {
return successCallback({
audioStream: audioStream,
videoStream: videoStream
});
},
function (error) {
console.error(
'failed to obtain video stream - stop', error);
self.errorCallback(error);
},
config.resolution || '360');
};
var obtainAudio = function () {
self.getUserMediaWithConstraints(
['audio'],
function (audioStream) {
if (newDevices.indexOf('video') !== -1)
obtainVideo(audioStream);
},
function (error) {
console.error(
'failed to obtain audio stream - stop', error);
self.errorCallback(error);
}
);
};
if (newDevices.indexOf('audio') !== -1) {
obtainAudio();
} else {
obtainVideo(null);
}
} else {
this.getUserMediaWithConstraints(
newDevices,
function (stream) {
successCallback(stream);
},
function (error) {
self.errorCallback(error);
},
config.resolution || '360');
}
};
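/**
 * Default success handler: logs basic information about the obtained stream
 * and passes it on to handleLocalStream().
 */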
RTCUtils.prototype.successCallback = function (stream, usageOptions) {
// If this is FF or IE, the stream parameter is *not* a MediaStream object,
// it's an object with two properties: audioStream, videoStream.
if (stream && stream.getAudioTracks && stream.getVideoTracks)
console.log('got', stream, stream.getAudioTracks().length,
stream.getVideoTracks().length);
this.handleLocalStream(stream, usageOptions);
};
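/**
 * Handles getUserMedia failures: if the error is a width/height constraint
 * violation and a lower resolution exists, the audio+video request is retried
 * at that resolution; otherwise an audio-only stream is requested.
 */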
RTCUtils.prototype.errorCallback = function (error) {
var self = this;
    console.error(
        'failed to obtain audio/video stream - trying lower resolution or audio only',
        error);
var resolution = getPreviousResolution(currentResolution);
if(typeof error == "object" && error.constraintName && error.name
&& (error.name == "ConstraintNotSatisfiedError" ||
error.name == "OverconstrainedError") &&
(error.constraintName == "minWidth" || error.constraintName == "maxWidth" ||
error.constraintName == "minHeight" || error.constraintName == "maxHeight")
&& resolution)
{
self.getUserMediaWithConstraints(['audio', 'video'],
function (stream) {
return self.successCallback(stream);
}, function (error) {
return self.errorCallback(error);
}, resolution);
}
else {
self.getUserMediaWithConstraints(
['audio'],
function (stream) {
return self.successCallback(stream);
},
function (error) {
console.error('failed to obtain audio/video stream - stop',
error);
return self.successCallback(null);
}
);
}
};
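/**
 * Splits the obtained media into separate audio and video streams (using
 * dummy streams where a kind is missing) and hands them to the RTC service
 * as local streams.
 */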
RTCUtils.prototype.handleLocalStream = function(stream, usageOptions) {
// If this is FF, the stream parameter is *not* a MediaStream object, it's
// an object with two properties: audioStream, videoStream.
var audioStream, videoStream;
if(window.webkitMediaStream)
{
audioStream = new webkitMediaStream();
videoStream = new webkitMediaStream();
if(stream) {
var audioTracks = stream.getAudioTracks();
for (var i = 0; i < audioTracks.length; i++) {
audioStream.addTrack(audioTracks[i]);
}
var videoTracks = stream.getVideoTracks();
for (i = 0; i < videoTracks.length; i++) {
videoStream.addTrack(videoTracks[i]);
}
}
}
else if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed())
{ // Firefox and Temasys plugin
if (stream && stream.audioStream)
audioStream = stream.audioStream;
else
audioStream = new DummyMediaStream("dummyAudio");
if (stream && stream.videoStream)
videoStream = stream.videoStream;
else
videoStream = new DummyMediaStream("dummyVideo");
}
var audioMuted = (usageOptions && usageOptions.audio === false),
videoMuted = (usageOptions && usageOptions.video === false);
var audioGUM = (!usageOptions || usageOptions.audio !== false),
videoGUM = (!usageOptions || usageOptions.video !== false);
this.service.createLocalStream(audioStream, "audio", null, null,
audioMuted, audioGUM);
this.service.createLocalStream(videoStream, "video", null, 'camera',
videoMuted, videoGUM);
};
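/**
 * Minimal MediaStream-like placeholder used when no real audio or video
 * stream was obtained.
 */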
function DummyMediaStream(id) {
this.id = id;
this.label = id;
this.stop = function() { };
this.getAudioTracks = function() { return []; };
this.getVideoTracks = function() { return []; };
}
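/**
 * Creates a new stream containing only the audio or only the video tracks of
 * the given stream (Chrome), or returns the stream itself / a dummy stream on
 * browsers without a MediaStream constructor.
 */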
RTCUtils.prototype.createStream = function(stream, isVideo) {
var newStream = null;
if (window.webkitMediaStream) {
newStream = new webkitMediaStream();
if (newStream) {
var tracks = (isVideo ? stream.getVideoTracks() : stream.getAudioTracks());
for (var i = 0; i < tracks.length; i++) {
newStream.addTrack(tracks[i]);
}
}
}
else {
// FIXME: this is duplicated with 'handleLocalStream' !!!
if (stream) {
newStream = stream;
} else {
newStream =
new DummyMediaStream(isVideo ? "dummyVideo" : "dummyAudio");
}
}
return newStream;
};
module.exports = RTCUtils;