Implements creation of local streams.

This commit is contained in:
hristoterezov 2015-08-30 16:28:35 -05:00
parent 06a1472d1e
commit a241d4b9b4
7 changed files with 1022 additions and 547 deletions

View File

@ -28,7 +28,7 @@ function JitsiConference(options) {
*/
JitsiConference.prototype.join = function (password) {
    // Delegate to the underlying room. room.joinRoom() was renamed to
    // room.join(); the stale duplicate call was removed — keeping both
    // would attempt to join the room twice.
    this.room.join(password);
}
/**
@ -47,7 +47,7 @@ JitsiConference.prototype.leave = function () {
* or a JitsiConferenceError if rejected.
*/
JitsiConference.prototype.createLocalTracks = function (options) {
    // Return the promise from RTC so callers can chain .then()/.catch()
    // on track creation; default to an empty options object for safety.
    // The stale non-returning call (pre-Promise API) was removed.
    return this.rtc.obtainAudioAndVideoPermissions(options || {});
}
/**

View File

@ -22,6 +22,6 @@ var LibJitsiMeet = {
}
// Sets up the Promise polyfill.
require("es6-promise").polyfill();
window.Promise = window.Promise || require("es6-promise").polyfill();
module.exports = LibJitsiMeet;

File diff suppressed because it is too large Load Diff

View File

@ -20,7 +20,8 @@ function implementOnEndedHandling(stream) {
};
}
function LocalStream(stream, type, eventEmitter, videoType, isGUMStream) {
function LocalStream(RTC, stream, type, eventEmitter, videoType, isGUMStream) {
this.rtc = RTC;
this.stream = stream;
this.eventEmitter = eventEmitter;
this.type = type;
@ -82,16 +83,16 @@ LocalStream.prototype.setMute = function (mute)
this.eventEmitter.emit(eventType, true);
} else {
var self = this;
APP.RTC.rtcUtils.obtainAudioAndVideoPermissions(
(this.isAudioStream() ? ["audio"] : ["video"]),
function (stream) {
this.rtcUtils.obtainAudioAndVideoPermissions(
(this.isAudioStream() ? ["audio"] : ["video"]))
.then(function (stream) {
if (isAudio) {
APP.RTC.changeLocalAudio(stream,
self.rtc.changeLocalAudio(stream,
function () {
self.eventEmitter.emit(eventType, false);
});
} else {
APP.RTC.changeLocalVideo(stream, false,
self.rtc.changeLocalVideo(stream, false,
function () {
self.eventEmitter.emit(eventType, false);
});

View File

@ -42,7 +42,7 @@ function getMediaStreamUsage()
}
function RTC()
function RTC(options)
{
this.rtcUtils = null;
this.devices = {
@ -55,6 +55,7 @@ function RTC()
this.localVideo = null;
this.eventEmitter = new EventEmitter();
var self = this;
this.options = options || {};
desktopsharing.addListener(
function (stream, isUsingScreenStream, callback) {
self.changeLocalVideo(stream, isUsingScreenStream, callback);
@ -75,9 +76,9 @@ function RTC()
}
}
RTC.prototype.obtainAudioAndVideoPermissions = function (options) {
    // Guard against callers that pass no options before reading .resolution.
    var opts = options || {};
    // Return the promise produced by RTCUtils so the caller can chain on it.
    // The stale pre-Promise duplicate of this call was removed.
    return this.rtcUtils.obtainAudioAndVideoPermissions(
        null, getMediaStreamUsage(), opts.resolution);
}
RTC.prototype.onIncommingCall = function(event) {
@ -109,14 +110,14 @@ RTC.prototype.removeStreamListener = function (listener, eventType) {
RTC.prototype.createLocalStreams = function (streams, change) {
for (var i = 0; i < streams.length; i++) {
var localStream = new LocalStream(streams.stream,
streams.type, this.eventEmitter, streams.videoType,
streams.isGUMStream);
var localStream = new LocalStream(this, streams[i].stream,
streams[i].type, this.eventEmitter, streams[i].videoType,
streams[i].isGUMStream);
this.localStreams.push(localStream);
if (streams.isMuted === true)
if (streams[i].isMuted === true)
localStream.setMute(true);
if (streams.type == "audio") {
if (streams[i].type == "audio") {
this.localAudio = localStream;
} else {
this.localVideo = localStream;
@ -125,7 +126,7 @@ RTC.prototype.createLocalStreams = function (streams, change) {
if (change)
eventType = StreamEventTypes.EVENT_TYPE_LOCAL_CHANGED;
this.eventEmitter.emit(eventType, localStream, streams.isMuted);
this.eventEmitter.emit(eventType, localStream, streams[i].isMuted);
}
return this.localStreams;
};

View File

@ -135,7 +135,7 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid
}
//Config options are read from RTCService.options. Currently uses only "useIPv6".
function RTCUtils(RTCService, onTemasysPluginReady, options)
function RTCUtils(RTCService, onTemasysPluginReady)
{
var self = this;
this.service = RTCService;
@ -209,7 +209,7 @@ function RTCUtils(RTCService, onTemasysPluginReady, options)
};
// DTLS should now be enabled by default but..
this.pc_constraints = {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]};
if (options.useIPv6) {
if (this.service.options.useIPv6) {
// https://code.google.com/p/webrtc/issues/detail?id=2828
this.pc_constraints.optional.push({googIPv6: true});
}
@ -340,94 +340,93 @@ RTCUtils.prototype.setAvailableDevices = function (um, available) {
* not to ask twice.
*/
/**
 * Requests user media (getUserMedia) for the given device types and resolves
 * with the processed local stream(s).
 * @param devices array of "audio"/"video" device types to request; defaults
 *        to both when not given.
 * @param usageOptions optional map of device type -> boolean; when present,
 *        only devices explicitly enabled (=== true) are requested.
 * @param resolution preferred video resolution passed to getUserMedia.
 * @returns {Promise} resolved with the result of successCallback; failures
 *          are routed through errorCallback, which retries and resolves.
 */
RTCUtils.prototype.obtainAudioAndVideoPermissions =
    function(devices, usageOptions, resolution)
{
    var self = this;
    // Get AV
    return new Promise(function(resolve, reject) {
        var successCallback = function (stream) {
            resolve(self.successCallback(stream, usageOptions));
        };
        if (!devices)
            devices = ['audio', 'video'];
        // Filter the requested devices by usageOptions, when provided.
        var newDevices = [];
        if (usageOptions)
            for (var i = 0; i < devices.length; i++) {
                var device = devices[i];
                if (usageOptions[device] === true)
                    newDevices.push(device);
            }
        else
            newDevices = devices;
        if (newDevices.length === 0) {
            // Nothing to request; resolve with a null stream.
            successCallback();
            return;
        }
        if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) {
            // With FF/IE we can't split the stream into audio and video because FF
            // doesn't support media stream constructors. So, we need to get the
            // audio stream separately from the video stream using two distinct GUM
            // calls. Not very user friendly :-( but we don't have many other
            // options neither.
            //
            // Note that we pack those 2 streams in a single object and pass it to
            // the successCallback method.
            var obtainVideo = function (audioStream) {
                self.getUserMediaWithConstraints(
                    ['video'],
                    function (videoStream) {
                        return successCallback({
                            audioStream: audioStream,
                            videoStream: videoStream
                        });
                    },
                    function (error) {
                        console.error(
                            'failed to obtain video stream - stop', error);
                        self.errorCallback(error, resolve);
                    },
                    // NOTE(review): this branch still reads config.resolution
                    // rather than the new resolution parameter — confirm
                    // whether that is intentional.
                    config.resolution || '360');
            };
            var obtainAudio = function () {
                self.getUserMediaWithConstraints(
                    ['audio'],
                    function (audioStream) {
                        // NOTE(review): when only audio was requested, no
                        // callback fires here and the promise never resolves
                        // — verify against callers.
                        if (newDevices.indexOf('video') !== -1)
                            obtainVideo(audioStream);
                    },
                    function (error) {
                        console.error(
                            'failed to obtain audio stream - stop', error);
                        self.errorCallback(error, resolve);
                    }
                );
            };
            if (newDevices.indexOf('audio') !== -1) {
                obtainAudio();
            } else {
                obtainVideo(null);
            }
        } else {
            // Other browsers: a single GUM call can deliver both tracks.
            // `this` is valid here because the executor is bound below.
            this.getUserMediaWithConstraints(
                newDevices,
                function (stream) {
                    successCallback(stream);
                },
                function (error) {
                    self.errorCallback(error, resolve);
                },
                resolution || '360');
        }
    }.bind(this));
};
RTCUtils.prototype.successCallback = function (stream, usageOptions) {
@ -439,7 +438,7 @@ RTCUtils.prototype.successCallback = function (stream, usageOptions) {
return this.handleLocalStream(stream, usageOptions);
};
RTCUtils.prototype.errorCallback = function (error) {
RTCUtils.prototype.errorCallback = function (error, resolve) {
var self = this;
console.error('failed to obtain audio/video stream - trying audio only', error);
var resolution = getPreviousResolution(currentResolution);
@ -452,7 +451,7 @@ RTCUtils.prototype.errorCallback = function (error) {
{
self.getUserMediaWithConstraints(['audio', 'video'],
function (stream) {
return self.successCallback(stream);
resolve(self.successCallback(stream));
}, function (error) {
return self.errorCallback(error);
}, resolution);
@ -461,12 +460,12 @@ RTCUtils.prototype.errorCallback = function (error) {
self.getUserMediaWithConstraints(
['audio'],
function (stream) {
return self.successCallback(stream);
resolve(self.successCallback(stream));
},
function (error) {
console.error('failed to obtain audio/video stream - stop',
error);
return self.successCallback(null);
resolve(self.successCallback(null));
}
);
}

View File

@ -9,7 +9,6 @@ var transform = require("sdp-transform");
var XMPPEvents = require("../../service/xmpp/XMPPEvents");
var RTCBrowserType = require("../RTC/RTCBrowserType");
var SSRCReplacement = require("./LocalSSRCReplacement");
var RTC = require("../RTC/RTC");
// Jingle stuff
function JingleSessionPC(me, sid, connection, service, eventEmitter) {