Detaches createLocalTracks from JitsiConference.
parent 1294cff9af
commit 2c799b3795
@@ -30,6 +30,7 @@ function JitsiConference(options) {
this.xmpp = this.connection.xmpp;
this.eventEmitter = new EventEmitter();
this.room = this.xmpp.createRoom(this.options.name, null, null, this.options.config);
this.room.updateDeviceAvailability(RTC.getDeviceAvailability());
this.rtc = new RTC(this.room, options);
if(!options.config.disableAudioLevels)
this.statistics = new Statistics();
@@ -56,18 +57,6 @@ JitsiConference.prototype.leave = function () {
this.room = null;
}

/**
* Creates the media tracks and returns them through the callback.
* @param options Object with properties / settings specifying the tracks which should be created.
* should be created or some additional configurations about resolution for example.
* @returns {Promise.<{Array.<JitsiTrack>}, JitsiConferenceError>} A promise that returns an array of created JitsiTracks if resolved,
* or a JitsiConferenceError if rejected.
*/
JitsiConference.prototype.createLocalTracks = function (options) {
if(this.rtc)
return this.rtc.obtainAudioAndVideoPermissions(options || {});
}

/**
* Returns the local tracks.
*/
@@ -178,6 +167,24 @@ JitsiConference.prototype.setDisplayName = function(name) {
}
}

/**
* Adds a JitsiLocalTrack object to the conference.
* @param track the JitsiLocalTrack object.
*/
JitsiConference.prototype.addTrack = function (track) {
this.rtc.addLocalStream(track);
this.room.addStream(track.getOriginalStream(), function () {});
}

/**
* Removes a JitsiLocalTrack object from the conference.
* @param track the JitsiLocalTrack object.
*/
JitsiConference.prototype.removeTrack = function (track) {
this.room.removeStream(track.getOriginalStream());
this.rtc.removeLocalStream(track);
}

/**
* Elects the participant with the given id to be the selected participant or the speaker.
* @param id the identifier of the participant
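Note: a minimal usage sketch for the addTrack/removeTrack API introduced above. It assumes a joined JitsiConference named room and tracks resolved by JitsiMeetJS.createLocalTracks, both of which appear later in this diff; the variable names are illustrative only.

// Sketch: obtain tracks through the new static entry point and hand them to a conference.
JitsiMeetJS.createLocalTracks({resolution: "720"}).then(function (tracks) {
    for (var i = 0; i < tracks.length; i++)
        room.addTrack(tracks[i]);    // registers the track with RTC and signals its stream
});

// Later, before leaving, each track would be detached again:
// room.removeTrack(track);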
@@ -311,6 +318,12 @@ function setupListeners(conference) {
function () {
conference.statistics.dispose();
});
RTC.addListener(RTCEvents.AVAILABLE_DEVICES_CHANGED, function (devices) {
conference.room.updateDeviceAvailability(devices);
});
RTC.addListener(StreamEventTypes.TRACK_MUTE_CHANGED, function (track) {
conference.eventEmitter.emit(JitsiConferenceEvents.TRACK_MUTE_CHANGED, track);
});
}
}
@@ -4,6 +4,7 @@ var JitsiConnectionEvents = require("./JitsiConnectionEvents");
var JitsiConnectionErrors = require("./JitsiConnectionErrors");
var JitsiConferenceErrors = require("./JitsiConferenceErrors");
var Logger = require("jitsi-meet-logger");
var RTC = require("./modules/RTC/RTC");

/**
* Namespace for the interface of Jitsi Meet Library.
@@ -21,10 +22,20 @@ var LibJitsiMeet = {
},
logLevels: Logger.levels,
init: function (options) {
require("./modules/RTC/RTC").init(options || {});
RTC.init(options || {});
},
setLogLevel: function (level) {
Logger.setLogLevel(level);
},
/**
* Creates the media tracks and returns them through the callback.
* @param options Object with properties / settings specifying the tracks which should be created.
* should be created or some additional configurations about resolution for example.
* @returns {Promise.<{Array.<JitsiTrack>}, JitsiConferenceError>} A promise that returns an array of created JitsiTracks if resolved,
* or a JitsiConferenceError if rejected.
*/
createLocalTracks: function (options) {
return RTC.obtainAudioAndVideoPermissions(options || {});
}
};
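Note: after this change createLocalTracks is a static entry point on the library object rather than a method of JitsiConference, so tracks can be created before any conference exists. A hedged sketch of the resulting call sequence, using only names that appear in this diff:

JitsiMeetJS.init();                                      // delegates to RTC.init()
JitsiMeetJS.setLogLevel(JitsiMeetJS.logLevels.ERROR);
JitsiMeetJS.createLocalTracks({resolution: "720"})
    .then(function (tracks) {
        // tracks are JitsiTrack objects; they can be attached to DOM
        // elements (track.attach(container)) before any conference is joined.
    });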
@@ -18,6 +18,19 @@ var options = {
clientNode: 'http://jitsi.org/jitsimeet', // The name of client node advertised in XEP-0115 'c' stanza
}


// var options = {
// hosts: {
// domain: 'whatever.jitsi.net',
// muc: 'conference.whatever.jitsi.net', // FIXME: use XEP-0030
// bridge: 'jitsi-videobridge.whatever.jitsi.net', // FIXME: use XEP-0030
// },
// bosh: '//whatever.jitsi.net/http-bind?ROOM_NAME=conference2', // FIXME: use xep-0156 for that
// clientNode: 'http://jitsi.org/jitsimeet', // The name of client node advertised in XEP-0115 'c' stanza
// }

var confOptions = {
openSctp: true,
disableAudioLevels: true
@@ -34,7 +47,7 @@ function onLocalTracks(tracks)
tracks[1].attach($("#localVideo"));
for(var i = 0; i < localTracks.length; i++)
{
localTracks[i].start();
console.log(localTracks[i]);
}
}
@@ -61,7 +74,10 @@ function onRemoteTrack(track) {
*/
function onConferenceJoined () {
console.log("conference joined!");
room.createLocalTracks({resolution: "720"}).then(onLocalTracks);
for(var i = 0; i < localTracks.length; i++)
{
room.addTrack(localTracks[i]);
}
}

function onUserLeft(id) {
@@ -120,10 +136,12 @@ function unload() {
$(window).bind('beforeunload', unload);
$(window).bind('unload', unload);

JitsiMeetJS.setLogLevel(JitsiMeetJS.logLevels.ERROR);

// JitsiMeetJS.setLogLevel(JitsiMeetJS.logLevels.ERROR);

JitsiMeetJS.init();

JitsiMeetJS.createLocalTracks({resolution: "720"}).then(onLocalTracks);
var connection = new JitsiMeetJS.JitsiConnection(null, null, options);

var room = null;
File diff suppressed because it is too large
@@ -1,25 +1,20 @@
var JitsiTrack = require("./JitsiTrack");
var StreamEventTypes = require("../../service/RTC/StreamEventTypes");
var RTC = require("./RTCUtils");
var RTCBrowserType = require("./RTCBrowserType");

/**
* Represents a single media track (either audio or video).
* @constructor
*/
function JitsiLocalTrack(RTC, stream, eventEmitter, videoType, isGUMStream,
function JitsiLocalTrack(RTC, stream, eventEmitter, videoType,
resolution)
{
JitsiTrack.call(this, RTC, stream);
this.eventEmitter = eventEmitter;
this.videoType = videoType;
this.isGUMStream = true;
this.dontFireRemoveEvent = false;
this.isStarted = false;
this.resolution = resolution;
var self = this;
if(isGUMStream === false)
this.isGUMStream = isGUMStream;
this.stream.onended = function () {
if(!self.dontFireRemoveEvent)
self.eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_ENDED, self);
@@ -35,11 +30,15 @@ JitsiLocalTrack.prototype.constructor = JitsiLocalTrack;
* @param mute {boolean} if true the track will be muted. Otherwise the track will be unmuted.
*/
JitsiLocalTrack.prototype._setMute = function (mute) {
if(!this.rtc) {
console.error("Mute is not supported for streams not attached to conference!");
return;
}
var isAudio = this.type === JitsiTrack.AUDIO;
this.dontFireRemoveEvent = false;

if ((window.location.protocol != "https:" && this.isGUMStream) ||
(isAudio && this.isGUMStream) || this.videoType === "screen" ||
if ((window.location.protocol != "https:") ||
(isAudio) || this.videoType === "screen" ||
// FIXME FF does not support 'removeStream' method used to mute
RTCBrowserType.isFirefox()) {
@@ -66,9 +65,10 @@ JitsiLocalTrack.prototype._setMute = function (mute) {
//FIXME: Maybe here we should set the SRC for the containers to something
} else {
var self = this;
this.rtc.obtainAudioAndVideoPermissions(
{devices: (isAudio ? ["audio"] : ["video"]),
resolution: self.resolution}, true)
var RTC = require("./RTCUtils");
RTC.obtainAudioAndVideoPermissions(
(isAudio ? ["audio"] : ["video"]),
self.resolution, true)
.then(function (streams) {
var stream = null;
for(var i = 0; i < streams.length; i++) {
@@ -76,7 +76,6 @@ JitsiLocalTrack.prototype._setMute = function (mute) {
if(stream.type === self.type) {
self.stream = stream.stream;
self.videoType = stream.videoType;
self.isGUMStream = stream.isGUMStream;
break;
}
}
@@ -109,22 +108,12 @@ JitsiLocalTrack.prototype._setMute = function (mute) {
JitsiLocalTrack.prototype.stop = function () {
if(!this.stream)
return;
this.rtc.room.removeStream(this.stream);
if(this.rtc)
this.rtc.room.removeStream(this.stream);
this.stream.stop();
this.detach();
}

/**
* Starts sending the track.
* NOTE: Works for local tracks only.
*/
JitsiLocalTrack.prototype.start = function() {
this.isStarted = true;
this.rtc.room.addStream(this.stream, function () {});
}

/**
* Returns <tt>true</tt> - if the stream is muted
* and <tt>false</tt> otherwise.
@@ -150,4 +139,12 @@ JitsiLocalTrack.prototype.isMuted = function () {
return true;
};

/**
* Private method. Updates rtc property of the track.
* @param rtc the rtc instance.
*/
JitsiLocalTrack.prototype._setRTC = function (rtc) {
this.rtc = rtc;
};

module.exports = JitsiLocalTrack;
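Note: because tracks are now created before any conference exists, a JitsiLocalTrack only receives its rtc reference when RTC.prototype.addLocalStream calls _setRTC (see the RTC.js hunks below). A sketch of the resulting ordering constraint; it assumes the public mute()/unmute() wrappers on JitsiTrack (only unmute is visible in a later hunk), so treat those method names as assumptions:

var track = tracks[0];   // a JitsiLocalTrack from JitsiMeetJS.createLocalTracks
track.mute();            // before room.addTrack(track): _setMute only logs
                         // "Mute is not supported for streams not attached to conference!"
room.addTrack(track);    // addLocalStream() calls track._setRTC(rtc)
track.mute();            // now the mute path can re-signal through this.rtc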
@@ -1,4 +1,3 @@
var RTC = require("./RTCUtils");
var RTCBrowserType = require("./RTCBrowserType");

/**
@@ -22,14 +21,14 @@ function implementOnEndedHandling(stream) {
* Represents a single media track (either audio or video).
* @constructor
*/
function JitsiTrack(RTC, stream)
function JitsiTrack(rtc, stream)
{
/**
* Array with the HTML elements that are displaying the streams.
* @type {Array}
*/
this.containers = [];
this.rtc = RTC;
this.rtc = rtc;
this.stream = stream;
this.type = (this.stream.getVideoTracks().length > 0)?
JitsiTrack.VIDEO : JitsiTrack.AUDIO;
@@ -94,7 +93,7 @@ JitsiTrack.prototype.unmute = function () {
*/
JitsiTrack.prototype.attach = function (container) {
if(this.stream)
RTC.attachMediaStream(container, this.stream);
require("./RTCUtils").attachMediaStream(container, this.stream);
this.containers.push(container);
}
@@ -126,15 +125,6 @@ JitsiTrack.prototype.detach = function (container) {
JitsiTrack.prototype.stop = function () {
}

/**
* Starts sending the track.
* NOTE: Works for local tracks only.
*/
JitsiTrack.prototype.start = function() {

}

/**
* Returns true if this is a video track and the source of the video is a
* screen capture as opposed to a camera.
@@ -1,125 +0,0 @@
/* global APP */
var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
var RTCEvents = require("../../service/RTC/RTCEvents");
var RTCBrowserType = require("./RTCBrowserType");

/**
* This implements 'onended' callback normally fired by WebRTC after the stream
* is stopped. There is no such behaviour yet in FF, so we have to add it.
* @param stream original WebRTC stream object to which 'onended' handling
* will be added.
*/
function implementOnEndedHandling(stream) {
var originalStop = stream.stop;
stream.stop = function () {
originalStop.apply(stream);
if (!stream.ended) {
stream.ended = true;
stream.onended();
}
};
}

function LocalStream(RTC, stream, type, eventEmitter, videoType, isGUMStream) {
this.rtc = RTC;
this.stream = stream;
this.eventEmitter = eventEmitter;
this.type = type;
this.videoType = videoType;
this.isGUMStream = true;
if(isGUMStream === false)
this.isGUMStream = isGUMStream;
var self = this;
if(type == "audio") {
this.getTracks = function () {
return self.stream.getAudioTracks();
};
} else {
this.getTracks = function () {
return self.stream.getVideoTracks();
};
}

this.stream.onended = function () {
self.streamEnded();
};
if (RTCBrowserType.isFirefox()) {
implementOnEndedHandling(this.stream);
}
}

LocalStream.prototype.streamEnded = function () {
this.eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_ENDED, this);
};

LocalStream.prototype.getOriginalStream = function()
{
return this.stream;
};

LocalStream.prototype.isAudioStream = function () {
return this.type === "audio";
};

LocalStream.prototype.setMute = function (mute)
{
var isAudio = this.isAudioStream();
var eventType = isAudio ? RTCEvents.AUDIO_MUTE : RTCEvents.VIDEO_MUTE;

if ((window.location.protocol != "https:" && this.isGUMStream) ||
(isAudio && this.isGUMStream) || this.videoType === "screen" ||
// FIXME FF does not support 'removeStream' method used to mute
RTCBrowserType.isFirefox()) {

var tracks = this.getTracks();
for (var idx = 0; idx < tracks.length; idx++) {
tracks[idx].enabled = !mute;
}
this.eventEmitter.emit(eventType, mute);
} else {
if (mute) {
APP.xmpp.removeStream(this.stream);
this.stream.stop();
this.eventEmitter.emit(eventType, true);
} else {
var self = this;
this.rtc.obtainAudioAndVideoPermissions(
{devices: (this.isAudioStream() ? ["audio"] : ["video"])})
.then(function (stream) {
if (isAudio) {
self.rtc.changeLocalAudio(stream,
function () {
self.eventEmitter.emit(eventType, false);
});
} else {
self.rtc.changeLocalVideo(stream, false,
function () {
self.eventEmitter.emit(eventType, false);
});
}
});
}
}
};

LocalStream.prototype.isMuted = function () {
var tracks = [];
if (this.isAudioStream()) {
tracks = this.stream.getAudioTracks();
} else {
if (this.stream.ended)
return true;
tracks = this.stream.getVideoTracks();
}
for (var idx = 0; idx < tracks.length; idx++) {
if(tracks[idx].enabled)
return false;
}
return true;
};

LocalStream.prototype.getId = function () {
return this.stream.getTracks()[0].id;
};

module.exports = LocalStream;
@@ -1,47 +0,0 @@
var MediaStreamType = require("../../service/RTC/MediaStreamTypes");

/**
* Creates a MediaStream object for the given data, session id and ssrc.
* It is a wrapper class for the MediaStream.
*
* @param data the data object from which we obtain the stream,
* the peerjid, etc.
* @param sid the session id
* @param ssrc the ssrc corresponding to this MediaStream
*
* @constructor
*/
function MediaStream(data, sid, ssrc, browser, eventEmitter) {

// XXX(gp) to minimize headaches in the future, we should build our
// abstractions around tracks and not streams. ORTC is track based API.
// Mozilla expects m-lines to represent media tracks.
//
// Practically, what I'm saying is that we should have a MediaTrack class
// and not a MediaStream class.
//
// Also, we should be able to associate multiple SSRCs with a MediaTrack as
// a track might have an associated RTX and FEC sources.

this.sid = sid;
this.stream = data.stream;
this.peerjid = data.peerjid;
this.videoType = data.videoType;
this.ssrc = ssrc;
this.type = (this.stream.getVideoTracks().length > 0)?
MediaStreamType.VIDEO_TYPE : MediaStreamType.AUDIO_TYPE;
this.muted = false;
this.eventEmitter = eventEmitter;
}

MediaStream.prototype.getOriginalStream = function() {
return this.stream;
};

MediaStream.prototype.setMute = function (value) {
this.stream.muted = value;
this.muted = value;
};

module.exports = MediaStream;
@@ -13,42 +13,7 @@ var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
var RTCEvents = require("../../service/RTC/RTCEvents.js");
var desktopsharing = require("../desktopsharing/desktopsharing");

function getMediaStreamUsage()
{
var result = {
audio: true,
video: true
};

/** There are some issues with the desktop sharing
* when this property is enabled.
* WARNING: We must change the implementation to start video/audio if we
* receive from the focus that the peer is not muted.

var isSecureConnection = window.location.protocol == "https:";

if(config.disableEarlyMediaPermissionRequests || !isSecureConnection)
{
result = {
audio: false,
video: false
};

}
**/

return result;
}

var rtcReady = false;


function RTC(room, options) {
this.devices = {
audio: true,
video: true
};
this.room = room;
this.localStreams = [];
this.remoteStreams = {};
@@ -75,22 +40,22 @@ function RTC(room, options) {
* Creates the local MediaStreams.
* @param options object for options (NOTE: currently only list of devices and resolution are supported)
* @param dontCreateJitsiTrack if <tt>true</tt> objects with the following structure {stream: the Media Stream,
* type: "audio" or "video", isMuted: true/false, videoType: "camera" or "desktop"}
* type: "audio" or "video", videoType: "camera" or "desktop"}
* will be returned through the Promise, otherwise JitsiTrack objects will be returned.
* @returns {*} Promise object that will receive the new JitsiTracks
*/
RTC.prototype.obtainAudioAndVideoPermissions = function (options, dontCreateJitsiTrack) {
return RTCUtils.obtainAudioAndVideoPermissions(this,
options.devices, getMediaStreamUsage(), options.resolution, dontCreateJitsiTrack);
RTC.obtainAudioAndVideoPermissions = function (options, dontCreateJitsiTrack) {
return RTCUtils.obtainAudioAndVideoPermissions(
options.devices, options.resolution, dontCreateJitsiTrack);
}

RTC.prototype.onIncommingCall = function(event) {
if(this.options.config.openSctp)
this.dataChannels = new DataChannels(event.peerconnection, this.eventEmitter);
for(var i = 0; i < this.localStreams.length; i++)
if(this.localStreams[i].isStarted)
if(this.localStreams[i])
{
this.localStreams[i].start();
this.room.addStream(this.localStreams[i].getOriginalStream(), function () {});
}
}
@@ -112,56 +77,36 @@ RTC.prototype.removeListener = function (eventType, listener) {
this.eventEmitter.removeListener(eventType, listener);
};

RTC.addRTCReadyListener = function (listener) {
RTCUtils.eventEmitter.on(RTCEvents.RTC_READY, listener);
RTC.addListener = function (eventType, listener) {
RTCUtils.addListener(eventType, listener);
}

RTC.removeRTCReadyListener = function (listener) {
RTC.removeListener = function (eventType, listener) {
RTCUtils.eventEmitter.removeListener(RTCEvents.RTC_READY, listener);
RTCUtils.removeListener(eventType, listener)
}

RTC.isRTCReady = function () {
return rtcReady;
return RTCUtils.isRTCReady();
}

RTC.init = function (options) {
// In case of IE we continue from 'onReady' callback
// passed to RTCUtils constructor. It will be invoked by Temasys plugin
// once it is initialized.
var onReady = function () {
rtcReady = true;
RTCUtils.eventEmitter.emit(RTCEvents.RTC_READY, true);
};

RTCUtils.init(onReady, options || {});

// Call onReady() if Temasys plugin is not used
if (!RTCBrowserType.isTemasysPluginUsed()) {
onReady();
}
RTCUtils.init(options || {});
}

RTC.prototype.createLocalStreams = function (streams, change) {
for (var i = 0; i < streams.length; i++) {
var localStream = new JitsiLocalTrack(this, streams[i].stream,
this.eventEmitter, streams[i].videoType,
streams[i].isGUMStream, streams[i].resolution);
this.localStreams.push(localStream);
if (streams[i].isMuted === true)
localStream.setMute(true);
RTC.getDeviceAvailability = function () {
return RTCUtils.getDeviceAvailability();
}

if (streams[i].type == "audio") {
this.localAudio = localStream;
} else {
this.localVideo = localStream;
}
var eventType = StreamEventTypes.EVENT_TYPE_LOCAL_CREATED;
if (change)
eventType = StreamEventTypes.EVENT_TYPE_LOCAL_CHANGED;
RTC.prototype.addLocalStream = function (stream) {
this.localStreams.push(stream);
stream._setRTC(this);

this.eventEmitter.emit(eventType, localStream, streams[i].isMuted);
if (stream.type == "audio") {
this.localAudio = stream;
} else {
this.localVideo = stream;
}
return this.localStreams;
};

RTC.prototype.removeLocalStream = function (stream) {
@@ -191,7 +136,7 @@ RTC.getPCConstraints = function () {
return RTCUtils.pc_constraints;
};

RTC.prototype.getUserMediaWithConstraints = function(um, success_callback,
RTC.getUserMediaWithConstraints = function(um, success_callback,
failure_callback, resolution,
bandwidth, fps, desktopStream)
{
@@ -335,14 +280,4 @@ RTC.prototype.setVideoMute = function (mute, callback, options) {
}
};

RTC.prototype.setDeviceAvailability = function (devices) {
if(!devices)
return;
if(devices.audio === true || devices.audio === false)
this.devices.audio = devices.audio;
if(devices.video === true || devices.video === false)
this.devices.video = devices.video;
this.eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, this.devices);
};

module.exports = RTC;
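Note: RTC now exposes static wrappers around RTCUtils (init, addListener, removeListener, isRTCReady, getDeviceAvailability, obtainAudioAndVideoPermissions), so callers no longer need an RTC instance to acquire media. A sketch using only the static API visible in the hunks above:

RTC.init({});                                            // delegates to RTCUtils.init
RTC.addListener(RTCEvents.RTC_READY, function () {
    RTC.obtainAudioAndVideoPermissions({devices: ["audio", "video"], resolution: "360"})
        .then(function (tracks) {
            // JitsiLocalTrack objects; an RTC instance adopts them later via addLocalStream()
        });
});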
@@ -3,9 +3,21 @@
var logger = require("jitsi-meet-logger").getLogger(__filename);
var RTCBrowserType = require("./RTCBrowserType");
var Resolutions = require("../../service/RTC/Resolutions");
var RTCEvents = require("../../service/RTC/RTCEvents");
var AdapterJS = require("./adapter.screenshare");
var SDPUtil = require("../xmpp/SDPUtil");
var EventEmitter = require("events");
var JitsiLocalTrack = require("./JitsiLocalTrack");
var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");

var eventEmitter = new EventEmitter();

var devices = {
audio: true,
video: true
};

var rtcReady = false;

function DummyMediaStream(id) {
this.id = id;
@@ -143,10 +155,29 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream) {
return constraints;
}

function setAvailableDevices(um, available) {
var devices = {};
if (um.indexOf("video") != -1) {
devices.video = available;
}
if (um.indexOf("audio") != -1) {
devices.audio = available;
}

eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, devices);
}

// In case of IE we continue from 'onReady' callback
// passed to RTCUtils constructor. It will be invoked by Temasys plugin
// once it is initialized.
function onReady () {
rtcReady = true;
eventEmitter.emit(RTCEvents.RTC_READY, true);
};

//Options parameter is to pass config options. Currently uses only "useIPv6".
var RTCUtils = {
eventEmitter: new EventEmitter(),
init: function (onTemasysPluginReady, options) {
init: function (options) {
var self = this;
if (RTCBrowserType.isFirefox()) {
var FFversion = RTCBrowserType.getFirefoxVersion();
@@ -286,7 +317,7 @@ var RTCUtils = {
attachMediaStream(element, stream);
};

onTemasysPluginReady(isPlugin);
onReady(isPlugin);
});
} else {
try {
@@ -296,26 +327,27 @@ var RTCUtils = {
return;
}

// Call onReady() if Temasys plugin is not used
if (!RTCBrowserType.isTemasysPluginUsed()) {
onReady();
}

},


getUserMediaWithConstraints: function (RTC, um, success_callback, failure_callback, resolution, bandwidth, fps, desktopStream) {
getUserMediaWithConstraints: function ( um, success_callback, failure_callback, resolution, bandwidth, fps, desktopStream) {
var constraints = getConstraints(
um, resolution, bandwidth, fps, desktopStream);

logger.info("Get media constraints", constraints);

var self = this;

try {
this.getUserMedia(constraints,
function (stream) {
logger.log('onUserMediaSuccess');
self.setAvailableDevices(RTC, um, true);
setAvailableDevices(um, true);
success_callback(stream);
},
function (error) {
self.setAvailableDevices(RTC, um, false);
setAvailableDevices(um, false);
logger.warn('Failed to get access to local media. Error ',
error, constraints);
if (failure_callback) {
@@ -330,54 +362,29 @@ var RTCUtils = {
}
},

setAvailableDevices: function (RTC, um, available) {
var devices = {};
if (um.indexOf("video") != -1) {
devices.video = available;
}
if (um.indexOf("audio") != -1) {
devices.audio = available;
}
RTC.setDeviceAvailability(devices);
},

/**
* Creates the local MediaStreams.
* @param RTC the rtc service.
* @param devices the devices that will be requested
* @param usageOptions object with devices that should be requested.
* @param resolution resolution constraints
* @param dontCreateJitsiTrack if <tt>true</tt> objects with the following structure {stream: the Media Stream,
* type: "audio" or "video", isMuted: true/false, videoType: "camera" or "desktop"}
* type: "audio" or "video", videoType: "camera" or "desktop"}
* will be returned through the Promise, otherwise JitsiTrack objects will be returned.
* @returns {*} Promise object that will receive the new JitsiTracks
*/
obtainAudioAndVideoPermissions: function (RTC, devices, usageOptions, resolution, dontCreateJitsiTracks) {
obtainAudioAndVideoPermissions: function (devices, resolution, dontCreateJitsiTracks) {
var self = this;
// Get AV

return new Promise(function (resolve, reject) {
var successCallback = function (stream) {
var streams = self.successCallback(RTC , stream, usageOptions, resolution);
resolve(dontCreateJitsiTracks? streams: RTC.createLocalStreams(streams));
var streams = self.successCallback(stream, resolution);
resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams));
};

if (!devices)
devices = ['audio', 'video'];

var newDevices = [];


if (usageOptions)
for (var i = 0; i < devices.length; i++) {
var device = devices[i];
if (usageOptions[device] === true)
newDevices.push(device);
}
else
newDevices = devices;

if (newDevices.length === 0) {
if (devices.length === 0) {
successCallback();
return;
}
@@ -394,7 +401,6 @@ var RTCUtils = {
// the successCallback method.
var obtainVideo = function (audioStream) {
self.getUserMediaWithConstraints(
RTC,
['video'],
function (videoStream) {
return successCallback({
@@ -405,73 +411,68 @@ var RTCUtils = {
function (error, resolution) {
logger.error(
'failed to obtain video stream - stop', error);
self.errorCallback(error, resolve, RTC, resolution, dontCreateJitsiTracks);
self.errorCallback(error, resolve, resolution, dontCreateJitsiTracks);
},
resolution || '360');
};
var obtainAudio = function () {
self.getUserMediaWithConstraints(
RTC,
['audio'],
function (audioStream) {
if (newDevices.indexOf('video') !== -1)
if (devices.indexOf('video') !== -1)
obtainVideo(audioStream);
},
function (error) {
logger.error(
'failed to obtain audio stream - stop', error);
self.errorCallback(error, resolve, RTC, null, dontCreateJitsiTracks);
self.errorCallback(error, resolve, null, dontCreateJitsiTracks);
}
);
};
if (newDevices.indexOf('audio') !== -1) {
if (devices.indexOf('audio') !== -1) {
obtainAudio();
} else {
obtainVideo(null);
}
} else {
this.getUserMediaWithConstraints(
RTC,
newDevices,
devices,
function (stream) {
successCallback(stream);
},
function (error, resolution) {
self.errorCallback(error, resolve, RTC, resolution, dontCreateJitsiTracks);
self.errorCallback(error, resolve, resolution, dontCreateJitsiTracks);
},
resolution || '360');
resolution || '360');
}
}.bind(this));
},

/**
* Successful callback called from GUM.
* @param RTC the rtc service
* @param stream the new MediaStream
* @param usageOptions the list of the devices that should be queried.
* @param resolution the resolution of the video stream.
* @returns {*}
*/
successCallback: function (RTC, stream, usageOptions, resolution) {
successCallback: function (stream, resolution) {
// If this is FF or IE, the stream parameter is *not* a MediaStream object,
// it's an object with two properties: audioStream, videoStream.
if (stream && stream.getAudioTracks && stream.getVideoTracks)
logger.log('got', stream, stream.getAudioTracks().length,
stream.getVideoTracks().length);
return this.handleLocalStream(RTC, stream, usageOptions, resolution);
return this.handleLocalStream(stream, resolution);
},

/**
* Error callback called from GUM. Retries the GUM call with different resolutions.
* @param error the error
* @param resolve the resolve function that will be called on success.
* @param RTC the rtc service
* @param currentResolution the last resolution used for GUM.
* @param dontCreateJitsiTracks if <tt>true</tt> objects with the following structure {stream: the Media Stream,
* type: "audio" or "video", isMuted: true/false, videoType: "camera" or "desktop"}
* type: "audio" or "video", videoType: "camera" or "desktop"}
* will be returned through the Promise, otherwise JitsiTrack objects will be returned.
*/
errorCallback: function (error, resolve, RTC, currentResolution, dontCreateJitsiTracks) {
errorCallback: function (error, resolve, currentResolution, dontCreateJitsiTracks) {
var self = this;
logger.error('failed to obtain audio/video stream - trying audio only', error);
var resolution = getPreviousResolution(currentResolution);
@@ -481,27 +482,26 @@ var RTCUtils = {
(error.constraintName == "minWidth" || error.constraintName == "maxWidth" ||
error.constraintName == "minHeight" || error.constraintName == "maxHeight")
&& resolution != null) {
self.getUserMediaWithConstraints(RTC, ['audio', 'video'],
self.getUserMediaWithConstraints(['audio', 'video'],
function (stream) {
var streams = self.successCallback(RTC, stream, resolution);
resolve(dontCreateJitsiTracks? streams: RTC.createLocalStreams(streams));
var streams = self.successCallback(stream, resolution);
resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams));
}, function (error, resolution) {
return self.errorCallback(error, resolve, RTC, resolution, dontCreateJitsiTracks);
return self.errorCallback(error, resolve, resolution, dontCreateJitsiTracks);
}, resolution);
}
else {
self.getUserMediaWithConstraints(
RTC,
['audio'],
function (stream) {
var streams = self.successCallback(RTC, stream, resolution);
resolve(dontCreateJitsiTracks? streams: RTC.createLocalStreams(streams));
var streams = self.successCallback(stream, resolution);
resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams));
},
function (error) {
logger.error('failed to obtain audio/video stream - stop',
error);
var streams = self.successCallback(RTC, null);
resolve(dontCreateJitsiTracks? streams: RTC.createLocalStreams(streams));
var streams = self.successCallback(null);
resolve(dontCreateJitsiTracks? streams: self.createLocalTracks(streams));
}
);
}
@@ -509,13 +509,11 @@ var RTCUtils = {

/**
* Handles the newly created Media Streams.
* @param service the rtc service
* @param stream the new Media Streams
* @param usageOptions the list of the devices that should be queried.
* @param resolution the resolution of the video stream.
* @returns {*[]} Promise object with the new Media Streams.
*/
handleLocalStream: function (service, stream, usageOptions, resolution) {
handleLocalStream: function (stream, resolution) {
var audioStream, videoStream;
// If this is FF, the stream parameter is *not* a MediaStream object, it's
// an object with two properties: audioStream, videoStream.
@@ -549,17 +547,9 @@ var RTCUtils = {
videoStream = new DummyMediaStream("dummyVideo");
}

var audioMuted = (usageOptions && usageOptions.audio === false),
videoMuted = (usageOptions && usageOptions.video === false);

var audioGUM = (!usageOptions || usageOptions.audio !== false),
videoGUM = (!usageOptions || usageOptions.video !== false);

return [
{stream: audioStream, type: "audio", isMuted: audioMuted,
isGUMStream: audioGUM, videoType: null},
{stream: videoStream, type: "video", isMuted: videoMuted,
isGUMStream: videoGUM, videoType: "camera",
{stream: audioStream, type: "audio", videoType: null},
{stream: videoStream, type: "video", videoType: "camera",
resolution: resolution}
];
},
@@ -587,6 +577,33 @@ var RTCUtils = {
}

return newStream;
},
addListener: function (eventType, listener) {
eventEmitter.on(eventType, listener);
},
removeListener: function (eventType, listener) {
eventEmitter.removeListener(eventType, listener);
},
getDeviceAvailability: function () {
return devices;
},
isRTCReady: function () {
return rtcReady;
},
createLocalTracks: function (streams) {
var newStreams = []
for (var i = 0; i < streams.length; i++) {
var localStream = new JitsiLocalTrack(null, streams[i].stream,
eventEmitter, streams[i].videoType, streams[i].resolution);
newStreams.push(localStream);
if (streams[i].isMuted === true)
localStream.setMute(true);

var eventType = StreamEventTypes.EVENT_TYPE_LOCAL_CREATED;

eventEmitter.emit(eventType, localStream);
}
return newStreams;
}
}
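Note: the dontCreateJitsiTracks flag decides what the obtainAudioAndVideoPermissions promise resolves with. A brief sketch of the two shapes, based on handleLocalStream and createLocalTracks above; the field list follows the updated JSDoc and the values passed here are illustrative:

// dontCreateJitsiTracks === true: raw descriptors, as used by the mute/unmute re-acquire path
RTCUtils.obtainAudioAndVideoPermissions(["video"], "360", true)
    .then(function (descriptors) {
        // [{stream: MediaStream, type: "audio"|"video", videoType: "camera"|"desktop", resolution: ...}]
    });

// default: wrapped JitsiLocalTrack objects, each announced with EVENT_TYPE_LOCAL_CREATED
RTCUtils.obtainAudioAndVideoPermissions(["audio", "video"], "360")
    .then(function (tracks) { /* JitsiLocalTrack[] */ });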
@@ -37,7 +37,7 @@ var parser = {
this.JSON2packet(node.children, packet);
packet.up();
}
packet.up();
// packet.up();
}
};
@@ -87,8 +87,25 @@ ChatRoom.prototype.initPresenceMap = function () {
"value": navigator.userAgent,
"attributes": {xmlns: 'http://jitsi.org/jitmeet/user-agent'}
});

};

ChatRoom.prototype.updateDeviceAvailability = function (devices) {
this.presMap["nodes"].push( {
"tagName": "devices",
"children": [
{
"tagName": "audio",
"value": devices.audio,
},
{
"tagName": "video",
"value": devices.video,
}
]
});
}

ChatRoom.prototype.join = function (password, tokenPassword) {
if(password)
this.password = password;
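Note: updateDeviceAvailability only pushes a devices node (with audio/video children) into presMap; the values go out with the next presence stanza. A one-line usage sketch matching the call added in JitsiConference above, where room is the ChatRoom/MUC object:

room.updateDeviceAvailability({audio: true, video: false});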
@@ -611,14 +628,10 @@ ChatRoom.prototype.remoteStreamAdded = function(data, sid, thessrc) {
this.eventEmitter.emit(XMPPEvents.REMOTE_STREAM_RECEIVED, data, sid, thessrc);
}

ChatRoom.prototype.addLocalStreams = function (localStreams) {
this.session.addLocalStreams(localStreams);
}

ChatRoom.prototype.getJidBySSRC = function (ssrc) {
if (!this.session)
return null;
return this.session.getSsrcOwner(ssrc);
};

module.exports = ChatRoom;
module.exports = ChatRoom;
@@ -164,16 +164,6 @@ JingleSessionPC.prototype.doInitialize = function () {
});
};

JingleSessionPC.prototype.addLocalStreams = function (localStreams) {
var self = this;
// add any local and relayed stream
localStreams.forEach(function(stream) {
if(!stream.isStarted())
return;
self.peerconnection.addStream(stream.getOriginalStream());
});
}

function onIceConnectionStateChange(sid, session) {
switch (session.peerconnection.iceConnectionState) {
case 'checking':