Fixes mute issues.

This commit is contained in:
hristoterezov 2015-10-05 17:48:16 -05:00
parent c3c3d8e457
commit d60425f7e4
13 changed files with 796 additions and 588 deletions

View File

@ -207,7 +207,10 @@ function setupListeners(conference) {
});
conference.rtc.addListener(StreamEventTypes.EVENT_TYPE_LOCAL_ENDED, function (stream) {
conference.eventEmitter.emit(JitsiConferenceEvents.TRACK_REMOVED, stream);
})
});
conference.rtc.addListener(StreamEventTypes.TRACK_MUTE_CHANGED, function (track) {
conference.eventEmitter.emit(JitsiConferenceEvents.TRACK_MUTE_CHANGED, track);
});
conference.room.addListener(XMPPEvents.MUC_JOINED, function () {
conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_JOINED);
});

View File

@ -48,13 +48,9 @@ var JitsiConferenceEvents = {
*/
IN_LAST_N_CHANGED: "conference.lastNEndpointsChanged",
/**
* A media track was muted.
* A media track mute status was changed.
*/
TRACK_MUTED: "conference.trackMuted",
/**
* A media track was unmuted.
*/
TRACK_UNMUTED: "conference.trackUnmuted",
TRACK_MUTE_CHANGED: "conference.trackMuteChanged",
/**
* Audio levels of a media track was changed.
*/

View File

@ -73,6 +73,9 @@ function onConnectionSuccess(){
room.on(JitsiMeetJS.events.conference.CONFERENCE_JOINED, onConferenceJoined);
room.on(JitsiMeetJS.events.conference.USER_JOINED, function(id){ remoteTracks[id] = [];});
room.on(JitsiMeetJS.events.conference.USER_LEFT, onUserLeft);
room.on(JitsiMeetJS.events.conference.TRACK_MUTE_CHANGED, function (track) {
console.debug(track.getType() + " - " + track.isMuted());
});
room.join();
};

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,7 @@
var JitsiTrack = require("./JitsiTrack");
var StreamEventTypes = require("../../service/RTC/StreamEventTypes");
var RTCEvents = require("../../service/RTC/RTCEvents");
var RTC = require("./RTCUtils");
var RTCBrowserType = require("./RTCBrowserType");
/**
* Represents a single media track (either audio or video).
@ -28,7 +29,6 @@ JitsiLocalTrack.prototype.constructor = JitsiLocalTrack;
*/
JitsiLocalTrack.prototype._setMute = function (mute) {
var isAudio = this.type === JitsiTrack.AUDIO;
var eventType = isAudio ? RTCEvents.AUDIO_MUTE : RTCEvents.VIDEO_MUTE;
if ((window.location.protocol != "https:" && this.isGUMStream) ||
(isAudio && this.isGUMStream) || this.videoType === "screen" ||
@ -43,7 +43,7 @@ JitsiLocalTrack.prototype._setMute = function (mute) {
this.rtc.room.setAudioMute(mute);
else
this.rtc.room.setVideoMute(mute);
this.eventEmitter.emit(eventType, mute);
this.eventEmitter.emit(StreamEventTypes.TRACK_MUTE_CHANGED, this);
} else {
if (mute) {
this.rtc.room.removeStream(this.stream);
@ -52,25 +52,41 @@ JitsiLocalTrack.prototype._setMute = function (mute) {
this.rtc.room.setAudioMute(mute);
else
this.rtc.room.setVideoMute(mute);
this.eventEmitter.emit(eventType, true);
this.stream = null;
this.eventEmitter.emit(StreamEventTypes.TRACK_MUTE_CHANGED, this);
//FIXME: Maybe here we should set the SRC for the containers to something
} else {
var self = this;
this.rtc.obtainAudioAndVideoPermissions(
{devices: (this.isAudioStream() ? ["audio"] : ["video"])})
.then(function (stream) {
if (isAudio) {
self.rtc.changeLocalAudio(stream,
function () {
this.rtc.room.setAudioMute(mute);
self.eventEmitter.emit(eventType, false);
});
} else {
self.rtc.changeLocalVideo(stream, false,
function () {
this.rtc.room.setVideoMute(mute);
self.eventEmitter.emit(eventType, false);
});
{devices: (isAudio ? ["audio"] : ["video"])}, true)
.then(function (streams) {
var stream = null;
for(var i = 0; i < streams.length; i++) {
stream = streams[i];
if(stream.type === self.type) {
self.stream = stream.stream;
self.videoType = stream.videoType;
self.isGUMStream = stream.isGUMStream;
break;
}
}
if(!stream)
return;
for(var i = 0; i < self.containers.length; i++)
{
RTC.attachMediaStream(self.containers[i], self.stream);
}
self.rtc.room.addStream(stream.stream,
function () {
if(isAudio)
self.rtc.room.setAudioMute(mute);
else
self.rtc.room.setVideoMute(mute);
self.eventEmitter.emit(StreamEventTypes.TRACK_MUTE_CHANGED, self);
});
});
}
}
@ -81,6 +97,8 @@ JitsiLocalTrack.prototype._setMute = function (mute) {
* NOTE: Works for local tracks only.
*/
JitsiLocalTrack.prototype.stop = function () {
if(!this.stream)
return;
this.rtc.room.removeStream(this.stream);
this.stream.stop();
this.detach();
@ -95,4 +113,30 @@ JitsiLocalTrack.prototype.start = function() {
this.rtc.room.addStream(this.stream, function () {});
}
/**
 * Checks whether this local track is currently muted.
 * @returns {boolean} <tt>true</tt> if the stream is muted and
 * <tt>false</tt> otherwise.
 */
JitsiLocalTrack.prototype.isMuted = function () {
    // Without a backing MediaStream the track cannot produce media,
    // so report it as muted.
    if (!this.stream)
        return true;
    var isAudio = this.type === JitsiTrack.AUDIO;
    // An ended video stream is also considered muted.
    if (!isAudio && this.stream.ended)
        return true;
    var mediaTracks = isAudio
        ? this.stream.getAudioTracks()
        : this.stream.getVideoTracks();
    // The track counts as unmuted as soon as a single underlying
    // MediaStreamTrack is still enabled.
    for (var j = 0; j < mediaTracks.length; j++) {
        if (mediaTracks[j].enabled)
            return false;
    }
    return true;
};
module.exports = JitsiLocalTrack;

View File

@ -3,9 +3,14 @@ var StreamEventTypes = require("../../service/RTC/StreamEventTypes");
/**
* Represents a single media track (either audio or video).
* @param RTC the rtc instance.
* @param data object with the stream and some details about it (participant id, video type, etc.)
* @param sid sid for the Media Stream
* @param ssrc ssrc for the Media Stream
* @param eventEmitter the event emitter
* @constructor
*/
function JitsiRemoteTrack(RTC, data, sid, ssrc, browser, eventEmitter) {
function JitsiRemoteTrack(RTC, data, sid, ssrc, eventEmitter) {
JitsiTrack.call(this, RTC, data.stream);
this.rtc = RTC;
this.sid = sid;
@ -25,13 +30,27 @@ function JitsiRemoteTrack(RTC, data, sid, ssrc, browser, eventEmitter) {
JitsiRemoteTrack.prototype = Object.create(JitsiTrack.prototype);
JitsiRemoteTrack.prototype.constructor = JitsiRemoteTrack;
/**
 * Sets the current muted status and fires an event for the change.
 * @param value the new muted status.
 */
JitsiRemoteTrack.prototype.setMute = function (value) {
    // Mirror the flag on the raw MediaStream so code holding the stream
    // directly observes the same muted state.
    this.stream.muted = value;
    this.muted = value;
    this.eventEmitter.emit(StreamEventTypes.TRACK_MUTE_CHANGED, this);
};
/**
 * Returns the current muted status of the track.
 * @returns {boolean} <tt>true</tt> if the track is muted and
 * <tt>false</tt> otherwise.
 */
JitsiRemoteTrack.prototype.isMuted = function () {
    return this.muted;
};
/**
* Returns the participant id which owns the track.
* @returns {string} the id of the participant.
*/
JitsiRemoteTrack.prototype.getParitcipantId = function() {
return Strophe.getResourceFromJid(this.peerjid);

View File

@ -24,6 +24,11 @@ function implementOnEndedHandling(stream) {
*/
function JitsiTrack(RTC, stream)
{
/**
* Array with the HTML elements that are displaying the streams.
* @type {Array}
*/
this.containers = [];
this.rtc = RTC;
this.stream = stream;
this.type = (this.stream.getVideoTracks().length > 0)?
@ -84,18 +89,33 @@ JitsiTrack.prototype.unmute = function () {
/**
 * Attaches the MediaStream of this track to an HTML container and
 * registers the container so the track can later update or detach it.
 * @param container the HTML container
 */
JitsiTrack.prototype.attach = function (container) {
    RTC.attachMediaStream(container, this.stream);
    this.containers.push(container);
};
/**
 * Removes the track from the passed HTML container.
 * @param container the HTML container. If <tt>null</tt> all containers are
 * removed.
 */
JitsiTrack.prototype.detach = function (container) {
    // Iterate backwards: splice() shifts the remaining elements, so a
    // forward loop would skip the entry that follows a removed one.
    for (var i = this.containers.length - 1; i >= 0; i--) {
        if (container) {
            // Drop the matching container from the bookkeeping list.
            if (this.containers[i].is(container))
                this.containers.splice(i, 1);
        } else {
            // No container given: clear the rendered <video> element from
            // every registered container (entries appear to be
            // jQuery-wrapped — TODO confirm with attach() callers).
            this.containers[i].find(">video").remove();
        }
    }
    if (container)
        $(container).find(">video").remove();
};
/**
@ -103,8 +123,6 @@ JitsiTrack.prototype.detach = function (container) {
* NOTE: Works for local tracks only.
*/
JitsiTrack.prototype.stop = function () {
this.detach();
}

View File

@ -2,6 +2,7 @@
var EventEmitter = require("events");
var RTCBrowserType = require("./RTCBrowserType");
var RTCUtils = require("./RTCUtils.js");
var JitsiTrack = require("./JitsiTrack");
var JitsiLocalTrack = require("./JitsiLocalTrack.js");
var DataChannels = require("./DataChannels");
var JitsiRemoteTrack = require("./JitsiRemoteTrack.js");
@ -61,12 +62,28 @@ function RTC(room, options) {
function (stream, isUsingScreenStream, callback) {
self.changeLocalVideo(stream, isUsingScreenStream, callback);
}, DesktopSharingEventTypes.NEW_STREAM_CREATED);
room.addPresenceListener("videomuted", function (values, from) {
if(self.remoteStreams[from])
self.remoteStreams[from][JitsiTrack.VIDEO].setMute(values.value == "true");
});
room.addPresenceListener("audiomuted", function (values, from) {
if(self.remoteStreams[from])
self.remoteStreams[from][JitsiTrack.AUDIO].setMute(values.value == "true");
});
}
/**
 * Creates the local MediaStreams.
 * @param options object for options (NOTE: currently only list of devices
 * and resolution are supported)
 * @param dontCreateJitsiTrack if <tt>true</tt> objects with the following
 * structure {stream: the Media Stream, type: "audio" or "video",
 * isMuted: true/false, videoType: "camera" or "desktop"} will be returned
 * through the Promise, otherwise JitsiTrack objects will be returned.
 * @returns {*} Promise object that will receive the new JitsiTracks
 */
RTC.prototype.obtainAudioAndVideoPermissions = function (options, dontCreateJitsiTrack) {
    return RTCUtils.obtainAudioAndVideoPermissions(this,
        options.devices, getMediaStreamUsage(), options.resolution,
        dontCreateJitsiTrack);
};
RTC.prototype.onIncommingCall = function(event) {
@ -243,6 +260,13 @@ RTC.prototype.switchVideoStreams = function (new_stream) {
this.localStreams.push(this.localVideo);
};
/**
* Creates <tt>JitsiTrack</tt> instance and replaces it with the local video.
* The method also handles the sdp changes.
* @param stream the new MediaStream received by the browser.
* @param isUsingScreenStream <tt>true</tt> if the stream is for desktop stream.
* @param callback - function that will be called after the operation is completed.
*/
RTC.prototype.changeLocalVideo = function (stream, isUsingScreenStream, callback) {
var oldStream = this.localVideo.getOriginalStream();
var type = (isUsingScreenStream ? "screen" : "camera");
@ -271,6 +295,13 @@ RTC.prototype.changeLocalVideo = function (stream, isUsingScreenStream, callback
this.room.switchStreams(videoStream, oldStream,localCallback);
};
/**
* Creates <tt>JitsiTrack</tt> instance and replaces it with the local audio.
* The method also handles the sdp changes.
* @param stream the new MediaStream received by the browser.
* @param callback - function that will be called after the operation is completed.
*/
RTC.prototype.changeLocalAudio = function (stream, callback) {
var oldStream = this.localAudio.getOriginalStream();
var newStream = RTCUtils.createStream(stream);

View File

@ -343,16 +343,24 @@ var RTCUtils = {
},
/**
* We ask for audio and video combined stream in order to get permissions and
* not to ask twice.
* Creates the local MediaStreams.
* @param RTC the rtc service.
* @param devices the devices that will be requested
* @param usageOptions object with devices that should be requested.
* @param resolution resolution constraints
* @param dontCreateJitsiTrack if <tt>true</tt> objects with the following structure {stream: the Media Stream,
* type: "audio" or "video", isMuted: true/false, videoType: "camera" or "desktop"}
* will be returned through the Promise, otherwise JitsiTrack objects will be returned.
* @returns {*} Promise object that will receive the new JitsiTracks
*/
obtainAudioAndVideoPermissions: function (RTC, devices, usageOptions, resolution) {
obtainAudioAndVideoPermissions: function (RTC, devices, usageOptions, resolution, dontCreateJitsiTracks) {
var self = this;
// Get AV
return new Promise(function (resolve, reject) {
var successCallback = function (stream) {
resolve(self.successCallback(RTC , stream, usageOptions));
var streams = self.successCallback(RTC , stream, usageOptions);
resolve(dontCreateJitsiTracks? streams: RTC.createLocalStreams(streams));
};
if (!devices)
@ -398,7 +406,7 @@ var RTCUtils = {
function (error, resolution) {
console.error(
'failed to obtain video stream - stop', error);
self.errorCallback(error, resolve, RTC, resolution);
self.errorCallback(error, resolve, RTC, resolution, dontCreateJitsiTracks);
},
config.resolution || '360');
};
@ -413,7 +421,7 @@ var RTCUtils = {
function (error) {
console.error(
'failed to obtain audio stream - stop', error);
self.errorCallback(error, resolve, RTC);
self.errorCallback(error, resolve, RTC, null, dontCreateJitsiTracks);
}
);
};
@ -430,13 +438,20 @@ var RTCUtils = {
successCallback(stream);
},
function (error, resolution) {
self.errorCallback(error, resolve, RTC, resolution);
self.errorCallback(error, resolve, RTC, resolution, dontCreateJitsiTracks);
},
resolution || '360');
}
}.bind(this));
},
/**
* Successful callback called from GUM.
* @param RTC the rtc service
* @param stream the new MediaStream
* @param usageOptions the list of the devices that should be queried.
* @returns {*}
*/
successCallback: function (RTC, stream, usageOptions) {
// If this is FF or IE, the stream parameter is *not* a MediaStream object,
// it's an object with two properties: audioStream, videoStream.
@ -446,7 +461,17 @@ var RTCUtils = {
return this.handleLocalStream(RTC, stream, usageOptions);
},
errorCallback: function (error, resolve, RTC, currentResolution) {
/**
* Error callback called from GUM. Retries the GUM call with different resolutions.
* @param error the error
* @param resolve the resolve function that will be called on success.
* @param RTC the rtc service
* @param currentResolution the last resolution used for GUM.
* @param dontCreateJitsiTracks if <tt>true</tt> objects with the following structure {stream: the Media Stream,
* type: "audio" or "video", isMuted: true/false, videoType: "camera" or "desktop"}
* will be returned through the Promise, otherwise JitsiTrack objects will be returned.
*/
errorCallback: function (error, resolve, RTC, currentResolution, dontCreateJitsiTracks) {
var self = this;
console.error('failed to obtain audio/video stream - trying audio only', error);
var resolution = getPreviousResolution(currentResolution);
@ -458,9 +483,10 @@ var RTCUtils = {
&& resolution != null) {
self.getUserMediaWithConstraints(RTC, ['audio', 'video'],
function (stream) {
resolve(self.successCallback(RTC, stream));
var streams = self.successCallback(RTC, stream);
resolve(dontCreateJitsiTracks? streams: RTC.createLocalStreams(streams));
}, function (error, resolution) {
return self.errorCallback(error, resolve, RTC, resolution);
return self.errorCallback(error, resolve, RTC, resolution, dontCreateJitsiTracks);
}, resolution);
}
else {
@ -468,18 +494,28 @@ var RTCUtils = {
RTC,
['audio'],
function (stream) {
resolve(self.successCallback(RTC, stream));
var streams = self.successCallback(RTC, stream);
resolve(dontCreateJitsiTracks? streams: RTC.createLocalStreams(streams));
},
function (error) {
console.error('failed to obtain audio/video stream - stop',
error);
resolve(self.successCallback(RTC, null));
var streams = self.successCallback(RTC, null);
resolve(dontCreateJitsiTracks? streams: RTC.createLocalStreams(streams));
}
);
}
},
/**
* Handles the newly created Media Streams.
* @param service the rtc service
* @param stream the new Media Streams
* @param usageOptions the list of the devices that should be queried.
* @returns {*[]} array of objects describing the new Media Streams.
*/
handleLocalStream: function (service, stream, usageOptions) {
var audioStream, videoStream;
// If this is FF, the stream parameter is *not* a MediaStream object, it's
// an object with two properties: audioStream, videoStream.
if (window.webkitMediaStream) {
@ -518,11 +554,10 @@ var RTCUtils = {
var audioGUM = (!usageOptions || usageOptions.audio !== false),
videoGUM = (!usageOptions || usageOptions.video !== false);
return service.createLocalStreams(
[
return [
{stream: audioStream, type: "audio", isMuted: audioMuted, isGUMStream: audioGUM, videoType: null},
{stream: videoStream, type: "video", isMuted: videoMuted, isGUMStream: videoGUM, videoType: "camera"}
]);
];
},
createStream: function (stream, isVideo) {
@ -537,8 +572,7 @@ var RTCUtils = {
}
}
}
else {
} else {
// FIXME: this is duplicated with 'handleLocalStream' !!!
if (stream) {
newStream = stream;

View File

@ -123,15 +123,17 @@ var statistics = {
APP.xmpp.addListener(XMPPEvents.PEERCONNECTION_READY, function (session) {
CallStats.init(session);
});
APP.RTC.addListener(RTCEvents.AUDIO_MUTE, function (mute) {
CallStats.sendMuteEvent(mute, "audio");
});
//FIXME: that event is changed to TRACK_MUTE_CHANGED
// APP.RTC.addListener(RTCEvents.AUDIO_MUTE, function (mute) {
// CallStats.sendMuteEvent(mute, "audio");
// });
APP.xmpp.addListener(XMPPEvents.CONFERENCE_SETUP_FAILED, function () {
CallStats.sendSetupFailedEvent();
});
APP.RTC.addListener(RTCEvents.VIDEO_MUTE, function (mute) {
CallStats.sendMuteEvent(mute, "video");
});
//FIXME: that event is changed to TRACK_MUTE_CHANGED
// APP.RTC.addListener(RTCEvents.VIDEO_MUTE, function (mute) {
// CallStats.sendMuteEvent(mute, "video");
// });
}
};

View File

@ -526,10 +526,11 @@ ChatRoom.prototype.setAudioMute = function (mute, callback) {
// }
return this.sendAudioInfoPresence(mute, callback);;
return this.sendAudioInfoPresence(mute, callback);
};
ChatRoom.prototype.addAudioInfoToPresence = function (mute) {
this.removeFromPresence("audiomuted");
this.addToPresence("audiomuted",
{attributes:
{"audions": "http://jitsi.org/jitmeet/audio"},
@ -546,6 +547,7 @@ ChatRoom.prototype.sendAudioInfoPresence = function(mute, callback) {
};
ChatRoom.prototype.addVideoInfoToPresence = function (mute) {
this.removeFromPresence("videomuted");
this.addToPresence("videomuted",
{attributes:
{"videons": "http://jitsi.org/jitmeet/video"},

View File

@ -4,9 +4,7 @@ var RTCEvents = {
LASTN_CHANGED: "rtc.lastn_changed",
DOMINANTSPEAKER_CHANGED: "rtc.dominantspeaker_changed",
LASTN_ENDPOINT_CHANGED: "rtc.lastn_endpoint_changed",
AVAILABLE_DEVICES_CHANGED: "rtc.available_devices_changed",
AUDIO_MUTE: "rtc.audio_mute",
VIDEO_MUTE: "rtc.video_mute"
AVAILABLE_DEVICES_CHANGED: "rtc.available_devices_changed"
};
module.exports = RTCEvents;

View File

@ -7,7 +7,8 @@ var StreamEventTypes = {
EVENT_TYPE_REMOTE_CREATED: "stream.remote_created",
EVENT_TYPE_REMOTE_ENDED: "stream.remote_ended"
EVENT_TYPE_REMOTE_ENDED: "stream.remote_ended",
TRACK_MUTE_CHANGED: "rtc.track_mute_changed"
};
module.exports = StreamEventTypes;