fixes after rebase

isymchych 2015-12-30 14:14:56 +02:00
parent b375b14696
commit 6ee6b6e9e5
6 changed files with 152 additions and 95 deletions

app.js

@ -36,15 +36,12 @@ const ConnectionErrors = JitsiMeetJS.errors.connection;
const ConferenceEvents = JitsiMeetJS.events.conference;
const ConferenceErrors = JitsiMeetJS.errors.conference;
const TrackEvents = JitsiMeetJS.events.track;
const TrackErrors = JitsiMeetJS.errors.track;
let localVideo, localAudio;
const Commands = {
CONNECTION_QUALITY: "connectionQuality",
EMAIL: "email",
VIDEO_TYPE: "videoType"
VIDEO_TYPE: "videoType",
ETHERPAD: "etherpad",
PREZI: "prezi",
STOP_PREZI: "stop-prezi"
@ -88,6 +85,18 @@ const APP = {
statistics,
settings,
createLocalTracks (...devices) {
return JitsiMeetJS.createLocalTracks({
// copy array to avoid mutations inside library
devices: devices.slice(0),
resolution: config.resolution
}).catch(function (err) {
console.error('failed to create local tracks', ...devices, err);
APP.statistics.onGetUserMediaFailed(err);
return [];
});
},
init () {
let roomName = buildRoomName();
this.conference = {
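Aside: a minimal usage sketch for the shared helper above, relying on its catch branch resolving to an empty array when getUserMedia fails; all names are taken from this file.
APP.createLocalTracks('audio', 'video').then(function (tracks) {
    if (!tracks.length) {
        // getUserMedia failed; the helper already reported it to statistics
        return;
    }
    tracks.forEach(function (track) {
        console.log('created local %s track', track.getType());
    });
});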
@ -137,8 +146,10 @@ function initConference(localTracks, connection) {
const addTrack = (track) => {
room.addTrack(track);
if(track.getType() === "audio")
if (track.isAudioTrack()) {
return;
}
room.removeCommand(Commands.VIDEO_TYPE);
room.sendCommand(Commands.VIDEO_TYPE, {
value: track.videoType,
@ -161,40 +172,6 @@ function initConference(localTracks, connection) {
APP.conference.listMembersIds = function () {
return room.getParticipants().map(p => p.getId());
};
/**
* Creates video track (desktop or camera).
* @param type "camera" or "video"
* @param endedHandler onended function
* @returns Promise
*/
APP.conference.createVideoTrack = (type, endedHandler) => {
return JitsiMeetJS.createLocalTracks({
devices: [type], resolution: config.resolution
}).then((tracks) => {
tracks[0].on(TrackEvents.TRACK_STOPPED, endedHandler);
return tracks;
});
};
APP.conference.changeLocalVideo = (track, callback) => {
const localCallback = (newTrack) => {
if (newTrack.isLocal() && newTrack === localVideo) {
if(localVideo.isMuted() &&
localVideo.videoType !== track.videoType) {
localVideo.mute();
}
callback();
room.off(ConferenceEvents.TRACK_ADDED, localCallback);
}
};
room.on(ConferenceEvents.TRACK_ADDED, localCallback);
localVideo.stop();
localVideo = track;
addTrack(track);
APP.UI.addLocalStream(track);
};
APP.conference.sipGatewayEnabled = () => {
return room.isSIPCallingSupported();
@ -205,7 +182,7 @@ function initConference(localTracks, connection) {
return APP.settings.getDisplayName();
}
var participant = room.getParticipantById(id);
let participant = room.getParticipantById(id);
if (participant && participant.getDisplayName()) {
return participant.getDisplayName();
}
@ -214,10 +191,10 @@ function initConference(localTracks, connection) {
// add local streams when joined to the conference
room.on(ConferenceEvents.CONFERENCE_JOINED, function () {
localTracks.forEach(function (track) {
if(track.getType() === "audio") {
if(track.isAudioTrack()) {
localAudio = track;
}
else if (track.getType() === "video") {
else if (track.isVideoTrack()) {
localVideo = track;
}
addTrack(track);
@ -246,7 +223,7 @@ function initConference(localTracks, connection) {
APP.conference.isModerator = room.isModerator();
APP.UI.updateLocalRole(room.isModerator());
} else {
var user = room.getParticipantById(id);
let user = room.getParticipantById(id);
if (user) {
APP.UI.updateUserRole(user);
}
@ -401,8 +378,8 @@ function initConference(localTracks, connection) {
});
});
room.addCommandListener(Commands.VIDEO_TYPE, (data, from) => {
APP.UI.onPeerVideoTypeChanged(from, data.value);
room.addCommandListener(Commands.VIDEO_TYPE, ({value}, from) => {
APP.UI.onPeerVideoTypeChanged(from, value);
});
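Aside: a sketch of the command round trip the destructured handler relies on, with a simplified payload; the { value: ... } shape matches the sendCommand call earlier in this file, and 'desktop' is just an example value.
room.sendCommand(Commands.VIDEO_TYPE, { value: 'desktop' });
// received on the other participants' side as:
room.addCommandListener(Commands.VIDEO_TYPE, ({value}, from) => {
    APP.UI.onPeerVideoTypeChanged(from, value); // value === 'desktop'
});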
@ -416,7 +393,7 @@ function initConference(localTracks, connection) {
});
}
var email = APP.settings.getEmail();
let email = APP.settings.getEmail();
email && sendEmail(email);
APP.UI.addListener(UIEvents.EMAIL_CHANGED, function (email) {
APP.settings.setEmail(email);
@ -534,6 +511,29 @@ function initConference(localTracks, connection) {
APP.UI.updateDTMFSupport(isDTMFSupported);
});
APP.desktopsharing.addListener(
DesktopSharingEventTypes.NEW_STREAM_CREATED,
(track, callback) => {
const localCallback = (newTrack) => {
if (newTrack.isLocal() && newTrack === localVideo) {
if(localVideo.isMuted() &&
localVideo.videoType !== track.videoType) {
localVideo.mute();
}
callback();
room.off(ConferenceEvents.TRACK_ADDED, localCallback);
}
};
room.on(ConferenceEvents.TRACK_ADDED, localCallback);
localVideo.stop();
localVideo = track;
addTrack(track);
APP.UI.addLocalStream(track);
}
);
$(window).bind('beforeunload', function () {
room.leave();
});
@ -603,16 +603,6 @@ function initConference(localTracks, connection) {
});
}
function createLocalTracks () {
return JitsiMeetJS.createLocalTracks({
devices: ['audio', 'video']
}).catch(function (err) {
console.error('failed to create local tracks', err);
APP.statistics.onGetUserMediaFailed(err);
return [];
});
}
function connect() {
return openConnection({retry: true}).catch(function (err) {
if (err === ConnectionErrors.PASSWORD_REQUIRED) {
@ -630,7 +620,10 @@ function init() {
JitsiMeetJS.setLogLevel(JitsiMeetJS.logLevels.TRACE);
JitsiMeetJS.init(config).then(function () {
return Promise.all([createLocalTracks(), connect()]);
return Promise.all([
APP.createLocalTracks('audio', 'video'),
connect()
]);
}).then(function ([tracks, connection]) {
console.log('initialized with %s local tracks', tracks.length);
return initConference(tracks, connection);
@ -642,12 +635,6 @@ function init() {
APP.settings.setLanguage(language);
});
APP.desktopsharing.addListener(
DesktopSharingEventTypes.NEW_STREAM_CREATED,
(stream, callback) => {
APP.conference.changeLocalVideo(stream,
callback);
});
APP.desktopsharing.init(JitsiMeetJS.isDesktopSharingEnabled());
APP.statistics.start();
APP.connectionquality.init();


@ -579,6 +579,15 @@ JitsiConference.prototype.toggleRecording = function (token, followEntity) {
reject(new Error("The conference is not created yet!"))});
}
/**
* Returns true if the SIP calls are supported and false otherwise
*/
JitsiConference.prototype.isSIPCallingSupported = function () {
if(this.room)
return this.room.isSIPCallingSupported();
return false;
}
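Aside: a hedged sketch of a call site for the new check; the conference variable and the phone number are hypothetical, and dial() is the method documented just below.
if (conference.isSIPCallingSupported()) {
    conference.dial('+15551234567'); // hypothetical number
} else {
    console.log('SIP calling (Jigasi) is not available for this deployment');
}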
/**
* Dials a number.
* @param number the number
@ -1059,6 +1068,13 @@ var LibJitsiMeet = {
init: function (options) {
return RTC.init(options || {});
},
/**
* Returns whether the desktop sharing is enabled or not.
* @returns {boolean}
*/
isDesktopSharingEnabled: function () {
return RTC.isDesktopSharingEnabled();
},
setLogLevel: function (level) {
Logger.setLogLevel(level);
},
@ -1799,6 +1815,7 @@ module.exports = JitsiRemoteTrack;
var RTCBrowserType = require("./RTCBrowserType");
var JitsiTrackEvents = require("../../JitsiTrackEvents");
var EventEmitter = require("events");
var RTC = require("./RTCUtils");
/**
* This implements 'onended' callback normally fired by WebRTC after the stream
@ -1989,13 +2006,21 @@ JitsiTrack.prototype.isScreenSharing = function(){
* Returns id of the track.
* @returns {string} id of the track or null if this is fake track.
*/
JitsiTrack.prototype.getId = function () {
JitsiTrack.prototype._getId = function () {
var tracks = this.stream.getTracks();
if(!tracks || tracks.length === 0)
return null;
return tracks[0].id;
};
/**
* Returns id of the track.
* @returns {string} id of the track or null if this is fake track.
*/
JitsiTrack.prototype.getId = function () {
return RTC.getStreamID(this.stream);
};
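Aside: the split above keeps two ids around; a sketch of the difference, assuming RTC.getStreamID returns the MediaStream-level id while _getId keeps returning the first MediaStreamTrack id.
track.getId();  // id of the underlying MediaStream (via RTC.getStreamID)
track._getId(); // id of stream.getTracks()[0]; still used by SDP.toJingle below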
/**
* Checks whether the MediaStream is avtive/not ended.
* When there is no check for active we don't have information and so
@ -2247,6 +2272,14 @@ RTC.stopMediaStream = function (mediaStream) {
RTCUtils.stopMediaStream(mediaStream);
};
/**
* Returns whether the desktop sharing is enabled or not.
* @returns {boolean}
*/
RTC.isDesktopSharingEnabled = function () {
return RTCUtils.isDesktopSharingEnabled();
}
RTC.prototype.getVideoElementName = function () {
return RTCBrowserType.isTemasysPluginUsed() ? 'object' : 'video';
};
@ -3105,7 +3138,7 @@ var RTCUtils = {
var deviceGUM = {
"audio": GUM.bind(self, ["audio"]),
"video": GUM.bind(self, ["video"]),
"desktop": screenObtainer.obtainStream
"desktop": screenObtainer.obtainStream.bind(screenObtainer)
};
// With FF/IE we can't split the stream into audio and video because FF
// doesn't support media stream constructors. So, we need to get the
@ -3211,6 +3244,13 @@ var RTCUtils = {
if (mediaStream.stop) {
mediaStream.stop();
}
},
/**
* Returns whether the desktop sharing is enabled or not.
* @returns {boolean}
*/
isDesktopSharingEnabled: function () {
return screenObtainer.isSupported();
}
};
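Aside: the new capability check is threaded through the layers as JitsiMeetJS.isDesktopSharingEnabled() -> RTC -> RTCUtils -> screenObtainer.isSupported(), so application code only touches the public entry point, e.g. the call added in app.js:
APP.desktopsharing.init(JitsiMeetJS.isDesktopSharingEnabled());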
@ -6344,7 +6384,7 @@ ChatRoom.prototype.onPresence = function (pres) {
ChatRoom.prototype.processNode = function (node, from) {
if(this.presHandlers[node.tagName])
this.presHandlers[node.tagName](node, from);
this.presHandlers[node.tagName](node, Strophe.getResourceFromJid(from));
};
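Aside: presence handlers now receive the MUC resource (which doubles as the participant id) instead of the full occupant JID; a sketch with a hypothetical room JID:
Strophe.getResourceFromJid('room@conference.example.com/abcd1234'); // "abcd1234"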
ChatRoom.prototype.sendMessage = function (body, nickname) {
@ -6722,6 +6762,15 @@ ChatRoom.prototype.toggleRecording = function (token, followEntity) {
reject(new Error("The conference is not created yet!"))});
}
/**
* Returns true if the SIP calls are supported and false otherwise
*/
ChatRoom.prototype.isSIPCallingSupported = function () {
if(this.moderator)
return this.moderator.isSipGatewayEnabled();
return false;
}
/**
* Dials a number.
* @param number the number
@ -9088,11 +9137,11 @@ SDP.prototype.toJingle = function (elem, thecreator) {
var msid = null;
if(mline.media == "audio")
{
msid = APP.RTC.localAudio.getId();
msid = APP.RTC.localAudio._getId();
}
else
{
msid = APP.RTC.localVideo.getId();
msid = APP.RTC.localVideo._getId();
}
if(msid != null)
{
@ -9488,7 +9537,6 @@ SDP.prototype.jingle2media = function (content) {
module.exports = SDP;
}).call(this,"/modules/xmpp/SDP.js")
},{"./SDPUtil":32,"jitsi-meet-logger":48}],31:[function(require,module,exports){
var SDPUtil = require("./SDPUtil");
@ -10521,7 +10569,7 @@ function Moderator(roomName, xmpp, emitter) {
// Sip gateway can be enabled by configuring Jigasi host in config.js or
// it will be enabled automatically if focus detects the component through
// service discovery.
this.sipGatewayEnabled =
this.sipGatewayEnabled = this.xmppService.options.hosts &&
this.xmppService.options.hosts.call_control !== undefined;
this.eventEmitter = emitter;
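Aside: a sketch of the config.js shape the guarded check above expects; the host values are hypothetical.
var config = {
    hosts: {
        domain: 'example.com',
        muc: 'conference.example.com',
        // defining call_control marks the SIP gateway (Jigasi) as enabled
        call_control: 'callcontrol.example.com'
    }
    // ...
};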


@ -396,8 +396,8 @@ UI.removeUser = function (id, displayName) {
// VideoLayout.setPresenceStatus(Strophe.getResourceFromJid(jid), info.status);
// }
UI.onPeerVideoTypeChanged = (resourceJid, newVideoType) => {
VideoLayout.onVideoTypeChanged(resourceJid, newVideoType);
UI.onPeerVideoTypeChanged = (id, newVideoType) => {
VideoLayout.onVideoTypeChanged(id, newVideoType);
};
UI.updateLocalRole = function (isModerator) {


@ -159,6 +159,7 @@ class VideoContainer extends LargeContainer {
constructor (onPlay) {
super();
this.stream = null;
this.videoType = null;
this.$avatar = $('#activeSpeaker');
this.$wrapper = $('#largeVideoWrapper');
@ -180,7 +181,7 @@ class VideoContainer extends LargeContainer {
getVideoSize (containerWidth, containerHeight) {
let { width, height } = this.getStreamSize();
if (this.stream && this.stream.isScreenSharing()) {
if (this.stream && this.isScreenSharing()) {
return getDesktopVideoSize(width, height, containerWidth, containerHeight);
} else {
return getCameraVideoSize(width, height, containerWidth, containerHeight);
@ -188,7 +189,7 @@ class VideoContainer extends LargeContainer {
}
getVideoPosition (width, height, containerWidth, containerHeight) {
if (this.stream && this.stream.isScreenSharing()) {
if (this.stream && this.isScreenSharing()) {
return getDesktopVideoPosition(width, height, containerWidth, containerHeight);
} else {
return getCameraVideoPosition(width, height, containerWidth, containerHeight);
@ -218,17 +219,22 @@ class VideoContainer extends LargeContainer {
});
}
setStream (stream) {
setStream (stream, videoType) {
this.stream = stream;
this.videoType = videoType;
stream.attach(this.$video);
let flipX = stream.isLocal() && !stream.isScreenSharing();
let flipX = stream.isLocal() && !this.isScreenSharing();
this.$video.css({
transform: flipX ? 'scaleX(-1)' : 'none'
});
}
isScreenSharing () {
return this.videoType === 'desktop';
}
showAvatar (show) {
this.$avatar.css("visibility", show ? "visible" : "hidden");
}
@ -332,7 +338,7 @@ export default class LargeVideoManager {
return this.videoContainer.id;
}
updateLargeVideo (stream) {
updateLargeVideo (stream, videoType) {
let id = getStreamId(stream);
let container = this.getContainer(this.state);
@ -340,7 +346,7 @@ export default class LargeVideoManager {
container.hide().then(() => {
console.info("hover in %s", id);
this.state = VideoContainerType;
this.videoContainer.setStream(stream);
this.videoContainer.setStream(stream, videoType);
this.videoContainer.show();
});
}
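Aside: a sketch of the new flow using names from these hunks; the signalled videoType (presumably 'camera' or 'desktop') is passed down so the container no longer inspects the stream itself.
// inside VideoLayout, as in the VideoLayout hunk below:
let videoType = this.getRemoteVideoType(id);
largeVideo.updateLargeVideo(smallVideo.stream, videoType);
// and in VideoContainer, isScreenSharing() reduces to: this.videoType === 'desktop'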


@ -153,7 +153,7 @@ var VideoLayout = {
localVideoThumbnail.createConnectionIndicator();
let localId = APP.conference.localId;
this.onVideoTypeChanged(localId, stream.getType());
this.onVideoTypeChanged(localId, stream.videoType);
let {thumbWidth, thumbHeight} = this.calculateThumbnailSize();
AudioLevels.updateAudioLevelCanvas(null, thumbWidth, thumbHeight);
@ -218,7 +218,7 @@ var VideoLayout = {
electLastVisibleVideo () {
// pick the last visible video in the row
// if nobody else is left, this picks the local video
let thumbs = BottomToolbar.getThumbs(true).filter('id!="mixedstream"');
let thumbs = BottomToolbar.getThumbs(true).filter('[id!="mixedstream"]');
let lastVisible = thumbs.filter(':visible:last');
if (lastVisible.length) {
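Aside: the bracketed form above is jQuery's attribute-not-equal selector; the unbracketed string it replaces is not a valid selector. A minimal illustration with hypothetical elements:
var $thumbs = $('<span id="a"></span><span id="mixedstream"></span>');
$thumbs.filter('[id!="mixedstream"]').attr('id'); // "a"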
@ -973,7 +973,8 @@ var VideoLayout = {
let smallVideo = this.getSmallVideo(id);
largeVideo.updateLargeVideo(smallVideo.stream);
let videoType = this.getRemoteVideoType(id);
largeVideo.updateLargeVideo(smallVideo.stream, videoType);
smallVideo.enableDominantSpeaker(true);
} else if (currentId) {


@ -1,8 +1,10 @@
/* global APP, config */
/* global APP, JitsiMeetJS, config */
var EventEmitter = require("events");
var DesktopSharingEventTypes
= require("../../service/desktopsharing/DesktopSharingEventTypes");
const TrackEvents = JitsiMeetJS.events.track;
/**
* Indicates that desktop stream is currently in use (for toggle purpose).
* @type {boolean}
@ -35,15 +37,15 @@ function newStreamCreated(track) {
track, streamSwitchDone);
}
function getVideoStreamFailed(error) {
console.error("Failed to obtain the stream to switch to", error);
function getVideoStreamFailed() {
console.error("Failed to obtain the stream to switch to");
switchInProgress = false;
isUsingScreenStream = false;
newStreamCreated(null);
}
function getDesktopStreamFailed(error) {
console.error("Failed to obtain the stream to switch to", error);
function getDesktopStreamFailed() {
console.error("Failed to obtain the stream to switch to");
switchInProgress = false;
}
@ -92,21 +94,34 @@ module.exports = {
return;
}
switchInProgress = true;
let type, handler;
let type;
if (!isUsingScreenStream) {
// Switch to desktop stream
handler = onEndedHandler;
type = "desktop";
} else {
handler = () => {};
type = "video";
}
APP.conference.createVideoTrack(type, handler).then(
(tracks) => {
// We now use screen stream
isUsingScreenStream = type === "desktop";
newStreamCreated(tracks[0]);
}).catch(getDesktopStreamFailed);
APP.createLocalTracks(type).then(function (tracks) {
if (!tracks.length) {
if (type === 'desktop') {
getDesktopStreamFailed();
} else {
getVideoStreamFailed();
}
return;
}
let stream = tracks[0];
// We now use screen stream
isUsingScreenStream = type === "desktop";
if (isUsingScreenStream) {
stream.on(TrackEvents.TRACK_STOPPED, onEndedHandler);
}
newStreamCreated(stream);
});
},
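Aside: a sketch of obtaining a desktop track through the shared APP.createLocalTracks helper and reacting when the user ends the capture, using TrackEvents.TRACK_STOPPED as above.
APP.createLocalTracks('desktop').then(function (tracks) {
    if (!tracks.length) {
        return; // capture failed or was cancelled by the user
    }
    tracks[0].on(TrackEvents.TRACK_STOPPED, function () {
        console.log('screen sharing was stopped from the browser UI');
    });
});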
/*
* Exports the event emitter to allow use by ScreenObtainer. Not for outside