Merge pull request #441 from isymchych/jitsi-meet-new

refactoring of Etherpad and Prezi
This commit is contained in:
hristoterezov 2015-12-30 13:52:30 -06:00
commit 9b7ddec703
56 changed files with 2601 additions and 11098 deletions

191
app.js
View File

@ -17,30 +17,32 @@ import URLProcessor from "./modules/config/URLProcessor";
import RoomnameGenerator from './modules/util/RoomnameGenerator';
import CQEvents from './service/connectionquality/CQEvents';
import UIEvents from './service/UI/UIEvents';
import DSEvents from './service/desktopsharing/DesktopSharingEventTypes';
import UI from "./modules/UI/UI";
import statistics from "./modules/statistics/statistics";
import settings from "./modules/settings/Settings";
import {openConnection} from './modules/connection';
import AuthHandler from './modules/AuthHandler';
import createRoomLocker from './modules/RoomLocker';
const DesktopSharingEventTypes =
require("./service/desktopsharing/DesktopSharingEventTypes");
const ConnectionEvents = JitsiMeetJS.events.connection;
const ConnectionErrors = JitsiMeetJS.errors.connection;
const ConferenceEvents = JitsiMeetJS.events.conference;
const ConferenceErrors = JitsiMeetJS.errors.conference;
const TrackEvents = JitsiMeetJS.events.track;
const TrackErrors = JitsiMeetJS.errors.track;
let localVideo, localAudio;
const Commands = {
CONNECTION_QUALITY: "connectionQuality",
EMAIL: "email",
VIDEO_TYPE: "videoType"
VIDEO_TYPE: "videoType",
ETHERPAD: "etherpad",
PREZI: "prezi",
STOP_PREZI: "stop-prezi"
};
function buildRoomName () {
@ -77,6 +79,22 @@ function buildRoomName () {
const APP = {
UI,
statistics,
settings,
createLocalTracks (...devices) {
return JitsiMeetJS.createLocalTracks({
// copy array to avoid mutations inside library
devices: devices.slice(0),
resolution: config.resolution
}).catch(function (err) {
console.error('failed to create local tracks', ...devices, err);
APP.statistics.onGetUserMediaFailed(err);
return [];
});
},
init () {
let roomName = buildRoomName();
this.conference = {
@ -91,29 +109,28 @@ const APP = {
},
muteAudio (mute) {
APP.UI.eventEmitter.emit(UIEvents.AUDIO_MUTED, mute);
APP.statistics.onAudioMute(mute);
},
toggleAudioMuted () {
this.muteAudio(!this.audioMuted);
},
muteVideo (mute) {
APP.UI.eventEmitter.emit(UIEvents.VIDEO_MUTED, mute);
APP.statistics.onVideoMute(mute);
},
toggleVideoMuted () {
this.muteVideo(!this.videoMuted);
}
};
this.UI = require("./modules/UI/UI");
this.API = require("./modules/API/API");
this.connectionquality =
require("./modules/connectionquality/connectionquality");
this.statistics = require("./modules/statistics/statistics");
this.desktopsharing =
require("./modules/desktopsharing/desktopsharing");
this.keyboardshortcut =
require("./modules/keyboardshortcut/keyboardshortcut");
this.translation = require("./modules/translation/translation");
this.settings = require("./modules/settings/Settings");
this.configFetch = require("./modules/config/HttpConfigFetch");
}
};
@ -123,11 +140,14 @@ function initConference(localTracks, connection) {
openSctp: config.openSctp,
disableAudioLevels: config.disableAudioLevels
});
APP.conference._room = room; // FIXME do not use this
const addTrack = (track) => {
room.addTrack(track);
if(track.getType() === "audio")
if (track.isAudioTrack()) {
return;
}
room.removeCommand(Commands.VIDEO_TYPE);
room.sendCommand(Commands.VIDEO_TYPE, {
value: track.videoType,
@ -150,40 +170,6 @@ function initConference(localTracks, connection) {
APP.conference.listMembersIds = function () {
return room.getParticipants().map(p => p.getId());
};
/**
* Creates video track (desktop or camera).
* @param type "camera" or "video"
* @param endedHandler onended function
* @returns Promise
*/
APP.conference.createVideoTrack = (type, endedHandler) => {
return JitsiMeetJS.createLocalTracks({
devices: [type], resolution: config.resolution
}).then((tracks) => {
tracks[0].on(TrackEvents.TRACK_STOPPED, endedHandler);
return tracks;
});
};
APP.conference.changeLocalVideo = (track, callback) => {
const localCallback = (newTrack) => {
if (newTrack.isLocal() && newTrack === localVideo) {
if(localVideo.isMuted() &&
localVideo.videoType !== track.videoType) {
localVideo.mute();
}
callback();
room.off(ConferenceEvents.TRACK_ADDED, localCallback);
}
};
room.on(ConferenceEvents.TRACK_ADDED, localCallback);
localVideo.stop();
localVideo = track;
addTrack(track);
APP.UI.addLocalStream(track);
};
APP.conference.sipGatewayEnabled = () => {
return room.isSIPCallingSupported();
@ -194,7 +180,7 @@ function initConference(localTracks, connection) {
return APP.settings.getDisplayName();
}
var participant = room.getParticipantById(id);
let participant = room.getParticipantById(id);
if (participant && participant.getDisplayName()) {
return participant.getDisplayName();
}
@ -203,10 +189,10 @@ function initConference(localTracks, connection) {
// add local streams when joined to the conference
room.on(ConferenceEvents.CONFERENCE_JOINED, function () {
localTracks.forEach(function (track) {
if(track.getType() === "audio") {
if(track.isAudioTrack()) {
localAudio = track;
}
else if (track.getType() === "video") {
else if (track.isVideoTrack()) {
localVideo = track;
}
addTrack(track);
@ -218,16 +204,14 @@ function initConference(localTracks, connection) {
room.on(ConferenceEvents.USER_JOINED, function (id, user) {
if (APP.conference.isLocalId(id)) {
return;
}
console.error('USER %s connnected', id);
console.error('USER %s connnected', id, user);
// FIXME email???
APP.UI.addUser(id, user.getDisplayName());
});
room.on(ConferenceEvents.USER_LEFT, function (id, user) {
console.error('USER LEFT', id);
console.error('USER %s LEFT', id, user);
APP.UI.removeUser(id, user.getDisplayName());
APP.UI.stopPrezi(id);
});
@ -237,7 +221,7 @@ function initConference(localTracks, connection) {
APP.conference.isModerator = room.isModerator();
APP.UI.updateLocalRole(room.isModerator());
} else {
var user = room.getParticipantById(id);
let user = room.getParticipantById(id);
if (user) {
APP.UI.updateUserRole(user);
}
@ -348,9 +332,12 @@ function initConference(localTracks, connection) {
room.removeCommand(Commands.CONNECTION_QUALITY);
});
// listen to remote stats
room.addCommandListener(Commands.CONNECTION_QUALITY, function (data) {
APP.connectionquality.updateRemoteStats(data.attributes.id, data.value);
});
room.addCommandListener(
Commands.CONNECTION_QUALITY,
function ({value, attributes}) {
APP.connectionquality.updateRemoteStats(attributes.id, value);
}
);
APP.connectionquality.addListener(
CQEvents.REMOTESTATS_UPDATED,
function (id, percent, stats) {
@ -358,8 +345,39 @@ function initConference(localTracks, connection) {
}
);
room.addCommandListener(Commands.VIDEO_TYPE, (data, from) => {
APP.UI.onPeerVideoTypeChanged(from, data.value);
room.addCommandListener(Commands.ETHERPAD, function ({value}) {
APP.UI.initEtherpad(value);
});
room.addCommandListener(Commands.PREZI, function ({value, attributes}) {
APP.UI.showPrezi(attributes.id, value, attributes.slide);
});
room.addCommandListener(Commands.STOP_PREZI, function ({attributes}) {
APP.UI.stopPrezi(attributes.id);
});
APP.UI.addListener(UIEvents.SHARE_PREZI, function (url, slide) {
console.log('Sharing Prezi %s slide %s', url, slide);
room.removeCommand(Commands.PREZI);
room.sendCommand(Commands.PREZI, {
value: url,
attributes: {
id: room.myUserId(),
slide
}
});
});
APP.UI.addListener(UIEvents.STOP_SHARING_PREZI, function () {
room.removeCommand(Commands.PREZI);
room.sendCommandOnce(Commands.STOP_PREZI, {
attributes: {
id: room.myUserId()
}
});
});
room.addCommandListener(Commands.VIDEO_TYPE, ({value}, from) => {
APP.UI.onPeerVideoTypeChanged(from, value);
});
@ -373,7 +391,7 @@ function initConference(localTracks, connection) {
});
}
var email = APP.settings.getEmail();
let email = APP.settings.getEmail();
email && sendEmail(email);
APP.UI.addListener(UIEvents.EMAIL_CHANGED, function (email) {
APP.settings.setEmail(email);
@ -429,6 +447,8 @@ function initConference(localTracks, connection) {
window.location.pathname = "/";
}, 3000);
}
}, function (err) {
console.error(err);
});
});
@ -489,6 +509,35 @@ function initConference(localTracks, connection) {
APP.UI.updateDTMFSupport(isDTMFSupported);
});
APP.UI.addListener(UIEvents.TOGGLE_SCREENSHARING, function () {
APP.desktopsharing.toggleScreenSharing();
});
APP.UI.addListener(DSEvents.SWITCHING_DONE, function (isSharingScreen) {
APP.UI.updateDesktopSharingButtons(isSharingScreen);
});
APP.desktopsharing.addListener(
DSEvents.NEW_STREAM_CREATED,
(track, callback) => {
const localCallback = (newTrack) => {
if (newTrack.isLocal() && newTrack === localVideo) {
if(localVideo.isMuted() &&
localVideo.videoType !== track.videoType) {
localVideo.mute();
}
callback();
room.off(ConferenceEvents.TRACK_ADDED, localCallback);
}
};
room.on(ConferenceEvents.TRACK_ADDED, localCallback);
localVideo.stop();
localVideo = track;
addTrack(track);
APP.UI.addLocalStream(track);
}
);
$(window).bind('beforeunload', function () {
room.leave();
});
@ -558,15 +607,6 @@ function initConference(localTracks, connection) {
});
}
function createLocalTracks () {
return JitsiMeetJS.createLocalTracks({
devices: ['audio', 'video']
}).catch(function (err) {
console.error('failed to create local tracks', err);
return [];
});
}
function connect() {
return openConnection({retry: true}).catch(function (err) {
if (err === ConnectionErrors.PASSWORD_REQUIRED) {
@ -584,7 +624,10 @@ function init() {
JitsiMeetJS.setLogLevel(JitsiMeetJS.logLevels.TRACE);
JitsiMeetJS.init(config).then(function () {
return Promise.all([createLocalTracks(), connect()]);
return Promise.all([
APP.createLocalTracks('audio', 'video'),
connect()
]);
}).then(function ([tracks, connection]) {
console.log('initialized with %s local tracks', tracks.length);
return initConference(tracks, connection);
@ -596,12 +639,6 @@ function init() {
APP.settings.setLanguage(language);
});
APP.desktopsharing.addListener(
DesktopSharingEventTypes.NEW_STREAM_CREATED,
(stream, callback) => {
APP.conference.changeLocalVideo(stream,
callback);
});
APP.desktopsharing.init(JitsiMeetJS.isDesktopSharingEnabled());
APP.statistics.start();
APP.connectionquality.init();
@ -653,7 +690,7 @@ $(document).ready(function () {
URLProcessor.setConfigParametersFromUrl();
APP.init();
APP.translation.init();
APP.translation.init(settings.getLanguage());
if (APP.API.isEnabled()) {
APP.API.init();

View File

@ -34,7 +34,7 @@
}
#remoteVideos .videocontainer {
display: inline-block;
display: none;
background-color: black;
background-size: contain;
border-radius:8px;
@ -378,7 +378,7 @@
padding-right:2px;
height:38px;
width:auto;
background-color: rgba(0,0,0,0.8);
background-color: rgba(0,0,0,0.8);
border: 1px solid rgba(256, 256, 256, 0.2);
border-radius: 6px;
pointer-events: auto;

View File

@ -140,6 +140,25 @@
</div>
<div id="reloadPresentation"><a id="reloadPresentationLink"><i title="Reload Prezi" class="fa fa-repeat fa-lg"></i></a></div>
<div id="videospace">
<div id="largeVideoContainer" class="videocontainer">
<div id="presentation"></div>
<div id="etherpad"></div>
<a target="_new"><div class="watermark leftwatermark"></div></a>
<a target="_new"><div class="watermark rightwatermark"></div></a>
<a class="poweredby" href="http://jitsi.org" target="_new">
<span data-i18n="poweredby"></span> jitsi.org
</a>
<div id="activeSpeaker">
<img id="activeSpeakerAvatar" src=""/>
<canvas id="activeSpeakerAudioLevel"></canvas>
</div>
<div id="largeVideoWrapper">
<video id="largeVideo" muted="true" autoplay oncontextmenu="return false;"></video>
</div>
<span id="videoConnectionMessage"></span>
</div>
<div id="remoteVideos">
<span id="localVideoContainer" class="videocontainer">
<span id="localNick" class="nick"></span>

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
/* global JitsiMeetJS */
/* global JitsiMeetJS, APP */
import LoginDialog from './UI/authentication/LoginDialog';
import UIEvents from '../service/UI/UIEvents';
@ -44,14 +44,16 @@ function doXmppAuth (room, lockPassword) {
// open room
let newRoom = connection.initJitsiConference(room.getName());
newRoom.on(ConferenceEvents.CONFERENCE_FAILED, function (err) {
connection.disconnect();
loginDialog.displayError(err);
});
loginDialog.displayConnectionStatus(
APP.translation.translateString('connection.FETCH_SESSION_ID')
);
newRoom.room.moderator.allocateConferenceFocus(function () {
newRoom.room.moderator.authenticate().then(function () {
connection.disconnect();
loginDialog.close();
loginDialog.displayConnectionStatus(
APP.translation.translateString('connection.GOT_SESSION_ID')
);
if (room.isJoined()) {
// just reallocate focus if already joined
@ -60,8 +62,19 @@ function doXmppAuth (room, lockPassword) {
// or join
room.join(lockPassword);
}
});
loginDialog.close();
}).catch(function (error, code) {
connection.disconnect();
console.error('Auth on the fly failed', error);
let errorMsg = APP.translation.translateString(
'connection.GET_SESSION_ID_ERROR'
);
loginDialog.displayError(errorMsg + code);
});
}, function (err) {
loginDialog.displayError(err);
});

View File

@ -1,211 +0,0 @@
/* global config, APP, Strophe */
/* jshint -W101 */
// cache datachannels to avoid garbage collection
// https://code.google.com/p/chromium/issues/detail?id=405545
var RTCEvents = require("../../service/RTC/RTCEvents");
var _dataChannels = [];
var eventEmitter = null;
var DataChannels = {
    /**
     * Callback triggered by PeerConnection when new data channel is opened
     * on the bridge. Wires open/error/message/close handlers and caches the
     * channel in `_dataChannels` so it is not garbage collected (see the
     * chromium issue referenced at the top of this file).
     * @param event the event info object.
     */
    onDataChannel: function (event) {
        var dataChannel = event.channel;

        dataChannel.onopen = function () {
            console.info("Data channel opened by the Videobridge!", dataChannel);

            // Code sample for sending string and/or binary data
            // Sends String message to the bridge
            //dataChannel.send("Hello bridge!");
            // Sends 12 bytes binary message to the bridge
            //dataChannel.send(new ArrayBuffer(12));

            eventEmitter.emit(RTCEvents.DATA_CHANNEL_OPEN);
        };

        dataChannel.onerror = function (error) {
            console.error("Data Channel Error:", error, dataChannel);
        };

        dataChannel.onmessage = function (event) {
            var data = event.data;
            // JSON
            var obj;

            try {
                obj = JSON.parse(data);
            }
            catch (e) {
                // Non-JSON payloads are logged and dropped below (obj stays
                // undefined), not rethrown.
                console.error(
                    "Failed to parse data channel message as JSON: ",
                    data,
                    dataChannel);
            }
            if (('undefined' !== typeof(obj)) && (null !== obj)) {
                var colibriClass = obj.colibriClass;

                if ("DominantSpeakerEndpointChangeEvent" === colibriClass) {
                    // Endpoint ID from the Videobridge.
                    var dominantSpeakerEndpoint = obj.dominantSpeakerEndpoint;

                    console.info(
                        "Data channel new dominant speaker event: ",
                        dominantSpeakerEndpoint);
                    eventEmitter.emit(RTCEvents.DOMINANTSPEAKER_CHANGED,
                        dominantSpeakerEndpoint);
                }
                else if ("InLastNChangeEvent" === colibriClass) {
                    var oldValue = obj.oldValue;
                    var newValue = obj.newValue;
                    // Make sure that oldValue and newValue are of type
                    // boolean (the bridge may serialize them as strings).
                    var type;

                    if ((type = typeof oldValue) !== 'boolean') {
                        if (type === 'string') {
                            oldValue = (oldValue == "true");
                        } else {
                            oldValue = Boolean(oldValue).valueOf();
                        }
                    }
                    if ((type = typeof newValue) !== 'boolean') {
                        if (type === 'string') {
                            newValue = (newValue == "true");
                        } else {
                            newValue = Boolean(newValue).valueOf();
                        }
                    }
                    eventEmitter.emit(RTCEvents.LASTN_CHANGED, oldValue, newValue);
                }
                else if ("LastNEndpointsChangeEvent" === colibriClass) {
                    // The new/latest list of last-n endpoint IDs.
                    var lastNEndpoints = obj.lastNEndpoints;
                    // The list of endpoint IDs which are entering the list of
                    // last-n at this time i.e. were not in the old list of
                    // last-n endpoint IDs.
                    var endpointsEnteringLastN = obj.endpointsEnteringLastN;

                    console.info(
                        "Data channel new last-n event: ",
                        lastNEndpoints, endpointsEnteringLastN, obj);
                    eventEmitter.emit(RTCEvents.LASTN_ENDPOINT_CHANGED,
                        lastNEndpoints, endpointsEnteringLastN, obj);
                }
                else {
                    console.debug("Data channel JSON-formatted message: ", obj);
                    // The received message appears to be appropriately
                    // formatted (i.e. is a JSON object which assigns a value to
                    // the mandatory property colibriClass) so don't just
                    // swallow it, expose it to public consumption.
                    eventEmitter.emit("rtc.datachannel." + colibriClass, obj);
                }
            }
        };

        dataChannel.onclose = function () {
            console.info("The Data Channel closed", dataChannel);
            var idx = _dataChannels.indexOf(dataChannel);
            // BUG FIX: previously this did
            //     _dataChannels = _dataChannels.splice(idx, 1);
            // Array#splice mutates in place and RETURNS the removed elements,
            // so the cache was replaced by a one-element array holding the
            // closed channel — dropping every other cached channel and
            // defeating the GC-protection purpose of the cache. Just remove
            // the closed channel in place.
            if (idx > -1) {
                _dataChannels.splice(idx, 1);
            }
        };
        _dataChannels.push(dataChannel);
    },
    /**
     * Binds "ondatachannel" event listener to given PeerConnection instance.
     * No-op when config.openSctp is disabled.
     * @param peerConnection WebRTC peer connection instance.
     * @param emitter EventEmitter used for all RTCEvents emitted by this
     *        module (stored in the module-level `eventEmitter`).
     */
    init: function (peerConnection, emitter) {
        if (!config.openSctp) {
            return;
        }
        peerConnection.ondatachannel = this.onDataChannel;
        eventEmitter = emitter;

        // Sample code for opening new data channel from Jitsi Meet to the bridge.
        // Although it's not a requirement to open separate channels from both bridge
        // and peer as single channel can be used for sending and receiving data.
        // So either channel opened by the bridge or the one opened here is enough
        // for communication with the bridge.
        /*var dataChannelOptions =
        {
            reliable: true
        };
        var dataChannel
            = peerConnection.createDataChannel("myChannel", dataChannelOptions);

        // Can be used only when is in open state
        dataChannel.onopen = function ()
        {
            dataChannel.send("My channel !!!");
        };
        dataChannel.onmessage = function (event)
        {
            var msgData = event.data;
            console.info("Got My Data Channel Message:", msgData, dataChannel);
        };*/
    },
    // Notifies the bridge that the given endpoint was selected in the UI.
    handleSelectedEndpointEvent: function (userResource) {
        onXXXEndpointChanged("selected", userResource);
    },
    // Notifies the bridge that the given endpoint was pinned in the UI.
    handlePinnedEndpointEvent: function (userResource) {
        onXXXEndpointChanged("pinned", userResource);
    },
    /**
     * Array.prototype.some over the cached data channels; returns false when
     * no channels are cached.
     */
    some: function (callback, thisArg) {
        if (_dataChannels && _dataChannels.length !== 0) {
            if (thisArg) {
                return _dataChannels.some(callback, thisArg);
            } else {
                return _dataChannels.some(callback);
            }
        } else {
            return false;
        }
    }
};
/**
 * Notifies Videobridge about a change in the value of a specific
 * endpoint-related property such as selected endpoint and pinned endpoint.
 *
 * @param xxx the name of the endpoint-related property whose value changed
 * @param userResource the new value of the endpoint-related property after the
 * change
 */
function onXXXEndpointChanged(xxx, userResource) {
    // Derive the correct words from xxx such as selected and Selected, pinned
    // and Pinned.
    var first = xxx.charAt(0);
    var rest = xxx.substring(1);
    var lower = first.toLowerCase() + rest;
    var upper = first.toUpperCase() + rest;

    // Notify Videobridge about the specified endpoint change.
    console.log(lower + ' endpoint changed: ', userResource);
    DataChannels.some(function (channel) {
        // Skip channels that are not ready to transmit.
        if (channel.readyState != 'open') {
            return;
        }
        console.log(
            'sending ' + lower
                + ' endpoint changed notification to the bridge: ',
            userResource);

        var message = {};
        message.colibriClass = (upper + 'EndpointChangedEvent');
        message[lower + "Endpoint"] = (userResource ? userResource : null);
        channel.send(JSON.stringify(message));

        // Stop after the first open channel has carried the notification.
        return true;
    });
}

module.exports = DataChannels;

View File

@ -1,145 +0,0 @@
/* global APP */
var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
var RTCEvents = require("../../service/RTC/RTCEvents");
var RTCBrowserType = require("./RTCBrowserType");
var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
/**
* This implements 'onended' callback normally fired by WebRTC after the stream
* is stopped. There is no such behaviour yet in FF, so we have to add it.
* @param stream original WebRTC stream object to which 'onended' handling
* will be added.
*/
/**
 * This implements the 'onended' callback normally fired by WebRTC after the
 * stream is stopped. There is no such behaviour yet in FF, so we patch the
 * stream's stop() to invoke the handler ourselves.
 * @param localStream wrapper whose original WebRTC stream object gets the
 *        'onended' emulation added to its stop().
 */
function implementOnEndedHandling(localStream) {
    var webrtcStream = localStream.getOriginalStream();
    var nativeStop = webrtcStream.stop;

    webrtcStream.stop = function () {
        nativeStop.apply(webrtcStream);
        // Only fire onended while the stream is still reported active,
        // mirroring what the browser would do natively.
        if (localStream.isActive()) {
            webrtcStream.onended();
        }
    };
}
/**
 * Wrapper around a local WebRTC MediaStream (microphone, camera or screen).
 *
 * @param stream the underlying WebRTC MediaStream object.
 * @param type MediaStreamType.AUDIO_TYPE or MediaStreamType.VIDEO_TYPE.
 * @param eventEmitter emitter used for mute/ended events.
 * @param videoType for video streams, "camera" or "screen" — presumably; the
 *        values are only compared against "screen" here. TODO confirm against
 *        callers.
 * @param isGUMStream whether the stream came from getUserMedia; defaults to
 *        true, only an explicit `false` turns it off.
 */
function LocalStream(stream, type, eventEmitter, videoType, isGUMStream) {
    this.stream = stream;
    this.eventEmitter = eventEmitter;
    this.type = type;
    this.videoType = videoType;
    // Default to true; any argument other than the literal false keeps it.
    this.isGUMStream = true;
    if(isGUMStream === false)
        this.isGUMStream = isGUMStream;
    var self = this;
    // getTracks() is specialized at construction time to return only the
    // tracks matching this wrapper's media type.
    if (MediaStreamType.AUDIO_TYPE === type) {
        this.getTracks = function () {
            return self.stream.getAudioTracks();
        };
    } else {
        this.getTracks = function () {
            return self.stream.getVideoTracks();
        };
    }

    // Forward the browser's onended/oninactive notification as our
    // EVENT_TYPE_LOCAL_ENDED event (via streamEnded()).
    APP.RTC.addMediaStreamInactiveHandler(
        this.stream,
        function () {
            self.streamEnded();
        });
    if (RTCBrowserType.isFirefox()) {
        // FF does not fire 'onended' natively; emulate it (see helper above).
        implementOnEndedHandling(this);
    }
}

// Emits EVENT_TYPE_LOCAL_ENDED with this wrapper when the stream ends.
LocalStream.prototype.streamEnded = function () {
    this.eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_ENDED, this);
};

// Returns the underlying WebRTC MediaStream object.
LocalStream.prototype.getOriginalStream = function()
{
    return this.stream;
};

// True when this wrapper holds an audio stream.
LocalStream.prototype.isAudioStream = function () {
    return MediaStreamType.AUDIO_TYPE === this.type;
};

// True when this wrapper holds a video stream.
LocalStream.prototype.isVideoStream = function () {
    return MediaStreamType.VIDEO_TYPE === this.type;
};

/**
 * Mutes or unmutes this local stream and emits AUDIO_MUTE / VIDEO_MUTE.
 *
 * Two strategies:
 *  - Track-level: simply toggle `enabled` on the tracks. Used for non-HTTPS
 *    GUM streams, audio GUM streams, screen streams, and on Firefox (which
 *    lacks the 'removeStream' method used by the other path).
 *  - Stream-level: on mute, remove the stream from the session and stop it;
 *    on unmute, re-acquire the device via getUserMedia and swap the new
 *    stream in asynchronously (the mute event fires from the swap callback).
 *
 * @param mute true to mute, false to unmute.
 */
LocalStream.prototype.setMute = function (mute)
{
    var isAudio = this.isAudioStream();
    var eventType = isAudio ? RTCEvents.AUDIO_MUTE : RTCEvents.VIDEO_MUTE;

    if ((window.location.protocol != "https:" && this.isGUMStream) ||
        (isAudio && this.isGUMStream) || this.videoType === "screen" ||
        // FIXME FF does not support 'removeStream' method used to mute
        RTCBrowserType.isFirefox()) {
        var tracks = this.getTracks();
        for (var idx = 0; idx < tracks.length; idx++) {
            tracks[idx].enabled = !mute;
        }
        this.eventEmitter.emit(eventType, mute);
    } else {
        if (mute) {
            APP.xmpp.removeStream(this.stream);
            APP.RTC.stopMediaStream(this.stream);
            this.eventEmitter.emit(eventType, true);
        } else {
            var self = this;
            // Re-acquire the device, then swap the fresh stream into the
            // session; the unmute event is emitted once the swap completes.
            APP.RTC.rtcUtils.obtainAudioAndVideoPermissions(
                (this.isAudioStream() ? ["audio"] : ["video"]),
                function (stream) {
                    if (isAudio) {
                        APP.RTC.changeLocalAudio(stream,
                            function () {
                                self.eventEmitter.emit(eventType, false);
                            });
                    } else {
                        APP.RTC.changeLocalVideo(stream, false,
                            function () {
                                self.eventEmitter.emit(eventType, false);
                            });
                    }
                });
        }
    }
};

/**
 * Whether this stream is effectively muted: true when every relevant track
 * is disabled, or — for video — when the stream itself is no longer active.
 * @returns {boolean}
 */
LocalStream.prototype.isMuted = function () {
    var tracks = [];
    if (this.isAudioStream()) {
        tracks = this.stream.getAudioTracks();
    } else {
        // An ended/inactive video stream counts as muted.
        if (!this.isActive())
            return true;
        tracks = this.stream.getVideoTracks();
    }
    for (var idx = 0; idx < tracks.length; idx++) {
        if(tracks[idx].enabled)
            return false;
    }
    return true;
};

// ID of the first track; assumes the stream has at least one track —
// TODO confirm callers never hit an empty stream here.
LocalStream.prototype.getId = function () {
    return this.stream.getTracks()[0].id;
};

/**
 * Checks whether the MediaStream is avtive/not ended.
 * When there is no check for active we don't have information and so
 * will return that stream is active (in case of FF).
 * @returns {boolean} whether MediaStream is active.
 */
LocalStream.prototype.isActive = function () {
    if((typeof this.stream.active !== "undefined"))
        return this.stream.active;
    else
        return true;
};

module.exports = LocalStream;

View File

@ -1,57 +0,0 @@
var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
/**
* Creates a MediaStream object for the given data, session id and ssrc.
* It is a wrapper class for the MediaStream.
*
* @param data the data object from which we obtain the stream,
* the peerjid, etc.
* @param ssrc the ssrc corresponding to this MediaStream
* @param mute the whether this MediaStream is muted
*
* @constructor
*/
/**
 * Creates a MediaStream object for the given data, session id and ssrc.
 * It is a wrapper class for a remote MediaStream.
 *
 * XXX(gp) to minimize headaches in the future, we should build our
 * abstractions around tracks and not streams: ORTC is a track-based API,
 * Mozilla expects m-lines to represent media tracks, and a track might have
 * associated RTX and FEC sources (multiple SSRCs). In other words this should
 * really be a MediaTrack class.
 *
 * @param data the data object from which we obtain the stream,
 * the peerjid, etc.
 * @param ssrc the ssrc corresponding to this MediaStream
 * @param browser the browser type string (currently unused here)
 * @param eventEmitter emitter stored for later use
 * @param muted whether this MediaStream starts out muted
 * @param type the media type of this stream
 *
 * @constructor
 */
function MediaStream(data, ssrc, browser, eventEmitter, muted, type) {
    if (!type) {
        console.log("Errrm...some code needs an update...");
    }

    this.stream = data.stream;
    this.peerjid = data.peerjid;
    this.videoType = data.videoType;
    this.ssrc = ssrc;
    this.type = type;
    this.muted = muted;
    this.eventEmitter = eventEmitter;
}

// FIXME duplicated with LocalStream methods - extract base class

// True when this wrapper holds an audio stream.
MediaStream.prototype.isAudioStream = function () {
    return this.type === MediaStreamType.AUDIO_TYPE;
};

// True when this wrapper holds a video stream.
MediaStream.prototype.isVideoStream = function () {
    return this.type === MediaStreamType.VIDEO_TYPE;
};

// Returns the underlying WebRTC MediaStream object.
MediaStream.prototype.getOriginalStream = function () {
    return this.stream;
};

// Records the muted state on both the wrapper and the raw stream.
MediaStream.prototype.setMute = function (value) {
    this.muted = value;
    this.stream.muted = value;
};
module.exports = MediaStream;

View File

@ -1,335 +0,0 @@
/* global APP */
var EventEmitter = require("events");
var RTCBrowserType = require("./RTCBrowserType");
var RTCUtils = require("./RTCUtils.js");
var LocalStream = require("./LocalStream.js");
var DataChannels = require("./DataChannels");
var MediaStream = require("./MediaStream.js");
var DesktopSharingEventTypes
= require("../../service/desktopsharing/DesktopSharingEventTypes");
var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
var RTCEvents = require("../../service/RTC/RTCEvents.js");
var XMPPEvents = require("../../service/xmpp/XMPPEvents");
var UIEvents = require("../../service/UI/UIEvents");
var eventEmitter = new EventEmitter();
/**
 * Returns which media kinds should be requested from getUserMedia.
 * Always requests both audio and video.
 *
 * NOTE: an earlier (disabled) variant turned both off when
 * config.disableEarlyMediaPermissionRequests was set or the connection was
 * not HTTPS, but that caused issues with desktop sharing.
 * WARNING: We must change the implementation to start video/audio if we
 * receive from the focus that the peer is not muted.
 *
 * @returns {{audio: boolean, video: boolean}}
 */
function getMediaStreamUsage() {
    return {
        audio: true,
        video: true
    };
}
/**
 * Central local-media module: owns the local audio/video stream wrappers,
 * caches remote streams per peer JID, and bridges desktop-sharing, XMPP and
 * data-channel events. Exported as a singleton.
 */
var RTC = {
    // Exposes DataChannels to public consumption (e.g. jitsi-meet-torture)
    // without the necessity to require the module.
    "DataChannels": DataChannels,
    // RTCUtils instance, created lazily in start().
    rtcUtils: null,
    // Availability flags for local capture devices (see setDeviceAvailability).
    devices: {
        audio: true,
        video: true
    },
    // Map: peer JID -> { <MediaStreamType>: MediaStream } of remote streams.
    remoteStreams: {},
    localAudio: null,
    localVideo: null,
    // Subscribes listener to the given stream event type.
    addStreamListener: function (listener, eventType) {
        eventEmitter.on(eventType, listener);
    },
    // Subscribes listener to an arbitrary event type on this module's emitter.
    addListener: function (type, listener) {
        eventEmitter.on(type, listener);
    },
    // Unsubscribes a stream listener.
    // NOTE(review): StreamEventTypes is a constants module, not a
    // constructor, so this instanceof guard looks suspect — verify before
    // relying on this method.
    removeStreamListener: function (listener, eventType) {
        if(!(eventType instanceof StreamEventTypes))
            throw "Illegal argument";
        eventEmitter.removeListener(eventType, listener);
    },
    /**
     * Wraps a raw stream in a LocalStream, stores it as localAudio/localVideo
     * and emits EVENT_TYPE_LOCAL_CREATED (or _CHANGED when `change` is set).
     * @returns the new LocalStream wrapper.
     */
    createLocalStream: function (stream, type, change, videoType,
                                 isMuted, isGUMStream) {
        var localStream =
            new LocalStream(stream, type, eventEmitter, videoType, isGUMStream);
        if(isMuted === true)
            localStream.setMute(true);

        if (MediaStreamType.AUDIO_TYPE === type) {
            this.localAudio = localStream;
        } else {
            this.localVideo = localStream;
        }
        var eventType = StreamEventTypes.EVENT_TYPE_LOCAL_CREATED;
        if(change)
            eventType = StreamEventTypes.EVENT_TYPE_LOCAL_CHANGED;

        eventEmitter.emit(eventType, localStream, isMuted);
        return localStream;
    },
    /**
     * Creates MediaStream wrappers (audio and/or video, depending on which
     * tracks are present) for a newly received remote stream, caches them in
     * remoteStreams[jid] and emits EVENT_TYPE_REMOTE_CREATED for each.
     */
    createRemoteStream: function (data, ssrc) {
        var jid = data.peerjid || APP.xmpp.myJid();

        // check the video muted state from last stored presence if any
        var muted = false;
        var pres = APP.xmpp.getLastPresence(jid);
        if (pres && pres.videoMuted) {
            muted = pres.videoMuted;
        }
        var self = this;
        [MediaStreamType.AUDIO_TYPE, MediaStreamType.VIDEO_TYPE].forEach(
            function (type) {
                var tracks =
                    type == MediaStreamType.AUDIO_TYPE
                    ? data.stream.getAudioTracks() : data.stream.getVideoTracks();
                if (!tracks || !Array.isArray(tracks) || !tracks.length) {
                    console.log("Not creating a(n) " + type + " stream: no tracks");
                    return;
                }

                var remoteStream = new MediaStream(data, ssrc,
                    RTCBrowserType.getBrowserType(), eventEmitter, muted, type);

                if (!self.remoteStreams[jid]) {
                    self.remoteStreams[jid] = {};
                }
                self.remoteStreams[jid][type] = remoteStream;
                eventEmitter.emit(StreamEventTypes.EVENT_TYPE_REMOTE_CREATED,
                                  remoteStream);
            });
    },
    // Peer-connection constraints, delegated to RTCUtils.
    getPCConstraints: function () {
        return this.rtcUtils.pc_constraints;
    },
    // getUserMedia with jitsi-specific constraint handling; delegated.
    getUserMediaWithConstraints:function(um, success_callback,
                                         failure_callback, resolution,
                                         bandwidth, fps, desktopStream)
    {
        return this.rtcUtils.getUserMediaWithConstraints(um, success_callback,
            failure_callback, resolution, bandwidth, fps, desktopStream);
    },
    // Attaches a stream to a DOM element; delegated to RTCUtils.
    attachMediaStream: function (elSelector, stream) {
        this.rtcUtils.attachMediaStream(elSelector, stream);
    },
    getStreamID: function (stream) {
        return this.rtcUtils.getStreamID(stream);
    },
    getVideoSrc: function (element) {
        return this.rtcUtils.getVideoSrc(element);
    },
    setVideoSrc: function (element, src) {
        this.rtcUtils.setVideoSrc(element, src);
    },
    // Temasys (IE/Safari) plugin renders video in <object> elements.
    getVideoElementName: function () {
        return RTCBrowserType.isTemasysPluginUsed() ? 'object' : 'video';
    },
    // Drops the RTCUtils reference; no other teardown is performed here.
    dispose: function() {
        if (this.rtcUtils) {
            this.rtcUtils = null;
        }
    },
    stop: function () {
        this.dispose();
    },
    /**
     * Wires up desktop-sharing, incoming-call and UI endpoint listeners,
     * creates RTCUtils and kicks off the initial permission request.
     * RTC_READY is emitted from onReady before permissions are requested.
     */
    start: function () {
        var self = this;
        APP.desktopsharing.addListener(
            DesktopSharingEventTypes.NEW_STREAM_CREATED,
            function (stream, isUsingScreenStream, callback) {
                self.changeLocalVideo(stream, isUsingScreenStream, callback);
            });
        APP.xmpp.addListener(XMPPEvents.CALL_INCOMING, function(event) {
            DataChannels.init(event.peerconnection, eventEmitter);
        });
        APP.UI.addListener(UIEvents.SELECTED_ENDPOINT,
                           DataChannels.handleSelectedEndpointEvent);
        APP.UI.addListener(UIEvents.PINNED_ENDPOINT,
                           DataChannels.handlePinnedEndpointEvent);

        // In case of IE we continue from 'onReady' callback
        // passed to RTCUtils constructor. It will be invoked by Temasys plugin
        // once it is initialized.
        var onReady = function () {
            eventEmitter.emit(RTCEvents.RTC_READY, true);
            self.rtcUtils.obtainAudioAndVideoPermissions(
                null, null, getMediaStreamUsage());
        };

        this.rtcUtils = new RTCUtils(this, eventEmitter, onReady);

        // Call onReady() if Temasys plugin is not used
        if (!RTCBrowserType.isTemasysPluginUsed()) {
            onReady();
        }
    },
    /**
     * Applies a remote video mute state.
     * @returns {boolean} true when the state was applied (or there was no
     *          stream to apply it to), false when the stream already had the
     *          requested state.
     */
    muteRemoteVideoStream: function (jid, value) {
        var stream;

        if(this.remoteStreams[jid] &&
            this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE]) {
            stream = this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE];
        }

        if(!stream)
            return true;

        if (value != stream.muted) {
            stream.setMute(value);
            return true;
        }
        return false;
    },
    /**
     * Replaces the current local video with a new stream (camera or screen).
     * When the old video was muted and the type changes, the swap callback
     * additionally unmutes via XMPP. Stops the old stream before switching.
     */
    changeLocalVideo: function (stream, isUsingScreenStream, callback) {
        var oldStream = this.localVideo.getOriginalStream();
        var type = (isUsingScreenStream ? "screen" : "camera");
        var localCallback = callback;

        if(this.localVideo.isMuted() && this.localVideo.videoType !== type) {
            localCallback = function() {
                APP.xmpp.setVideoMute(false, function(mute) {
                    eventEmitter.emit(RTCEvents.VIDEO_MUTE, mute);
                });

                callback();
            };
        }
        // FIXME: Workaround for FF/IE/Safari
        if (stream && stream.videoStream) {
            stream = stream.videoStream;
        }
        var videoStream = this.rtcUtils.createStream(stream, true);
        this.localVideo =
            this.createLocalStream(videoStream, "video", true, type);
        // Stop the stream
        this.stopMediaStream(oldStream);

        APP.xmpp.switchStreams(videoStream, oldStream,localCallback);
    },
    /**
     * Replaces the current local audio with a new stream, stopping the old
     * one and switching the XMPP session over to the new stream.
     */
    changeLocalAudio: function (stream, callback) {
        var oldStream = this.localAudio.getOriginalStream();
        var newStream = this.rtcUtils.createStream(stream);
        this.localAudio
            = this.createLocalStream(
                newStream, MediaStreamType.AUDIO_TYPE, true);
        // Stop the stream
        this.stopMediaStream(oldStream);
        APP.xmpp.switchStreams(newStream, oldStream, callback, true);
    },
    /**
     * Whether the video of the given JID (local or remote) is muted.
     * @returns {boolean|null} null when no remote video stream is known.
     */
    isVideoMuted: function (jid) {
        if (jid === APP.xmpp.myJid()) {
            var localVideo = APP.RTC.localVideo;
            return (!localVideo || localVideo.isMuted());
        } else {
            if (!APP.RTC.remoteStreams[jid] ||
                !APP.RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE]) {
                return null;
            }
            return APP.RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE].muted;
        }
    },
    /**
     * Sets the local video mute state. When the requested state already
     * matches, only presence is (re)sent; otherwise the stream is muted and
     * XMPP is notified. No-op when there is no local video.
     */
    setVideoMute: function (mute, callback, options) {
        if (!this.localVideo)
            return;

        if (mute == APP.RTC.localVideo.isMuted())
        {
            APP.xmpp.sendVideoInfoPresence(mute);
            if (callback)
                callback(mute);
        }
        else
        {
            APP.RTC.localVideo.setMute(mute);
            APP.xmpp.setVideoMute(
                mute,
                callback,
                options);
        }
    },
    /**
     * Updates the audio/video device availability flags (only strict boolean
     * values are accepted) and emits AVAILABLE_DEVICES_CHANGED.
     */
    setDeviceAvailability: function (devices) {
        if(!devices)
            return;
        if(devices.audio === true || devices.audio === false)
            this.devices.audio = devices.audio;
        if(devices.video === true || devices.video === false)
            this.devices.video = devices.video;
        eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, this.devices);
    },
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    stopMediaStream: function (mediaStream) {
        mediaStream.getTracks().forEach(function (track) {
            // stop() not supported with IE
            if (track.stop) {
                track.stop();
            }
        });

        // leave stop for implementation still using it
        if (mediaStream.stop) {
            mediaStream.stop();
        }
    },
    /**
     * Adds onended/inactive handler to a MediaStream.
     * @param mediaStream a MediaStream to attach onended/inactive handler
     * @param handler the handler
     */
    addMediaStreamInactiveHandler: function (mediaStream, handler) {
        if (mediaStream.addEventListener) {
            // chrome
            if(typeof mediaStream.active !== "undefined")
                mediaStream.oninactive = handler;
            else
                mediaStream.onended = handler;
        } else {
            // themasys
            mediaStream.attachEvent('ended', function () {
                handler(mediaStream);
            });
        }
    },
    /**
     * Removes onended/inactive handler.
     * @param mediaStream the MediaStream to remove the handler from.
     * @param handler the handler to remove.
     */
    removeMediaStreamInactiveHandler: function (mediaStream, handler) {
        if (mediaStream.removeEventListener) {
            // chrome
            if(typeof mediaStream.active !== "undefined")
                mediaStream.oninactive = null;
            else
                mediaStream.onended = null;
        } else {
            // themasys
            mediaStream.detachEvent('ended', handler);
        }
    }
};

module.exports = RTC;

View File

@ -1,574 +0,0 @@
/* global APP, config, require, attachMediaStream, getUserMedia,
RTCPeerConnection, webkitMediaStream, webkitURL, webkitRTCPeerConnection,
mozRTCIceCandidate, mozRTCSessionDescription, mozRTCPeerConnection */
/* jshint -W101 */
var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
var RTCBrowserType = require("./RTCBrowserType");
var Resolutions = require("../../service/RTC/Resolutions");
var RTCEvents = require("../../service/RTC/RTCEvents");
var AdapterJS = require("./adapter.screenshare");
var currentResolution = null;
/**
 * Returns the name of the largest known resolution that is strictly
 * smaller (by 'order') than the given one, or null when the resolution
 * is unknown or already the smallest.
 *
 * NOTE: the previous implementation accepted the first enumerated entry
 * unconditionally (the '!res' clause ignored the order check), so it
 * could return an equal or even larger resolution — or the input itself
 * for the lowest resolution — depending on key enumeration order.
 *
 * @param resolution key into the Resolutions table
 * @returns {string|null} name of the next-smaller resolution, or null
 */
function getPreviousResolution(resolution) {
    if (!Resolutions[resolution])
        return null;
    var order = Resolutions[resolution].order;
    var res = null;
    var resName = null;
    for (var i in Resolutions) {
        var tmp = Resolutions[i];
        // Consider only strictly smaller resolutions; keep the largest.
        if (tmp.order < order && (!res || res.order < tmp.order)) {
            resName = i;
            res = tmp;
        }
    }
    return resName;
}
/**
 * Fills in the mandatory min/max width/height video constraints for the
 * requested resolution (plus a frame-rate cap on Android when the
 * resolution is unknown).
 * @param constraints constraints object whose video.mandatory is updated
 * @param resolution key into the Resolutions table
 */
function setResolutionConstraints(constraints, resolution) {
    var isAndroid = RTCBrowserType.isAndroid();
    var mandatory = constraints.video.mandatory;
    var known = Resolutions[resolution];

    if (known) {
        mandatory.minWidth = known.width;
        mandatory.minHeight = known.height;
    } else if (isAndroid) {
        // FIXME can't remember if the purpose of this was to always request
        // low resolution on Android ? if yes it should be moved up front
        mandatory.minWidth = 320;
        mandatory.minHeight = 240;
        mandatory.maxFrameRate = 15;
    }

    // Pin max to min so we get exactly the requested size.
    if (mandatory.minWidth) {
        mandatory.maxWidth = mandatory.minWidth;
    }
    if (mandatory.minHeight) {
        mandatory.maxHeight = mandatory.minHeight;
    }
}
/**
 * Builds the getUserMedia constraints object for the requested media.
 *
 * @param um array of requested media kinds: any of 'audio', 'video',
 * 'screen' (tab/screen capture), 'desktop' (chooser-based capture)
 * @param resolution key into the Resolutions table for the video size
 * @param bandwidth optional bandwidth hint (goog optional constraint)
 * @param fps optional minimum frame rate to request
 * @param desktopStream chromeMediaSourceId used with 'desktop'
 * @returns constraints object suitable for getUserMedia
 */
function getConstraints(um, resolution, bandwidth, fps, desktopStream) {
    var constraints = {audio: false, video: false};
    if (um.indexOf('video') >= 0) {
        // same behaviour as true
        constraints.video = { mandatory: {}, optional: [] };
        constraints.video.optional.push({ googLeakyBucket: true });
        setResolutionConstraints(constraints, resolution);
    }
    if (um.indexOf('audio') >= 0) {
        if (!RTCBrowserType.isFirefox()) {
            // same behaviour as true
            constraints.audio = { mandatory: {}, optional: []};
            // if it is good enough for hangouts...
            // NOTE: 'googNoiseSupression' / 'googNoisesuppression2' were
            // misspellings of the Chrome constraint names and were being
            // silently ignored; fixed to the documented spellings.
            constraints.audio.optional.push(
                {googEchoCancellation: true},
                {googAutoGainControl: true},
                {googNoiseSuppression: true},
                {googHighpassFilter: true},
                {googNoiseSuppression2: true},
                {googEchoCancellation2: true},
                {googAutoGainControl2: true}
            );
        } else {
            constraints.audio = true;
        }
    }
    if (um.indexOf('screen') >= 0) {
        if (RTCBrowserType.isChrome()) {
            constraints.video = {
                mandatory: {
                    chromeMediaSource: "screen",
                    googLeakyBucket: true,
                    maxWidth: window.screen.width,
                    maxHeight: window.screen.height,
                    maxFrameRate: 3
                },
                optional: []
            };
        } else if (RTCBrowserType.isTemasysPluginUsed()) {
            constraints.video = {
                optional: [
                    {
                        sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
                    }
                ]
            };
        } else if (RTCBrowserType.isFirefox()) {
            constraints.video = {
                mozMediaSource: "window",
                mediaSource: "window"
            };
        } else {
            console.error(
                "'screen' WebRTC media source is supported only in Chrome" +
                " and with Temasys plugin");
        }
    }
    if (um.indexOf('desktop') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: desktopStream,
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }
    if (bandwidth) {
        if (!constraints.video) {
            //same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.optional.push({bandwidth: bandwidth});
    }
    if (fps) {
        // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
        if (!constraints.video) {
            // same behaviour as true;
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.mandatory.minFrameRate = fps;
    }
    // we turn audio for both audio and video tracks, the fake audio & video
    // seems to work only when enabled in one getUserMedia call, we cannot get
    // fake audio separate by fake video; this later can be a problem with
    // some of the tests
    if (RTCBrowserType.isFirefox() && config.firefox_fake_device) {
        constraints.audio = true;
        constraints.fake = true;
    }
    return constraints;
}
/**
 * Browser abstraction layer for the WebRTC primitives. Detects the current
 * browser and installs matching implementations of peerconnection,
 * getUserMedia, stream attachment and <video> src helpers on this instance.
 * Browsers without WebRTC support are redirected to
 * 'unsupported_browser.html'.
 *
 * @param RTCService the RTC service this utils instance belongs to
 * @param eventEmitter emitter used for RTCEvents notifications
 * @param onTemasysPluginReady callback invoked once the Temasys plugin
 * (IE/Safari path) has finished initializing
 */
function RTCUtils(RTCService, eventEmitter, onTemasysPluginReady)
{
    var self = this;
    this.service = RTCService;
    this.eventEmitter = eventEmitter;
    if (RTCBrowserType.isFirefox()) {
        var FFversion = RTCBrowserType.getFirefoxVersion();
        if (FFversion >= 40) {
            // Firefox >= 40: moz-prefixed WebRTC API.
            this.peerconnection = mozRTCPeerConnection;
            this.getUserMedia = navigator.mozGetUserMedia.bind(navigator);
            this.pc_constraints = {};
            this.attachMediaStream = function (element, stream) {
                //  srcObject is being standardized and FF will eventually
                //  support that unprefixed. FF also supports the
                //  "element.src = URL.createObjectURL(...)" combo, but that
                //  will be deprecated in favour of srcObject.
                //
                // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
                // https://github.com/webrtc/samples/issues/302
                if(!element[0])
                    return;
                element[0].mozSrcObject = stream;
                element[0].play();
            };
            this.getStreamID = function (stream) {
                // Fall back to the first track's id when the stream itself
                // has none.
                var id = stream.id;
                if (!id) {
                    var tracks = stream.getVideoTracks();
                    if (!tracks || tracks.length === 0) {
                        tracks = stream.getAudioTracks();
                    }
                    id = tracks[0].id;
                }
                return APP.xmpp.filter_special_chars(id);
            };
            this.getVideoSrc = function (element) {
                if(!element)
                    return null;
                return element.mozSrcObject;
            };
            this.setVideoSrc = function (element, src) {
                if(element)
                    element.mozSrcObject = src;
            };
            window.RTCSessionDescription = mozRTCSessionDescription;
            window.RTCIceCandidate = mozRTCIceCandidate;
        } else {
            console.error(
                "Firefox version too old: " + FFversion + ". Required >= 40.");
            window.location.href = 'unsupported_browser.html';
            return;
        }
    } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) {
        // Chrome / Opera: webkit-prefixed API; elements are jQuery-wrapped.
        this.peerconnection = webkitRTCPeerConnection;
        this.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
        this.attachMediaStream = function (element, stream) {
            element.attr('src', webkitURL.createObjectURL(stream));
        };
        this.getStreamID = function (stream) {
            // streams from FF endpoints have the characters '{' and '}'
            // that make jQuery choke.
            return APP.xmpp.filter_special_chars(stream.id);
        };
        this.getVideoSrc = function (element) {
            if(!element)
                return null;
            return element.getAttribute("src");
        };
        this.setVideoSrc = function (element, src) {
            if(element)
                element.setAttribute("src", src);
        };
        // DTLS should now be enabled by default but..
        this.pc_constraints = {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]};
        if (RTCBrowserType.isAndroid()) {
            this.pc_constraints = {}; // disable DTLS on Android
        }
        // Polyfill track getters missing from older webkit streams.
        if (!webkitMediaStream.prototype.getVideoTracks) {
            webkitMediaStream.prototype.getVideoTracks = function () {
                return this.videoTracks;
            };
        }
        if (!webkitMediaStream.prototype.getAudioTracks) {
            webkitMediaStream.prototype.getAudioTracks = function () {
                return this.audioTracks;
            };
        }
    }
    // Detect IE/Safari
    else if (RTCBrowserType.isTemasysPluginUsed()) {
        //AdapterJS.WebRTCPlugin.setLogLevel(
        //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
        // The helpers are installed asynchronously, once the plugin loads.
        AdapterJS.webRTCReady(function (isPlugin) {
            self.peerconnection = RTCPeerConnection;
            self.getUserMedia = getUserMedia;
            self.attachMediaStream = function (elSel, stream) {
                // Placeholder streams (see DummyMediaStream) are never
                // attached to the plugin.
                if (stream.id === "dummyAudio" || stream.id === "dummyVideo") {
                    return;
                }
                attachMediaStream(elSel[0], stream);
            };
            self.getStreamID = function (stream) {
                return APP.xmpp.filter_special_chars(stream.label);
            };
            self.getVideoSrc = function (element) {
                if (!element) {
                    console.warn("Attempt to get video SRC of null element");
                    return null;
                }
                // The plugin stores the stream id in a child param element.
                var children = element.children;
                for (var i = 0; i !== children.length; ++i) {
                    if (children[i].name === 'streamId') {
                        return children[i].value;
                    }
                }
                //console.info(element.id + " SRC: " + src);
                return null;
            };
            self.setVideoSrc = function (element, src) {
                //console.info("Set video src: ", element, src);
                if (!src) {
                    console.warn("Not attaching video stream, 'src' is null");
                    return;
                }
                AdapterJS.WebRTCPlugin.WaitForPluginReady();
                var stream = AdapterJS.WebRTCPlugin.plugin
                    .getStreamWithId(AdapterJS.WebRTCPlugin.pageId, src);
                attachMediaStream(element, stream);
            };
            onTemasysPluginReady(isPlugin);
        });
    } else {
        try {
            console.log('Browser does not appear to be WebRTC-capable');
        } catch (e) { }
        window.location.href = 'unsupported_browser.html';
    }
}
/**
 * Requests user media of the given kinds with the given quality settings
 * and reports device availability based on the outcome.
 * Emits RTCEvents.GET_USER_MEDIA_FAILED on any failure.
 *
 * @param um array of requested media kinds, e.g. ['audio', 'video']
 * @param success_callback invoked with the obtained stream
 * @param failure_callback optional; invoked with the error on failure
 * @param resolution requested video resolution (also remembered for
 * later fallback via getPreviousResolution)
 * @param bandwidth optional bandwidth hint
 * @param fps optional minimum frame rate
 * @param desktopStream chromeMediaSourceId for desktop capture
 */
RTCUtils.prototype.getUserMediaWithConstraints = function(
    um, success_callback, failure_callback, resolution, bandwidth, fps,
    desktopStream) {
    currentResolution = resolution;

    var constraints = getConstraints(
        um, resolution, bandwidth, fps, desktopStream);
    console.info("Get media constraints", constraints);

    var self = this;
    // Shared failure path: notify listeners, then the caller (if any).
    var reportFailure = function (error) {
        self.eventEmitter.emit(RTCEvents.GET_USER_MEDIA_FAILED, error);
        if (failure_callback) {
            failure_callback(error);
        }
    };

    try {
        this.getUserMedia(
            constraints,
            function (stream) {
                console.log('onUserMediaSuccess');
                self.setAvailableDevices(um, true);
                success_callback(stream);
            },
            function (error) {
                self.setAvailableDevices(um, false);
                console.warn('Failed to get access to local media. Error ',
                    error, constraints);
                reportFailure(error);
            });
    } catch (e) {
        console.error('GUM failed: ', e);
        reportFailure(e);
    }
};
/**
 * Marks the requested device kinds as (un)available on the RTC service.
 * @param um array of requested media kinds, e.g. ['audio', 'video']
 * @param available whether the request for those kinds succeeded
 */
RTCUtils.prototype.setAvailableDevices = function (um, available) {
    var devices = {};
    if (um.indexOf("video") !== -1) {
        devices.video = available;
    }
    if (um.indexOf("audio") !== -1) {
        devices.audio = available;
    }
    this.service.setDeviceAvailability(devices);
};
/**
 * We ask for audio and video combined stream in order to get permissions and
 * not to ask twice.
 *
 * @param devices list of device kinds to request; defaults to
 * ['audio', 'video'] when not given
 * @param callback optional; invoked with (stream, usageOptions) on success.
 * When omitted, this.successCallback handles the result instead.
 * @param usageOptions optional map; when present, only kinds explicitly
 * set to true are actually requested from getUserMedia
 */
RTCUtils.prototype.obtainAudioAndVideoPermissions =
    function(devices, callback, usageOptions)
{
    var self = this;
    // Get AV
    var successCallback = function (stream) {
        if(callback)
            callback(stream, usageOptions);
        else
            self.successCallback(stream, usageOptions);
    };
    if(!devices)
        devices = ['audio', 'video'];
    // Filter the requested kinds down to those enabled in usageOptions.
    var newDevices = [];
    if(usageOptions)
        for(var i = 0; i < devices.length; i++) {
            var device = devices[i];
            if(usageOptions[device] === true)
                newDevices.push(device);
        }
    else
        newDevices = devices;
    // Nothing requested: report success with no stream at all.
    if(newDevices.length === 0) {
        successCallback();
        return;
    }
    if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) {
        // With FF/IE we can't split the stream into audio and video because FF
        // doesn't support media stream constructors. So, we need to get the
        // audio stream separately from the video stream using two distinct GUM
        // calls. Not very user friendly :-( but we don't have many other
        // options neither.
        //
        // Note that we pack those 2 streams in a single object and pass it to
        // the successCallback method.
        var obtainVideo = function (audioStream) {
            self.getUserMediaWithConstraints(
                ['video'],
                function (videoStream) {
                    return successCallback({
                        audioStream: audioStream,
                        videoStream: videoStream
                    });
                },
                function (error) {
                    console.error(
                        'failed to obtain video stream - stop', error);
                    self.errorCallback(error);
                },
                config.resolution || '360');
        };
        var obtainAudio = function () {
            self.getUserMediaWithConstraints(
                ['audio'],
                function (audioStream) {
                    // NOTE: when only audio was requested, no callback ever
                    // fires here — presumably the audio-only result reaches
                    // the app via another path; verify before relying on it.
                    if (newDevices.indexOf('video') !== -1)
                        obtainVideo(audioStream);
                },
                function (error) {
                    console.error(
                        'failed to obtain audio stream - stop', error);
                    self.errorCallback(error);
                }
            );
        };
        if (newDevices.indexOf('audio') !== -1) {
            obtainAudio();
        } else {
            obtainVideo(null);
        }
    } else {
        this.getUserMediaWithConstraints(
            newDevices,
            function (stream) {
                successCallback(stream);
            },
            function (error) {
                self.errorCallback(error);
            },
            config.resolution || '360');
    }
};
/**
 * Default GUM success handler: logs track counts (when a real MediaStream
 * was obtained) and hands the result to handleLocalStream.
 * @param stream MediaStream, or on FF/IE an {audioStream, videoStream}
 * wrapper, or null/undefined
 * @param usageOptions forwarded to handleLocalStream
 */
RTCUtils.prototype.successCallback = function (stream, usageOptions) {
    // On FF/IE 'stream' is a plain wrapper object without track getters,
    // so probe for them before logging.
    if (stream && stream.getAudioTracks && stream.getVideoTracks) {
        console.log('got', stream, stream.getAudioTracks().length,
            stream.getVideoTracks().length);
    }
    this.handleLocalStream(stream, usageOptions);
};
/**
 * Default GUM failure handler. When the failure was caused by a too-strict
 * width/height constraint it retries audio+video one resolution step lower;
 * otherwise it falls back to audio only, and finally to no stream at all.
 * @param error the error reported by getUserMedia
 */
RTCUtils.prototype.errorCallback = function (error) {
    var self = this;
    console.error('failed to obtain audio/video stream - trying audio only', error);

    var resolution = getPreviousResolution(currentResolution);
    // Loose comparisons kept intentionally to match the original checks.
    var isSizeConstraintError =
        typeof error == "object" && error.constraintName && error.name &&
        (error.name == "ConstraintNotSatisfiedError" ||
            error.name == "OverconstrainedError") &&
        (error.constraintName == "minWidth" ||
            error.constraintName == "maxWidth" ||
            error.constraintName == "minHeight" ||
            error.constraintName == "maxHeight");

    if (isSizeConstraintError && resolution) {
        // Too-strict size constraint: retry one resolution step lower.
        self.getUserMediaWithConstraints(['audio', 'video'],
            function (stream) {
                return self.successCallback(stream);
            }, function (error) {
                return self.errorCallback(error);
            }, resolution);
    } else {
        // Give up on video; if even audio fails, proceed with no stream.
        self.getUserMediaWithConstraints(
            ['audio'],
            function (stream) {
                return self.successCallback(stream);
            },
            function (error) {
                console.error('failed to obtain audio/video stream - stop',
                    error);
                return self.successCallback(null);
            }
        );
    }
};
/**
 * Splits the raw GUM result into separate audio and video streams and
 * registers both as local streams on the RTC service.
 * @param stream a MediaStream (webkit browsers) or, on FF/Temasys, an
 * {audioStream, videoStream} wrapper; may be null when GUM failed
 * @param usageOptions optional {audio, video} flags; a kind set to false
 * starts muted and is marked as not requested from GUM
 */
RTCUtils.prototype.handleLocalStream = function (stream, usageOptions) {
    var audioStream, videoStream;

    if (window.webkitMediaStream) {
        // webkit: rebuild two single-kind streams from the combined one.
        audioStream = new webkitMediaStream();
        videoStream = new webkitMediaStream();
        if (stream) {
            stream.getAudioTracks().forEach(function (track) {
                audioStream.addTrack(track);
            });
            stream.getVideoTracks().forEach(function (track) {
                videoStream.addTrack(track);
            });
        }
    } else if (RTCBrowserType.isFirefox() ||
               RTCBrowserType.isTemasysPluginUsed()) {
        // Firefox and Temasys plugin deliver the two streams separately;
        // fill any missing one with a dummy placeholder.
        audioStream = (stream && stream.audioStream) ||
            new DummyMediaStream("dummyAudio");
        videoStream = (stream && stream.videoStream) ||
            new DummyMediaStream("dummyVideo");
    }

    // Expressions kept as in the original so falsy non-boolean values of
    // usageOptions propagate unchanged.
    var audioMuted = (usageOptions && usageOptions.audio === false),
        videoMuted = (usageOptions && usageOptions.video === false);
    var audioGUM = (!usageOptions || usageOptions.audio !== false),
        videoGUM = (!usageOptions || usageOptions.video !== false);

    this.service.createLocalStream(
        audioStream, MediaStreamType.AUDIO_TYPE, null, null,
        audioMuted, audioGUM);
    this.service.createLocalStream(
        videoStream, MediaStreamType.VIDEO_TYPE, null, 'camera',
        videoMuted, videoGUM);
};
/**
 * Minimal MediaStream stand-in used on the FF/Temasys code paths when a
 * real stream of the given kind is unavailable.
 * @param id value used for both the id and the label of the fake stream
 */
function DummyMediaStream(id) {
    this.id = id;
    this.label = id;
    // No-op: there is nothing to stop on a placeholder.
    this.stop = function () { };
    // A placeholder never carries any tracks.
    this.getAudioTracks = function () { return []; };
    this.getVideoTracks = function () { return []; };
}
/**
 * Builds a single-kind (audio-only or video-only) stream from the given
 * source stream.
 * @param stream source MediaStream
 * @param isVideo true to extract the video tracks, falsy for audio
 * @returns on webkit a new stream with just the requested tracks; on
 * other browsers the source stream itself, or a DummyMediaStream when
 * no source was given
 */
RTCUtils.prototype.createStream = function (stream, isVideo) {
    if (window.webkitMediaStream) {
        var singleKindStream = new webkitMediaStream();
        var tracks = isVideo ? stream.getVideoTracks()
                             : stream.getAudioTracks();
        tracks.forEach(function (track) {
            singleKindStream.addTrack(track);
        });
        return singleKindStream;
    }

    // FIXME: this is duplicated with 'handleLocalStream' !!!
    if (stream) {
        return stream;
    }
    return new DummyMediaStream(isVideo ? "dummyVideo" : "dummyAudio");
};
module.exports = RTCUtils;

File diff suppressed because it is too large Load Diff

View File

@ -92,6 +92,7 @@ export default function createRoomLocker (room) {
return room.lock(newPass).then(function () {
password = newPass;
}).catch(function (err) {
console.error(err);
if (err === ConferenceErrors.PASSWORD_NOT_SUPPORTED) {
notifyPasswordNotSupported();
} else {
@ -111,7 +112,7 @@ export default function createRoomLocker (room) {
},
askToUnlock () {
askToUnlock().then(function () {
return askToUnlock().then(function () {
return lock();
}).then(function () {
AnalyticsAdapter.sendEvent('toolbar.lock.disabled');

View File

@ -1,11 +1,10 @@
/* global $, config, interfaceConfig */
/* global $, APP, config, interfaceConfig */
/*
* Created by Yana Stamcheva on 2/10/15.
*/
var messageHandler = require("./util/MessageHandler");
var callStats = require("../statistics/CallStats");
var APP = require("../../app");
/**
* Constructs the html for the overall feedback window.

View File

@ -2,7 +2,6 @@
/* jshint -W101 */
var UI = {};
import AudioLevels from './audio_levels/AudioLevels';
import Chat from "./side_pannels/chat/Chat";
import Toolbar from "./toolbars/Toolbar";
import ToolbarToggler from "./toolbars/ToolbarToggler";
@ -12,26 +11,26 @@ import Avatar from "./avatar/Avatar";
import PanelToggler from "./side_pannels/SidePanelToggler";
import UIUtil from "./util/UIUtil";
import UIEvents from "../../service/UI/UIEvents";
import CQEvents from '../../service/connectionquality/CQEvents';
import PreziManager from './prezi/Prezi';
import EtherpadManager from './etherpad/Etherpad';
import VideoLayout from "./videolayout/VideoLayout";
import SettingsMenu from "./side_pannels/settings/SettingsMenu";
import Settings from "./../settings/Settings";
var Prezi = require("./prezi/Prezi");
var Etherpad = require("./etherpad/Etherpad");
var EventEmitter = require("events");
var Settings = require("./../settings/Settings");
UI.messageHandler = require("./util/MessageHandler");
var messageHandler = UI.messageHandler;
var JitsiPopover = require("./util/JitsiPopover");
var CQEvents = require("../../service/connectionquality/CQEvents");
var DesktopSharingEventTypes
= require("../../service/desktopsharing/DesktopSharingEventTypes");
var StatisticsEvents = require("../../service/statistics/Events");
var Feedback = require("./Feedback");
var eventEmitter = new EventEmitter();
UI.eventEmitter = eventEmitter;
let preziManager;
let etherpadManager;
function promptDisplayName() {
let nickRequiredMsg = APP.translation.translateString("dialog.displayNameRequired");
let defaultNickMsg = APP.translation.translateString(
@ -77,12 +76,6 @@ function promptDisplayName() {
);
}
function setupPrezi() {
$("#reloadPresentationLink").click(function() {
Prezi.reloadPresentation();
});
}
function setupChat() {
Chat.init(eventEmitter);
$("#toggle_smileys").click(function() {
@ -93,7 +86,7 @@ function setupChat() {
function setupToolbars() {
Toolbar.init(eventEmitter);
Toolbar.setupButtonsFromConfig();
BottomToolbar.init(eventEmitter);
BottomToolbar.setupListeners(eventEmitter);
}
/**
@ -189,20 +182,18 @@ UI.initConference = function () {
};
function registerListeners() {
UI.addListener(UIEvents.LARGEVIDEO_INIT, function () {
AudioLevels.init();
});
UI.addListener(UIEvents.EMAIL_CHANGED, function (email) {
UI.setUserAvatar(APP.conference.localId, email);
});
UI.addListener(UIEvents.PREZI_CLICKED, function () {
Prezi.openPreziDialog();
preziManager.handlePreziButtonClicked();
});
UI.addListener(UIEvents.ETHERPAD_CLICKED, function () {
Etherpad.toggleEtherpad(0);
if (etherpadManager) {
etherpadManager.toggleEtherpad();
}
});
UI.addListener(UIEvents.FULLSCREEN_TOGGLE, toggleFullScreen);
@ -221,7 +212,7 @@ function registerListeners() {
function bindEvents() {
function onResize() {
PanelToggler.resizeChat();
VideoLayout.resizeLargeVideoContainer();
VideoLayout.resizeLargeVideoContainer(PanelToggler.isVisible());
}
// Resize and reposition videos in full screen mode.
@ -253,12 +244,20 @@ UI.start = function () {
registerListeners();
BottomToolbar.init();
VideoLayout.init(eventEmitter);
if (!interfaceConfig.filmStripOnly) {
VideoLayout.initLargeVideo(PanelToggler.isVisible());
}
VideoLayout.resizeLargeVideoContainer(PanelToggler.isVisible());
ContactList.init(eventEmitter);
bindEvents();
setupPrezi();
preziManager = new PreziManager(eventEmitter);
if (!interfaceConfig.filmStripOnly) {
$("#videospace").mousemove(function () {
return ToolbarToggler.showToolbar();
});
@ -278,8 +277,7 @@ UI.start = function () {
$("#header").css("display", "none");
$("#bottomToolbar").css("display", "none");
$("#downloadlog").css("display", "none");
$("#remoteVideos").css("padding", "0px 0px 18px 0px");
$("#remoteVideos").css("right", "0px");
BottomToolbar.setupFilmStripOnly();
messageHandler.disableNotifications();
$('body').popover("disable");
JitsiPopover.enabled = false;
@ -350,9 +348,14 @@ UI.setSubject = function (subject) {
Chat.setSubject(subject);
};
function initEtherpad(name) {
Etherpad.init(name);
}
UI.initEtherpad = function (name) {
if (etherpadManager) {
return;
}
console.log('Etherpad is enabled');
etherpadManager = new EtherpadManager(config.etherpad_base, name);
Toolbar.showEtherpadButton();
};
UI.addUser = function (id, displayName) {
ContactList.addContact(id);
@ -391,8 +394,8 @@ UI.removeUser = function (id, displayName) {
// VideoLayout.setPresenceStatus(Strophe.getResourceFromJid(jid), info.status);
// }
UI.onPeerVideoTypeChanged = (resourceJid, newVideoType) => {
VideoLayout.onVideoTypeChanged(resourceJid, newVideoType);
UI.onPeerVideoTypeChanged = (id, newVideoType) => {
VideoLayout.onVideoTypeChanged(id, newVideoType);
};
UI.updateLocalRole = function (isModerator) {
@ -443,7 +446,6 @@ UI.getSettings = function () {
UI.toggleFilmStrip = function () {
BottomToolbar.toggleFilmStrip();
VideoLayout.updateLargeVideoSize();
};
UI.toggleChat = function () {
@ -592,13 +594,11 @@ UI.handleLastNEndpoints = function (ids) {
};
UI.setAudioLevel = function (id, lvl) {
AudioLevels.updateAudioLevel(
id, lvl, VideoLayout.getLargeVideoId()
);
VideoLayout.setAudioLevel(id, lvl);
};
UI.updateDesktopSharingButtons = function () {
Toolbar.changeDesktopSharingButtonState();
UI.updateDesktopSharingButtons = function (isSharingScreen) {
Toolbar.changeDesktopSharingButtonState(isSharingScreen);
};
UI.hideStats = function () {
@ -609,8 +609,8 @@ UI.updateLocalStats = function (percent, stats) {
VideoLayout.updateLocalConnectionStats(percent, stats);
};
UI.updateRemoteStats = function (jid, percent, stats) {
VideoLayout.updateConnectionStats(jid, percent, stats);
UI.updateRemoteStats = function (id, percent, stats) {
VideoLayout.updateConnectionStats(id, percent, stats);
};
UI.markVideoInterrupted = function (interrupted) {
@ -750,4 +750,14 @@ UI.updateAuthInfo = function (isAuthEnabled, login) {
}
};
UI.showPrezi = function (userId, url, slide) {
preziManager.showPrezi(userId, url, slide);
};
UI.stopPrezi = function (userId) {
if (preziManager.isSharing(userId)) {
preziManager.removePrezi(userId);
}
};
module.exports = UI;

View File

@ -2,6 +2,7 @@
/* jshint -W101 */
import CanvasUtil from './CanvasUtils';
import BottomToolbar from '../toolbars/BottomToolbar';
const LOCAL_LEVEL = 'local';
@ -112,33 +113,6 @@ function getVideoSpanId(id) {
return videoSpanId;
}
/**
* Indicates that the remote video has been resized.
*/
$(document).bind('remotevideo.resized', function (event, width, height) {
let resized = false;
$('#remoteVideos>span>canvas').each(function() {
let canvas = $(this).get(0);
if (canvas.width !== width + interfaceConfig.CANVAS_EXTRA) {
canvas.width = width + interfaceConfig.CANVAS_EXTRA;
resized = true;
}
if (canvas.height !== height + interfaceConfig.CANVAS_EXTRA) {
canvas.height = height + interfaceConfig.CANVAS_EXTRA;
resized = true;
}
});
if (resized) {
Object.keys(audioLevelCanvasCache).forEach(function (id) {
audioLevelCanvasCache[id].width = width + interfaceConfig.CANVAS_EXTRA;
audioLevelCanvasCache[id].height = height + interfaceConfig.CANVAS_EXTRA;
});
}
});
/**
* The audio Levels plugin.
*/
@ -153,7 +127,7 @@ const AudioLevels = {
* Updates the audio level canvas for the given id. If the canvas
* didn't exist we create it.
*/
updateAudioLevelCanvas (id, VideoLayout) {
updateAudioLevelCanvas (id, thumbWidth, thumbHeight) {
let videoSpanId = 'localVideoContainer';
if (id) {
videoSpanId = `participant_${id}`;
@ -172,24 +146,19 @@ const AudioLevels = {
let audioLevelCanvas = $(`#${videoSpanId}>canvas`);
let videoSpaceWidth = $('#remoteVideos').width();
let thumbnailSize = VideoLayout.calculateThumbnailSize(videoSpaceWidth);
let thumbnailWidth = thumbnailSize[0];
let thumbnailHeight = thumbnailSize[1];
if (!audioLevelCanvas || audioLevelCanvas.length === 0) {
audioLevelCanvas = document.createElement('canvas');
audioLevelCanvas.className = "audiolevel";
audioLevelCanvas.style.bottom = `-${interfaceConfig.CANVAS_EXTRA/2}px`;
audioLevelCanvas.style.left = `-${interfaceConfig.CANVAS_EXTRA/2}px`;
resizeAudioLevelCanvas(audioLevelCanvas, thumbnailWidth, thumbnailHeight);
resizeAudioLevelCanvas(audioLevelCanvas, thumbWidth, thumbHeight);
videoSpan.appendChild(audioLevelCanvas);
} else {
audioLevelCanvas = audioLevelCanvas.get(0);
resizeAudioLevelCanvas(audioLevelCanvas, thumbnailWidth, thumbnailHeight);
resizeAudioLevelCanvas(audioLevelCanvas, thumbWidth, thumbHeight);
}
},
@ -248,6 +217,18 @@ const AudioLevels = {
// Fill the shape.
ASDrawContext.fill();
},
updateCanvasSize (thumbWidth, thumbHeight) {
let canvasWidth = thumbWidth + interfaceConfig.CANVAS_EXTRA;
let canvasHeight = thumbHeight + interfaceConfig.CANVAS_EXTRA;
BottomToolbar.getThumbs().children('canvas').width(canvasWidth).height(canvasHeight);
Object.keys(audioLevelCanvasCache).forEach(function (id) {
audioLevelCanvasCache[id].width = canvasWidth;
audioLevelCanvasCache[id].height = canvasHeight;
});
}
};

View File

@ -110,14 +110,21 @@ function Dialog(successCallback, cancelCallback) {
*/
this.displayError = function (message) {
var finishedState = connDialog.getState('finished');
let finishedState = connDialog.getState('finished');
var errorMessageElem = finishedState.find('#errorMessage');
let errorMessageElem = finishedState.find('#errorMessage');
errorMessageElem.text(message);
connDialog.goToState('finished');
};
this.displayConnectionStatus = function (message) {
let connectingState = connDialog.getState('connecting');
let connectionStatus = connectingState.find('#connectionStatus');
connectionStatus.text(message);
};
/**
* Closes LoginDialog.
*/

View File

@ -1,61 +1,17 @@
/* global $, config,
setLargeVideoVisible, Util */
/* global $ */
var VideoLayout = require("../videolayout/VideoLayout");
var Prezi = require("../prezi/Prezi");
var UIUtil = require("../util/UIUtil");
import VideoLayout from "../videolayout/VideoLayout";
import LargeContainer from '../videolayout/LargeContainer';
import UIUtil from "../util/UIUtil";
import SidePanelToggler from "../side_pannels/SidePanelToggler";
import BottomToolbar from '../toolbars/BottomToolbar';
var etherpadName = null;
var etherpadIFrame = null;
var domain = null;
var options = "?showControls=true&showChat=false&showLineNumbers=true" +
"&useMonospaceFont=false";
/**
* Resizes the etherpad.
*/
function resize() {
if ($('#etherpad>iframe').length) {
var remoteVideos = $('#remoteVideos');
var availableHeight
= window.innerHeight - remoteVideos.outerHeight();
var availableWidth = UIUtil.getAvailableVideoWidth();
$('#etherpad>iframe').width(availableWidth);
$('#etherpad>iframe').height(availableHeight);
}
}
/**
* Creates the Etherpad button and adds it to the toolbar.
*/
function enableEtherpadButton() {
if (!$('#toolbar_button_etherpad').is(":visible"))
$('#toolbar_button_etherpad').css({display: 'inline-block'});
}
/**
* Creates the IFrame for the etherpad.
*/
function createIFrame() {
etherpadIFrame = VideoLayout.createEtherpadIframe(
domain + etherpadName + options, function() {
document.domain = document.domain;
bubbleIframeMouseMove(etherpadIFrame);
setTimeout(function() {
// the iframes inside of the etherpad are
// not yet loaded when the etherpad iframe is loaded
var outer = etherpadIFrame.
contentDocument.getElementsByName("ace_outer")[0];
bubbleIframeMouseMove(outer);
var inner = outer.
contentDocument.getElementsByName("ace_inner")[0];
bubbleIframeMouseMove(inner);
}, 2000);
});
}
const options = $.param({
showControns: true,
showChat: false,
showLineNumbers: true,
useMonospaceFont: false
});
function bubbleIframeMouseMove(iframe){
var existingOnMouseMove = iframe.contentWindow.onmousemove;
@ -71,8 +27,8 @@ function bubbleIframeMouseMove(iframe){
e.detail,
e.screenX,
e.screenY,
e.clientX + boundingClientRect.left,
e.clientY + boundingClientRect.top,
e.clientX + boundingClientRect.left,
e.clientY + boundingClientRect.top,
e.ctrlKey,
e.altKey,
e.shiftKey,
@ -84,48 +40,121 @@ function bubbleIframeMouseMove(iframe){
};
}
const DEFAULT_WIDTH = 640;
const DEFAULT_HEIGHT = 480;
var Etherpad = {
/**
* Initializes the etherpad.
*/
init: function (name) {
const EtherpadContainerType = "etherpad";
if (config.etherpad_base && !etherpadName && name) {
class Etherpad extends LargeContainer {
constructor (domain, name) {
super();
domain = config.etherpad_base;
const iframe = document.createElement('iframe');
etherpadName = name;
iframe.src = domain + name + '?' + options;
iframe.frameBorder = 0;
iframe.scrolling = "no";
iframe.width = DEFAULT_WIDTH;
iframe.height = DEFAULT_HEIGHT;
iframe.setAttribute('style', 'visibility: hidden;');
enableEtherpadButton();
this.container.appendChild(iframe);
/**
* Resizes the etherpad, when the window is resized.
*/
$(window).resize(function () {
resize();
});
}
},
iframe.onload = function() {
document.domain = document.domain;
bubbleIframeMouseMove(iframe);
/**
* Opens/hides the Etherpad.
*/
toggleEtherpad: function (isPresentation) {
if (!etherpadIFrame)
createIFrame();
setTimeout(function() {
const doc = iframe.contentDocument;
// the iframes inside of the etherpad are
// not yet loaded when the etherpad iframe is loaded
const outer = doc.getElementsByName("ace_outer")[0];
bubbleIframeMouseMove(outer);
if(VideoLayout.getLargeVideoState() === "etherpad")
{
VideoLayout.setLargeVideoState("video");
}
else
{
VideoLayout.setLargeVideoState("etherpad");
}
resize();
const inner = doc.getElementsByName("ace_inner")[0];
bubbleIframeMouseMove(inner);
}, 2000);
};
this.iframe = iframe;
}
};
module.exports = Etherpad;
get isOpen () {
return !!this.iframe;
}
get container () {
return document.getElementById('etherpad');
}
resize (containerWidth, containerHeight, animate) {
let height = containerHeight - BottomToolbar.getFilmStripHeight();
let width = containerWidth;
$(this.iframe).width(width).height(height);
}
show () {
const $iframe = $(this.iframe);
const $container = $(this.container);
return new Promise(resolve => {
$iframe.fadeIn(300, function () {
document.body.style.background = '#eeeeee';
$iframe.css({visibility: 'visible'});
$container.css({zIndex: 2});
resolve();
});
});
}
hide () {
const $iframe = $(this.iframe);
const $container = $(this.container);
return new Promise(resolve => {
$iframe.fadeOut(300, function () {
$iframe.css({visibility: 'hidden'});
$container.css({zIndex: 0});
resolve();
});
});
}
}
/**
 * Manages the shared Etherpad document: lazily creates the pad and
 * toggles its visibility in the large-video area.
 */
export default class EtherpadManager {
    /**
     * @param {string} domain Etherpad server domain
     * @param {string} name name of the shared pad
     * @throws {Error} when either argument is missing
     */
    constructor (domain, name) {
        if (!domain || !name) {
            throw new Error("missing domain or name");
        }

        this.domain = domain;
        this.name = name;
        this.etherpad = null;
    }

    /**
     * Whether the Etherpad instance has already been created.
     */
    get isOpen () {
        return Boolean(this.etherpad);
    }

    /**
     * Creates the Etherpad and registers it as a large-video container.
     */
    openEtherpad () {
        this.etherpad = new Etherpad(this.domain, this.name);
        VideoLayout.addLargeVideoContainer(
            EtherpadContainerType,
            this.etherpad
        );
    }

    /**
     * Shows the pad if it is currently hidden and vice versa, creating
     * it on first use.
     */
    toggleEtherpad () {
        if (!this.isOpen) {
            this.openEtherpad();
        }

        const isVisible = VideoLayout.isLargeContainerTypeVisible(
            EtherpadContainerType
        );

        VideoLayout.showLargeVideoContainer(EtherpadContainerType, !isVisible);
    }
}

View File

@ -1,268 +1,22 @@
/* global $, APP */
/* jshint -W101 */
import UIUtil from "../util/UIUtil";
import VideoLayout from "../videolayout/VideoLayout";
import LargeContainer from '../videolayout/LargeContainer';
import PreziPlayer from './PreziPlayer';
import UIUtil from '../util/UIUtil';
import UIEvents from '../../../service/UI/UIEvents';
import messageHandler from '../util/MessageHandler';
import ToolbarToggler from "../toolbars/ToolbarToggler";
import SidePanelToggler from "../side_pannels/SidePanelToggler";
import BottomToolbar from '../toolbars/BottomToolbar';
var messageHandler = require("../util/MessageHandler");
var PreziPlayer = require("./PreziPlayer");
const defaultPreziLink = "http://prezi.com/wz7vhjycl7e6/my-prezi";
const alphanumRegex = /^[a-z0-9-_\/&\?=;]+$/i;
const aspectRatio = 16.0 / 9.0;
var preziPlayer = null;
/**
* Shows/hides a presentation.
*/
function setPresentationVisible(visible) {
if (visible) {
VideoLayout.setLargeVideoState("prezi");
}
else {
VideoLayout.setLargeVideoState("video");
}
}
var Prezi = {
/**
* Reloads the current presentation.
*/
reloadPresentation: function() {
var iframe = document.getElementById(preziPlayer.options.preziId);
iframe.src = iframe.src;
},
/**
* Returns <tt>true</tt> if the presentation is visible, <tt>false</tt> -
* otherwise.
*/
isPresentationVisible: function () {
return ($('#presentation>iframe') != null
&& $('#presentation>iframe').css('opacity') == 1);
},
/**
* Opens the Prezi dialog, from which the user could choose a presentation
* to load.
*/
openPreziDialog: function() {
var myprezi = APP.xmpp.getPrezi();
if (myprezi) {
messageHandler.openTwoButtonDialog("dialog.removePreziTitle",
null,
"dialog.removePreziMsg",
null,
false,
"dialog.Remove",
function(e,v,m,f) {
if(v) {
APP.xmpp.removePreziFromPresence();
}
}
);
}
else if (preziPlayer != null) {
messageHandler.openTwoButtonDialog("dialog.sharePreziTitle",
null, "dialog.sharePreziMsg",
null,
false,
"dialog.Ok",
function(e,v,m,f) {
$.prompt.close();
}
);
}
else {
var html = APP.translation.generateTranslationHTML(
"dialog.sharePreziTitle");
var cancelButton = APP.translation.generateTranslationHTML(
"dialog.Cancel");
var shareButton = APP.translation.generateTranslationHTML(
"dialog.Share");
var backButton = APP.translation.generateTranslationHTML(
"dialog.Back");
var buttons = [];
var buttons1 = [];
// Cancel button to both states
buttons.push({title: cancelButton, value: false});
buttons1.push({title: cancelButton, value: false});
// Share button
buttons.push({title: shareButton, value: true});
// Back button
buttons1.push({title: backButton, value: true});
var linkError = APP.translation.generateTranslationHTML(
"dialog.preziLinkError");
var defaultUrl = APP.translation.translateString("defaultPreziLink",
{url: "http://prezi.com/wz7vhjycl7e6/my-prezi"});
var openPreziState = {
state0: {
html: '<h2>' + html + '</h2>' +
'<input name="preziUrl" type="text" ' +
'data-i18n="[placeholder]defaultPreziLink" data-i18n-options=\'' +
JSON.stringify({"url": "http://prezi.com/wz7vhjycl7e6/my-prezi"}) +
'\' placeholder="' + defaultUrl + '" autofocus>',
persistent: false,
buttons: buttons,
focus: ':input:first',
defaultButton: 0,
submit: function (e, v, m, f) {
e.preventDefault();
if(v)
{
var preziUrl = f.preziUrl;
if (preziUrl)
{
var urlValue
= encodeURI(UIUtil.escapeHtml(preziUrl));
if (urlValue.indexOf('http://prezi.com/') != 0
&& urlValue.indexOf('https://prezi.com/') != 0)
{
$.prompt.goToState('state1');
return false;
}
else {
var presIdTmp = urlValue.substring(
urlValue.indexOf("prezi.com/") + 10);
if (!isAlphanumeric(presIdTmp)
|| presIdTmp.indexOf('/') < 2) {
$.prompt.goToState('state1');
return false;
}
else {
APP.xmpp.addToPresence("prezi", urlValue);
$.prompt.close();
}
}
}
}
else
$.prompt.close();
}
},
state1: {
html: '<h2>' + html + '</h2>' +
linkError,
persistent: false,
buttons: buttons1,
focus: ':input:first',
defaultButton: 1,
submit: function (e, v, m, f) {
e.preventDefault();
if (v === 0)
$.prompt.close();
else
$.prompt.goToState('state0');
}
}
};
messageHandler.openDialogWithStates(openPreziState);
}
}
};
/**
* A new presentation has been added.
*
* @param event the event indicating the add of a presentation
* @param jid the jid from which the presentation was added
* @param presUrl url of the presentation
* @param currentSlide the current slide to which we should move
*/
function presentationAdded(event, jid, presUrl, currentSlide) {
console.log("presentation added", presUrl);
var presId = getPresentationId(presUrl);
var elementId = 'participant_'
+ Strophe.getResourceFromJid(jid)
+ '_' + presId;
VideoLayout.addPreziContainer(elementId);
var controlsEnabled = false;
if (jid === APP.xmpp.myJid())
controlsEnabled = true;
setPresentationVisible(true);
VideoLayout.setLargeVideoHover(
function (event) {
if (Prezi.isPresentationVisible()) {
var reloadButtonRight = window.innerWidth
- $('#presentation>iframe').offset().left
- $('#presentation>iframe').width();
$('#reloadPresentation').css({ right: reloadButtonRight,
display:'inline-block'});
}
},
function (event) {
if (!Prezi.isPresentationVisible())
$('#reloadPresentation').css({display:'none'});
else {
var e = event.toElement || event.relatedTarget;
if (e && e.id != 'reloadPresentation' && e.id != 'header')
$('#reloadPresentation').css({display:'none'});
}
});
preziPlayer = new PreziPlayer(
'presentation',
{preziId: presId,
width: getPresentationWidth(),
height: getPresentationHeihgt(),
controls: controlsEnabled,
debug: true
});
$('#presentation>iframe').attr('id', preziPlayer.options.preziId);
preziPlayer.on(PreziPlayer.EVENT_STATUS, function(event) {
console.log("prezi status", event.value);
if (event.value == PreziPlayer.STATUS_CONTENT_READY) {
if (jid != APP.xmpp.myJid())
preziPlayer.flyToStep(currentSlide);
}
});
preziPlayer.on(PreziPlayer.EVENT_CURRENT_STEP, function(event) {
console.log("event value", event.value);
APP.xmpp.addToPresence("preziSlide", event.value);
});
$("#" + elementId).css( 'background-image',
'url(../images/avatarprezi.png)');
$("#" + elementId).click(
function () {
setPresentationVisible(true);
}
);
};
/**
* A presentation has been removed.
*
* @param event the event indicating the remove of a presentation
* @param jid the jid for which the presentation was removed
* @param the url of the presentation
*/
function presentationRemoved(event, jid, presUrl) {
console.log('presentation removed', presUrl);
var presId = getPresentationId(presUrl);
setPresentationVisible(false);
$('#participant_'
+ Strophe.getResourceFromJid(jid)
+ '_' + presId).remove();
$('#presentation>iframe').remove();
if (preziPlayer != null) {
preziPlayer.destroy();
preziPlayer = null;
}
};
const DEFAULT_WIDTH = 640;
const DEFAULT_HEIGHT = 480;
/**
* Indicates if the given string is an alphanumeric string.
@ -270,76 +24,354 @@ function presentationRemoved(event, jid, presUrl) {
* purpose of checking URIs.
*/
function isAlphanumeric(unsafeText) {
var regex = /^[a-z0-9-_\/&\?=;]+$/i;
return regex.test(unsafeText);
return alphanumRegex.test(unsafeText);
}
/**
* Returns the presentation id from the given url.
*/
function getPresentationId (presUrl) {
var presIdTmp = presUrl.substring(presUrl.indexOf("prezi.com/") + 10);
return presIdTmp.substring(0, presIdTmp.indexOf('/'));
function getPresentationId (url) {
let presId = url.substring(url.indexOf("prezi.com/") + 10);
return presId.substring(0, presId.indexOf('/'));
}
/**
* Returns the presentation width.
*/
function getPresentationWidth() {
var availableWidth = UIUtil.getAvailableVideoWidth();
var availableHeight = getPresentationHeihgt();
var aspectRatio = 16.0 / 9.0;
if (availableHeight < availableWidth / aspectRatio) {
availableWidth = Math.floor(availableHeight * aspectRatio);
function isPreziLink(url) {
if (url.indexOf('http://prezi.com/') !== 0 && url.indexOf('https://prezi.com/') !== 0) {
return false;
}
return availableWidth;
}
/**
* Returns the presentation height.
*/
function getPresentationHeihgt() {
var remoteVideos = $('#remoteVideos');
return window.innerHeight - remoteVideos.outerHeight();
}
/**
* Resizes the presentation iframe.
*/
function resize() {
if ($('#presentation>iframe')) {
$('#presentation>iframe').width(getPresentationWidth());
$('#presentation>iframe').height(getPresentationHeihgt());
let presId = url.substring(url.indexOf("prezi.com/") + 10);
if (!isAlphanumeric(presId) || presId.indexOf('/') < 2) {
return false;
}
return true;
}
/**
* Presentation has been removed.
*/
$(document).bind('presentationremoved.muc', presentationRemoved);
/**
 * Informs the user that another participant is already sharing a Prezi.
 */
function notifyOtherIsSharingPrezi() {
    messageHandler.openMessageDialog(
        "dialog.sharePreziTitle", "dialog.sharePreziMsg");
}
/**
* Presentation has been added.
*/
$(document).bind('presentationadded.muc', presentationAdded);
function proposeToClosePrezi() {
return new Promise(function (resolve, reject) {
messageHandler.openTwoButtonDialog(
"dialog.removePreziTitle",
null,
"dialog.removePreziMsg",
null,
false,
"dialog.Remove",
function(e,v,m,f) {
if (v) {
resolve();
} else {
reject();
}
}
);
/*
* Indicates presentation slide change.
*/
$(document).bind('gotoslide.muc', function (event, jid, presUrl, current) {
if (preziPlayer && preziPlayer.getCurrentStep() != current) {
preziPlayer.flyToStep(current);
});
}
var animationStepsArray = preziPlayer.getAnimationCountOnSteps();
for (var i = 0; i < parseInt(animationStepsArray[current]); i++) {
preziPlayer.flyToStep(current, i);
/**
 * Asks the user for a Prezi link via a two-state dialog: an input state
 * and a link-error state shown when the entered url is not a valid
 * Prezi link.
 *
 * @returns {Promise<string>} resolves with the encoded Prezi url;
 * rejects when the user cancels the dialog.
 */
function requestPreziLink() {
    const title = APP.translation.generateTranslationHTML("dialog.sharePreziTitle");
    const cancelButton = APP.translation.generateTranslationHTML("dialog.Cancel");
    const shareButton = APP.translation.generateTranslationHTML("dialog.Share");
    const backButton = APP.translation.generateTranslationHTML("dialog.Back");
    const linkError = APP.translation.generateTranslationHTML("dialog.preziLinkError");
    const i18nOptions = {url: defaultPreziLink};
    const defaultUrl = APP.translation.translateString(
        "defaultPreziLink", i18nOptions
    );

    return new Promise(function (resolve, reject) {
        let dialog = messageHandler.openDialogWithStates({
            state0: {
                // NOTE: the serialized i18n options contain double
                // quotes, so the data-i18n-options attribute must be
                // single-quoted or the JSON would truncate it.
                html: `
                    <h2>${title}</h2>
                    <input name="preziUrl" type="text"
                        data-i18n="[placeholder]defaultPreziLink"
                        data-i18n-options='${JSON.stringify(i18nOptions)}'
                        placeholder="${defaultUrl}" autofocus>`,
                persistent: false,
                buttons: [
                    {title: cancelButton, value: false},
                    {title: shareButton, value: true}
                ],
                focus: ':input:first',
                defaultButton: 1,
                submit: function (e, v, m, f) {
                    e.preventDefault();
                    if (!v) {
                        reject('cancelled');
                        dialog.close();
                        return;
                    }

                    let preziUrl = f.preziUrl;
                    if (!preziUrl) {
                        // nothing entered yet -- keep the dialog open
                        return;
                    }

                    let urlValue = encodeURI(UIUtil.escapeHtml(preziUrl));
                    if (!isPreziLink(urlValue)) {
                        // invalid link: switch to the error state
                        dialog.goToState('state1');
                        return false;
                    }

                    resolve(urlValue);
                    dialog.close();
                }
            },
            state1: {
                html: `<h2>${title}</h2> ${linkError}`,
                persistent: false,
                buttons: [
                    {title: cancelButton, value: false},
                    {title: backButton, value: true}
                ],
                focus: ':input:first',
                defaultButton: 1,
                submit: function (e, v, m, f) {
                    e.preventDefault();
                    if (v === 0) {
                        reject();
                        dialog.close();
                    } else {
                        dialog.goToState('state0');
                    }
                }
            }
        });
    });
}
export const PreziContainerType = "prezi";
class PreziContainer extends LargeContainer {
/**
 * Wraps a PreziPlayer iframe as a large-video container.
 *
 * @param {string} preziId id of the presentation to load
 * @param {boolean} isMy whether the local user is the presenter
 * (enables player controls)
 * @param {number} slide slide to fly to once content is ready
 * (remote viewers only)
 * @param {Function} onSlideChanged invoked with the new step index
 * whenever the player's current step changes
 */
constructor ({preziId, isMy, slide, onSlideChanged}) {
    super();
    this.reloadBtn = $('#reloadPresentation');
    let preziPlayer = new PreziPlayer(
        'presentation', {
            preziId,
            width: DEFAULT_WIDTH,
            height: DEFAULT_HEIGHT,
            controls: isMy,
            debug: true
        }
    );
    this.preziPlayer = preziPlayer;
    this.$iframe = $(preziPlayer.iframe);
    this.$iframe.attr('id', preziId);
    preziPlayer.on(PreziPlayer.EVENT_STATUS, function({value}) {
        console.log("prezi status", value);
        // remote viewers jump to the presenter's slide once content is
        // ready; the presenter keeps its own position
        if (value == PreziPlayer.STATUS_CONTENT_READY && !isMy) {
            preziPlayer.flyToStep(slide);
        }
    });
    preziPlayer.on(PreziPlayer.EVENT_CURRENT_STEP, function({value}) {
        console.log("event value", value);
        onSlideChanged(value);
    });
}
goToSlide (slide) {
if (this.preziPlayer.getCurrentStep() === slide) {
return;
}
this.preziPlayer.flyToStep(slide);
let animationStepsArray = this.preziPlayer.getAnimationCountOnSteps();
if (!animationStepsArray) {
return;
}
for (var i = 0; i < parseInt(animationStepsArray[slide]); i += 1) {
this.preziPlayer.flyToStep(slide, i);
}
}
});
$(window).resize(function () {
resize();
});
showReloadBtn (show) {
this.reloadBtn.css('display', show ? 'inline-block' : 'none');
}
module.exports = Prezi;
show () {
return new Promise(resolve => {
this.$iframe.fadeIn(300, () => {
this.$iframe.css({opacity: 1});
ToolbarToggler.dockToolbar(true);
resolve();
});
});
}
hide () {
return new Promise(resolve => {
this.$iframe.fadeOut(300, () => {
this.$iframe.css({opacity: 0});
this.showReloadBtn(false);
ToolbarToggler.dockToolbar(false);
resolve();
});
});
}
onHoverIn () {
let rightOffset = window.innerWidth - this.$iframe.offset().left - this.$iframe.width();
this.showReloadBtn(true);
this.reloadBtn.css('right', rightOffset);
}
onHoverOut (event) {
let e = event.toElement || event.relatedTarget;
if (e && e.id != 'reloadPresentation' && e.id != 'header') {
this.showReloadBtn(false);
}
}
resize (containerWidth, containerHeight) {
let height = containerHeight - BottomToolbar.getFilmStripHeight();
let width = containerWidth;
if (height < width / aspectRatio) {
width = Math.floor(height * aspectRatio);
}
this.$iframe.width(width).height(height);
}
/**
 * Tears the presentation down: hides the reload button, destroys the
 * player and removes its iframe from the DOM.
 */
close () {
    this.showReloadBtn(false);
    this.preziPlayer.destroy();
    this.$iframe.remove();
}
}
/**
 * Manages Prezi sharing: tracks who is presenting, creates and removes
 * the Prezi large-video container, and relays slide changes over the
 * emitter.
 */
export default class PreziManager {
    /**
     * @param emitter event emitter used to broadcast
     * UIEvents.SHARE_PREZI / UIEvents.STOP_SHARING_PREZI
     */
    constructor (emitter) {
        this.emitter = emitter;
        // id of the user currently presenting (null when nobody is)
        this.userId = null;
        // url of the presentation currently shared
        this.url = null;
        // PreziContainer instance while a presentation is shown
        this.prezi = null;
        $("#reloadPresentationLink").click(this.reloadPresentation.bind(this));
    }
    // Whether anybody (local or remote) is currently presenting.
    get isPresenting () {
        return !!this.userId;
    }
    // Whether the local user is the presenter.
    get isMyPrezi () {
        return this.userId === APP.conference.localId;
    }
    /**
     * Checks if the user with the given id is the current presenter.
     */
    isSharing (id) {
        return this.userId === id;
    }
    /**
     * Toolbar button handler: asks for a link when nobody presents,
     * proposes to stop when the local user presents, otherwise tells
     * the user somebody else is already presenting.
     */
    handlePreziButtonClicked () {
        if (!this.isPresenting) {
            requestPreziLink().then(
                url => this.emitter.emit(UIEvents.SHARE_PREZI, url, 0),
                err => console.error('PREZI CANCELED', err)
            );
            return;
        }
        if (this.isMyPrezi) {
            proposeToClosePrezi().then(() => this.emitter.emit(UIEvents.STOP_SHARING_PREZI));
        } else {
            notifyOtherIsSharingPrezi();
        }
    }
    /**
     * Reloads the presentation iframe (no-op when nothing is shown).
     */
    reloadPresentation () {
        if (!this.prezi) {
            return;
        }
        let iframe = this.prezi.$iframe[0];
        // re-assigning src forces the browser to reload the iframe
        iframe.src = iframe.src;
    }
    /**
     * Shows the presentation shared by the given user, creating it on
     * the first call, and moves it to the given slide.
     *
     * @throws {Error} when a different user/url is already presenting
     */
    showPrezi (id, url, slide) {
        if (!this.isPresenting) {
            this.createPrezi(id, url, slide);
        }
        if (this.userId === id && this.url === url) {
            this.prezi.goToSlide(slide);
        } else {
            console.error(this.userId, id);
            console.error(this.url, url);
            throw new Error("unexpected presentation change");
        }
    }
    /**
     * Creates the participant thumbnail and the Prezi large-video
     * container, then shows the presentation.
     */
    createPrezi (id, url, slide) {
        console.log("presentation added", url);
        this.userId = id;
        this.url = url;
        let preziId = getPresentationId(url);
        let elementId = `participant_${id}_${preziId}`;
        this.$thumb = $(VideoLayout.addRemoteVideoContainer(elementId));
        VideoLayout.resizeThumbnails();
        this.$thumb.css({
            'background-image': 'url(../images/avatarprezi.png)'
        }).click(() => VideoLayout.showLargeVideoContainer(PreziContainerType, true));
        this.prezi = new PreziContainer({
            preziId,
            isMy: this.isMyPrezi,
            slide,
            onSlideChanged: newSlide => {
                // only the presenter broadcasts slide changes
                if (this.isMyPrezi) {
                    this.emitter.emit(UIEvents.SHARE_PREZI, url, newSlide);
                }
            }
        });
        VideoLayout.addLargeVideoContainer(PreziContainerType, this.prezi);
        VideoLayout.showLargeVideoContainer(PreziContainerType, true);
    }
    /**
     * Removes the presentation of the given user and resets the state.
     *
     * @throws {Error} when the id does not match the current presenter
     */
    removePrezi (id) {
        if (this.userId !== id) {
            throw new Error(`cannot close presentation from ${this.userId} instead of ${id}`);
        }
        this.$thumb.remove();
        this.$thumb = null;
        // wait until Prezi is hidden, then remove it
        VideoLayout.showLargeVideoContainer(PreziContainerType, false).then(() => {
            console.log("presentation removed", this.url);
            VideoLayout.removeLargeVideoContainer(PreziContainerType);
            this.userId = null;
            this.url = null;
            this.prezi.close();
            this.prezi = null;
        });
    }
}

View File

@ -1,298 +1,290 @@
/* global PreziPlayer */
/* jshint -W101 */
(function() {
"use strict";
var __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };
window.PreziPlayer = (function() {
var __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };
PreziPlayer.API_VERSION = 1;
PreziPlayer.CURRENT_STEP = 'currentStep';
PreziPlayer.CURRENT_ANIMATION_STEP = 'currentAnimationStep';
PreziPlayer.CURRENT_OBJECT = 'currentObject';
PreziPlayer.STATUS_LOADING = 'loading';
PreziPlayer.STATUS_READY = 'ready';
PreziPlayer.STATUS_CONTENT_READY = 'contentready';
PreziPlayer.EVENT_CURRENT_STEP = "currentStepChange";
PreziPlayer.EVENT_CURRENT_ANIMATION_STEP = "currentAnimationStepChange";
PreziPlayer.EVENT_CURRENT_OBJECT = "currentObjectChange";
PreziPlayer.EVENT_STATUS = "statusChange";
PreziPlayer.EVENT_PLAYING = "isAutoPlayingChange";
PreziPlayer.EVENT_IS_MOVING = "isMovingChange";
PreziPlayer.domain = "https://prezi.com";
PreziPlayer.path = "/player/";
PreziPlayer.players = {};
PreziPlayer.binded_methods = ['changesHandler'];
PreziPlayer.API_VERSION = 1;
PreziPlayer.CURRENT_STEP = 'currentStep';
PreziPlayer.CURRENT_ANIMATION_STEP = 'currentAnimationStep';
PreziPlayer.CURRENT_OBJECT = 'currentObject';
PreziPlayer.STATUS_LOADING = 'loading';
PreziPlayer.STATUS_READY = 'ready';
PreziPlayer.STATUS_CONTENT_READY = 'contentready';
PreziPlayer.EVENT_CURRENT_STEP = "currentStepChange";
PreziPlayer.EVENT_CURRENT_ANIMATION_STEP = "currentAnimationStepChange";
PreziPlayer.EVENT_CURRENT_OBJECT = "currentObjectChange";
PreziPlayer.EVENT_STATUS = "statusChange";
PreziPlayer.EVENT_PLAYING = "isAutoPlayingChange";
PreziPlayer.EVENT_IS_MOVING = "isMovingChange";
PreziPlayer.domain = "https://prezi.com";
PreziPlayer.path = "/player/";
PreziPlayer.players = {};
PreziPlayer.binded_methods = ['changesHandler'];
PreziPlayer.createMultiplePlayers = function(optionArray){
for(var i=0; i<optionArray.length; i++) {
var optionSet = optionArray[i];
new PreziPlayer(optionSet.id, optionSet);
PreziPlayer.createMultiplePlayers = function(optionArray){
for(var i=0; i<optionArray.length; i++) {
var optionSet = optionArray[i];
new PreziPlayer(optionSet.id, optionSet);
}
};
PreziPlayer.messageReceived = function(event){
var message, item, player;
try {
message = JSON.parse(event.data);
if (message.id && (player = PreziPlayer.players[message.id])) {
if (player.options.debug === true) {
if (console && console.log)
console.log('received', message);
}
};
PreziPlayer.messageReceived = function(event){
var message, item, player;
try {
message = JSON.parse(event.data);
if (message.id && (player = PreziPlayer.players[message.id])) {
if (player.options.debug === true) {
if (console && console.log)
console.log('received', message);
}
if (message.type === "changes") {
player.changesHandler(message);
}
for (var i = 0; i < player.callbacks.length; i++) {
item = player.callbacks[i];
if (item && message.type === item.event) {
item.callback(message);
}
}
if (message.type === "changes") {
player.changesHandler(message);
}
for (var i = 0; i < player.callbacks.length; i++) {
item = player.callbacks[i];
if (item && message.type === item.event) {
item.callback(message);
}
} catch (e) { }
};
}
}
} catch (e) { }
};
/*jshint -W004 */
function PreziPlayer(id, options) {
/*jshint +W004 */
var params, paramString = "", _this = this;
if (PreziPlayer.players[id]){
PreziPlayer.players[id].destroy();
}
for(var i=0; i<PreziPlayer.binded_methods.length; i++) {
var method_name = PreziPlayer.binded_methods[i];
_this[method_name] = __bind(_this[method_name], _this);
}
options = options || {};
this.options = options;
this.values = {'status': PreziPlayer.STATUS_LOADING};
this.values[PreziPlayer.CURRENT_STEP] = 0;
this.values[PreziPlayer.CURRENT_ANIMATION_STEP] = 0;
this.values[PreziPlayer.CURRENT_OBJECT] = null;
this.callbacks = [];
this.id = id;
this.embedTo = document.getElementById(id);
if (!this.embedTo) {
throw "The element id is not available.";
}
this.iframe = document.createElement('iframe');
params = [
{ name: 'oid', value: options.preziId },
{ name: 'explorable', value: options.explorable ? 1 : 0 },
{ name: 'controls', value: options.controls ? 1 : 0 }
];
for (i=0; i<params.length; i++) {
var param = params[i];
paramString += (i===0 ? "?" : "&") + param.name + "=" + param.value;
}
this.iframe.src = PreziPlayer.domain + PreziPlayer.path + paramString;
this.iframe.frameBorder = 0;
this.iframe.scrolling = "no";
this.iframe.width = options.width || 640;
this.iframe.height = options.height || 480;
this.embedTo.innerHTML = '';
// JITSI: IN CASE SOMETHING GOES WRONG.
try {
this.embedTo.appendChild(this.iframe);
}
catch (err) {
console.log("CATCH ERROR");
}
function PreziPlayer(id, options) {
/*jshint +W004 */
var params, paramString = "", _this = this;
if (PreziPlayer.players[id]){
PreziPlayer.players[id].destroy();
}
for(var i=0; i<PreziPlayer.binded_methods.length; i++) {
var method_name = PreziPlayer.binded_methods[i];
_this[method_name] = __bind(_this[method_name], _this);
}
options = options || {};
this.options = options;
this.values = {'status': PreziPlayer.STATUS_LOADING};
this.values[PreziPlayer.CURRENT_STEP] = 0;
this.values[PreziPlayer.CURRENT_ANIMATION_STEP] = 0;
this.values[PreziPlayer.CURRENT_OBJECT] = null;
this.callbacks = [];
this.id = id;
this.embedTo = document.getElementById(id);
if (!this.embedTo) {
throw "The element id is not available.";
}
this.iframe = document.createElement('iframe');
params = [
{ name: 'oid', value: options.preziId },
{ name: 'explorable', value: options.explorable ? 1 : 0 },
{ name: 'controls', value: options.controls ? 1 : 0 }
];
for (i=0; i<params.length; i++) {
var param = params[i];
paramString += (i===0 ? "?" : "&") + param.name + "=" + param.value;
}
this.iframe.src = PreziPlayer.domain + PreziPlayer.path + paramString;
this.iframe.frameBorder = 0;
this.iframe.scrolling = "no";
this.iframe.width = options.width || 640;
this.iframe.height = options.height || 480;
this.embedTo.innerHTML = '';
// JITSI: IN CASE SOMETHING GOES WRONG.
try {
this.embedTo.appendChild(this.iframe);
}
catch (err) {
console.log("CATCH ERROR");
}
// JITSI: Increase interval from 200 to 500, which fixes prezi
// crashes for us.
this.initPollInterval = setInterval(function(){
_this.sendMessage({'action': 'init'});
}, 500);
PreziPlayer.players[id] = this;
}
// JITSI: Increase interval from 200 to 500, which fixes prezi
// crashes for us.
this.initPollInterval = setInterval(function(){
_this.sendMessage({'action': 'init'});
}, 500);
PreziPlayer.players[id] = this;
}
/**
 * Handles a "changes" message from the player iframe: caches each
 * reported value and fires the matching "<key>Change" callbacks.
 */
PreziPlayer.prototype.changesHandler = function(message) {
    // first message from the player: stop polling with 'init'
    if (this.initPollInterval) {
        clearInterval(this.initPollInterval);
        this.initPollInterval = false;
    }
    var self = this;
    Object.keys(message.data).forEach(function (key) {
        var value = message.data[key];
        self.values[key] = value;
        for (var j = 0; j < self.callbacks.length; j++) {
            var item = self.callbacks[j];
            if (item && item.event === key + "Change") {
                item.callback({type: item.event, value: value});
            }
        }
    });
};
/**
 * Destroys the player: stops the init polling (if still running) and
 * removes the iframe from its container element.
 */
PreziPlayer.prototype.destroy = function() {
    if (this.initPollInterval) {
        clearInterval(this.initPollInterval);
        this.initPollInterval = false;
    }
    this.embedTo.innerHTML = '';
};
/**
 * Posts a JSON-serialized command to the player iframe, stamping it
 * with the API version and this player's id.
 */
PreziPlayer.prototype.sendMessage = function(message) {
    if (this.options.debug === true && console && console.log) {
        console.log('sent', message);
    }
    message.version = PreziPlayer.API_VERSION;
    message.id = this.id;
    return this.iframe.contentWindow.postMessage(JSON.stringify(message), '*');
};
// Moves the presentation one step forward.
PreziPlayer.prototype.nextStep = /* nextStep is DEPRECATED */
PreziPlayer.prototype.flyToNextStep = function() {
    return this.sendMessage({'action': 'present', 'data': ['moveToNextStep']});
};
// Moves the presentation one step back.
PreziPlayer.prototype.previousStep = /* previousStep is DEPRECATED */
PreziPlayer.prototype.flyToPreviousStep = function() {
    return this.sendMessage({'action': 'present', 'data': ['moveToPrevStep']});
};
/**
 * Flies to the given step and optionally replays up to animation_step
 * animations on it by repeatedly calling flyToNextStep once the flight
 * has finished.
 */
PreziPlayer.prototype.toStep = /* toStep is DEPRECATED */
PreziPlayer.prototype.flyToStep = function(step, animation_step) {
    var obj = this;
    // clamp animation_step to the step's animation count when known
    if (animation_step > 0 &&
        obj.values.animationCountOnSteps &&
        obj.values.animationCountOnSteps[step] <= animation_step) {
        animation_step = obj.values.animationCountOnSteps[step];
    }
    // jump to animation steps by calling flyToNextStep()
    function doAnimationSteps() {
        if (obj.values.isMoving) {
            setTimeout(doAnimationSteps, 100); // wait until the flight ends
            return;
        }
        while (animation_step-- > 0) {
            obj.flyToNextStep(); // do the animation steps
        }
    }
    setTimeout(doAnimationSteps, 200); // 200ms is the internal "reporting" time
    // jump to the step
    return this.sendMessage({
        'action': 'present',
        'data': ['moveToStep', step]
    });
};
// Flies the camera to the object with the given id.
PreziPlayer.prototype.toObject = /* toObject is DEPRECATED */
PreziPlayer.prototype.flyToObject = function(objectId) {
    return this.sendMessage({'action': 'present', 'data': ['moveToObject', objectId]});
};
// Starts auto-play with the given per-step delay.
PreziPlayer.prototype.play = function(defaultDelay) {
    return this.sendMessage({'action': 'present', 'data': ['startAutoPlay', defaultDelay]});
};
// Stops auto-play.
PreziPlayer.prototype.stop = function() {
    return this.sendMessage({'action': 'present', 'data': ['stopAutoPlay']});
};
// Pauses auto-play.
PreziPlayer.prototype.pause = function(defaultDelay) {
    return this.sendMessage({'action': 'present', 'data': ['pauseAutoPlay', defaultDelay]});
};
// Simple accessors over the values last reported by the player iframe.
PreziPlayer.prototype.getCurrentStep = function() { return this.values.currentStep; };
PreziPlayer.prototype.getCurrentAnimationStep = function() { return this.values.currentAnimationStep; };
PreziPlayer.prototype.getCurrentObject = function() { return this.values.currentObject; };
PreziPlayer.prototype.getStatus = function() { return this.values.status; };
PreziPlayer.prototype.isPlaying = function() { return this.values.isAutoPlaying; };
PreziPlayer.prototype.getStepCount = function() { return this.values.stepCount; };
PreziPlayer.prototype.getAnimationCountOnSteps = function() { return this.values.animationCountOnSteps; };
PreziPlayer.prototype.getTitle = function() { return this.values.title; };
// Copies the given width/height entries straight onto the iframe element.
PreziPlayer.prototype.setDimensions = function(dims) {
    for (var prop in dims) {
        this.iframe[prop] = dims[prop];
    }
};
// Returns the iframe dimensions as integers.
PreziPlayer.prototype.getDimensions = function() {
    var width = parseInt(this.iframe.width, 10);
    var height = parseInt(this.iframe.height, 10);
    return {width: width, height: height};
};
// Registers a callback for the given player event.
PreziPlayer.prototype.on = function(event, callback) {
    this.callbacks.push({event: event, callback: callback});
};
PreziPlayer.prototype.off = function(event, callback) {
var j, item;
if (event === undefined) {
this.callbacks = [];
}
j = this.callbacks.length;
while (j--) {
PreziPlayer.prototype.changesHandler = function(message) {
var key, value, j, item;
if (this.initPollInterval) {
clearInterval(this.initPollInterval);
this.initPollInterval = false;
}
for (key in message.data) {
if (message.data.hasOwnProperty(key)){
value = message.data[key];
this.values[key] = value;
for (j=0; j<this.callbacks.length; j++) {
item = this.callbacks[j];
if (item && item.event === event && (callback === undefined || item.callback === callback)){
this.callbacks.splice(j, 1);
if (item && item.event === key + "Change"){
item.callback({type: item.event, value: value});
}
}
};
if (window.addEventListener) {
window.addEventListener('message', PreziPlayer.messageReceived, false);
} else {
window.attachEvent('onmessage', PreziPlayer.messageReceived);
}
}
};
return PreziPlayer;
PreziPlayer.prototype.destroy = function() {
if (this.initPollInterval) {
clearInterval(this.initPollInterval);
this.initPollInterval = false;
}
this.embedTo.innerHTML = '';
};
})();
PreziPlayer.prototype.sendMessage = function(message) {
if (this.options.debug === true) {
if (console && console.log) console.log('sent', message);
}
message.version = PreziPlayer.API_VERSION;
message.id = this.id;
return this.iframe.contentWindow.postMessage(JSON.stringify(message), '*');
};
})();
PreziPlayer.prototype.nextStep = /* nextStep is DEPRECATED */
PreziPlayer.prototype.flyToNextStep = function() {
return this.sendMessage({
'action': 'present',
'data': ['moveToNextStep']
});
};
module.exports = PreziPlayer;
PreziPlayer.prototype.previousStep = /* previousStep is DEPRECATED */
PreziPlayer.prototype.flyToPreviousStep = function() {
return this.sendMessage({
'action': 'present',
'data': ['moveToPrevStep']
});
};
PreziPlayer.prototype.toStep = /* toStep is DEPRECATED */
PreziPlayer.prototype.flyToStep = function(step, animation_step) {
var obj = this;
// check animation_step
if (animation_step > 0 &&
obj.values.animationCountOnSteps &&
obj.values.animationCountOnSteps[step] <= animation_step) {
animation_step = obj.values.animationCountOnSteps[step];
}
// jump to animation steps by calling flyToNextStep()
function doAnimationSteps() {
if (obj.values.isMoving) {
setTimeout(doAnimationSteps, 100); // wait until the flight ends
return;
}
while (animation_step-- > 0) {
obj.flyToNextStep(); // do the animation steps
}
}
setTimeout(doAnimationSteps, 200); // 200ms is the internal "reporting" time
// jump to the step
return this.sendMessage({
'action': 'present',
'data': ['moveToStep', step]
});
};
PreziPlayer.prototype.toObject = /* toObject is DEPRECATED */
PreziPlayer.prototype.flyToObject = function(objectId) {
return this.sendMessage({
'action': 'present',
'data': ['moveToObject', objectId]
});
};
PreziPlayer.prototype.play = function(defaultDelay) {
return this.sendMessage({
'action': 'present',
'data': ['startAutoPlay', defaultDelay]
});
};
PreziPlayer.prototype.stop = function() {
return this.sendMessage({
'action': 'present',
'data': ['stopAutoPlay']
});
};
PreziPlayer.prototype.pause = function(defaultDelay) {
return this.sendMessage({
'action': 'present',
'data': ['pauseAutoPlay', defaultDelay]
});
};
PreziPlayer.prototype.getCurrentStep = function() {
return this.values.currentStep;
};
PreziPlayer.prototype.getCurrentAnimationStep = function() {
return this.values.currentAnimationStep;
};
PreziPlayer.prototype.getCurrentObject = function() {
return this.values.currentObject;
};
PreziPlayer.prototype.getStatus = function() {
return this.values.status;
};
PreziPlayer.prototype.isPlaying = function() {
return this.values.isAutoPlaying;
};
PreziPlayer.prototype.getStepCount = function() {
return this.values.stepCount;
};
PreziPlayer.prototype.getAnimationCountOnSteps = function() {
return this.values.animationCountOnSteps;
};
PreziPlayer.prototype.getTitle = function() {
return this.values.title;
};
PreziPlayer.prototype.setDimensions = function(dims) {
for (var parameter in dims) {
this.iframe[parameter] = dims[parameter];
}
};
PreziPlayer.prototype.getDimensions = function() {
return {
width: parseInt(this.iframe.width, 10),
height: parseInt(this.iframe.height, 10)
};
};
PreziPlayer.prototype.on = function(event, callback) {
this.callbacks.push({
event: event,
callback: callback
});
};
PreziPlayer.prototype.off = function(event, callback) {
var j, item;
if (event === undefined) {
this.callbacks = [];
}
j = this.callbacks.length;
while (j--) {
item = this.callbacks[j];
if (item && item.event === event && (callback === undefined || item.callback === callback)){
this.callbacks.splice(j, 1);
}
}
};
if (window.addEventListener) {
window.addEventListener('message', PreziPlayer.messageReceived, false);
} else {
window.attachEvent('onmessage', PreziPlayer.messageReceived);
}
window.PreziPlayer = PreziPlayer;
export default PreziPlayer;

View File

@ -6,7 +6,6 @@ import SettingsMenu from "./settings/SettingsMenu";
import VideoLayout from "../videolayout/VideoLayout";
import ToolbarToggler from "../toolbars/ToolbarToggler";
import UIUtil from "../util/UIUtil";
import LargeVideo from "../videolayout/LargeVideo";
const buttons = {
'#chatspace': '#chatBottomButton',
@ -47,7 +46,7 @@ function toggle (object, selector, onOpenComplete, onOpen, onClose) {
} else {
// Undock the toolbar when the chat is shown and if we're in a
// video mode.
if (LargeVideo.isLargeVideoVisible()) {
if (VideoLayout.isLargeVideoVisible()) {
ToolbarToggler.dockToolbar(false);
}
@ -62,7 +61,7 @@ function toggle (object, selector, onOpenComplete, onOpen, onClose) {
}
$("#toast-container").animate({
right: (PanelToggler.getPanelSize()[0] + 5)
right: (UIUtil.getSidePanelSize()[0] + 5)
}, {
queue: false,
duration: 500
@ -116,7 +115,7 @@ var PanelToggler = {
},
resizeChat () {
let [width, height] = this.getPanelSize();
let [width, height] = UIUtil.getSidePanelSize();
Chat.resizeChat(width, height);
},
@ -156,21 +155,6 @@ var PanelToggler = {
null);
},
/**
* Returns the size of the side panel.
*/
getPanelSize () {
var availableHeight = window.innerHeight;
var availableWidth = window.innerWidth;
var panelWidth = 200;
if (availableWidth * 0.2 < 200) {
panelWidth = availableWidth * 0.2;
}
return [panelWidth, availableHeight];
},
isVisible () {
return (Chat.isVisible() ||
ContactList.isVisible() ||

View File

@ -9,15 +9,13 @@ const defaultBottomToolbarButtons = {
'filmstrip': '#bottom_toolbar_film_strip'
};
$(document).bind("remotevideo.resized", function (event, width, height) {
let toolbar = $('#bottomToolbar');
let bottom = (height - toolbar.outerHeight())/2 + 18;
toolbar.css({bottom});
});
const BottomToolbar = {
init (emitter) {
init () {
this.filmStrip = $('#remoteVideos');
this.toolbar = $('#bottomToolbar');
},
setupListeners (emitter) {
UIUtil.hideDisabledButtons(defaultBottomToolbarButtons);
const buttonHandlers = {
@ -41,7 +39,69 @@ const BottomToolbar = {
},
toggleFilmStrip () {
$("#remoteVideos").toggleClass("hidden");
this.filmStrip.toggleClass("hidden");
},
isFilmStripVisible () {
return !this.filmStrip.hasClass('hidden');
},
setupFilmStripOnly () {
this.filmStrip.css({
padding: "0px 0px 18px 0px",
right: 0
});
},
getFilmStripHeight () {
if (this.isFilmStripVisible()) {
return this.filmStrip.outerHeight();
} else {
return 0;
}
},
getFilmStripWidth () {
return this.filmStrip.width();
},
resizeThumbnails (thumbWidth, thumbHeight, animate = false) {
return new Promise(resolve => {
this.filmStrip.animate({
// adds 2 px because of small video 1px border
height: thumbHeight + 2
}, {
queue: false,
duration: animate ? 500 : 0
});
this.getThumbs().animate({
height: thumbHeight,
width: thumbWidth
}, {
queue: false,
duration: animate ? 500 : 0,
complete: resolve
});
if (!animate) {
resolve();
}
});
},
resizeToolbar (thumbWidth, thumbHeight) {
let bottom = (thumbHeight - this.toolbar.outerHeight())/2 + 18;
this.toolbar.css({bottom});
},
getThumbs (visible = false) {
let selector = 'span';
if (visible) {
selector += ':visible';
}
return this.filmStrip.children(selector);
}
};

View File

@ -136,7 +136,7 @@ const buttonHandlers = {
} else {
AnalyticsAdapter.sendEvent('toolbar.screen.enabled');
}
APP.desktopsharing.toggleScreenSharing();
emitter.emit(UIEvents.TOGGLE_SCREENSHARING);
},
"toolbar_button_fullScreen": function() {
AnalyticsAdapter.sendEvent('toolbar.fullscreen.enabled');
@ -182,17 +182,17 @@ const buttonHandlers = {
}
};
const defaultToolbarButtons = {
'microphone': '#toolbar_button_mute',
'camera': '#toolbar_button_camera',
'desktop': '#toolbar_button_desktopsharing',
'security': '#toolbar_button_security',
'invite': '#toolbar_button_link',
'chat': '#toolbar_button_chat',
'prezi': '#toolbar_button_prezi',
'etherpad': '#toolbar_button_etherpad',
'fullscreen': '#toolbar_button_fullScreen',
'settings': '#toolbar_button_settings',
'hangup': '#toolbar_button_hangup'
'microphone': '#toolbar_button_mute',
'camera': '#toolbar_button_camera',
'desktop': '#toolbar_button_desktopsharing',
'security': '#toolbar_button_security',
'invite': '#toolbar_button_link',
'chat': '#toolbar_button_chat',
'prezi': '#toolbar_button_prezi',
'etherpad': '#toolbar_button_etherpad',
'fullscreen': '#toolbar_button_fullScreen',
'settings': '#toolbar_button_settings',
'hangup': '#toolbar_button_hangup'
};
function dialpadButtonClicked() {
@ -208,7 +208,7 @@ function showSipNumberInput () {
messageHandler.openTwoButtonDialog(
null, null, null,
`<h2>${sipMsg}</h2>
<input name="sipNumber" type="text" value="${defaultNumber}" autofocus>`,
<input name="sipNumber" type="text" value="${defaultNumber}" autofocus>`,
false, "dialog.Dial",
function (e, v, m, f) {
if (v && f.sipNumber) {
@ -250,7 +250,7 @@ const Toolbar = {
* Disables and enables some of the buttons.
*/
setupButtonsFromConfig () {
if (UIUtil.isButtonEnabled('prezi')) {
if (!UIUtil.isButtonEnabled('prezi')) {
$("#toolbar_button_prezi").css({display: "none"});
}
},
@ -283,6 +283,12 @@ const Toolbar = {
}
},
showEtherpadButton () {
if (!$('#toolbar_button_etherpad').is(":visible")) {
$('#toolbar_button_etherpad').css({display: 'inline-block'});
}
},
// Shows or hides the 'recording' button.
showRecordingButton (show) {
if (UIUtil.isButtonEnabled('recording') && show) {
@ -303,9 +309,8 @@ const Toolbar = {
// checks whether desktop sharing is enabled and whether
// we have params to start automatically sharing
checkAutoEnableDesktopSharing () {
if (UIUtil.isButtonEnabled('desktop')
&& config.autoEnableDesktopSharing) {
APP.desktopsharing.toggleScreenSharing();
if (UIUtil.isButtonEnabled('desktop') && config.autoEnableDesktopSharing) {
emitter.emit(UIEvents.TOGGLE_SCREENSHARING);
}
},

View File

@ -1,6 +1,7 @@
/* global APP, config, $, interfaceConfig */
import UIUtil from '../util/UIUtil';
import BottomToolbar from './BottomToolbar';
let toolbarTimeoutObject;
let toolbarTimeout = interfaceConfig.INITIAL_TOOLBAR_TIMEOUT;
@ -47,7 +48,7 @@ function hideToolbar() {
} else {
header.hide("slide", { direction: "up", duration: 300});
$('#subject').animate({top: "-=40"}, 300);
if ($("#remoteVideos").hasClass("hidden")) {
if (!BottomToolbar.isFilmStripVisible()) {
bottomToolbar.hide(
"slide", {direction: "right", duration: 300}
);

View File

@ -1,19 +1,34 @@
/* global $, config, interfaceConfig */
import PanelToggler from "../side_pannels/SidePanelToggler";
/**
* Created by hristo on 12/22/14.
*/
var UIUtil = {
/**
* Returns the size of the side panel.
*/
getSidePanelSize () {
var availableHeight = window.innerHeight;
var availableWidth = window.innerWidth;
var panelWidth = 200;
if (availableWidth * 0.2 < 200) {
panelWidth = availableWidth * 0.2;
}
return [panelWidth, availableHeight];
},
/**
* Returns the available video width.
*/
getAvailableVideoWidth: function (isVisible) {
if(typeof isVisible === "undefined" || isVisible === null)
isVisible = PanelToggler.isVisible();
var rightPanelWidth
= isVisible ? PanelToggler.getPanelSize()[0] : 0;
getAvailableVideoWidth: function (isSidePanelVisible) {
let rightPanelWidth = 0;
if (isSidePanelVisible) {
rightPanelWidth = UIUtil.getSidePanelSize()[0];
}
return window.innerWidth - rightPanelWidth;
},
@ -118,6 +133,12 @@ import PanelToggler from "../side_pannels/SidePanelToggler";
redirect (url) {
window.location.href = url;
},
    /**
     * Checks whether the page is currently displayed in full screen mode,
     * consulting the unprefixed flag plus the Gecko and WebKit
     * vendor-prefixed variants.
     * NOTE(review): does not consult the standard
     * `document.fullscreenElement` API — presumably the target browsers at
     * the time only exposed these flags; verify on modern engines.
     * @returns {boolean} true when any of the full screen flags is set.
     */
    isFullScreen () {
        return document.fullScreen
            || document.mozFullScreen
            || document.webkitIsFullScreen;
    }
};

View File

@ -0,0 +1,24 @@
/**
 * Base class for containers that can occupy the "large video" area
 * (camera/desktop video, Etherpad, Prezi, ...). All hooks are no-ops here;
 * concrete containers override the ones they need.
 */
export default class LargeContainer {
    /**
     * Make this container visible.
     * @returns Promise resolved when the show transition has completed.
     */
    show () {
    }

    /**
     * Hide this container.
     * @returns Promise resolved when the hide transition has completed.
     */
    hide () {
    }

    /**
     * Resize this container to the given dimensions.
     * @param containerWidth available width in pixels
     * @param containerHeight available height in pixels
     * @param animate whether the resize should be animated
     */
    resize (containerWidth, containerHeight, animate) {
    }

    /** Hook invoked when the pointer enters the large video area. */
    onHoverIn (e) {
    }

    /** Hook invoked when the pointer leaves the large video area. */
    onHoverOut (e) {
    }
}

View File

@ -1,98 +1,21 @@
/* global $, APP, interfaceConfig */
/* jshint -W101 */
import Avatar from "../avatar/Avatar";
import ToolbarToggler from "../toolbars/ToolbarToggler";
import UIUtil from "../util/UIUtil";
import UIEvents from "../../../service/UI/UIEvents";
import LargeContainer from './LargeContainer';
import BottomToolbar from '../toolbars/BottomToolbar';
var RTCBrowserType = require("../../RTC/RTCBrowserType");
const RTCBrowserType = require("../../RTC/RTCBrowserType");
// FIXME: With Temasys we have to re-select everytime
//var video = $('#largeVideo');
const avatarSize = interfaceConfig.ACTIVE_SPEAKER_AVATAR_SIZE;
var currentVideoWidth = null;
var currentVideoHeight = null;
// By default we use camera
var getVideoSize = getCameraVideoSize;
var getVideoPosition = getCameraVideoPosition;
/**
* The small video instance that is displayed in the large video
* @type {SmallVideo}
*/
var currentSmallVideo = null;
/**
* Indicates whether the large video is enabled.
* @type {boolean}
*/
var isEnabled = true;
/**
* Current large video state.
* Possible values - video, prezi or etherpad.
* @type {string}
*/
var state = "video";
/**
* Returns the html element associated with the passed state of large video
* @param state the state.
* @returns {JQuery|*|jQuery|HTMLElement} the container.
*/
function getContainerByState(state) {
var selector = null;
switch (state) {
case "video":
selector = "#largeVideoWrapper";
break;
case "etherpad":
selector = "#etherpad>iframe";
break;
case "prezi":
selector = "#presentation>iframe";
break;
default:
return null;
}
return $(selector);
}
/**
* Sets the size and position of the given video element.
*
* @param video the video element to position
* @param width the desired video width
* @param height the desired video height
* @param horizontalIndent the left and right indent
* @param verticalIndent the top and bottom indent
*/
function positionVideo(video,
width,
height,
horizontalIndent,
verticalIndent,
animate) {
if (animate) {
video.animate({
width: width,
height: height,
top: verticalIndent,
bottom: verticalIndent,
left: horizontalIndent,
right: horizontalIndent
}, {
queue: false,
duration: 500
});
function getStreamId(stream) {
if (stream.isLocal()) {
return APP.conference.localId;
} else {
video.width(width);
video.height(height);
video.css({
top: verticalIndent,
bottom: verticalIndent,
left: horizontalIndent,
right: horizontalIndent
});
return stream.getParticipantId();
}
}
/**
@ -106,80 +29,25 @@ function getDesktopVideoSize(videoWidth,
videoHeight,
videoSpaceWidth,
videoSpaceHeight) {
if (!videoWidth)
videoWidth = currentVideoWidth;
if (!videoHeight)
videoHeight = currentVideoHeight;
var aspectRatio = videoWidth / videoHeight;
let aspectRatio = videoWidth / videoHeight;
var availableWidth = Math.max(videoWidth, videoSpaceWidth);
var availableHeight = Math.max(videoHeight, videoSpaceHeight);
let availableWidth = Math.max(videoWidth, videoSpaceWidth);
let availableHeight = Math.max(videoHeight, videoSpaceHeight);
var filmstrip = $("#remoteVideos");
videoSpaceHeight -= BottomToolbar.getFilmStripHeight();
if (!filmstrip.hasClass("hidden"))
videoSpaceHeight -= filmstrip.outerHeight();
if (availableWidth / aspectRatio >= videoSpaceHeight)
{
if (availableWidth / aspectRatio >= videoSpaceHeight) {
availableHeight = videoSpaceHeight;
availableWidth = availableHeight * aspectRatio;
}
if (availableHeight * aspectRatio >= videoSpaceWidth)
{
if (availableHeight * aspectRatio >= videoSpaceWidth) {
availableWidth = videoSpaceWidth;
availableHeight = availableWidth / aspectRatio;
}
return [availableWidth, availableHeight];
}
/**
* Returns an array of the video horizontal and vertical indents,
* so that if fits its parent.
*
* @return an array with 2 elements, the horizontal indent and the vertical
* indent
*/
function getCameraVideoPosition(videoWidth,
videoHeight,
videoSpaceWidth,
videoSpaceHeight) {
// Parent height isn't completely calculated when we position the video in
// full screen mode and this is why we use the screen height in this case.
// Need to think it further at some point and implement it properly.
var isFullScreen = document.fullScreen ||
document.mozFullScreen ||
document.webkitIsFullScreen;
if (isFullScreen)
videoSpaceHeight = window.innerHeight;
var horizontalIndent = (videoSpaceWidth - videoWidth) / 2;
var verticalIndent = (videoSpaceHeight - videoHeight) / 2;
return [horizontalIndent, verticalIndent];
}
/**
* Returns an array of the video horizontal and vertical indents.
* Centers horizontally and top aligns vertically.
*
* @return an array with 2 elements, the horizontal indent and the vertical
* indent
*/
function getDesktopVideoPosition(videoWidth,
videoHeight,
videoSpaceWidth,
videoSpaceHeight) {
var horizontalIndent = (videoSpaceWidth - videoWidth) / 2;
var verticalIndent = 0;// Top aligned
return [horizontalIndent, verticalIndent];
return { availableWidth, availableHeight };
}
@ -199,15 +67,10 @@ function getCameraVideoSize(videoWidth,
videoSpaceWidth,
videoSpaceHeight) {
if (!videoWidth)
videoWidth = currentVideoWidth;
if (!videoHeight)
videoHeight = currentVideoHeight;
let aspectRatio = videoWidth / videoHeight;
var aspectRatio = videoWidth / videoHeight;
var availableWidth = videoWidth;
var availableHeight = videoHeight;
let availableWidth = videoWidth;
let availableHeight = videoHeight;
if (interfaceConfig.VIDEO_LAYOUT_FIT == 'height') {
availableHeight = videoSpaceHeight;
@ -233,487 +96,347 @@ function getCameraVideoSize(videoWidth,
}
return [availableWidth, availableHeight];
return { availableWidth, availableHeight };
}
/**
* Updates the src of the active speaker avatar
* Returns an array of the video horizontal and vertical indents,
* so that if fits its parent.
*
* @return an array with 2 elements, the horizontal indent and the vertical
* indent
*/
function updateActiveSpeakerAvatarSrc() {
let avatar = $("#activeSpeakerAvatar");
let id = currentSmallVideo.id;
let url = Avatar.getActiveSpeakerUrl(id);
if (id && avatar.attr('src') !== url) {
avatar.attr('src', url);
currentSmallVideo.showAvatar();
function getCameraVideoPosition(videoWidth,
videoHeight,
videoSpaceWidth,
videoSpaceHeight) {
// Parent height isn't completely calculated when we position the video in
// full screen mode and this is why we use the screen height in this case.
// Need to think it further at some point and implement it properly.
if (UIUtil.isFullScreen()) {
videoSpaceHeight = window.innerHeight;
}
let horizontalIndent = (videoSpaceWidth - videoWidth) / 2;
let verticalIndent = (videoSpaceHeight - videoHeight) / 2;
return { horizontalIndent, verticalIndent };
}
/**
* Change the video source of the large video.
* @param isVisible
* Returns an array of the video horizontal and vertical indents.
* Centers horizontally and top aligns vertically.
*
* @return an array with 2 elements, the horizontal indent and the vertical
* indent
*/
function changeVideo(isVisible) {
function getDesktopVideoPosition(videoWidth,
videoHeight,
videoSpaceWidth,
videoSpaceHeight) {
if (!currentSmallVideo) {
console.error("Unable to change large video - no 'currentSmallVideo'");
return;
}
let horizontalIndent = (videoSpaceWidth - videoWidth) / 2;
updateActiveSpeakerAvatarSrc();
let largeVideoElement = $('#largeVideo');
let verticalIndent = 0;// Top aligned
currentSmallVideo.stream.attach(largeVideoElement);
let flipX = currentSmallVideo.flipX;
largeVideoElement.css({
transform: flipX ? "scaleX(-1)" : "none"
});
LargeVideo.updateVideoSizeAndPosition(currentSmallVideo.getVideoType());
// Only if the large video is currently visible.
if (isVisible) {
LargeVideo.VideoLayout.largeVideoUpdated(currentSmallVideo);
$('#largeVideoWrapper').fadeTo(300, 1);
}
return { horizontalIndent, verticalIndent };
}
/**
* Creates the html elements for the large video.
*/
function createLargeVideoHTML()
{
var html = '<div id="largeVideoContainer" class="videocontainer">';
html += '<div id="presentation"></div>' +
'<div id="etherpad"></div>' +
'<a target="_new"><div class="watermark leftwatermark"></div></a>' +
'<a target="_new"><div class="watermark rightwatermark"></div></a>' +
'<a class="poweredby" href="http://jitsi.org" target="_new" >' +
'<span data-i18n="poweredby"></span> jitsi.org' +
'</a>'+
'<div id="activeSpeaker">' +
'<img id="activeSpeakerAvatar" src=""/>' +
'<canvas id="activeSpeakerAudioLevel"></canvas>' +
'</div>' +
'<div id="largeVideoWrapper">' +
'<video id="largeVideo" muted="true"' +
'autoplay oncontextmenu="return false;"></video>' +
'</div id="largeVideoWrapper">' +
'<span id="videoConnectionMessage"></span>';
html += '</div>';
$(html).prependTo("#videospace");
export const VideoContainerType = "video";
if (interfaceConfig.SHOW_JITSI_WATERMARK) {
var leftWatermarkDiv
= $("#largeVideoContainer div[class='watermark leftwatermark']");
leftWatermarkDiv.css({display: 'block'});
leftWatermarkDiv.parent().get(0).href
= interfaceConfig.JITSI_WATERMARK_LINK;
class VideoContainer extends LargeContainer {
// FIXME: With Temasys we have to re-select everytime
get $video () {
return $('#largeVideo');
}
if (interfaceConfig.SHOW_BRAND_WATERMARK) {
var rightWatermarkDiv
= $("#largeVideoContainer div[class='watermark rightwatermark']");
rightWatermarkDiv.css({display: 'block'});
rightWatermarkDiv.parent().get(0).href
= interfaceConfig.BRAND_WATERMARK_LINK;
rightWatermarkDiv.get(0).style.backgroundImage
= "url(images/rightwatermark.png)";
get id () {
if (this.stream) {
return getStreamId(this.stream);
}
}
if (interfaceConfig.SHOW_POWERED_BY) {
$("#largeVideoContainer>a[class='poweredby']").css({display: 'block'});
constructor (onPlay) {
super();
this.stream = null;
this.videoType = null;
this.$avatar = $('#activeSpeaker');
this.$wrapper = $('#largeVideoWrapper');
if (!RTCBrowserType.isIExplorer()) {
this.$video.volume = 0;
}
this.$video.on('play', onPlay);
}
if (!RTCBrowserType.isIExplorer()) {
$('#largeVideo').volume = 0;
}
}
var LargeVideo = {
init: function (VideoLayout, emitter) {
if(!isEnabled)
return;
createLargeVideoHTML();
this.VideoLayout = VideoLayout;
this.eventEmitter = emitter;
this.eventEmitter.emit(UIEvents.LARGEVIDEO_INIT);
var self = this;
// Listen for large video size updates
var largeVideo = $('#largeVideo')[0];
var onplaying = function (arg1, arg2, arg3) {
// re-select
if (RTCBrowserType.isTemasysPluginUsed())
largeVideo = $('#largeVideo')[0];
currentVideoWidth = largeVideo.videoWidth;
currentVideoHeight = largeVideo.videoHeight;
self.position(currentVideoWidth, currentVideoHeight);
getStreamSize () {
let video = this.$video[0];
return {
width: video.videoWidth,
height: video.videoHeight
};
largeVideo.onplaying = onplaying;
},
/**
* Indicates if the large video is currently visible.
*
* @return <tt>true</tt> if visible, <tt>false</tt> - otherwise
*/
isLargeVideoVisible: function() {
return $('#largeVideoWrapper').is(':visible');
},
/**
* Returns <tt>true</tt> if the user is currently displayed on large video.
*/
isCurrentlyOnLarge: function (id) {
return id && id === this.getId();
},
/**
* Updates the large video with the given new video source.
*/
updateLargeVideo: function (id, forceUpdate) {
if(!isEnabled) {
return;
}
let newSmallVideo = this.VideoLayout.getSmallVideo(id);
console.info(`hover in ${id} , video: `, newSmallVideo);
}
if (!newSmallVideo) {
console.error("Small video not found for: " + id);
return;
}
if (!LargeVideo.isCurrentlyOnLarge(id) || forceUpdate) {
$('#activeSpeaker').css('visibility', 'hidden');
let oldId = this.getId();
currentSmallVideo = newSmallVideo;
if (oldId !== id) {
// we want the notification to trigger even if id is undefined,
// or null.
this.eventEmitter.emit(UIEvents.SELECTED_ENDPOINT, id);
}
// We are doing fadeOut/fadeIn animations on parent div which wraps
// largeVideo, because when Temasys plugin is in use it replaces
// <video> elements with plugin <object> tag. In Safari jQuery is
// unable to store values on this plugin object which breaks all
// animation effects performed on it directly.
//
// If for any reason large video was hidden before calling fadeOut
// changeVideo will never be called, so we call show() in chain just
// to be sure
$('#largeVideoWrapper').show().fadeTo(300, 0,
changeVideo.bind($('#largeVideo'), this.isLargeVideoVisible()));
getVideoSize (containerWidth, containerHeight) {
let { width, height } = this.getStreamSize();
if (this.stream && this.isScreenSharing()) {
return getDesktopVideoSize(width, height, containerWidth, containerHeight);
} else {
if (currentSmallVideo) {
currentSmallVideo.showAvatar();
}
return getCameraVideoSize(width, height, containerWidth, containerHeight);
}
}
},
/**
* Shows/hides the large video.
*/
setLargeVideoVisible: function(isVisible) {
if(!isEnabled)
return;
if (isVisible) {
$('#largeVideoWrapper').css({visibility: 'visible'});
$('.watermark').css({visibility: 'visible'});
if(currentSmallVideo)
currentSmallVideo.enableDominantSpeaker(true);
}
else {
$('#largeVideoWrapper').css({visibility: 'hidden'});
$('#activeSpeaker').css('visibility', 'hidden');
$('.watermark').css({visibility: 'hidden'});
if(currentSmallVideo)
currentSmallVideo.enableDominantSpeaker(false);
}
},
onVideoTypeChanged: function (id, newVideoType) {
if (!isEnabled)
return;
if (LargeVideo.isCurrentlyOnLarge(id)) {
LargeVideo.updateVideoSizeAndPosition(newVideoType);
this.position(null, null, null, null, true);
}
},
/**
* Positions the large video.
*
* @param videoWidth the stream video width
* @param videoHeight the stream video height
*/
position: function (videoWidth, videoHeight,
videoSpaceWidth, videoSpaceHeight, animate) {
if(!isEnabled)
return;
if(!videoSpaceWidth)
videoSpaceWidth = $('#videospace').width();
if(!videoSpaceHeight)
videoSpaceHeight = window.innerHeight;
var videoSize = getVideoSize(videoWidth,
videoHeight,
videoSpaceWidth,
videoSpaceHeight);
var largeVideoWidth = videoSize[0];
var largeVideoHeight = videoSize[1];
var videoPosition = getVideoPosition(largeVideoWidth,
largeVideoHeight,
videoSpaceWidth,
videoSpaceHeight);
var horizontalIndent = videoPosition[0];
var verticalIndent = videoPosition[1];
positionVideo($('#largeVideoWrapper'),
largeVideoWidth,
largeVideoHeight,
horizontalIndent, verticalIndent, animate);
},
/**
* Resizes the large html elements.
*
* @param animate boolean property that indicates whether the resize should
* be animated or not.
* @param isSideBarVisible boolean property that indicates whether the chat
* area is displayed or not.
* If that parameter is null the method will check the chat panel
* visibility.
* @param completeFunction a function to be called when the video space is
* resized
* @returns {*[]} array with the current width and height values of the
* largeVideo html element.
*/
resize: function (animate, isSideBarVisible, completeFunction) {
if(!isEnabled)
return;
var availableHeight = window.innerHeight;
var availableWidth = UIUtil.getAvailableVideoWidth(isSideBarVisible);
if (availableWidth < 0 || availableHeight < 0) return;
var avatarSize = interfaceConfig.ACTIVE_SPEAKER_AVATAR_SIZE;
var top = availableHeight / 2 - avatarSize / 4 * 3;
$('#activeSpeaker').css('top', top);
this.VideoLayout
.resizeVideoSpace(animate, isSideBarVisible, completeFunction);
if(animate) {
$('#largeVideoContainer').animate({
width: availableWidth,
height: availableHeight
},
{
queue: false,
duration: 500
});
getVideoPosition (width, height, containerWidth, containerHeight) {
if (this.stream && this.isScreenSharing()) {
return getDesktopVideoPosition(width, height, containerWidth, containerHeight);
} else {
$('#largeVideoContainer').width(availableWidth);
$('#largeVideoContainer').height(availableHeight);
return getCameraVideoPosition(width, height, containerWidth, containerHeight);
}
return [availableWidth, availableHeight];
},
/**
* Resizes the large video.
*
* @param isSideBarVisible indicating if the side bar is visible
* @param completeFunction the callback function to be executed after the
* resize
*/
resizeVideoAreaAnimated: function (isSideBarVisible, completeFunction) {
if(!isEnabled)
return;
var size = this.resize(true, isSideBarVisible, completeFunction);
this.position(null, null, size[0], size[1], true);
},
/**
* Updates the video size and position.
*
* @param videoType the video type indicating if the stream is of type
* desktop or web cam
*/
updateVideoSizeAndPosition: function (videoType) {
if (!videoType)
videoType = currentSmallVideo.getVideoType();
}
var isDesktop = videoType === 'desktop';
resize (containerWidth, containerHeight, animate = false) {
let { width, height } = this.getVideoSize(containerWidth, containerHeight);
let { horizontalIndent, verticalIndent } = this.getVideoPosition(width, height, containerWidth, containerHeight);
// Change the way we'll be measuring and positioning large video
getVideoSize = isDesktop ? getDesktopVideoSize : getCameraVideoSize;
getVideoPosition = isDesktop ? getDesktopVideoPosition :
getCameraVideoPosition;
},
getId: function () {
return currentSmallVideo ? currentSmallVideo.id : null;
},
updateAvatar: function (id) {
if (!isEnabled) {
return;
}
if (id === this.getId()) {
updateActiveSpeakerAvatarSrc();
}
},
showAvatar: function (id, show) {
if (!isEnabled) {
return;
}
if (this.getId() === id && state === "video") {
$("#largeVideoWrapper").css("visibility", show ? "hidden" : "visible");
$('#activeSpeaker').css("visibility", show ? "visible" : "hidden");
return true;
}
return false;
},
/**
* Disables the large video
*/
disable: function () {
isEnabled = false;
},
/**
* Enables the large video
*/
enable: function () {
isEnabled = true;
},
/**
* Returns true if the video is enabled.
*/
isEnabled: function () {
return isEnabled;
},
/**
* Creates the iframe used by the etherpad
* @param src the value for src attribute
* @param onloadHandler handler executed when the iframe loads it content
* @returns {HTMLElement} the iframe
*/
createEtherpadIframe: function (src, onloadHandler) {
if(!isEnabled)
return;
// update avatar position
let top = this.containerHeight / 2 - avatarSize / 4 * 3;
this.$avatar.css('top', top);
var etherpadIFrame = document.createElement('iframe');
etherpadIFrame.src = src;
etherpadIFrame.frameBorder = 0;
etherpadIFrame.scrolling = "no";
etherpadIFrame.width = $('#largeVideoContainer').width() || 640;
etherpadIFrame.height = $('#largeVideoContainer').height() || 480;
etherpadIFrame.setAttribute('style', 'visibility: hidden;');
this.$wrapper.animate({
width,
height,
document.getElementById('etherpad').appendChild(etherpadIFrame);
top: verticalIndent,
bottom: verticalIndent,
etherpadIFrame.onload = onloadHandler;
left: horizontalIndent,
right: horizontalIndent
}, {
queue: false,
duration: animate ? 500 : 0
});
}
return etherpadIFrame;
},
/**
* Changes the state of the large video.
* Possible values - video, prezi, etherpad.
* @param newState - the new state
*/
setState: function (newState) {
if(state === newState)
return;
var currentContainer = getContainerByState(state);
if(!currentContainer)
return;
setStream (stream, videoType) {
this.stream = stream;
this.videoType = videoType;
var self = this;
var oldState = state;
stream.attach(this.$video);
switch (newState)
{
case "etherpad":
$('#activeSpeaker').css('visibility', 'hidden');
currentContainer.fadeOut(300, function () {
if (oldState === "prezi") {
currentContainer.css({opacity: '0'});
$('#reloadPresentation').css({display: 'none'});
}
else {
self.setLargeVideoVisible(false);
}
});
let flipX = stream.isLocal() && !this.isScreenSharing();
this.$video.css({
transform: flipX ? 'scaleX(-1)' : 'none'
});
}
$('#etherpad>iframe').fadeIn(300, function () {
document.body.style.background = '#eeeeee';
$('#etherpad>iframe').css({visibility: 'visible'});
$('#etherpad').css({zIndex: 2});
});
break;
case "prezi":
var prezi = $('#presentation>iframe');
currentContainer.fadeOut(300, function() {
document.body.style.background = 'black';
});
prezi.fadeIn(300, function() {
prezi.css({opacity:'1'});
ToolbarToggler.dockToolbar(true);//fix that
self.setLargeVideoVisible(false);
$('#etherpad>iframe').css({visibility: 'hidden'});
$('#etherpad').css({zIndex: 0});
});
$('#activeSpeaker').css('visibility', 'hidden');
break;
isScreenSharing () {
return this.videoType === 'desktop';
}
case "video":
currentContainer.fadeOut(300, function () {
$('#presentation>iframe').css({opacity:'0'});
$('#reloadPresentation').css({display:'none'});
$('#etherpad>iframe').css({visibility: 'hidden'});
$('#etherpad').css({zIndex: 0});
document.body.style.background = 'black';
ToolbarToggler.dockToolbar(false);//fix that
});
$('#largeVideoWrapper').fadeIn(300, function () {
self.setLargeVideoVisible(true);
});
break;
showAvatar (show) {
this.$avatar.css("visibility", show ? "visible" : "hidden");
}
// We are doing fadeOut/fadeIn animations on parent div which wraps
// largeVideo, because when Temasys plugin is in use it replaces
// <video> elements with plugin <object> tag. In Safari jQuery is
// unable to store values on this plugin object which breaks all
// animation effects performed on it directly.
show () {
let $wrapper = this.$wrapper;
return new Promise(resolve => {
$wrapper.fadeIn(300, function () {
$wrapper.css({visibility: 'visible'});
$('.watermark').css({visibility: 'visible'});
});
resolve();
});
}
hide () {
this.showAvatar(false);
let $wrapper = this.$wrapper;
return new Promise(resolve => {
$wrapper.fadeOut(300, function () {
$wrapper.css({visibility: 'hidden'});
$('.watermark').css({visibility: 'hidden'});
resolve();
});
});
}
}
export default class LargeVideoManager {
constructor () {
this.containers = {};
this.state = VideoContainerType;
this.videoContainer = new VideoContainer(() => this.resizeContainer(VideoContainerType));
this.addContainer(VideoContainerType, this.videoContainer);
this.width = 0;
this.height = 0;
this.$container = $('#largeVideoContainer');
this.$container.css({
display: 'inline-block'
});
if (interfaceConfig.SHOW_JITSI_WATERMARK) {
let leftWatermarkDiv = this.$container.find("div.watermark.leftwatermark");
leftWatermarkDiv.css({display: 'block'});
leftWatermarkDiv.parent().attr('href', interfaceConfig.JITSI_WATERMARK_LINK);
}
state = newState;
if (interfaceConfig.SHOW_BRAND_WATERMARK) {
let rightWatermarkDiv = this.$container.find("div.watermark.rightwatermark");
},
/**
* Returns the current state of the large video.
* @returns {string} the current state - video, prezi or etherpad.
*/
getState: function () {
return state;
},
/**
* Sets hover handlers for the large video container div.
*
* @param inHandler
* @param outHandler
*/
setHover: function(inHandler, outHandler)
{
$('#largeVideoContainer').hover(inHandler, outHandler);
},
rightWatermarkDiv.css({
display: 'block',
backgroundImage: 'url(images/rightwatermark.png)'
});
rightWatermarkDiv.parent().attr('href', interfaceConfig.BRAND_WATERMARK_LINK);
}
if (interfaceConfig.SHOW_POWERED_BY) {
this.$container.children("a.poweredby").css({display: 'block'});
}
this.$container.hover(
e => this.onHoverIn(e),
e => this.onHoverOut(e)
);
}
onHoverIn (e) {
if (!this.state) {
return;
}
let container = this.getContainer(this.state);
container.onHoverIn(e);
}
onHoverOut (e) {
if (!this.state) {
return;
}
let container = this.getContainer(this.state);
container.onHoverOut(e);
}
get id () {
return this.videoContainer.id;
}
updateLargeVideo (stream, videoType) {
let id = getStreamId(stream);
let container = this.getContainer(this.state);
container.hide().then(() => {
console.info("hover in %s", id);
this.state = VideoContainerType;
this.videoContainer.setStream(stream, videoType);
this.videoContainer.show();
});
}
updateContainerSize (isSideBarVisible) {
this.width = UIUtil.getAvailableVideoWidth(isSideBarVisible);
this.height = window.innerHeight;
}
resizeContainer (type, animate = false) {
let container = this.getContainer(type);
container.resize(this.width, this.height, animate);
}
resize (animate) {
// resize all containers
Object.keys(this.containers).forEach(type => this.resizeContainer(type, animate));
this.$container.animate({
width: this.width,
height: this.height
}, {
queue: false,
duration: animate ? 500 : 0
});
}
/**
* Enables/disables the filter indicating a video problem to the user.
*
* @param enable <tt>true</tt> to enable, <tt>false</tt> to disable
*/
enableVideoProblemFilter: function (enable) {
$("#largeVideo").toggleClass("videoProblemFilter", enable);
enableVideoProblemFilter (enable) {
this.videoContainer.$video.toggleClass("videoProblemFilter", enable);
}
};
export default LargeVideo;
/**
* Updates the src of the active speaker avatar
*/
updateAvatar (thumbUrl) {
$("#activeSpeakerAvatar").attr('src', thumbUrl);
}
showAvatar (show) {
this.videoContainer.showAvatar(show);
}
addContainer (type, container) {
if (this.containers[type]) {
throw new Error(`container of type ${type} already exist`);
}
this.containers[type] = container;
this.resizeContainer(type);
}
getContainer (type) {
let container = this.containers[type];
if (!container) {
throw new Error(`container of type ${type} doesn't exist`);
}
return container;
}
removeContainer (type) {
if (!this.containers[type]) {
throw new Error(`container of type ${type} doesn't exist`);
}
delete this.containers[type];
}
showContainer (type) {
if (this.state === type) {
return Promise.resolve();
}
let container = this.getContainer(type);
if (this.state) {
let oldContainer = this.containers[this.state];
if (oldContainer) {
oldContainer.hide();
}
}
this.state = type;
return container.show();
}
}

View File

@ -34,7 +34,8 @@ RemoteVideo.prototype.addRemoteVideoContainer = function() {
if (APP.conference.isModerator) {
this.addRemoteVideoMenu();
}
AudioLevels.updateAudioLevelCanvas(this.id, this.VideoLayout);
let {thumbWidth, thumbHeight} = this.VideoLayout.calculateThumbnailSize();
AudioLevels.updateAudioLevelCanvas(this.id, thumbWidth, thumbHeight);
return this.container;
};
@ -296,6 +297,12 @@ RemoteVideo.prototype.showPeerContainer = function (state) {
};
RemoteVideo.prototype.updateResolution = function (resolution) {
if (this.connectionIndicator) {
this.connectionIndicator.updateResolution(resolution);
}
};
RemoteVideo.prototype.removeConnectionIndicator = function () {
if (this.connectionIndicator)
this.connectionIndicator.remove();

View File

@ -145,8 +145,8 @@ SmallVideo.prototype.bindHoverHandler = function () {
function () {
// If the video has been "pinned" by the user we want to
// keep the display name on place.
if (!LargeVideo.isLargeVideoVisible() ||
!LargeVideo.isCurrentlyOnLarge(self.id))
if (!self.VideoLayout.isLargeVideoVisible() ||
!self.VideoLayout.isCurrentlyOnLarge(self.id))
self.showDisplayName(false);
}
);
@ -254,7 +254,7 @@ SmallVideo.prototype.enableDominantSpeaker = function (isEnable) {
}
if (isEnable) {
this.showDisplayName(LargeVideo.getState() === "video");
this.showDisplayName(this.VideoLayout.isLargeVideoVisible());
if (!this.container.classList.contains("dominantspeaker"))
this.container.classList.add("dominantspeaker");
@ -388,7 +388,10 @@ SmallVideo.prototype.showAvatar = function (show) {
}
}
if (LargeVideo.showAvatar(this.id, show)) {
if (this.VideoLayout.isCurrentlyOnLarge(this.id)
&& this.VideoLayout.isLargeVideoVisible()) {
this.VideoLayout.showLargeVideoAvatar(show);
setVisibility(avatar, false);
setVisibility(video, false);
} else {

View File

@ -2,15 +2,16 @@
/* jshint -W101 */
import AudioLevels from "../audio_levels/AudioLevels";
import BottomToolbar from "../toolbars/BottomToolbar";
import UIEvents from "../../../service/UI/UIEvents";
import UIUtil from "../util/UIUtil";
import RemoteVideo from "./RemoteVideo";
import LargeVideo from "./LargeVideo";
import LargeVideoManager, {VideoContainerType} from "./LargeVideo";
import {PreziContainerType} from '../prezi/Prezi';
import LocalVideo from "./LocalVideo";
var MediaStreamType = require("../../../service/RTC/MediaStreamTypes");
var RTCBrowserType = require('../../RTC/RTCBrowserType');
var remoteVideos = {};
@ -32,6 +33,8 @@ var eventEmitter = null;
*/
var focusedVideoResourceJid = null;
const thumbAspectRatio = 16.0 / 9.0;
/**
* On contact list item clicked.
*/
@ -88,22 +91,31 @@ function getPeerContainerResourceId (containerElement) {
}
}
let largeVideo;
var VideoLayout = {
init (emitter) {
eventEmitter = emitter;
localVideoThumbnail = new LocalVideo(VideoLayout, emitter);
if (interfaceConfig.filmStripOnly) {
LargeVideo.disable();
} else {
LargeVideo.init(VideoLayout, emitter);
}
VideoLayout.resizeLargeVideoContainer();
emitter.addListener(UIEvents.CONTACT_CLICKED, onContactClicked);
},
initLargeVideo (isSideBarVisible) {
largeVideo = new LargeVideoManager();
largeVideo.updateContainerSize(isSideBarVisible);
AudioLevels.init();
},
setAudioLevel(id, lvl) {
if (!largeVideo) {
return;
}
AudioLevels.updateAudioLevel(
id, lvl, largeVideo.id
);
},
isInLastN (resource) {
return lastNCount < 0 || // lastN is disabled
// lastNEndpoints cache not built yet
@ -140,15 +152,16 @@ var VideoLayout = {
localVideoThumbnail.createConnectionIndicator();
let localId = APP.conference.localId;
this.onVideoTypeChanged(localId, stream.getType());
this.onVideoTypeChanged(localId, stream.videoType);
AudioLevels.updateAudioLevelCanvas(null, VideoLayout);
let {thumbWidth, thumbHeight} = this.calculateThumbnailSize();
AudioLevels.updateAudioLevelCanvas(null, thumbWidth, thumbHeight);
localVideoThumbnail.changeVideo(stream);
/* force update if we're currently being displayed */
if (LargeVideo.isCurrentlyOnLarge(localId)) {
LargeVideo.updateLargeVideo(localId, true);
if (this.isCurrentlyOnLarge(localId)) {
this.updateLargeVideo(localId, true);
}
},
@ -156,8 +169,8 @@ var VideoLayout = {
let id = APP.conference.localId;
localVideoThumbnail.joined(id);
if (!LargeVideo.id) {
LargeVideo.updateLargeVideo(id, true);
if (largeVideo && !largeVideo.id) {
this.updateLargeVideo(id, true);
}
},
@ -183,7 +196,7 @@ var VideoLayout = {
* another one instead.
*/
updateRemovedVideo (id) {
if (id !== LargeVideo.getId()) {
if (!this.isCurrentlyOnLarge(id)) {
return;
}
@ -198,15 +211,17 @@ var VideoLayout = {
newId = this.electLastVisibleVideo();
}
LargeVideo.updateLargeVideo(newId);
this.updateLargeVideo(newId);
},
electLastVisibleVideo () {
// pick the last visible video in the row
// if nobody else is left, this picks the local video
let pick = $('#remoteVideos>span[id!="mixedstream"]:visible:last');
if (pick.length) {
let id = getPeerContainerResourceId(pick[0]);
let thumbs = BottomToolbar.getThumbs(true).filter('[id!="mixedstream"]');
let lastVisible = thumbs.filter(':visible:last');
if (lastVisible.length) {
let id = getPeerContainerResourceId(lastVisible[0]);
if (remoteVideos[id]) {
console.info("electLastVisibleVideo: " + id);
return id;
@ -216,9 +231,9 @@ var VideoLayout = {
}
console.info("Last visible video no longer exists");
pick = $('#remoteVideos>span[id!="mixedstream"]');
if (pick.length) {
let id = getPeerContainerResourceId(pick[0]);
thumbs = BottomToolbar.getThumbs();
if (thumbs.length) {
let id = getPeerContainerResourceId(thumbs[0]);
if (remoteVideos[id]) {
console.info("electLastVisibleVideo: " + id);
return id;
@ -242,10 +257,6 @@ var VideoLayout = {
remoteVideos[id].addRemoteStreamElement(stream);
},
getLargeVideoId () {
return LargeVideo.getId();
},
/**
* Return the type of the remote video.
* @param id the id for the remote video
@ -255,25 +266,6 @@ var VideoLayout = {
return remoteVideoTypes[id];
},
/**
* Called when large video update is finished
* @param currentSmallVideo small video currently displayed on large video
*/
largeVideoUpdated (currentSmallVideo) {
// Makes sure that dominant speaker UI
// is enabled only on current small video
localVideoThumbnail.enableDominantSpeaker(localVideoThumbnail === currentSmallVideo);
Object.keys(remoteVideos).forEach(
function (resourceJid) {
var remoteVideo = remoteVideos[resourceJid];
if (remoteVideo) {
remoteVideo.enableDominantSpeaker(
remoteVideo === currentSmallVideo);
}
}
);
},
handleVideoThumbClicked (noPinnedEndpointChangedEvent,
resourceJid) {
if(focusedVideoResourceJid) {
@ -291,7 +283,7 @@ var VideoLayout = {
// Enable the currently set dominant speaker.
if (currentDominantSpeaker) {
if(smallVideo && smallVideo.hasVideo()) {
LargeVideo.updateLargeVideo(currentDominantSpeaker);
this.updateLargeVideo(currentDominantSpeaker);
}
}
@ -314,9 +306,7 @@ var VideoLayout = {
}
}
LargeVideo.setState("video");
LargeVideo.updateLargeVideo(resourceJid);
this.updateLargeVideo(resourceJid);
// Writing volume not allowed in IE
if (!RTCBrowserType.isIExplorer()) {
@ -346,7 +336,7 @@ var VideoLayout = {
// In case this is not currently in the last n we don't show it.
if (localLastNCount && localLastNCount > 0 &&
$('#remoteVideos>span').length >= localLastNCount + 2) {
BottomToolbar.getThumbs().length >= localLastNCount + 2) {
remoteVideo.showPeerContainer('hide');
} else {
VideoLayout.resizeThumbnails();
@ -370,11 +360,11 @@ var VideoLayout = {
// the current dominant speaker.
if ((!focusedVideoResourceJid &&
!currentDominantSpeaker &&
!require("../prezi/Prezi").isPresentationVisible()) ||
!this.isLargeContainerTypeVisible(PreziContainerType)) ||
focusedVideoResourceJid === resourceJid ||
(resourceJid &&
currentDominantSpeaker === resourceJid)) {
LargeVideo.updateLargeVideo(resourceJid, true);
this.updateLargeVideo(resourceJid, true);
}
},
@ -419,100 +409,67 @@ var VideoLayout = {
/**
* Resizes the large video container.
*/
resizeLargeVideoContainer () {
if(LargeVideo.isEnabled()) {
LargeVideo.resize();
resizeLargeVideoContainer (isSideBarVisible) {
if (largeVideo) {
largeVideo.updateContainerSize(isSideBarVisible);
largeVideo.resize(false);
} else {
VideoLayout.resizeVideoSpace();
this.resizeVideoSpace(false, isSideBarVisible);
}
VideoLayout.resizeThumbnails();
LargeVideo.position();
this.resizeThumbnails(false);
},
/**
* Resizes thumbnails.
*/
resizeThumbnails (animate) {
var videoSpaceWidth = $('#remoteVideos').width();
resizeThumbnails (animate = false) {
let {thumbWidth, thumbHeight} = this.calculateThumbnailSize();
var thumbnailSize = VideoLayout.calculateThumbnailSize(videoSpaceWidth);
var width = thumbnailSize[0];
var height = thumbnailSize[1];
$('.userAvatar').css('left', (thumbWidth - thumbHeight) / 2);
$('.userAvatar').css('left', (width - height) / 2);
if(animate) {
$('#remoteVideos').animate({
// adds 2 px because of small video 1px border
height: height + 2
},
{
queue: false,
duration: 500
});
$('#remoteVideos>span').animate({
height: height,
width: width
},
{
queue: false,
duration: 500,
complete: function () {
$(document).trigger(
"remotevideo.resized",
[width,
height]);
}
});
} else {
// size videos so that while keeping AR and max height, we have a
// nice fit
// adds 2 px because of small video 1px border
$('#remoteVideos').height(height + 2);
$('#remoteVideos>span').width(width);
$('#remoteVideos>span').height(height);
$(document).trigger("remotevideo.resized", [width, height]);
}
BottomToolbar.resizeThumbnails(thumbWidth, thumbHeight, animate).then(function () {
BottomToolbar.resizeToolbar(thumbWidth, thumbHeight);
AudioLevels.updateCanvasSize(thumbWidth, thumbHeight);
});
},
/**
* Calculates the thumbnail size.
*
* @param videoSpaceWidth the width of the video space
*/
calculateThumbnailSize (videoSpaceWidth) {
calculateThumbnailSize () {
let videoSpaceWidth = BottomToolbar.getFilmStripWidth();
// Calculate the available height, which is the inner window height
// minus 39px for the header minus 2px for the delimiter lines on the
// top and bottom of the large video, minus the 36px space inside the
// remoteVideos container used for highlighting shadow.
var availableHeight = 100;
let availableHeight = 100;
var numvids = $('#remoteVideos>span:visible').length;
let numvids = BottomToolbar.getThumbs().length;
if (localLastNCount && localLastNCount > 0) {
numvids = Math.min(localLastNCount + 1, numvids);
}
// Remove the 3px borders arround videos and border around the remote
// videos area and the 4 pixels between the local video and the others
//TODO: Find out where the 4 pixels come from and remove them
var availableWinWidth = videoSpaceWidth - 2 * 3 * numvids - 70 - 4;
// Remove the 3px borders arround videos and border around the remote
// videos area and the 4 pixels between the local video and the others
//TODO: Find out where the 4 pixels come from and remove them
let availableWinWidth = videoSpaceWidth - 2 * 3 * numvids - 70 - 4;
var availableWidth = availableWinWidth / numvids;
var aspectRatio = 16.0 / 9.0;
var maxHeight = Math.min(160, availableHeight);
availableHeight
= Math.min( maxHeight,
availableWidth / aspectRatio,
window.innerHeight - 18);
let availableWidth = availableWinWidth / numvids;
let maxHeight = Math.min(160, availableHeight);
availableHeight
= Math.min( maxHeight,
availableWidth / thumbAspectRatio,
window.innerHeight - 18);
if (availableHeight < availableWidth / aspectRatio) {
availableWidth = Math.floor(availableHeight * aspectRatio);
}
if (availableHeight < availableWidth / thumbAspectRatio) {
availableWidth = Math.floor(availableHeight * thumbAspectRatio);
}
return [availableWidth, availableHeight];
return {
thumbWidth: availableWidth,
thumbHeight: availableHeight
};
},
/**
@ -604,7 +561,7 @@ var VideoLayout = {
// Update the large video if the video source is already available,
// otherwise wait for the "videoactive.jingle" event.
if (videoSel[0].currentTime > 0) {
LargeVideo.updateLargeVideo(id);
this.updateLargeVideo(id);
}
}
},
@ -656,7 +613,7 @@ var VideoLayout = {
var updateLargeVideo = false;
// Handle LastN/local LastN changes.
$('#remoteVideos>span').each(function( index, element ) {
BottomToolbar.getThumbs().each(function( index, element ) {
var resourceJid = getPeerContainerResourceId(element);
// We do not want to process any logic for our own(local) video
@ -696,7 +653,7 @@ var VideoLayout = {
// displayed in the large video we have to switch to another
// user.
if (!updateLargeVideo &&
resourceJid === LargeVideo.getId()) {
this.isCurrentlyOnLarge(resourceJid)) {
updateLargeVideo = true;
}
}
@ -716,7 +673,7 @@ var VideoLayout = {
var jid = APP.xmpp.findJidFromResource(resourceJid);
var mediaStream =
APP.RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE];
APP.RTC.remoteStreams[jid]['video'];
var sel = remoteVideo.selectVideoElement();
APP.RTC.attachMediaStream(sel, mediaStream.stream);
@ -754,7 +711,7 @@ var VideoLayout = {
continue;
// videoSrcToSsrc needs to be update for this call to succeed.
LargeVideo.updateLargeVideo(resource);
this.updateLargeVideo(resource);
break;
}
}
@ -766,23 +723,25 @@ var VideoLayout = {
* @param object
*/
updateLocalConnectionStats (percent, object) {
var resolution = null;
let resolutions = {};
if (object.resolution !== null) {
resolution = object.resolution;
object.resolution = resolution[APP.xmpp.myJid()];
delete resolution[APP.xmpp.myJid()];
resolutions = object.resolution;
object.resolution = resolutions[APP.conference.localId];
}
localVideoThumbnail.updateStatsIndicator(percent, object);
for (var jid in resolution) {
if (resolution[jid] === null)
continue;
var resourceJid = Strophe.getResourceFromJid(jid);
if (remoteVideos[resourceJid] &&
remoteVideos[resourceJid].connectionIndicator) {
remoteVideos[resourceJid].connectionIndicator.
updateResolution(resolution[jid]);
Object.keys(resolutions).forEach(function (id) {
if (APP.conference.isLocalId(id)) {
return;
}
}
let resolution = resolutions[id];
let remoteVideo = remoteVideos[id];
if (resolution && remoteVideo) {
remoteVideo.updateResolution(resolution);
}
});
},
/**
@ -862,15 +821,9 @@ var VideoLayout = {
}
smallVideo.setVideoType(newVideoType);
LargeVideo.onVideoTypeChanged(id, newVideoType);
},
/**
* Updates the video size and position.
*/
updateLargeVideoSize () {
LargeVideo.updateVideoSizeAndPosition();
LargeVideo.position(null, null, null, null, true);
if (this.isCurrentlyOnLarge(id)) {
this.updateLargeVideo(id, true);
}
},
showMore (jid) {
@ -886,22 +839,8 @@ var VideoLayout = {
}
},
addPreziContainer (id) {
var container = RemoteVideo.createContainer(id);
VideoLayout.resizeThumbnails();
return container;
},
setLargeVideoVisible (isVisible) {
LargeVideo.setLargeVideoVisible(isVisible);
if(!isVisible && focusedVideoResourceJid) {
var smallVideo = VideoLayout.getSmallVideo(focusedVideoResourceJid);
if(smallVideo) {
smallVideo.focus(false);
smallVideo.showAvatar();
}
focusedVideoResourceJid = null;
}
addRemoteVideoContainer (id) {
return RemoteVideo.createContainer(id);
},
/**
@ -912,8 +851,15 @@ var VideoLayout = {
* resized.
*/
resizeVideoArea (isSideBarVisible, callback) {
LargeVideo.resizeVideoAreaAnimated(isSideBarVisible, callback);
VideoLayout.resizeThumbnails(true);
let animate = true;
if (largeVideo) {
largeVideo.updateContainerSize(isSideBarVisible);
largeVideo.resize(animate);
this.resizeVideoSpace(animate, isSideBarVisible, callback);
}
VideoLayout.resizeThumbnails(animate);
},
/**
@ -945,8 +891,7 @@ var VideoLayout = {
complete: completeFunction
});
} else {
$('#videospace').width(availableWidth);
$('#videospace').height(availableHeight);
$('#videospace').width(availableWidth).height(availableHeight);
}
},
@ -968,43 +913,106 @@ var VideoLayout = {
"Missed avatar update - no small video yet for " + id
);
}
LargeVideo.updateAvatar(id, thumbUrl);
},
createEtherpadIframe (src, onloadHandler) {
return LargeVideo.createEtherpadIframe(src, onloadHandler);
},
setLargeVideoState (state) {
LargeVideo.setState(state);
},
getLargeVideoState () {
return LargeVideo.getState();
},
setLargeVideoHover (inHandler, outHandler) {
LargeVideo.setHover(inHandler, outHandler);
if (this.isCurrentlyOnLarge(id)) {
largeVideo.updateAvatar(thumbUrl);
}
},
/**
* Indicates that the video has been interrupted.
*/
onVideoInterrupted () {
LargeVideo.enableVideoProblemFilter(true);
var reconnectingKey = "connection.RECONNECTING";
$('#videoConnectionMessage').attr("data-i18n", reconnectingKey);
this.enableVideoProblemFilter(true);
let reconnectingKey = "connection.RECONNECTING";
$('#videoConnectionMessage')
.text(APP.translation.translateString(reconnectingKey));
$('#videoConnectionMessage').css({display: "block"});
.attr("data-i18n", reconnectingKey)
.text(APP.translation.translateString(reconnectingKey))
.css({display: "block"});
},
/**
* Indicates that the video has been restored.
*/
onVideoRestored () {
LargeVideo.enableVideoProblemFilter(false);
this.enableVideoProblemFilter(false);
$('#videoConnectionMessage').css({display: "none"});
},
enableVideoProblemFilter (enable) {
if (!largeVideo) {
return;
}
largeVideo.enableVideoProblemFilter(enable);
},
isLargeVideoVisible () {
return this.isLargeContainerTypeVisible(VideoContainerType);
},
isCurrentlyOnLarge (id) {
return largeVideo && largeVideo.id === id;
},
updateLargeVideo (id, forceUpdate) {
if (!largeVideo) {
return;
}
let isOnLarge = this.isCurrentlyOnLarge(id);
let currentId = largeVideo.id;
if (!isOnLarge || forceUpdate) {
if (id !== currentId) {
eventEmitter.emit(UIEvents.SELECTED_ENDPOINT, id);
}
if (currentId) {
let currentSmallVideo = this.getSmallVideo(currentId);
currentSmallVideo && currentSmallVideo.enableDominantSpeaker(false);
}
let smallVideo = this.getSmallVideo(id);
let videoType = this.getRemoteVideoType(id);
largeVideo.updateLargeVideo(smallVideo.stream, videoType);
smallVideo.enableDominantSpeaker(true);
} else if (currentId) {
let currentSmallVideo = this.getSmallVideo(currentId);
currentSmallVideo.showAvatar();
}
},
showLargeVideoAvatar (show) {
largeVideo && largeVideo.showAvatar(show);
},
addLargeVideoContainer (type, container) {
largeVideo && largeVideo.addContainer(type, container);
},
removeLargeVideoContainer (type) {
largeVideo && largeVideo.removeContainer(type);
},
/**
* @returns Promise
*/
showLargeVideoContainer (type, show) {
if (!largeVideo) {
return Promise.reject();
}
let isVisible = this.isLargeContainerTypeVisible(type);
if (isVisible === show) {
return Promise.resolve();
}
// if !show then use default type - large video
return largeVideo.showContainer(show ? type : VideoContainerType);
},
isLargeContainerTypeVisible (type) {
return largeVideo && largeVideo.state === type;
}
};

View File

@ -1,7 +1,8 @@
/* global APP, config */
/* global APP, JitsiMeetJS, config */
var EventEmitter = require("events");
var DesktopSharingEventTypes
= require("../../service/desktopsharing/DesktopSharingEventTypes");
import DSEvents from '../../service/desktopsharing/DesktopSharingEventTypes';
const TrackEvents = JitsiMeetJS.events.track;
/**
* Indicates that desktop stream is currently in use (for toggle purpose).
@ -25,25 +26,22 @@ var eventEmitter = new EventEmitter();
function streamSwitchDone() {
switchInProgress = false;
eventEmitter.emit(
DesktopSharingEventTypes.SWITCHING_DONE,
isUsingScreenStream);
eventEmitter.emit(DSEvents.SWITCHING_DONE, isUsingScreenStream);
}
function newStreamCreated(track) {
eventEmitter.emit(DesktopSharingEventTypes.NEW_STREAM_CREATED,
track, streamSwitchDone);
eventEmitter.emit(DSEvents.NEW_STREAM_CREATED, track, streamSwitchDone);
}
function getVideoStreamFailed(error) {
console.error("Failed to obtain the stream to switch to", error);
function getVideoStreamFailed() {
console.error("Failed to obtain the stream to switch to");
switchInProgress = false;
isUsingScreenStream = false;
newStreamCreated(null);
}
function getDesktopStreamFailed(error) {
console.error("Failed to obtain the stream to switch to", error);
function getDesktopStreamFailed() {
console.error("Failed to obtain the stream to switch to");
switchInProgress = false;
}
@ -92,21 +90,34 @@ module.exports = {
return;
}
switchInProgress = true;
let type, handler;
let type;
if (!isUsingScreenStream) {
// Switch to desktop stream
handler = onEndedHandler;
type = "desktop";
} else {
handler = () => {};
type = "video";
}
APP.conference.createVideoTrack(type, handler).then(
(tracks) => {
// We now use screen stream
isUsingScreenStream = type === "desktop";
newStreamCreated(tracks[0]);
}).catch(getDesktopStreamFailed);
APP.createLocalTracks(type).then(function (tracks) {
if (!tracks.length) {
if (type === 'desktop') {
getDesktopStreamFailed();
} else {
getVideoStreamFailed();
}
return;
}
let stream = tracks[0];
// We now use screen stream
isUsingScreenStream = type === "desktop";
if (isUsingScreenStream) {
stream.on(TrackEvents.TRACK_STOPPED, onEndedHandler);
}
newStreamCreated(stream);
});
},
/*
* Exports the event emitter to allow use by ScreenObtainer. Not for outside

View File

@ -1,4 +1,4 @@
var UsernameGenerator = require('../util/UsernameGenerator');
import {generateUsername} from '../util/UsernameGenerator';
var email = '';
var displayName = '';
@ -32,7 +32,7 @@ if (supportsLocalStorage()) {
if (!window.localStorage.callStatsUserName) {
window.localStorage.callStatsUserName
= UsernameGenerator.generateUsername();
= generateUsername();
console.log('generated callstats uid',
window.localStorage.callStatsUserName);
@ -45,10 +45,10 @@ if (supportsLocalStorage()) {
} else {
console.log("local storage is not supported");
userId = generateUniqueId();
callStatsUserName = UsernameGenerator.generateUsername();
callStatsUserName = generateUsername();
}
var Settings = {
export default {
/**
* Sets the local user display name and saves it to local storage
@ -99,10 +99,11 @@ var Settings = {
language: language
};
},
getLanguage () {
return language;
},
setLanguage: function (lang) {
language = lang;
window.localStorage.language = lang;
}
};
module.exports = Settings;

View File

@ -1,15 +1,19 @@
function NoopAnalytics() {}
NoopAnalytics.prototype.sendEvent = function () {};
function AnalyticsAdapter() {
var AnalyticsImpl = window.Analytics || NoopAnalytics;
this.analytics = new AnalyticsImpl();
// Fallback analytics backend used when no window.Analytics implementation
// is provided; silently discards all events.
class NoopAnalytics {
sendEvent () {}
}
AnalyticsAdapter.prototype.sendEvent = function (action, data) {
try {
this.analytics.sendEvent.apply(this.analytics, arguments);
} catch (ignored) {}
};
const AnalyticsImpl = window.Analytics || NoopAnalytics;
module.exports = new AnalyticsAdapter();
/**
 * Thin wrapper around the configured analytics backend that makes event
 * reporting non-fatal: any exception thrown by the backend is swallowed.
 */
class AnalyticsAdapter {
    constructor () {
        this.analytics = new AnalyticsImpl();
    }

    sendEvent (...eventArgs) {
        try {
            this.analytics.sendEvent(...eventArgs);
        } catch (ignored) {
            // analytics failures must never break the application
        }
    }
}
export default new AnalyticsAdapter();

View File

@ -1,128 +0,0 @@
/* global config, AudioContext */
/**
* Provides statistics for the local stream.
*/
var RTCBrowserType = require('../RTC/RTCBrowserType');
var StatisticsEvents = require('../../service/statistics/Events');
/**
 * Size of the webaudio analyzer buffer.
 * @type {number}
 */
var WEBAUDIO_ANALYZER_FFT_SIZE = 2048;
/**
 * Value of the webaudio analyzer smoothing time parameter.
 * NOTE(review): "SMOOTING" is a typo for "SMOOTHING"; renaming would touch
 * every usage, so it is only flagged here.
 * @type {number}
 */
var WEBAUDIO_ANALYZER_SMOOTING_TIME = 0.8;
/**
 * Converts a time-domain byte sample buffer to a single audio level: takes
 * the loudest sample and maps the byte range (127 = midpoint/silence) onto
 * a signed level, rounded to 3 decimal places.
 * @param samples the time domain data array (byte values, 0-255)
 * @returns {number} the audio level
 */
function timeDomainDataToAudioLevel(samples) {
    var peak = 0;
    for (var i = 0; i < samples.length; i++) {
        if (samples[i] > peak) {
            peak = samples[i];
        }
    }
    return parseFloat(((peak - 127) / 128).toFixed(3));
}
/**
 * Animates an audio level change by limiting how fast the displayed level
 * may fall (at most 0.2 per step) or rise (at most 0.4 per step).
 * @param newLevel the new audio level
 * @param lastLevel the last audio level
 * @returns {Number} the audio level to be set, rounded to 3 decimals
 */
function animateLevel(newLevel, lastLevel) {
    var delta = lastLevel - newLevel;
    var next;

    if (delta > 0.2) {
        next = lastLevel - 0.2;        // cap the drop
    } else if (delta < -0.4) {
        next = lastLevel + 0.4;        // cap the rise
    } else {
        next = newLevel;               // small change: follow directly
    }

    return parseFloat(next.toFixed(3));
}
/**
 * <tt>LocalStatsCollector</tt> calculates statistics for the local stream.
 *
 * @param stream the local media stream to analyse
 * @param interval stats refresh interval given in ms.
 * @param statisticsService service object providing LOCAL_JID for events
 * @param eventEmitter emitter on which AUDIO_LEVEL events are published
 * @constructor
 */
function LocalStatsCollector(stream, interval,
                             statisticsService, eventEmitter) {
    // Normalize the vendor-prefixed AudioContext constructor once.
    window.AudioContext = window.AudioContext || window.webkitAudioContext;

    this.stream = stream;
    this.intervalMilis = interval;
    this.statisticsService = statisticsService;
    this.eventEmitter = eventEmitter;

    // runtime state: timer handle and last published level
    this.intervalId = null;
    this.audioLevel = 0;
}
/**
 * Starts collecting statistics: wires a WebAudio analyser to the local
 * stream and periodically emits an AUDIO_LEVEL event whenever the measured
 * level changes.
 */
LocalStatsCollector.prototype.start = function () {
    // Audio levels are unavailable when disabled by config, when WebAudio
    // is missing, or when the Temasys plugin is in use.
    if (config.disableAudioLevels
            || !window.AudioContext
            || RTCBrowserType.isTemasysPluginUsed()) {
        return;
    }

    var audioContext = new AudioContext();
    var analyser = audioContext.createAnalyser();
    analyser.smoothingTimeConstant = WEBAUDIO_ANALYZER_SMOOTING_TIME;
    analyser.fftSize = WEBAUDIO_ANALYZER_FFT_SIZE;

    audioContext.createMediaStreamSource(this.stream).connect(analyser);

    this.intervalId = setInterval(() => {
        var samples = new Uint8Array(analyser.frequencyBinCount);
        analyser.getByteTimeDomainData(samples);

        var level = timeDomainDataToAudioLevel(samples);
        if (level != this.audioLevel) {
            this.audioLevel = animateLevel(level, this.audioLevel);
            this.eventEmitter.emit(
                StatisticsEvents.AUDIO_LEVEL,
                this.statisticsService.LOCAL_JID,
                this.audioLevel);
        }
    }, this.intervalMilis);
};
/**
 * Stops collecting the statistics and clears the polling timer.
 * Safe to call when collection was never started.
 */
LocalStatsCollector.prototype.stop = function () {
    if (!this.intervalId) {
        return;
    }
    clearInterval(this.intervalId);
    this.intervalId = null;
};
module.exports = LocalStatsCollector;

View File

@ -385,7 +385,7 @@ StatsCollector.prototype.addStatsToBeLogged = function (reports) {
StatsCollector.prototype.logStats = function () {
if(!APP.xmpp.sendLogs(this.statsToBeLogged))
if(!APP.conference._room.xmpp.sendLogs(this.statsToBeLogged))
return;
// Reset the stats
this.statsToBeLogged.stats = {};
@ -501,7 +501,7 @@ StatsCollector.prototype.processStatsReport = function () {
var ssrc = getStatValue(now, 'ssrc');
if(!ssrc)
continue;
var jid = APP.xmpp.getJidFromSSRC(ssrc);
var jid = APP.conference._room.room.getJidBySSRC(ssrc);
if (!jid && (Date.now() - now.timestamp) < 3000) {
console.warn("No jid for ssrc: " + ssrc);
continue;
@ -647,12 +647,20 @@ StatsCollector.prototype.processStatsReport = function () {
upload:
calculatePacketLoss(lostPackets.upload, totalPackets.upload)
};
let idResolution = {};
if (resolutions) { // use id instead of jid
Object.keys(resolutions).forEach(function (jid) {
let id = Strophe.getResourceFromJid(jid);
resolution[id] = resolutions[id];
});
}
this.eventEmitter.emit(StatisticsEvents.CONNECTION_STATS,
{
"bitrate": PeerStats.bitrate,
"packetLoss": PeerStats.packetLoss,
"bandwidth": PeerStats.bandwidth,
"resolution": resolutions,
"resolution": idResolution,
"transport": PeerStats.transport
});
PeerStats.transport = [];
@ -681,7 +689,7 @@ StatsCollector.prototype.processAudioLevelReport = function () {
}
var ssrc = getStatValue(now, 'ssrc');
var jid = APP.xmpp.getJidFromSSRC(ssrc);
var jid = APP.conference._room.room.getJidBySSRC(ssrc);
if (!jid) {
if((Date.now() - now.timestamp) < 3000)
console.warn("No jid for ssrc: " + ssrc);
@ -713,7 +721,7 @@ StatsCollector.prototype.processAudioLevelReport = function () {
// but it seems to vary between 0 and around 32k.
audioLevel = audioLevel / 32767;
jidStats.setSsrcAudioLevel(ssrc, audioLevel);
if (jid != APP.xmpp.myJid()) {
if (jid != APP.conference._room.room.myroomjid) {
this.eventEmitter.emit(
StatisticsEvents.AUDIO_LEVEL, jid, audioLevel);
}

View File

@ -2,7 +2,6 @@
/**
* Created by hristo on 8/4/14.
*/
var LocalStats = require("./LocalStatsCollector.js");
var RTPStats = require("./RTPStatsCollector.js");
var EventEmitter = require("events");
var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
@ -13,17 +12,8 @@ var StatisticsEvents = require("../../service/statistics/Events");
var eventEmitter = new EventEmitter();
var localStats = null;
var rtpStats = null;
function stopLocal() {
if (localStats) {
localStats.stop();
localStats = null;
}
}
function stopRemote() {
if (rtpStats) {
rtpStats.stop();
@ -41,26 +31,15 @@ function startRemoteStats (peerconnection) {
rtpStats.start();
}
function onStreamCreated(stream) {
if(stream.getOriginalStream().getAudioTracks().length === 0) {
return;
}
localStats = new LocalStats(stream.getOriginalStream(), 200, statistics,
eventEmitter);
localStats.start();
}
function onDisposeConference(onUnload) {
CallStats.sendTerminateEvent();
stopRemote();
if(onUnload) {
stopLocal();
if (onUnload) {
eventEmitter.removeAllListeners();
}
}
var statistics = {
export default {
/**
* Indicates that this audio level is for local jid.
* @type {string}
@ -74,65 +53,61 @@ var statistics = {
eventEmitter.removeListener(type, listener);
},
stop: function () {
stopLocal();
stopRemote();
if(eventEmitter)
{
if (eventEmitter) {
eventEmitter.removeAllListeners();
}
},
stopRemoteStatistics: function()
{
stopRemote();
onAudioMute (mute) {
CallStats.sendMuteEvent(mute, "audio");
},
onVideoMute (mute) {
CallStats.sendMuteEvent(mute, "video");
},
onGetUserMediaFailed (e) {
CallStats.sendGetUserMediaFailed(e);
},
start: function () {
return;
APP.RTC.addStreamListener(onStreamCreated,
StreamEventTypes.EVENT_TYPE_LOCAL_CREATED);
APP.xmpp.addListener(XMPPEvents.DISPOSE_CONFERENCE,
onDisposeConference);
const xmpp = APP.conference._room.xmpp;
xmpp.addListener(
XMPPEvents.DISPOSE_CONFERENCE,
onDisposeConference
);
//FIXME: we may want to change CALL INCOMING event to
// onnegotiationneeded
APP.xmpp.addListener(XMPPEvents.CALL_INCOMING, function (event) {
xmpp.addListener(XMPPEvents.CALL_INCOMING, function (event) {
startRemoteStats(event.peerconnection);
// CallStats.init(event);
// CallStats.init(event);
});
APP.xmpp.addListener(XMPPEvents.PEERCONNECTION_READY,
xmpp.addListener(
XMPPEvents.PEERCONNECTION_READY,
function (session) {
CallStats.init(session);
});
APP.RTC.addListener(RTCEvents.AUDIO_MUTE, function (mute) {
CallStats.sendMuteEvent(mute, "audio");
});
APP.xmpp.addListener(XMPPEvents.CONFERENCE_SETUP_FAILED, function () {
CallStats.init(session);
}
);
xmpp.addListener(XMPPEvents.CONFERENCE_SETUP_FAILED, function () {
CallStats.sendSetupFailedEvent();
});
APP.RTC.addListener(RTCEvents.VIDEO_MUTE, function (mute) {
CallStats.sendMuteEvent(mute, "video");
});
APP.RTC.addListener(RTCEvents.GET_USER_MEDIA_FAILED, function (e) {
CallStats.sendGetUserMediaFailed(e);
});
APP.xmpp.addListener(RTCEvents.CREATE_OFFER_FAILED, function (e, pc) {
xmpp.addListener(RTCEvents.CREATE_OFFER_FAILED, function (e, pc) {
CallStats.sendCreateOfferFailed(e, pc);
});
APP.xmpp.addListener(RTCEvents.CREATE_ANSWER_FAILED, function (e, pc) {
xmpp.addListener(RTCEvents.CREATE_ANSWER_FAILED, function (e, pc) {
CallStats.sendCreateAnswerFailed(e, pc);
});
APP.xmpp.addListener(
xmpp.addListener(
RTCEvents.SET_LOCAL_DESCRIPTION_FAILED,
function (e, pc) {
CallStats.sendSetLocalDescFailed(e, pc);
}
);
APP.xmpp.addListener(
xmpp.addListener(
RTCEvents.SET_REMOTE_DESCRIPTION_FAILED,
function (e, pc) {
CallStats.sendSetRemoteDescFailed(e, pc);
}
);
APP.xmpp.addListener(
xmpp.addListener(
RTCEvents.ADD_ICE_CANDIDATE_FAILED,
function (e, pc) {
CallStats.sendAddIceCandidateFailed(e, pc);
@ -140,8 +115,3 @@ var statistics = {
);
}
};
module.exports = statistics;

View File

@ -1,7 +1,6 @@
/* global $, require, config, interfaceConfig */
var i18n = require("i18next-client");
var languages = require("../../service/translation/languages");
var Settings = require("../settings/Settings");
var DEFAULT_LANG = languages.EN;
i18n.addPostProcessor("resolveAppName", function(value, key, options) {
@ -68,7 +67,7 @@ function initCompleted(t) {
$("[data-i18n]").i18n();
}
function checkForParameter() {
function getLangFromQuery() {
var query = window.location.search.substring(1);
var vars = query.split("&");
for (var i=0;i<vars.length;i++) {
@ -82,27 +81,11 @@ function checkForParameter() {
}
module.exports = {
init: function (lang) {
var options = defaultOptions;
init: function (settingsLang) {
let options = defaultOptions;
if(!lang)
{
lang = checkForParameter();
if(!lang)
{
var settings = Settings.getSettings();
if(settings)
lang = settings.language;
if(!lang && config.defaultLanguage)
{
lang = config.defaultLanguage;
}
}
}
if(lang) {
let lang = getLangFromQuery() || settingsLang || config.defaultLanguage;
if (lang) {
options.lng = lang;
}
@ -124,8 +107,7 @@ module.exports = {
},
generateTranslationHTML: function (key, options) {
var str = "<span data-i18n=\"" + key + "\"";
if(options)
{
if (options) {
str += " data-i18n-options=\"" + JSON.stringify(options) + "\"";
}
str += ">";

View File

@ -1,4 +1,4 @@
var RandomUtil = require('./RandomUtil');
import RandomUtil from './RandomUtil';
/**
* from faker.js - Copyright (c) 2014-2015 Matthew Bergman & Marak Squires
@ -417,13 +417,9 @@ var names = [
* Generate random username.
* @returns {string} random username
*/
function generateUsername () {
export function generateUsername () {
var name = RandomUtil.randomElement(names);
var suffix = RandomUtil.randomAlphanumStr(3);
return name + '-' + suffix;
}
module.exports = {
generateUsername: generateUsername
};

View File

@ -1,127 +0,0 @@
/*
 * JingleSession provides an API to manage a single Jingle session. We will
 * have different implementations depending on the underlying interface used
 * (i.e. WebRTC and ORTC) and here we hold the code common to all of them.
 */
function JingleSession(me, sid, connection, service, eventEmitter) {
    /**
     * Our JID.
     */
    this.me = me;
    /**
     * The Jingle session identifier.
     */
    this.sid = sid;
    /**
     * The XMPP connection.
     */
    this.connection = connection;
    /**
     * The XMPP service.
     */
    this.service = service;
    /**
     * The event emitter.
     */
    this.eventEmitter = eventEmitter;
    /**
     * Whether to use dripping or not. Dripping is sending trickle candidates
     * not one-by-one.
     * Note: currently we do not support 'false'.
     */
    this.usedrip = true;
    /**
     * When dripping is used, stores ICE candidates which are to be sent.
     */
    this.drip_container = [];
    // Media constraints. Is this WebRTC only?
    this.media_constraints = null;
    // ICE servers config (RTCConfiguration?).
    this.ice_config = {};
    /**
     * Session lifecycle state ('pending' once initialize() has run).
     * BUGFIX: must start out as null — initialize() guards against double
     * initialization with `this.state !== null`, and leaving the property
     * undefined made that guard trip on the very first call (because
     * `undefined !== null` is true), so the session was never initialized.
     */
    this.state = null;
}
/**
 * Prepares this object to initiate a session.
 * @param peerjid the JID of the remote peer.
 * @param isInitiator whether we will be the Jingle initiator.
 * @param media_constraints the media constraints to use (passed through to
 *        the concrete implementation).
 * @param ice_config the ICE servers config (passed through to the concrete
 *        implementation).
 */
JingleSession.prototype.initialize = function(peerjid, isInitiator,
                                              media_constraints, ice_config) {
    this.media_constraints = media_constraints;
    this.ice_config = ice_config;
    // Refuse to initialize the same session twice.
    if (this.state !== null) {
        console.error('attempt to initiate on session ' + this.sid +
            'in state ' + this.state);
        return;
    }
    this.state = 'pending';
    this.initiator = isInitiator ? this.me : peerjid;
    this.responder = !isInitiator ? this.me : peerjid;
    this.peerjid = peerjid;
    this.doInitialize();
};
/**
 * Finishes initialization. To be overridden by concrete implementations.
 */
JingleSession.prototype.doInitialize = function() {};
/**
 * Adds the ICE candidates found in the 'contents' array as remote candidates?
 * Note: currently only used on transport-info
 */
JingleSession.prototype.addIceCandidates = function(contents) {};
/**
 * Handles an 'add-source' event.
 *
 * @param contents an array of Jingle 'content' elements.
 */
JingleSession.prototype.addSources = function(contents) {};
/**
 * Handles a 'remove-source' event.
 *
 * @param contents an array of Jingle 'content' elements.
 */
JingleSession.prototype.removeSources = function(contents) {};
/**
 * Terminates this Jingle session (stops sending media and closes the streams?)
 */
JingleSession.prototype.terminate = function() {};
/**
 * Sends a Jingle session-terminate message to the peer and terminates the
 * session.
 * @param reason
 * @param text
 */
JingleSession.prototype.sendTerminate = function(reason, text) {};
/**
 * Handles an offer from the remote peer (prepares to accept a session).
 * @param jingle the 'jingle' XML element.
 */
JingleSession.prototype.setOffer = function(jingle) {};
/**
 * Handles an answer from the remote peer (prepares to accept a session).
 * @param jingle the 'jingle' XML element.
 */
JingleSession.prototype.setAnswer = function(jingle) {};
module.exports = JingleSession;

File diff suppressed because it is too large Load Diff

View File

@ -1,268 +0,0 @@
/* global $ */
/*
Here we do modifications of local video SSRCs. There are 2 situations we have
to handle:
1. We generate SSRC for local recvonly video stream. This is the case when we
have no local camera and it is not generated automatically, but SSRC=1 is
used implicitly. If that happens RTCP packets will be dropped by the JVB
and we won't be able to request video key frames correctly.
2. A hack to re-use SSRC of the first video stream for any new stream created
in future. It turned out that Chrome may keep on using the SSRC of removed
video stream in RTCP even though a new one has been created. So we just
want to avoid that by re-using it. Jingle 'source-remove'/'source-add'
notifications are blocked once first video SSRC is advertised to the focus.
What this hack does:
1. Stores the SSRC of the first video stream created by
a) scanning Jingle session-accept/session-invite for existing video SSRC
b) watching for 'source-add' for new video stream if it has not been
created in step a)
2. Exposes method 'mungeLocalVideoSSRC' which replaces any new video SSRC with
the stored one. It is called by 'TracablePeerConnection' before local SDP is
returned to the other parts of the application.
3. Scans 'source-remove'/'source-add' notifications for stored video SSRC and
blocks those notifications. This makes Jicofo and all participants think
that it exists all the time even if the video stream has been removed or
replaced locally. Thanks to that there is no additional signaling activity
on video mute or when switching to the desktop stream.
*/
var SDP = require('./SDP');
var RTCBrowserType = require('../RTC/RTCBrowserType');
/**
 * The hack is enabled on all browsers except FF by default.
 * FIXME finish the hack once removeStream method is implemented in FF
 * @type {boolean}
 */
var isEnabled = !RTCBrowserType.isFirefox();
/**
 * Stored SSRC of the first local video stream; re-used for any video stream
 * created afterwards (see the rationale in the header comment of this file).
 */
var localVideoSSRC;
/**
 * SSRC used for recvonly video stream when we have no local camera.
 * This is in order to tell Chrome what SSRC should be used in RTCP requests
 * instead of 1.
 */
var localRecvOnlySSRC;
/**
 * cname for <tt>localRecvOnlySSRC</tt>.
 */
var localRecvOnlyCName;
/**
 * Method removes <source> element which describes <tt>localVideoSSRC</tt>
 * from given Jingle IQ.
 * @param modifyIq 'source-add' or 'source-remove' Jingle IQ.
 * @param actionName display name of the action which will be printed in log
 *        messages.
 * @returns {*} modified Jingle IQ, so that it does not contain <source> element
 *          corresponding to <tt>localVideoSSRC</tt> or <tt>null</tt> if no
 *          other SSRCs left to be signaled after removing it.
 */
var filterOutSource = function (modifyIq, actionName) {
    var modifyIqTree = $(modifyIq.tree());
    // No SSRC stored yet — pass the IQ through untouched.
    if (!localVideoSSRC)
        return modifyIqTree[0];
    var videoSSRC = modifyIqTree.find(
        '>jingle>content[name="video"]' +
        '>description>source[ssrc="' + localVideoSSRC + '"]');
    // The stored SSRC is not mentioned in this IQ — nothing to filter out.
    if (!videoSSRC.length) {
        return modifyIqTree[0];
    }
    console.info(
        'Blocking ' + actionName + ' for local video SSRC: ' + localVideoSSRC);
    videoSSRC.remove();
    // Check if any sources still left to be added/removed. If none remain the
    // whole notification is pointless and the caller should not send it.
    if (modifyIqTree.find('>jingle>content>description>source').length) {
        return modifyIqTree[0];
    } else {
        return null;
    }
};
/**
 * Scans given Jingle IQ for the first "real" video SSRC and stores it in
 * <tt>localVideoSSRC</tt> for future re-use.
 * @param jingleIq the Jingle IQ to be scanned for video SSRC.
 */
var storeLocalVideoSSRC = function (jingleIq) {
    var sources = $(jingleIq.tree())
        .find('>jingle>content[name="video"]>description>source');
    sources.each(function (idx, sourceElem) {
        // Stop considering further sources once an SSRC has been stored.
        if (localVideoSSRC) {
            return;
        }
        // We consider an SSRC real only if it has an msid attribute:
        // recvonly streams in FF do not have it, and neither do the local
        // SSRCs we generate for recvonly streams in Chrome.
        var source = $(sourceElem);
        if (!source.find('>parameter[name="msid"]').length) {
            return;
        }
        var ssrcValue = source.attr('ssrc');
        if (ssrcValue) {
            localVideoSSRC = ssrcValue;
            console.info('Stored local video SSRC' +
                ' for future re-use: ' + localVideoSSRC);
        }
    });
};
/**
 * Generates and stores a fresh SSRC and cname for the local video recvonly
 * stream.
 * FIXME what about eventual SSRC collision ?
 */
function generateRecvonlySSRC() {
    // 9 random decimal digits (substring skips the leading "0.").
    localRecvOnlySSRC = Math.random().toString(10).substring(2, 11);
    // Random alphanumeric cname (substring skips the leading "0.").
    localRecvOnlyCName = Math.random().toString(36).substring(2);
    console.info(
        "Generated local recvonly SSRC: " + localRecvOnlySSRC +
        ", cname: " + localRecvOnlyCName);
}
var LocalSSRCReplacement = {
    /**
     * Method must be called before 'session-initiate' or 'session-invite' is
     * sent. Scans the IQ for local video SSRC and stores it if detected.
     *
     * @param sessionInit our 'session-initiate' or 'session-accept' Jingle IQ
     *        which will be scanned for local video SSRC.
     */
    processSessionInit: function (sessionInit) {
        if (!isEnabled)
            return;
        if (localVideoSSRC) {
            console.error("Local SSRC stored already: " + localVideoSSRC);
            return;
        }
        storeLocalVideoSSRC(sessionInit);
    },
    /**
     * If we have local video SSRC stored searched given
     * <tt>localDescription</tt> for video SSRC and makes sure it is replaced
     * with the stored one.
     * @param localDescription local description object that will have local
     *        video SSRC replaced with the stored one
     * @returns modified <tt>localDescription</tt> object.
     */
    mungeLocalVideoSSRC: function (localDescription) {
        if (!isEnabled)
            return localDescription;
        if (!localDescription) {
            console.warn("localDescription is null or undefined");
            return localDescription;
        }
        // IF we have local video SSRC stored make sure it is replaced
        // with old SSRC
        if (localVideoSSRC) {
            var newSdp = new SDP(localDescription.sdp);
            // NOTE(review): media[1] is assumed to be the video m-line —
            // this relies on a fixed audio/video m-line ordering; confirm.
            if (newSdp.media[1].indexOf("a=ssrc:") !== -1 &&
                !newSdp.containsSSRC(localVideoSSRC)) {
                // Get new video SSRC
                var map = newSdp.getMediaSsrcMap();
                var videoPart = map[1];
                var videoSSRCs = videoPart.ssrcs;
                var newSSRC = Object.keys(videoSSRCs)[0];
                console.info(
                    "Replacing new video SSRC: " + newSSRC +
                    " with " + localVideoSSRC);
                localDescription.sdp =
                    newSdp.raw.replace(
                        new RegExp('a=ssrc:' + newSSRC, 'g'),
                        'a=ssrc:' + localVideoSSRC);
            }
        } else {
            // Make sure we have any SSRC for recvonly video stream
            var sdp = new SDP(localDescription.sdp);
            if (sdp.media[1] && sdp.media[1].indexOf('a=ssrc:') === -1 &&
                sdp.media[1].indexOf('a=recvonly') !== -1) {
                if (!localRecvOnlySSRC) {
                    generateRecvonlySSRC();
                }
                console.info('No SSRC in video recvonly stream' +
                    ' - adding SSRC: ' + localRecvOnlySSRC);
                sdp.media[1] += 'a=ssrc:' + localRecvOnlySSRC +
                    ' cname:' + localRecvOnlyCName + '\r\n';
                localDescription.sdp = sdp.session + sdp.media.join('');
            }
        }
        return localDescription;
    },
    /**
     * Method must be called before 'source-add' notification is sent. In case
     * we have local video SSRC advertised already it will be removed from the
     * notification. If no other SSRCs are described by given IQ null will be
     * returned which means that there is no point in sending the notification.
     * @param sourceAdd 'source-add' Jingle IQ to be processed
     * @returns modified 'source-add' IQ which can be sent to the focus or
     *          <tt>null</tt> if no notification shall be sent. It is no longer
     *          a Strophe IQ Builder instance, but DOM element tree.
     */
    processSourceAdd: function (sourceAdd) {
        if (!isEnabled)
            return sourceAdd;
        if (!localVideoSSRC) {
            // Store local SSRC if available
            storeLocalVideoSSRC(sourceAdd);
            return sourceAdd;
        } else {
            return filterOutSource(sourceAdd, 'source-add');
        }
    },
    /**
     * Method must be called before 'source-remove' notification is sent.
     * Removes local video SSRC from the notification. If there are no other
     * SSRCs described in the given IQ <tt>null</tt> will be returned which
     * means that there is no point in sending the notification.
     * @param sourceRemove 'source-remove' Jingle IQ to be processed
     * @returns modified 'source-remove' IQ which can be sent to the focus or
     *          <tt>null</tt> if no notification shall be sent. It is no longer
     *          a Strophe IQ Builder instance, but DOM element tree.
     */
    processSourceRemove: function (sourceRemove) {
        if (!isEnabled)
            return sourceRemove;
        return filterOutSource(sourceRemove, 'source-remove');
    },
    /**
     * Turns the hack on or off
     * @param enabled <tt>true</tt> to enable the hack or <tt>false</tt>
     *        to disable it
     */
    setEnabled: function (enabled) {
        isEnabled = enabled;
    }
};
module.exports = LocalSSRCReplacement;

View File

@ -1,645 +0,0 @@
/* jshint -W101 */
/* jshint -W117 */
var SDPUtil = require("./SDPUtil");
// SDP STUFF
/**
 * Splits a raw SDP string into its session part and one entry per m-line.
 * After construction:
 *   this.session - the session section, terminated by CRLF
 *   this.media   - array of media sections, each starting with 'm='
 *   this.raw     - session + media joined back together
 * @param sdp the raw SDP string to parse.
 */
function SDP(sdp) {
    /**
     * Whether or not to remove TCP ice candidates when translating from/to jingle.
     * @type {boolean}
     */
    this.removeTcpCandidates = false;
    /**
     * Whether or not to remove UDP ice candidates when translating from/to jingle.
     * @type {boolean}
     */
    this.removeUdpCandidates = false;
    var sections = sdp.split('\r\nm=');
    for (var idx = 1; idx < sections.length; idx++) {
        // Re-attach the 'm=' eaten by split(); all but the last section also
        // need their trailing CRLF restored.
        var part = 'm=' + sections[idx];
        if (idx !== sections.length - 1) {
            part += '\r\n';
        }
        sections[idx] = part;
    }
    this.media = sections;
    this.session = this.media.shift() + '\r\n';
    this.raw = this.session + this.media.join('');
}
/**
 * Returns map of MediaChannel mapped per channel idx.
 * Each entry has: mediaindex, mid, ssrcs (keyed by SSRC value, each holding
 * its raw 'a=ssrc:' lines) and ssrcGroups (semantics + list of SSRCs).
 */
SDP.prototype.getMediaSsrcMap = function() {
    var self = this;
    var media_ssrcs = {};
    var tmp;
    for (var mediaindex = 0; mediaindex < self.media.length; mediaindex++) {
        tmp = SDPUtil.find_lines(self.media[mediaindex], 'a=ssrc:');
        var mid = SDPUtil.parse_mid(SDPUtil.find_line(self.media[mediaindex], 'a=mid:'));
        var media = {
            mediaindex: mediaindex,
            mid: mid,
            ssrcs: {},
            ssrcGroups: []
        };
        media_ssrcs[mediaindex] = media;
        tmp.forEach(function (line) {
            // 'a=ssrc:<value> ...' — the SSRC is the first token after the prefix.
            var linessrc = line.substring(7).split(' ')[0];
            // allocate new ChannelSsrc
            if(!media.ssrcs[linessrc]) {
                media.ssrcs[linessrc] = {
                    ssrc: linessrc,
                    lines: []
                };
            }
            media.ssrcs[linessrc].lines.push(line);
        });
        tmp = SDPUtil.find_lines(self.media[mediaindex], 'a=ssrc-group:');
        tmp.forEach(function(line){
            // 'a=ssrc-group:<semantics> <ssrc1> <ssrc2> ...'
            var idx = line.indexOf(' ');
            var semantics = line.substr(0, idx).substr(13);
            var ssrcs = line.substr(14 + semantics.length).split(' ');
            if (ssrcs.length) {
                media.ssrcGroups.push({
                    semantics: semantics,
                    ssrcs: ssrcs
                });
            }
        });
    }
    return media_ssrcs;
};
/**
 * Returns <tt>true</tt> if this SDP contains given SSRC.
 * @param ssrc the ssrc to check (string, as produced by getMediaSsrcMap()).
 * @returns {boolean} <tt>true</tt> if this SDP contains given SSRC.
 */
SDP.prototype.containsSSRC = function(ssrc) {
    var medias = this.getMediaSsrcMap();
    // BUGFIX: the previous implementation did `return true` inside a
    // forEach() callback, which only exits that callback — the outer method
    // unconditionally returned false. Array#some() propagates the match.
    return Object.keys(medias).some(function(mediaindex){
        var media = medias[mediaindex];
        return Object.keys(media.ssrcs).indexOf(ssrc) != -1;
    });
};
// remove iSAC and CN from SDP
/**
 * Strips the iSAC and CN codecs from every audio m-line: rebuilds the m-line
 * payload list without them and drops their 'a=rtpmap:' lines. Video and
 * other media sections are left untouched. Refreshes <tt>this.raw</tt>.
 */
SDP.prototype.mangle = function () {
    var i, j, mline, lines, rtpmap, newdesc;
    for (i = 0; i < this.media.length; i++) {
        lines = this.media[i].split('\r\n');
        lines.pop(); // remove empty last element
        mline = SDPUtil.parse_mline(lines.shift());
        if (mline.media != 'audio')
            continue;
        newdesc = '';
        // Rebuild the payload-type list from scratch below.
        mline.fmt.length = 0;
        for (j = 0; j < lines.length; j++) {
            if (lines[j].substr(0, 9) == 'a=rtpmap:') {
                rtpmap = SDPUtil.parse_rtpmap(lines[j]);
                // Skip the codecs being removed.
                if (rtpmap.name == 'CN' || rtpmap.name == 'ISAC')
                    continue;
                mline.fmt.push(rtpmap.id);
                newdesc += lines[j] + '\r\n';
            } else {
                newdesc += lines[j] + '\r\n';
            }
        }
        this.media[i] = SDPUtil.build_mline(mline) + '\r\n';
        this.media[i] += newdesc;
    }
    this.raw = this.session + this.media.join('');
};
/**
 * Removes every line in the session section that matches the given prefix
 * and refreshes <tt>this.raw</tt>.
 * @param prefix the line prefix to match (e.g. 'a=group:').
 * @returns {Array} the removed lines.
 */
SDP.prototype.removeSessionLines = function (prefix) {
    var removed = SDPUtil.find_lines(this.session, prefix);
    for (var i = 0; i < removed.length; i++) {
        this.session = this.session.replace(removed[i] + '\r\n', '');
    }
    this.raw = this.session + this.media.join('');
    return removed;
};
/**
 * Removes every line matching the given prefix from the media section at
 * <tt>mediaindex</tt> and refreshes <tt>this.raw</tt>.
 * TODO: non-numeric mediaindex could match mid
 * @param mediaindex index of the media section to modify.
 * @param prefix the line prefix to match.
 * @returns {Array} the removed lines.
 */
SDP.prototype.removeMediaLines = function (mediaindex, prefix) {
    var removed = SDPUtil.find_lines(this.media[mediaindex], prefix);
    for (var i = 0; i < removed.length; i++) {
        this.media[mediaindex] =
            this.media[mediaindex].replace(removed[i] + '\r\n', '');
    }
    this.raw = this.session + this.media.join('');
    return removed;
};
// add content's to a jingle element
/**
 * Serializes this SDP into Jingle <content> elements appended to the given
 * strophe builder (XEP-0166/0167; rtcp-fb per XEP-0293, rtp-hdrext per
 * XEP-0294, ssrc-group per XEP-0339, grouping per the bundle draft).
 * @param elem strophe builder positioned at the <jingle> element.
 * @param thecreator value used for each content's 'creator' attribute.
 * @param ssrcs optional map of fallback SSRCs keyed by media type, used when
 *        a media section has no 'a=ssrc:' line of its own.
 * @returns the builder (elem).
 */
SDP.prototype.toJingle = function (elem, thecreator, ssrcs) {
    // console.log("SSRC" + ssrcs["audio"] + " - " + ssrcs["video"]);
    var i, j, k, mline, ssrc, rtpmap, tmp, lines;
    // new bundle plan
    if (SDPUtil.find_line(this.session, 'a=group:')) {
        lines = SDPUtil.find_lines(this.session, 'a=group:');
        for (i = 0; i < lines.length; i++) {
            tmp = lines[i].split(' ');
            var semantics = tmp.shift().substr(8);
            elem.c('group', {xmlns: 'urn:xmpp:jingle:apps:grouping:0', semantics:semantics});
            for (j = 0; j < tmp.length; j++) {
                elem.c('content', {name: tmp[j]}).up();
            }
            elem.up();
        }
    }
    // One <content> per audio/video/application m-line.
    for (i = 0; i < this.media.length; i++) {
        mline = SDPUtil.parse_mline(this.media[i].split('\r\n')[0]);
        if (!(mline.media === 'audio' ||
              mline.media === 'video' ||
              mline.media === 'application'))
        {
            continue;
        }
        if (SDPUtil.find_line(this.media[i], 'a=ssrc:')) {
            ssrc = SDPUtil.find_line(this.media[i], 'a=ssrc:').substring(7).split(' ')[0]; // take the first
        } else {
            // Fall back to the caller-provided SSRC for this media type.
            if(ssrcs && ssrcs[mline.media]) {
                ssrc = ssrcs[mline.media];
            } else {
                ssrc = false;
            }
        }
        elem.c('content', {creator: thecreator, name: mline.media});
        if (SDPUtil.find_line(this.media[i], 'a=mid:')) {
            // prefer identifier from a=mid if present
            var mid = SDPUtil.parse_mid(SDPUtil.find_line(this.media[i], 'a=mid:'));
            elem.attrs({ name: mid });
        }
        if (SDPUtil.find_line(this.media[i], 'a=rtpmap:').length) {
            elem.c('description',
                {xmlns: 'urn:xmpp:jingle:apps:rtp:1',
                    media: mline.media });
            if (ssrc) {
                elem.attrs({ssrc: ssrc});
            }
            // One <payload-type> per format, with fmtp params and rtcp-fb.
            for (j = 0; j < mline.fmt.length; j++) {
                rtpmap = SDPUtil.find_line(this.media[i], 'a=rtpmap:' + mline.fmt[j]);
                elem.c('payload-type', SDPUtil.parse_rtpmap(rtpmap));
                // put any 'a=fmtp:' + mline.fmt[j] lines into <param name=foo value=bar/>
                if (SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j])) {
                    tmp = SDPUtil.parse_fmtp(SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j]));
                    for (k = 0; k < tmp.length; k++) {
                        elem.c('parameter', tmp[k]).up();
                    }
                }
                this.rtcpFbToJingle(i, elem, mline.fmt[j]); // XEP-0293 -- map a=rtcp-fb
                elem.up();
            }
            if (SDPUtil.find_line(this.media[i], 'a=crypto:', this.session)) {
                elem.c('encryption', {required: 1});
                var crypto = SDPUtil.find_lines(this.media[i], 'a=crypto:', this.session);
                crypto.forEach(function(line) {
                    elem.c('crypto', SDPUtil.parse_crypto(line)).up();
                });
                elem.up(); // end of encryption
            }
            if (ssrc) {
                // new style mapping
                elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                // FIXME: group by ssrc and support multiple different ssrcs
                var ssrclines = SDPUtil.find_lines(this.media[i], 'a=ssrc:');
                if(ssrclines.length > 0) {
                    // Emit a <source> per distinct SSRC, with its attributes
                    // as <parameter> children.
                    ssrclines.forEach(function (line) {
                        var idx = line.indexOf(' ');
                        var linessrc = line.substr(0, idx).substr(7);
                        if (linessrc != ssrc) {
                            elem.up();
                            ssrc = linessrc;
                            elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                        }
                        var kv = line.substr(idx + 1);
                        elem.c('parameter');
                        if (kv.indexOf(':') == -1) {
                            elem.attrs({ name: kv });
                        } else {
                            var k = kv.split(':', 2)[0];
                            elem.attrs({ name: k });
                            var v = kv.split(':', 2)[1];
                            v = SDPUtil.filter_special_chars(v);
                            elem.attrs({ value: v });
                        }
                        elem.up();
                    });
                } else {
                    // No 'a=ssrc:' lines — synthesize cname/msid parameters
                    // for the fallback SSRC.
                    elem.up();
                    elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                    elem.c('parameter');
                    elem.attrs({name: "cname", value:Math.random().toString(36).substring(7)});
                    elem.up();
                    var msid = null;
                    if(mline.media == "audio") {
                        msid = APP.RTC.localAudio.getId();
                    } else {
                        msid = APP.RTC.localVideo.getId();
                    }
                    if(msid !== null) {
                        msid = SDPUtil.filter_special_chars(msid);
                        elem.c('parameter');
                        elem.attrs({name: "msid", value:msid});
                        elem.up();
                        elem.c('parameter');
                        elem.attrs({name: "mslabel", value:msid});
                        elem.up();
                        elem.c('parameter');
                        elem.attrs({name: "label", value:msid});
                        elem.up();
                    }
                }
                elem.up();
                // XEP-0339 handle ssrc-group attributes
                var ssrc_group_lines = SDPUtil.find_lines(this.media[i], 'a=ssrc-group:');
                ssrc_group_lines.forEach(function(line) {
                    var idx = line.indexOf(' ');
                    var semantics = line.substr(0, idx).substr(13);
                    var ssrcs = line.substr(14 + semantics.length).split(' ');
                    if (ssrcs.length) {
                        elem.c('ssrc-group', { semantics: semantics, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                        ssrcs.forEach(function(ssrc) {
                            elem.c('source', { ssrc: ssrc })
                                .up();
                        });
                        elem.up();
                    }
                });
            }
            if (SDPUtil.find_line(this.media[i], 'a=rtcp-mux')) {
                elem.c('rtcp-mux').up();
            }
            // XEP-0293 -- map a=rtcp-fb:*
            this.rtcpFbToJingle(i, elem, '*');
            // XEP-0294
            if (SDPUtil.find_line(this.media[i], 'a=extmap:')) {
                lines = SDPUtil.find_lines(this.media[i], 'a=extmap:');
                for (j = 0; j < lines.length; j++) {
                    tmp = SDPUtil.parse_extmap(lines[j]);
                    elem.c('rtp-hdrext', { xmlns: 'urn:xmpp:jingle:apps:rtp:rtp-hdrext:0',
                        uri: tmp.uri,
                        id: tmp.value });
                    if (tmp.hasOwnProperty('direction')) {
                        switch (tmp.direction) {
                            case 'sendonly':
                                elem.attrs({senders: 'responder'});
                                break;
                            case 'recvonly':
                                elem.attrs({senders: 'initiator'});
                                break;
                            case 'sendrecv':
                                elem.attrs({senders: 'both'});
                                break;
                            case 'inactive':
                                elem.attrs({senders: 'none'});
                                break;
                        }
                    }
                    // TODO: handle params
                    elem.up();
                }
            }
            elem.up(); // end of description
        }
        // map ice-ufrag/pwd, dtls fingerprint, candidates
        this.transportToJingle(i, elem);
        // Map the media direction onto the Jingle 'senders' attribute.
        if (SDPUtil.find_line(this.media[i], 'a=sendrecv', this.session)) {
            elem.attrs({senders: 'both'});
        } else if (SDPUtil.find_line(this.media[i], 'a=sendonly', this.session)) {
            elem.attrs({senders: 'initiator'});
        } else if (SDPUtil.find_line(this.media[i], 'a=recvonly', this.session)) {
            elem.attrs({senders: 'responder'});
        } else if (SDPUtil.find_line(this.media[i], 'a=inactive', this.session)) {
            elem.attrs({senders: 'none'});
        }
        if (mline.port == '0') {
            // estos hack to reject an m-line
            elem.attrs({senders: 'rejected'});
        }
        elem.up(); // end of content
    }
    elem.up();
    return elem;
};
/**
 * Appends a Jingle <transport> element for the media section at
 * <tt>mediaindex</tt>: sctpmap (XEP-0343), DTLS fingerprints (XEP-0320) and
 * ICE parameters/candidates (XEP-0176), honoring the
 * removeTcpCandidates/removeUdpCandidates flags.
 * @param mediaindex index of the media section to translate.
 * @param elem strophe builder positioned at the <content> element.
 */
SDP.prototype.transportToJingle = function (mediaindex, elem) {
    var tmp, sctpmap, sctpAttrs, fingerprints;
    var self = this;
    elem.c('transport');
    // XEP-0343 DTLS/SCTP
    if (SDPUtil.find_line(this.media[mediaindex], 'a=sctpmap:').length)
    {
        sctpmap = SDPUtil.find_line(
            this.media[mediaindex], 'a=sctpmap:', self.session);
        if (sctpmap)
        {
            sctpAttrs = SDPUtil.parse_sctpmap(sctpmap);
            elem.c('sctpmap',
                {
                    xmlns: 'urn:xmpp:jingle:transports:dtls-sctp:1',
                    number: sctpAttrs[0], /* SCTP port */
                    protocol: sctpAttrs[1] /* protocol */
                });
            // Optional stream count attribute
            if (sctpAttrs.length > 2)
                elem.attrs({ streams: sctpAttrs[2]});
            elem.up();
        }
    }
    // XEP-0320
    fingerprints = SDPUtil.find_lines(this.media[mediaindex], 'a=fingerprint:', this.session);
    fingerprints.forEach(function(line) {
        tmp = SDPUtil.parse_fingerprint(line);
        tmp.xmlns = 'urn:xmpp:jingle:apps:dtls:0';
        elem.c('fingerprint').t(tmp.fingerprint);
        delete tmp.fingerprint;
        // Carry the 'a=setup:' role over as the 'setup' attribute.
        line = SDPUtil.find_line(self.media[mediaindex], 'a=setup:', self.session);
        if (line) {
            tmp.setup = line.substr(8);
        }
        elem.attrs(tmp);
        elem.up(); // end of fingerprint
    });
    tmp = SDPUtil.iceparams(this.media[mediaindex], this.session);
    if (tmp) {
        tmp.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1';
        elem.attrs(tmp);
        // XEP-0176
        if (SDPUtil.find_line(this.media[mediaindex], 'a=candidate:', this.session)) { // add any a=candidate lines
            var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=candidate:', this.session);
            lines.forEach(function (line) {
                var candidate = SDPUtil.candidateToJingle(line);
                var protocol = (candidate &&
                        typeof candidate.protocol === 'string')
                    ? candidate.protocol.toLowerCase() : '';
                // Honor the configured candidate filtering.
                if ((self.removeTcpCandidates && protocol === 'tcp') ||
                    (self.removeUdpCandidates && protocol === 'udp')) {
                    return;
                }
                elem.c('candidate', candidate).up();
            });
        }
    }
    elem.up(); // end of transport
};
/**
 * Translates 'a=rtcp-fb:<payloadtype>' lines of the media section at
 * <tt>mediaindex</tt> into <rtcp-fb>/<rtcp-fb-trr-int> children (XEP-0293).
 * @param mediaindex index of the media section to scan.
 * @param elem strophe builder to append the elements to.
 * @param payloadtype payload type id to match, or '*' for session-wide lines.
 */
SDP.prototype.rtcpFbToJingle = function (mediaindex, elem, payloadtype) { // XEP-0293
    var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=rtcp-fb:' + payloadtype);
    lines.forEach(function (line) {
        var tmp = SDPUtil.parse_rtcpfb(line);
        if (tmp.type == 'trr-int') {
            elem.c('rtcp-fb-trr-int', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', value: tmp.params[0]});
            elem.up();
        } else {
            elem.c('rtcp-fb', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', type: tmp.type});
            if (tmp.params.length > 0) {
                elem.attrs({'subtype': tmp.params[0]});
            }
            elem.up();
        }
    });
};
/**
 * Builds 'a=rtcp-fb:' SDP lines from the <rtcp-fb>/<rtcp-fb-trr-int>
 * children of the given element (XEP-0293).
 * @param elem jQuery-wrapped element to read the rtcp-fb children from.
 * @param payloadtype payload type id to emit in the generated lines.
 * @returns {string} the generated SDP lines (possibly empty).
 */
SDP.prototype.rtcpFbFromJingle = function (elem, payloadtype) { // XEP-0293
    var media = '';
    var tmp = elem.find('>rtcp-fb-trr-int[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
    if (tmp.length) {
        media += 'a=rtcp-fb:' + '*' + ' ' + 'trr-int' + ' ';
        if (tmp.attr('value')) {
            media += tmp.attr('value');
        } else {
            media += '0';
        }
        media += '\r\n';
    }
    tmp = elem.find('>rtcp-fb[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
    tmp.each(function () {
        media += 'a=rtcp-fb:' + payloadtype + ' ' + $(this).attr('type');
        if ($(this).attr('subtype')) {
            media += ' ' + $(this).attr('subtype');
        }
        media += '\r\n';
    });
    return media;
};
// construct an SDP from a jingle stanza
/**
 * Populates this SDP (session, media, raw) from a <jingle> stanza: emits a
 * fixed session header, 'a=group:' lines for bundle groups, then one media
 * section per <content> via jingle2media().
 * @param jingle jQuery-wrapped <jingle> element.
 */
SDP.prototype.fromJingle = function (jingle) {
    var self = this;
    this.raw = 'v=0\r\n' +
        'o=- 1923518516 2 IN IP4 0.0.0.0\r\n' +// FIXME
        's=-\r\n' +
        't=0 0\r\n';
    // http://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-04#section-8
    if ($(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').length) {
        $(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').each(function (idx, group) {
            var contents = $(group).find('>content').map(function (idx, content) {
                return content.getAttribute('name');
            }).get();
            if (contents.length > 0) {
                self.raw += 'a=group:' + (group.getAttribute('semantics') || group.getAttribute('type')) + ' ' + contents.join(' ') + '\r\n';
            }
        });
    }
    this.session = this.raw;
    jingle.find('>content').each(function () {
        var m = self.jingle2media($(this));
        self.media.push(m);
    });
    // reconstruct msid-semantic -- apparently not necessary
    /*
     var msid = SDPUtil.parse_ssrc(this.raw);
     if (msid.hasOwnProperty('mslabel')) {
     this.session += "a=msid-semantic: WMS " + msid.mslabel + "\r\n";
     }
     */
    this.raw = this.session + this.media.join('');
};
// translate a jingle content element into an SDP media part
/**
 * Converts a single Jingle <content> element into an SDP media section
 * (m-line, connection, ICE/DTLS transport, direction, payload types,
 * rtcp-fb, hdrext, candidates, ssrc-group and ssrc parameter lines).
 * @param content jQuery-wrapped <content> element.
 * @returns {string} the SDP media section text.
 */
SDP.prototype.jingle2media = function (content) {
    var media = '',
        desc = content.find('description'),
        ssrc = desc.attr('ssrc'),
        self = this,
        tmp;
    var sctp = content.find(
        '>transport>sctpmap[xmlns="urn:xmpp:jingle:transports:dtls-sctp:1"]');
    tmp = { media: desc.attr('media') };
    tmp.port = '1';
    if (content.attr('senders') == 'rejected') {
        // estos hack to reject an m-line.
        tmp.port = '0';
    }
    // Pick the m-line protocol depending on DTLS/encryption and SCTP.
    if (content.find('>transport>fingerprint').length || desc.find('encryption').length) {
        if (sctp.length)
            tmp.proto = 'DTLS/SCTP';
        else
            tmp.proto = 'RTP/SAVPF';
    } else {
        tmp.proto = 'RTP/AVPF';
    }
    if (!sctp.length) {
        tmp.fmt = desc.find('payload-type').map(
            function () { return this.getAttribute('id'); }).get();
        media += SDPUtil.build_mline(tmp) + '\r\n';
    } else {
        media += 'm=application 1 DTLS/SCTP ' + sctp.attr('number') + '\r\n';
        media += 'a=sctpmap:' + sctp.attr('number') +
            ' ' + sctp.attr('protocol');
        var streamCount = sctp.attr('streams');
        if (streamCount)
            media += ' ' + streamCount + '\r\n';
        else
            media += '\r\n';
    }
    media += 'c=IN IP4 0.0.0.0\r\n';
    if (!sctp.length)
        media += 'a=rtcp:1 IN IP4 0.0.0.0\r\n';
    tmp = content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
    if (tmp.length) {
        if (tmp.attr('ufrag')) {
            media += SDPUtil.build_iceufrag(tmp.attr('ufrag')) + '\r\n';
        }
        if (tmp.attr('pwd')) {
            media += SDPUtil.build_icepwd(tmp.attr('pwd')) + '\r\n';
        }
        tmp.find('>fingerprint').each(function () {
            // FIXME: check namespace at some point
            media += 'a=fingerprint:' + this.getAttribute('hash');
            media += ' ' + $(this).text();
            media += '\r\n';
            if (this.getAttribute('setup')) {
                media += 'a=setup:' + this.getAttribute('setup') + '\r\n';
            }
        });
    }
    // Map the Jingle 'senders' attribute back onto an SDP direction line.
    switch (content.attr('senders')) {
        case 'initiator':
            media += 'a=sendonly\r\n';
            break;
        case 'responder':
            media += 'a=recvonly\r\n';
            break;
        case 'none':
            media += 'a=inactive\r\n';
            break;
        case 'both':
            media += 'a=sendrecv\r\n';
            break;
    }
    media += 'a=mid:' + content.attr('name') + '\r\n';
    // <description><rtcp-mux/></description>
    // see http://code.google.com/p/libjingle/issues/detail?id=309 -- no spec though
    // and http://mail.jabber.org/pipermail/jingle/2011-December/001761.html
    if (desc.find('rtcp-mux').length) {
        media += 'a=rtcp-mux\r\n';
    }
    if (desc.find('encryption').length) {
        desc.find('encryption>crypto').each(function () {
            media += 'a=crypto:' + this.getAttribute('tag');
            media += ' ' + this.getAttribute('crypto-suite');
            media += ' ' + this.getAttribute('key-params');
            if (this.getAttribute('session-params')) {
                media += ' ' + this.getAttribute('session-params');
            }
            media += '\r\n';
        });
    }
    // Payload types with their fmtp parameters and rtcp-fb lines.
    desc.find('payload-type').each(function () {
        media += SDPUtil.build_rtpmap(this) + '\r\n';
        if ($(this).find('>parameter').length) {
            media += 'a=fmtp:' + this.getAttribute('id') + ' ';
            media += $(this).find('parameter').map(function () {
                return (this.getAttribute('name')
                    ? (this.getAttribute('name') + '=') : '') +
                    this.getAttribute('value');
            }).get().join('; ');
            media += '\r\n';
        }
        // xep-0293
        media += self.rtcpFbFromJingle($(this), this.getAttribute('id'));
    });
    // xep-0293
    media += self.rtcpFbFromJingle(desc, '*');
    // xep-0294
    tmp = desc.find('>rtp-hdrext[xmlns="urn:xmpp:jingle:apps:rtp:rtp-hdrext:0"]');
    tmp.each(function () {
        media += 'a=extmap:' + this.getAttribute('id') + ' ' + this.getAttribute('uri') + '\r\n';
    });
    // ICE candidates, honoring the configured TCP/UDP filtering.
    content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]>candidate').each(function () {
        var protocol = this.getAttribute('protocol');
        protocol = (typeof protocol === 'string') ? protocol.toLowerCase(): '';
        if ((self.removeTcpCandidates && protocol === 'tcp') ||
            (self.removeUdpCandidates && protocol === 'udp')) {
            return;
        }
        media += SDPUtil.candidateFromJingle(this);
    });
    // XEP-0339 handle ssrc-group attributes
    content.find('description>ssrc-group[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]').each(function() {
        var semantics = this.getAttribute('semantics');
        var ssrcs = $(this).find('>source').map(function() {
            return this.getAttribute('ssrc');
        }).get();
        if (ssrcs.length) {
            media += 'a=ssrc-group:' + semantics + ' ' + ssrcs.join(' ') + '\r\n';
        }
    });
    tmp = content.find('description>source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]');
    tmp.each(function () {
        var ssrc = this.getAttribute('ssrc');
        $(this).find('>parameter').each(function () {
            var name = this.getAttribute('name');
            var value = this.getAttribute('value');
            value = SDPUtil.filter_special_chars(value);
            media += 'a=ssrc:' + ssrc + ' ' + name;
            if (value && value.length)
                media += ':' + value;
            media += '\r\n';
        });
    });
    return media;
};
module.exports = SDP;

View File

@ -1,168 +0,0 @@
var SDPUtil = require("./SDPUtil");
/**
 * Helper that computes the media/SSRC information present in <tt>mySDP</tt>
 * but missing from <tt>otherSDP</tt> (see getNewMedia/toJingle).
 * @param mySDP the SDP whose additions we are interested in.
 * @param otherSDP the SDP to diff against.
 */
function SDPDiffer(mySDP, otherSDP) {
    this.mySDP = mySDP;
    this.otherSDP = otherSDP;
}
/**
 * Returns map of MediaChannel that contains media contained in
 * 'mySDP', but not contained in 'otherSdp'. Mapped by channel idx.
 * NOTE(review): despite the doc above, this diffs otherSDP against mySDP
 * (it iterates otherSDP's media) — confirm intended direction with callers.
 */
SDPDiffer.prototype.getNewMedia = function() {
    // this could be useful in Array.prototype.
    function arrayEquals(array) {
        // if the other array is a falsy value, return
        if (!array)
            return false;
        // compare lengths - can save a lot of time
        if (this.length != array.length)
            return false;
        for (var i = 0, l=this.length; i < l; i++) {
            // Check if we have nested arrays
            if (this[i] instanceof Array && array[i] instanceof Array) {
                // recurse into the nested arrays
                // NOTE(review): Array has no .equals method, so this branch
                // would throw; it is unreachable for the flat SSRC string
                // arrays this function is applied to.
                if (!this[i].equals(array[i]))
                    return false;
            }
            else if (this[i] != array[i]) {
                // Warning - two different object instances will never be
                // equal: {x:20} != {x:20}
                return false;
            }
        }
        return true;
    }
    var myMedias = this.mySDP.getMediaSsrcMap();
    var othersMedias = this.otherSDP.getMediaSsrcMap();
    var newMedia = {};
    Object.keys(othersMedias).forEach(function(othersMediaIdx) {
        var myMedia = myMedias[othersMediaIdx];
        var othersMedia = othersMedias[othersMediaIdx];
        if(!myMedia && othersMedia) {
            // Add whole channel
            newMedia[othersMediaIdx] = othersMedia;
            return;
        }
        // Look for new ssrcs across the channel
        Object.keys(othersMedia.ssrcs).forEach(function(ssrc) {
            if(Object.keys(myMedia.ssrcs).indexOf(ssrc) === -1) {
                // Allocate channel if we've found ssrc that doesn't exist in
                // our channel
                if(!newMedia[othersMediaIdx]){
                    newMedia[othersMediaIdx] = {
                        mediaindex: othersMedia.mediaindex,
                        mid: othersMedia.mid,
                        ssrcs: {},
                        ssrcGroups: []
                    };
                }
                newMedia[othersMediaIdx].ssrcs[ssrc] = othersMedia.ssrcs[ssrc];
            }
        });
        // Look for new ssrc groups across the channels
        othersMedia.ssrcGroups.forEach(function(otherSsrcGroup){
            // try to match the other ssrc-group with an ssrc-group of ours
            var matched = false;
            for (var i = 0; i < myMedia.ssrcGroups.length; i++) {
                var mySsrcGroup = myMedia.ssrcGroups[i];
                if (otherSsrcGroup.semantics == mySsrcGroup.semantics &&
                    arrayEquals.apply(otherSsrcGroup.ssrcs,
                        [mySsrcGroup.ssrcs])) {
                    matched = true;
                    break;
                }
            }
            if (!matched) {
                // Allocate channel if we've found an ssrc-group that doesn't
                // exist in our channel
                if(!newMedia[othersMediaIdx]){
                    newMedia[othersMediaIdx] = {
                        mediaindex: othersMedia.mediaindex,
                        mid: othersMedia.mid,
                        ssrcs: {},
                        ssrcGroups: []
                    };
                }
                newMedia[othersMediaIdx].ssrcGroups.push(otherSsrcGroup);
            }
        });
    });
    return newMedia;
};
/**
 * Serializes the media returned by getNewMedia() into Jingle <content>
 * elements appended to the given Strophe builder.
 * @param modify a Strophe builder positioned where the <content> elements
 * should be added.
 * @returns {boolean} true when at least one <content> element was added.
 */
SDPDiffer.prototype.toJingle = function(modify) {
    var sdpMediaSsrcs = this.getNewMedia();
    var modified = false;
    Object.keys(sdpMediaSsrcs).forEach(function(mediaindex){
        modified = true;
        var media = sdpMediaSsrcs[mediaindex];
        modify.c('content', {name: media.mid});

        modify.c('description',
            {xmlns:'urn:xmpp:jingle:apps:rtp:1', media: media.mid});
        // FIXME: not completely sure this operates on blocks and / or handles
        // different ssrcs correctly
        // generate sources from lines
        Object.keys(media.ssrcs).forEach(function(ssrcNum) {
            var mediaSsrc = media.ssrcs[ssrcNum];
            modify.c('source', { xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
            modify.attrs({ssrc: mediaSsrc.ssrc});
            // iterate over ssrc lines
            mediaSsrc.lines.forEach(function (line) {
                // Split an "a=ssrc:<num> name[:value]" line into a
                // <parameter name=... [value=...]/> element.
                var idx = line.indexOf(' ');
                var kv = line.substr(idx + 1);
                modify.c('parameter');
                if (kv.indexOf(':') == -1) {
                    modify.attrs({ name: kv });
                } else {
                    var nv = kv.split(':', 2);
                    var name = nv[0];
                    var value = SDPUtil.filter_special_chars(nv[1]);
                    modify.attrs({ name: name });
                    modify.attrs({ value: value });
                }
                modify.up(); // end of parameter
            });
            modify.up(); // end of source
        });
        // generate source groups from lines
        media.ssrcGroups.forEach(function(ssrcGroup) {
            if (ssrcGroup.ssrcs.length) {
                modify.c('ssrc-group', {
                    semantics: ssrcGroup.semantics,
                    xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0'
                });
                ssrcGroup.ssrcs.forEach(function (ssrc) {
                    modify.c('source', { ssrc: ssrc })
                        .up(); // end of source
                });
                modify.up(); // end of ssrc-group
            }
        });
        modify.up(); // end of description
        modify.up(); // end of content
    });
    return modified;
};

module.exports = SDPDiffer;

View File

@ -1,361 +0,0 @@
/* jshint -W101 */
var RTCBrowserType = require('../RTC/RTCBrowserType');
/**
 * Static helpers for parsing/serializing individual SDP lines and for
 * converting ICE candidates between SDP and Jingle (XEP-0176) form.
 */
var SDPUtil = {
    // Strips characters that break our SDP<->Jingle serialization:
    // backslash, slash, curly braces, comma and plus.
    filter_special_chars: function (text) {
        return text.replace(/[\\\/\{,\}\+]/g, "");
    },
    /**
     * Extracts {ufrag, pwd} from a media description (falling back to the
     * session part); returns null when the ICE parameters are missing.
     */
    iceparams: function (mediadesc, sessiondesc) {
        var data = null;
        if (SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc) &&
            SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc)) {
            data = {
                ufrag: SDPUtil.parse_iceufrag(SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc)),
                pwd: SDPUtil.parse_icepwd(SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc))
            };
        }
        return data;
    },
    // 'a=ice-ufrag:xxx' -> 'xxx'
    parse_iceufrag: function (line) {
        return line.substring(12);
    },
    build_iceufrag: function (frag) {
        return 'a=ice-ufrag:' + frag;
    },
    // 'a=ice-pwd:xxx' -> 'xxx'
    parse_icepwd: function (line) {
        return line.substring(10);
    },
    build_icepwd: function (pwd) {
        return 'a=ice-pwd:' + pwd;
    },
    // 'a=mid:xxx' -> 'xxx'
    parse_mid: function (line) {
        return line.substring(6);
    },
    /**
     * Parses an 'm=' line into {media, port, proto, fmt[]}.
     */
    parse_mline: function (line) {
        var parts = line.substring(2).split(' '),
            data = {};
        data.media = parts.shift();
        data.port = parts.shift();
        data.proto = parts.shift();
        if (parts[parts.length - 1] === '') { // trailing whitespace
            parts.pop();
        }
        data.fmt = parts;
        return data;
    },
    build_mline: function (mline) {
        return 'm=' + mline.media + ' ' + mline.port + ' ' + mline.proto + ' ' + mline.fmt.join(' ');
    },
    /**
     * Parses an 'a=rtpmap:' line into {id, name, clockrate, channels}
     * (channels defaults to '1' when absent).
     */
    parse_rtpmap: function (line) {
        var parts = line.substring(9).split(' '),
            data = {};
        data.id = parts.shift();
        parts = parts[0].split('/');
        data.name = parts.shift();
        data.clockrate = parts.shift();
        data.channels = parts.length ? parts.shift() : '1';
        return data;
    },
    /**
     * Parses SDP line "a=sctpmap:..." and extracts SCTP port from it.
     * @param line eg. "a=sctpmap:5000 webrtc-datachannel"
     * @returns [SCTP port number, protocol, streams]
     */
    parse_sctpmap: function (line) {
        var parts = line.substring(10).split(' ');
        var sctpPort = parts[0];
        var protocol = parts[1];
        // Stream count is optional
        var streamCount = parts.length > 2 ? parts[2] : null;
        return [sctpPort, protocol, streamCount];// SCTP port
    },
    // Builds an 'a=rtpmap:' line from a Jingle <payload-type> element.
    build_rtpmap: function (el) {
        var line = 'a=rtpmap:' + el.getAttribute('id') + ' ' + el.getAttribute('name') + '/' + el.getAttribute('clockrate');
        if (el.getAttribute('channels') && el.getAttribute('channels') != '1') {
            line += '/' + el.getAttribute('channels');
        }
        return line;
    },
    /**
     * Parses an 'a=crypto:' line (RFC 4568) into its tag, suite and params.
     */
    parse_crypto: function (line) {
        var parts = line.substring(9).split(' '),
            data = {};
        data.tag = parts.shift();
        data['crypto-suite'] = parts.shift();
        data['key-params'] = parts.shift();
        if (parts.length) {
            data['session-params'] = parts.join(' ');
        }
        return data;
    },
    parse_fingerprint: function (line) { // RFC 4572
        var parts = line.substring(14).split(' '),
            data = {};
        data.hash = parts.shift();
        data.fingerprint = parts.shift();
        // TODO assert that fingerprint satisfies 2UHEX *(":" 2UHEX) ?
        return data;
    },
    /**
     * Parses an 'a=fmtp:' line into a list of {name, value} parameters.
     */
    parse_fmtp: function (line) {
        var parts = line.split(' '),
            i, key, value,
            data = [];
        parts.shift();
        parts = parts.join(' ').split(';');
        for (i = 0; i < parts.length; i++) {
            key = parts[i].split('=')[0];
            // trim leading spaces left over from the '; ' separators
            while (key.length && key[0] == ' ') {
                key = key.substring(1);
            }
            value = parts[i].split('=')[1];
            if (key && value) {
                data.push({name: key, value: value});
            } else if (key) {
                // rfc 4733 (DTMF) style stuff
                data.push({name: '', value: key});
            }
        }
        return data;
    },
    /**
     * Parses an 'a=candidate:' line (RFC 5245) into a candidate object.
     */
    parse_icecandidate: function (line) {
        var candidate = {},
            elems = line.split(' ');
        candidate.foundation = elems[0].substring(12);
        candidate.component = elems[1];
        candidate.protocol = elems[2].toLowerCase();
        candidate.priority = elems[3];
        candidate.ip = elems[4];
        candidate.port = elems[5];
        // elems[6] => "typ"
        candidate.type = elems[7];
        candidate.generation = 0; // default value, may be overwritten below
        for (var i = 8; i < elems.length; i += 2) {
            switch (elems[i]) {
                case 'raddr':
                    candidate['rel-addr'] = elems[i + 1];
                    break;
                case 'rport':
                    candidate['rel-port'] = elems[i + 1];
                    break;
                case 'generation':
                    candidate.generation = elems[i + 1];
                    break;
                case 'tcptype':
                    candidate.tcptype = elems[i + 1];
                    break;
                default: // TODO
                    console.log('parse_icecandidate not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
            }
        }
        candidate.network = '1';
        candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
        return candidate;
    },
    /**
     * Serializes a candidate object (as produced by parse_icecandidate)
     * back into an 'a=candidate:' line (without trailing CRLF).
     */
    build_icecandidate: function (cand) {
        var line = ['a=candidate:' + cand.foundation, cand.component, cand.protocol, cand.priority, cand.ip, cand.port, 'typ', cand.type].join(' ');
        line += ' ';
        switch (cand.type) {
            case 'srflx':
            case 'prflx':
            case 'relay':
                // FIX: was cand.hasOwnAttribute(...) - no such method exists
                // on plain objects, so this used to throw a TypeError.
                if (cand.hasOwnProperty('rel-addr') &&
                        cand.hasOwnProperty('rel-port')) {
                    line += 'raddr';
                    line += ' ';
                    line += cand['rel-addr'];
                    line += ' ';
                    line += 'rport';
                    line += ' ';
                    line += cand['rel-port'];
                    line += ' ';
                }
                break;
        }
        if (cand.hasOwnProperty('tcptype')) {
            line += 'tcptype';
            line += ' ';
            line += cand.tcptype;
            line += ' ';
        }
        line += 'generation';
        line += ' ';
        line += cand.hasOwnProperty('generation') ? cand.generation : '0';
        return line;
    },
    /**
     * Proprietary mapping of a=ssrc lines to a {attribute: value} object.
     * TODO: see "Jingle RTP Source Description" by Juberti and P. Thatcher
     * on google docs and parse according to that.
     */
    parse_ssrc: function (desc) {
        var lines = desc.split('\r\n'),
            data = {};
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, 7) == 'a=ssrc:') {
                var idx = lines[i].indexOf(' ');
                data[lines[i].substr(idx + 1).split(':', 2)[0]] = lines[i].substr(idx + 1).split(':', 2)[1];
            }
        }
        return data;
    },
    // 'a=rtcp-fb:<pt> <type> [params...]' -> {pt, type, params[]}
    parse_rtcpfb: function (line) {
        var parts = line.substr(10).split(' ');
        var data = {};
        data.pt = parts.shift();
        data.type = parts.shift();
        data.params = parts;
        return data;
    },
    // 'a=extmap:<value>[/<direction>] <uri> [params...]'
    parse_extmap: function (line) {
        var parts = line.substr(9).split(' ');
        var data = {};
        data.value = parts.shift();
        if (data.value.indexOf('/') != -1) {
            data.direction = data.value.substr(data.value.indexOf('/') + 1);
            data.value = data.value.substr(0, data.value.indexOf('/'));
        } else {
            data.direction = 'both';
        }
        data.uri = parts.shift();
        data.params = parts;
        return data;
    },
    /**
     * Returns the first line of 'haystack' starting with 'needle', searching
     * 'sessionpart' as a fallback; false when not found.
     */
    find_line: function (haystack, needle, sessionpart) {
        var lines = haystack.split('\r\n');
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, needle.length) == needle) {
                return lines[i];
            }
        }
        if (!sessionpart) {
            return false;
        }
        // search session part
        lines = sessionpart.split('\r\n');
        for (var j = 0; j < lines.length; j++) {
            if (lines[j].substring(0, needle.length) == needle) {
                return lines[j];
            }
        }
        return false;
    },
    /**
     * Returns all lines of 'haystack' starting with 'needle'; only when none
     * match is 'sessionpart' searched.
     */
    find_lines: function (haystack, needle, sessionpart) {
        var lines = haystack.split('\r\n'),
            needles = [];
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, needle.length) == needle)
                needles.push(lines[i]);
        }
        if (needles.length || !sessionpart) {
            return needles;
        }
        // search session part
        lines = sessionpart.split('\r\n');
        for (var j = 0; j < lines.length; j++) {
            if (lines[j].substring(0, needle.length) == needle) {
                needles.push(lines[j]);
            }
        }
        return needles;
    },
    /**
     * Converts an SDP candidate line into a candidate object suitable for a
     * Jingle <candidate/> element; returns null on malformed input.
     */
    candidateToJingle: function (line) {
        // a=candidate:2979166662 1 udp 2113937151 192.168.2.100 57698 typ host generation 0
        // <candidate component=... foundation=... generation=... id=... ip=... network=... port=... priority=... protocol=... type=.../>
        if (line.indexOf('candidate:') === 0) {
            line = 'a=' + line;
        } else if (line.substring(0, 12) != 'a=candidate:') {
            console.log('parseCandidate called with a line that is not a candidate line');
            console.log(line);
            return null;
        }
        if (line.substring(line.length - 2) == '\r\n') // chomp it
            line = line.substring(0, line.length - 2);
        var candidate = {},
            elems = line.split(' '),
            i;
        if (elems[6] != 'typ') {
            console.log('did not find typ in the right place');
            console.log(line);
            return null;
        }
        candidate.foundation = elems[0].substring(12);
        candidate.component = elems[1];
        candidate.protocol = elems[2].toLowerCase();
        candidate.priority = elems[3];
        candidate.ip = elems[4];
        candidate.port = elems[5];
        // elems[6] => "typ"
        candidate.type = elems[7];
        candidate.generation = '0'; // default, may be overwritten below
        for (i = 8; i < elems.length; i += 2) {
            switch (elems[i]) {
                case 'raddr':
                    candidate['rel-addr'] = elems[i + 1];
                    break;
                case 'rport':
                    candidate['rel-port'] = elems[i + 1];
                    break;
                case 'generation':
                    candidate.generation = elems[i + 1];
                    break;
                case 'tcptype':
                    candidate.tcptype = elems[i + 1];
                    break;
                default: // TODO
                    console.log('not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
            }
        }
        candidate.network = '1';
        candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
        return candidate;
    },
    /**
     * Converts a Jingle <candidate/> element into an SDP candidate line
     * (CRLF-terminated).
     */
    candidateFromJingle: function (cand) {
        var line = 'a=candidate:';
        line += cand.getAttribute('foundation');
        line += ' ';
        line += cand.getAttribute('component');
        line += ' ';

        var protocol = cand.getAttribute('protocol');
        // use tcp candidates for FF
        if (RTCBrowserType.isFirefox() && protocol.toLowerCase() == 'ssltcp') {
            protocol = 'tcp';
        }

        // FIX: the protocol token was computed but never appended, which
        // produced a malformed candidate line ("...component  priority...").
        line += protocol;
        line += ' ';
        line += cand.getAttribute('priority');
        line += ' ';
        line += cand.getAttribute('ip');
        line += ' ';
        line += cand.getAttribute('port');
        line += ' ';
        line += 'typ';
        line += ' ' + cand.getAttribute('type');
        line += ' ';
        switch (cand.getAttribute('type')) {
            case 'srflx':
            case 'prflx':
            case 'relay':
                if (cand.getAttribute('rel-addr') && cand.getAttribute('rel-port')) {
                    line += 'raddr';
                    line += ' ';
                    line += cand.getAttribute('rel-addr');
                    line += ' ';
                    line += 'rport';
                    line += ' ';
                    line += cand.getAttribute('rel-port');
                    line += ' ';
                }
                break;
        }
        if (protocol.toLowerCase() == 'tcp') {
            line += 'tcptype';
            line += ' ';
            line += cand.getAttribute('tcptype');
            line += ' ';
        }
        line += 'generation';
        line += ' ';
        line += cand.getAttribute('generation') || '0';
        return line + '\r\n';
    }
};
module.exports = SDPUtil;

View File

@ -1,449 +0,0 @@
/* global $, config, mozRTCPeerConnection, RTCPeerConnection,
webkitRTCPeerConnection, RTCSessionDescription */
/* jshint -W101 */
var RTC = require('../RTC/RTC');
var RTCBrowserType = require("../RTC/RTCBrowserType");
var RTCEvents = require("../../service/RTC/RTCEvents");
var SSRCReplacement = require("./LocalSSRCReplacement");
/**
 * Wraps a browser RTCPeerConnection, recording every call and event into
 * 'updateLog' and applying the SDP interop (Plan A <-> Plan B) and simulcast
 * munging used elsewhere in this file.
 * @param ice_config ICE configuration passed to the RTCPeerConnection.
 * @param constraints constraints passed to the RTCPeerConnection.
 * @param session owning session; its eventEmitter is used to report errors.
 */
function TraceablePeerConnection(ice_config, constraints, session) {
    var self = this;
    // Pick the RTCPeerConnection implementation for the current browser.
    var RTCPeerConnectionType = null;
    if (RTCBrowserType.isFirefox()) {
        RTCPeerConnectionType = mozRTCPeerConnection;
    } else if (RTCBrowserType.isTemasysPluginUsed()) {
        RTCPeerConnectionType = RTCPeerConnection;
    } else {
        RTCPeerConnectionType = webkitRTCPeerConnection;
    }
    self.eventEmitter = session.eventEmitter;
    this.peerconnection = new RTCPeerConnectionType(ice_config, constraints);
    this.updateLog = [];
    this.stats = {};
    this.statsinterval = null;
    this.maxstats = 0; // limit to 300 values, i.e. 5 minutes; set to 0 to disable
    var Interop = require('sdp-interop').Interop;
    this.interop = new Interop();
    var Simulcast = require('sdp-simulcast');
    this.simulcast = new Simulcast({numOfLayers: 3, explodeRemoteSimulcast: false});

    // Appends an entry to the update log. override as desired
    this.trace = function (what, info) {
        /*console.warn('WTRACE', what, info);
        if (info && RTCBrowserType.isIExplorer()) {
            if (info.length > 1024) {
                console.warn('WTRACE', what, info.substr(1024));
            }
            if (info.length > 2048) {
                console.warn('WTRACE', what, info.substr(2048));
            }
        }*/
        self.updateLog.push({
            time: new Date(),
            type: what,
            value: info || ""
        });
    };

    // Each of the on<event> fields below may be assigned by the owner; the
    // wrappers trace the event and forward it when a handler is set.
    this.onicecandidate = null;
    this.peerconnection.onicecandidate = function (event) {
        // FIXME: this causes stack overflow with Temasys Plugin
        if (!RTCBrowserType.isTemasysPluginUsed())
            self.trace('onicecandidate', JSON.stringify(event.candidate, null, ' '));
        if (self.onicecandidate !== null) {
            self.onicecandidate(event);
        }
    };
    this.onaddstream = null;
    this.peerconnection.onaddstream = function (event) {
        self.trace('onaddstream', event.stream.id);
        if (self.onaddstream !== null) {
            self.onaddstream(event);
        }
    };
    this.onremovestream = null;
    this.peerconnection.onremovestream = function (event) {
        self.trace('onremovestream', event.stream.id);
        if (self.onremovestream !== null) {
            self.onremovestream(event);
        }
    };
    this.onsignalingstatechange = null;
    this.peerconnection.onsignalingstatechange = function (event) {
        self.trace('onsignalingstatechange', self.signalingState);
        if (self.onsignalingstatechange !== null) {
            self.onsignalingstatechange(event);
        }
    };
    this.oniceconnectionstatechange = null;
    this.peerconnection.oniceconnectionstatechange = function (event) {
        self.trace('oniceconnectionstatechange', self.iceConnectionState);
        if (self.oniceconnectionstatechange !== null) {
            self.oniceconnectionstatechange(event);
        }
    };
    this.onnegotiationneeded = null;
    this.peerconnection.onnegotiationneeded = function (event) {
        self.trace('onnegotiationneeded');
        if (self.onnegotiationneeded !== null) {
            self.onnegotiationneeded(event);
        }
    };
    self.ondatachannel = null;
    this.peerconnection.ondatachannel = function (event) {
        self.trace('ondatachannel', event);
        if (self.ondatachannel !== null) {
            self.ondatachannel(event);
        }
    };
    // When stats collection is enabled (maxstats > 0), poll getStats() every
    // second and keep a bounded history per reported value.
    // XXX: do all non-firefox browsers which we support also support this?
    if (!RTCBrowserType.isFirefox() && this.maxstats) {
        this.statsinterval = window.setInterval(function() {
            self.peerconnection.getStats(function(stats) {
                var results = stats.result();
                var now = new Date();
                for (var i = 0; i < results.length; ++i) {
                    results[i].names().forEach(function (name) {
                        var id = results[i].id + '-' + name;
                        if (!self.stats[id]) {
                            self.stats[id] = {
                                startTime: now,
                                endTime: now,
                                values: [],
                                times: []
                            };
                        }
                        self.stats[id].values.push(results[i].stat(name));
                        self.stats[id].times.push(now.getTime());
                        // Cap the history at maxstats entries.
                        if (self.stats[id].values.length > self.maxstats) {
                            self.stats[id].values.shift();
                            self.stats[id].times.shift();
                        }
                        self.stats[id].endTime = now;
                    });
                }
            });
        }, 1000);
    }
}
/**
 * Returns a human-readable string representation of a SessionDescription
 * object, or the empty string when no description was given.
 * @param description the (RTC)SessionDescription-like object to dump.
 */
var dumpSDP = function(description) {
    // Nothing to dump for null/undefined descriptions.
    if (description === null || typeof description === 'undefined') {
        return '';
    }

    var header = 'type: ' + description.type;
    return header + '\r\n' + description.sdp;
};
/**
 * Takes a SessionDescription object and returns a "normalized" version.
 * Currently it only takes care of ordering the a=ssrc lines: SSRCs which
 * appear first in an FID ssrc-group are moved to the front of the m-line,
 * which Chrome requires when rtx is enabled.
 */
var normalizePlanB = function(desc) {
    if (typeof desc !== 'object' || desc === null ||
        typeof desc.sdp !== 'string') {
        console.warn('An empty description was passed as an argument.');
        return desc;
    }

    var transform = require('sdp-transform');
    var session = transform.parse(desc.sdp);

    if (typeof session !== 'undefined' && typeof session.media !== 'undefined' &&
        Array.isArray(session.media)) {
        session.media.forEach(function (mLine) {
            // Chrome appears to be picky about the order in which a=ssrc lines
            // are listed in an m-line when rtx is enabled (and thus there are
            // a=ssrc-group lines with FID semantics). Specifically if we have
            // "a=ssrc-group:FID S1 S2" and the "a=ssrc:S2" lines appear before
            // the "a=ssrc:S1" lines, SRD fails.
            // So, put SSRC which appear as the first SSRC in an FID ssrc-group
            // first.

            // Collect the leading SSRC of every FID group.
            var firstSsrcs = [];
            var newSsrcLines = [];

            if (typeof mLine.ssrcGroups !== 'undefined' && Array.isArray(mLine.ssrcGroups)) {
                mLine.ssrcGroups.forEach(function (group) {
                    if (typeof group.semantics !== 'undefined' &&
                        group.semantics === 'FID') {
                        if (typeof group.ssrcs !== 'undefined') {
                            firstSsrcs.push(Number(group.ssrcs.split(' ')[0]));
                        }
                    }
                });
            }

            if (typeof mLine.ssrcs !== 'undefined' && Array.isArray(mLine.ssrcs)) {
                var i;
                // First pass: move the ssrc lines that lead an FID group to
                // the front.
                // FIX: the previous check `!$.inArray(id, firstSsrcs)` is
                // only truthy when the id sits at index 0 of firstSsrcs, so
                // only the first FID group was ever handled; use a proper
                // membership test instead.
                for (i = 0; i < mLine.ssrcs.length; i++) {
                    if (typeof mLine.ssrcs[i] === 'object'
                        && typeof mLine.ssrcs[i].id !== 'undefined'
                        && firstSsrcs.indexOf(mLine.ssrcs[i].id) !== -1) {
                        newSsrcLines.push(mLine.ssrcs[i]);
                        delete mLine.ssrcs[i];
                    }
                }
                // Second pass: append the remaining ssrc lines in their
                // original order (skipping the holes left by delete).
                for (i = 0; i < mLine.ssrcs.length; i++) {
                    if (typeof mLine.ssrcs[i] !== 'undefined') {
                        newSsrcLines.push(mLine.ssrcs[i]);
                    }
                }
                mLine.ssrcs = newSsrcLines;
            }
        });
    }

    var resStr = transform.write(session);
    return new RTCSessionDescription({
        type: desc.type,
        sdp: resStr
    });
};
// Define pass-through getters for signalingState/iceConnectionState and
// transforming getters for local/remoteDescription (which apply SSRC
// replacement and the Unified Plan -> Plan B conversion where needed).
if (TraceablePeerConnection.prototype.__defineGetter__ !== undefined) {
    TraceablePeerConnection.prototype.__defineGetter__(
        'signalingState',
        function() { return this.peerconnection.signalingState; });
    TraceablePeerConnection.prototype.__defineGetter__(
        'iceConnectionState',
        function() { return this.peerconnection.iceConnectionState; });
    TraceablePeerConnection.prototype.__defineGetter__(
        'localDescription',
        function() {
            var desc = this.peerconnection.localDescription;

            // FIXME this should probably be after the Unified Plan -> Plan B
            // transformation.
            desc = SSRCReplacement.mungeLocalVideoSSRC(desc);

            this.trace('getLocalDescription::preTransform', dumpSDP(desc));
            // if we're running on FF, transform to Plan B first.
            if (RTCBrowserType.usesUnifiedPlan()) {
                desc = this.interop.toPlanB(desc);
                this.trace('getLocalDescription::postTransform (Plan B)', dumpSDP(desc));
            }
            return desc;
        });
    TraceablePeerConnection.prototype.__defineGetter__(
        'remoteDescription',
        function() {
            var desc = this.peerconnection.remoteDescription;
            this.trace('getRemoteDescription::preTransform', dumpSDP(desc));
            // if we're running on FF, transform to Plan B first.
            if (RTCBrowserType.usesUnifiedPlan()) {
                desc = this.interop.toPlanB(desc);
                this.trace('getRemoteDescription::postTransform (Plan B)', dumpSDP(desc));
            }
            return desc;
        });
}
/**
 * Adds 'stream' to the wrapped PeerConnection, recording the call in the
 * update log. Errors are logged rather than propagated.
 */
TraceablePeerConnection.prototype.addStream = function (stream) {
    this.trace('addStream', stream.id);
    try {
        this.peerconnection.addStream(stream);
    } catch (e) {
        // Best-effort: log and continue, matching the rest of this wrapper.
        console.error(e);
    }
};
/**
 * Removes 'stream' from the wrapped PeerConnection, optionally stopping its
 * tracks first.
 * @param stream the MediaStream to remove.
 * @param stopStreams when truthy, RTC.stopMediaStream(stream) is called.
 */
TraceablePeerConnection.prototype.removeStream = function (stream, stopStreams) {
    this.trace('removeStream', stream.id);
    if(stopStreams) {
        RTC.stopMediaStream(stream);
    }
    try {
        // FF doesn't support this yet.
        if (this.peerconnection.removeStream)
            this.peerconnection.removeStream(stream);
    } catch (e) {
        console.error(e);
    }
};
/**
 * Creates a data channel on the wrapped PeerConnection, recording the call
 * in the update log.
 * @returns the RTCDataChannel created by the underlying connection.
 */
TraceablePeerConnection.prototype.createDataChannel = function (label, opts) {
    this.trace('createDataChannel', label, opts);

    return this.peerconnection.createDataChannel(label, opts);
};
/**
 * Sets the local description on the wrapped PeerConnection, first converting
 * it to Unified Plan when the browser requires it (Firefox). Failures are
 * additionally reported via RTCEvents.SET_LOCAL_DESCRIPTION_FAILED.
 */
TraceablePeerConnection.prototype.setLocalDescription
        = function (description, successCallback, failureCallback) {
    this.trace('setLocalDescription::preTransform', dumpSDP(description));
    // if we're running on FF, transform to Plan A first.
    if (RTCBrowserType.usesUnifiedPlan()) {
        description = this.interop.toUnifiedPlan(description);
        this.trace('setLocalDescription::postTransform (Plan A)', dumpSDP(description));
    }

    var self = this;
    this.peerconnection.setLocalDescription(description,
        function () {
            self.trace('setLocalDescriptionOnSuccess');
            successCallback();
        },
        function (err) {
            self.trace('setLocalDescriptionOnFailure', err);
            self.eventEmitter.emit(RTCEvents.SET_LOCAL_DESCRIPTION_FAILED, err, self.peerconnection);
            failureCallback(err);
        }
    );
    /*
     if (this.statsinterval === null && this.maxstats > 0) {
     // start gathering stats
     }
     */
};
/**
 * Sets the remote description on the wrapped PeerConnection after munging
 * simulcast, converting to Unified Plan (Firefox) or normalizing Plan B
 * ssrc ordering (Chrome). Failures are additionally reported via
 * RTCEvents.SET_REMOTE_DESCRIPTION_FAILED.
 */
TraceablePeerConnection.prototype.setRemoteDescription
        = function (description, successCallback, failureCallback) {
    this.trace('setRemoteDescription::preTransform', dumpSDP(description));
    // TODO the focus should squeze or explode the remote simulcast
    description = this.simulcast.mungeRemoteDescription(description);
    this.trace('setRemoteDescription::postTransform (simulcast)', dumpSDP(description));

    // if we're running on FF, transform to Plan A first.
    if (RTCBrowserType.usesUnifiedPlan()) {
        description = this.interop.toUnifiedPlan(description);
        this.trace('setRemoteDescription::postTransform (Plan A)', dumpSDP(description));
    }

    if (RTCBrowserType.usesPlanB()) {
        description = normalizePlanB(description);
    }

    var self = this;
    this.peerconnection.setRemoteDescription(description,
        function () {
            self.trace('setRemoteDescriptionOnSuccess');
            successCallback();
        },
        function (err) {
            self.trace('setRemoteDescriptionOnFailure', err);
            self.eventEmitter.emit(RTCEvents.SET_REMOTE_DESCRIPTION_FAILED, err, self.peerconnection);
            failureCallback(err);
        }
    );
    /*
     if (this.statsinterval === null && this.maxstats > 0) {
     // start gathering stats
     }
     */
};
/**
 * Closes the wrapped PeerConnection, stopping the stats poller if one is
 * running.
 */
TraceablePeerConnection.prototype.close = function () {
    this.trace('stop');

    // Stop the periodic stats collection, if it was started.
    var interval = this.statsinterval;
    if (interval !== null) {
        window.clearInterval(interval);
        this.statsinterval = null;
    }

    this.peerconnection.close();
};
/**
 * Creates an offer on the wrapped PeerConnection, then applies the Plan B
 * conversion (Firefox), local video SSRC munging and, when enabled, the
 * simulcast transformation before invoking 'successCallback'.
 */
TraceablePeerConnection.prototype.createOffer
        = function (successCallback, failureCallback, constraints) {
    var self = this;
    this.trace('createOffer', JSON.stringify(constraints, null, ' '));
    this.peerconnection.createOffer(
        function (offer) {
            self.trace('createOfferOnSuccess::preTransform', dumpSDP(offer));
            // NOTE this is not tested because in meet the focus generates the
            // offer.

            // if we're running on FF, transform to Plan B first.
            if (RTCBrowserType.usesUnifiedPlan()) {
                offer = self.interop.toPlanB(offer);
                self.trace('createOfferOnSuccess::postTransform (Plan B)', dumpSDP(offer));
            }

            offer = SSRCReplacement.mungeLocalVideoSSRC(offer);

            if (config.enableSimulcast && self.simulcast.isSupported()) {
                offer = self.simulcast.mungeLocalDescription(offer);
                self.trace('createOfferOnSuccess::postTransform (simulcast)', dumpSDP(offer));
            }
            successCallback(offer);
        },
        function(err) {
            self.trace('createOfferOnFailure', err);
            self.eventEmitter.emit(RTCEvents.CREATE_OFFER_FAILED, err, self.peerconnection);
            failureCallback(err);
        },
        constraints
    );
};
/**
 * Creates an answer on the wrapped PeerConnection, then applies the Plan B
 * conversion (Firefox), local video SSRC munging and, when enabled, the
 * simulcast transformation before invoking 'successCallback'.
 */
TraceablePeerConnection.prototype.createAnswer
        = function (successCallback, failureCallback, constraints) {
    var self = this;
    this.trace('createAnswer', JSON.stringify(constraints, null, ' '));
    this.peerconnection.createAnswer(
        function (answer) {
            self.trace('createAnswerOnSuccess::preTransform', dumpSDP(answer));
            // if we're running on FF, transform to Plan A first.
            if (RTCBrowserType.usesUnifiedPlan()) {
                answer = self.interop.toPlanB(answer);
                self.trace('createAnswerOnSuccess::postTransform (Plan B)', dumpSDP(answer));
            }

            // munge local video SSRC
            answer = SSRCReplacement.mungeLocalVideoSSRC(answer);

            if (config.enableSimulcast && self.simulcast.isSupported()) {
                answer = self.simulcast.mungeLocalDescription(answer);
                self.trace('createAnswerOnSuccess::postTransform (simulcast)', dumpSDP(answer));
            }
            successCallback(answer);
        },
        function(err) {
            self.trace('createAnswerOnFailure', err);
            self.eventEmitter.emit(RTCEvents.CREATE_ANSWER_FAILED, err, self.peerconnection);
            failureCallback(err);
        },
        constraints
    );
};
/**
 * Adds an ICE candidate to the wrapped PeerConnection.
 * NOTE: the success/failure callbacks are currently NOT invoked - the
 * callback-based form of addIceCandidate is kept below, commented out, for
 * possible future use.
 */
TraceablePeerConnection.prototype.addIceCandidate
        = function (candidate, successCallback, failureCallback) {
    //var self = this;
    this.trace('addIceCandidate', JSON.stringify(candidate, null, ' '));
    this.peerconnection.addIceCandidate(candidate);
    /* maybe later
     this.peerconnection.addIceCandidate(candidate,
     function () {
     self.trace('addIceCandidateOnSuccess');
     successCallback();
     },
     function (err) {
     self.trace('addIceCandidateOnFailure', err);
     failureCallback(err);
     }
     );
     */
};
/**
 * Retrieves stats from the wrapped PeerConnection, papering over the
 * different getStats() signatures of Firefox and Chrome-like browsers.
 * @param callback invoked with the stats report.
 * @param errback optional error callback (Firefox only).
 */
TraceablePeerConnection.prototype.getStats = function(callback, errback) {
    // TODO: Is this the correct way to handle Opera, Temasys?
    if (!RTCBrowserType.isFirefox()) {
        this.peerconnection.getStats(callback);
        return;
    }

    // Firefox uses the (selector, success, failure) signature; make sure a
    // failure callback is always supplied.
    this.peerconnection.getStats(null, callback, errback || function () {});
};
module.exports = TraceablePeerConnection;

View File

@ -1,434 +0,0 @@
/* global $, $iq, APP, config, messageHandler,
roomName, sessionTerminated, Strophe, Util */
var XMPPEvents = require("../../service/xmpp/XMPPEvents");
var Settings = require("../settings/Settings");
var AuthenticationEvents
= require("../../service/authentication/AuthenticationEvents");
/**
* Contains logic responsible for enabling/disabling functionality available
* only to moderator users.
*/
var connection = null;
var focusUserJid;
/**
 * Creates an exponential backoff timer.
 * @param step base timeout in milliseconds.
 * @returns a function which returns the next timeout in the sequence
 * (step, 2*step, 4*step, ...); calling it with a truthy argument resets
 * the sequence (and returns undefined).
 */
function createExpBackoffTimer(step) {
    var count = 1;

    return function (reset) {
        // A truthy argument restarts the backoff sequence.
        if (reset) {
            count = 1;
            return;
        }

        // The timeout doubles on every call.
        var timeout = step * Math.pow(2, count - 1);
        count += 1;
        return timeout;
    };
}
// Backoff timers used while waiting for the focus to become available (one
// for the normal retry loop, one for the error path).
var getNextTimeout = createExpBackoffTimer(1000);
var getNextErrorTimeout = createExpBackoffTimer(1000);
// External authentication stuff
var externalAuthEnabled = false;
// Sip gateway can be enabled by configuring Jigasi host in config.js or
// it will be enabled automatically if focus detects the component through
// service discovery.
var sipGatewayEnabled;
// Emitter used to broadcast authentication/XMPP events to the application.
var eventEmitter = null;
var Moderator = {
// Whether the local user currently has the moderator role in the MUC.
isModerator: function () {
    return connection && connection.emuc.isModerator();
},
// Whether the given occupant (full MUC JID) has the moderator role.
isPeerModerator: function (peerJid) {
    return connection &&
        connection.emuc.getMemberRole(peerJid) === 'moderator';
},
// Whether the focus reported external (popup-based) authentication.
isExternalAuthEnabled: function () {
    return externalAuthEnabled;
},
// Whether a SIP gateway (Jigasi) is available for this deployment.
isSipGatewayEnabled: function () {
    return sipGatewayEnabled;
},
// Injects the XMPP connection used to talk to the focus component.
setConnection: function (con) {
    connection = con;
},
// Initializes the module: stores the xmpp service and event emitter,
// enables the SIP gateway when call_control is configured, and starts
// listening for the sessionId posted by the authentication popup window.
init: function (xmpp, emitter) {
    this.xmppService = xmpp;
    eventEmitter = emitter;

    sipGatewayEnabled =
        config.hosts && config.hosts.call_control !== undefined;

    // Message listener that talks to POPUP window
    function listener(event) {
        if (event.data && event.data.sessionId) {
            // Only accept the sessionId from our own origin.
            if (event.origin !== window.location.origin) {
                console.warn("Ignoring sessionId from different origin: " +
                    event.origin);
                return;
            }
            localStorage.setItem('sessionId', event.data.sessionId);
            // After popup is closed we will authenticate
        }
    }
    // Register
    if (window.addEventListener) {
        window.addEventListener("message", listener, false);
    } else {
        // Legacy IE fallback.
        window.attachEvent("onmessage", listener);
    }
},
// Called whenever a member leaves the MUC; when that member is the focus
// (and the session was not deliberately terminated) the page is reloaded,
// since the conference cannot continue without a focus.
onMucMemberLeft: function (jid) {
    console.info("Someone left is it focus ? " + jid);
    var resource = Strophe.getResourceFromJid(jid);
    if (resource === 'focus' && !this.xmppService.sessionTerminated) {
        console.info(
            "Focus has left the room - leaving conference");
        //hangUp();
        // We'd rather reload to have everything re-initialized
        // FIXME: show some message before reload
        location.reload();
    }
},
// Remembers the focus user's JID; only the first non-empty value wins.
setFocusUserJid: function (focusJid) {
    if (!focusUserJid) {
        focusUserJid = focusJid;
        console.info("Focus jid set to: " + focusUserJid);
    }
},
// Returns the focus user's JID (may be undefined before it is learned).
getFocusUserJid: function () {
    return focusUserJid;
},
// Returns the focus component address, defaulting to 'focus.<domain>'.
getFocusComponent: function () {
    // Get focus component address
    var focusComponent = config.hosts.focus;
    // If not specified use default: 'focus.domain'
    if (!focusComponent) {
        focusComponent = 'focus.' + config.hosts.domain;
    }
    return focusComponent;
},
createConferenceIq: function (roomName) {
// Generate create conference IQ
var elem = $iq({to: Moderator.getFocusComponent(), type: 'set'});
// Session Id used for authentication
var sessionId = localStorage.getItem('sessionId');
var machineUID = Settings.getSettings().uid;
console.info(
"Session ID: " + sessionId + " machine UID: " + machineUID);
elem.c('conference', {
xmlns: 'http://jitsi.org/protocol/focus',
room: roomName,
'machine-uid': machineUID
});
if (sessionId) {
elem.attrs({ 'session-id': sessionId});
}
if (config.hosts.bridge !== undefined) {
elem.c(
'property',
{ name: 'bridge', value: config.hosts.bridge})
.up();
}
// Tell the focus we have Jigasi configured
if (config.hosts.call_control !== undefined) {
elem.c(
'property',
{ name: 'call_control', value: config.hosts.call_control})
.up();
}
if (config.channelLastN !== undefined) {
elem.c(
'property',
{ name: 'channelLastN', value: config.channelLastN})
.up();
}
if (config.adaptiveLastN !== undefined) {
elem.c(
'property',
{ name: 'adaptiveLastN', value: config.adaptiveLastN})
.up();
}
if (config.adaptiveSimulcast !== undefined) {
elem.c(
'property',
{ name: 'adaptiveSimulcast', value: config.adaptiveSimulcast})
.up();
}
if (config.openSctp !== undefined) {
elem.c(
'property',
{ name: 'openSctp', value: config.openSctp})
.up();
}
if(config.startAudioMuted !== undefined)
{
elem.c(
'property',
{ name: 'startAudioMuted', value: config.startAudioMuted})
.up();
}
if(config.startVideoMuted !== undefined)
{
elem.c(
'property',
{ name: 'startVideoMuted', value: config.startVideoMuted})
.up();
}
elem.c(
'property',
{ name: 'simulcastMode', value: 'rewriting'})
.up();
elem.up();
return elem;
},
parseSessionId: function (resultIq) {
var sessionId = $(resultIq).find('conference').attr('session-id');
if (sessionId) {
console.info('Received sessionId: ' + sessionId);
localStorage.setItem('sessionId', sessionId);
}
},
// Extracts configuration advertised by the focus from its conference IQ
// result: the focus JID, authentication flags and SIP gateway availability.
parseConfigOptions: function (resultIq) {

    Moderator.setFocusUserJid(
        $(resultIq).find('conference').attr('focusjid'));

    var authenticationEnabled
        = $(resultIq).find(
            '>conference>property' +
            '[name=\'authentication\'][value=\'true\']').length > 0;

    console.info("Authentication enabled: " + authenticationEnabled);

    externalAuthEnabled = $(resultIq).find(
            '>conference>property' +
            '[name=\'externalAuth\'][value=\'true\']').length > 0;

    console.info('External authentication enabled: ' + externalAuthEnabled);

    if (!externalAuthEnabled) {
        // We expect to receive sessionId in 'internal' authentication mode
        Moderator.parseSessionId(resultIq);
    }

    var authIdentity = $(resultIq).find('>conference').attr('identity');

    eventEmitter.emit(AuthenticationEvents.IDENTITY_UPDATED,
        authenticationEnabled, authIdentity);

    // Check if focus has auto-detected Jigasi component(this will be also
    // included if we have passed our host from the config)
    if ($(resultIq).find(
        '>conference>property' +
        '[name=\'sipGatewayEnabled\'][value=\'true\']').length) {
        sipGatewayEnabled = true;
    }

    console.info("Sip gateway enabled: " + sipGatewayEnabled);
},
// FIXME: we need to show the fact that we're waiting for the focus
// to the user(or that focus is not available)
allocateConferenceFocus: function (roomName, callback) {
// Try to use focus user JID from the config
Moderator.setFocusUserJid(config.focusUserJid);
// Send create conference IQ
var iq = Moderator.createConferenceIq(roomName);
var self = this;
connection.sendIQ(
iq,
function (result) {
// Setup config options
Moderator.parseConfigOptions(result);
if ('true' === $(result).find('conference').attr('ready')) {
// Reset both timers
getNextTimeout(true);
getNextErrorTimeout(true);
// Exec callback
callback();
} else {
var waitMs = getNextTimeout();
console.info("Waiting for the focus... " + waitMs);
// Reset error timeout
getNextErrorTimeout(true);
window.setTimeout(
function () {
Moderator.allocateConferenceFocus(
roomName, callback);
}, waitMs);
}
},
function (error) {
// Invalid session ? remove and try again
// without session ID to get a new one
var invalidSession
= $(error).find('>error>session-invalid').length;
if (invalidSession) {
console.info("Session expired! - removing");
localStorage.removeItem("sessionId");
}
if ($(error).find('>error>graceful-shutdown').length) {
eventEmitter.emit(XMPPEvents.GRACEFUL_SHUTDOWN);
return;
}
// Check for error returned by the reservation system
var reservationErr = $(error).find('>error>reservation-error');
if (reservationErr.length) {
// Trigger error event
var errorCode = reservationErr.attr('error-code');
var errorMsg;
if ($(error).find('>error>text')) {
errorMsg = $(error).find('>error>text').text();
}
eventEmitter.emit(
XMPPEvents.RESERVATION_ERROR, errorCode, errorMsg);
return;
}
// Not authorized to create new room
if ($(error).find('>error>not-authorized').length) {
console.warn("Unauthorized to start the conference", error);
var toDomain
= Strophe.getDomainFromJid(error.getAttribute('to'));
if (toDomain !== config.hosts.anonymousdomain) {
// FIXME: "is external" should come either from
// the focus or config.js
externalAuthEnabled = true;
}
eventEmitter.emit(
XMPPEvents.AUTHENTICATION_REQUIRED,
function () {
Moderator.allocateConferenceFocus(
roomName, callback);
});
return;
}
var waitMs = getNextErrorTimeout();
console.error("Focus error, retry after " + waitMs, error);
// Show message
var focusComponent = Moderator.getFocusComponent();
var retrySec = waitMs / 1000;
// FIXME: message is duplicated ?
// Do not show in case of session invalid
// which means just a retry
if (!invalidSession) {
eventEmitter.emit(XMPPEvents.FOCUS_DISCONNECTED,
focusComponent, retrySec);
}
// Reset response timeout
getNextTimeout(true);
window.setTimeout(
function () {
Moderator.allocateConferenceFocus(roomName, callback);
}, waitMs);
}
);
},
getLoginUrl: function (roomName, urlCallback) {
var iq = $iq({to: Moderator.getFocusComponent(), type: 'get'});
iq.c('login-url', {
xmlns: 'http://jitsi.org/protocol/focus',
room: roomName,
'machine-uid': Settings.getSettings().uid
});
connection.sendIQ(
iq,
function (result) {
var url = $(result).find('login-url').attr('url');
url = url = decodeURIComponent(url);
if (url) {
console.info("Got auth url: " + url);
urlCallback(url);
} else {
console.error(
"Failed to get auth url from the focus", result);
}
},
function (error) {
console.error("Get auth url error", error);
}
);
},
getPopupLoginUrl: function (roomName, urlCallback) {
var iq = $iq({to: Moderator.getFocusComponent(), type: 'get'});
iq.c('login-url', {
xmlns: 'http://jitsi.org/protocol/focus',
room: roomName,
'machine-uid': Settings.getSettings().uid,
popup: true
});
connection.sendIQ(
iq,
function (result) {
var url = $(result).find('login-url').attr('url');
url = url = decodeURIComponent(url);
if (url) {
console.info("Got POPUP auth url: " + url);
urlCallback(url);
} else {
console.error(
"Failed to get POPUP auth url from the focus", result);
}
},
function (error) {
console.error('Get POPUP auth url error', error);
}
);
},
logout: function (callback) {
var iq = $iq({to: Moderator.getFocusComponent(), type: 'set'});
var sessionId = localStorage.getItem('sessionId');
if (!sessionId) {
callback();
return;
}
iq.c('logout', {
xmlns: 'http://jitsi.org/protocol/focus',
'session-id': sessionId
});
connection.sendIQ(
iq,
function (result) {
var logoutUrl = $(result).find('logout').attr('logout-url');
if (logoutUrl) {
logoutUrl = decodeURIComponent(logoutUrl);
}
console.info("Log out OK, url: " + logoutUrl, result);
localStorage.removeItem('sessionId');
callback(logoutUrl);
},
function (error) {
console.error("Logout error", error);
}
);
}
};
module.exports = Moderator;

View File

@ -1,178 +0,0 @@
/* global $, $iq, config, connection, focusMucJid, messageHandler,
Toolbar, Util */
var Moderator = require("./moderator");
// Token required by the colibri recording backend; null until the user
// supplies one via the token prompt.
var recordingToken = null;
// Last known recording state ('on'/'off'/'pending'); undefined until the
// first state change.
var recordingEnabled;
/**
 * Whether to use a jirecon component for recording, or use the videobridge
 * through COLIBRI.
 */
var useJirecon;
/**
 * The ID of the jirecon recording session. Jirecon generates it when we
 * initially start recording, and it needs to be used in subsequent requests
 * to jirecon.
 */
var jireconRid = null;
/**
 * The callback to update the recording button. Currently used from colibri
 * after receiving a pending status.
 */
var recordingStateChangeCallback = null;
// Stores the recording token entered by the user.
function setRecordingToken(token) {
    recordingToken = token;
}
/**
 * Starts or stops recording through the jirecon component.
 * @param state desired state: 'on' or 'off'.
 * @param token unused - jirecon does not (currently) support a token.
 * @param callback invoked with the resulting state ('on'/'off' on success,
 *        the previous state on failure).
 * @param connection the Strophe connection used to send the IQ.
 */
function setRecordingJirecon(state, token, callback, connection) {
    // Already in the requested state - nothing to do.
    // (was `==`; both sides are strings so strict equality is safe)
    if (state === recordingEnabled) {
        return;
    }
    var iq = $iq({to: config.hosts.jirecon, type: 'set'})
        .c('recording', {xmlns: 'http://jitsi.org/protocol/jirecon',
            action: (state === 'on') ? 'start' : 'stop',
            mucjid: connection.emuc.roomjid});
    if (state === 'off'){
        // Stop requests must reference the session jirecon gave us.
        iq.attrs({rid: jireconRid});
    }
    // BUG FIX: the old log said 'Start recording' even when stopping.
    console.log(((state === 'on') ? 'Starting' : 'Stopping') +
        ' recording (jirecon)');
    connection.sendIQ(
        iq,
        function (result) {
            // TODO wait for an IQ with the real status, since this is
            // provisional?
            jireconRid = $(result).find('recording').attr('rid');
            console.log('Recording ' +
                ((state === 'on') ? 'started' : 'stopped') +
                '(jirecon)' + result);
            recordingEnabled = state;
            if (state === 'off'){
                jireconRid = null;
            }
            callback(state);
        },
        function (error) {
            console.log('Failed to start recording, error: ', error);
            callback(recordingEnabled);
        });
}
// Sends a COLIBRI message which enables or disables (according to 'state')
// the recording on the bridge. Waits for the result IQ and calls 'callback'
// with the new recording state, according to the IQ.
function setRecordingColibri(state, token, callback, connection) {
    var elem = $iq({to: connection.emuc.focusMucJid, type: 'set'});
    elem.c('conference', {
        xmlns: 'http://jitsi.org/protocol/colibri'
    });
    elem.c('recording', {state: state, token: token});
    connection.sendIQ(elem,
        function (result) {
            console.log('Set recording "', state, '". Result:', result);
            var recordingElem = $(result).find('>conference>recording');
            var newState = recordingElem.attr('state');
            recordingEnabled = newState;
            callback(newState);
            // A 'pending' result means the bridge will push the final state
            // later; register a one-time IQ handler that forwards subsequent
            // state updates to the original callback.
            if (newState === 'pending' && !recordingStateChangeCallback) {
                recordingStateChangeCallback = callback;
                connection.addHandler(function(iq){
                    var state = $(iq).find('recording').attr('state');
                    if (state)
                        recordingStateChangeCallback(state);
                }, 'http://jitsi.org/protocol/colibri', 'iq', null, null, null);
            }
        },
        function (error) {
            // On error report the last known state so the UI stays in sync.
            console.warn(error);
            callback(recordingEnabled);
        }
    );
}
/**
 * Dispatches a recording state change to whichever backend is configured:
 * jirecon when a jirecon host is set, COLIBRI via the bridge otherwise.
 */
function setRecording(state, token, callback, connection) {
    var impl = useJirecon ? setRecordingJirecon : setRecordingColibri;
    impl(state, token, callback, connection);
}
/**
 * Public recording API: decides which backend to use and toggles the
 * recording state on behalf of the moderator.
 */
var Recording = {
    /**
     * Detects whether a jirecon component is configured; must be called
     * before toggleRecording().
     */
    init: function () {
        // (was `!=`; use strict inequality per project/js conventions)
        useJirecon = config.hosts &&
            (typeof config.hosts.jirecon !== "undefined");
    },
    /**
     * Toggles recording on/off. Moderator-only; when the colibri backend is
     * used and no token is stored yet, first asks the user for one via
     * tokenEmptyCallback and retries.
     * @param tokenEmptyCallback invoked with a setter when a token is needed.
     * @param recordingStateChangeCallback invoked with the new state.
     * @param connection the Strophe connection used to send IQs.
     */
    toggleRecording: function (tokenEmptyCallback,
                               recordingStateChangeCallback,
                               connection) {
        if (!Moderator.isModerator()) {
            console.log(
                    'non-focus, or conference not yet organized:' +
                    ' not enabling recording');
            return;
        }
        var self = this;
        // Jirecon does not (currently) support a token.
        if (!recordingToken && !useJirecon) {
            tokenEmptyCallback(function (value) {
                setRecordingToken(value);
                self.toggleRecording(tokenEmptyCallback,
                                     recordingStateChangeCallback,
                                     connection);
            });
            return;
        }
        var oldState = recordingEnabled;
        var newState = (oldState === 'off' || !oldState) ? 'on' : 'off';
        setRecording(newState,
            recordingToken,
            function (state) {
                console.log("New recording state: ", state);
                if (state === oldState) {
                    // FIXME: new focus:
                    // this will not work when moderator changes
                    // during active session. Then it will assume that
                    // recording status has changed to true, but it might have
                    // been already true(and we only received actual status from
                    // the focus).
                    //
                    // SO we start with status null, so that it is initialized
                    // here and will fail only after second click, so if invalid
                    // token was used we have to press the button twice before
                    // current status will be fetched and token will be reset.
                    //
                    // Reliable way would be to return authentication error.
                    // Or status update when moderator connects.
                    // Or we have to stop recording session when current
                    // moderator leaves the room.

                    // Failed to change, reset the token because it might
                    // have been wrong
                    setRecordingToken(null);
                }
                recordingStateChangeCallback(state);
            },
            connection
        );
    }
};
module.exports = Recording;

View File

@ -1,706 +0,0 @@
/* jshint -W117 */
/* a simple MUC connection plugin
* can only handle a single MUC room
*/
var XMPPEvents = require("../../service/xmpp/XMPPEvents");
var Moderator = require("./moderator");
module.exports = function(XMPP, eventEmitter) {
Strophe.addConnectionPlugin('emuc', {
        connection: null,           // underlying Strophe connection
        roomjid: null,              // bare JID of the MUC room
        myroomjid: null,            // our own full occupant JID
        members: {},                // occupant JID -> member info object
        list_members: [], // so we can elect a new focus
        presMap: {},                // fields to include in our next presence
        preziMap: {},               // occupant JID -> active prezi URL
        lastPresenceMap: {},        // occupant JID -> last parsed presence bits
        joined: false,              // whether we have joined the room
        isOwner: false,             // whether we own (created) the room
        role: null,                 // our current MUC role
        focusMucJid: null,          // full occupant JID of the focus, if seen
        bridgeIsDown: false,        // set once a bridgeIsDown presence arrives
        // Strophe plugin hook: stores the connection for later use.
        init: function (conn) {
            this.connection = conn;
        },
        // Seeds presMap for a (re)join: destination JID, MUC namespace, and
        // current local mute states so peers see them immediately.
        initPresenceMap: function (myroomjid) {
            this.presMap['to'] = myroomjid;
            this.presMap['xns'] = 'http://jabber.org/protocol/muc';
            if (APP.RTC.localAudio && APP.RTC.localAudio.isMuted()) {
                this.addAudioInfoToPresence(true);
            }
            if (APP.RTC.localVideo && APP.RTC.localVideo.isMuted()) {
                this.addVideoInfoToPresence(true);
            }
        },
        // Joins the MUC room as the given full JID, optionally with a room
        // password, registering the stanza handlers on the first join.
        doJoin: function (jid, password) {
            this.myroomjid = jid;
            // NOTE(review): logged before the join presence is actually
            // sent/acknowledged - message is optimistic.
            console.info("Joined MUC as " + this.myroomjid);
            this.initPresenceMap(this.myroomjid);
            if (!this.roomjid) {
                this.roomjid = Strophe.getBareJidFromJid(jid);
                // add handlers (just once)
                this.connection.addHandler(this.onPresence.bind(this), null, 'presence', null, null, this.roomjid, {matchBare: true});
                this.connection.addHandler(this.onPresenceUnavailable.bind(this), null, 'presence', 'unavailable', null, this.roomjid, {matchBare: true});
                this.connection.addHandler(this.onPresenceError.bind(this), null, 'presence', 'error', null, this.roomjid, {matchBare: true});
                this.connection.addHandler(this.onMessage.bind(this), null, 'message', null, null, this.roomjid, {matchBare: true});
            }
            if (password !== undefined) {
                this.presMap['password'] = password;
            }
            this.sendPresence();
        },
doLeave: function () {
console.log("do leave", this.myroomjid);
var pres = $pres({to: this.myroomjid, type: 'unavailable' });
this.presMap.length = 0;
this.connection.send(pres);
},
createNonAnonymousRoom: function () {
// http://xmpp.org/extensions/xep-0045.html#createroom-reserved
var getForm = $iq({type: 'get', to: this.roomjid})
.c('query', {xmlns: 'http://jabber.org/protocol/muc#owner'})
.c('x', {xmlns: 'jabber:x:data', type: 'submit'});
var self = this;
this.connection.sendIQ(getForm, function (form) {
if (!$(form).find(
'>query>x[xmlns="jabber:x:data"]' +
'>field[var="muc#roomconfig_whois"]').length) {
console.error('non-anonymous rooms not supported');
return;
}
var formSubmit = $iq({to: this.roomjid, type: 'set'})
.c('query', {xmlns: 'http://jabber.org/protocol/muc#owner'});
formSubmit.c('x', {xmlns: 'jabber:x:data', type: 'submit'});
formSubmit.c('field', {'var': 'FORM_TYPE'})
.c('value')
.t('http://jabber.org/protocol/muc#roomconfig').up().up();
formSubmit.c('field', {'var': 'muc#roomconfig_whois'})
.c('value').t('anyone').up().up();
self.connection.sendIQ(formSubmit);
}, function (error) {
console.error("Error getting room configuration form");
});
},
        /**
         * Handler for every <presence/> stanza from the room. Parses the
         * custom child elements (etherpad, prezi, mute states, devices,
         * stats, ...) and emits the corresponding XMPPEvents; also tracks
         * membership, roles and focus recognition.
         * Always returns true so Strophe keeps the handler installed.
         */
        onPresence: function (pres) {
            var from = pres.getAttribute('from');
            // What is this for? A workaround for something?
            if (pres.getAttribute('type')) {
                return true;
            }
            // Parse etherpad tag.
            var etherpad = $(pres).find('>etherpad');
            if (etherpad.length) {
                if (config.etherpad_base) {
                    eventEmitter.emit(XMPPEvents.ETHERPAD, etherpad.text());
                }
            }
            var url;
            // Parse prezi tag: first sighting of a URL from this occupant
            // adds the presentation; a repeat with the same URL is a slide
            // change; a presence without the tag removes it.
            var presentation = $(pres).find('>prezi');
            if (presentation.length) {
                url = presentation.attr('url');
                var current = presentation.find('>current').text();
                console.log('presentation info received from', from, url);
                if (this.preziMap[from] == null) {
                    this.preziMap[from] = url;
                    $(document).trigger('presentationadded.muc', [from, url, current]);
                }
                else {
                    $(document).trigger('gotoslide.muc', [from, url, current]);
                }
            }
            else if (this.preziMap[from] != null) {
                url = this.preziMap[from];
                delete this.preziMap[from];
                $(document).trigger('presentationremoved.muc', [from, url]);
            }
            // store the last presence for participant
            this.lastPresenceMap[from] = {};
            // Parse audio info tag.
            var audioMuted = $(pres).find('>audiomuted');
            if (audioMuted.length) {
                eventEmitter.emit(XMPPEvents.PARTICIPANT_AUDIO_MUTED,
                    from, (audioMuted.text() === "true"));
            }
            // Parse video info tag.
            var videoMuted = $(pres).find('>videomuted');
            if (videoMuted.length) {
                var value = (videoMuted.text() === "true");
                this.lastPresenceMap[from].videoMuted = value;
                eventEmitter.emit(XMPPEvents.PARTICIPANT_VIDEO_MUTED, from, value);
            }
            // 'startmuted' is only honoured when sent by a moderator.
            var startMuted = $(pres).find('>startmuted');
            if (startMuted.length && Moderator.isPeerModerator(from)) {
                eventEmitter.emit(XMPPEvents.START_MUTED_SETTING_CHANGED,
                    startMuted.attr("audio") === "true",
                    startMuted.attr("video") === "true");
            }
            // Device availability (has audio / has video) announced by peer.
            var devices = $(pres).find('>devices');
            if(devices.length)
            {
                var audio = devices.find('>audio');
                var video = devices.find('>video');
                var devicesValues = {audio: false, video: false};
                if(audio.length && audio.text() === "true")
                {
                    devicesValues.audio = true;
                }
                if(video.length && video.text() === "true")
                {
                    devicesValues.video = true;
                }
                eventEmitter.emit(XMPPEvents.DEVICE_AVAILABLE,
                    Strophe.getResourceFromJid(from), devicesValues);
            }
            // Video source type ('camera'/'screen') announced by peer.
            var videoType = $(pres).find('>videoType');
            if (videoType.length)
            {
                if (videoType.text().length)
                {
                    eventEmitter.emit(XMPPEvents.PARTICIPANT_VIDEO_TYPE_CHANGED,
                        Strophe.getResourceFromJid(from), videoType.text());
                }
            }
            // Connection-quality stats embedded in presence.
            var stats = $(pres).find('>stats');
            if (stats.length) {
                var statsObj = {};
                Strophe.forEachChild(stats[0], "stat", function (el) {
                    statsObj[el.getAttribute("name")] = el.getAttribute("value");
                });
                eventEmitter.emit(XMPPEvents.REMOTE_STATS, from, statsObj);
            }
            // Parse status. Code 201 means we just created the room
            // (XEP-0045), so reconfigure it as non-anonymous.
            if ($(pres).find('>x[xmlns="http://jabber.org/protocol/muc#user"]>status[code="201"]').length) {
                this.isOwner = true;
                this.createNonAnonymousRoom();
            }
            // Parse roles.
            var member = {};
            member.show = $(pres).find('>show').text();
            member.status = $(pres).find('>status').text();
            var tmp = $(pres).find('>x[xmlns="http://jabber.org/protocol/muc#user"]>item');
            member.affiliation = tmp.attr('affiliation');
            member.role = tmp.attr('role');
            // Focus recognition
            member.jid = tmp.attr('jid');
            member.isFocus = false;
            if (member.jid
                && member.jid.indexOf(Moderator.getFocusUserJid() + "/") == 0) {
                member.isFocus = true;
            }
            var nicktag = $(pres).find('>nick[xmlns="http://jabber.org/protocol/nick"]');
            member.displayName = (nicktag.length > 0 ? nicktag.text() : null);
            if (from == this.myroomjid) {
                // Our own presence: track role changes and the initial join.
                if (member.affiliation == 'owner') this.isOwner = true;
                if (this.role !== member.role) {
                    this.role = member.role;
                    eventEmitter.emit(XMPPEvents.LOCAL_ROLE_CHANGED,
                        from, member, pres, Moderator.isModerator());
                }
                if (!this.joined) {
                    this.joined = true;
                    console.log("(TIME) MUC joined:\t",
                                window.performance.now());
                    eventEmitter.emit(XMPPEvents.MUC_JOINED, from, member);
                    this.list_members.push(from);
                }
            } else if (this.members[from] === undefined) {
                // new participant
                this.members[from] = member;
                this.list_members.push(from);
                console.log('entered', from, member);
                if (member.isFocus) {
                    this.focusMucJid = from;
                    console.info("Ignore focus: " + from + ", real JID: " + member.jid);
                }
                else {
                    // Prefer the email as a stable participant id when set.
                    var id = $(pres).find('>userId').text();
                    var email = $(pres).find('>email');
                    if (email.length > 0) {
                        id = email.text();
                    }
                    eventEmitter.emit(XMPPEvents.MUC_MEMBER_JOINED, from, id, member.displayName);
                }
            } else {
                // Presence update for existing participant
                // Watch role change:
                if (this.members[from].role != member.role) {
                    this.members[from].role = member.role;
                    eventEmitter.emit(XMPPEvents.MUC_ROLE_CHANGED,
                        member.role, member.displayName);
                }
                // store the new
                if(member.displayName)
                    this.members[from].displayName = member.displayName;
            }
            // Always trigger presence to update bindings
            this.parsePresence(from, member, pres);
            // Trigger status message update
            if (member.status) {
                eventEmitter.emit(XMPPEvents.PRESENCE_STATUS, from, member);
            }
            return true;
        },
onPresenceUnavailable: function (pres) {
var from = pres.getAttribute('from');
// room destroyed ?
if ($(pres).find('>x[xmlns="http://jabber.org/protocol/muc#user"]' +
'>destroy').length) {
var reason;
var reasonSelect = $(pres).find(
'>x[xmlns="http://jabber.org/protocol/muc#user"]' +
'>destroy>reason');
if (reasonSelect.length) {
reason = reasonSelect.text();
}
XMPP.disposeConference(false);
eventEmitter.emit(XMPPEvents.MUC_DESTROYED, reason);
return true;
}
// Status code 110 indicates that this notification is "self-presence".
if (!$(pres).find('>x[xmlns="http://jabber.org/protocol/muc#user"]>status[code="110"]').length) {
delete this.members[from];
this.list_members.splice(this.list_members.indexOf(from), 1);
this.onParticipantLeft(from);
}
// If the status code is 110 this means we're leaving and we would like
// to remove everyone else from our view, so we trigger the event.
else if (this.list_members.length > 1) {
for (var i = 0; i < this.list_members.length; i++) {
var member = this.list_members[i];
delete this.members[i];
this.list_members.splice(i, 1);
this.onParticipantLeft(member);
}
}
if ($(pres).find('>x[xmlns="http://jabber.org/protocol/muc#user"]>status[code="307"]').length) {
$(document).trigger('kicked.muc', [from]);
if (this.myroomjid === from) {
XMPP.disposeConference(false);
eventEmitter.emit(XMPPEvents.KICKED);
}
}
if (this.lastPresenceMap[from] != null) {
delete this.lastPresenceMap[from];
}
return true;
},
onPresenceError: function (pres) {
var from = pres.getAttribute('from');
if ($(pres).find('>error[type="auth"]>not-authorized[xmlns="urn:ietf:params:xml:ns:xmpp-stanzas"]').length) {
console.log('on password required', from);
var self = this;
eventEmitter.emit(XMPPEvents.PASSWORD_REQUIRED, function (value) {
self.doJoin(from, value);
});
} else if ($(pres).find(
'>error[type="cancel"]>not-allowed[xmlns="urn:ietf:params:xml:ns:xmpp-stanzas"]').length) {
var toDomain = Strophe.getDomainFromJid(pres.getAttribute('to'));
if (toDomain === config.hosts.anonymousdomain) {
// enter the room by replying with 'not-authorized'. This would
// result in reconnection from authorized domain.
// We're either missing Jicofo/Prosody config for anonymous
// domains or something is wrong.
// XMPP.promptLogin();
eventEmitter.emit(XMPPEvents.ROOM_JOIN_ERROR, pres);
} else {
console.warn('onPresError ', pres);
eventEmitter.emit(XMPPEvents.ROOM_CONNECT_ERROR, pres);
}
} else {
console.warn('onPresError ', pres);
eventEmitter.emit(XMPPEvents.ROOM_CONNECT_ERROR, pres);
}
return true;
},
sendMessage: function (body, nickname) {
var msg = $msg({to: this.roomjid, type: 'groupchat'});
msg.c('body', body).up();
if (nickname) {
msg.c('nick', {xmlns: 'http://jabber.org/protocol/nick'}).t(nickname).up().up();
}
this.connection.send(msg);
eventEmitter.emit(XMPPEvents.SENDING_CHAT_MESSAGE, body);
},
setSubject: function (subject) {
var msg = $msg({to: this.roomjid, type: 'groupchat'});
msg.c('subject', subject);
this.connection.send(msg);
console.log("topic changed to " + subject);
},
        /**
         * Handler for groupchat <message/> stanzas: chat errors, subject
         * changes and chat text (including delayed-delivery history).
         * Always returns true to keep the handler installed.
         */
        onMessage: function (msg) {
            // FIXME: this is a hack. but jingle on muc makes nickchanges hard
            var from = msg.getAttribute('from');
            var nick =
                $(msg).find('>nick[xmlns="http://jabber.org/protocol/nick"]')
                    .text() ||
                Strophe.getResourceFromJid(from);

            var txt = $(msg).find('>body').text();
            var type = msg.getAttribute("type");
            if (type == "error") {
                eventEmitter.emit(XMPPEvents.CHAT_ERROR_RECEIVED,
                    $(msg).find('>text').text(), txt);
                return true;
            }

            var subject = $(msg).find('>subject');
            if (subject.length) {
                var subjectText = subject.text();
                // NOTE(review): `subjectText || subjectText == ""` is true
                // for every string value - presumably meant to also accept an
                // empty subject (topic cleared); confirm intent.
                if (subjectText || subjectText == "") {
                    eventEmitter.emit(XMPPEvents.SUBJECT_CHANGED, subjectText);
                    console.log("Subject is changed to " + subjectText);
                }
            }

            // xep-0203 delay
            var stamp = $(msg).find('>delay').attr('stamp');

            if (!stamp) {
                // or xep-0091 delay, UTC timestamp
                stamp = $(msg).find('>[xmlns="jabber:x:delay"]').attr('stamp');

                if (stamp) {
                    // the format is CCYYMMDDThh:mm:ss
                    var dateParts = stamp.match(/(\d{4})(\d{2})(\d{2}T\d{2}:\d{2}:\d{2})/);
                    stamp = dateParts[1] + "-" + dateParts[2] + "-" + dateParts[3] + "Z";
                }
            }

            if (txt) {
                console.log('chat', nick, txt);
                eventEmitter.emit(XMPPEvents.MESSAGE_RECEIVED,
                    from, nick, txt, this.myroomjid, stamp);
            }
            return true;
        },
        /**
         * Sets (or clears) the room password via the XEP-0045 owner
         * configuration form.
         * @param key the new room secret.
         * @param onSuccess called when the form submission succeeds.
         * @param onError called on any IQ error.
         * @param onNotSupported called when the server's config form does not
         *        expose muc#roomconfig_roomsecret.
         */
        lockRoom: function (key, onSuccess, onError, onNotSupported) {
            //http://xmpp.org/extensions/xep-0045.html#roomconfig
            var ob = this;
            this.connection.sendIQ($iq({to: this.roomjid, type: 'get'}).c('query', {xmlns: 'http://jabber.org/protocol/muc#owner'}),
                function (res) {
                    if ($(res).find('>query>x[xmlns="jabber:x:data"]>field[var="muc#roomconfig_roomsecret"]').length) {
                        var formsubmit = $iq({to: ob.roomjid, type: 'set'}).c('query', {xmlns: 'http://jabber.org/protocol/muc#owner'});
                        formsubmit.c('x', {xmlns: 'jabber:x:data', type: 'submit'});
                        formsubmit.c('field', {'var': 'FORM_TYPE'}).c('value').t('http://jabber.org/protocol/muc#roomconfig').up().up();
                        formsubmit.c('field', {'var': 'muc#roomconfig_roomsecret'}).c('value').t(key).up().up();
                        // Fixes a bug in prosody 0.9.+ https://code.google.com/p/lxmppd/issues/detail?id=373
                        formsubmit.c('field', {'var': 'muc#roomconfig_whois'}).c('value').t('anyone').up().up();
                        // FIXME: is muc#roomconfig_passwordprotectedroom required?
                        ob.connection.sendIQ(formsubmit,
                            onSuccess,
                            onError);
                    } else {
                        onNotSupported();
                    }
                }, onError);
        },
kick: function (jid) {
var kickIQ = $iq({to: this.roomjid, type: 'set'})
.c('query', {xmlns: 'http://jabber.org/protocol/muc#admin'})
.c('item', {nick: Strophe.getResourceFromJid(jid), role: 'none'})
.c('reason').t('You have been kicked.').up().up().up();
this.connection.sendIQ(
kickIQ,
function (result) {
console.log('Kick participant with jid: ', jid, result);
},
function (error) {
console.log('Kick participant error: ', error);
});
},
        /**
         * Builds and sends our presence stanza from the accumulated presMap:
         * MUC join element (with optional password), XEP-0115 caps,
         * user-agent, email/userId/nick, device availability, mute states,
         * video type, stats, prezi, legacy media sources, startmuted and
         * auth token. No-op until initPresenceMap() has run.
         */
        sendPresence: function () {
            if (!this.presMap['to']) {
                // Too early to send presence - not initialized
                return;
            }
            var pres = $pres({to: this.presMap['to'] });
            pres.c('x', {xmlns: this.presMap['xns']});

            if (this.presMap['password']) {
                pres.c('password').t(this.presMap['password']).up();
            }

            pres.up();

            // Send XEP-0115 'c' stanza that contains our capabilities info
            if (this.connection.caps) {
                this.connection.caps.node = config.clientNode;
                pres.c('c', this.connection.caps.generateCapsAttrs()).up();
            }

            pres.c('user-agent', {xmlns: 'http://jitsi.org/jitmeet/user-agent'})
                .t(navigator.userAgent).up();

            if (this.presMap['bridgeIsDown']) {
                pres.c('bridgeIsDown').up();
            }

            if (this.presMap['email']) {
                pres.c('email').t(this.presMap['email']).up();
            }

            if (this.presMap['userId']) {
                pres.c('userId').t(this.presMap['userId']).up();
            }

            if (this.presMap['displayName']) {
                // XEP-0172
                pres.c('nick', {xmlns: 'http://jabber.org/protocol/nick'})
                    .t(this.presMap['displayName']).up();
            }

            if(this.presMap["devices"])
            {
                pres.c('devices').c('audio').t(this.presMap['devices'].audio).up()
                    .c('video').t(this.presMap['devices'].video).up().up();
            }

            if (this.presMap['audions']) {
                pres.c('audiomuted', {xmlns: this.presMap['audions']})
                    .t(this.presMap['audiomuted']).up();
            }

            if (this.presMap['videons']) {
                pres.c('videomuted', {xmlns: this.presMap['videons']})
                    .t(this.presMap['videomuted']).up();
            }

            if (this.presMap['videoTypeNs']) {
                pres.c('videoType', { xmlns: this.presMap['videoTypeNs'] })
                    .t(this.presMap['videoType']).up();
            }

            if (this.presMap['statsns']) {
                var stats = pres.c('stats', {xmlns: this.presMap['statsns']});
                for (var stat in this.presMap["stats"])
                    if (this.presMap["stats"][stat] != null)
                        stats.c("stat", {name: stat, value: this.presMap["stats"][stat]}).up();
                pres.up();
            }

            if (this.presMap['prezins']) {
                pres.c('prezi',
                    {xmlns: this.presMap['prezins'],
                        'url': this.presMap['preziurl']})
                    .c('current').t(this.presMap['prezicurrent']).up().up();
            }

            // This is only for backward compatibility with clients which
            // don't support getting sources from Jingle (i.e. jirecon).
            if (this.presMap['medians']) {
                pres.c('media', {xmlns: this.presMap['medians']});
                // Count source entries; each source occupies three presMap
                // keys (type/ssrc/direction), hence the division by 3 below.
                var sourceNumber = 0;
                Object.keys(this.presMap).forEach(function (key) {
                    if (key.indexOf('source') >= 0) {
                        sourceNumber++;
                    }
                });
                if (sourceNumber > 0) {
                    for (var i = 1; i <= sourceNumber / 3; i++) {
                        pres.c('source',
                            {
                                type: this.presMap['source' + i + '_type'],
                                ssrc: this.presMap['source' + i + '_ssrc'],
                                direction: this.presMap['source' + i + '_direction']
                                    || 'sendrecv'
                            }
                        ).up();
                    }
                }
                pres.up();
            }

            // 'startmuted' is one-shot: remove it after it has been sent.
            if(this.presMap["startMuted"] !== undefined)
            {
                pres.c("startmuted", {audio: this.presMap["startMuted"].audio,
                    video: this.presMap["startMuted"].video,
                    xmlns: "http://jitsi.org/jitmeet/start-muted"});
                delete this.presMap["startMuted"];
            }

            if (config.token) {
                pres.c('token', { xmlns: 'http://jitsi.org/jitmeet/auth-token'}).t(config.token).up();
            }

            pres.up();
            this.connection.send(pres);
        },
        // Sets the XEP-0172 nickname for the next presence.
        addDisplayNameToPresence: function (displayName) {
            this.presMap['displayName'] = displayName;
        },

        // This is only for backward compatibility with clients which
        // don't support getting sources from Jingle (i.e. jirecon).
        addMediaToPresence: function (sourceNumber, mtype, ssrcs, direction) {
            if (!this.presMap['medians'])
                this.presMap['medians'] = 'http://estos.de/ns/mjs';

            this.presMap['source' + sourceNumber + '_type'] = mtype;
            this.presMap['source' + sourceNumber + '_ssrc'] = ssrcs;
            this.presMap['source' + sourceNumber + '_direction'] = direction;
        },

        // This is only for backward compatibility with clients which
        // don't support getting sources from Jingle (i.e. jirecon).
        clearPresenceMedia: function () {
            var self = this;
            // Drop all 'sourceN_*' keys from the presence map.
            Object.keys(this.presMap).forEach(function (key) {
                if (key.indexOf('source') != -1) {
                    delete self.presMap[key];
                }
            });
        },

        // Announces which local devices (audio/video) are available.
        addDevicesToPresence: function (devices) {
            this.presMap['devices'] = devices;
        },

        /**
         * Adds the info about the type of our video stream.
         * @param videoType 'camera' or 'screen'
         */
        addVideoTypeToPresence: function (videoType) {
            this.presMap['videoTypeNs'] = 'http://jitsi.org/jitmeet/video';
            this.presMap['videoType'] = videoType;
        },

        // Announces an active prezi presentation and its current slide.
        addPreziToPresence: function (url, currentSlide) {
            this.presMap['prezins'] = 'http://jitsi.org/jitmeet/prezi';
            this.presMap['preziurl'] = url;
            this.presMap['prezicurrent'] = currentSlide;
        },

        // Withdraws the prezi announcement from future presence stanzas.
        removePreziFromPresence: function () {
            delete this.presMap['prezins'];
            delete this.presMap['preziurl'];
            delete this.presMap['prezicurrent'];
        },

        // Updates only the current prezi slide number.
        addCurrentSlideToPresence: function (currentSlide) {
            this.presMap['prezicurrent'] = currentSlide;
        },

        // Returns the prezi URL announced by the given occupant, if any.
        getPrezi: function (roomjid) {
            return this.preziMap[roomjid];
        },

        // Announces our local audio mute state.
        addAudioInfoToPresence: function (isMuted) {
            this.presMap['audions'] = 'http://jitsi.org/jitmeet/audio';
            this.presMap['audiomuted'] = isMuted.toString();
        },

        // Announces our local video mute state.
        addVideoInfoToPresence: function (isMuted) {
            this.presMap['videons'] = 'http://jitsi.org/jitmeet/video';
            this.presMap['videomuted'] = isMuted.toString();
        },

        // Attaches connection-quality stats to future presence stanzas.
        addConnectionInfoToPresence: function (stats) {
            this.presMap['statsns'] = 'http://jitsi.org/jitmeet/stats';
            this.presMap['stats'] = stats;
        },
findJidFromResource: function (resourceJid) {
if (resourceJid &&
resourceJid === Strophe.getResourceFromJid(this.myroomjid)) {
return this.myroomjid;
}
var peerJid = null;
Object.keys(this.members).some(function (jid) {
peerJid = jid;
return Strophe.getResourceFromJid(jid) === resourceJid;
});
return peerJid;
},
        // Flags the videobridge as down in future presence stanzas.
        addBridgeIsDownToPresence: function () {
            this.presMap['bridgeIsDown'] = true;
        },

        // Announces the user's email (used as avatar/identity hint).
        addEmailToPresence: function (email) {
            this.presMap['email'] = email;
        },

        // Announces the user's id.
        addUserIdToPresence: function (userId) {
            this.presMap['userId'] = userId;
        },

        // One-shot start-muted policy announcement (consumed by sendPresence).
        addStartMutedToPresence: function (audio, video) {
            this.presMap["startMuted"] = {audio: audio, video: video};
        },

        // Whether our own MUC role grants moderator rights.
        isModerator: function () {
            return this.role === 'moderator';
        },

        // Returns the MUC role of the given occupant, or null when unknown.
        getMemberRole: function (peerJid) {
            if (this.members[peerJid]) {
                return this.members[peerJid].role;
            }
            return null;
        },
onParticipantLeft: function (jid) {
eventEmitter.emit(XMPPEvents.MUC_MEMBER_LEFT, jid);
this.connection.jingle.terminateByJid(jid);
if (this.getPrezi(jid)) {
$(document).trigger('presentationremoved.muc',
[jid, this.getPrezi(jid)]);
}
Moderator.onMucMemberLeft(jid);
},
        /**
         * Post-processing of every presence (called from onPresence): bridge
         * down detection, display-name propagation and user id / email
         * propagation. Skipped for the focus participant.
         */
        parsePresence: function (from, member, pres) {
            // The first bridgeIsDown sighting latches the flag and notifies.
            if($(pres).find(">bridgeIsDown").length > 0 && !this.bridgeIsDown) {
                this.bridgeIsDown = true;
                eventEmitter.emit(XMPPEvents.BRIDGE_DOWN);
            }

            if(member.isFocus)
                return;

            // config.displayJids forces showing the MUC resource instead of
            // the self-declared display name.
            var displayName = !config.displayJids
                ? member.displayName : Strophe.getResourceFromJid(from);

            if (displayName && displayName.length > 0) {
                eventEmitter.emit(XMPPEvents.DISPLAY_NAME_CHANGED, from, displayName);
            }

            // Prefer the email as id when present.
            var id = $(pres).find('>userID').text();
            var email = $(pres).find('>email');
            if (email.length > 0) {
                id = email.text();
            }
            eventEmitter.emit(XMPPEvents.USER_ID_CHANGED, from, id);
        }
});
};

View File

@ -1,341 +0,0 @@
/* jshint -W117 */
/* jshint -W101 */
var JingleSession = require("./JingleSessionPC");
var XMPPEvents = require("../../service/xmpp/XMPPEvents");
var RTCBrowserType = require("../RTC/RTCBrowserType");
module.exports = function(XMPP, eventEmitter) {
    Strophe.addConnectionPlugin('jingle', {
        connection: null,               // underlying Strophe connection
        sessions: {},                   // session id -> JingleSession
        jid2session: {},                // peer jid -> JingleSession
        ice_config: {iceServers: []},   // ICE servers passed to new sessions
        pc_constraints: {},             // PeerConnection constraints
        activecall: null,               // currently active session, if any
        media_constraints: {
            mandatory: {
                'OfferToReceiveAudio': true,
                'OfferToReceiveVideo': true
            }
            // MozDontOfferDataChannel: true when this is firefox
        },
        // Strophe plugin hook: advertises our Jingle-related disco features
        // and registers the IQ handler for incoming jingle 'set' stanzas.
        init: function (conn) {
            this.connection = conn;
            if (this.connection.disco) {
                // http://xmpp.org/extensions/xep-0167.html#support
                // http://xmpp.org/extensions/xep-0176.html#support
                this.connection.disco.addFeature('urn:xmpp:jingle:1');
                this.connection.disco.addFeature('urn:xmpp:jingle:apps:rtp:1');
                this.connection.disco.addFeature('urn:xmpp:jingle:transports:ice-udp:1');
                this.connection.disco.addFeature('urn:xmpp:jingle:apps:dtls:0');
                this.connection.disco.addFeature('urn:xmpp:jingle:transports:dtls-sctp:1');
                this.connection.disco.addFeature('urn:xmpp:jingle:apps:rtp:audio');
                this.connection.disco.addFeature('urn:xmpp:jingle:apps:rtp:video');
                // RTX (RFC 4588) only on engines known to support it.
                if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera() ||
                    RTCBrowserType.isTemasysPluginUsed()) {
                    this.connection.disco.addFeature('urn:ietf:rfc:4588');
                }

                // this is dealt with by SDP O/A so we don't need to announce this
                //this.connection.disco.addFeature('urn:xmpp:jingle:apps:rtp:rtcp-fb:0'); // XEP-0293
                //this.connection.disco.addFeature('urn:xmpp:jingle:apps:rtp:rtp-hdrext:0'); // XEP-0294

                this.connection.disco.addFeature('urn:ietf:rfc:5761'); // rtcp-mux
                this.connection.disco.addFeature('urn:ietf:rfc:5888'); // a=group, e.g. bundle

                //this.connection.disco.addFeature('urn:ietf:rfc:5576'); // a=ssrc
            }
            this.connection.addHandler(this.onJingle.bind(this), 'urn:xmpp:jingle:1', 'iq', 'set', null, null);
        },
onJingle: function (iq) {
var sid = $(iq).find('jingle').attr('sid');
var action = $(iq).find('jingle').attr('action');
var fromJid = iq.getAttribute('from');
// send ack first
var ack = $iq({type: 'result',
to: fromJid,
id: iq.getAttribute('id')
});
console.log('on jingle ' + action + ' from ' + fromJid, iq);
var sess = this.sessions[sid];
if ('session-initiate' != action) {
if (sess === null) {
ack.type = 'error';
ack.c('error', {type: 'cancel'})
.c('item-not-found', {xmlns: 'urn:ietf:params:xml:ns:xmpp-stanzas'}).up()
.c('unknown-session', {xmlns: 'urn:xmpp:jingle:errors:1'});
this.connection.send(ack);
return true;
}
// local jid is not checked
if (fromJid != sess.peerjid) {
console.warn('jid mismatch for session id', sid, fromJid, sess.peerjid);
ack.type = 'error';
ack.c('error', {type: 'cancel'})
.c('item-not-found', {xmlns: 'urn:ietf:params:xml:ns:xmpp-stanzas'}).up()
.c('unknown-session', {xmlns: 'urn:xmpp:jingle:errors:1'});
this.connection.send(ack);
return true;
}
} else if (sess !== undefined) {
// existing session with same session id
// this might be out-of-order if the sess.peerjid is the same as from
ack.type = 'error';
ack.c('error', {type: 'cancel'})
.c('service-unavailable', {xmlns: 'urn:ietf:params:xml:ns:xmpp-stanzas'}).up();
console.warn('duplicate session id', sid);
this.connection.send(ack);
return true;
}
// FIXME: check for a defined action
this.connection.send(ack);
// see http://xmpp.org/extensions/xep-0166.html#concepts-session
switch (action) {
case 'session-initiate':
console.log("(TIME) received session-initiate:\t",
window.performance.now(), iq);
var startMuted = $(iq).find('jingle>startmuted');
if (startMuted && startMuted.length > 0) {
var audioMuted = startMuted.attr("audio");
var videoMuted = startMuted.attr("video");
eventEmitter.emit(XMPPEvents.START_MUTED_FROM_FOCUS,
audioMuted === "true", videoMuted === "true");
}
sess = new JingleSession(
$(iq).attr('to'), $(iq).find('jingle').attr('sid'),
this.connection, XMPP, eventEmitter);
// configure session
sess.media_constraints = this.media_constraints;
sess.pc_constraints = this.pc_constraints;
sess.ice_config = this.ice_config;
sess.initialize(fromJid, false);
// FIXME: setRemoteDescription should only be done when this call is to be accepted
sess.setOffer($(iq).find('>jingle'));
this.sessions[sess.sid] = sess;
this.jid2session[sess.peerjid] = sess;
// the callback should either
// .sendAnswer and .accept
// or .sendTerminate -- not necessarily synchronous
// TODO: do we check activecall == null?
this.connection.jingle.activecall = sess;
eventEmitter.emit(XMPPEvents.CALL_INCOMING, sess);
// TODO: check affiliation and/or role
console.log('emuc data for', sess.peerjid,
this.connection.emuc.members[sess.peerjid]);
sess.sendAnswer();
sess.accept();
break;
case 'session-accept':
sess.setAnswer($(iq).find('>jingle'));
sess.accept();
$(document).trigger('callaccepted.jingle', [sess.sid]);
break;
case 'session-terminate':
if (!sess) {
break;
}
console.log('terminating...', sess.sid);
sess.terminate();
this.terminate(sess.sid);
if ($(iq).find('>jingle>reason').length) {
$(document).trigger('callterminated.jingle', [
sess.sid,
sess.peerjid,
$(iq).find('>jingle>reason>:first')[0].tagName,
$(iq).find('>jingle>reason>text').text()
]);
} else {
$(document).trigger('callterminated.jingle',
[sess.sid, sess.peerjid]);
}
break;
case 'transport-info':
sess.addIceCandidate($(iq).find('>jingle>content'));
break;
case 'session-info':
var affected;
if ($(iq).find('>jingle>ringing[xmlns="urn:xmpp:jingle:apps:rtp:info:1"]').length) {
$(document).trigger('ringing.jingle', [sess.sid]);
} else if ($(iq).find('>jingle>mute[xmlns="urn:xmpp:jingle:apps:rtp:info:1"]').length) {
affected = $(iq).find('>jingle>mute[xmlns="urn:xmpp:jingle:apps:rtp:info:1"]').attr('name');
$(document).trigger('mute.jingle', [sess.sid, affected]);
} else if ($(iq).find('>jingle>unmute[xmlns="urn:xmpp:jingle:apps:rtp:info:1"]').length) {
affected = $(iq).find('>jingle>unmute[xmlns="urn:xmpp:jingle:apps:rtp:info:1"]').attr('name');
$(document).trigger('unmute.jingle', [sess.sid, affected]);
}
break;
case 'addsource': // FIXME: proprietary, un-jingleish
case 'source-add': // FIXME: proprietary
console.info("source-add", iq);
sess.addSource($(iq).find('>jingle>content'));
break;
case 'removesource': // FIXME: proprietary, un-jingleish
case 'source-remove': // FIXME: proprietary
console.info("source-remove", iq);
sess.removeSource($(iq).find('>jingle>content'));
break;
default:
console.warn('jingle action not implemented', action);
break;
}
return true;
},
initiate: function (peerjid, myjid) { // initiate a new jinglesession to peerjid
var sess = new JingleSession(myjid || this.connection.jid,
Math.random().toString(36).substr(2, 12), // random string
this.connection, XMPP, eventEmitter);
// configure session
sess.media_constraints = this.media_constraints;
sess.pc_constraints = this.pc_constraints;
sess.ice_config = this.ice_config;
sess.initialize(peerjid, true);
this.sessions[sess.sid] = sess;
this.jid2session[sess.peerjid] = sess;
sess.sendOffer();
return sess;
},
terminate: function (sid, reason, text) { // terminate by sessionid (or all sessions)
if (sid === null || sid === undefined) {
for (sid in this.sessions) {
if (this.sessions[sid].state != 'ended') {
this.sessions[sid].sendTerminate(reason || (!this.sessions[sid].active()) ? 'cancel' : null, text);
this.sessions[sid].terminate();
}
delete this.jid2session[this.sessions[sid].peerjid];
delete this.sessions[sid];
}
} else if (this.sessions.hasOwnProperty(sid)) {
if (this.sessions[sid].state != 'ended') {
this.sessions[sid].sendTerminate(reason || (!this.sessions[sid].active()) ? 'cancel' : null, text);
this.sessions[sid].terminate();
}
delete this.jid2session[this.sessions[sid].peerjid];
delete this.sessions[sid];
}
},
// Used to terminate a session when an unavailable presence is received.
terminateByJid: function (jid) {
if (this.jid2session.hasOwnProperty(jid)) {
var sess = this.jid2session[jid];
if (sess) {
sess.terminate();
console.log('peer went away silently', jid);
delete this.sessions[sess.sid];
delete this.jid2session[jid];
$(document).trigger('callterminated.jingle',
[sess.sid, jid], 'gone');
}
}
},
terminateRemoteByJid: function (jid, reason) {
if (this.jid2session.hasOwnProperty(jid)) {
var sess = this.jid2session[jid];
if (sess) {
sess.sendTerminate(reason || (!sess.active()) ? 'kick' : null);
sess.terminate();
console.log('terminate peer with jid', sess.sid, jid);
delete this.sessions[sess.sid];
delete this.jid2session[jid];
$(document).trigger('callterminated.jingle',
[sess.sid, jid, 'kicked']);
}
}
},
getStunAndTurnCredentials: function () {
// get stun and turn configuration from server via xep-0215
// uses time-limited credentials as described in
// http://tools.ietf.org/html/draft-uberti-behave-turn-rest-00
//
// see https://code.google.com/p/prosody-modules/source/browse/mod_turncredentials/mod_turncredentials.lua
// for a prosody module which implements this
//
// currently, this doesn't work with updateIce and therefore credentials with a long
// validity have to be fetched before creating the peerconnection
// TODO: implement refresh via updateIce as described in
// https://code.google.com/p/webrtc/issues/detail?id=1650
var self = this;
this.connection.sendIQ(
$iq({type: 'get', to: this.connection.domain})
.c('services', {xmlns: 'urn:xmpp:extdisco:1'}).c('service', {host: 'turn.' + this.connection.domain}),
function (res) {
var iceservers = [];
$(res).find('>services>service').each(function (idx, el) {
el = $(el);
var dict = {};
var type = el.attr('type');
switch (type) {
case 'stun':
dict.url = 'stun:' + el.attr('host');
if (el.attr('port')) {
dict.url += ':' + el.attr('port');
}
iceservers.push(dict);
break;
case 'turn':
case 'turns':
dict.url = type + ':';
if (el.attr('username')) { // https://code.google.com/p/webrtc/issues/detail?id=1508
if (navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./) && parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10) < 28) {
dict.url += el.attr('username') + '@';
} else {
dict.username = el.attr('username'); // only works in M28
}
}
dict.url += el.attr('host');
if (el.attr('port') && el.attr('port') != '3478') {
dict.url += ':' + el.attr('port');
}
if (el.attr('transport') && el.attr('transport') != 'udp') {
dict.url += '?transport=' + el.attr('transport');
}
if (el.attr('password')) {
dict.credential = el.attr('password');
}
iceservers.push(dict);
break;
}
});
self.ice_config.iceServers = iceservers;
},
function (err) {
console.warn('getting turn credentials failed', err);
console.warn('is mod_turncredentials or similar installed?');
}
);
// implement push?
},
/**
* Returns the data saved in 'updateLog' in a format to be logged.
*/
getLog: function () {
var data = {};
var self = this;
Object.keys(this.sessions).forEach(function (sid) {
var session = self.sessions[sid];
if (session.peerconnection && session.peerconnection.updateLog) {
// FIXME: should probably be a .dump call
data["jingle_" + session.sid] = {
updateLog: session.peerconnection.updateLog,
stats: session.peerconnection.stats,
url: window.location.href
};
}
});
return data;
}
});
};

View File

@ -1,20 +0,0 @@
/* global Strophe */
module.exports = function () {
Strophe.addConnectionPlugin('logger', {
// logs raw stanzas and makes them available for download as JSON
connection: null,
log: [],
init: function (conn) {
this.connection = conn;
this.connection.rawInput = this.log_incoming.bind(this);
this.connection.rawOutput = this.log_outgoing.bind(this);
},
log_incoming: function (stanza) {
this.log.push([new Date().getTime(), 'incoming', stanza]);
},
log_outgoing: function (stanza) {
this.log.push([new Date().getTime(), 'outgoing', stanza]);
}
});
};

View File

@ -1,60 +0,0 @@
/* global $, $iq, config, connection, focusMucJid, forceMuted, Strophe */
/**
* Moderate connection plugin.
*/
var XMPPEvents = require("../../service/xmpp/XMPPEvents");
module.exports = function (XMPP, eventEmitter) {
Strophe.addConnectionPlugin('moderate', {
connection: null,
init: function (conn) {
this.connection = conn;
this.connection.addHandler(this.onMute.bind(this),
'http://jitsi.org/jitmeet/audio',
'iq',
'set',
null,
null);
},
setMute: function (jid, mute) {
console.info("set mute", mute);
var iqToFocus =
$iq({to: this.connection.emuc.focusMucJid, type: 'set'})
.c('mute', {
xmlns: 'http://jitsi.org/jitmeet/audio',
jid: jid
})
.t(mute.toString())
.up();
this.connection.sendIQ(
iqToFocus,
function (result) {
console.log('set mute', result);
},
function (error) {
console.log('set mute error', error);
});
},
onMute: function (iq) {
var from = iq.getAttribute('from');
if (from !== this.connection.emuc.focusMucJid) {
console.warn("Ignored mute from non focus peer");
return false;
}
var mute = $(iq).find('mute');
if (mute.length) {
var doMuteAudio = mute.text() === "true";
eventEmitter.emit(XMPPEvents.AUDIO_MUTED_BY_FOCUS, doMuteAudio);
XMPP.forceMuted = doMuteAudio;
}
return true;
},
eject: function (jid) {
// We're not the focus, so can't terminate
//connection.jingle.terminateRemoteByJid(jid, 'kick');
this.connection.emuc.kick(jid);
}
});
};

View File

@ -1,121 +0,0 @@
/* global $, $iq, Strophe */
var XMPPEvents = require("../../service/xmpp/XMPPEvents");
/**
* Ping every 20 sec
*/
var PING_INTERVAL = 20000;
/**
* Ping timeout error after 15 sec of waiting.
*/
var PING_TIMEOUT = 15000;
/**
* Will close the connection after 3 consecutive ping errors.
*/
var PING_THRESHOLD = 3;
/**
* XEP-0199 ping plugin.
*
* Registers "urn:xmpp:ping" namespace under Strophe.NS.PING.
*/
module.exports = function (XMPP, eventEmitter) {
Strophe.addConnectionPlugin('ping', {
connection: null,
failedPings: 0,
/**
* Initializes the plugin. Method called by Strophe.
* @param connection Strophe connection instance.
*/
init: function (connection) {
this.connection = connection;
Strophe.addNamespace('PING', "urn:xmpp:ping");
},
/**
* Sends "ping" to given <tt>jid</tt>
* @param jid the JID to which ping request will be sent.
* @param success callback called on success.
* @param error callback called on error.
* @param timeout ms how long are we going to wait for the response. On
* timeout <tt>error<//t> callback is called with undefined error
* argument.
*/
ping: function (jid, success, error, timeout) {
var iq = $iq({type: 'get', to: jid});
iq.c('ping', {xmlns: Strophe.NS.PING});
this.connection.sendIQ(iq, success, error, timeout);
},
/**
* Checks if given <tt>jid</tt> has XEP-0199 ping support.
* @param jid the JID to be checked for ping support.
* @param callback function with boolean argument which will be
* <tt>true</tt> if XEP-0199 ping is supported by given <tt>jid</tt>
*/
hasPingSupport: function (jid, callback) {
this.connection.disco.info(
jid, null,
function (result) {
var ping = $(result).find('>>feature[var="urn:xmpp:ping"]');
callback(ping.length > 0);
},
function (error) {
console.error("Ping feature discovery error", error);
callback(false);
}
);
},
/**
* Starts to send ping in given interval to specified remote JID.
* This plugin supports only one such task and <tt>stopInterval</tt>
* must be called before starting a new one.
* @param remoteJid remote JID to which ping requests will be sent to.
* @param interval task interval in ms.
*/
startInterval: function (remoteJid, interval) {
if (this.intervalId) {
console.error("Ping task scheduled already");
return;
}
if (!interval)
interval = PING_INTERVAL;
var self = this;
this.intervalId = window.setInterval(function () {
self.ping(remoteJid,
function (result) {
// Ping OK
self.failedPings = 0;
},
function (error) {
self.failedPings += 1;
console.error(
"Ping " + (error ? "error" : "timeout"), error);
if (self.failedPings >= PING_THRESHOLD) {
self.connection.disconnect();
}
}, PING_TIMEOUT);
}, interval);
console.info("XMPP pings will be sent every " + interval + " ms");
},
/**
* Stops current "ping" interval task.
*/
stopInterval: function () {
if (this.intervalId) {
window.clearInterval(this.intervalId);
this.intervalId = null;
this.failedPings = 0;
console.info("Ping interval cleared");
}
}
});
};

View File

@ -1,96 +0,0 @@
/* jshint -W117 */
module.exports = function() {
Strophe.addConnectionPlugin('rayo',
{
RAYO_XMLNS: 'urn:xmpp:rayo:1',
connection: null,
init: function (conn) {
this.connection = conn;
if (this.connection.disco) {
this.connection.disco.addFeature('urn:xmpp:rayo:client:1');
}
this.connection.addHandler(
this.onRayo.bind(this), this.RAYO_XMLNS, 'iq', 'set',
null, null);
},
onRayo: function (iq) {
console.info("Rayo IQ", iq);
},
dial: function (to, from, roomName, roomPass) {
var self = this;
var req = $iq(
{
type: 'set',
to: this.connection.emuc.focusMucJid
}
);
req.c('dial',
{
xmlns: this.RAYO_XMLNS,
to: to,
from: from
});
req.c('header',
{
name: 'JvbRoomName',
value: roomName
}).up();
if (roomPass !== null && roomPass.length) {
req.c('header',
{
name: 'JvbRoomPassword',
value: roomPass
}).up();
}
this.connection.sendIQ(
req,
function (result) {
console.info('Dial result ', result);
var resource = $(result).find('ref').attr('uri');
self.call_resource = resource.substr('xmpp:'.length);
console.info(
"Received call resource: " + self.call_resource);
},
function (error) {
console.info('Dial error ', error);
}
);
},
hang_up: function () {
if (!this.call_resource) {
console.warn("No call in progress");
return;
}
var self = this;
var req = $iq(
{
type: 'set',
to: this.call_resource
}
);
req.c('hangup',
{
xmlns: this.RAYO_XMLNS
});
this.connection.sendIQ(
req,
function (result) {
console.info('Hangup result ', result);
self.call_resource = null;
},
function (error) {
console.info('Hangup error ', error);
self.call_resource = null;
}
);
}
}
);
};

View File

@ -1,43 +0,0 @@
/* global Strophe */
/**
* Strophe logger implementation. Logs from level WARN and above.
*/
module.exports = function () {
Strophe.log = function (level, msg) {
switch (level) {
case Strophe.LogLevel.WARN:
console.warn("Strophe: " + msg);
break;
case Strophe.LogLevel.ERROR:
case Strophe.LogLevel.FATAL:
console.error("Strophe: " + msg);
break;
}
};
Strophe.getStatusString = function (status) {
switch (status) {
case Strophe.Status.ERROR:
return "ERROR";
case Strophe.Status.CONNECTING:
return "CONNECTING";
case Strophe.Status.CONNFAIL:
return "CONNFAIL";
case Strophe.Status.AUTHENTICATING:
return "AUTHENTICATING";
case Strophe.Status.AUTHFAIL:
return "AUTHFAIL";
case Strophe.Status.CONNECTED:
return "CONNECTED";
case Strophe.Status.DISCONNECTED:
return "DISCONNECTED";
case Strophe.Status.DISCONNECTING:
return "DISCONNECTING";
case Strophe.Status.ATTACHED:
return "ATTACHED";
default:
return "unknown";
}
};
};

View File

@ -1,616 +0,0 @@
/* global $, APP, config, Strophe, Base64, $msg */
/* jshint -W101 */
var Moderator = require("./moderator");
var EventEmitter = require("events");
var Recording = require("./recording");
var SDP = require("./SDP");
var SDPUtil = require("./SDPUtil");
var Settings = require("../settings/Settings");
var Pako = require("pako");
var StreamEventTypes = require("../../service/RTC/StreamEventTypes");
var RTCEvents = require("../../service/RTC/RTCEvents");
var XMPPEvents = require("../../service/xmpp/XMPPEvents");
var retry = require('retry');
var RandomUtil = require("../util/RandomUtil");
var eventEmitter = new EventEmitter();
var connection = null;
var authenticatedUser = false;
/**
 * Generates a user name out of four random hex chunks joined by dashes,
 * e.g. 12345678-1234-1234-12345678
 * @returns {string}
 */
function generateUserName() {
    return [8, 4, 4, 8].map(function (length) {
        return RandomUtil.randomHexString(length);
    }).join("-");
}
/**
 * Opens the Strophe connection with the given credentials and drives the
 * whole connection lifecycle from the status callback: fetches STUN/TURN
 * credentials, starts XEP-0199 pings, joins the room, retries on failure
 * (up to 3 attempts via the `retry` module) and prompts for login on
 * authentication problems.
 *
 * @param jid the JID to connect with.
 * @param password optional password/token; when present, the user is marked
 * as authenticated once connected.
 */
function connect(jid, password) {
    var faultTolerantConnect = retry.operation({
        retries: 3
    });
    // fault tolerant connect
    faultTolerantConnect.attempt(function () {
        connection = XMPP.createConnection();
        Moderator.setConnection(connection);
        connection.jingle.pc_constraints = APP.RTC.getPCConstraints();
        if (config.useIPv6) {
            // https://code.google.com/p/webrtc/issues/detail?id=2828
            if (!connection.jingle.pc_constraints.optional)
                connection.jingle.pc_constraints.optional = [];
            connection.jingle.pc_constraints.optional.push({googIPv6: true});
        }
        // Include user info in MUC presence
        var settings = Settings.getSettings();
        if (settings.email) {
            connection.emuc.addEmailToPresence(settings.email);
        }
        if (settings.uid) {
            connection.emuc.addUserIdToPresence(settings.uid);
        }
        if (settings.displayName) {
            connection.emuc.addDisplayNameToPresence(settings.displayName);
        }
        // connection.connect() starts the connection process.
        //
        // As the connection process proceeds, the user supplied callback will
        // be triggered multiple times with status updates. The callback should
        // take two arguments - the status code and the error condition.
        //
        // The status code will be one of the values in the Strophe.Status
        // constants. The error condition will be one of the conditions defined
        // in RFC 3920 or the condition strophe-parsererror.
        //
        // The Parameters wait, hold and route are optional and only relevant
        // for BOSH connections. Please see XEP 124 for a more detailed
        // explanation of the optional parameters.
        //
        // Connection status constants for use by the connection handler
        // callback.
        //
        // Status.ERROR - An error has occurred (websockets specific)
        // Status.CONNECTING - The connection is currently being made
        // Status.CONNFAIL - The connection attempt failed
        // Status.AUTHENTICATING - The connection is authenticating
        // Status.AUTHFAIL - The authentication attempt failed
        // Status.CONNECTED - The connection has succeeded
        // Status.DISCONNECTED - The connection has been terminated
        // Status.DISCONNECTING - The connection is currently being terminated
        // Status.ATTACHED - The connection has been attached
        var anonymousConnectionFailed = false;
        var connectionFailed = false;
        var lastErrorMsg;
        connection.connect(jid, password, function (status, msg) {
            console.log("(TIME) Strophe " + Strophe.getStatusString(status) +
                (msg ? "[" + msg + "]" : "") +
                "\t:" + window.performance.now());
            if (status === Strophe.Status.CONNECTED) {
                if (config.useStunTurn) {
                    connection.jingle.getStunAndTurnCredentials();
                }
                console.info("My Jabber ID: " + connection.jid);
                // Schedule ping ?
                var pingJid = connection.domain;
                connection.ping.hasPingSupport(
                    pingJid,
                    function (hasPing) {
                        if (hasPing)
                            connection.ping.startInterval(pingJid);
                        else
                            console.warn("Ping NOT supported by " + pingJid);
                    }
                );
                if (password)
                    authenticatedUser = true;
                // Join only happens once local tracks are also ready.
                maybeDoJoin();
            } else if (status === Strophe.Status.CONNFAIL) {
                if (msg === 'x-strophe-bad-non-anon-jid') {
                    anonymousConnectionFailed = true;
                } else {
                    connectionFailed = true;
                }
                lastErrorMsg = msg;
            } else if (status === Strophe.Status.DISCONNECTED) {
                // Stop ping interval
                connection.ping.stopInterval();
                if (anonymousConnectionFailed) {
                    // prompt user for username and password
                    XMPP.promptLogin();
                } else {
                    // Strophe already has built-in HTTP/BOSH error handling and
                    // request retry logic. Requests are resent automatically
                    // until their error count reaches 5. Strophe.js disconnects
                    // if the error count is > 5. We are not replicating this
                    // here.
                    //
                    // The "problem" is that failed HTTP/BOSH requests don't
                    // trigger a callback with a status update, so when a
                    // callback with status Strophe.Status.DISCONNECTED arrives,
                    // we can't be sure if it's a graceful disconnect or if it's
                    // triggered by some HTTP/BOSH error.
                    //
                    // But that's a minor issue in Jitsi Meet as we never
                    // disconnect anyway, not even when the user closes the
                    // browser window (which is kind of wrong, but the point is
                    // that we should never ever get disconnected).
                    //
                    // On the other hand, failed connections due to XMPP layer
                    // errors, trigger a callback with status Strophe.Status.CONNFAIL.
                    //
                    // Here we implement retry logic for failed connections due
                    // to XMPP layer errors and we display an error to the user
                    // if we get disconnected from the XMPP server permanently.
                    // If the connection failed, retry.
                    if (connectionFailed &&
                        faultTolerantConnect.retry("connection-failed")) {
                        return;
                    }
                    // If we failed to connect to the XMPP server, fire an event
                    // to let all the interested module now about it.
                    eventEmitter.emit(XMPPEvents.CONNECTION_FAILED,
                        msg ? msg : lastErrorMsg);
                }
            } else if (status === Strophe.Status.AUTHFAIL) {
                // wrong password or username, prompt user
                XMPP.promptLogin();
            }
        });
    });
}
/**
 * Joins the room, but only once both the XMPP connection and at least one
 * local track (audio or video) are available.  Called from both the
 * connection status handler and the local stream listener, whichever
 * becomes ready last.
 */
function maybeDoJoin() {
    // .connected is true while connecting?
    var ready = connection && connection.connected &&
        Strophe.getResourceFromJid(connection.jid) &&
        (APP.RTC.localAudio || APP.RTC.localVideo);
    if (ready) {
        doJoin();
    }
}
// Emits READY_TO_JOIN; fired by maybeDoJoin() once the connection and at
// least one local track are available.
function doJoin() {
    eventEmitter.emit(XMPPEvents.READY_TO_JOIN);
}
// Registers all Strophe connection plugins used by the application.
// Plugins that take (XMPP, eventEmitter) emit or consume module-level
// events; the others are self contained.
function initStrophePlugins()
{
    require("./strophe.emuc")(XMPP, eventEmitter);
    require("./strophe.jingle")(XMPP, eventEmitter);
    require("./strophe.moderate")(XMPP, eventEmitter);
    require("./strophe.util")();
    require("./strophe.ping")(XMPP, eventEmitter);
    require("./strophe.rayo")();
    require("./strophe.logger")();
}
/**
 * Advertises the video type of the given local stream in MUC presence,
 * provided the stream has one.
 * @param localStream new or modified <tt>LocalStream</tt>.
 */
function broadcastLocalVideoType(localStream) {
    var videoType = localStream.videoType;
    if (videoType) {
        XMPP.addToPresence('videoType', videoType);
    }
}
/**
 * Subscribes to RTC events: joins the room and advertises the video type
 * when a local stream is created, re-advertises the type when it changes,
 * and publishes the device list in presence when available devices change.
 */
function registerListeners() {
    APP.RTC.addStreamListener(function (localStream) {
        maybeDoJoin();
        broadcastLocalVideoType(localStream);
    }, StreamEventTypes.EVENT_TYPE_LOCAL_CREATED);

    APP.RTC.addStreamListener(
        broadcastLocalVideoType, StreamEventTypes.EVENT_TYPE_LOCAL_CHANGED);

    APP.RTC.addListener(RTCEvents.AVAILABLE_DEVICES_CHANGED,
        function (devices) {
            XMPP.addToPresence("devices", devices);
        });
}
// One-shot page-teardown handler: signs out of the BOSH session and
// disposes the conference.  Wrapped in a closure so that running it a
// second time is a no-op (it is bound to both 'beforeunload' and 'unload',
// see setupEvents).
var unload = (function () {
    var unloaded = false;
    return function () {
        if (unloaded) { return; }
        unloaded = true;
        if (connection && connection.connected) {
            // ensure signout
            // NOTE(review): the request is deliberately synchronous
            // (async: false), presumably so it completes before the page
            // goes away -- confirm before changing.
            $.ajax({
                type: 'POST',
                url: config.bosh,
                async: false,
                cache: false,
                contentType: 'application/xml',
                data: "<body rid='" +
                    (connection.rid || connection._proto.rid) +
                    "' xmlns='http://jabber.org/protocol/httpbind' sid='" +
                    (connection.sid || connection._proto.sid) +
                    "' type='terminate'>" +
                    "<presence xmlns='jabber:client' type='unavailable'/>" +
                    "</body>",
                success: function (data) {
                    console.log('signed out');
                    console.log(data);
                },
                error: function (XMLHttpRequest, textStatus, errorThrown) {
                    console.log('signout error',
                        textStatus + ' (' + errorThrown + ')');
                }
            });
        }
        XMPP.disposeConference(true);
    };
})();
/**
 * Binds the one-shot `unload` cleanup handler to page teardown events.
 *
 * Recent Firefox versions do not fire 'beforeunload' when the window or the
 * tab is closed -- only when leaving the page (URL change) -- and a
 * participant that never unloads properly lingers as a ghost for everyone
 * else in the conference.  Binding the same handler to both 'beforeunload'
 * and 'unload' guarantees it runs at least once, and since the handler is
 * idempotent, browsers that fire both events are harmless.
 */
function setupEvents() {
    ['beforeunload', 'unload'].forEach(function (eventName) {
        $(window).bind(eventName, unload);
    });
}
/**
 * Public facade of the XMPP module: connection lifecycle, MUC presence,
 * muting, recording, chat, Rayo dial-out and diagnostic log access.
 * All methods operate on the module-level `connection` established by
 * connect().
 */
var XMPP = {
    getConnection: function(){ return connection; },
    // Set to true once we deliberately left the MUC (see disposeConference).
    sessionTerminated: false,

    /**
     * XMPP connection status
     */
    Status: Strophe.Status,

    /**
     * Remembers if we were muted by the focus.
     * @type {boolean}
     */
    forceMuted: false,

    // Registers plugins and listeners, then connects using either the
    // anonymous domain or the authenticated domain (forced by '?login=true'
    // in the URL, or required when a token is configured).
    start: function () {
        setupEvents();
        initStrophePlugins();
        registerListeners();
        Moderator.init(this, eventEmitter);
        Recording.init();
        var configDomain = config.hosts.anonymousdomain || config.hosts.domain;
        // Force authenticated domain if room is appended with '?login=true'
        if (config.hosts.anonymousdomain &&
            window.location.search.indexOf("login=true") !== -1) {
            configDomain = config.hosts.domain;
        }
        var jid = configDomain || window.location.hostname;
        var password = null;
        if (config.token) {
            password = config.token;
            if (config.id) {
                jid = config.id + "@" + jid;
            } else {
                jid = generateUserName() + "@" + jid;
            }
        }
        connect(jid, password);
    },
    // Creates a new BOSH connection; the room name is appended to the BOSH
    // URL as a query parameter.
    createConnection: function () {
        var bosh = config.bosh || '/http-bind';
        // adds the room name used to the bosh connection
        return new Strophe.Connection(bosh + '?room=' + APP.UI.getRoomNode());
    },
    getStatusString: function (status) {
        return Strophe.getStatusString(status);
    },
    // Asks the UI for credentials; `connect` is passed as the retry
    // callback.
    promptLogin: function () {
        eventEmitter.emit(XMPPEvents.PROMPT_FOR_LOGIN, connect);
    },
    // Joins the MUC, deriving the occupant resource either from the given
    // nick or from the connection JID (truncated to 8 chars for anonymous
    // users).
    joinRoom: function(roomName, useNicks, nick) {
        var roomjid = roomName;

        if (useNicks) {
            if (nick) {
                roomjid += '/' + nick;
            } else {
                roomjid += '/' + Strophe.getNodeFromJid(connection.jid);
            }
        } else {
            var tmpJid = Strophe.getNodeFromJid(connection.jid);

            if(!authenticatedUser)
                tmpJid = tmpJid.substr(0, 8);

            roomjid += '/' + tmpJid;
        }
        connection.emuc.doJoin(roomjid);
    },
    myJid: function () {
        if(!connection)
            return null;
        return connection.emuc.myroomjid;
    },
    myResource: function () {
        if(!connection || ! connection.emuc.myroomjid)
            return null;
        return Strophe.getResourceFromJid(connection.emuc.myroomjid);
    },
    getLastPresence: function (from) {
        if(!connection)
            return null;
        return connection.emuc.lastPresenceMap[from];
    },
    // Tears down the active call: detaches local streams, closes the peer
    // connection and (unless the page is unloading) leaves the MUC.
    disposeConference: function (onUnload) {
        var handler = connection.jingle.activecall;
        if (handler && handler.peerconnection) {
            // FIXME: probably removing streams is not required and close() should
            // be enough
            if (APP.RTC.localAudio) {
                handler.peerconnection.removeStream(
                    APP.RTC.localAudio.getOriginalStream(), onUnload);
            }
            if (APP.RTC.localVideo) {
                handler.peerconnection.removeStream(
                    APP.RTC.localVideo.getOriginalStream(), onUnload);
            }
            handler.peerconnection.close();
        }
        eventEmitter.emit(XMPPEvents.DISPOSE_CONFERENCE, onUnload);
        connection.jingle.activecall = null;
        if (!onUnload) {
            this.sessionTerminated = true;
            connection.emuc.doLeave();
        }
    },
    addListener: function(type, listener) {
        eventEmitter.on(type, listener);
    },
    removeListener: function (type, listener) {
        eventEmitter.removeListener(type, listener);
    },
    allocateConferenceFocus: function(roomName, callback) {
        Moderator.allocateConferenceFocus(roomName, callback);
    },
    getLoginUrl: function (roomName, callback) {
        Moderator.getLoginUrl(roomName, callback);
    },
    getPopupLoginUrl: function (roomName, callback) {
        Moderator.getPopupLoginUrl(roomName, callback);
    },
    isModerator: function () {
        return Moderator.isModerator();
    },
    isSipGatewayEnabled: function () {
        return Moderator.isSipGatewayEnabled();
    },
    isExternalAuthEnabled: function () {
        return Moderator.isExternalAuthEnabled();
    },
    isConferenceInProgress: function () {
        return connection && connection.jingle.activecall &&
            connection.jingle.activecall.peerconnection;
    },
    // Replaces a local stream in the active call; invokes callback
    // immediately when no call is in progress.
    switchStreams: function (stream, oldStream, callback, isAudio) {
        if (this.isConferenceInProgress()) {
            // FIXME: will block switchInProgress on true value in case of exception
            connection.jingle.activecall.switchStreams(stream, oldStream, callback, isAudio);
        } else {
            // We are done immediately
            console.warn("No conference handler or conference not started yet");
            callback();
        }
    },
    sendVideoInfoPresence: function (mute) {
        if(!connection)
            return;
        connection.emuc.addVideoInfoToPresence(mute);
        connection.emuc.sendPresence();
    },
    // Mutes/unmutes video in the active call (if any) and then advertises
    // the new state in presence via the wrapped callback.
    setVideoMute: function (mute, callback, options) {
        if(!connection)
            return;
        var self = this;
        var localCallback = function (mute) {
            self.sendVideoInfoPresence(mute);
            return callback(mute);
        };
        if(connection.jingle.activecall)
        {
            connection.jingle.activecall.setVideoMute(
                mute, localCallback, options);
        }
        else {
            localCallback(mute);
        }
    },
    // Mutes/unmutes local audio.  When we were force-muted by the focus,
    // unmuting first asks the focus for permission.
    // @returns {boolean} false when there is no connection/local audio.
    setAudioMute: function (mute, callback) {
        if (!(connection && APP.RTC.localAudio)) {
            return false;
        }

        if (this.forceMuted && !mute) {
            console.info("Asking focus for unmute");
            connection.moderate.setMute(connection.emuc.myroomjid, mute);
            // FIXME: wait for result before resetting muted status
            this.forceMuted = false;
        }

        // NOTE(review): loose == comparison of two presumably-boolean
        // values -- confirm both sides are booleans before tightening.
        if (mute == APP.RTC.localAudio.isMuted()) {
            // Nothing to do
            return true;
        }

        APP.RTC.localAudio.setMute(mute);
        this.sendAudioInfoPresence(mute, callback);
        return true;
    },
    sendAudioInfoPresence: function(mute, callback) {
        if(connection) {
            connection.emuc.addAudioInfoToPresence(mute);
            connection.emuc.sendPresence();
        }
        callback();
        return true;
    },
    toggleRecording: function (tokenEmptyCallback,
                               recordingStateChangeCallback) {
        Recording.toggleRecording(tokenEmptyCallback,
            recordingStateChangeCallback, connection);
    },
    // Maps a logical presence field name to the corresponding emuc call and
    // (unless dontSend) pushes a presence update.
    addToPresence: function (name, value, dontSend) {
        switch (name) {
            case "displayName":
                connection.emuc.addDisplayNameToPresence(value);
                break;
            case "prezi":
                connection.emuc.addPreziToPresence(value, 0);
                break;
            case "preziSlide":
                connection.emuc.addCurrentSlideToPresence(value);
                break;
            case "connectionQuality":
                connection.emuc.addConnectionInfoToPresence(value);
                break;
            case "email":
                connection.emuc.addEmailToPresence(value);
                break;
            case "devices":
                connection.emuc.addDevicesToPresence(value);
                break;
            case "videoType":
                connection.emuc.addVideoTypeToPresence(value);
                break;
            case "startMuted":
                if(!Moderator.isModerator())
                    return;
                connection.emuc.addStartMutedToPresence(value[0],
                    value[1]);
                break;
            default :
                console.log("Unknown tag for presence: " + name);
                return;
        }
        if (!dontSend)
            connection.emuc.sendPresence();
    },
    /**
     * Sends 'data' as a log message to the focus. Returns true iff a message
     * was sent.
     * @param data
     * @returns {boolean} true iff a message was sent.
     */
    sendLogs: function (data) {
        if(!connection.emuc.focusMucJid)
            return false;

        var deflate = true;

        var content = JSON.stringify(data);
        if (deflate) {
            content = String.fromCharCode.apply(null, Pako.deflateRaw(content));
        }
        content = Base64.encode(content);
        // XEP-0337-ish
        var message = $msg({to: connection.emuc.focusMucJid, type: 'normal'});
        message.c('log', { xmlns: 'urn:xmpp:eventlog',
            id: 'PeerConnectionStats'});
        message.c('message').t(content).up();
        if (deflate) {
            message.c('tag', {name: "deflated", value: "true"}).up();
        }
        message.up();

        connection.send(message);
        return true;
    },
    // Gets the logs from strophe.jingle.
    getJingleLog: function () {
        return connection.jingle ? connection.jingle.getLog() : {};
    },
    // Gets the logs from strophe.
    getXmppLog: function () {
        return connection.logger ? connection.logger.log : null;
    },
    getPrezi: function () {
        return connection.emuc.getPrezi(this.myJid());
    },
    removePreziFromPresence: function () {
        connection.emuc.removePreziFromPresence();
        connection.emuc.sendPresence();
    },
    sendChatMessage: function (message, nickname) {
        connection.emuc.sendMessage(message, nickname);
    },
    setSubject: function (topic) {
        connection.emuc.setSubject(topic);
    },
    lockRoom: function (key, onSuccess, onError, onNotSupported) {
        connection.emuc.lockRoom(key, onSuccess, onError, onNotSupported);
    },
    dial: function (to, from, roomName,roomPass) {
        connection.rayo.dial(to, from, roomName,roomPass);
    },
    setMute: function (jid, mute) {
        connection.moderate.setMute(jid, mute);
    },
    eject: function (jid) {
        connection.moderate.eject(jid);
    },
    logout: function (callback) {
        Moderator.logout(callback);
    },
    findJidFromResource: function (resource) {
        return connection.emuc.findJidFromResource(resource);
    },
    getMembers: function () {
        return connection.emuc.members;
    },
    getJidFromSSRC: function (ssrc) {
        if (!this.isConferenceInProgress())
            return null;
        return connection.jingle.activecall.getSsrcOwner(ssrc);
    },
    // Returns true iff we have joined the MUC.
    isMUCJoined: function () {
        return connection === null ? false : connection.emuc.joined;
    },
    getSessions: function () {
        return connection.jingle.sessions;
    },
    removeStream: function (stream) {
        if (!this.isConferenceInProgress())
            return;
        connection.jingle.activecall.peerconnection.removeStream(stream);
    },
    filter_special_chars: function (text) {
        return SDPUtil.filter_special_chars(text);
    }
};

module.exports = XMPP;

View File

@ -2,7 +2,6 @@ export default {
NICKNAME_CHANGED: "UI.nickname_changed",
SELECTED_ENDPOINT: "UI.selected_endpoint",
PINNED_ENDPOINT: "UI.pinned_endpoint",
LARGEVIDEO_INIT: "UI.largevideo_init",
/**
* Notifies that local user created text message.
*/
@ -22,6 +21,9 @@ export default {
AUDIO_MUTED: "UI.audio_muted",
VIDEO_MUTED: "UI.video_muted",
PREZI_CLICKED: "UI.prezi_clicked",
SHARE_PREZI: "UI.share_prezi",
PREZI_SLIDE_CHANGED: "UI.prezi_slide_changed",
STOP_SHARING_PREZI: "UI.stop_sharing_prezi",
ETHERPAD_CLICKED: "UI.etherpad_clicked",
ROOM_LOCK_CLICKED: "UI.room_lock_clicked",
USER_INVITED: "UI.user_invited",
@ -32,6 +34,7 @@ export default {
TOGGLE_SETTINGS: "UI.toggle_settings",
TOGGLE_CONTACT_LIST: "UI.toggle_contact_list",
TOGGLE_FILM_STRIP: "UI.toggle_film_strip",
TOGGLE_SCREENSHARING: "UI.toggle_screensharing",
CONTACT_CLICKED: "UI.contact_clicked",
HANGUP: "UI.hangup",
LOGOUT: "UI.logout",

View File

@ -1,6 +1,4 @@
var DesktopSharingEventTypes = {
INIT: "ds.init",
export default {
SWITCHING_DONE: "ds.switching_done",
NEW_STREAM_CREATED: "ds.new_stream_created",
@ -11,5 +9,3 @@ var DesktopSharingEventTypes = {
*/
FIREFOX_EXTENSION_NEEDED: "ds.firefox_extension_needed"
};
module.exports = DesktopSharingEventTypes;