(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.JitsiMeetJS = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o a list of participant identifiers containing all conference participants. */ JitsiConference.prototype.getParticipants = function() { return Object.keys(this.participants).map(function (key) { return this.participants[key]; }, this); }; /** * @returns {JitsiParticipant} the participant in this conference with the specified id (or * undefined if there isn't one). * @param id the id of the participant. */ JitsiConference.prototype.getParticipantById = function(id) { return this.participants[id]; }; /** * Kick participant from this conference. * @param {string} id id of the participant to kick */ JitsiConference.prototype.kickParticipant = function (id) { var participant = this.getParticipantById(id); if (!participant) { return; } this.room.kick(participant.getJid()); }; /** * Kick participant from this conference. * @param {string} id id of the participant to kick */ JitsiConference.prototype.muteParticipant = function (id) { var participant = this.getParticipantById(id); if (!participant) { return; } this.room.muteParticipant(participant.getJid(), true); }; JitsiConference.prototype.onMemberJoined = function (jid, nick, role) { var id = Strophe.getResourceFromJid(jid); if (id === 'focus' || this.myUserId() === id) { return; } var participant = new JitsiParticipant(jid, this, nick); participant._role = role; this.participants[id] = participant; this.eventEmitter.emit(JitsiConferenceEvents.USER_JOINED, id, participant); this.xmpp.connection.disco.info( jid, "node", function(iq) { participant._supportsDTMF = $(iq).find( '>query>feature[var="urn:xmpp:jingle:dtmf:0"]').length > 0; this.updateDTMFSupport(); }.bind(this) ); }; JitsiConference.prototype.onMemberLeft = function (jid) { var id = Strophe.getResourceFromJid(jid); if (id === 'focus' || this.myUserId() === id) { return; } var participant = this.participants[id]; delete this.participants[id]; this.eventEmitter.emit(JitsiConferenceEvents.USER_LEFT, id, participant); }; JitsiConference.prototype.onUserRoleChanged = function (jid, role) { var id = Strophe.getResourceFromJid(jid); var participant = this.getParticipantById(id); if (!participant) { return; } participant._role = role; this.eventEmitter.emit(JitsiConferenceEvents.USER_ROLE_CHANGED, id, role); }; JitsiConference.prototype.onDisplayNameChanged = function (jid, displayName) { var id = Strophe.getResourceFromJid(jid); var participant = this.getParticipantById(id); if (!participant) { return; } participant._displayName = displayName; this.eventEmitter.emit(JitsiConferenceEvents.DISPLAY_NAME_CHANGED, id, displayName); }; JitsiConference.prototype.onTrackAdded = function (track) { var id = track.getParticipantId(); var participant = this.getParticipantById(id); if (!participant) { return; } // add track to 
JitsiParticipant participant._tracks.push(track); var emitter = this.eventEmitter; track.addEventListener( JitsiTrackEvents.TRACK_STOPPED, function () { // remove track from JitsiParticipant var pos = participant._tracks.indexOf(track); if (pos > -1) { participant._tracks.splice(pos, 1); } emitter.emit(JitsiConferenceEvents.TRACK_REMOVED, track); } ); track.addEventListener( JitsiTrackEvents.TRACK_MUTE_CHANGED, function () { emitter.emit(JitsiConferenceEvents.TRACK_MUTE_CHANGED, track); } ); track.addEventListener( JitsiTrackEvents.TRACK_AUDIO_LEVEL_CHANGED, function (audioLevel) { emitter.emit(JitsiConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED, id, audioLevel); } ); this.eventEmitter.emit(JitsiConferenceEvents.TRACK_ADDED, track); }; JitsiConference.prototype.updateDTMFSupport = function () { var somebodySupportsDTMF = false; var participants = this.getParticipants(); // check if at least 1 participant supports DTMF for (var i = 0; i < participants.length; i += 1) { if (participants[i].supportsDTMF()) { somebodySupportsDTMF = true; break; } } if (somebodySupportsDTMF !== this.somebodySupportsDTMF) { this.somebodySupportsDTMF = somebodySupportsDTMF; this.eventEmitter.emit(JitsiConferenceEvents.DTMF_SUPPORT_CHANGED, somebodySupportsDTMF); } }; /** * Allows to check if there is at least one user in the conference * that supports DTMF. * @returns {boolean} true if somebody supports DTMF, false otherwise */ JitsiConference.prototype.isDTMFSupported = function () { return this.somebodySupportsDTMF; }; /** * Returns the local user's ID * @return {string} local user's ID */ JitsiConference.prototype.myUserId = function () { return (this.room && this.room.myroomjid)? Strophe.getResourceFromJid(this.room.myroomjid) : null; }; JitsiConference.prototype.sendTones = function (tones, duration, pause) { if (!this.dtmfManager) { var connection = this.xmpp.connection.jingle.activecall.peerconnection; if (!connection) { logger.warn("cannot sendTones: no conneciton"); return; } var tracks = this.getLocalTracks().filter(function (track) { return track.isAudioTrack(); }); if (!tracks.length) { logger.warn("cannot sendTones: no local audio stream"); return; } this.dtmfManager = new JitsiDTMFManager(tracks[0], connection); } this.dtmfManager.sendTones(tones, duration, pause); }; /** * Returns true if the recording is supproted and false if not. */ JitsiConference.prototype.isRecordingSupported = function () { if(this.room) return this.room.isRecordingSupported(); return false; }; /** * Returns null if the recording is not supported, "on" if the recording started * and "off" if the recording is not started. */ JitsiConference.prototype.getRecordingState = function () { if(this.room) return this.room.getRecordingState(); return "off"; } /** * Returns the url of the recorded video. 
*/ JitsiConference.prototype.getRecordingURL = function () { if(this.room) return this.room.getRecordingURL(); return null; } /** * Starts/stops the recording */ JitsiConference.prototype.toggleRecording = function (options) { if(this.room) return this.room.toggleRecording(options, function (status, error) { this.eventEmitter.emit( JitsiConferenceEvents.RECORDING_STATE_CHANGED, status, error); }.bind(this)); this.eventEmitter.emit( JitsiConferenceEvents.RECORDING_STATE_CHANGED, "error", new Error("The conference is not created yet!")); } /** * Returns true if the SIP calls are supported and false otherwise */ JitsiConference.prototype.isSIPCallingSupported = function () { if(this.room) return this.room.isSIPCallingSupported(); return false; } /** * Dials a number. * @param number the number */ JitsiConference.prototype.dial = function (number) { if(this.room) return this.room.dial(number); return new Promise(function(resolve, reject){ reject(new Error("The conference is not created yet!"))}); } /** * Hangup an existing call */ JitsiConference.prototype.hangup = function () { if(this.room) return this.room.hangup(); return new Promise(function(resolve, reject){ reject(new Error("The conference is not created yet!"))}); } /** * Returns the phone number for joining the conference. */ JitsiConference.prototype.getPhoneNumber = function () { if(this.room) return this.room.getPhoneNumber(); return null; } /** * Returns the pin for joining the conference with phone. */ JitsiConference.prototype.getPhonePin = function () { if(this.room) return this.room.getPhonePin(); return null; } /** * Returns the connection state for the current room. Its ice connection state * for its session. */ JitsiConference.prototype.getConnectionState = function () { if(this.room) return this.room.getConnectionState(); return null; } /** * Make all new participants mute their audio/video on join. * @param policy {Object} object with 2 boolean properties for video and audio: * @param {boolean} audio if audio should be muted. * @param {boolean} video if video should be muted. */ JitsiConference.prototype.setStartMutedPolicy = function (policy) { if (!this.isModerator()) { return; } this.startMutedPolicy = policy; this.room.removeFromPresence("startmuted"); this.room.addToPresence("startmuted", { attributes: { audio: policy.audio, video: policy.video, xmlns: 'http://jitsi.org/jitmeet/start-muted' } }); this.room.sendPresence(); }; /** * Returns current start muted policy * @returns {Object} with 2 proprties - audio and video. */ JitsiConference.prototype.getStartMutedPolicy = function () { return this.startMutedPolicy; }; /** * Check if audio is muted on join. */ JitsiConference.prototype.isStartAudioMuted = function () { return this.startAudioMuted; }; /** * Check if video is muted on join. */ JitsiConference.prototype.isStartVideoMuted = function () { return this.startVideoMuted; }; /** * Get object with internal logs. */ JitsiConference.prototype.getLogs = function () { var data = this.xmpp.getJingleLog(); var metadata = {}; metadata.time = new Date(); metadata.url = window.location.href; metadata.ua = navigator.userAgent; var log = this.xmpp.getXmppLog(); if (log) { metadata.xmpp = log; } data.metadata = metadata; return data; }; /** * Sends the given feedback through CallStats if enabled. * * @param overallFeedback an integer between 1 and 5 indicating the * user feedback * @param detailedFeedback detailed feedback from the user. 
Not yet used */ JitsiConference.prototype.sendFeedback = function(overallFeedback, detailedFeedback){ this.statistics.sendFeedback(overallFeedback, detailedFeedback); } /** * Returns true if the callstats integration is enabled, otherwise returns * false. * * @returns true if the callstats integration is enabled, otherwise returns * false. */ JitsiConference.prototype.isCallstatsEnabled = function () { return this.statistics.isCallstatsEnabled(); } /** * Setups the listeners needed for the conference. * @param conference the conference */ function setupListeners(conference) { conference.xmpp.addListener(XMPPEvents.CALL_INCOMING, function (event) { conference.rtc.onIncommingCall(event); conference.statistics.startRemoteStats(event.peerconnection); }); conference.room.addListener(XMPPEvents.REMOTE_STREAM_RECEIVED, function (data, sid, thessrc) { var track = conference.rtc.createRemoteStream(data, sid, thessrc); if (track) { conference.onTrackAdded(track); } } ); conference.rtc.addListener(RTCEvents.FAKE_VIDEO_TRACK_CREATED, function (track) { conference.onTrackAdded(track); } ); conference.room.addListener(XMPPEvents.AUDIO_MUTED_BY_FOCUS, function (value) { conference.rtc.setAudioMute(value); } ); conference.room.addListener(XMPPEvents.SUBJECT_CHANGED, function (subject) { conference.eventEmitter.emit(JitsiConferenceEvents.SUBJECT_CHANGED, subject); }); conference.room.addListener(XMPPEvents.MUC_JOINED, function () { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_JOINED); }); conference.room.addListener(XMPPEvents.ROOM_JOIN_ERROR, function (pres) { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.CONNECTION_ERROR, pres); }); conference.room.addListener(XMPPEvents.ROOM_CONNECT_ERROR, function (pres) { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.CONNECTION_ERROR, pres); }); conference.room.addListener(XMPPEvents.PASSWORD_REQUIRED, function (pres) { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.PASSWORD_REQUIRED, pres); }); conference.room.addListener(XMPPEvents.AUTHENTICATION_REQUIRED, function () { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.AUTHENTICATION_REQUIRED); }); conference.room.addListener(XMPPEvents.BRIDGE_DOWN, function () { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.VIDEOBRIDGE_NOT_AVAILABLE); }); conference.room.addListener(XMPPEvents.RESERVATION_ERROR, function (code, msg) { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.RESERVATION_ERROR, code, msg); }); conference.room.addListener(XMPPEvents.GRACEFUL_SHUTDOWN, function () { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.GRACEFUL_SHUTDOWN); }); conference.room.addListener(XMPPEvents.JINGLE_FATAL_ERROR, function () { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.JINGLE_FATAL_ERROR); }); conference.room.addListener(XMPPEvents.MUC_DESTROYED, function (reason) { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.CONFERENCE_DESTROYED, reason); }); conference.room.addListener(XMPPEvents.CHAT_ERROR_RECEIVED, function (err, msg) { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_ERROR, JitsiConferenceErrors.CHAT_ERROR, err, msg); }); conference.room.addListener(XMPPEvents.FOCUS_DISCONNECTED, function 
(focus, retrySec) { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.FOCUS_DISCONNECTED, focus, retrySec); }); // FIXME // conference.room.addListener(XMPPEvents.MUC_JOINED, function () { // conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_LEFT); // }); conference.room.addListener(XMPPEvents.KICKED, function () { conference.eventEmitter.emit(JitsiConferenceEvents.KICKED); }); conference.room.addListener(XMPPEvents.MUC_MEMBER_JOINED, conference.onMemberJoined.bind(conference)); conference.room.addListener(XMPPEvents.MUC_MEMBER_LEFT, conference.onMemberLeft.bind(conference)); conference.room.addListener(XMPPEvents.DISPLAY_NAME_CHANGED, conference.onDisplayNameChanged.bind(conference)); conference.room.addListener(XMPPEvents.LOCAL_ROLE_CHANGED, function (role) { conference.eventEmitter.emit(JitsiConferenceEvents.USER_ROLE_CHANGED, conference.myUserId(), role); }); conference.room.addListener(XMPPEvents.MUC_ROLE_CHANGED, conference.onUserRoleChanged.bind(conference)); conference.room.addListener(XMPPEvents.CONNECTION_INTERRUPTED, function () { conference.eventEmitter.emit(JitsiConferenceEvents.CONNECTION_INTERRUPTED); }); conference.room.addListener(XMPPEvents.RECORDING_STATE_CHANGED, function () { conference.eventEmitter.emit( JitsiConferenceEvents.RECORDING_STATE_CHANGED); }); conference.room.addListener(XMPPEvents.PHONE_NUMBER_CHANGED, function () { conference.eventEmitter.emit( JitsiConferenceEvents.PHONE_NUMBER_CHANGED); }); conference.room.addListener(XMPPEvents.CONNECTION_RESTORED, function () { conference.eventEmitter.emit(JitsiConferenceEvents.CONNECTION_RESTORED); }); conference.room.addListener(XMPPEvents.CONFERENCE_SETUP_FAILED, function () { conference.eventEmitter.emit(JitsiConferenceEvents.CONFERENCE_FAILED, JitsiConferenceErrors.SETUP_FAILED); }); conference.room.addListener(AuthenticationEvents.IDENTITY_UPDATED, function (authEnabled, authIdentity) { conference.authEnabled = authEnabled; conference.authIdentity = authIdentity; }); conference.room.addListener(XMPPEvents.MESSAGE_RECEIVED, function (jid, displayName, txt, myJid, ts) { var id = Strophe.getResourceFromJid(jid); conference.eventEmitter.emit(JitsiConferenceEvents.MESSAGE_RECEIVED, id, txt, ts); }); conference.room.addListener(XMPPEvents.PRESENCE_STATUS, function (jid, status) { var id = Strophe.getResourceFromJid(jid); var participant = conference.getParticipantById(id); if (!participant || participant._status === status) { return; } participant._status = status; conference.eventEmitter.emit(JitsiConferenceEvents.USER_STATUS_CHANGED, id, status); }); conference.rtc.addListener(RTCEvents.DOMINANTSPEAKER_CHANGED, function (id) { if(conference.lastDominantSpeaker !== id && conference.room) { conference.lastDominantSpeaker = id; conference.eventEmitter.emit(JitsiConferenceEvents.DOMINANT_SPEAKER_CHANGED, id); } }); conference.rtc.addListener(RTCEvents.LASTN_CHANGED, function (oldValue, newValue) { conference.eventEmitter.emit(JitsiConferenceEvents.IN_LAST_N_CHANGED, oldValue, newValue); }); conference.rtc.addListener(RTCEvents.LASTN_ENDPOINT_CHANGED, function (lastNEndpoints, endpointsEnteringLastN) { conference.eventEmitter.emit(JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED, lastNEndpoints, endpointsEnteringLastN); }); conference.xmpp.addListener(XMPPEvents.PASSWORD_REQUIRED, function () { conference.eventEmitter.emit(JitsiConferenceErrors.PASSWORD_REQUIRED); }); conference.xmpp.addListener(XMPPEvents.START_MUTED_FROM_FOCUS, function (audioMuted, videoMuted) { 
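        // Note: startAudioMuted/startVideoMuted record that the focus (Jicofo)
        // told this client to join muted; they are distinct from
        // startMutedPolicy, which is the moderator-set policy handled by the
        // "startmuted" presence listener below.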
conference.startAudioMuted = audioMuted; conference.startVideoMuted = videoMuted; // mute existing local tracks because this is initial mute from // Jicofo conference.getLocalTracks().forEach(function (track) { if (conference.startAudioMuted && track.isAudioTrack()) { track.mute(); } if (conference.startVideoMuted && track.isVideoTrack()) { track.mute(); } }); conference.eventEmitter.emit(JitsiConferenceEvents.STARTED_MUTED); }); conference.room.addPresenceListener("startmuted", function (data, from) { var isModerator = false; if (conference.myUserId() === from && conference.isModerator()) { isModerator = true; } else { var participant = conference.getParticipantById(from); if (participant && participant.isModerator()) { isModerator = true; } } if (!isModerator) { return; } var startAudioMuted = data.attributes.audio === 'true'; var startVideoMuted = data.attributes.video === 'true'; var updated = false; if (startAudioMuted !== conference.startMutedPolicy.audio) { conference.startMutedPolicy.audio = startAudioMuted; updated = true; } if (startVideoMuted !== conference.startMutedPolicy.video) { conference.startMutedPolicy.video = startVideoMuted; updated = true; } if (updated) { conference.eventEmitter.emit( JitsiConferenceEvents.START_MUTED_POLICY_CHANGED, conference.startMutedPolicy ); } }); conference.rtc.addListener(RTCEvents.AVAILABLE_DEVICES_CHANGED, function (devices) { conference.room.updateDeviceAvailability(devices); }); conference.room.addPresenceListener("devices", function (data, from) { var isAudioAvailable = false; var isVideoAvailable = false; data.children.forEach(function (config) { if (config.tagName === 'audio') { isAudioAvailable = config.value === 'true'; } if (config.tagName === 'video') { isVideoAvailable = config.value === 'true'; } }); var availableDevices; if (conference.myUserId() === from) { availableDevices = conference.availableDevices; } else { var participant = conference.getParticipantById(from); if (!participant) { return; } availableDevices = participant._availableDevices; } var updated = false; if (availableDevices.audio !== isAudioAvailable) { updated = true; availableDevices.audio = isAudioAvailable; } if (availableDevices.video !== isVideoAvailable) { updated = true; availableDevices.video = isVideoAvailable; } if (updated) { conference.eventEmitter.emit(JitsiConferenceEvents.AVAILABLE_DEVICES_CHANGED, from, availableDevices); } }); if(conference.statistics) { //FIXME: Maybe remove event should not be associated with the conference. conference.statistics.addAudioLevelListener(function (ssrc, level) { var userId = null; var jid = conference.room.getJidBySSRC(ssrc); if (!jid) return; conference.rtc.setAudioLevel(jid, level); }); conference.xmpp.addListener(XMPPEvents.DISPOSE_CONFERENCE, function () { conference.statistics.dispose(); }); conference.room.addListener(XMPPEvents.PEERCONNECTION_READY, function (session) { conference.statistics.startCallStats( session, conference.settings); }); conference.room.addListener(XMPPEvents.CONFERENCE_SETUP_FAILED, function () { conference.statistics.sendSetupFailedEvent(); }); conference.on(JitsiConferenceEvents.TRACK_MUTE_CHANGED, function (track) { if(!track.isLocal()) return; var type = (track.getType() === "audio")? 
"audio" : "video"; conference.statistics.sendMuteEvent(track.isMuted(), type); }); conference.room.addListener(XMPPEvents.CREATE_OFFER_FAILED, function (e, pc) { conference.statistics.sendCreateOfferFailed(e, pc); }); conference.room.addListener(XMPPEvents.CREATE_ANSWER_FAILED, function (e, pc) { conference.statistics.sendCreateAnswerFailed(e, pc); }); conference.room.addListener(XMPPEvents.SET_LOCAL_DESCRIPTION_FAILED, function (e, pc) { conference.statistics.sendSetLocalDescFailed(e, pc); } ); conference.room.addListener(XMPPEvents.SET_REMOTE_DESCRIPTION_FAILED, function (e, pc) { conference.statistics.sendSetRemoteDescFailed(e, pc); } ); conference.room.addListener(XMPPEvents.ADD_ICE_CANDIDATE_FAILED, function (e, pc) { conference.statistics.sendAddIceCandidateFailed(e, pc); } ); } } module.exports = JitsiConference; }).call(this,"/JitsiConference.js") },{"./JitsiConferenceErrors":2,"./JitsiConferenceEvents":3,"./JitsiParticipant":8,"./JitsiTrackEvents":10,"./modules/DTMF/JitsiDTMFManager":11,"./modules/RTC/RTC":16,"./modules/settings/Settings":21,"./modules/statistics/statistics":25,"./service/RTC/RTCEvents":131,"./service/authentication/AuthenticationEvents":133,"./service/xmpp/XMPPEvents":137,"events":51,"jitsi-meet-logger":79}],2:[function(require,module,exports){ /** * Enumeration with the errors for the conference. * @type {{string: string}} */ var JitsiConferenceErrors = { /** * Indicates that a password is required in order to join the conference. */ PASSWORD_REQUIRED: "conference.passwordRequired", /** * Indicates that client must be authenticated to create the conference. */ AUTHENTICATION_REQUIRED: "conference.authenticationRequired", /** * Indicates that password cannot be set for this conference. */ PASSWORD_NOT_SUPPORTED: "conference.passwordNotSupported", /** * Indicates that a connection error occurred when trying to join a * conference. */ CONNECTION_ERROR: "conference.connectionError", /** * Indicates that the conference setup failed. */ SETUP_FAILED: "conference.setup_failed", /** * Indicates that there is no available videobridge. */ VIDEOBRIDGE_NOT_AVAILABLE: "conference.videobridgeNotAvailable", /** * Indicates that reservation system returned error. */ RESERVATION_ERROR: "conference.reservationError", /** * Indicates that graceful shutdown happened. */ GRACEFUL_SHUTDOWN: "conference.gracefulShutdown", /** * Indicates that jingle fatal error happened. */ JINGLE_FATAL_ERROR: "conference.jingleFatalError", /** * Indicates that conference has been destroyed. */ CONFERENCE_DESTROYED: "conference.destroyed", /** * Indicates that chat error occurred. */ CHAT_ERROR: "conference.chatError", /** * Indicates that focus error happened. */ FOCUS_DISCONNECTED: "conference.focusDisconnected" /** * Many more errors TBD here. */ }; module.exports = JitsiConferenceErrors; },{}],3:[function(require,module,exports){ /** * Enumeration with the events for the conference. * @type {{string: string}} */ var JitsiConferenceEvents = { /** * A new media track was added to the conference. */ TRACK_ADDED: "conference.trackAdded", /** * The media track was removed from the conference. */ TRACK_REMOVED: "conference.trackRemoved", /** * The dominant speaker was changed. */ DOMINANT_SPEAKER_CHANGED: "conference.dominantSpeaker", /** * A new user joinned the conference. */ USER_JOINED: "conference.userJoined", /** * A user has left the conference. */ USER_LEFT: "conference.userLeft", /** * User role changed. */ USER_ROLE_CHANGED: "conference.roleChanged", /** * User status changed. 
*/ USER_STATUS_CHANGED: "conference.statusChanged", /** * New text message was received. */ MESSAGE_RECEIVED: "conference.messageReceived", /** * A user has changed it display name */ DISPLAY_NAME_CHANGED: "conference.displayNameChanged", /** * Indicates that subject of the conference has changed. */ SUBJECT_CHANGED: "conference.subjectChanged", /** * A participant avatar has changed. */ AVATAR_CHANGED: "conference.avatarChanged", /** * New connection statistics are received. */ CONNECTION_STATS_RECEIVED: "conference.connectionStatsReceived", /** * The Last N set is changed. */ LAST_N_ENDPOINTS_CHANGED: "conference.lastNEndpointsChanged", /** * You are included / excluded in somebody's last N set */ IN_LAST_N_CHANGED: "conference.lastNEndpointsChanged", /** * A media track ( attached to the conference) mute status was changed. */ TRACK_MUTE_CHANGED: "conference.trackMuteChanged", /** * Audio levels of a media track ( attached to the conference) was changed. */ TRACK_AUDIO_LEVEL_CHANGED: "conference.audioLevelsChanged", /** * Indicates that the connection to the conference has been interrupted * for some reason. */ CONNECTION_INTERRUPTED: "conference.connectionInterrupted", /** * Indicates that the connection to the conference has been restored. */ CONNECTION_RESTORED: "conference.connectionRestored", /** * Indicates that conference failed. */ CONFERENCE_FAILED: "conference.failed", /** * Indicates that an error occured. */ CONFERENCE_ERROR: "conference.error", /** * Indicates that conference has been joined. */ CONFERENCE_JOINED: "conference.joined", /** * Indicates that conference has been left. */ CONFERENCE_LEFT: "conference.left", /** * You are kicked from the conference. */ KICKED: "conferenece.kicked", /** * Indicates that start muted settings changed. */ START_MUTED_POLICY_CHANGED: "conference.start_muted_policy_changed", /** * Indicates that the local user has started muted. */ STARTED_MUTED: "conference.started_muted", /** * Indicates that DTMF support changed. */ DTMF_SUPPORT_CHANGED: "conference.dtmfSupportChanged", /** * Indicates that recording state changed. */ RECORDING_STATE_CHANGED: "conference.recordingStateChanged", /** * Indicates that phone number changed. */ PHONE_NUMBER_CHANGED: "conference.phoneNumberChanged", /** * Indicates that available devices changed. */ AVAILABLE_DEVICES_CHANGED: "conference.availableDevicesChanged" }; module.exports = JitsiConferenceEvents; },{}],4:[function(require,module,exports){ var JitsiConference = require("./JitsiConference"); var XMPP = require("./modules/xmpp/xmpp"); /** * Creates new connection object for the Jitsi Meet server side video conferencing service. Provides access to the * JitsiConference interface. * @param appID identification for the provider of Jitsi Meet video conferencing services. * @param token the JWT token used to authenticate with the server(optional) * @param options Object with properties / settings related to connection with the server. * @constructor */ function JitsiConnection(appID, token, options) { this.appID = appID; this.token = token; this.options = options; this.xmpp = new XMPP(options); this.conferences = {}; } /** * Connect the client with the server. * @param options {object} connecting options (for example authentications parameters). */ JitsiConnection.prototype.connect = function (options) { if(!options) options = {}; this.xmpp.connect(options.id, options.password); } /** * Disconnect the client from the server. 
*/ JitsiConnection.prototype.disconnect = function () { // XXX Forward any arguments passed to JitsiConnection.disconnect to // XMPP.disconnect. For example, the caller of JitsiConnection.disconnect // may optionally pass the event which triggered the disconnect in order to // provide the implementation with finer-grained context. var x = this.xmpp; x.disconnect.apply(x, arguments); } /** * This method allows renewal of the tokens if they are expiring. * @param token the new token. */ JitsiConnection.prototype.setToken = function (token) { this.token = token; } /** * Creates and joins new conference. * @param name the name of the conference; if null - a generated name will be * provided from the api * @param options Object with properties / settings related to the conference * that will be created. * @returns {JitsiConference} returns the new conference object. */ JitsiConnection.prototype.initJitsiConference = function (name, options) { this.conferences[name] = new JitsiConference({name: name, config: options, connection: this}); return this.conferences[name]; } /** * Subscribes the passed listener to the event. * @param event {JitsiConnectionEvents} the connection event. * @param listener {Function} the function that will receive the event */ JitsiConnection.prototype.addEventListener = function (event, listener) { this.xmpp.addListener(event, listener); } /** * Unsubscribes the passed handler. * @param event {JitsiConnectionEvents} the connection event. * @param listener {Function} the function that will receive the event */ JitsiConnection.prototype.removeEventListener = function (event, listener) { this.xmpp.removeListener(event, listener); } module.exports = JitsiConnection; },{"./JitsiConference":1,"./modules/xmpp/xmpp":44}],5:[function(require,module,exports){ /** * Enumeration with the errors for the connection. * @type {{string: string}} */ var JitsiConnectionErrors = { /** * Indicates that a password is required in order to join the conference. */ PASSWORD_REQUIRED: "connection.passwordRequired", /** * Indicates that a connection error occurred when trying to join a * conference. */ CONNECTION_ERROR: "connection.connectionError", /** * Not specified errors. */ OTHER_ERROR: "connection.otherError" }; module.exports = JitsiConnectionErrors; },{}],6:[function(require,module,exports){ /** * Enumeration with the events for the connection. * @type {{string: string}} */ var JitsiConnnectionEvents = { /** * Indicates that the connection has been failed for some reason. */ CONNECTION_FAILED: "connection.connectionFailed", /** * Indicates that the connection has been established. */ CONNECTION_ESTABLISHED: "connection.connectionEstablished", /** * Indicates that the connection has been disconnected. */ CONNECTION_DISCONNECTED: "connection.connectionDisconnected", /** * Indicates that the perfomed action cannot be executed because the * connection is not in the correct state(connected, disconnected, etc.) 
 */
    WRONG_STATE: "connection.wrongState"
};
module.exports = JitsiConnnectionEvents;

},{}],7:[function(require,module,exports){
var JitsiConnection = require("./JitsiConnection");
var JitsiConferenceEvents = require("./JitsiConferenceEvents");
var JitsiConnectionEvents = require("./JitsiConnectionEvents");
var JitsiConnectionErrors = require("./JitsiConnectionErrors");
var JitsiConferenceErrors = require("./JitsiConferenceErrors");
var JitsiTrackEvents = require("./JitsiTrackEvents");
var JitsiTrackErrors = require("./JitsiTrackErrors");
var Logger = require("jitsi-meet-logger");
var RTC = require("./modules/RTC/RTC");
var Statistics = require("./modules/statistics/statistics");
var Resolutions = require("./service/RTC/Resolutions");
var ScriptUtil = require("./modules/util/ScriptUtil");

function getLowerResolution(resolution) {
    if(!Resolutions[resolution])
        return null;
    var order = Resolutions[resolution].order;
    var res = null;
    var resName = null;
    for(var i in Resolutions) {
        var tmp = Resolutions[i];
        if (!res || (res.order < tmp.order && tmp.order < order)) {
            resName = i;
            res = tmp;
        }
    }
    return resName;
}

/**
 * Namespace for the interface of Jitsi Meet Library.
 */
var LibJitsiMeet = {
    JitsiConnection: JitsiConnection,
    events: {
        conference: JitsiConferenceEvents,
        connection: JitsiConnectionEvents,
        track: JitsiTrackEvents
    },
    errors: {
        conference: JitsiConferenceErrors,
        connection: JitsiConnectionErrors,
        track: JitsiTrackErrors
    },
    logLevels: Logger.levels,
    /**
     * Array of functions that will receive the GUM error.
     */
    _gumFailedHandler: [],
    init: function (options) {
        options = options || {};
        // Audio levels are enabled unless explicitly disabled via the option.
        Statistics.audioLevelsEnabled = !options.disableAudioLevels;
        return RTC.init(options);
    },
    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    isDesktopSharingEnabled: function () {
        return RTC.isDesktopSharingEnabled();
    },
    setLogLevel: function (level) {
        Logger.setLogLevel(level);
    },
    /**
     * Creates the media tracks and returns them through a Promise.
     * @param options Object with properties / settings specifying the tracks
     * which should be created, or some additional configuration (the
     * resolution, for example).
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {bool} options.dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream,
     * type: "audio" or "video", videoType: "camera" or "desktop"}
     * will be returned through the Promise, otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {Promise.<{Array.<JitsiTrack>}, JitsiConferenceError>}
     * A promise that returns an array of created JitsiTracks if resolved,
     * or a JitsiConferenceError if rejected.
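     *
     * Usage sketch (not part of the original docs; the element id is a
     * placeholder and a jQuery-wrapped container is assumed, as elsewhere in
     * this build):
     *
     *   JitsiMeetJS.createLocalTracks({ devices: ["audio", "video"], resolution: "360" })
     *       .then(function (tracks) {
     *           tracks.forEach(function (track) { track.attach($("#localMedia")); });
     *       })
     *       .catch(function (error) { console.error("createLocalTracks failed", error); });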
*/ createLocalTracks: function (options) { return RTC.obtainAudioAndVideoPermissions(options || {}).then( function(tracks) { if(!RTC.options.disableAudioLevels) for(var i = 0; i < tracks.length; i++) { var track = tracks[i]; var mStream = track.getOriginalStream(); if(track.getType() === "audio"){ Statistics.startLocalStats(mStream, track.setAudioLevel.bind(track)); track.addEventListener( JitsiTrackEvents.TRACK_STOPPED, function(){ Statistics.stopLocalStats(mStream); }); } } return tracks; }).catch(function (error) { this._gumFailedHandler.forEach(function (handler) { handler(error); }); if(!this._gumFailedHandler.length) Statistics.sendGetUserMediaFailed(error); if(error === JitsiTrackErrors.UNSUPPORTED_RESOLUTION) { var oldResolution = options.resolution || '360'; var newResolution = getLowerResolution(oldResolution); if(newResolution === null) return Promise.reject(error); options.resolution = newResolution; return LibJitsiMeet.createLocalTracks(options); } return Promise.reject(error); }.bind(this)); }, /** * Checks if its possible to enumerate available cameras/micropones. * @returns {boolean} true if available, false otherwise. */ isDeviceListAvailable: function () { return RTC.isDeviceListAvailable(); }, /** * Returns true if changing the camera / microphone device is supported and * false if not. * @returns {boolean} true if available, false otherwise. */ isDeviceChangeAvailable: function () { return RTC.isDeviceChangeAvailable(); }, enumerateDevices: function (callback) { RTC.enumerateDevices(callback); }, /** * Represents a hub/namespace for utility functionality which may be of * interest to LibJitsiMeet clients. */ util: { ScriptUtil: ScriptUtil, }, }; //Setups the promise object. window.Promise = window.Promise || require("es6-promise").Promise; module.exports = LibJitsiMeet; },{"./JitsiConferenceErrors":2,"./JitsiConferenceEvents":3,"./JitsiConnection":4,"./JitsiConnectionErrors":5,"./JitsiConnectionEvents":6,"./JitsiTrackErrors":9,"./JitsiTrackEvents":10,"./modules/RTC/RTC":16,"./modules/statistics/statistics":25,"./modules/util/ScriptUtil":27,"./service/RTC/Resolutions":132,"es6-promise":72,"jitsi-meet-logger":79}],8:[function(require,module,exports){ /* global Strophe */ /** * Represents a participant in (a member of) a conference. */ function JitsiParticipant(jid, conference, displayName){ this._jid = jid; this._id = Strophe.getResourceFromJid(jid); this._conference = conference; this._displayName = displayName; this._supportsDTMF = false; this._tracks = []; this._role = 'none'; this._status = null; this._availableDevices = { audio: undefined, video: undefined }; } /** * @returns {JitsiConference} The conference that this participant belongs to. */ JitsiParticipant.prototype.getConference = function() { return this._conference; }; /** * @returns {Array.} The list of media tracks for this participant. */ JitsiParticipant.prototype.getTracks = function() { return this._tracks; }; /** * @returns {String} The ID of this participant. */ JitsiParticipant.prototype.getId = function() { return this._id; }; /** * @returns {String} The JID of this participant. */ JitsiParticipant.prototype.getJid = function() { return this._jid; }; /** * @returns {String} The human-readable display name of this participant. */ JitsiParticipant.prototype.getDisplayName = function() { return this._displayName; }; /** * @returns {String} The status of the participant. 
 */
JitsiParticipant.prototype.getStatus = function () {
    return this._status;
};

/**
 * @returns {Boolean} Whether this participant is a moderator or not.
 */
JitsiParticipant.prototype.isModerator = function() {
    return this._role === 'moderator';
};

// Gets a link to an etherpad instance advertised by the participant?
//JitsiParticipant.prototype.getEtherpad = function() {
//
//}

/*
 * @returns {Boolean} Whether this participant has muted their audio.
 */
JitsiParticipant.prototype.isAudioMuted = function() {
    // Note: reduce passes the accumulator first, then the current track.
    return this.getTracks().reduce(function (isAudioMuted, track) {
        return isAudioMuted && (track.isVideoTrack() || track.isMuted());
    }, true);
};

/*
 * @returns {Boolean} Whether this participant has muted their video.
 */
JitsiParticipant.prototype.isVideoMuted = function() {
    return this.getTracks().reduce(function (isVideoMuted, track) {
        return isVideoMuted && (track.isAudioTrack() || track.isMuted());
    }, true);
};

/*
 * @returns {???} The latest statistics reported by this participant
 * (i.e. info used to populate the GSM bars)
 * TODO: do we expose this or handle it internally?
 */
JitsiParticipant.prototype.getLatestStats = function() {
};

/**
 * @returns {String} The role of this participant.
 */
JitsiParticipant.prototype.getRole = function() {
    return this._role;
};

/*
 * @returns {Boolean} Whether this participant is
 * the conference focus (i.e. jicofo).
 */
JitsiParticipant.prototype.isFocus = function() {
};

/*
 * @returns {Boolean} Whether this participant is
 * a conference recorder (i.e. jirecon).
 */
JitsiParticipant.prototype.isRecorder = function() {
};

/*
 * @returns {Boolean} Whether this participant is a SIP gateway (i.e. jigasi).
 */
JitsiParticipant.prototype.isSipGateway = function() {
};

/**
 * @returns {Boolean} Whether this participant
 * is currently sharing their screen.
 */
JitsiParticipant.prototype.isScreenSharing = function() {
};

/**
 * @returns {String} The user agent of this participant
 * (i.e. browser userAgent string).
 */
JitsiParticipant.prototype.getUserAgent = function() {
};

/**
 * Kicks the participant from the conference (requires certain privileges).
 */
JitsiParticipant.prototype.kick = function() {
};

/**
 * Asks this participant to mute themselves.
 */
JitsiParticipant.prototype.askToMute = function() {
};

JitsiParticipant.prototype.supportsDTMF = function () {
    return this._supportsDTMF;
};

module.exports = JitsiParticipant;

},{}],9:[function(require,module,exports){
module.exports = {
    /**
     * Returns JitsiTrackErrors based on the error object passed by GUM
     * @param error the error
     * @param {Array} devices Array with the requested devices
     */
    parseError: function (error, devices) {
        devices = devices || [];
        if (typeof error == "object" && error.constraintName && error.name
            && (error.name == "ConstraintNotSatisfiedError" ||
                error.name == "OverconstrainedError")
            && (error.constraintName == "minWidth" ||
                error.constraintName == "maxWidth" ||
                error.constraintName == "minHeight" ||
                error.constraintName == "maxHeight")
            && devices.indexOf("video") !== -1) {
            return this.UNSUPPORTED_RESOLUTION;
        } else if(typeof error === "object" && error.type === "jitsiError") {
            return error.errorObject;
        } else {
            return this.GENERAL;
        }
    },
    UNSUPPORTED_RESOLUTION: "gum.unsupported_resolution",
    FIREFOX_EXTENSION_NEEDED: "gum.firefox_extension_needed",
    GENERAL: "gum.general"
};

},{}],10:[function(require,module,exports){
var JitsiTrackEvents = {
    /**
     * A media track mute status was changed.
     */
    TRACK_MUTE_CHANGED: "track.trackMuteChanged",
    /**
     * Audio levels of this track were changed.
*/ TRACK_AUDIO_LEVEL_CHANGED: "track.audioLevelsChanged", /** * The media track was removed to the conference. */ TRACK_STOPPED: "track.stopped" }; module.exports = JitsiTrackEvents; },{}],11:[function(require,module,exports){ (function (__filename){ var logger = require("jitsi-meet-logger").getLogger(__filename); function JitsiDTMFManager (localAudio, peerConnection) { var tracks = localAudio._getTracks(); if (!tracks.length) { throw new Error("Failed to initialize DTMFSender: no audio track."); } this.dtmfSender = peerConnection.peerconnection.createDTMFSender(tracks[0]); logger.debug("Initialized DTMFSender"); } JitsiDTMFManager.prototype.sendTones = function (tones, duration, pause) { this.dtmfSender.insertDTMF(tones, (duration || 200), (pause || 200)); }; }).call(this,"/modules/DTMF/JitsiDTMFManager.js") },{"jitsi-meet-logger":79}],12:[function(require,module,exports){ (function (__filename){ /* global config, APP, Strophe */ // cache datachannels to avoid garbage collection // https://code.google.com/p/chromium/issues/detail?id=405545 var logger = require("jitsi-meet-logger").getLogger(__filename); var RTCEvents = require("../../service/RTC/RTCEvents"); /** * Binds "ondatachannel" event listener to given PeerConnection instance. * @param peerConnection WebRTC peer connection instance. */ function DataChannels(peerConnection, emitter) { peerConnection.ondatachannel = this.onDataChannel.bind(this); this.eventEmitter = emitter; this._dataChannels = []; // Sample code for opening new data channel from Jitsi Meet to the bridge. // Although it's not a requirement to open separate channels from both bridge // and peer as single channel can be used for sending and receiving data. // So either channel opened by the bridge or the one opened here is enough // for communication with the bridge. /*var dataChannelOptions = { reliable: true }; var dataChannel = peerConnection.createDataChannel("myChannel", dataChannelOptions); // Can be used only when is in open state dataChannel.onopen = function () { dataChannel.send("My channel !!!"); }; dataChannel.onmessage = function (event) { var msgData = event.data; logger.info("Got My Data Channel Message:", msgData, dataChannel); };*/ }; /** * Callback triggered by PeerConnection when new data channel is opened * on the bridge. * @param event the event info object. */ DataChannels.prototype.onDataChannel = function (event) { var dataChannel = event.channel; var self = this; var lastSelectedEndpoint = null; dataChannel.onopen = function () { logger.info("Data channel opened by the Videobridge!", dataChannel); // Code sample for sending string and/or binary data // Sends String message to the bridge //dataChannel.send("Hello bridge!"); // Sends 12 bytes binary message to the bridge //dataChannel.send(new ArrayBuffer(12)); self.eventEmitter.emit(RTCEvents.DATA_CHANNEL_OPEN); // when the data channel becomes available, tell the bridge about video // selections so that it can do adaptive simulcast, // we want the notification to trigger even if userJid is undefined, // or null. 
        self.handleSelectedEndpointEvent(self.lastSelectedEndpoint);
    };
    dataChannel.onerror = function (error) {
        logger.error("Data Channel Error:", error, dataChannel);
    };
    dataChannel.onmessage = function (event) {
        var data = event.data;
        // JSON
        var obj;
        try {
            obj = JSON.parse(data);
        } catch (e) {
            logger.error(
                "Failed to parse data channel message as JSON: ",
                data, dataChannel);
        }
        if (('undefined' !== typeof(obj)) && (null !== obj)) {
            var colibriClass = obj.colibriClass;
            if ("DominantSpeakerEndpointChangeEvent" === colibriClass) {
                // Endpoint ID from the Videobridge.
                var dominantSpeakerEndpoint = obj.dominantSpeakerEndpoint;
                logger.info(
                    "Data channel new dominant speaker event: ",
                    dominantSpeakerEndpoint);
                self.eventEmitter.emit(RTCEvents.DOMINANTSPEAKER_CHANGED,
                    dominantSpeakerEndpoint);
            } else if ("InLastNChangeEvent" === colibriClass) {
                var oldValue = obj.oldValue;
                var newValue = obj.newValue;
                // Make sure that oldValue and newValue are of type boolean.
                var type;
                if ((type = typeof oldValue) !== 'boolean') {
                    if (type === 'string') {
                        oldValue = (oldValue == "true");
                    } else {
                        oldValue = new Boolean(oldValue).valueOf();
                    }
                }
                if ((type = typeof newValue) !== 'boolean') {
                    if (type === 'string') {
                        newValue = (newValue == "true");
                    } else {
                        newValue = new Boolean(newValue).valueOf();
                    }
                }
                self.eventEmitter.emit(RTCEvents.LASTN_CHANGED, oldValue, newValue);
            } else if ("LastNEndpointsChangeEvent" === colibriClass) {
                // The new/latest list of last-n endpoint IDs.
                var lastNEndpoints = obj.lastNEndpoints;
                // The list of endpoint IDs which are entering the list of
                // last-n at this time i.e. were not in the old list of last-n
                // endpoint IDs.
                var endpointsEnteringLastN = obj.endpointsEnteringLastN;
                logger.info(
                    "Data channel new last-n event: ",
                    lastNEndpoints, endpointsEnteringLastN, obj);
                self.eventEmitter.emit(RTCEvents.LASTN_ENDPOINT_CHANGED,
                    lastNEndpoints, endpointsEnteringLastN, obj);
            } else {
                logger.debug("Data channel JSON-formatted message: ", obj);
                // The received message appears to be appropriately formatted
                // (i.e. is a JSON object which assigns a value to the mandatory
                // property colibriClass) so don't just swallow it, expose it to
                // public consumption.
                self.eventEmitter.emit("rtc.datachannel." + colibriClass, obj);
            }
        }
    };
    dataChannel.onclose = function () {
        logger.info("The Data Channel closed", dataChannel);
        var idx = self._dataChannels.indexOf(dataChannel);
        if (idx > -1) {
            // splice() modifies the array in place; assigning its return value
            // (the removed elements) back would clobber the list.
            self._dataChannels.splice(idx, 1);
        }
    };
    this._dataChannels.push(dataChannel);
};

DataChannels.prototype.handleSelectedEndpointEvent = function (userResource) {
    this.lastSelectedEndpoint = userResource;
    this._onXXXEndpointChanged("selected", userResource);
}

DataChannels.prototype.handlePinnedEndpointEvent = function (userResource) {
    this._onXXXEndpointChanged("pinned", userResource);
}

/**
 * Notifies Videobridge about a change in the value of a specific
 * endpoint-related property such as selected endpoint and pinned endpoint.
 *
 * @param xxx the name of the endpoint-related property whose value changed
 * @param userResource the new value of the endpoint-related property after the
 * change
 */
DataChannels.prototype._onXXXEndpointChanged = function (xxx, userResource) {
    // Derive the correct words from xxx such as selected and Selected, pinned
    // and Pinned.
    var head = xxx.charAt(0);
    var tail = xxx.substring(1);
    var lower = head.toLowerCase() + tail;
    var upper = head.toUpperCase() + tail;
    // Notify Videobridge about the specified endpoint change.
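    // A sketch of the resulting message (the endpoint id "a1b2c3d4" is just a
    // placeholder):
    //   {"colibriClass": "SelectedEndpointChangedEvent", "selectedEndpoint": "a1b2c3d4"}
    // When the selection/pin is cleared, the *Endpoint property is sent as null.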
console.log(lower + ' endpoint changed: ', userResource); this._some(function (dataChannel) { if (dataChannel.readyState == 'open') { console.log( 'sending ' + lower + ' endpoint changed notification to the bridge: ', userResource); var jsonObject = {}; jsonObject.colibriClass = (upper + 'EndpointChangedEvent'); jsonObject[lower + "Endpoint"] = (userResource ? userResource : null); dataChannel.send(JSON.stringify(jsonObject)); return true; } }); } DataChannels.prototype._some = function (callback, thisArg) { var dataChannels = this._dataChannels; if (dataChannels && dataChannels.length !== 0) { if (thisArg) return dataChannels.some(callback, thisArg); else return dataChannels.some(callback); } else { return false; } } module.exports = DataChannels; }).call(this,"/modules/RTC/DataChannels.js") },{"../../service/RTC/RTCEvents":131,"jitsi-meet-logger":79}],13:[function(require,module,exports){ var JitsiTrack = require("./JitsiTrack"); var RTCBrowserType = require("./RTCBrowserType"); var JitsiTrackEvents = require('../../JitsiTrackEvents'); var RTCUtils = require("./RTCUtils"); /** * Represents a single media track (either audio or video). * @constructor */ function JitsiLocalTrack(stream, videoType, resolution, deviceId) { this.videoType = videoType; this.dontFireRemoveEvent = false; this.resolution = resolution; this.deviceId = deviceId; this.startMuted = false; var self = this; JitsiTrack.call(this, null, stream, function () { if(!this.dontFireRemoveEvent) this.eventEmitter.emit( JitsiTrackEvents.TRACK_STOPPED); this.dontFireRemoveEvent = false; }.bind(this)); } JitsiLocalTrack.prototype = Object.create(JitsiTrack.prototype); JitsiLocalTrack.prototype.constructor = JitsiLocalTrack; /** * Mutes / unmutes the track. * @param mute {boolean} if true the track will be muted. Otherwise the track will be unmuted. */ JitsiLocalTrack.prototype._setMute = function (mute) { if (this.isMuted() === mute) { return; } if(!this.rtc) { this.startMuted = mute; return; } var isAudio = this.type === JitsiTrack.AUDIO; this.dontFireRemoveEvent = false; if ((window.location.protocol != "https:") || (isAudio) || this.videoType === "desktop" || // FIXME FF does not support 'removeStream' method used to mute RTCBrowserType.isFirefox()) { var tracks = this._getTracks(); for (var idx = 0; idx < tracks.length; idx++) { tracks[idx].enabled = !mute; } if(isAudio) this.rtc.room.setAudioMute(mute); else this.rtc.room.setVideoMute(mute); this.eventEmitter.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED); } else { if (mute) { this.dontFireRemoveEvent = true; this.rtc.room.removeStream(this.stream, function () {}); RTCUtils.stopMediaStream(this.stream); if(isAudio) this.rtc.room.setAudioMute(mute); else this.rtc.room.setVideoMute(mute); this.stream = null; this.eventEmitter.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED); //FIXME: Maybe here we should set the SRC for the containers to something } else { var self = this; var streamOptions = { devices: (isAudio ? 
["audio"] : ["video"]), resolution: self.resolution }; if (isAudio) { streamOptions['micDeviceId'] = self.deviceId; } else {self.videoType === 'camera'} { streamOptions['cameraDeviceId'] = self.deviceId; } RTCUtils.obtainAudioAndVideoPermissions(streamOptions) .then(function (streams) { var stream = null; for(var i = 0; i < streams.length; i++) { stream = streams[i]; if(stream.type === self.type) { self.stream = stream.stream; self.videoType = stream.videoType; break; } } if(!stream) return; for(var i = 0; i < self.containers.length; i++) { RTCUtils.attachMediaStream( self.containers[i], self.stream); } self.rtc.room.addStream(stream.stream, function () { if(isAudio) self.rtc.room.setAudioMute(mute); else self.rtc.room.setVideoMute(mute); self.eventEmitter.emit( JitsiTrackEvents.TRACK_MUTE_CHANGED); }); }); } } } /** * Stops sending the media track. And removes it from the HTML. * NOTE: Works for local tracks only. */ JitsiLocalTrack.prototype.stop = function () { if(!this.stream) return; if(this.rtc) this.rtc.room.removeStream(this.stream, function () {}); RTCUtils.stopMediaStream(this.stream); this.detach(); } /** * Returns true - if the stream is muted * and false otherwise. * @returns {boolean} true - if the stream is muted * and false otherwise. */ JitsiLocalTrack.prototype.isMuted = function () { if (!this.stream) return true; var tracks = []; var isAudio = this.type === JitsiTrack.AUDIO; if (isAudio) { tracks = this.stream.getAudioTracks(); } else { if (!this.isActive()) return true; tracks = this.stream.getVideoTracks(); } for (var idx = 0; idx < tracks.length; idx++) { if(tracks[idx].enabled) return false; } return true; }; /** * Private method. Updates rtc property of the track. * @param rtc the rtc instance. */ JitsiLocalTrack.prototype._setRTC = function (rtc) { this.rtc = rtc; }; /** * Return true; */ JitsiLocalTrack.prototype.isLocal = function () { return true; } module.exports = JitsiLocalTrack; },{"../../JitsiTrackEvents":10,"./JitsiTrack":15,"./RTCBrowserType":17,"./RTCUtils":18}],14:[function(require,module,exports){ var JitsiTrack = require("./JitsiTrack"); var JitsiTrackEvents = require("../../JitsiTrackEvents"); /** * Represents a single media track (either audio or video). * @param RTC the rtc instance. * @param data object with the stream and some details about it(participant id, video type, etc.) * @param sid sid for the Media Stream * @param ssrc ssrc for the Media Stream * @param eventEmitter the event emitter * @constructor */ function JitsiRemoteTrack(RTC, data, sid, ssrc) { JitsiTrack.call(this, RTC, data.stream, function () { this.eventEmitter.emit(JitsiTrackEvents.TRACK_STOPPED); }.bind(this), data.jitsiTrackType); this.rtc = RTC; this.sid = sid; this.stream = data.stream; this.peerjid = data.peerjid; this.videoType = data.videoType; this.ssrc = ssrc; this.muted = false; if((this.type === JitsiTrack.AUDIO && data.audiomuted) || (this.type === JitsiTrack.VIDEO && data.videomuted)) { this.muted = true; } } JitsiRemoteTrack.prototype = Object.create(JitsiTrack.prototype); JitsiRemoteTrack.prototype.constructor = JitsiRemoteTrack; /** * Sets current muted status and fires an events for the change. * @param value the muted status. */ JitsiRemoteTrack.prototype.setMute = function (value) { if(this.muted === value) return; // we can have a fake video stream if(this.stream) this.stream.muted = value; this.muted = value; this.eventEmitter.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED); }; /** * Returns the current muted status of the track. 
* @returns {boolean|*|JitsiRemoteTrack.muted} true if the track is muted and false otherwise. */ JitsiRemoteTrack.prototype.isMuted = function () { return this.muted; }; /** * Returns the participant id which owns the track. * @returns {string} the id of the participants. */ JitsiRemoteTrack.prototype.getParticipantId = function() { return Strophe.getResourceFromJid(this.peerjid); }; /** * Return false; */ JitsiRemoteTrack.prototype.isLocal = function () { return false; }; delete JitsiRemoteTrack.prototype.stop; module.exports = JitsiRemoteTrack; },{"../../JitsiTrackEvents":10,"./JitsiTrack":15}],15:[function(require,module,exports){ var RTCBrowserType = require("./RTCBrowserType"); var JitsiTrackEvents = require("../../JitsiTrackEvents"); var EventEmitter = require("events"); var RTC = require("./RTCUtils"); /** * This implements 'onended' callback normally fired by WebRTC after the stream * is stopped. There is no such behaviour yet in FF, so we have to add it. * @param jitsiTrack our track object holding the original WebRTC stream object * to which 'onended' handling will be added. */ function implementOnEndedHandling(jitsiTrack) { var stream = jitsiTrack.getOriginalStream(); if(!stream) return; var originalStop = stream.stop; stream.stop = function () { originalStop.apply(stream); if (jitsiTrack.isActive()) { stream.onended(); } }; } /** * Adds onended/oninactive handler to a MediaStream. * @param mediaStream a MediaStream to attach onended/oninactive handler * @param handler the handler */ function addMediaStreamInactiveHandler(mediaStream, handler) { if(RTCBrowserType.isTemasysPluginUsed()) { // themasys //FIXME: Seems that not working properly. if(mediaStream.onended) { mediaStream.onended = handler; } else if(mediaStream.addEventListener) { mediaStream.addEventListener('ended', function () { handler(mediaStream); }); } else if(mediaStream.attachEvent) { mediaStream.attachEvent('ended', function () { handler(mediaStream); }); } } else { if(typeof mediaStream.active !== "undefined") mediaStream.oninactive = handler; else mediaStream.onended = handler; } } /** * Represents a single media track (either audio or video). * @constructor * @param rtc the rtc instance * @param stream the stream * @param streamInactiveHandler the function that will handle * onended/oninactive events of the stream. * @param jitsiTrackType optionally a type can be specified. * This is the case where we are creating a dummy track with no stream * Currently this happens when a remote side is starting with video muted */ function JitsiTrack(rtc, stream, streamInactiveHandler, jitsiTrackType) { /** * Array with the HTML elements that are displaying the streams. * @type {Array} */ this.containers = []; this.rtc = rtc; this.stream = stream; this.eventEmitter = new EventEmitter(); this.audioLevel = -1; this.type = jitsiTrackType || (this.stream.getVideoTracks().length > 0)? JitsiTrack.VIDEO : JitsiTrack.AUDIO; if(this.type == JitsiTrack.AUDIO) { this._getTracks = function () { return this.stream.getAudioTracks(); }.bind(this); } else { this._getTracks = function () { return this.stream.getVideoTracks(); }.bind(this); } if (RTCBrowserType.isFirefox() && this.stream) { implementOnEndedHandling(this); } if(stream) addMediaStreamInactiveHandler(stream, streamInactiveHandler); } /** * JitsiTrack video type. * @type {string} */ JitsiTrack.VIDEO = "video"; /** * JitsiTrack audio type. * @type {string} */ JitsiTrack.AUDIO = "audio"; /** * Returns the type (audio or video) of this track. 
*/ JitsiTrack.prototype.getType = function() { return this.type; }; /** * Check if this is audiotrack. */ JitsiTrack.prototype.isAudioTrack = function () { return this.getType() === JitsiTrack.AUDIO; }; /** * Check if this is videotrack. */ JitsiTrack.prototype.isVideoTrack = function () { return this.getType() === JitsiTrack.VIDEO; }; /** * Returns the RTCMediaStream from the browser (?). */ JitsiTrack.prototype.getOriginalStream = function() { return this.stream; } /** * Mutes the track. */ JitsiTrack.prototype.mute = function () { this._setMute(true); } /** * Unmutes the stream. */ JitsiTrack.prototype.unmute = function () { this._setMute(false); } /** * Attaches the MediaStream of this track to an HTML container (?). * Adds the container to the list of containers that are displaying the track. * @param container the HTML container */ JitsiTrack.prototype.attach = function (container) { if(this.stream) require("./RTCUtils").attachMediaStream(container, this.stream); this.containers.push(container); } /** * Removes the track from the passed HTML container. * @param container the HTML container. If null all containers are removed. */ JitsiTrack.prototype.detach = function (container) { for(var i = 0; i < this.containers.length; i++) { if(this.containers[i].is(container)) { this.containers.splice(i,1); } if(!container) { this.containers[i].find(">video").remove(); } } if(container) $(container).find(">video").remove(); } /** * Stops sending the media track. And removes it from the HTML. * NOTE: Works for local tracks only. */ JitsiTrack.prototype.stop = function () { } /** * Returns true if this is a video track and the source of the video is a * screen capture as opposed to a camera. */ JitsiTrack.prototype.isScreenSharing = function(){ } /** * Returns id of the track. * @returns {string} id of the track or null if this is fake track. */ JitsiTrack.prototype._getId = function () { var tracks = this.stream.getTracks(); if(!tracks || tracks.length === 0) return null; return tracks[0].id; }; /** * Returns id of the track. * @returns {string} id of the track or null if this is fake track. */ JitsiTrack.prototype.getId = function () { if(this.stream) return RTC.getStreamID(this.stream); else return null; }; /** * Checks whether the MediaStream is avtive/not ended. * When there is no check for active we don't have information and so * will return that stream is active (in case of FF). * @returns {boolean} whether MediaStream is active. */ JitsiTrack.prototype.isActive = function () { if((typeof this.stream.active !== "undefined")) return this.stream.active; else return true; }; /** * Attaches a handler for events(For example - "audio level changed".). * All possible event are defined in JitsiTrackEvents. * @param eventId the event ID. * @param handler handler for the event. */ JitsiTrack.prototype.on = function (eventId, handler) { if(this.eventEmitter) this.eventEmitter.on(eventId, handler); } /** * Removes event listener * @param eventId the event ID. 
* @param [handler] optional, the specific handler to unbind */ JitsiTrack.prototype.off = function (eventId, handler) { if(this.eventEmitter) this.eventEmitter.removeListener(eventId, handler); } // Common aliases for event emitter JitsiTrack.prototype.addEventListener = JitsiTrack.prototype.on; JitsiTrack.prototype.removeEventListener = JitsiTrack.prototype.off; /** * Sets the audio level for the stream * @param audioLevel the new audio level */ JitsiTrack.prototype.setAudioLevel = function (audioLevel) { if(this.audioLevel !== audioLevel) { this.eventEmitter.emit(JitsiTrackEvents.TRACK_AUDIO_LEVEL_CHANGED, audioLevel); this.audioLevel = audioLevel; } } module.exports = JitsiTrack; },{"../../JitsiTrackEvents":10,"./RTCBrowserType":17,"./RTCUtils":18,"events":51}],16:[function(require,module,exports){ /* global APP */ var EventEmitter = require("events"); var RTCBrowserType = require("./RTCBrowserType"); var RTCUtils = require("./RTCUtils.js"); var JitsiTrack = require("./JitsiTrack"); var JitsiLocalTrack = require("./JitsiLocalTrack.js"); var DataChannels = require("./DataChannels"); var JitsiRemoteTrack = require("./JitsiRemoteTrack.js"); var MediaStreamType = require("../../service/RTC/MediaStreamTypes"); var RTCEvents = require("../../service/RTC/RTCEvents.js"); function createLocalTracks(streams, options) { var newStreams = [] var deviceId = null; for (var i = 0; i < streams.length; i++) { if (streams[i].type === 'audio') { deviceId = options.micDeviceId; } else if (streams[i].videoType === 'camera'){ deviceId = options.cameraDeviceId; } var localStream = new JitsiLocalTrack(streams[i].stream, streams[i].videoType, streams[i].resolution, deviceId); newStreams.push(localStream); if (streams[i].isMuted === true) localStream.setMute(true); } return newStreams; } function RTC(room, options) { this.room = room; this.localStreams = []; //FIXME: we should start removing those streams. //FIXME: We should support multiple streams per jid. this.remoteStreams = {}; this.localAudio = null; this.localVideo = null; this.eventEmitter = new EventEmitter(); var self = this; this.options = options || {}; room.addPresenceListener("videomuted", function (values, from) { if(self.remoteStreams[from]) { // If there is no video track, but we receive it is muted, // we need to create a dummy track which we will mute, so we can // notify interested about the muting if(!self.remoteStreams[from][JitsiTrack.VIDEO]) { var track = self.createRemoteStream( {peerjid:room.roomjid + "/" + from, videoType:"camera", jitsiTrackType:JitsiTrack.VIDEO}, null, null); self.eventEmitter .emit(RTCEvents.FAKE_VIDEO_TRACK_CREATED, track); } self.remoteStreams[from][JitsiTrack.VIDEO] .setMute(values.value == "true"); } }); room.addPresenceListener("audiomuted", function (values, from) { if(self.remoteStreams[from]) { self.remoteStreams[from][JitsiTrack.AUDIO] .setMute(values.value == "true"); } }); } /** * Creates the local MediaStreams. * @param {Object} [options] optional parameters * @param {Array} options.devices the devices that will be requested * @param {string} options.resolution resolution constraints * @param {bool} options.dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, * type: "audio" or "video", videoType: "camera" or "desktop"} * will be returned trough the Promise, otherwise JitsiTrack objects will be returned. 
* @param {string} options.cameraDeviceId * @param {string} options.micDeviceId * @returns {*} Promise object that will receive the new JitsiTracks */ RTC.obtainAudioAndVideoPermissions = function (options) { return RTCUtils.obtainAudioAndVideoPermissions(options).then(function (streams) { return createLocalTracks(streams, options); }); } RTC.prototype.onIncommingCall = function(event) { if(this.options.config.openSctp) this.dataChannels = new DataChannels(event.peerconnection, this.eventEmitter); for(var i = 0; i < this.localStreams.length; i++) if(this.localStreams[i]) { this.room.addStream(this.localStreams[i].getOriginalStream(), function () {}); } } RTC.prototype.selectedEndpoint = function (id) { if(this.dataChannels) this.dataChannels.handleSelectedEndpointEvent(id); } RTC.prototype.pinEndpoint = function (id) { if(this.dataChannels) this.dataChannels.handlePinnedEndpointEvent(id); } RTC.prototype.addListener = function (type, listener) { this.eventEmitter.on(type, listener); }; RTC.prototype.removeListener = function (eventType, listener) { this.eventEmitter.removeListener(eventType, listener); }; RTC.addListener = function (eventType, listener) { RTCUtils.addListener(eventType, listener); } RTC.removeListener = function (eventType, listener) { RTCUtils.removeListener(eventType, listener) } RTC.isRTCReady = function () { return RTCUtils.isRTCReady(); } RTC.init = function (options) { this.options = options || {}; return RTCUtils.init(this.options); } RTC.getDeviceAvailability = function () { return RTCUtils.getDeviceAvailability(); } RTC.prototype.addLocalStream = function (stream) { this.localStreams.push(stream); stream._setRTC(this); if (stream.type == "audio") { this.localAudio = stream; } else { this.localVideo = stream; } }; /** * Set mute for all local audio streams attached to the conference. * @param value the mute value */ RTC.prototype.setAudioMute = function (value) { for(var i = 0; i < this.localStreams.length; i++) { var stream = this.localStreams[i]; if(stream.getType() !== "audio") { continue; } stream._setMute(value); } } RTC.prototype.removeLocalStream = function (track) { var pos = this.localStreams.indexOf(track); if (pos > -1) { this.localStreams.splice(pos, 1); } }; RTC.prototype.createRemoteStream = function (data, sid, thessrc) { var remoteStream = new JitsiRemoteTrack(this, data, sid, thessrc); if(!data.peerjid) return; var jid = Strophe.getResourceFromJid(data.peerjid); if(!this.remoteStreams[jid]) { this.remoteStreams[jid] = {}; } this.remoteStreams[jid][remoteStream.type]= remoteStream; return remoteStream; }; RTC.getPCConstraints = function () { return RTCUtils.pc_constraints; }; RTC.attachMediaStream = function (elSelector, stream) { RTCUtils.attachMediaStream(elSelector, stream); }; RTC.getStreamID = function (stream) { return RTCUtils.getStreamID(stream); }; RTC.getVideoSrc = function (element) { return RTCUtils.getVideoSrc(element); }; /** * Returns true if retrieving the the list of input devices is supported and * false if not. */ RTC.isDeviceListAvailable = function () { return RTCUtils.isDeviceListAvailable(); }; /** * Returns true if changing the camera / microphone device is supported and * false if not. */ RTC.isDeviceChangeAvailable = function () { return RTCUtils.isDeviceChangeAvailable(); } /** * Allows to receive list of available cameras/microphones. 
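 *
 * Illustrative sketch (assumes RTC.init(...) has already been called); each
 * entry uses the `enumerateDevices` format produced by this module, i.e.
 * { deviceId, label, kind, facing, groupId }.
 * @example
 * RTC.enumerateDevices(function (devices) {
 *     devices.forEach(function (d) {
 *         console.log(d.kind + ': ' + d.label);
 *     });
 * });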
* @param {function} callback would receive array of devices as an argument */ RTC.enumerateDevices = function (callback) { RTCUtils.enumerateDevices(callback); }; RTC.setVideoSrc = function (element, src) { RTCUtils.setVideoSrc(element, src); }; /** * A method to handle stopping of the stream. * One point to handle the differences in various implementations. * @param mediaStream MediaStream object to stop. */ RTC.stopMediaStream = function (mediaStream) { RTCUtils.stopMediaStream(mediaStream); }; /** * Returns whether the desktop sharing is enabled or not. * @returns {boolean} */ RTC.isDesktopSharingEnabled = function () { return RTCUtils.isDesktopSharingEnabled(); } RTC.prototype.getVideoElementName = function () { return RTCBrowserType.isTemasysPluginUsed() ? 'object' : 'video'; }; RTC.prototype.dispose = function() { }; RTC.prototype.switchVideoStreams = function (newStream) { this.localVideo.stream = newStream; this.localStreams = []; //in firefox we have only one stream object if (this.localAudio.getOriginalStream() != newStream) this.localStreams.push(this.localAudio); this.localStreams.push(this.localVideo); }; RTC.prototype.setAudioLevel = function (jid, audioLevel) { if(!jid) return; var resource = Strophe.getResourceFromJid(jid); if(this.remoteStreams[resource] && this.remoteStreams[resource][JitsiTrack.AUDIO]) this.remoteStreams[resource][JitsiTrack.AUDIO].setAudioLevel(audioLevel); } module.exports = RTC; },{"../../service/RTC/MediaStreamTypes":130,"../../service/RTC/RTCEvents.js":131,"./DataChannels":12,"./JitsiLocalTrack.js":13,"./JitsiRemoteTrack.js":14,"./JitsiTrack":15,"./RTCBrowserType":17,"./RTCUtils.js":18,"events":51}],17:[function(require,module,exports){ var currentBrowser; var browserVersion; var isAndroid; var RTCBrowserType = { RTC_BROWSER_CHROME: "rtc_browser.chrome", RTC_BROWSER_OPERA: "rtc_browser.opera", RTC_BROWSER_FIREFOX: "rtc_browser.firefox", RTC_BROWSER_IEXPLORER: "rtc_browser.iexplorer", RTC_BROWSER_SAFARI: "rtc_browser.safari", getBrowserType: function () { return currentBrowser; }, isChrome: function () { return currentBrowser === RTCBrowserType.RTC_BROWSER_CHROME; }, isOpera: function () { return currentBrowser === RTCBrowserType.RTC_BROWSER_OPERA; }, isFirefox: function () { return currentBrowser === RTCBrowserType.RTC_BROWSER_FIREFOX; }, isIExplorer: function () { return currentBrowser === RTCBrowserType.RTC_BROWSER_IEXPLORER; }, isSafari: function () { return currentBrowser === RTCBrowserType.RTC_BROWSER_SAFARI; }, isTemasysPluginUsed: function () { return RTCBrowserType.isIExplorer() || RTCBrowserType.isSafari(); }, getFirefoxVersion: function () { return RTCBrowserType.isFirefox() ? browserVersion : null; }, getChromeVersion: function () { return RTCBrowserType.isChrome() ? browserVersion : null; }, usesPlanB: function() { return RTCBrowserType.isChrome() || RTCBrowserType.isOpera() || RTCBrowserType.isTemasysPluginUsed(); }, usesUnifiedPlan: function() { return RTCBrowserType.isFirefox(); }, /** * Whether the browser is running on an android device. */ isAndroid: function() { return isAndroid; } // Add version getters for other browsers when needed }; // detectOpera() must be called before detectChrome() !!! 
// otherwise Opera wil be detected as Chrome function detectChrome() { if (navigator.webkitGetUserMedia) { currentBrowser = RTCBrowserType.RTC_BROWSER_CHROME; var userAgent = navigator.userAgent.toLowerCase(); // We can assume that user agent is chrome, because it's // enforced when 'ext' streaming method is set var ver = parseInt(userAgent.match(/chrome\/(\d+)\./)[1], 10); console.log("This appears to be Chrome, ver: " + ver); return ver; } return null; } function detectOpera() { var userAgent = navigator.userAgent; if (userAgent.match(/Opera|OPR/)) { currentBrowser = RTCBrowserType.RTC_BROWSER_OPERA; var version = userAgent.match(/(Opera|OPR) ?\/?(\d+)\.?/)[2]; console.info("This appears to be Opera, ver: " + version); return version; } return null; } function detectFirefox() { if (navigator.mozGetUserMedia) { currentBrowser = RTCBrowserType.RTC_BROWSER_FIREFOX; var version = parseInt( navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10); console.log('This appears to be Firefox, ver: ' + version); return version; } return null; } function detectSafari() { if ((/^((?!chrome).)*safari/i.test(navigator.userAgent))) { currentBrowser = RTCBrowserType.RTC_BROWSER_SAFARI; console.info("This appears to be Safari"); // FIXME detect Safari version when needed return 1; } return null; } function detectIE() { var version; var ua = window.navigator.userAgent; var msie = ua.indexOf('MSIE '); if (msie > 0) { // IE 10 or older => return version number version = parseInt(ua.substring(msie + 5, ua.indexOf('.', msie)), 10); } var trident = ua.indexOf('Trident/'); if (!version && trident > 0) { // IE 11 => return version number var rv = ua.indexOf('rv:'); version = parseInt(ua.substring(rv + 3, ua.indexOf('.', rv)), 10); } var edge = ua.indexOf('Edge/'); if (!version && edge > 0) { // IE 12 => return version number version = parseInt(ua.substring(edge + 5, ua.indexOf('.', edge)), 10); } if (version) { currentBrowser = RTCBrowserType.RTC_BROWSER_IEXPLORER; console.info("This appears to be IExplorer, ver: " + version); } return version; } function detectBrowser() { var version; var detectors = [ detectOpera, detectChrome, detectFirefox, detectIE, detectSafari ]; // Try all browser detectors for (var i = 0; i < detectors.length; i++) { version = detectors[i](); if (version) return version; } console.warn("Browser type defaults to Safari ver 1"); currentBrowser = RTCBrowserType.RTC_BROWSER_SAFARI; return 1; } browserVersion = detectBrowser(); isAndroid = navigator.userAgent.indexOf('Android') != -1; module.exports = RTCBrowserType; },{}],18:[function(require,module,exports){ (function (__filename){ /* global config, require, attachMediaStream, getUserMedia, RTCPeerConnection, RTCSessionDescription, RTCIceCandidate, MediaStreamTrack, mozRTCPeerConnection, mozRTCSessionDescription, mozRTCIceCandidate, webkitRTCPeerConnection, webkitMediaStream, webkitURL */ /* jshint -W101 */ var logger = require("jitsi-meet-logger").getLogger(__filename); var RTCBrowserType = require("./RTCBrowserType"); var Resolutions = require("../../service/RTC/Resolutions"); var RTCEvents = require("../../service/RTC/RTCEvents"); var AdapterJS = require("./adapter.screenshare"); var SDPUtil = require("../xmpp/SDPUtil"); var EventEmitter = require("events"); var screenObtainer = require("./ScreenObtainer"); var JitsiTrackErrors = require("../../JitsiTrackErrors"); var eventEmitter = new EventEmitter(); var devices = { audio: true, video: true }; var rtcReady = false; function setResolutionConstraints(constraints, resolution) { var 
isAndroid = RTCBrowserType.isAndroid(); if (Resolutions[resolution]) { constraints.video.mandatory.minWidth = Resolutions[resolution].width; constraints.video.mandatory.minHeight = Resolutions[resolution].height; } else if (isAndroid) { // FIXME can't remember if the purpose of this was to always request // low resolution on Android ? if yes it should be moved up front constraints.video.mandatory.minWidth = 320; constraints.video.mandatory.minHeight = 240; constraints.video.mandatory.maxFrameRate = 15; } if (constraints.video.mandatory.minWidth) constraints.video.mandatory.maxWidth = constraints.video.mandatory.minWidth; if (constraints.video.mandatory.minHeight) constraints.video.mandatory.maxHeight = constraints.video.mandatory.minHeight; } /** * @param {string[]} um required user media types * * @param {Object} [options={}] optional parameters * @param {string} options.resolution * @param {number} options.bandwidth * @param {number} options.fps * @param {string} options.desktopStream * @param {string} options.cameraDeviceId * @param {string} options.micDeviceId * @param {bool} firefox_fake_device */ function getConstraints(um, options) { var constraints = {audio: false, video: false}; if (um.indexOf('video') >= 0) { // same behaviour as true constraints.video = { mandatory: {}, optional: [] }; if (options.cameraDeviceId) { constraints.video.optional.push({ sourceId: options.cameraDeviceId }); } constraints.video.optional.push({ googLeakyBucket: true }); setResolutionConstraints(constraints, options.resolution); } if (um.indexOf('audio') >= 0) { if (!RTCBrowserType.isFirefox()) { // same behaviour as true constraints.audio = { mandatory: {}, optional: []}; if (options.micDeviceId) { constraints.audio.optional.push({ sourceId: options.micDeviceId }); } // if it is good enough for hangouts... 
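// For illustration: with um = ['audio'] and a (hypothetical)
// options.micDeviceId of 'abc', the audio constraints built here take the form
//   { mandatory: {}, optional: [ { sourceId: 'abc' },
//     { googEchoCancellation: true }, { googAutoGainControl: true },
//     and the remaining goog* flags pushed below ] }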
constraints.audio.optional.push( {googEchoCancellation: true}, {googAutoGainControl: true}, {googNoiseSupression: true}, {googHighpassFilter: true}, {googNoisesuppression2: true}, {googEchoCancellation2: true}, {googAutoGainControl2: true} ); } else { if (options.micDeviceId) { constraints.audio = { mandatory: {}, optional: [{ sourceId: options.micDeviceId }]}; } else { constraints.audio = true; } } } if (um.indexOf('screen') >= 0) { if (RTCBrowserType.isChrome()) { constraints.video = { mandatory: { chromeMediaSource: "screen", googLeakyBucket: true, maxWidth: window.screen.width, maxHeight: window.screen.height, maxFrameRate: 3 }, optional: [] }; } else if (RTCBrowserType.isTemasysPluginUsed()) { constraints.video = { optional: [ { sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey } ] }; } else if (RTCBrowserType.isFirefox()) { constraints.video = { mozMediaSource: "window", mediaSource: "window" }; } else { logger.error( "'screen' WebRTC media source is supported only in Chrome" + " and with Temasys plugin"); } } if (um.indexOf('desktop') >= 0) { constraints.video = { mandatory: { chromeMediaSource: "desktop", chromeMediaSourceId: options.desktopStream, googLeakyBucket: true, maxWidth: window.screen.width, maxHeight: window.screen.height, maxFrameRate: 3 }, optional: [] }; } if (options.bandwidth) { if (!constraints.video) { //same behaviour as true constraints.video = {mandatory: {}, optional: []}; } constraints.video.optional.push({bandwidth: options.bandwidth}); } if (options.fps) { // for some cameras it might be necessary to request 30fps // so they choose 30fps mjpg over 10fps yuy2 if (!constraints.video) { // same behaviour as true; constraints.video = {mandatory: {}, optional: []}; } constraints.video.mandatory.minFrameRate = options.fps; } // we turn audio for both audio and video tracks, the fake audio & video seems to work // only when enabled in one getUserMedia call, we cannot get fake audio separate by fake video // this later can be a problem with some of the tests if(RTCBrowserType.isFirefox() && options.firefox_fake_device) { // seems to be fixed now, removing this experimental fix, as having // multiple audio tracks brake the tests //constraints.audio = true; constraints.fake = true; } return constraints; } function setAvailableDevices(um, available) { if (um.indexOf("video") != -1) { devices.video = available; } if (um.indexOf("audio") != -1) { devices.audio = available; } eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, devices); } // In case of IE we continue from 'onReady' callback // passed to RTCUtils constructor. It will be invoked by Temasys plugin // once it is initialized. function onReady (options, GUM) { rtcReady = true; eventEmitter.emit(RTCEvents.RTC_READY, true); screenObtainer.init(options, GUM); } /** * Apply function with arguments if function exists. * Do nothing if function not provided. * @param {function} [fn] function to apply * @param {Array} [args=[]] arguments for function */ function maybeApply(fn, args) { if (fn) { fn.apply(null, args || []); } } var getUserMediaStatus = { initialized: false, callbacks: [] }; /** * Wrap `getUserMedia` to allow others to know if it was executed at least * once or not. Wrapper function uses `getUserMediaStatus` object. 
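 *
 * Illustrative sketch of the intended wiring; `nativeGUM` stands in for the
 * browser's own getUserMedia implementation:
 * @example
 * var gum = wrapGetUserMedia(nativeGUM);
 * afterUserMediaInitialized(function () {
 *     // runs once the wrapped getUserMedia has succeeded at least once
 *     console.log('getUserMedia has been executed');
 * });
 * gum({ audio: true },
 *     function (stream) { console.log('got stream', stream.id); },
 *     function (error) { console.error(error); });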
* @param {Function} getUserMedia native function * @returns {Function} wrapped function */ function wrapGetUserMedia(getUserMedia) { return function (constraints, successCallback, errorCallback) { getUserMedia(constraints, function (stream) { maybeApply(successCallback, [stream]); if (!getUserMediaStatus.initialized) { getUserMediaStatus.initialized = true; getUserMediaStatus.callbacks.forEach(function (callback) { callback(); }); getUserMediaStatus.callbacks.length = 0; } }, function (error) { maybeApply(errorCallback, [error]); }); }; } /** * Create stub device which equals to auto selected device. * @param {string} kind if that should be `audio` or `video` device * @returns {Object} stub device description in `enumerateDevices` format */ function createAutoDeviceInfo(kind) { return { facing: null, label: 'Auto', kind: kind, deviceId: '', groupId: null }; } /** * Execute function after getUserMedia was executed at least once. * @param {Function} callback function to execute after getUserMedia */ function afterUserMediaInitialized(callback) { if (getUserMediaStatus.initialized) { callback(); } else { getUserMediaStatus.callbacks.push(callback); } } /** * Wrapper function which makes enumerateDevices to wait * until someone executes getUserMedia first time. * @param {Function} enumerateDevices native function * @returns {Funtion} wrapped function */ function wrapEnumerateDevices(enumerateDevices) { return function (callback) { // enumerate devices only after initial getUserMedia afterUserMediaInitialized(function () { enumerateDevices().then(function (devices) { //add auto devices devices.unshift( createAutoDeviceInfo('audioinput'), createAutoDeviceInfo('videoinput') ); callback(devices); }, function (err) { console.error('cannot enumerate devices: ', err); // return only auto devices callback([createAutoDeviceInfo('audioInput'), createAutoDeviceInfo('videoinput')]); }); }); }; } /** * Use old MediaStreamTrack to get devices list and * convert it to enumerateDevices format. * @param {Function} callback function to call when received devices list. */ function enumerateDevicesThroughMediaStreamTrack (callback) { MediaStreamTrack.getSources(function (sources) { var devices = sources.map(function (source) { var kind = (source.kind || '').toLowerCase(); return { facing: source.facing || null, label: source.label, kind: kind ? kind + 'input': null, deviceId: source.id, groupId: source.groupId || null }; }); //add auto devices devices.unshift( createAutoDeviceInfo('audioinput'), createAutoDeviceInfo('videoinput') ); callback(devices); }); } function obtainDevices(options) { if(!options.devices || options.devices.length === 0) { return options.successCallback(options.streams || {}); } var device = options.devices.splice(0, 1); var devices = []; devices.push(device); options.deviceGUM[device](function (stream) { options.streams = options.streams || {}; options.streams[device] = stream; obtainDevices(options); }, function (error) { Object.keys(options.streams).forEach(function(device) { RTCUtils.stopMediaStream(options.streams[device]); }); logger.error( "failed to obtain " + device + " stream - stop", error); options.errorCallback(JitsiTrackErrors.parseError(error, devices)); }); } /** * Handles the newly created Media Streams. 
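 *
 * For illustration, a plain camera + microphone request typically resolves
 * into a description of the form (stream values are placeholders):
 * @example
 * [
 *     { stream: audioStream, type: "audio", videoType: null },
 *     { stream: videoStream, type: "video", videoType: "camera",
 *       resolution: "360" }
 * ]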
* @param streams the new Media Streams * @param resolution the resolution of the video streams * @returns {*[]} object that describes the new streams */ function handleLocalStream(streams, resolution) { var audioStream, videoStream, desktopStream, res = []; // If this is FF, the stream parameter is *not* a MediaStream object, it's // an object with two properties: audioStream, videoStream. if (window.webkitMediaStream) { var audioVideo = streams.audioVideo; if (audioVideo) { var audioTracks = audioVideo.getAudioTracks(); if(audioTracks.length) { audioStream = new webkitMediaStream(); for (var i = 0; i < audioTracks.length; i++) { audioStream.addTrack(audioTracks[i]); } } var videoTracks = audioVideo.getVideoTracks(); if(videoTracks.length) { videoStream = new webkitMediaStream(); for (var j = 0; j < videoTracks.length; j++) { videoStream.addTrack(videoTracks[j]); } } } if (streams && streams.desktopStream) desktopStream = streams.desktopStream; } else if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) { // Firefox and Temasys plugin if (streams && streams.audio) audioStream = streams.audio; if (streams && streams.video) videoStream = streams.video; if(streams && streams.desktop) desktopStream = streams.desktop; } if (desktopStream) res.push({stream: desktopStream, type: "video", videoType: "desktop"}); if(audioStream) res.push({stream: audioStream, type: "audio", videoType: null}); if(videoStream) res.push({stream: videoStream, type: "video", videoType: "camera", resolution: resolution}); return res; } //Options parameter is to pass config options. Currently uses only "useIPv6". var RTCUtils = { init: function (options) { return new Promise(function(resolve, reject) { if (RTCBrowserType.isFirefox()) { var FFversion = RTCBrowserType.getFirefoxVersion(); if (FFversion < 40) { logger.error( "Firefox version too old: " + FFversion + ". Required >= 40."); reject(new Error("Firefox version too old: " + FFversion + ". Required >= 40.")); return; } this.peerconnection = mozRTCPeerConnection; this.getUserMedia = wrapGetUserMedia(navigator.mozGetUserMedia.bind(navigator)); this.enumerateDevices = wrapEnumerateDevices( navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices) ); this.pc_constraints = {}; this.attachMediaStream = function (element, stream) { // srcObject is being standardized and FF will eventually // support that unprefixed. FF also supports the // "element.src = URL.createObjectURL(...)" combo, but that // will be deprecated in favour of srcObject. 
// // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg // https://github.com/webrtc/samples/issues/302 if (!element[0]) return; element[0].mozSrcObject = stream; element[0].play(); }; this.getStreamID = function (stream) { var id = stream.id; if (!id) { var tracks = stream.getVideoTracks(); if (!tracks || tracks.length === 0) { tracks = stream.getAudioTracks(); } id = tracks[0].id; } return SDPUtil.filter_special_chars(id); }; this.getVideoSrc = function (element) { if (!element) return null; return element.mozSrcObject; }; this.setVideoSrc = function (element, src) { if (element) element.mozSrcObject = src; }; RTCSessionDescription = mozRTCSessionDescription; RTCIceCandidate = mozRTCIceCandidate; } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) { this.peerconnection = webkitRTCPeerConnection; var getUserMedia = navigator.webkitGetUserMedia.bind(navigator); if (navigator.mediaDevices) { this.getUserMedia = wrapGetUserMedia(getUserMedia); this.enumerateDevices = wrapEnumerateDevices( navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices) ); } else { this.getUserMedia = getUserMedia; this.enumerateDevices = enumerateDevicesThroughMediaStreamTrack; } this.attachMediaStream = function (element, stream) { // saves the created url for the stream, so we can reuse it // and not keep creating urls if (!stream.jitsiObjectURL) { stream.jitsiObjectURL = webkitURL.createObjectURL(stream); } element.attr('src', stream.jitsiObjectURL); }; this.getStreamID = function (stream) { // streams from FF endpoints have the characters '{' and '}' // that make jQuery choke. return SDPUtil.filter_special_chars(stream.id); }; this.getVideoSrc = function (element) { if (!element) return null; return element.getAttribute("src"); }; this.setVideoSrc = function (element, src) { if (element) element.setAttribute("src", src); }; // DTLS should now be enabled by default but.. 
this.pc_constraints = {'optional': [ {'DtlsSrtpKeyAgreement': 'true'} ]}; if (options.useIPv6) { // https://code.google.com/p/webrtc/issues/detail?id=2828 this.pc_constraints.optional.push({googIPv6: true}); } if (RTCBrowserType.isAndroid()) { this.pc_constraints = {}; // disable DTLS on Android } if (!webkitMediaStream.prototype.getVideoTracks) { webkitMediaStream.prototype.getVideoTracks = function () { return this.videoTracks; }; } if (!webkitMediaStream.prototype.getAudioTracks) { webkitMediaStream.prototype.getAudioTracks = function () { return this.audioTracks; }; } } // Detect IE/Safari else if (RTCBrowserType.isTemasysPluginUsed()) { //AdapterJS.WebRTCPlugin.setLogLevel( // AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE); var self = this; AdapterJS.webRTCReady(function (isPlugin) { self.peerconnection = RTCPeerConnection; self.getUserMedia = window.getUserMedia; self.enumerateDevices = enumerateDevicesThroughMediaStreamTrack; self.attachMediaStream = function (elSel, stream) { if (stream.id === "dummyAudio" || stream.id === "dummyVideo") { return; } attachMediaStream(elSel[0], stream); }; self.getStreamID = function (stream) { var id = SDPUtil.filter_special_chars(stream.label); return id; }; self.getVideoSrc = function (element) { if (!element) { logger.warn("Attempt to get video SRC of null element"); return null; } var children = element.children; for (var i = 0; i !== children.length; ++i) { if (children[i].name === 'streamId') { return children[i].value; } } //logger.info(element.id + " SRC: " + src); return null; }; self.setVideoSrc = function (element, src) { //logger.info("Set video src: ", element, src); if (!src) { logger.warn("Not attaching video stream, 'src' is null"); return; } AdapterJS.WebRTCPlugin.WaitForPluginReady(); var stream = AdapterJS.WebRTCPlugin.plugin .getStreamWithId(AdapterJS.WebRTCPlugin.pageId, src); attachMediaStream(element, stream); }; onReady(options, self.getUserMediaWithConstraints); resolve(); }); } else { try { logger.error('Browser does not appear to be WebRTC-capable'); } catch (e) { } reject('Browser does not appear to be WebRTC-capable'); return; } // Call onReady() if Temasys plugin is not used if (!RTCBrowserType.isTemasysPluginUsed()) { onReady(options, this.getUserMediaWithConstraints); resolve(); } }.bind(this)); }, /** * @param {string[]} um required user media types * @param {function} success_callback * @param {Function} failure_callback * @param {Object} [options] optional parameters * @param {string} options.resolution * @param {number} options.bandwidth * @param {number} options.fps * @param {string} options.desktopStream * @param {string} options.cameraDeviceId * @param {string} options.micDeviceId **/ getUserMediaWithConstraints: function ( um, success_callback, failure_callback, options) { options = options || {}; var resolution = options.resolution; var constraints = getConstraints(um, options); logger.info("Get media constraints", constraints); try { this.getUserMedia(constraints, function (stream) { logger.log('onUserMediaSuccess'); setAvailableDevices(um, true); success_callback(stream); }, function (error) { setAvailableDevices(um, false); logger.warn('Failed to get access to local media. Error ', error, constraints); if (failure_callback) { failure_callback(error, resolution); } }); } catch (e) { logger.error('GUM failed: ', e); if (failure_callback) { failure_callback(e); } } }, /** * Creates the local MediaStreams. 
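 *
 * Illustrative usage, assuming RTCUtils.init(options) has already resolved:
 * @example
 * RTCUtils.obtainAudioAndVideoPermissions({
 *     devices: ['audio', 'video'],
 *     resolution: '360'
 * }).then(function (streams) {
 *     streams.forEach(function (s) { console.log(s.type, s.videoType); });
 * }, function (error) {
 *     console.error('Failed to get user media', error);
 * });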
* @param {Object} [options] optional parameters * @param {Array} options.devices the devices that will be requested * @param {string} options.resolution resolution constraints * @param {bool} options.dontCreateJitsiTrack if true objects with the following structure {stream: the Media Stream, * type: "audio" or "video", videoType: "camera" or "desktop"} * will be returned trough the Promise, otherwise JitsiTrack objects will be returned. * @param {string} options.cameraDeviceId * @param {string} options.micDeviceId * @returns {*} Promise object that will receive the new JitsiTracks */ obtainAudioAndVideoPermissions: function (options) { var self = this; options = options || {}; return new Promise(function (resolve, reject) { var successCallback = function (stream) { resolve(handleLocalStream(stream, options.resolution)); }; options.devices = options.devices || ['audio', 'video']; if(!screenObtainer.isSupported() && options.devices.indexOf("desktop") !== -1){ reject(new Error("Desktop sharing is not supported!")); } if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) { var GUM = function (device, s, e) { this.getUserMediaWithConstraints(device, s, e, options); }; var deviceGUM = { "audio": GUM.bind(self, ["audio"]), "video": GUM.bind(self, ["video"]) }; if(screenObtainer.isSupported()){ deviceGUM["desktop"] = screenObtainer.obtainStream.bind( screenObtainer); } // With FF/IE we can't split the stream into audio and video because FF // doesn't support media stream constructors. So, we need to get the // audio stream separately from the video stream using two distinct GUM // calls. Not very user friendly :-( but we don't have many other // options neither. // // Note that we pack those 2 streams in a single object and pass it to // the successCallback method. obtainDevices({ devices: options.devices, streams: [], successCallback: successCallback, errorCallback: reject, deviceGUM: deviceGUM }); } else { var hasDesktop = options.devices.indexOf('desktop') > -1; if (hasDesktop) { options.devices.splice(options.devices.indexOf("desktop"), 1); } options.resolution = options.resolution || '360'; if(options.devices.length) { this.getUserMediaWithConstraints( options.devices, function (stream) { if((options.devices.indexOf("audio") !== -1 && !stream.getAudioTracks().length) || (options.devices.indexOf("video") !== -1 && !stream.getVideoTracks().length)) { self.stopMediaStream(stream); reject(JitsiTrackErrors.parseError( new Error("Unable to get the audio and " + "video tracks."), options.devices)); return; } if(hasDesktop) { screenObtainer.obtainStream( function (desktopStream) { successCallback({audioVideo: stream, desktopStream: desktopStream}); }, function (error) { self.stopMediaStream(stream); reject( JitsiTrackErrors.parseError(error, options.devices)); }); } else { successCallback({audioVideo: stream}); } }, function (error) { reject(JitsiTrackErrors.parseError(error, options.devices)); }, options); } else if (hasDesktop) { screenObtainer.obtainStream( function (stream) { successCallback({desktopStream: stream}); }, function (error) { reject( JitsiTrackErrors.parseError(error, ["desktop"])); }); } } }.bind(this)); }, addListener: function (eventType, listener) { eventEmitter.on(eventType, listener); }, removeListener: function (eventType, listener) { eventEmitter.removeListener(eventType, listener); }, getDeviceAvailability: function () { return devices; }, isRTCReady: function () { return rtcReady; }, /** * Checks if its possible to enumerate available cameras/micropones. 
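 *
 * Illustrative guard (enumerateDevices is attached to RTCUtils during init):
 * @example
 * if (RTCUtils.isDeviceListAvailable()) {
 *     RTCUtils.enumerateDevices(function (list) {
 *         console.log(list.length + ' device(s) reported');
 *     });
 * }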
* @returns {boolean} true if available, false otherwise. */ isDeviceListAvailable: function () { var isEnumerateDevicesAvailable = navigator.mediaDevices && navigator.mediaDevices.enumerateDevices; if (isEnumerateDevicesAvailable) { return true; } return (MediaStreamTrack && MediaStreamTrack.getSources)? true : false; }, /** * Returns true if changing the camera / microphone device is supported and * false if not. */ isDeviceChangeAvailable: function () { if(RTCBrowserType.isChrome() || RTCBrowserType.isOpera() || RTCBrowserType.isTemasysPluginUsed()) return true; return false; }, /** * A method to handle stopping of the stream. * One point to handle the differences in various implementations. * @param mediaStream MediaStream object to stop. */ stopMediaStream: function (mediaStream) { mediaStream.getTracks().forEach(function (track) { // stop() not supported with IE if (!RTCBrowserType.isTemasysPluginUsed() && track.stop) { track.stop(); } }); // leave stop for implementation still using it if (mediaStream.stop) { mediaStream.stop(); } // if we have done createObjectURL, lets clean it if (mediaStream.jitsiObjectURL) { webkitURL.revokeObjectURL(mediaStream.jitsiObjectURL); } }, /** * Returns whether the desktop sharing is enabled or not. * @returns {boolean} */ isDesktopSharingEnabled: function () { return screenObtainer.isSupported(); } }; module.exports = RTCUtils; }).call(this,"/modules/RTC/RTCUtils.js") },{"../../JitsiTrackErrors":9,"../../service/RTC/RTCEvents":131,"../../service/RTC/Resolutions":132,"../xmpp/SDPUtil":34,"./RTCBrowserType":17,"./ScreenObtainer":19,"./adapter.screenshare":20,"events":51,"jitsi-meet-logger":79}],19:[function(require,module,exports){ (function (__filename){ /* global chrome, $, alert */ /* jshint -W003 */ var logger = require("jitsi-meet-logger").getLogger(__filename); var RTCBrowserType = require("./RTCBrowserType"); var AdapterJS = require("./adapter.screenshare"); var DesktopSharingEventTypes = require("../../service/desktopsharing/DesktopSharingEventTypes"); var JitsiTrackErrors = require("../../JitsiTrackErrors"); /** * Indicates whether the Chrome desktop sharing extension is installed. * @type {boolean} */ var chromeExtInstalled = false; /** * Indicates whether an update of the Chrome desktop sharing extension is * required. * @type {boolean} */ var chromeExtUpdateRequired = false; /** * Whether the jidesha extension for firefox is installed for the domain on * which we are running. Null designates an unknown value. * @type {null} */ var firefoxExtInstalled = null; /** * If set to true, detection of an installed firefox extension will be started * again the next time obtainScreenOnFirefox is called (e.g. next time the * user tries to enable screen sharing). */ var reDetectFirefoxExtension = false; var GUM = null; /** * Handles obtaining a stream from a screen capture on different browsers. */ var ScreenObtainer = { obtainStream: null, /** * Initializes the function used to obtain a screen capture * (this.obtainStream). * * If the browser is Chrome, it uses the value of * 'options.desktopSharingChromeMethod' (or 'options.desktopSharing') to * decide whether to use the a Chrome extension (if the value is 'ext'), * use the "screen" media source (if the value is 'webrtc'), * or disable screen capture (if the value is other). * Note that for the "screen" media source to work the * 'chrome://flags/#enable-usermedia-screen-capture' flag must be set. 
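 *
 * Illustrative configuration; the extension ids are placeholders and
 * `gumFunction` stands in for the getUserMedia wrapper passed in by RTCUtils:
 * @example
 * ScreenObtainer.init({
 *     desktopSharingChromeMethod: 'ext',
 *     desktopSharingChromeExtId: 'abcdefghijklmnopabcdefghijklmnop',
 *     desktopSharingChromeMinExtVersion: '0.1',
 *     desktopSharingFirefoxExtId: 'jidesha@example.com'
 * }, gumFunction);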
*/ init: function(options, gum) { var obtainDesktopStream = null; this.options = options = options || {}; GUM = gum; if (RTCBrowserType.isFirefox()) initFirefoxExtensionDetection(options); // TODO remove this, options.desktopSharing is deprecated. var chromeMethod = (options.desktopSharingChromeMethod || options.desktopSharing); if (RTCBrowserType.isTemasysPluginUsed()) { if (!AdapterJS.WebRTCPlugin.plugin.HasScreensharingFeature) { logger.info("Screensharing not supported by this plugin " + "version"); } else if(!AdapterJS.WebRTCPlugin.plugin.isScreensharingAvailable) { logger.info( "Screensharing not available with Temasys plugin on" + " this site"); } else { obtainDesktopStream = obtainWebRTCScreen; logger.info("Using Temasys plugin for desktop sharing"); } } else if (RTCBrowserType.isChrome()) { if (chromeMethod == "ext") { if (RTCBrowserType.getChromeVersion() >= 34) { obtainDesktopStream = this.obtainScreenFromExtension; logger.info("Using Chrome extension for desktop sharing"); initChromeExtension(options); } else { logger.info("Chrome extension not supported until ver 34"); } } else if (chromeMethod == "webrtc") { obtainDesktopStream = obtainWebRTCScreen; logger.info("Using Chrome WebRTC for desktop sharing"); } } else if (RTCBrowserType.isFirefox()) { if (options.desktopSharingFirefoxDisabled) { obtainDesktopStream = null; } else if (window.location.protocol === "http:"){ logger.log("Screen sharing is not supported over HTTP. " + "Use of HTTPS is required."); obtainDesktopStream = null; } else { obtainDesktopStream = this.obtainScreenOnFirefox; } } if (!obtainDesktopStream) { logger.info("Desktop sharing disabled"); } this.obtainStream = obtainDesktopStream; }, /** * Checks whether obtaining a screen capture is supported in the current * environment. * @returns {boolean} */ isSupported: function() { return !!this.obtainStream; }, /** * Obtains a screen capture stream on Firefox. * @param callback * @param errorCallback */ obtainScreenOnFirefox: function (callback, errorCallback) { var self = this; var extensionRequired = false; if (this.options.desktopSharingFirefoxMaxVersionExtRequired === -1 || (this.options.desktopSharingFirefoxMaxVersionExtRequired >= 0 && RTCBrowserType.getFirefoxVersion() <= this.options.desktopSharingFirefoxMaxVersionExtRequired)) { extensionRequired = true; logger.log("Jidesha extension required on firefox version " + RTCBrowserType.getFirefoxVersion()); } if (!extensionRequired || firefoxExtInstalled === true) { obtainWebRTCScreen(callback, errorCallback); return; } if (reDetectFirefoxExtension) { reDetectFirefoxExtension = false; initFirefoxExtensionDetection(this.options); } // Give it some (more) time to initialize, and assume lack of // extension if it hasn't. if (firefoxExtInstalled === null) { window.setTimeout( function() { if (firefoxExtInstalled === null) firefoxExtInstalled = false; self.obtainScreenOnFirefox(callback, errorCallback); }, 300 ); logger.log("Waiting for detection of jidesha on firefox to " + "finish."); return; } // We need an extension and it isn't installed. // Make sure we check for the extension when the user clicks again. firefoxExtInstalled = null; reDetectFirefoxExtension = true; // Make sure desktopsharing knows that we failed, so that it doesn't get // stuck in 'switching' mode. errorCallback({ type: "jitsiError", errorObject: JitsiTrackErrors.FIREFOX_EXTENSION_NEEDED }); }, /** * Asks Chrome extension to call chooseDesktopMedia and gets chrome * 'desktop' stream for returned stream token. 
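 *
 * Normally invoked through ScreenObtainer.obtainStream once init() has picked
 * this method; shown directly here for illustration:
 * @example
 * ScreenObtainer.obtainScreenFromExtension(
 *     function (stream) { console.log('desktop stream', stream.id); },
 *     function (error) { console.error('screen sharing failed', error); });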
*/ obtainScreenFromExtension: function (streamCallback, failCallback) { var self = this; if (chromeExtInstalled) { doGetStreamFromExtension(this.options, streamCallback, failCallback); } else { if (chromeExtUpdateRequired) { alert( 'Jitsi Desktop Streamer requires update. ' + 'Changes will take effect after next Chrome restart.'); } chrome.webstore.install( getWebStoreInstallUrl(this.options), function (arg) { logger.log("Extension installed successfully", arg); chromeExtInstalled = true; // We need to give a moment for the endpoint to become // available window.setTimeout(function () { doGetStreamFromExtension(self.options, streamCallback, failCallback); }, 500); }, function (arg) { logger.log("Failed to install the extension", arg); failCallback(arg); } ); } } }; /** * Obtains a desktop stream using getUserMedia. * For this to work on Chrome, the * 'chrome://flags/#enable-usermedia-screen-capture' flag must be enabled. * * On firefox, the document's domain must be white-listed in the * 'media.getusermedia.screensharing.allowed_domains' preference in * 'about:config'. */ function obtainWebRTCScreen(streamCallback, failCallback) { GUM( ['screen'], streamCallback, failCallback ); } /** * Constructs inline install URL for Chrome desktop streaming extension. * The 'chromeExtensionId' must be defined in options parameter. * @param options supports "desktopSharingChromeExtId" and "chromeExtensionId" * @returns {string} */ function getWebStoreInstallUrl(options) { //TODO remove chromeExtensionId (deprecated) return "https://chrome.google.com/webstore/detail/" + (options.desktopSharingChromeExtId || options.chromeExtensionId); } /** * Checks whether an update of the Chrome extension is required. * @param minVersion minimal required version * @param extVersion current extension version * @returns {boolean} */ function isUpdateRequired(minVersion, extVersion) { try { var s1 = minVersion.split('.'); var s2 = extVersion.split('.'); var len = Math.max(s1.length, s2.length); for (var i = 0; i < len; i++) { var n1 = 0, n2 = 0; if (i < s1.length) n1 = parseInt(s1[i]); if (i < s2.length) n2 = parseInt(s2[i]); if (isNaN(n1) || isNaN(n2)) { return true; } else if (n1 !== n2) { return n1 > n2; } } // will happen if both versions have identical numbers in // their components (even if one of them is longer, has more components) return false; } catch (e) { logger.error("Failed to parse extension version", e); return true; } } function checkChromeExtInstalled(callback, options) { if (!chrome || !chrome.runtime) { // No API, so no extension for sure callback(false, false); return; } chrome.runtime.sendMessage( //TODO: remove chromeExtensionId (deprecated) (options.desktopSharingChromeExtId || options.chromeExtensionId), { getVersion: true }, function (response) { if (!response || !response.version) { // Communication failure - assume that no endpoint exists logger.warn( "Extension not installed?: ", chrome.runtime.lastError); callback(false, false); return; } // Check installed extension version var extVersion = response.version; logger.log('Extension version is: ' + extVersion); //TODO: remove minChromeExtVersion (deprecated) var updateRequired = isUpdateRequired( (options.desktopSharingChromeMinExtVersion || options.minChromeExtVersion), extVersion); callback(!updateRequired, updateRequired); } ); } function doGetStreamFromExtension(options, streamCallback, failCallback) { // Sends 'getStream' msg to the extension. // Extension id must be defined in the config. 
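// For illustration, the round trip with the extension looks like:
//   request : { getStream: true, sources: <configured sources> }
//   response: { streamId: <chooseDesktopMedia token> }
// The returned streamId is then passed to getUserMedia as
// options.desktopStream (see the 'desktop' branch of getConstraints).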
chrome.runtime.sendMessage( //TODO: remove chromeExtensionId (deprecated) (options.desktopSharingChromeExtId || options.chromeExtensionId), { getStream: true, //TODO: remove desktopSharingSources (deprecated). sources: (options.desktopSharingChromeSources || options.desktopSharingSources) }, function (response) { if (!response) { failCallback(chrome.runtime.lastError); return; } logger.log("Response from extension: " + response); if (response.streamId) { GUM( ['desktop'], function (stream) { streamCallback(stream); }, failCallback, {desktopStream: response.streamId}); } else { failCallback("Extension failed to get the stream"); } } ); } /** * Initializes with extension id set in * config.js to support inline installs. Host site must be selected as main * website of published extension. * @param options supports "desktopSharingChromeExtId" and "chromeExtensionId" */ function initInlineInstalls(options) { $("link[rel=chrome-webstore-item]").attr("href", getWebStoreInstallUrl(options)); } function initChromeExtension(options) { // Initialize Chrome extension inline installs initInlineInstalls(options); // Check if extension is installed checkChromeExtInstalled(function (installed, updateRequired) { chromeExtInstalled = installed; chromeExtUpdateRequired = updateRequired; logger.info( "Chrome extension installed: " + chromeExtInstalled + " updateRequired: " + chromeExtUpdateRequired); }, options); } /** * Starts the detection of an installed jidesha extension for firefox. * @param options supports "desktopSharingFirefoxDisabled", * "desktopSharingFirefoxExtId" and "chromeExtensionId" */ function initFirefoxExtensionDetection(options) { if (options.desktopSharingFirefoxDisabled) { return; } if (firefoxExtInstalled === false || firefoxExtInstalled === true) return; if (!options.desktopSharingFirefoxExtId) { firefoxExtInstalled = false; return; } var img = document.createElement('img'); img.onload = function(){ logger.log("Detected firefox screen sharing extension."); firefoxExtInstalled = true; }; img.onerror = function(){ logger.log("Detected lack of firefox screen sharing extension."); firefoxExtInstalled = false; }; // The jidesha extension exposes an empty image file under the url: // "chrome://EXT_ID/content/DOMAIN.png" // Where EXT_ID is the ID of the extension with "@" replaced by ".", and // DOMAIN is a domain whitelisted by the extension. var src = "chrome://" + (options.desktopSharingFirefoxExtId.replace('@', '.')) + "/content/" + document.location.hostname + ".png"; img.setAttribute('src', src); } module.exports = ScreenObtainer; }).call(this,"/modules/RTC/ScreenObtainer.js") },{"../../JitsiTrackErrors":9,"../../service/desktopsharing/DesktopSharingEventTypes":134,"./RTCBrowserType":17,"./adapter.screenshare":20,"jitsi-meet-logger":79}],20:[function(require,module,exports){ (function (__filename){ /*! adapterjs - v0.12.3 - 2015-11-16 */ var console = require("jitsi-meet-logger").getLogger(__filename); // Adapter's interface. 
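// Illustrative note: code embedding this adapter typically synchronises with
// it via AdapterJS.webRTCReady (defined below), e.g.
//   AdapterJS.webRTCReady(function (isUsingPlugin) {
//       console.log('WebRTC ready, plugin in use:', isUsingPlugin);
//   });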
var AdapterJS = AdapterJS || {}; // Browserify compatibility if(typeof exports !== 'undefined') { module.exports = AdapterJS; } AdapterJS.options = AdapterJS.options || {}; // uncomment to get virtual webcams // AdapterJS.options.getAllCams = true; // uncomment to prevent the install prompt when the plugin in not yet installed // AdapterJS.options.hidePluginInstallPrompt = true; // AdapterJS version AdapterJS.VERSION = '0.12.3'; // This function will be called when the WebRTC API is ready to be used // Whether it is the native implementation (Chrome, Firefox, Opera) or // the plugin // You may Override this function to synchronise the start of your application // with the WebRTC API being ready. // If you decide not to override use this synchronisation, it may result in // an extensive CPU usage on the plugin start (once per tab loaded) // Params: // - isUsingPlugin: true is the WebRTC plugin is being used, false otherwise // AdapterJS.onwebrtcready = AdapterJS.onwebrtcready || function(isUsingPlugin) { // The WebRTC API is ready. // Override me and do whatever you want here }; // Sets a callback function to be called when the WebRTC interface is ready. // The first argument is the function to callback.\ // Throws an error if the first argument is not a function AdapterJS.webRTCReady = function (callback) { if (typeof callback !== 'function') { throw new Error('Callback provided is not a function'); } if (true === AdapterJS.onwebrtcreadyDone) { // All WebRTC interfaces are ready, just call the callback callback(null !== AdapterJS.WebRTCPlugin.plugin); } else { // will be triggered automatically when your browser/plugin is ready. AdapterJS.onwebrtcready = callback; } }; // Plugin namespace AdapterJS.WebRTCPlugin = AdapterJS.WebRTCPlugin || {}; // The object to store plugin information AdapterJS.WebRTCPlugin.pluginInfo = { prefix : 'Tem', plugName : 'TemWebRTCPlugin', pluginId : 'plugin0', type : 'application/x-temwebrtcplugin', onload : '__TemWebRTCReady0', portalLink : 'http://skylink.io/plugin/', downloadLink : null, //set below companyName: 'Temasys' }; if(!!navigator.platform.match(/^Mac/i)) { AdapterJS.WebRTCPlugin.pluginInfo.downloadLink = 'http://bit.ly/1n77hco'; } else if(!!navigator.platform.match(/^Win/i)) { AdapterJS.WebRTCPlugin.pluginInfo.downloadLink = 'http://bit.ly/1kkS4FN'; } AdapterJS.WebRTCPlugin.TAGS = { NONE : 'none', AUDIO : 'audio', VIDEO : 'video' }; // Unique identifier of each opened page AdapterJS.WebRTCPlugin.pageId = Math.random().toString(36).slice(2); // Use this whenever you want to call the plugin. AdapterJS.WebRTCPlugin.plugin = null; // Set log level for the plugin once it is ready. // The different values are // This is an asynchronous function that will run when the plugin is ready AdapterJS.WebRTCPlugin.setLogLevel = null; // Defines webrtc's JS interface according to the plugin's implementation. // Define plugin Browsers as WebRTC Interface. AdapterJS.WebRTCPlugin.defineWebRTCInterface = null; // This function detects whether or not a plugin is installed. // Checks if Not IE (firefox, for example), else if it's IE, // we're running IE and do something. If not it is not supported. AdapterJS.WebRTCPlugin.isPluginInstalled = null; // Lets adapter.js wait until the the document is ready before injecting the plugin AdapterJS.WebRTCPlugin.pluginInjectionInterval = null; // Inject the HTML DOM object element into the page. 
AdapterJS.WebRTCPlugin.injectPlugin = null; // States of readiness that the plugin goes through when // being injected and stated AdapterJS.WebRTCPlugin.PLUGIN_STATES = { NONE : 0, // no plugin use INITIALIZING : 1, // Detected need for plugin INJECTING : 2, // Injecting plugin INJECTED: 3, // Plugin element injected but not usable yet READY: 4 // Plugin ready to be used }; // Current state of the plugin. You cannot use the plugin before this is // equal to AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.NONE; // True is AdapterJS.onwebrtcready was already called, false otherwise // Used to make sure AdapterJS.onwebrtcready is only called once AdapterJS.onwebrtcreadyDone = false; // Log levels for the plugin. // To be set by calling AdapterJS.WebRTCPlugin.setLogLevel /* Log outputs are prefixed in some cases. INFO: Information reported by the plugin. ERROR: Errors originating from within the plugin. WEBRTC: Error originating from within the libWebRTC library */ // From the least verbose to the most verbose AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS = { NONE : 'NONE', ERROR : 'ERROR', WARNING : 'WARNING', INFO: 'INFO', VERBOSE: 'VERBOSE', SENSITIVE: 'SENSITIVE' }; // Does a waiting check before proceeding to load the plugin. AdapterJS.WebRTCPlugin.WaitForPluginReady = null; // This methid will use an interval to wait for the plugin to be ready. AdapterJS.WebRTCPlugin.callWhenPluginReady = null; // !!!! WARNING: DO NOT OVERRIDE THIS FUNCTION. !!! // This function will be called when plugin is ready. It sends necessary // details to the plugin. // The function will wait for the document to be ready and the set the // plugin state to AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY, // indicating that it can start being requested. // This function is not in the IE/Safari condition brackets so that // TemPluginLoaded function might be called on Chrome/Firefox. // This function is the only private function that is not encapsulated to // allow the plugin method to be called. __TemWebRTCReady0 = function () { if (document.readyState === 'complete') { AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY; AdapterJS.maybeThroughWebRTCReady(); } else { AdapterJS.WebRTCPlugin.documentReadyInterval = setInterval(function () { if (document.readyState === 'complete') { // TODO: update comments, we wait for the document to be ready clearInterval(AdapterJS.WebRTCPlugin.documentReadyInterval); AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY; AdapterJS.maybeThroughWebRTCReady(); } }, 100); } }; AdapterJS.maybeThroughWebRTCReady = function() { if (!AdapterJS.onwebrtcreadyDone) { AdapterJS.onwebrtcreadyDone = true; if (typeof(AdapterJS.onwebrtcready) === 'function') { AdapterJS.onwebrtcready(AdapterJS.WebRTCPlugin.plugin !== null); } } }; // Text namespace AdapterJS.TEXT = { PLUGIN: { REQUIRE_INSTALLATION: 'This website requires you to install a WebRTC-enabling plugin ' + 'to work on this browser.', NOT_SUPPORTED: 'Your browser does not support WebRTC.', BUTTON: 'Install Now' }, REFRESH: { REQUIRE_REFRESH: 'Please refresh page', BUTTON: 'Refresh Page' } }; // The result of ice connection states. // - starting: Ice connection is starting. // - checking: Ice connection is checking. // - connected Ice connection is connected. // - completed Ice connection is connected. // - done Ice connection has been completed. // - disconnected Ice connection has been disconnected. 
// - failed Ice connection has failed. // - closed Ice connection is closed. AdapterJS._iceConnectionStates = { starting : 'starting', checking : 'checking', connected : 'connected', completed : 'connected', done : 'completed', disconnected : 'disconnected', failed : 'failed', closed : 'closed' }; //The IceConnection states that has been fired for each peer. AdapterJS._iceConnectionFiredStates = []; // Check if WebRTC Interface is defined. AdapterJS.isDefined = null; // This function helps to retrieve the webrtc detected browser information. // This sets: // - webrtcDetectedBrowser: The browser agent name. // - webrtcDetectedVersion: The browser version. // - webrtcDetectedType: The types of webRTC support. // - 'moz': Mozilla implementation of webRTC. // - 'webkit': WebKit implementation of webRTC. // - 'plugin': Using the plugin implementation. AdapterJS.parseWebrtcDetectedBrowser = function () { var hasMatch, checkMatch = navigator.userAgent.match( /(opera|chrome|safari|firefox|msie|trident(?=\/))\/?\s*(\d+)/i) || []; if (/trident/i.test(checkMatch[1])) { hasMatch = /\brv[ :]+(\d+)/g.exec(navigator.userAgent) || []; webrtcDetectedBrowser = 'IE'; webrtcDetectedVersion = parseInt(hasMatch[1] || '0', 10); } else if (checkMatch[1] === 'Chrome') { hasMatch = navigator.userAgent.match(/\bOPR\/(\d+)/); if (hasMatch !== null) { webrtcDetectedBrowser = 'opera'; webrtcDetectedVersion = parseInt(hasMatch[1], 10); } } if (navigator.userAgent.indexOf('Safari')) { if (typeof InstallTrigger !== 'undefined') { webrtcDetectedBrowser = 'firefox'; } else if (/*@cc_on!@*/ false || !!document.documentMode) { webrtcDetectedBrowser = 'IE'; } else if ( Object.prototype.toString.call(window.HTMLElement).indexOf('Constructor') > 0) { webrtcDetectedBrowser = 'safari'; } else if (!!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0) { webrtcDetectedBrowser = 'opera'; } else if (!!window.chrome) { webrtcDetectedBrowser = 'chrome'; } } if (!webrtcDetectedBrowser) { webrtcDetectedVersion = checkMatch[1]; } if (!webrtcDetectedVersion) { try { checkMatch = (checkMatch[2]) ? [checkMatch[1], checkMatch[2]] : [navigator.appName, navigator.appVersion, '-?']; if ((hasMatch = navigator.userAgent.match(/version\/(\d+)/i)) !== null) { checkMatch.splice(1, 1, hasMatch[1]); } webrtcDetectedVersion = parseInt(checkMatch[1], 10); } catch (error) { } } }; // To fix configuration as some browsers does not support // the 'urls' attribute. 
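// For illustration, a configuration such as
//   { iceServers: [ { urls: 'stun:stun.example.org' } ] }
// is rewritten by maybeFixConfiguration into the older single-'url' form:
//   { iceServers: [ { url: 'stun:stun.example.org' } ] }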
AdapterJS.maybeFixConfiguration = function (pcConfig) { if (pcConfig === null) { return; } for (var i = 0; i < pcConfig.iceServers.length; i++) { if (pcConfig.iceServers[i].hasOwnProperty('urls')) { pcConfig.iceServers[i].url = pcConfig.iceServers[i].urls; delete pcConfig.iceServers[i].urls; } } }; AdapterJS.addEvent = function(elem, evnt, func) { if (elem.addEventListener) { // W3C DOM elem.addEventListener(evnt, func, false); } else if (elem.attachEvent) {// OLD IE DOM elem.attachEvent('on'+evnt, func); } else { // No much to do elem[evnt] = func; } }; AdapterJS.renderNotificationBar = function (text, buttonText, buttonLink, openNewTab, displayRefreshBar) { // only inject once the page is ready if (document.readyState !== 'complete') { return; } var w = window; var i = document.createElement('iframe'); i.style.position = 'fixed'; i.style.top = '-41px'; i.style.left = 0; i.style.right = 0; i.style.width = '100%'; i.style.height = '40px'; i.style.backgroundColor = '#ffffe1'; i.style.border = 'none'; i.style.borderBottom = '1px solid #888888'; i.style.zIndex = '9999999'; if(typeof i.style.webkitTransition === 'string') { i.style.webkitTransition = 'all .5s ease-out'; } else if(typeof i.style.transition === 'string') { i.style.transition = 'all .5s ease-out'; } document.body.appendChild(i); c = (i.contentWindow) ? i.contentWindow : (i.contentDocument.document) ? i.contentDocument.document : i.contentDocument; c.document.open(); c.document.write('' + text + ''); if(buttonText && buttonLink) { c.document.write(''); c.document.close(); // On click on okay AdapterJS.addEvent(c.document.getElementById('okay'), 'click', function(e) { if (!!displayRefreshBar) { AdapterJS.renderNotificationBar(AdapterJS.TEXT.EXTENSION ? AdapterJS.TEXT.EXTENSION.REQUIRE_REFRESH : AdapterJS.TEXT.REFRESH.REQUIRE_REFRESH, AdapterJS.TEXT.REFRESH.BUTTON, 'javascript:location.reload()'); } window.open(buttonLink, !!openNewTab ? '_blank' : '_top'); e.preventDefault(); try { event.cancelBubble = true; } catch(error) { } var pluginInstallInterval = setInterval(function(){ if(! isIE) { navigator.plugins.refresh(false); } AdapterJS.WebRTCPlugin.isPluginInstalled( AdapterJS.WebRTCPlugin.pluginInfo.prefix, AdapterJS.WebRTCPlugin.pluginInfo.plugName, function() { // plugin now installed clearInterval(pluginInstallInterval); AdapterJS.WebRTCPlugin.defineWebRTCInterface(); }, function() { // still no plugin detected, nothing to do }); } , 500); }); // On click on Cancel AdapterJS.addEvent(c.document.getElementById('cancel'), 'click', function(e) { w.document.body.removeChild(i); }); } else { c.document.close(); } setTimeout(function() { if(typeof i.style.webkitTransform === 'string') { i.style.webkitTransform = 'translateY(40px)'; } else if(typeof i.style.transform === 'string') { i.style.transform = 'translateY(40px)'; } else { i.style.top = '0px'; } }, 300); }; // ----------------------------------------------------------- // Detected webrtc implementation. Types are: // - 'moz': Mozilla implementation of webRTC. // - 'webkit': WebKit implementation of webRTC. // - 'plugin': Using the plugin implementation. webrtcDetectedType = null; // Detected webrtc datachannel support. Types are: // - 'SCTP': SCTP datachannel support. // - 'RTP': RTP datachannel support. webrtcDetectedDCSupport = null; // Set the settings for creating DataChannels, MediaStream for // Cross-browser compability. // - This is only for SCTP based support browsers. // the 'urls' attribute. 
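// Illustrative call (the peer's browser name/version would come from the
// application's own signalling; values here are placeholders):
//   checkMediaDataChannelSettings('firefox', 38, function (beOfferer, updated) {
//       // decide which side creates the offer and use the adjusted constraints
//   }, { mandatory: {} });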
checkMediaDataChannelSettings = function (peerBrowserAgent, peerBrowserVersion, callback, constraints) { if (typeof callback !== 'function') { return; } var beOfferer = true; var isLocalFirefox = webrtcDetectedBrowser === 'firefox'; // Nightly version does not require MozDontOfferDataChannel for interop var isLocalFirefoxInterop = webrtcDetectedType === 'moz' && webrtcDetectedVersion > 30; var isPeerFirefox = peerBrowserAgent === 'firefox'; var isPeerFirefoxInterop = peerBrowserAgent === 'firefox' && ((peerBrowserVersion) ? (peerBrowserVersion > 30) : false); // Resends an updated version of constraints for MozDataChannel to work // If other userAgent is firefox and user is firefox, remove MozDataChannel if ((isLocalFirefox && isPeerFirefox) || (isLocalFirefoxInterop)) { try { delete constraints.mandatory.MozDontOfferDataChannel; } catch (error) { console.error('Failed deleting MozDontOfferDataChannel'); console.error(error); } } else if ((isLocalFirefox && !isPeerFirefox)) { constraints.mandatory.MozDontOfferDataChannel = true; } if (!isLocalFirefox) { // temporary measure to remove Moz* constraints in non Firefox browsers for (var prop in constraints.mandatory) { if (constraints.mandatory.hasOwnProperty(prop)) { if (prop.indexOf('Moz') !== -1) { delete constraints.mandatory[prop]; } } } } // Firefox (not interopable) cannot offer DataChannel as it will cause problems to the // interopability of the media stream if (isLocalFirefox && !isPeerFirefox && !isLocalFirefoxInterop) { beOfferer = false; } callback(beOfferer, constraints); }; // Handles the differences for all browsers ice connection state output. // - Tested outcomes are: // - Chrome (offerer) : 'checking' > 'completed' > 'completed' // - Chrome (answerer) : 'checking' > 'connected' // - Firefox (offerer) : 'checking' > 'connected' // - Firefox (answerer): 'checking' > 'connected' checkIceConnectionState = function (peerId, iceConnectionState, callback) { if (typeof callback !== 'function') { console.warn('No callback specified in checkIceConnectionState. Aborted.'); return; } peerId = (peerId) ? peerId : 'peer'; if (!AdapterJS._iceConnectionFiredStates[peerId] || iceConnectionState === AdapterJS._iceConnectionStates.disconnected || iceConnectionState === AdapterJS._iceConnectionStates.failed || iceConnectionState === AdapterJS._iceConnectionStates.closed) { AdapterJS._iceConnectionFiredStates[peerId] = []; } iceConnectionState = AdapterJS._iceConnectionStates[iceConnectionState]; if (AdapterJS._iceConnectionFiredStates[peerId].indexOf(iceConnectionState) < 0) { AdapterJS._iceConnectionFiredStates[peerId].push(iceConnectionState); if (iceConnectionState === AdapterJS._iceConnectionStates.connected) { setTimeout(function () { AdapterJS._iceConnectionFiredStates[peerId] .push(AdapterJS._iceConnectionStates.done); callback(AdapterJS._iceConnectionStates.done); }, 1000); } callback(iceConnectionState); } return; }; // Firefox: // - Creates iceServer from the url for Firefox. // - Create iceServer with stun url. // - Create iceServer with turn url. // - Ignore the transport parameter from TURN url for FF version <=27. // - Return null for createIceServer if transport=tcp. // - FF 27 and above supports transport parameters in TURN url, // - So passing in the full url to create iceServer. // Chrome: // - Creates iceServer from the url for Chrome M33 and earlier. // - Create iceServer with stun url. // - Chrome M28 & above uses below TURN format. 
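// Usage sketch for both helpers (they are assigned browser-specific implementations further below;
// the URLs and credentials here are placeholders):
//   var stun = createIceServer('stun:stun.example.org');
//   var turn = createIceServer('turn:turn.example.org?transport=udp', 'user', 'pass');
//   var servers = createIceServers(['stun:stun.example.org', 'turn:turn.example.org'], 'user', 'pass');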
// Plugin: // - Creates Ice Server for Plugin Browsers // - If Stun - Create iceServer with stun url. // - Else - Create iceServer with turn url // - This is a WebRTC Function createIceServer = null; // Firefox: // - Creates IceServers for Firefox // - Use .url for FireFox. // - Multiple Urls support // Chrome: // - Creates iceServers from the urls for Chrome M34 and above. // - .urls is supported since Chrome M34. // - Multiple Urls support // Plugin: // - Creates Ice Servers for Plugin Browsers // - Multiple Urls support // - This is a WebRTC Function createIceServers = null; //------------------------------------------------------------ //The RTCPeerConnection object. RTCPeerConnection = null; // Creates RTCSessionDescription object for Plugin Browsers RTCSessionDescription = (typeof RTCSessionDescription === 'function') ? RTCSessionDescription : null; // Creates RTCIceCandidate object for Plugin Browsers RTCIceCandidate = (typeof RTCIceCandidate === 'function') ? RTCIceCandidate : null; // Get UserMedia (only difference is the prefix). // Code from Adam Barth. getUserMedia = null; // Attach a media stream to an element. attachMediaStream = null; // Re-attach a media stream to an element. reattachMediaStream = null; // Detected browser agent name. Types are: // - 'firefox': Firefox browser. // - 'chrome': Chrome browser. // - 'opera': Opera browser. // - 'safari': Safari browser. // - 'IE' - Internet Explorer browser. webrtcDetectedBrowser = null; // Detected browser version. webrtcDetectedVersion = null; // Check for browser types and react accordingly if (navigator.mozGetUserMedia) { webrtcDetectedBrowser = 'firefox'; webrtcDetectedVersion = parseInt(navigator .userAgent.match(/Firefox\/([0-9]+)\./)[1], 10); webrtcDetectedType = 'moz'; webrtcDetectedDCSupport = 'SCTP'; RTCPeerConnection = function (pcConfig, pcConstraints) { AdapterJS.maybeFixConfiguration(pcConfig); return new mozRTCPeerConnection(pcConfig, pcConstraints); }; // The RTCSessionDescription object. RTCSessionDescription = mozRTCSessionDescription; window.RTCSessionDescription = RTCSessionDescription; // The RTCIceCandidate object. RTCIceCandidate = mozRTCIceCandidate; window.RTCIceCandidate = RTCIceCandidate; window.getUserMedia = navigator.mozGetUserMedia.bind(navigator); navigator.getUserMedia = window.getUserMedia; // Shim for MediaStreamTrack.getSources. 
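// Usage sketch for the shim defined below (Firefox cannot enumerate devices here, so it reports a
// single default audio and a single default video source):
//   MediaStreamTrack.getSources(function (sources) {
//     // sources is [{ kind: 'audio', id: 'default', ... }, { kind: 'video', id: 'default', ... }]
//   });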
MediaStreamTrack.getSources = function(successCb) { setTimeout(function() { var infos = [ { kind: 'audio', id: 'default', label:'', facing:'' }, { kind: 'video', id: 'default', label:'', facing:'' } ]; successCb(infos); }, 0); }; createIceServer = function (url, username, password) { var iceServer = null; var url_parts = url.split(':'); if (url_parts[0].indexOf('stun') === 0) { iceServer = { url : url }; } else if (url_parts[0].indexOf('turn') === 0) { if (webrtcDetectedVersion < 27) { var turn_url_parts = url.split('?'); if (turn_url_parts.length === 1 || turn_url_parts[1].indexOf('transport=udp') === 0) { iceServer = { url : turn_url_parts[0], credential : password, username : username }; } } else { iceServer = { url : url, credential : password, username : username }; } } return iceServer; }; createIceServers = function (urls, username, password) { var iceServers = []; for (var i = 0; i < urls.length; i++) { var iceServer = createIceServer(urls[i], username, password); if (iceServer !== null) { iceServers.push(iceServer); } } return iceServers; }; attachMediaStream = function (element, stream) { element.mozSrcObject = stream; if (stream !== null) element.play(); return element; }; reattachMediaStream = function (to, from) { to.mozSrcObject = from.mozSrcObject; to.play(); return to; }; MediaStreamTrack.getSources = MediaStreamTrack.getSources || function (callback) { if (!callback) { throw new TypeError('Failed to execute \'getSources\' on \'MediaStreamTrack\'' + ': 1 argument required, but only 0 present.'); } return callback([]); }; // Fake get{Video,Audio}Tracks if (!MediaStream.prototype.getVideoTracks) { MediaStream.prototype.getVideoTracks = function () { return []; }; } if (!MediaStream.prototype.getAudioTracks) { MediaStream.prototype.getAudioTracks = function () { return []; }; } AdapterJS.maybeThroughWebRTCReady(); } else if (navigator.webkitGetUserMedia) { webrtcDetectedBrowser = 'chrome'; webrtcDetectedType = 'webkit'; webrtcDetectedVersion = parseInt(navigator .userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10); // check if browser is opera 20+ var checkIfOpera = navigator.userAgent.match(/\bOPR\/(\d+)/); if (checkIfOpera !== null) { webrtcDetectedBrowser = 'opera'; webrtcDetectedVersion = parseInt(checkIfOpera[1], 10); } // check browser datachannel support if ((webrtcDetectedBrowser === 'chrome' && webrtcDetectedVersion >= 31) || (webrtcDetectedBrowser === 'opera' && webrtcDetectedVersion >= 20)) { webrtcDetectedDCSupport = 'SCTP'; } else if (webrtcDetectedBrowser === 'chrome' && webrtcDetectedVersion < 30 && webrtcDetectedVersion > 24) { webrtcDetectedDCSupport = 'RTP'; } else { webrtcDetectedDCSupport = ''; } createIceServer = function (url, username, password) { var iceServer = null; var url_parts = url.split(':'); if (url_parts[0].indexOf('stun') === 0) { iceServer = { 'url' : url }; } else if (url_parts[0].indexOf('turn') === 0) { iceServer = { 'url' : url, 'credential' : password, 'username' : username }; } return iceServer; }; createIceServers = function (urls, username, password) { var iceServers = []; if (webrtcDetectedVersion >= 34) { iceServers = { 'urls' : urls, 'credential' : password, 'username' : username }; } else { for (var i = 0; i < urls.length; i++) { var iceServer = createIceServer(urls[i], username, password); if (iceServer !== null) { iceServers.push(iceServer); } } } return iceServers; }; RTCPeerConnection = function (pcConfig, pcConstraints) { if (webrtcDetectedVersion < 34) { AdapterJS.maybeFixConfiguration(pcConfig); } return new 
webkitRTCPeerConnection(pcConfig, pcConstraints); }; window.getUserMedia = navigator.webkitGetUserMedia.bind(navigator); navigator.getUserMedia = window.getUserMedia; attachMediaStream = function (element, stream) { if (typeof element.srcObject !== 'undefined') { element.srcObject = stream; } else if (typeof element.mozSrcObject !== 'undefined') { element.mozSrcObject = stream; } else if (typeof element.src !== 'undefined') { element.src = (stream === null ? '' : URL.createObjectURL(stream)); } else { console.log('Error attaching stream to element.'); } return element; }; reattachMediaStream = function (to, from) { to.src = from.src; return to; }; AdapterJS.maybeThroughWebRTCReady(); } else if (navigator.mediaDevices && navigator.userAgent.match( /Edge\/(\d+).(\d+)$/)) { webrtcDetectedBrowser = 'edge'; webrtcDetectedVersion = parseInt(navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)[2], 10); // the minimum version still supported by adapter. webrtcMinimumVersion = 12; window.getUserMedia = navigator.getUserMedia.bind(navigator); attachMediaStream = function(element, stream) { element.srcObject = stream; return element; }; reattachMediaStream = function(to, from) { to.srcObject = from.srcObject; return to; }; AdapterJS.maybeThroughWebRTCReady(); } else { // TRY TO USE PLUGIN // IE 9 is not offering an implementation of console.log until you open a console if (typeof console !== 'object' || typeof console.log !== 'function') { /* jshint -W020 */ console = {} || console; // Implemented based on console specs from MDN // You may override these functions console.log = function (arg) {}; console.info = function (arg) {}; console.error = function (arg) {}; console.dir = function (arg) {}; console.exception = function (arg) {}; console.trace = function (arg) {}; console.warn = function (arg) {}; console.count = function (arg) {}; console.debug = function (arg) {}; console.count = function (arg) {}; console.time = function (arg) {}; console.timeEnd = function (arg) {}; console.group = function (arg) {}; console.groupCollapsed = function (arg) {}; console.groupEnd = function (arg) {}; /* jshint +W020 */ } webrtcDetectedType = 'plugin'; webrtcDetectedDCSupport = 'plugin'; AdapterJS.parseWebrtcDetectedBrowser(); isIE = webrtcDetectedBrowser === 'IE'; /* jshint -W035 */ AdapterJS.WebRTCPlugin.WaitForPluginReady = function() { while (AdapterJS.WebRTCPlugin.pluginState !== AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY) { /* empty because it needs to prevent the function from running. 
*/ } }; /* jshint +W035 */ AdapterJS.WebRTCPlugin.callWhenPluginReady = function (callback) { if (AdapterJS.WebRTCPlugin.pluginState === AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY) { // Call immediately if possible // Once the plugin is set, the code will always take this path callback(); } else { // otherwise start a 100ms interval var checkPluginReadyState = setInterval(function () { if (AdapterJS.WebRTCPlugin.pluginState === AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY) { clearInterval(checkPluginReadyState); callback(); } }, 100); } }; AdapterJS.WebRTCPlugin.setLogLevel = function(logLevel) { AdapterJS.WebRTCPlugin.callWhenPluginReady(function() { AdapterJS.WebRTCPlugin.plugin.setLogLevel(logLevel); }); }; AdapterJS.WebRTCPlugin.injectPlugin = function () { // only inject once the page is ready if (document.readyState !== 'complete') { return; } // Prevent multiple injections if (AdapterJS.WebRTCPlugin.pluginState !== AdapterJS.WebRTCPlugin.PLUGIN_STATES.INITIALIZING) { return; } AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.INJECTING; if (webrtcDetectedBrowser === 'IE' && webrtcDetectedVersion <= 10) { var frag = document.createDocumentFragment(); AdapterJS.WebRTCPlugin.plugin = document.createElement('div'); AdapterJS.WebRTCPlugin.plugin.innerHTML = '' + ' ' + ' ' + ' ' + '' + '' + // uncomment to be able to use virtual cams (AdapterJS.options.getAllCams ? '':'') + ''; while (AdapterJS.WebRTCPlugin.plugin.firstChild) { frag.appendChild(AdapterJS.WebRTCPlugin.plugin.firstChild); } document.body.appendChild(frag); // Need to re-fetch the plugin AdapterJS.WebRTCPlugin.plugin = document.getElementById(AdapterJS.WebRTCPlugin.pluginInfo.pluginId); } else { // Load Plugin AdapterJS.WebRTCPlugin.plugin = document.createElement('object'); AdapterJS.WebRTCPlugin.plugin.id = AdapterJS.WebRTCPlugin.pluginInfo.pluginId; // IE will only start the plugin if it's ACTUALLY visible if (isIE) { AdapterJS.WebRTCPlugin.plugin.width = '1px'; AdapterJS.WebRTCPlugin.plugin.height = '1px'; } else { // The size of the plugin on Safari should be 0x0px // so that the autorisation prompt is at the top AdapterJS.WebRTCPlugin.plugin.width = '0px'; AdapterJS.WebRTCPlugin.plugin.height = '0px'; } AdapterJS.WebRTCPlugin.plugin.type = AdapterJS.WebRTCPlugin.pluginInfo.type; AdapterJS.WebRTCPlugin.plugin.innerHTML = '' + '' + ' ' + (AdapterJS.options.getAllCams ? '':'') + '' + ''; document.body.appendChild(AdapterJS.WebRTCPlugin.plugin); } AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.INJECTED; }; AdapterJS.WebRTCPlugin.isPluginInstalled = function (comName, plugName, installedCb, notInstalledCb) { if (!isIE) { var pluginArray = navigator.plugins; for (var i = 0; i < pluginArray.length; i++) { if (pluginArray[i].name.indexOf(plugName) >= 0) { installedCb(); return; } } notInstalledCb(); } else { try { var axo = new ActiveXObject(comName + '.' 
+ plugName); } catch (e) { notInstalledCb(); return; } installedCb(); } }; AdapterJS.WebRTCPlugin.defineWebRTCInterface = function () { if (AdapterJS.WebRTCPlugin.pluginState === AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY) { console.error("AdapterJS - WebRTC interface has already been defined"); return; } AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.INITIALIZING; AdapterJS.isDefined = function (variable) { return variable !== null && variable !== undefined; }; createIceServer = function (url, username, password) { var iceServer = null; var url_parts = url.split(':'); if (url_parts[0].indexOf('stun') === 0) { iceServer = { 'url' : url, 'hasCredentials' : false }; } else if (url_parts[0].indexOf('turn') === 0) { iceServer = { 'url' : url, 'hasCredentials' : true, 'credential' : password, 'username' : username }; } return iceServer; }; createIceServers = function (urls, username, password) { var iceServers = []; for (var i = 0; i < urls.length; ++i) { iceServers.push(createIceServer(urls[i], username, password)); } return iceServers; }; RTCSessionDescription = function (info) { AdapterJS.WebRTCPlugin.WaitForPluginReady(); return AdapterJS.WebRTCPlugin.plugin. ConstructSessionDescription(info.type, info.sdp); }; RTCPeerConnection = function (servers, constraints) { var iceServers = null; if (servers) { iceServers = servers.iceServers; for (var i = 0; i < iceServers.length; i++) { if (iceServers[i].urls && !iceServers[i].url) { iceServers[i].url = iceServers[i].urls; } iceServers[i].hasCredentials = AdapterJS. isDefined(iceServers[i].username) && AdapterJS.isDefined(iceServers[i].credential); } } var mandatory = (constraints && constraints.mandatory) ? constraints.mandatory : null; var optional = (constraints && constraints.optional) ? constraints.optional : null; AdapterJS.WebRTCPlugin.WaitForPluginReady(); return AdapterJS.WebRTCPlugin.plugin. PeerConnection(AdapterJS.WebRTCPlugin.pageId, iceServers, mandatory, optional); }; MediaStreamTrack = {}; MediaStreamTrack.getSources = function (callback) { AdapterJS.WebRTCPlugin.callWhenPluginReady(function() { AdapterJS.WebRTCPlugin.plugin.GetSources(callback); }); }; window.getUserMedia = function (constraints, successCallback, failureCallback) { constraints.audio = constraints.audio || false; constraints.video = constraints.video || false; AdapterJS.WebRTCPlugin.callWhenPluginReady(function() { AdapterJS.WebRTCPlugin.plugin. getUserMedia(constraints, successCallback, failureCallback); }); }; window.navigator.getUserMedia = window.getUserMedia; attachMediaStream = function (element, stream) { if (!element || !element.parentNode) { return; } var streamId; if (stream === null) { streamId = ''; } else { if (typeof stream.enableSoundTracks !== 'undefined') { stream.enableSoundTracks(true); } streamId = stream.id; } var elementId = element.id.length === 0 ? 
Math.random().toString(36).slice(2) : element.id; var nodeName = element.nodeName.toLowerCase(); if (nodeName !== 'object') { // not a plugin tag yet var tag; switch(nodeName) { case 'audio': tag = AdapterJS.WebRTCPlugin.TAGS.AUDIO; break; case 'video': tag = AdapterJS.WebRTCPlugin.TAGS.VIDEO; break; default: tag = AdapterJS.WebRTCPlugin.TAGS.NONE; } var frag = document.createDocumentFragment(); var temp = document.createElement('div'); var classHTML = ''; if (element.className) { classHTML = 'class="' + element.className + '" '; } else if (element.attributes && element.attributes['class']) { classHTML = 'class="' + element.attributes['class'].value + '" '; } temp.innerHTML = '' + ' ' + ' ' + ' ' + ' ' + ' ' + ''; while (temp.firstChild) { frag.appendChild(temp.firstChild); } var height = ''; var width = ''; if (element.clientWidth || element.clientHeight) { width = element.clientWidth; height = element.clientHeight; } else if (element.width || element.height) { width = element.width; height = element.height; } element.parentNode.insertBefore(frag, element); frag = document.getElementById(elementId); frag.width = width; frag.height = height; element.parentNode.removeChild(element); } else { // already an tag, just change the stream id var children = element.children; for (var i = 0; i !== children.length; ++i) { if (children[i].name === 'streamId') { children[i].value = streamId; break; } } element.setStreamId(streamId); } var newElement = document.getElementById(elementId); AdapterJS.forwardEventHandlers(newElement, element, Object.getPrototypeOf(element)); return newElement; }; reattachMediaStream = function (to, from) { var stream = null; var children = from.children; for (var i = 0; i !== children.length; ++i) { if (children[i].name === 'streamId') { AdapterJS.WebRTCPlugin.WaitForPluginReady(); stream = AdapterJS.WebRTCPlugin.plugin .getStreamWithId(AdapterJS.WebRTCPlugin.pageId, children[i].value); break; } } if (stream !== null) { return attachMediaStream(to, stream); } else { console.log('Could not find the stream associated with this element'); } }; AdapterJS.forwardEventHandlers = function (destElem, srcElem, prototype) { properties = Object.getOwnPropertyNames( prototype ); for(prop in properties) { propName = properties[prop]; if (typeof(propName.slice) === 'function') { if (propName.slice(0,2) == 'on' && srcElem[propName] != null) { if (isIE) { destElem.attachEvent(propName,srcElem[propName]); } else { destElem.addEventListener(propName.slice(2), srcElem[propName], false) } } else { //TODO (http://jira.temasys.com.sg/browse/TWP-328) Forward non-event properties ? } } } var subPrototype = Object.getPrototypeOf(prototype) if(subPrototype != null) { AdapterJS.forwardEventHandlers(destElem, srcElem, subPrototype); } } RTCIceCandidate = function (candidate) { if (!candidate.sdpMid) { candidate.sdpMid = ''; } AdapterJS.WebRTCPlugin.WaitForPluginReady(); return AdapterJS.WebRTCPlugin.plugin.ConstructIceCandidate( candidate.sdpMid, candidate.sdpMLineIndex, candidate.candidate ); }; // inject plugin AdapterJS.addEvent(document, 'readystatechange', AdapterJS.WebRTCPlugin.injectPlugin); AdapterJS.WebRTCPlugin.injectPlugin(); }; // This function will be called if the plugin is needed (browser different // from Chrome or Firefox), but the plugin is not installed. 
AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCb = AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCb || function() { AdapterJS.addEvent(document, 'readystatechange', AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCbPriv); AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCbPriv(); }; AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCbPriv = function () { if (AdapterJS.options.hidePluginInstallPrompt) { return; } var downloadLink = AdapterJS.WebRTCPlugin.pluginInfo.downloadLink; if(downloadLink) { // if download link var popupString; if (AdapterJS.WebRTCPlugin.pluginInfo.portalLink) { // is portal link popupString = 'This website requires you to install the ' + ' ' + AdapterJS.WebRTCPlugin.pluginInfo.companyName + ' WebRTC Plugin' + ' to work on this browser.'; } else { // no portal link, just print a generic explanation popupString = AdapterJS.TEXT.PLUGIN.REQUIRE_INSTALLATION; } AdapterJS.renderNotificationBar(popupString, AdapterJS.TEXT.PLUGIN.BUTTON, downloadLink); } else { // no download link, just print a generic explanation AdapterJS.renderNotificationBar(AdapterJS.TEXT.PLUGIN.NOT_SUPPORTED); } }; // Try to detect the plugin and act accordingly AdapterJS.WebRTCPlugin.isPluginInstalled( AdapterJS.WebRTCPlugin.pluginInfo.prefix, AdapterJS.WebRTCPlugin.pluginInfo.plugName, AdapterJS.WebRTCPlugin.defineWebRTCInterface, AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCb); } }).call(this,"/modules/RTC/adapter.screenshare.js") },{"jitsi-meet-logger":79}],21:[function(require,module,exports){ (function (__filename){ var logger = require("jitsi-meet-logger").getLogger(__filename); var UsernameGenerator = require('../util/UsernameGenerator'); function supportsLocalStorage() { try { return 'localStorage' in window && window.localStorage !== null; } catch (e) { logger.log("localstorage is not supported"); return false; } } function generateUniqueId() { function _p8() { return (Math.random().toString(16) + "000000000").substr(2, 8); } return _p8() + _p8() + _p8() + _p8(); } function Settings(conferenceID) { this.displayName = ''; this.userId; this.confSettings = null; this.conferenceID = conferenceID; this.callStatsUserName; if (supportsLocalStorage()) { if(!window.localStorage.getItem(conferenceID)) this.confSettings = {}; else this.confSettings = JSON.parse(window.localStorage.getItem(conferenceID)); if(!this.confSettings.jitsiMeetId) { this.confSettings.jitsiMeetId = generateUniqueId(); logger.log("generated id", this.confSettings.jitsiMeetId); this.save(); } if (!this.confSettings.callStatsUserName) { this.confSettings.callStatsUserName = UsernameGenerator.generateUsername(); logger.log('generated callstats uid', this.confSettings.callStatsUserName); this.save(); } this.userId = this.confSettings.jitsiMeetId || ''; this.displayName = this.confSettings.displayname || ''; this.callStatsUserName = this.confSettings.callStatsUserName || ''; } else { logger.log("local storage is not supported"); this.userId = generateUniqueId(); this.callStatsUserName = UsernameGenerator.generateUsername(); } } Settings.prototype.save = function () { if(supportsLocalStorage()) window.localStorage.setItem(this.conferenceID, JSON.stringify(this.confSettings)); } Settings.prototype.setDisplayName = function (newDisplayName) { this.displayName = newDisplayName; if(this.confSettings != null) this.confSettings.displayname = newDisplayName; this.save(); return this.displayName; } Settings.prototype.getSettings = function () { return { displayName: this.displayName, uid: this.userId }; } /** * Returns fake 
username for callstats * @returns {string} fake username for callstats */ Settings.prototype.getCallStatsUserName = function () { return this.callStatsUserName; } module.exports = Settings; }).call(this,"/modules/settings/Settings.js") },{"../util/UsernameGenerator":28,"jitsi-meet-logger":79}],22:[function(require,module,exports){ (function (__filename){ /* global $, Strophe, callstats */ var logger = require("jitsi-meet-logger").getLogger(__filename); var jsSHA = require('jssha'); var io = require('socket.io-client'); /** * @const * @see http://www.callstats.io/api/#enumeration-of-wrtcfuncnames */ var wrtcFuncNames = { createOffer: "createOffer", createAnswer: "createAnswer", setLocalDescription: "setLocalDescription", setRemoteDescription: "setRemoteDescription", addIceCandidate: "addIceCandidate", getUserMedia: "getUserMedia" }; var callStats = null; function initCallback (err, msg) { logger.log("CallStats Status: err=" + err + " msg=" + msg); } /** * Returns a function which invokes f in a try/catch block, logs any exception * to the console, and then swallows it. * * @param f the function to invoke in a try/catch block * @return a function which invokes f in a try/catch block, logs any exception * to the console, and then swallows it */ function _try_catch (f) { return function () { try { f.apply(this, arguments); } catch (e) { logger.error(e); } }; } /** * Creates new CallStats instance that handles all callstats API calls. * @param peerConnection {JingleSessionPC} the session object * @param Settings {Settings} the settings instance. Declared in * /modules/settings/Settings.js * @param options {object} credentials for callstats. */ var CallStats = _try_catch(function(jingleSession, Settings, options) { try{ //check weather that should work with more than 1 peerconnection if(!callStats) { callStats = new callstats($, io, jsSHA); } else { return; } this.session = jingleSession; this.peerconnection = jingleSession.peerconnection.peerconnection; this.userID = Settings.getCallStatsUserName(); //FIXME: change it to something else (maybe roomName) var location = window.location; this.confID = location.hostname + location.pathname; //userID is generated or given by the origin server callStats.initialize(options.callStatsID, options.callStatsSecret, this.userID, initCallback); callStats.addNewFabric(this.peerconnection, Strophe.getResourceFromJid(jingleSession.peerjid), callStats.fabricUsage.multiplex, this.confID, this.pcCallback.bind(this)); } catch (e) { // The callstats.io API failed to initialize (e.g. because its // download failed to succeed in general or on time). Further // attempts to utilize it cannot possibly succeed. 
callStats = null; logger.error(e); } // notify callstats about failures if there were any if (CallStats.pendingErrors.length) { CallStats.pendingErrors.forEach(function (error) { CallStats._reportError.call(this, error.type, error.error, error.pc); }, this); CallStats.pendingErrors.length = 0; } }); // some errors may happen before CallStats init // in this case we accumulate them in this array // and send them to callstats on init CallStats.pendingErrors = []; CallStats.prototype.pcCallback = _try_catch(function (err, msg) { if (!callStats) { return; } logger.log("Monitoring status: "+ err + " msg: " + msg); callStats.sendFabricEvent(this.peerconnection, callStats.fabricEvent.fabricSetup, this.confID); }); /** * Notifies CallStats for mute events * @param mute {boolean} true for muted and false for not muted * @param type {String} "audio"/"video" */ CallStats.prototype.sendMuteEvent = _try_catch(function (mute, type) { if (!callStats) { return; } var event = null; if (type === "video") { event = (mute? callStats.fabricEvent.videoPause : callStats.fabricEvent.videoResume); } else { event = (mute? callStats.fabricEvent.audioMute : callStats.fabricEvent.audioUnmute); } callStats.sendFabricEvent(this.peerconnection, event, this.confID); }); /** * Notifies CallStats for connection setup errors */ CallStats.prototype.sendTerminateEvent = _try_catch(function () { if(!callStats) { return; } callStats.sendFabricEvent(this.peerconnection, callStats.fabricEvent.fabricTerminated, this.confID); }); /** * Notifies CallStats for connection setup errors */ CallStats.prototype.sendSetupFailedEvent = _try_catch(function () { if(!callStats) { return; } callStats.sendFabricEvent(this.peerconnection, callStats.fabricEvent.fabricSetupFailed, this.confID); }); /** * Sends the given feedback through CallStats. * * @param overallFeedback an integer between 1 and 5 indicating the * user feedback * @param detailedFeedback detailed feedback from the user. Not yet used */ CallStats.prototype.sendFeedback = _try_catch( function(overallFeedback, detailedFeedback) { if(!callStats) { return; } var feedbackString = '{"userID":"' + this.userID + '"' + ', "overall":' + overallFeedback + ', "comment": "' + detailedFeedback + '"}'; var feedbackJSON = JSON.parse(feedbackString); callStats.sendUserFeedback(this.confID, feedbackJSON); }); /** * Reports an error to callstats. * * @param type the type of the error, which will be one of the wrtcFuncNames * @param e the error * @param pc the peerconnection * @private */ CallStats._reportError = function (type, e, pc) { if (callStats) { callStats.reportError(pc, this.confID, type, e); } else { CallStats.pendingErrors.push({ type: type, error: e, pc: pc}); } // else just ignore it }; /** * Notifies CallStats that getUserMedia failed. * * @param {Error} e error to send * @param {CallStats} cs callstats instance related to the error (optional) */ CallStats.sendGetUserMediaFailed = _try_catch(function (e, cs) { CallStats._reportError.call(cs, wrtcFuncNames.getUserMedia, e, null); }); /** * Notifies CallStats that peer connection failed to create offer. * * @param {Error} e error to send * @param {RTCPeerConnection} pc connection on which failure occured. * @param {CallStats} cs callstats instance related to the error (optional) */ CallStats.sendCreateOfferFailed = _try_catch(function (e, pc, cs) { CallStats._reportError.call(cs, wrtcFuncNames.createOffer, e, pc); }); /** * Notifies CallStats that peer connection failed to create answer. 
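 * A minimal calling sketch (the failing peer connection and the CallStats
 * instance are assumed to exist already; names are illustrative):
 *
 *   pc.createAnswer(onSuccess, function (error) {
 *     CallStats.sendCreateAnswerFailed(error, pc, callStatsInstance);
 *   });
 *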
* * @param {Error} e error to send * @param {RTCPeerConnection} pc connection on which failure occured. * @param {CallStats} cs callstats instance related to the error (optional) */ CallStats.sendCreateAnswerFailed = _try_catch(function (e, pc, cs) { CallStats._reportError.call(cs, wrtcFuncNames.createAnswer, e, pc); }); /** * Notifies CallStats that peer connection failed to set local description. * * @param {Error} e error to send * @param {RTCPeerConnection} pc connection on which failure occured. * @param {CallStats} cs callstats instance related to the error (optional) */ CallStats.sendSetLocalDescFailed = _try_catch(function (e, pc, cs) { CallStats._reportError.call(cs, wrtcFuncNames.setLocalDescription, e, pc); }); /** * Notifies CallStats that peer connection failed to set remote description. * * @param {Error} e error to send * @param {RTCPeerConnection} pc connection on which failure occured. * @param {CallStats} cs callstats instance related to the error (optional) */ CallStats.sendSetRemoteDescFailed = _try_catch(function (e, pc, cs) { CallStats._reportError.call(cs, wrtcFuncNames.setRemoteDescription, e, pc); }); /** * Notifies CallStats that peer connection failed to add ICE candidate. * * @param {Error} e error to send * @param {RTCPeerConnection} pc connection on which failure occured. * @param {CallStats} cs callstats instance related to the error (optional) */ CallStats.sendAddIceCandidateFailed = _try_catch(function (e, pc, cs) { CallStats._reportError.call(cs, wrtcFuncNames.addIceCandidate, e, pc); }); module.exports = CallStats; }).call(this,"/modules/statistics/CallStats.js") },{"jitsi-meet-logger":79,"jssha":81,"socket.io-client":118}],23:[function(require,module,exports){ /* global config */ /** * Provides statistics for the local stream. */ var RTCBrowserType = require('../RTC/RTCBrowserType'); /** * Size of the webaudio analyzer buffer. * @type {number} */ var WEBAUDIO_ANALYZER_FFT_SIZE = 2048; /** * Value of the webaudio analyzer smoothing time parameter. * @type {number} */ var WEBAUDIO_ANALYZER_SMOOTING_TIME = 0.8; window.AudioContext = window.AudioContext || window.webkitAudioContext; var context = null; if(window.AudioContext) { context = new AudioContext(); } /** * Converts time domain data array to audio level. * @param samples the time domain data array. * @returns {number} the audio level */ function timeDomainDataToAudioLevel(samples) { var maxVolume = 0; var length = samples.length; for (var i = 0; i < length; i++) { if (maxVolume < samples[i]) maxVolume = samples[i]; } return parseFloat(((maxVolume - 127) / 128).toFixed(3)); } /** * Animates audio level change * @param newLevel the new audio level * @param lastLevel the last audio level * @returns {Number} the audio level to be set */ function animateLevel(newLevel, lastLevel) { var value = 0; var diff = lastLevel - newLevel; if(diff > 0.2) { value = lastLevel - 0.2; } else if(diff < -0.4) { value = lastLevel + 0.4; } else { value = newLevel; } return parseFloat(value.toFixed(3)); } /** * LocalStatsCollector calculates statistics for the local stream. * * @param stream the local stream * @param interval stats refresh interval given in ms. * @param callback function that receives the audio levels. * @constructor */ function LocalStatsCollector(stream, interval, callback) { this.stream = stream; this.intervalId = null; this.intervalMilis = interval; this.audioLevel = 0; this.callback = callback; } /** * Starts the collecting the statistics. 
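 * Usage sketch (assumes `stream` is a local MediaStream containing an audio
 * track; 200 ms matches the interval used by the Statistics module below):
 *
 *   var collector = new LocalStatsCollector(stream, 200, function (audioLevel) {
 *     // audioLevel is roughly in the 0..1 range
 *   });
 *   collector.start();
 *   // ... later, when the track is no longer needed
 *   collector.stop();
 *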
*/ LocalStatsCollector.prototype.start = function () { if (!context || RTCBrowserType.isTemasysPluginUsed()) return; var analyser = context.createAnalyser(); analyser.smoothingTimeConstant = WEBAUDIO_ANALYZER_SMOOTING_TIME; analyser.fftSize = WEBAUDIO_ANALYZER_FFT_SIZE; var source = context.createMediaStreamSource(this.stream); source.connect(analyser); var self = this; this.intervalId = setInterval( function () { var array = new Uint8Array(analyser.frequencyBinCount); analyser.getByteTimeDomainData(array); var audioLevel = timeDomainDataToAudioLevel(array); if (audioLevel != self.audioLevel) { self.audioLevel = animateLevel(audioLevel, self.audioLevel); self.callback(self.audioLevel); } }, this.intervalMilis ); }; /** * Stops collecting the statistics. */ LocalStatsCollector.prototype.stop = function () { if (this.intervalId) { clearInterval(this.intervalId); this.intervalId = null; } }; module.exports = LocalStatsCollector; },{"../RTC/RTCBrowserType":17}],24:[function(require,module,exports){ (function (__filename){ /* global require, ssrc2jid */ /* jshint -W117 */ var logger = require("jitsi-meet-logger").getLogger(__filename); var RTCBrowserType = require("../RTC/RTCBrowserType"); var StatisticsEvents = require("../../service/statistics/Events"); /* Whether we support the browser we are running into for logging statistics */ var browserSupported = RTCBrowserType.isChrome() || RTCBrowserType.isOpera(); var keyMap = {}; keyMap[RTCBrowserType.RTC_BROWSER_FIREFOX] = { "ssrc": "ssrc", "packetsReceived": "packetsReceived", "packetsLost": "packetsLost", "packetsSent": "packetsSent", "bytesReceived": "bytesReceived", "bytesSent": "bytesSent" }; keyMap[RTCBrowserType.RTC_BROWSER_CHROME] = { "receiveBandwidth": "googAvailableReceiveBandwidth", "sendBandwidth": "googAvailableSendBandwidth", "remoteAddress": "googRemoteAddress", "transportType": "googTransportType", "localAddress": "googLocalAddress", "activeConnection": "googActiveConnection", "ssrc": "ssrc", "packetsReceived": "packetsReceived", "packetsSent": "packetsSent", "packetsLost": "packetsLost", "bytesReceived": "bytesReceived", "bytesSent": "bytesSent", "googFrameHeightReceived": "googFrameHeightReceived", "googFrameWidthReceived": "googFrameWidthReceived", "googFrameHeightSent": "googFrameHeightSent", "googFrameWidthSent": "googFrameWidthSent", "audioInputLevel": "audioInputLevel", "audioOutputLevel": "audioOutputLevel" }; keyMap[RTCBrowserType.RTC_BROWSER_OPERA] = keyMap[RTCBrowserType.RTC_BROWSER_CHROME]; /** * Calculates packet lost percent using the number of lost packets and the * number of all packet. * @param lostPackets the number of lost packets * @param totalPackets the number of all packets. * @returns {number} packet loss percent */ function calculatePacketLoss(lostPackets, totalPackets) { if(!totalPackets || totalPackets <= 0 || !lostPackets || lostPackets <= 0) return 0; return Math.round((lostPackets/totalPackets)*100); } function getStatValue(item, name) { var browserType = RTCBrowserType.getBrowserType(); if (!keyMap[browserType][name]) throw "The property isn't supported!"; var key = keyMap[browserType][name]; return (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) ? item.stat(key) : item[key]; } function formatAudioLevel(audioLevel) { return Math.min(Math.max(audioLevel, 0), 1); } /** * Checks whether a certain record should be included in the logged statistics. 
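 * For instance, with the Chrome report format used here (the report id is
 * illustrative), acceptStat('ssrc_1234_recv', 'ssrc', 'googTrackId') is false,
 * while acceptStat('ssrc_1234_recv', 'ssrc', 'audioInputLevel') is true.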
*/ function acceptStat(reportId, reportType, statName) { if (reportType == "googCandidatePair" && statName == "googChannelId") return false; if (reportType == "ssrc") { if (statName == "googTrackId" || statName == "transportId" || statName == "ssrc") return false; } return true; } /** * Checks whether a certain record should be included in the logged statistics. */ function acceptReport(id, type) { if (id.substring(0, 15) == "googCertificate" || id.substring(0, 9) == "googTrack" || id.substring(0, 20) == "googLibjingleSession") return false; if (type == "googComponent") return false; return true; } /** * Peer statistics data holder. * @constructor */ function PeerStats() { this.ssrc2Loss = {}; this.ssrc2AudioLevel = {}; this.ssrc2bitrate = {}; this.ssrc2resolution = {}; } /** * Sets packets loss rate for given ssrc that belong to the peer * represented by this instance. * @param ssrc audio or video RTP stream SSRC. * @param lossRate new packet loss rate value to be set. */ PeerStats.prototype.setSsrcLoss = function (ssrc, lossRate) { this.ssrc2Loss[ssrc] = lossRate; }; /** * Sets resolution that belong to the ssrc * represented by this instance. * @param ssrc audio or video RTP stream SSRC. * @param resolution new resolution value to be set. */ PeerStats.prototype.setSsrcResolution = function (ssrc, resolution) { if(resolution === null && this.ssrc2resolution[ssrc]) { delete this.ssrc2resolution[ssrc]; } else if(resolution !== null) this.ssrc2resolution[ssrc] = resolution; }; /** * Sets the bit rate for given ssrc that belong to the peer * represented by this instance. * @param ssrc audio or video RTP stream SSRC. * @param bitrate new bitrate value to be set. */ PeerStats.prototype.setSsrcBitrate = function (ssrc, bitrate) { if(this.ssrc2bitrate[ssrc]) { this.ssrc2bitrate[ssrc].download += bitrate.download; this.ssrc2bitrate[ssrc].upload += bitrate.upload; } else { this.ssrc2bitrate[ssrc] = bitrate; } }; /** * Sets new audio level (input or output) for given ssrc that identifies * the stream which belongs to the peer represented by this instance. * @param ssrc RTP stream SSRC for which current audio level value will be * updated. * @param audioLevel the new audio level value to be set. Value is truncated to * fit the range from 0 to 1. */ PeerStats.prototype.setSsrcAudioLevel = function (ssrc, audioLevel) { // Range limit 0 - 1 this.ssrc2AudioLevel[ssrc] = formatAudioLevel(audioLevel); }; function ConferenceStats() { /** * The bandwidth * @type {{}} */ this.bandwidth = {}; /** * The bit rate * @type {{}} */ this.bitrate = {}; /** * The packet loss rate * @type {{}} */ this.packetLoss = null; /** * Array with the transport information. * @type {Array} */ this.transport = []; } /** * StatsCollector registers for stats updates of given * peerconnection in given interval. On each update particular * stats are extracted and put in {@link PeerStats} objects. Once the processing * is done the updated stats are published on the given event emitter. * * @param peerconnection webRTC peer connection object. * @param audioLevelsInterval audio levels refresh interval given in ms. * @param statsInterval stats refresh interval given in ms. * @param eventEmitter the event emitter on which stats updates are published. 
* @param config {object} supports the following properties - disableAudioLevels, disableStats, logStats * @constructor */ function StatsCollector(peerconnection, audioLevelsInterval, statsInterval, eventEmitter, config) { this.peerconnection = peerconnection; this.baselineAudioLevelsReport = null; this.currentAudioLevelsReport = null; this.currentStatsReport = null; this.baselineStatsReport = null; this.audioLevelsIntervalId = null; this.eventEmitter = eventEmitter; this.config = config || {}; this.conferenceStats = new ConferenceStats(); /** * Gather PeerConnection stats once every this many milliseconds. */ this.GATHER_INTERVAL = 15000; /** * Log stats via the focus once every this many milliseconds. */ this.LOG_INTERVAL = 60000; /** * Gather stats and store them in this.statsToBeLogged. */ this.gatherStatsIntervalId = null; /** * Send the stats already saved in this.statsToBeLogged to be logged via * the focus. */ this.logStatsIntervalId = null; /** * Stores the statistics which will be send to the focus to be logged. */ this.statsToBeLogged = { timestamps: [], stats: {} }; // Updates stats interval this.audioLevelsIntervalMilis = audioLevelsInterval; this.statsIntervalId = null; this.statsIntervalMilis = statsInterval; // Map of ssrcs to PeerStats this.ssrc2stats = {}; } module.exports = StatsCollector; /** * Stops stats updates. */ StatsCollector.prototype.stop = function () { if (this.audioLevelsIntervalId) { clearInterval(this.audioLevelsIntervalId); this.audioLevelsIntervalId = null; } if (this.statsIntervalId) { clearInterval(this.statsIntervalId); this.statsIntervalId = null; } if(this.logStatsIntervalId) { clearInterval(this.logStatsIntervalId); this.logStatsIntervalId = null; } if(this.gatherStatsIntervalId) { clearInterval(this.gatherStatsIntervalId); this.gatherStatsIntervalId = null; } }; /** * Callback passed to getStats method. * @param error an error that occurred on getStats call. */ StatsCollector.prototype.errorCallback = function (error) { logger.error("Get stats error", error); this.stop(); }; /** * Starts stats updates. 
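 * Usage sketch, mirroring how Statistics.startRemoteStats() below wires it up
 * (the peer connection and event emitter are assumed to exist already):
 *
 *   var collector = new StatsCollector(peerconnection, 200, 2000, eventEmitter);
 *   collector.start();
 *   // ... on teardown
 *   collector.stop();
 *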
*/ StatsCollector.prototype.start = function () { var self = this; this.audioLevelsIntervalId = setInterval( function () { // Interval updates self.peerconnection.getStats( function (report) { var results = null; if (!report || !report.result || typeof report.result != 'function') { results = report; } else { results = report.result(); } //logger.error("Got interval report", results); self.currentAudioLevelsReport = results; self.processAudioLevelReport(); self.baselineAudioLevelsReport = self.currentAudioLevelsReport; }, self.errorCallback ); }, self.audioLevelsIntervalMilis ); // if (!this.config.disableStats && browserSupported) { // this.statsIntervalId = setInterval( // function () { // // Interval updates // self.peerconnection.getStats( // function (report) { // var results = null; // if (!report || !report.result || // typeof report.result != 'function') { // //firefox // results = report; // } // else { // //chrome // results = report.result(); // } // //logger.error("Got interval report", results); // self.currentStatsReport = results; // try { // self.processStatsReport(); // } // catch (e) { // logger.error("Unsupported key:" + e, e); // } // // self.baselineStatsReport = self.currentStatsReport; // }, // self.errorCallback // ); // }, // self.statsIntervalMilis // ); // } // // if (this.config.logStats && browserSupported) { // this.gatherStatsIntervalId = setInterval( // function () { // self.peerconnection.getStats( // function (report) { // self.addStatsToBeLogged(report.result()); // }, // function () { // } // ); // }, // this.GATHER_INTERVAL // ); // // this.logStatsIntervalId = setInterval( // function() { self.logStats(); }, // this.LOG_INTERVAL); // } }; /** * Converts the stats to the format used for logging, and saves the data in * this.statsToBeLogged. * @param reports Reports as given by webkitRTCPerConnection.getStats. */ StatsCollector.prototype.addStatsToBeLogged = function (reports) { var self = this; var num_records = this.statsToBeLogged.timestamps.length; this.statsToBeLogged.timestamps.push(new Date().getTime()); reports.map(function (report) { if (!acceptReport(report.id, report.type)) return; var stat = self.statsToBeLogged.stats[report.id]; if (!stat) { stat = self.statsToBeLogged.stats[report.id] = {}; } stat.type = report.type; report.names().map(function (name) { if (!acceptStat(report.id, report.type, name)) return; var values = stat[name]; if (!values) { values = stat[name] = []; } while (values.length < num_records) { values.push(null); } values.push(report.stat(name)); }); }); }; //FIXME: //StatsCollector.prototype.logStats = function () { // // if(!APP.xmpp.sendLogs(this.statsToBeLogged)) // return; // // Reset the stats // this.statsToBeLogged.stats = {}; // this.statsToBeLogged.timestamps = []; //}; /** * Stats processing logic. 
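 * Worked example of the bitrate math performed below: if a report shows
 * 250000 bytesReceived now against 125000 bytesReceived one second earlier,
 * the download bitrate recorded for that SSRC is
 * Math.round(((125000 * 8) / 1) / 1000) = 1000 kbps.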
*/ StatsCollector.prototype.processStatsReport = function () { if (!this.baselineStatsReport) { return; } for (var idx in this.currentStatsReport) { var now = this.currentStatsReport[idx]; try { if (getStatValue(now, 'receiveBandwidth') || getStatValue(now, 'sendBandwidth')) { this.conferenceStats.bandwidth = { "download": Math.round( (getStatValue(now, 'receiveBandwidth')) / 1000), "upload": Math.round( (getStatValue(now, 'sendBandwidth')) / 1000) }; } } catch(e){/*not supported*/} if(now.type == 'googCandidatePair') { var ip, type, localIP, active; try { ip = getStatValue(now, 'remoteAddress'); type = getStatValue(now, "transportType"); localIP = getStatValue(now, "localAddress"); active = getStatValue(now, "activeConnection"); } catch(e){/*not supported*/} if(!ip || !type || !localIP || active != "true") continue; var addressSaved = false; for(var i = 0; i < this.conferenceStats.transport.length; i++) { if(this.conferenceStats.transport[i].ip == ip && this.conferenceStats.transport[i].type == type && this.conferenceStats.transport[i].localip == localIP) { addressSaved = true; } } if(addressSaved) continue; this.conferenceStats.transport.push({localip: localIP, ip: ip, type: type}); continue; } if(now.type == "candidatepair") { if(now.state == "succeeded") continue; var local = this.currentStatsReport[now.localCandidateId]; var remote = this.currentStatsReport[now.remoteCandidateId]; this.conferenceStats.transport.push({localip: local.ipAddress + ":" + local.portNumber, ip: remote.ipAddress + ":" + remote.portNumber, type: local.transport}); } if (now.type != 'ssrc' && now.type != "outboundrtp" && now.type != "inboundrtp") { continue; } var before = this.baselineStatsReport[idx]; if (!before) { logger.warn(getStatValue(now, 'ssrc') + ' not enough data'); continue; } var ssrc = getStatValue(now, 'ssrc'); if(!ssrc) continue; var ssrcStats = this.ssrc2stats[ssrc]; if (!ssrcStats) { ssrcStats = new PeerStats(); this.ssrc2stats[ssrc] = ssrcStats; } var isDownloadStream = true; var key = 'packetsReceived'; var packetsNow = getStatValue(now, key); if (typeof packetsNow === 'undefined' || packetsNow === null) { isDownloadStream = false; key = 'packetsSent'; packetsNow = getStatValue(now, key); if (typeof packetsNow === 'undefined' || packetsNow === null) { console.warn("No packetsReceived nor packetsSent stat found"); continue; } } if (!packetsNow || packetsNow < 0) packetsNow = 0; var packetsBefore = getStatValue(before, key); if (!packetsBefore || packetsBefore < 0) packetsBefore = 0; var packetRate = packetsNow - packetsBefore; if (!packetRate || packetRate < 0) packetRate = 0; var currentLoss = getStatValue(now, 'packetsLost'); if (!currentLoss || currentLoss < 0) currentLoss = 0; var previousLoss = getStatValue(before, 'packetsLost'); if (!previousLoss || previousLoss < 0) previousLoss = 0; var lossRate = currentLoss - previousLoss; if (!lossRate || lossRate < 0) lossRate = 0; var packetsTotal = (packetRate + lossRate); ssrcStats.setSsrcLoss(ssrc, {"packetsTotal": packetsTotal, "packetsLost": lossRate, "isDownloadStream": isDownloadStream}); var bytesReceived = 0, bytesSent = 0; if(getStatValue(now, "bytesReceived")) { bytesReceived = getStatValue(now, "bytesReceived") - getStatValue(before, "bytesReceived"); } if (getStatValue(now, "bytesSent")) { bytesSent = getStatValue(now, "bytesSent") - getStatValue(before, "bytesSent"); } var time = Math.round((now.timestamp - before.timestamp) / 1000); if (bytesReceived <= 0 || time <= 0) { bytesReceived = 0; } else { bytesReceived = 
Math.round(((bytesReceived * 8) / time) / 1000); } if (bytesSent <= 0 || time <= 0) { bytesSent = 0; } else { bytesSent = Math.round(((bytesSent * 8) / time) / 1000); } ssrcStats.setSsrcBitrate(ssrc, { "download": bytesReceived, "upload": bytesSent}); var resolution = {height: null, width: null}; try { if (getStatValue(now, "googFrameHeightReceived") && getStatValue(now, "googFrameWidthReceived")) { resolution.height = getStatValue(now, "googFrameHeightReceived"); resolution.width = getStatValue(now, "googFrameWidthReceived"); } else if (getStatValue(now, "googFrameHeightSent") && getStatValue(now, "googFrameWidthSent")) { resolution.height = getStatValue(now, "googFrameHeightSent"); resolution.width = getStatValue(now, "googFrameWidthSent"); } } catch(e){/*not supported*/} if (resolution.height && resolution.width) { ssrcStats.setSsrcResolution(ssrc, resolution); } else { ssrcStats.setSsrcResolution(ssrc, null); } } var self = this; // Jid stats var totalPackets = {download: 0, upload: 0}; var lostPackets = {download: 0, upload: 0}; var bitrateDownload = 0; var bitrateUpload = 0; var resolutions = {}; Object.keys(this.ssrc2stats).forEach( function (jid) { Object.keys(self.ssrc2stats[jid].ssrc2Loss).forEach( function (ssrc) { var type = "upload"; if(self.ssrc2stats[jid].ssrc2Loss[ssrc].isDownloadStream) type = "download"; totalPackets[type] += self.ssrc2stats[jid].ssrc2Loss[ssrc].packetsTotal; lostPackets[type] += self.ssrc2stats[jid].ssrc2Loss[ssrc].packetsLost; } ); Object.keys(self.ssrc2stats[jid].ssrc2bitrate).forEach( function (ssrc) { bitrateDownload += self.ssrc2stats[jid].ssrc2bitrate[ssrc].download; bitrateUpload += self.ssrc2stats[jid].ssrc2bitrate[ssrc].upload; delete self.ssrc2stats[jid].ssrc2bitrate[ssrc]; } ); resolutions[jid] = self.ssrc2stats[jid].ssrc2resolution; } ); this.conferenceStats.bitrate = {"upload": bitrateUpload, "download": bitrateDownload}; this.conferenceStats.packetLoss = { total: calculatePacketLoss(lostPackets.download + lostPackets.upload, totalPackets.download + totalPackets.upload), download: calculatePacketLoss(lostPackets.download, totalPackets.download), upload: calculatePacketLoss(lostPackets.upload, totalPackets.upload) }; this.eventEmitter.emit(StatisticsEvents.CONNECTION_STATS, { "bitrate": this.conferenceStats.bitrate, "packetLoss": this.conferenceStats.packetLoss, "bandwidth": this.conferenceStats.bandwidth, "resolution": resolutions, "transport": this.conferenceStats.transport }); this.conferenceStats.transport = []; }; /** * Stats processing logic. 
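 * The raw audioInputLevel/audioOutputLevel values reported by the browser are
 * divided by 32767 below, so a raw value of 16384, for example, is emitted on
 * the AUDIO_LEVEL event as roughly 0.5.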
*/ StatsCollector.prototype.processAudioLevelReport = function () { if (!this.baselineAudioLevelsReport) { return; } for (var idx in this.currentAudioLevelsReport) { var now = this.currentAudioLevelsReport[idx]; //if we don't have "packetsReceived" this is local stream if (now.type != 'ssrc' || !getStatValue(now, 'packetsReceived')) { continue; } var before = this.baselineAudioLevelsReport[idx]; if (!before) { logger.warn(getStatValue(now, 'ssrc') + ' not enough data'); continue; } var ssrc = getStatValue(now, 'ssrc'); if (!ssrc) { if((Date.now() - now.timestamp) < 3000) logger.warn("No ssrc: "); continue; } var ssrcStats = this.ssrc2stats[ssrc]; if (!ssrcStats) { ssrcStats = new PeerStats(); this.ssrc2stats[ssrc] = ssrcStats; } // Audio level var audioLevel = null; try { audioLevel = getStatValue(now, 'audioInputLevel'); if (!audioLevel) audioLevel = getStatValue(now, 'audioOutputLevel'); } catch(e) {/*not supported*/ logger.warn("Audio Levels are not available in the statistics."); clearInterval(this.audioLevelsIntervalId); return; } if (audioLevel) { // TODO: can't find specs about what this value really is, // but it seems to vary between 0 and around 32k. audioLevel = audioLevel / 32767; ssrcStats.setSsrcAudioLevel(ssrc, audioLevel); this.eventEmitter.emit( StatisticsEvents.AUDIO_LEVEL, ssrc, audioLevel); } } }; }).call(this,"/modules/statistics/RTPStatsCollector.js") },{"../../service/statistics/Events":135,"../RTC/RTCBrowserType":17,"jitsi-meet-logger":79}],25:[function(require,module,exports){ /* global require, APP */ var LocalStats = require("./LocalStatsCollector.js"); var RTPStats = require("./RTPStatsCollector.js"); var EventEmitter = require("events"); var StatisticsEvents = require("../../service/statistics/Events"); var CallStats = require("./CallStats"); var ScriptUtil = require('../util/ScriptUtil'); // Since callstats.io is a third party, we cannot guarantee the quality of // their service. More specifically, their server may take noticeably long // time to respond. Consequently, it is in our best interest (in the sense // that the intergration of callstats.io is pretty important to us but not // enough to allow it to prevent people from joining a conference) to (1) // start downloading their API as soon as possible and (2) do the // downloading asynchronously. function loadCallStatsAPI() { ScriptUtil.loadScript( 'https://api.callstats.io/static/callstats.min.js', /* async */ true, /* prepend */ true); // FIXME At the time of this writing, we hope that the callstats.io API will // have loaded by the time we needed it (i.e. CallStats.init is invoked). } var eventEmitter = new EventEmitter(); function Statistics(options) { this.rtpStats = null; this.eventEmitter = new EventEmitter(); this.options = options || {}; this.callStatsIntegrationEnabled = this.options.callStatsID && this.options.callStatsSecret // Even though AppID and AppSecret may be specified, the integration of // callstats.io may be disabled because of globally-disallowed requests // to any third parties. 
&& (this.options.disableThirdPartyRequests !== true); if(this.callStatsIntegrationEnabled) loadCallStatsAPI(); this.callstats = null; } Statistics.audioLevelsEnabled = false; Statistics.prototype.startRemoteStats = function (peerconnection) { if(!Statistics.audioLevelsEnabled) return; if (this.rtpStats) { this.rtpStats.stop(); } this.rtpStats = new RTPStats(peerconnection, 200, 2000, this.eventEmitter); this.rtpStats.start(); } Statistics.localStats = []; Statistics.startLocalStats = function (stream, callback) { if(!Statistics.audioLevelsEnabled) return; var localStats = new LocalStats(stream, 200, callback); this.localStats.push(localStats); localStats.start(); } Statistics.prototype.addAudioLevelListener = function(listener) { if(!Statistics.audioLevelsEnabled) return; this.eventEmitter.on(StatisticsEvents.AUDIO_LEVEL, listener); } Statistics.prototype.removeAudioLevelListener = function(listener) { if(!Statistics.audioLevelsEnabled) return; this.eventEmitter.removeListener(StatisticsEvents.AUDIO_LEVEL, listener); } Statistics.prototype.dispose = function () { if(Statistics.audioLevelsEnabled) { Statistics.stopAllLocalStats(); this.stopRemote(); if(this.eventEmitter) this.eventEmitter.removeAllListeners(); if(eventEmitter) eventEmitter.removeAllListeners(); } if(this.callstats) { this.callstats.sendTerminateEvent(); this.callstats = null; } } Statistics.stopAllLocalStats = function () { if(!Statistics.audioLevelsEnabled) return; for(var i = 0; i < this.localStats.length; i++) this.localStats[i].stop(); this.localStats = []; } Statistics.stopLocalStats = function (stream) { if(!Statistics.audioLevelsEnabled) return; for(var i = 0; i < Statistics.localStats.length; i++) if(Statistics.localStats[i].stream === stream){ var localStats = Statistics.localStats.splice(i, 1)[0]; localStats.stop(); break; } } Statistics.prototype.stopRemote = function () { if (this.rtpStats && Statistics.audioLevelsEnabled) { this.rtpStats.stop(); this.eventEmitter.emit(StatisticsEvents.STOP); this.rtpStats = null; } }; /** * Obtains audio level reported in the stats for specified peer. * @param peerJid full MUC jid of the user for whom we want to obtain last * audio level. * @param ssrc the SSRC of audio stream for which we want to obtain audio * level. * @returns {*} a float from 0 to 1 that represents current audio level or * null if for any reason the value is not available * at this time. */ Statistics.prototype.getPeerSSRCAudioLevel = function (peerJid, ssrc) { if(!Statistics.audioLevelsEnabled) return; var peerStats = this.rtpStats.jid2stats[peerJid]; return peerStats ? peerStats.ssrc2AudioLevel[ssrc] : null; }; //CALLSTATS METHODS /** * Initializes the callstats.io API. * @param session {JingleSessionPC} the session object * @param settings {Settings} the settings instance. Declared in * /modules/settings/Settings.js */ Statistics.prototype.startCallStats = function (session, settings) { if(this.callStatsIntegrationEnabled && !this.callstats) { this.callstats = new CallStats(session, settings, this.options); } } /** * Returns true if the callstats integration is enabled, otherwise returns * false. * * @returns true if the callstats integration is enabled, otherwise returns * false. 
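 * Sketch of a typical guard (assumes `statistics` is a Statistics instance):
 *
 *   if (statistics.isCallstatsEnabled()) {
 *     statistics.sendFeedback(overallFeedback, detailedFeedback);
 *   }
 *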
 */
Statistics.prototype.isCallstatsEnabled = function () {
    return this.callStatsIntegrationEnabled;
};

/**
 * Notifies CallStats for connection setup errors
 */
Statistics.prototype.sendSetupFailedEvent = function () {
    if(this.callStatsIntegrationEnabled && this.callstats)
        this.callstats.sendSetupFailedEvent();
};

/**
 * Notifies CallStats for mute events
 * @param muted {boolean} true for muted and false for not muted
 * @param type {String} "audio"/"video"
 */
Statistics.prototype.sendMuteEvent = function (muted, type) {
    if(this.callStatsIntegrationEnabled && this.callstats)
        this.callstats.sendMuteEvent(muted, type);
};

/**
 * Notifies CallStats that getUserMedia failed.
 *
 * @param {Error} e error to send
 */
Statistics.prototype.sendGetUserMediaFailed = function (e) {
    if(this.callStatsIntegrationEnabled)
        CallStats.sendGetUserMediaFailed(e, this.callstats);
};

/**
 * Notifies CallStats that getUserMedia failed.
 *
 * @param {Error} e error to send
 */
Statistics.sendGetUserMediaFailed = function (e) {
    CallStats.sendGetUserMediaFailed(e, null);
};

/**
 * Notifies CallStats that peer connection failed to create offer.
 *
 * @param {Error} e error to send
 * @param {RTCPeerConnection} pc connection on which failure occurred.
 */
Statistics.prototype.sendCreateOfferFailed = function (e, pc) {
    if(this.callStatsIntegrationEnabled)
        CallStats.sendCreateOfferFailed(e, pc, this.callstats);
};

/**
 * Notifies CallStats that peer connection failed to create answer.
 *
 * @param {Error} e error to send
 * @param {RTCPeerConnection} pc connection on which failure occurred.
 */
Statistics.prototype.sendCreateAnswerFailed = function (e, pc) {
    if(this.callStatsIntegrationEnabled)
        CallStats.sendCreateAnswerFailed(e, pc, this.callstats);
};

/**
 * Notifies CallStats that peer connection failed to set local description.
 *
 * @param {Error} e error to send
 * @param {RTCPeerConnection} pc connection on which failure occurred.
 */
Statistics.prototype.sendSetLocalDescFailed = function (e, pc) {
    if(this.callStatsIntegrationEnabled)
        CallStats.sendSetLocalDescFailed(e, pc, this.callstats);
};

/**
 * Notifies CallStats that peer connection failed to set remote description.
 *
 * @param {Error} e error to send
 * @param {RTCPeerConnection} pc connection on which failure occurred.
 */
Statistics.prototype.sendSetRemoteDescFailed = function (e, pc) {
    if(this.callStatsIntegrationEnabled)
        CallStats.sendSetRemoteDescFailed(e, pc, this.callstats);
};

/**
 * Notifies CallStats that peer connection failed to add ICE candidate.
 *
 * @param {Error} e error to send
 * @param {RTCPeerConnection} pc connection on which failure occurred.
 */
Statistics.prototype.sendAddIceCandidateFailed = function (e, pc) {
    if(this.callStatsIntegrationEnabled)
        CallStats.sendAddIceCandidateFailed(e, pc, this.callstats);
};

/**
 * Sends the given feedback through CallStats.
 *
 * @param overallFeedback an integer between 1 and 5 indicating the
 * user feedback
 * @param detailedFeedback detailed feedback from the user.
Not yet used */ Statistics.prototype.sendFeedback = function(overallFeedback, detailedFeedback){ if(this.callStatsIntegrationEnabled && this.callstats) this.callstats.sendFeedback(overallFeedback, detailedFeedback); } Statistics.LOCAL_JID = require("../../service/statistics/constants").LOCAL_JID; module.exports = Statistics; },{"../../service/statistics/Events":135,"../../service/statistics/constants":136,"../util/ScriptUtil":27,"./CallStats":22,"./LocalStatsCollector.js":23,"./RTPStatsCollector.js":24,"events":51}],26:[function(require,module,exports){ /** /** * @const */ var ALPHANUM = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'; /** * Hexadecimal digits. * @const */ var HEX_DIGITS = '0123456789abcdef'; /** * Generates random int within the range [min, max] * @param min the minimum value for the generated number * @param max the maximum value for the generated number * @returns random int number */ function randomInt(min, max) { return Math.floor(Math.random() * (max - min + 1)) + min; } /** * Get random element from array or string. * @param {Array|string} arr source * @returns array element or string character */ function randomElement(arr) { return arr[randomInt(0, arr.length - 1)]; } /** * Generate random alphanumeric string. * @param {number} length expected string length * @returns {string} random string of specified length */ function randomAlphanumStr(length) { var result = ''; for (var i = 0; i < length; i += 1) { result += randomElement(ALPHANUM); } return result; } /** * Exported interface. */ var RandomUtil = { /** * Returns a random hex digit. * @returns {*} */ randomHexDigit: function() { return randomElement(HEX_DIGITS); }, /** * Returns a random string of hex digits with length 'len'. * @param len the length. */ randomHexString: function (len) { var ret = ''; while (len--) { ret += this.randomHexDigit(); } return ret; }, randomElement: randomElement, randomAlphanumStr: randomAlphanumStr, randomInt: randomInt }; module.exports = RandomUtil; },{}],27:[function(require,module,exports){ /** * Implements utility functions which facilitate the dealing with scripts such * as the download and execution of a JavaScript file. */ var ScriptUtil = { /** * Loads a script from a specific source. 
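     *
     * Usage sketch (the URL is only an example):
     *
     *     ScriptUtil.loadScript(
     *         'https://example.com/some-lib.js',
     *         true,    // async: do not block parsing while downloading
     *         false);  // prepend=false: add after the scripts already known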
* * @param src the source from the which the script is to be (down)loaded * @param async true to asynchronously load the script or false to * synchronously load the script * @param prepend true to schedule the loading of the script as soon as * possible or false to schedule the loading of the script at the end of the * scripts known at the time */ loadScript: function (src, async, prepend) { var d = document; var tagName = 'script'; var script = d.createElement(tagName); var referenceNode = d.getElementsByTagName(tagName)[0]; script.async = async; script.src = src; if (prepend) { referenceNode.parentNode.insertBefore(script, referenceNode); } else { referenceNode.parentNode.appendChild(script); } }, }; module.exports = ScriptUtil; },{}],28:[function(require,module,exports){ var RandomUtil = require('./RandomUtil'); /** * from faker.js - Copyright (c) 2014-2015 Matthew Bergman & Marak Squires * MIT License * http://github.com/marak/faker.js/ * * @const */ var names = [ "Aaliyah", "Aaron", "Abagail", "Abbey", "Abbie", "Abbigail", "Abby", "Abdiel", "Abdul", "Abdullah", "Abe", "Abel", "Abelardo", "Abigail", "Abigale", "Abigayle", "Abner", "Abraham", "Ada", "Adah", "Adalberto", "Adaline", "Adam", "Adan", "Addie", "Addison", "Adela", "Adelbert", "Adele", "Adelia", "Adeline", "Adell", "Adella", "Adelle", "Aditya", "Adolf", "Adolfo", "Adolph", "Adolphus", "Adonis", "Adrain", "Adrian", "Adriana", "Adrianna", "Adriel", "Adrien", "Adrienne", "Afton", "Aglae", "Agnes", "Agustin", "Agustina", "Ahmad", "Ahmed", "Aida", "Aidan", "Aiden", "Aileen", "Aimee", "Aisha", "Aiyana", "Akeem", "Al", "Alaina", "Alan", "Alana", "Alanis", "Alanna", "Alayna", "Alba", "Albert", "Alberta", "Albertha", "Alberto", "Albin", "Albina", "Alda", "Alden", "Alec", "Aleen", "Alejandra", "Alejandrin", "Alek", "Alena", "Alene", "Alessandra", "Alessandro", "Alessia", "Aletha", "Alex", "Alexa", "Alexander", "Alexandra", "Alexandre", "Alexandrea", "Alexandria", "Alexandrine", "Alexandro", "Alexane", "Alexanne", "Alexie", "Alexis", "Alexys", "Alexzander", "Alf", "Alfonso", "Alfonzo", "Alford", "Alfred", "Alfreda", "Alfredo", "Ali", "Alia", "Alice", "Alicia", "Alisa", "Alisha", "Alison", "Alivia", "Aliya", "Aliyah", "Aliza", "Alize", "Allan", "Allen", "Allene", "Allie", "Allison", "Ally", "Alphonso", "Alta", "Althea", "Alva", "Alvah", "Alvena", "Alvera", "Alverta", "Alvina", "Alvis", "Alyce", "Alycia", "Alysa", "Alysha", "Alyson", "Alysson", "Amalia", "Amanda", "Amani", "Amara", "Amari", "Amaya", "Amber", "Ambrose", "Amelia", "Amelie", "Amely", "America", "Americo", "Amie", "Amina", "Amir", "Amira", "Amiya", "Amos", "Amparo", "Amy", "Amya", "Ana", "Anabel", "Anabelle", "Anahi", "Anais", "Anastacio", "Anastasia", "Anderson", "Andre", "Andreane", "Andreanne", "Andres", "Andrew", "Andy", "Angel", "Angela", "Angelica", "Angelina", "Angeline", "Angelita", "Angelo", "Angie", "Angus", "Anibal", "Anika", "Anissa", "Anita", "Aniya", "Aniyah", "Anjali", "Anna", "Annabel", "Annabell", "Annabelle", "Annalise", "Annamae", "Annamarie", "Anne", "Annetta", "Annette", "Annie", "Ansel", "Ansley", "Anthony", "Antoinette", "Antone", "Antonetta", "Antonette", "Antonia", "Antonietta", "Antonina", "Antonio", "Antwan", "Antwon", "Anya", "April", "Ara", "Araceli", "Aracely", "Arch", "Archibald", "Ardella", "Arden", "Ardith", "Arely", "Ari", "Ariane", "Arianna", "Aric", "Ariel", "Arielle", "Arjun", "Arlene", "Arlie", "Arlo", "Armand", "Armando", "Armani", "Arnaldo", "Arne", "Arno", "Arnold", "Arnoldo", "Arnulfo", "Aron", "Art", "Arthur", "Arturo", "Arvel", 
"Arvid", "Arvilla", "Aryanna", "Asa", "Asha", "Ashlee", "Ashleigh", "Ashley", "Ashly", "Ashlynn", "Ashton", "Ashtyn", "Asia", "Assunta", "Astrid", "Athena", "Aubree", "Aubrey", "Audie", "Audra", "Audreanne", "Audrey", "August", "Augusta", "Augustine", "Augustus", "Aurelia", "Aurelie", "Aurelio", "Aurore", "Austen", "Austin", "Austyn", "Autumn", "Ava", "Avery", "Avis", "Axel", "Ayana", "Ayden", "Ayla", "Aylin", "Baby", "Bailee", "Bailey", "Barbara", "Barney", "Baron", "Barrett", "Barry", "Bart", "Bartholome", "Barton", "Baylee", "Beatrice", "Beau", "Beaulah", "Bell", "Bella", "Belle", "Ben", "Benedict", "Benjamin", "Bennett", "Bennie", "Benny", "Benton", "Berenice", "Bernadette", "Bernadine", "Bernard", "Bernardo", "Berneice", "Bernhard", "Bernice", "Bernie", "Berniece", "Bernita", "Berry", "Bert", "Berta", "Bertha", "Bertram", "Bertrand", "Beryl", "Bessie", "Beth", "Bethany", "Bethel", "Betsy", "Bette", "Bettie", "Betty", "Bettye", "Beulah", "Beverly", "Bianka", "Bill", "Billie", "Billy", "Birdie", "Blair", "Blaise", "Blake", "Blanca", "Blanche", "Blaze", "Bo", "Bobbie", "Bobby", "Bonita", "Bonnie", "Boris", "Boyd", "Brad", "Braden", "Bradford", "Bradley", "Bradly", "Brady", "Braeden", "Brain", "Brandi", "Brando", "Brandon", "Brandt", "Brandy", "Brandyn", "Brannon", "Branson", "Brant", "Braulio", "Braxton", "Brayan", "Breana", "Breanna", "Breanne", "Brenda", "Brendan", "Brenden", "Brendon", "Brenna", "Brennan", "Brennon", "Brent", "Bret", "Brett", "Bria", "Brian", "Briana", "Brianne", "Brice", "Bridget", "Bridgette", "Bridie", "Brielle", "Brigitte", "Brionna", "Brisa", "Britney", "Brittany", "Brock", "Broderick", "Brody", "Brook", "Brooke", "Brooklyn", "Brooks", "Brown", "Bruce", "Bryana", "Bryce", "Brycen", "Bryon", "Buck", "Bud", "Buddy", "Buford", "Bulah", "Burdette", "Burley", "Burnice", "Buster", "Cade", "Caden", "Caesar", "Caitlyn", "Cale", "Caleb", "Caleigh", "Cali", "Calista", "Callie", "Camden", "Cameron", "Camila", "Camilla", "Camille", "Camren", "Camron", "Camryn", "Camylle", "Candace", "Candelario", "Candice", "Candida", "Candido", "Cara", "Carey", "Carissa", "Carlee", "Carleton", "Carley", "Carli", "Carlie", "Carlo", "Carlos", "Carlotta", "Carmel", "Carmela", "Carmella", "Carmelo", "Carmen", "Carmine", "Carol", "Carolanne", "Carole", "Carolina", "Caroline", "Carolyn", "Carolyne", "Carrie", "Carroll", "Carson", "Carter", "Cary", "Casandra", "Casey", "Casimer", "Casimir", "Casper", "Cassandra", "Cassandre", "Cassidy", "Cassie", "Catalina", "Caterina", "Catharine", "Catherine", "Cathrine", "Cathryn", "Cathy", "Cayla", "Ceasar", "Cecelia", "Cecil", "Cecile", "Cecilia", "Cedrick", "Celestine", "Celestino", "Celia", "Celine", "Cesar", "Chad", "Chadd", "Chadrick", "Chaim", "Chance", "Chandler", "Chanel", "Chanelle", "Charity", "Charlene", "Charles", "Charley", "Charlie", "Charlotte", "Chase", "Chasity", "Chauncey", "Chaya", "Chaz", "Chelsea", "Chelsey", "Chelsie", "Chesley", "Chester", "Chet", "Cheyanne", "Cheyenne", "Chloe", "Chris", "Christ", "Christa", "Christelle", "Christian", "Christiana", "Christina", "Christine", "Christop", "Christophe", "Christopher", "Christy", "Chyna", "Ciara", "Cicero", "Cielo", "Cierra", "Cindy", "Citlalli", "Clair", "Claire", "Clara", "Clarabelle", "Clare", "Clarissa", "Clark", "Claud", "Claude", "Claudia", "Claudie", "Claudine", "Clay", "Clemens", "Clement", "Clementina", "Clementine", "Clemmie", "Cleo", "Cleora", "Cleta", "Cletus", "Cleve", "Cleveland", "Clifford", "Clifton", "Clint", "Clinton", "Clotilde", "Clovis", "Cloyd", "Clyde", "Coby", "Cody", 
"Colby", "Cole", "Coleman", "Colin", "Colleen", "Collin", "Colt", "Colten", "Colton", "Columbus", "Concepcion", "Conner", "Connie", "Connor", "Conor", "Conrad", "Constance", "Constantin", "Consuelo", "Cooper", "Cora", "Coralie", "Corbin", "Cordelia", "Cordell", "Cordia", "Cordie", "Corene", "Corine", "Cornelius", "Cornell", "Corrine", "Cortez", "Cortney", "Cory", "Coty", "Courtney", "Coy", "Craig", "Crawford", "Creola", "Cristal", "Cristian", "Cristina", "Cristobal", "Cristopher", "Cruz", "Crystal", "Crystel", "Cullen", "Curt", "Curtis", "Cydney", "Cynthia", "Cyril", "Cyrus", "Dagmar", "Dahlia", "Daija", "Daisha", "Daisy", "Dakota", "Dale", "Dallas", "Dallin", "Dalton", "Damaris", "Dameon", "Damian", "Damien", "Damion", "Damon", "Dan", "Dana", "Dandre", "Dane", "D'angelo", "Dangelo", "Danial", "Daniela", "Daniella", "Danielle", "Danika", "Dannie", "Danny", "Dante", "Danyka", "Daphne", "Daphnee", "Daphney", "Darby", "Daren", "Darian", "Dariana", "Darien", "Dario", "Darion", "Darius", "Darlene", "Daron", "Darrel", "Darrell", "Darren", "Darrick", "Darrin", "Darrion", "Darron", "Darryl", "Darwin", "Daryl", "Dashawn", "Dasia", "Dave", "David", "Davin", "Davion", "Davon", "Davonte", "Dawn", "Dawson", "Dax", "Dayana", "Dayna", "Dayne", "Dayton", "Dean", "Deangelo", "Deanna", "Deborah", "Declan", "Dedric", "Dedrick", "Dee", "Deion", "Deja", "Dejah", "Dejon", "Dejuan", "Delaney", "Delbert", "Delfina", "Delia", "Delilah", "Dell", "Della", "Delmer", "Delores", "Delpha", "Delphia", "Delphine", "Delta", "Demarco", "Demarcus", "Demario", "Demetris", "Demetrius", "Demond", "Dena", "Denis", "Dennis", "Deon", "Deondre", "Deontae", "Deonte", "Dereck", "Derek", "Derick", "Deron", "Derrick", "Deshaun", "Deshawn", "Desiree", "Desmond", "Dessie", "Destany", "Destin", "Destinee", "Destiney", "Destini", "Destiny", "Devan", "Devante", "Deven", "Devin", "Devon", "Devonte", "Devyn", "Dewayne", "Dewitt", "Dexter", "Diamond", "Diana", "Dianna", "Diego", "Dillan", "Dillon", "Dimitri", "Dina", "Dino", "Dion", "Dixie", "Dock", "Dolly", "Dolores", "Domenic", "Domenica", "Domenick", "Domenico", "Domingo", "Dominic", "Dominique", "Don", "Donald", "Donato", "Donavon", "Donna", "Donnell", "Donnie", "Donny", "Dora", "Dorcas", "Dorian", "Doris", "Dorothea", "Dorothy", "Dorris", "Dortha", "Dorthy", "Doug", "Douglas", "Dovie", "Doyle", "Drake", "Drew", "Duane", "Dudley", "Dulce", "Duncan", "Durward", "Dustin", "Dusty", "Dwight", "Dylan", "Earl", "Earlene", "Earline", "Earnest", "Earnestine", "Easter", "Easton", "Ebba", "Ebony", "Ed", "Eda", "Edd", "Eddie", "Eden", "Edgar", "Edgardo", "Edison", "Edmond", "Edmund", "Edna", "Eduardo", "Edward", "Edwardo", "Edwin", "Edwina", "Edyth", "Edythe", "Effie", "Efrain", "Efren", "Eileen", "Einar", "Eino", "Eladio", "Elaina", "Elbert", "Elda", "Eldon", "Eldora", "Eldred", "Eldridge", "Eleanora", "Eleanore", "Eleazar", "Electa", "Elena", "Elenor", "Elenora", "Eleonore", "Elfrieda", "Eli", "Elian", "Eliane", "Elias", "Eliezer", "Elijah", "Elinor", "Elinore", "Elisa", "Elisabeth", "Elise", "Eliseo", "Elisha", "Elissa", "Eliza", "Elizabeth", "Ella", "Ellen", "Ellie", "Elliot", "Elliott", "Ellis", "Ellsworth", "Elmer", "Elmira", "Elmo", "Elmore", "Elna", "Elnora", "Elody", "Eloisa", "Eloise", "Elouise", "Eloy", "Elroy", "Elsa", "Else", "Elsie", "Elta", "Elton", "Elva", "Elvera", "Elvie", "Elvis", "Elwin", "Elwyn", "Elyse", "Elyssa", "Elza", "Emanuel", "Emelia", "Emelie", "Emely", "Emerald", "Emerson", "Emery", "Emie", "Emil", "Emile", "Emilia", "Emiliano", "Emilie", "Emilio", "Emily", "Emma", 
"Emmalee", "Emmanuel", "Emmanuelle", "Emmet", "Emmett", "Emmie", "Emmitt", "Emmy", "Emory", "Ena", "Enid", "Enoch", "Enola", "Enos", "Enrico", "Enrique", "Ephraim", "Era", "Eriberto", "Eric", "Erica", "Erich", "Erick", "Ericka", "Erik", "Erika", "Erin", "Erling", "Erna", "Ernest", "Ernestina", "Ernestine", "Ernesto", "Ernie", "Ervin", "Erwin", "Eryn", "Esmeralda", "Esperanza", "Esta", "Esteban", "Estefania", "Estel", "Estell", "Estella", "Estelle", "Estevan", "Esther", "Estrella", "Etha", "Ethan", "Ethel", "Ethelyn", "Ethyl", "Ettie", "Eudora", "Eugene", "Eugenia", "Eula", "Eulah", "Eulalia", "Euna", "Eunice", "Eusebio", "Eva", "Evalyn", "Evan", "Evangeline", "Evans", "Eve", "Eveline", "Evelyn", "Everardo", "Everett", "Everette", "Evert", "Evie", "Ewald", "Ewell", "Ezekiel", "Ezequiel", "Ezra", "Fabian", "Fabiola", "Fae", "Fannie", "Fanny", "Fatima", "Faustino", "Fausto", "Favian", "Fay", "Faye", "Federico", "Felicia", "Felicita", "Felicity", "Felipa", "Felipe", "Felix", "Felton", "Fermin", "Fern", "Fernando", "Ferne", "Fidel", "Filiberto", "Filomena", "Finn", "Fiona", "Flavie", "Flavio", "Fleta", "Fletcher", "Flo", "Florence", "Florencio", "Florian", "Florida", "Florine", "Flossie", "Floy", "Floyd", "Ford", "Forest", "Forrest", "Foster", "Frances", "Francesca", "Francesco", "Francis", "Francisca", "Francisco", "Franco", "Frank", "Frankie", "Franz", "Fred", "Freda", "Freddie", "Freddy", "Frederic", "Frederick", "Frederik", "Frederique", "Fredrick", "Fredy", "Freeda", "Freeman", "Freida", "Frida", "Frieda", "Friedrich", "Fritz", "Furman", "Gabe", "Gabriel", "Gabriella", "Gabrielle", "Gaetano", "Gage", "Gail", "Gardner", "Garett", "Garfield", "Garland", "Garnet", "Garnett", "Garret", "Garrett", "Garrick", "Garrison", "Garry", "Garth", "Gaston", "Gavin", "Gay", "Gayle", "Gaylord", "Gene", "General", "Genesis", "Genevieve", "Gennaro", "Genoveva", "Geo", "Geoffrey", "George", "Georgette", "Georgiana", "Georgianna", "Geovanni", "Geovanny", "Geovany", "Gerald", "Geraldine", "Gerard", "Gerardo", "Gerda", "Gerhard", "Germaine", "German", "Gerry", "Gerson", "Gertrude", "Gia", "Gianni", "Gideon", "Gilbert", "Gilberto", "Gilda", "Giles", "Gillian", "Gina", "Gino", "Giovani", "Giovanna", "Giovanni", "Giovanny", "Gisselle", "Giuseppe", "Gladyce", "Gladys", "Glen", "Glenda", "Glenna", "Glennie", "Gloria", "Godfrey", "Golda", "Golden", "Gonzalo", "Gordon", "Grace", "Gracie", "Graciela", "Grady", "Graham", "Grant", "Granville", "Grayce", "Grayson", "Green", "Greg", "Gregg", "Gregoria", "Gregorio", "Gregory", "Greta", "Gretchen", "Greyson", "Griffin", "Grover", "Guadalupe", "Gudrun", "Guido", "Guillermo", "Guiseppe", "Gunnar", "Gunner", "Gus", "Gussie", "Gust", "Gustave", "Guy", "Gwen", "Gwendolyn", "Hadley", "Hailee", "Hailey", "Hailie", "Hal", "Haleigh", "Haley", "Halie", "Halle", "Hallie", "Hank", "Hanna", "Hannah", "Hans", "Hardy", "Harley", "Harmon", "Harmony", "Harold", "Harrison", "Harry", "Harvey", "Haskell", "Hassan", "Hassie", "Hattie", "Haven", "Hayden", "Haylee", "Hayley", "Haylie", "Hazel", "Hazle", "Heath", "Heather", "Heaven", "Heber", "Hector", "Heidi", "Helen", "Helena", "Helene", "Helga", "Hellen", "Helmer", "Heloise", "Henderson", "Henri", "Henriette", "Henry", "Herbert", "Herman", "Hermann", "Hermina", "Herminia", "Herminio", "Hershel", "Herta", "Hertha", "Hester", "Hettie", "Hilario", "Hilbert", "Hilda", "Hildegard", "Hillard", "Hillary", "Hilma", "Hilton", "Hipolito", "Hiram", "Hobart", "Holden", "Hollie", "Hollis", "Holly", "Hope", "Horace", "Horacio", "Hortense", "Hosea", "Houston", 
"Howard", "Howell", "Hoyt", "Hubert", "Hudson", "Hugh", "Hulda", "Humberto", "Hunter", "Hyman", "Ian", "Ibrahim", "Icie", "Ida", "Idell", "Idella", "Ignacio", "Ignatius", "Ike", "Ila", "Ilene", "Iliana", "Ima", "Imani", "Imelda", "Immanuel", "Imogene", "Ines", "Irma", "Irving", "Irwin", "Isaac", "Isabel", "Isabell", "Isabella", "Isabelle", "Isac", "Isadore", "Isai", "Isaiah", "Isaias", "Isidro", "Ismael", "Isobel", "Isom", "Israel", "Issac", "Itzel", "Iva", "Ivah", "Ivory", "Ivy", "Izabella", "Izaiah", "Jabari", "Jace", "Jacey", "Jacinthe", "Jacinto", "Jack", "Jackeline", "Jackie", "Jacklyn", "Jackson", "Jacky", "Jaclyn", "Jacquelyn", "Jacques", "Jacynthe", "Jada", "Jade", "Jaden", "Jadon", "Jadyn", "Jaeden", "Jaida", "Jaiden", "Jailyn", "Jaime", "Jairo", "Jakayla", "Jake", "Jakob", "Jaleel", "Jalen", "Jalon", "Jalyn", "Jamaal", "Jamal", "Jamar", "Jamarcus", "Jamel", "Jameson", "Jamey", "Jamie", "Jamil", "Jamir", "Jamison", "Jammie", "Jan", "Jana", "Janae", "Jane", "Janelle", "Janessa", "Janet", "Janice", "Janick", "Janie", "Janis", "Janiya", "Jannie", "Jany", "Jaquan", "Jaquelin", "Jaqueline", "Jared", "Jaren", "Jarod", "Jaron", "Jarred", "Jarrell", "Jarret", "Jarrett", "Jarrod", "Jarvis", "Jasen", "Jasmin", "Jason", "Jasper", "Jaunita", "Javier", "Javon", "Javonte", "Jay", "Jayce", "Jaycee", "Jayda", "Jayde", "Jayden", "Jaydon", "Jaylan", "Jaylen", "Jaylin", "Jaylon", "Jayme", "Jayne", "Jayson", "Jazlyn", "Jazmin", "Jazmyn", "Jazmyne", "Jean", "Jeanette", "Jeanie", "Jeanne", "Jed", "Jedediah", "Jedidiah", "Jeff", "Jefferey", "Jeffery", "Jeffrey", "Jeffry", "Jena", "Jenifer", "Jennie", "Jennifer", "Jennings", "Jennyfer", "Jensen", "Jerad", "Jerald", "Jeramie", "Jeramy", "Jerel", "Jeremie", "Jeremy", "Jermain", "Jermaine", "Jermey", "Jerod", "Jerome", "Jeromy", "Jerrell", "Jerrod", "Jerrold", "Jerry", "Jess", "Jesse", "Jessica", "Jessie", "Jessika", "Jessy", "Jessyca", "Jesus", "Jett", "Jettie", "Jevon", "Jewel", "Jewell", "Jillian", "Jimmie", "Jimmy", "Jo", "Joan", "Joana", "Joanie", "Joanne", "Joannie", "Joanny", "Joany", "Joaquin", "Jocelyn", "Jodie", "Jody", "Joe", "Joel", "Joelle", "Joesph", "Joey", "Johan", "Johann", "Johanna", "Johathan", "John", "Johnathan", "Johnathon", "Johnnie", "Johnny", "Johnpaul", "Johnson", "Jolie", "Jon", "Jonas", "Jonatan", "Jonathan", "Jonathon", "Jordan", "Jordane", "Jordi", "Jordon", "Jordy", "Jordyn", "Jorge", "Jose", "Josefa", "Josefina", "Joseph", "Josephine", "Josh", "Joshua", "Joshuah", "Josiah", "Josiane", "Josianne", "Josie", "Josue", "Jovan", "Jovani", "Jovanny", "Jovany", "Joy", "Joyce", "Juana", "Juanita", "Judah", "Judd", "Jude", "Judge", "Judson", "Judy", "Jules", "Julia", "Julian", "Juliana", "Julianne", "Julie", "Julien", "Juliet", "Julio", "Julius", "June", "Junior", "Junius", "Justen", "Justice", "Justina", "Justine", "Juston", "Justus", "Justyn", "Juvenal", "Juwan", "Kacey", "Kaci", "Kacie", "Kade", "Kaden", "Kadin", "Kaela", "Kaelyn", "Kaia", "Kailee", "Kailey", "Kailyn", "Kaitlin", "Kaitlyn", "Kale", "Kaleb", "Kaleigh", "Kaley", "Kali", "Kallie", "Kameron", "Kamille", "Kamren", "Kamron", "Kamryn", "Kane", "Kara", "Kareem", "Karelle", "Karen", "Kari", "Kariane", "Karianne", "Karina", "Karine", "Karl", "Karlee", "Karley", "Karli", "Karlie", "Karolann", "Karson", "Kasandra", "Kasey", "Kassandra", "Katarina", "Katelin", "Katelyn", "Katelynn", "Katharina", "Katherine", "Katheryn", "Kathleen", "Kathlyn", "Kathryn", "Kathryne", "Katlyn", "Katlynn", "Katrina", "Katrine", "Kattie", "Kavon", "Kay", "Kaya", "Kaycee", "Kayden", "Kayla", "Kaylah", 
"Kaylee", "Kayleigh", "Kayley", "Kayli", "Kaylie", "Kaylin", "Keagan", "Keanu", "Keara", "Keaton", "Keegan", "Keeley", "Keely", "Keenan", "Keira", "Keith", "Kellen", "Kelley", "Kelli", "Kellie", "Kelly", "Kelsi", "Kelsie", "Kelton", "Kelvin", "Ken", "Kendall", "Kendra", "Kendrick", "Kenna", "Kennedi", "Kennedy", "Kenneth", "Kennith", "Kenny", "Kenton", "Kenya", "Kenyatta", "Kenyon", "Keon", "Keshaun", "Keshawn", "Keven", "Kevin", "Kevon", "Keyon", "Keyshawn", "Khalid", "Khalil", "Kian", "Kiana", "Kianna", "Kiara", "Kiarra", "Kiel", "Kiera", "Kieran", "Kiley", "Kim", "Kimberly", "King", "Kip", "Kira", "Kirk", "Kirsten", "Kirstin", "Kitty", "Kobe", "Koby", "Kody", "Kolby", "Kole", "Korbin", "Korey", "Kory", "Kraig", "Kris", "Krista", "Kristian", "Kristin", "Kristina", "Kristofer", "Kristoffer", "Kristopher", "Kristy", "Krystal", "Krystel", "Krystina", "Kurt", "Kurtis", "Kyla", "Kyle", "Kylee", "Kyleigh", "Kyler", "Kylie", "Kyra", "Lacey", "Lacy", "Ladarius", "Lafayette", "Laila", "Laisha", "Lamar", "Lambert", "Lamont", "Lance", "Landen", "Lane", "Laney", "Larissa", "Laron", "Larry", "Larue", "Laura", "Laurel", "Lauren", "Laurence", "Lauretta", "Lauriane", "Laurianne", "Laurie", "Laurine", "Laury", "Lauryn", "Lavada", "Lavern", "Laverna", "Laverne", "Lavina", "Lavinia", "Lavon", "Lavonne", "Lawrence", "Lawson", "Layla", "Layne", "Lazaro", "Lea", "Leann", "Leanna", "Leanne", "Leatha", "Leda", "Lee", "Leif", "Leila", "Leilani", "Lela", "Lelah", "Leland", "Lelia", "Lempi", "Lemuel", "Lenna", "Lennie", "Lenny", "Lenora", "Lenore", "Leo", "Leola", "Leon", "Leonard", "Leonardo", "Leone", "Leonel", "Leonie", "Leonor", "Leonora", "Leopold", "Leopoldo", "Leora", "Lera", "Lesley", "Leslie", "Lesly", "Lessie", "Lester", "Leta", "Letha", "Letitia", "Levi", "Lew", "Lewis", "Lexi", "Lexie", "Lexus", "Lia", "Liam", "Liana", "Libbie", "Libby", "Lila", "Lilian", "Liliana", "Liliane", "Lilla", "Lillian", "Lilliana", "Lillie", "Lilly", "Lily", "Lilyan", "Lina", "Lincoln", "Linda", "Lindsay", "Lindsey", "Linnea", "Linnie", "Linwood", "Lionel", "Lisa", "Lisandro", "Lisette", "Litzy", "Liza", "Lizeth", "Lizzie", "Llewellyn", "Lloyd", "Logan", "Lois", "Lola", "Lolita", "Loma", "Lon", "London", "Lonie", "Lonnie", "Lonny", "Lonzo", "Lora", "Loraine", "Loren", "Lorena", "Lorenz", "Lorenza", "Lorenzo", "Lori", "Lorine", "Lorna", "Lottie", "Lou", "Louie", "Louisa", "Lourdes", "Louvenia", "Lowell", "Loy", "Loyal", "Loyce", "Lucas", "Luciano", "Lucie", "Lucienne", "Lucile", "Lucinda", "Lucio", "Lucious", "Lucius", "Lucy", "Ludie", "Ludwig", "Lue", "Luella", "Luigi", "Luis", "Luisa", "Lukas", "Lula", "Lulu", "Luna", "Lupe", "Lura", "Lurline", "Luther", "Luz", "Lyda", "Lydia", "Lyla", "Lynn", "Lyric", "Lysanne", "Mabel", "Mabelle", "Mable", "Mac", "Macey", "Maci", "Macie", "Mack", "Mackenzie", "Macy", "Madaline", "Madalyn", "Maddison", "Madeline", "Madelyn", "Madelynn", "Madge", "Madie", "Madilyn", "Madisen", "Madison", "Madisyn", "Madonna", "Madyson", "Mae", "Maegan", "Maeve", "Mafalda", "Magali", "Magdalen", "Magdalena", "Maggie", "Magnolia", "Magnus", "Maia", "Maida", "Maiya", "Major", "Makayla", "Makenna", "Makenzie", "Malachi", "Malcolm", "Malika", "Malinda", "Mallie", "Mallory", "Malvina", "Mandy", "Manley", "Manuel", "Manuela", "Mara", "Marc", "Marcel", "Marcelina", "Marcelino", "Marcella", "Marcelle", "Marcellus", "Marcelo", "Marcia", "Marco", "Marcos", "Marcus", "Margaret", "Margarete", "Margarett", "Margaretta", "Margarette", "Margarita", "Marge", "Margie", "Margot", "Margret", "Marguerite", "Maria", "Mariah", 
"Mariam", "Marian", "Mariana", "Mariane", "Marianna", "Marianne", "Mariano", "Maribel", "Marie", "Mariela", "Marielle", "Marietta", "Marilie", "Marilou", "Marilyne", "Marina", "Mario", "Marion", "Marisa", "Marisol", "Maritza", "Marjolaine", "Marjorie", "Marjory", "Mark", "Markus", "Marlee", "Marlen", "Marlene", "Marley", "Marlin", "Marlon", "Marques", "Marquis", "Marquise", "Marshall", "Marta", "Martin", "Martina", "Martine", "Marty", "Marvin", "Mary", "Maryam", "Maryjane", "Maryse", "Mason", "Mateo", "Mathew", "Mathias", "Mathilde", "Matilda", "Matilde", "Matt", "Matteo", "Mattie", "Maud", "Maude", "Maudie", "Maureen", "Maurice", "Mauricio", "Maurine", "Maverick", "Mavis", "Max", "Maxie", "Maxime", "Maximilian", "Maximillia", "Maximillian", "Maximo", "Maximus", "Maxine", "Maxwell", "May", "Maya", "Maybell", "Maybelle", "Maye", "Maymie", "Maynard", "Mayra", "Mazie", "Mckayla", "Mckenna", "Mckenzie", "Meagan", "Meaghan", "Meda", "Megane", "Meggie", "Meghan", "Mekhi", "Melany", "Melba", "Melisa", "Melissa", "Mellie", "Melody", "Melvin", "Melvina", "Melyna", "Melyssa", "Mercedes", "Meredith", "Merl", "Merle", "Merlin", "Merritt", "Mertie", "Mervin", "Meta", "Mia", "Micaela", "Micah", "Michael", "Michaela", "Michale", "Micheal", "Michel", "Michele", "Michelle", "Miguel", "Mikayla", "Mike", "Mikel", "Milan", "Miles", "Milford", "Miller", "Millie", "Milo", "Milton", "Mina", "Minerva", "Minnie", "Miracle", "Mireille", "Mireya", "Misael", "Missouri", "Misty", "Mitchel", "Mitchell", "Mittie", "Modesta", "Modesto", "Mohamed", "Mohammad", "Mohammed", "Moises", "Mollie", "Molly", "Mona", "Monica", "Monique", "Monroe", "Monserrat", "Monserrate", "Montana", "Monte", "Monty", "Morgan", "Moriah", "Morris", "Mortimer", "Morton", "Mose", "Moses", "Moshe", "Mossie", "Mozell", "Mozelle", "Muhammad", "Muriel", "Murl", "Murphy", "Murray", "Mustafa", "Mya", "Myah", "Mylene", "Myles", "Myra", "Myriam", "Myrl", "Myrna", "Myron", "Myrtice", "Myrtie", "Myrtis", "Myrtle", "Nadia", "Nakia", "Name", "Nannie", "Naomi", "Naomie", "Napoleon", "Narciso", "Nash", "Nasir", "Nat", "Natalia", "Natalie", "Natasha", "Nathan", "Nathanael", "Nathanial", "Nathaniel", "Nathen", "Nayeli", "Neal", "Ned", "Nedra", "Neha", "Neil", "Nelda", "Nella", "Nelle", "Nellie", "Nels", "Nelson", "Neoma", "Nestor", "Nettie", "Neva", "Newell", "Newton", "Nia", "Nicholas", "Nicholaus", "Nichole", "Nick", "Nicklaus", "Nickolas", "Nico", "Nicola", "Nicolas", "Nicole", "Nicolette", "Nigel", "Nikita", "Nikki", "Nikko", "Niko", "Nikolas", "Nils", "Nina", "Noah", "Noble", "Noe", "Noel", "Noelia", "Noemi", "Noemie", "Noemy", "Nola", "Nolan", "Nona", "Nora", "Norbert", "Norberto", "Norene", "Norma", "Norris", "Norval", "Norwood", "Nova", "Novella", "Nya", "Nyah", "Nyasia", "Obie", "Oceane", "Ocie", "Octavia", "Oda", "Odell", "Odessa", "Odie", "Ofelia", "Okey", "Ola", "Olaf", "Ole", "Olen", "Oleta", "Olga", "Olin", "Oliver", "Ollie", "Oma", "Omari", "Omer", "Ona", "Onie", "Opal", "Ophelia", "Ora", "Oral", "Oran", "Oren", "Orie", "Orin", "Orion", "Orland", "Orlando", "Orlo", "Orpha", "Orrin", "Orval", "Orville", "Osbaldo", "Osborne", "Oscar", "Osvaldo", "Oswald", "Oswaldo", "Otha", "Otho", "Otilia", "Otis", "Ottilie", "Ottis", "Otto", "Ova", "Owen", "Ozella", "Pablo", "Paige", "Palma", "Pamela", "Pansy", "Paolo", "Paris", "Parker", "Pascale", "Pasquale", "Pat", "Patience", "Patricia", "Patrick", "Patsy", "Pattie", "Paul", "Paula", "Pauline", "Paxton", "Payton", "Pearl", "Pearlie", "Pearline", "Pedro", "Peggie", "Penelope", "Percival", "Percy", "Perry", "Pete", 
"Peter", "Petra", "Peyton", "Philip", "Phoebe", "Phyllis", "Pierce", "Pierre", "Pietro", "Pink", "Pinkie", "Piper", "Polly", "Porter", "Precious", "Presley", "Preston", "Price", "Prince", "Princess", "Priscilla", "Providenci", "Prudence", "Queen", "Queenie", "Quentin", "Quincy", "Quinn", "Quinten", "Quinton", "Rachael", "Rachel", "Rachelle", "Rae", "Raegan", "Rafael", "Rafaela", "Raheem", "Rahsaan", "Rahul", "Raina", "Raleigh", "Ralph", "Ramiro", "Ramon", "Ramona", "Randal", "Randall", "Randi", "Randy", "Ransom", "Raoul", "Raphael", "Raphaelle", "Raquel", "Rashad", "Rashawn", "Rasheed", "Raul", "Raven", "Ray", "Raymond", "Raymundo", "Reagan", "Reanna", "Reba", "Rebeca", "Rebecca", "Rebeka", "Rebekah", "Reece", "Reed", "Reese", "Regan", "Reggie", "Reginald", "Reid", "Reilly", "Reina", "Reinhold", "Remington", "Rene", "Renee", "Ressie", "Reta", "Retha", "Retta", "Reuben", "Reva", "Rex", "Rey", "Reyes", "Reymundo", "Reyna", "Reynold", "Rhea", "Rhett", "Rhianna", "Rhiannon", "Rhoda", "Ricardo", "Richard", "Richie", "Richmond", "Rick", "Rickey", "Rickie", "Ricky", "Rico", "Rigoberto", "Riley", "Rita", "River", "Robb", "Robbie", "Robert", "Roberta", "Roberto", "Robin", "Robyn", "Rocio", "Rocky", "Rod", "Roderick", "Rodger", "Rodolfo", "Rodrick", "Rodrigo", "Roel", "Rogelio", "Roger", "Rogers", "Rolando", "Rollin", "Roma", "Romaine", "Roman", "Ron", "Ronaldo", "Ronny", "Roosevelt", "Rory", "Rosa", "Rosalee", "Rosalia", "Rosalind", "Rosalinda", "Rosalyn", "Rosamond", "Rosanna", "Rosario", "Roscoe", "Rose", "Rosella", "Roselyn", "Rosemarie", "Rosemary", "Rosendo", "Rosetta", "Rosie", "Rosina", "Roslyn", "Ross", "Rossie", "Rowan", "Rowena", "Rowland", "Roxane", "Roxanne", "Roy", "Royal", "Royce", "Rozella", "Ruben", "Rubie", "Ruby", "Rubye", "Rudolph", "Rudy", "Rupert", "Russ", "Russel", "Russell", "Rusty", "Ruth", "Ruthe", "Ruthie", "Ryan", "Ryann", "Ryder", "Rylan", "Rylee", "Ryleigh", "Ryley", "Sabina", "Sabrina", "Sabryna", "Sadie", "Sadye", "Sage", "Saige", "Sallie", "Sally", "Salma", "Salvador", "Salvatore", "Sam", "Samanta", "Samantha", "Samara", "Samir", "Sammie", "Sammy", "Samson", "Sandra", "Sandrine", "Sandy", "Sanford", "Santa", "Santiago", "Santina", "Santino", "Santos", "Sarah", "Sarai", "Sarina", "Sasha", "Saul", "Savanah", "Savanna", "Savannah", "Savion", "Scarlett", "Schuyler", "Scot", "Scottie", "Scotty", "Seamus", "Sean", "Sebastian", "Sedrick", "Selena", "Selina", "Selmer", "Serena", "Serenity", "Seth", "Shad", "Shaina", "Shakira", "Shana", "Shane", "Shanel", "Shanelle", "Shania", "Shanie", "Shaniya", "Shanna", "Shannon", "Shanny", "Shanon", "Shany", "Sharon", "Shaun", "Shawn", "Shawna", "Shaylee", "Shayna", "Shayne", "Shea", "Sheila", "Sheldon", "Shemar", "Sheridan", "Sherman", "Sherwood", "Shirley", "Shyann", "Shyanne", "Sibyl", "Sid", "Sidney", "Sienna", "Sierra", "Sigmund", "Sigrid", "Sigurd", "Silas", "Sim", "Simeon", "Simone", "Sincere", "Sister", "Skye", "Skyla", "Skylar", "Sofia", "Soledad", "Solon", "Sonia", "Sonny", "Sonya", "Sophia", "Sophie", "Spencer", "Stacey", "Stacy", "Stan", "Stanford", "Stanley", "Stanton", "Stefan", "Stefanie", "Stella", "Stephan", "Stephania", "Stephanie", "Stephany", "Stephen", "Stephon", "Sterling", "Steve", "Stevie", "Stewart", "Stone", "Stuart", "Summer", "Sunny", "Susan", "Susana", "Susanna", "Susie", "Suzanne", "Sven", "Syble", "Sydnee", "Sydney", "Sydni", "Sydnie", "Sylvan", "Sylvester", "Sylvia", "Tabitha", "Tad", "Talia", "Talon", "Tamara", "Tamia", "Tania", "Tanner", "Tanya", "Tara", "Taryn", "Tate", "Tatum", "Tatyana", "Taurean", 
"Tavares", "Taya", "Taylor", "Teagan", "Ted", "Telly", "Terence", "Teresa", "Terrance", "Terrell", "Terrence", "Terrill", "Terry", "Tess", "Tessie", "Tevin", "Thad", "Thaddeus", "Thalia", "Thea", "Thelma", "Theo", "Theodora", "Theodore", "Theresa", "Therese", "Theresia", "Theron", "Thomas", "Thora", "Thurman", "Tia", "Tiana", "Tianna", "Tiara", "Tierra", "Tiffany", "Tillman", "Timmothy", "Timmy", "Timothy", "Tina", "Tito", "Titus", "Tobin", "Toby", "Tod", "Tom", "Tomas", "Tomasa", "Tommie", "Toney", "Toni", "Tony", "Torey", "Torrance", "Torrey", "Toy", "Trace", "Tracey", "Tracy", "Travis", "Travon", "Tre", "Tremaine", "Tremayne", "Trent", "Trenton", "Tressa", "Tressie", "Treva", "Trever", "Trevion", "Trevor", "Trey", "Trinity", "Trisha", "Tristian", "Tristin", "Triston", "Troy", "Trudie", "Trycia", "Trystan", "Turner", "Twila", "Tyler", "Tyra", "Tyree", "Tyreek", "Tyrel", "Tyrell", "Tyrese", "Tyrique", "Tyshawn", "Tyson", "Ubaldo", "Ulices", "Ulises", "Una", "Unique", "Urban", "Uriah", "Uriel", "Ursula", "Vada", "Valentin", "Valentina", "Valentine", "Valerie", "Vallie", "Van", "Vance", "Vanessa", "Vaughn", "Veda", "Velda", "Vella", "Velma", "Velva", "Vena", "Verda", "Verdie", "Vergie", "Verla", "Verlie", "Vern", "Verna", "Verner", "Vernice", "Vernie", "Vernon", "Verona", "Veronica", "Vesta", "Vicenta", "Vicente", "Vickie", "Vicky", "Victor", "Victoria", "Vida", "Vidal", "Vilma", "Vince", "Vincent", "Vincenza", "Vincenzo", "Vinnie", "Viola", "Violet", "Violette", "Virgie", "Virgil", "Virginia", "Virginie", "Vita", "Vito", "Viva", "Vivian", "Viviane", "Vivianne", "Vivien", "Vivienne", "Vladimir", "Wade", "Waino", "Waldo", "Walker", "Wallace", "Walter", "Walton", "Wanda", "Ward", "Warren", "Watson", "Wava", "Waylon", "Wayne", "Webster", "Weldon", "Wellington", "Wendell", "Wendy", "Werner", "Westley", "Weston", "Whitney", "Wilber", "Wilbert", "Wilburn", "Wiley", "Wilford", "Wilfred", "Wilfredo", "Wilfrid", "Wilhelm", "Wilhelmine", "Will", "Willa", "Willard", "William", "Willie", "Willis", "Willow", "Willy", "Wilma", "Wilmer", "Wilson", "Wilton", "Winfield", "Winifred", "Winnifred", "Winona", "Winston", "Woodrow", "Wyatt", "Wyman", "Xander", "Xavier", "Xzavier", "Yadira", "Yasmeen", "Yasmin", "Yasmine", "Yazmin", "Yesenia", "Yessenia", "Yolanda", "Yoshiko", "Yvette", "Yvonne", "Zachariah", "Zachary", "Zachery", "Zack", "Zackary", "Zackery", "Zakary", "Zander", "Zane", "Zaria", "Zechariah", "Zelda", "Zella", "Zelma", "Zena", "Zetta", "Zion", "Zita", "Zoe", "Zoey", "Zoie", "Zoila", "Zola", "Zora", "Zula" ]; /** * Generate random username. 
* @returns {string} random username */ function generateUsername () { var name = RandomUtil.randomElement(names); var suffix = RandomUtil.randomAlphanumStr(3); return name + '-' + suffix; } module.exports = { generateUsername: generateUsername }; },{"./RandomUtil":26}],29:[function(require,module,exports){ (function (__filename){ /* global Strophe, $, $pres, $iq, $msg */ /* jshint -W101,-W069 */ var logger = require("jitsi-meet-logger").getLogger(__filename); var XMPPEvents = require("../../service/xmpp/XMPPEvents"); var Moderator = require("./moderator"); var EventEmitter = require("events"); var Recorder = require("./recording"); var JIBRI_XMLNS = 'http://jitsi.org/protocol/jibri'; var parser = { packet2JSON: function (packet, nodes) { var self = this; $(packet).children().each(function (index) { var tagName = $(this).prop("tagName"); var node = { tagName: tagName }; node.attributes = {}; $($(this)[0].attributes).each(function( index, attr ) { node.attributes[ attr.name ] = attr.value; }); var text = Strophe.getText($(this)[0]); if (text) { node.value = text; } node.children = []; nodes.push(node); self.packet2JSON($(this), node.children); }); }, JSON2packet: function (nodes, packet) { for(var i = 0; i < nodes.length; i++) { var node = nodes[i]; if(!node || node === null){ continue; } packet.c(node.tagName, node.attributes); if(node.value) packet.t(node.value); if(node.children) this.JSON2packet(node.children, packet); packet.up(); } // packet.up(); } }; /** * Returns array of JS objects from the presence JSON associated with the passed nodeName * @param pres the presence JSON * @param nodeName the name of the node (videomuted, audiomuted, etc) */ function filterNodeFromPresenceJSON(pres, nodeName){ var res = []; for(var i = 0; i < pres.length; i++) if(pres[i].tagName === nodeName) res.push(pres[i]); return res; } function ChatRoom(connection, jid, password, XMPP, options, settings) { this.eventEmitter = new EventEmitter(); this.xmpp = XMPP; this.connection = connection; this.roomjid = Strophe.getBareJidFromJid(jid); this.myroomjid = jid; this.password = password; logger.info("Joined MUC as " + this.myroomjid); this.members = {}; this.presMap = {}; this.presHandlers = {}; this.joined = false; this.role = 'none'; this.focusMucJid = null; this.bridgeIsDown = false; this.options = options || {}; this.moderator = new Moderator(this.roomjid, this.xmpp, this.eventEmitter, settings); this.initPresenceMap(); this.session = null; var self = this; this.lastPresences = {}; this.phoneNumber = null; this.phonePin = null; } ChatRoom.prototype.initPresenceMap = function () { this.presMap['to'] = this.myroomjid; this.presMap['xns'] = 'http://jabber.org/protocol/muc'; this.presMap["nodes"] = []; this.presMap["nodes"].push( { "tagName": "user-agent", "value": navigator.userAgent, "attributes": {xmlns: 'http://jitsi.org/jitmeet/user-agent'} }); }; ChatRoom.prototype.updateDeviceAvailability = function (devices) { this.presMap["nodes"].push( { "tagName": "devices", "children": [ { "tagName": "audio", "value": devices.audio, }, { "tagName": "video", "value": devices.video, } ] }); }; ChatRoom.prototype.join = function (password) { if(password) this.password = password; var self = this; this.moderator.allocateConferenceFocus(function() { self.sendPresence(true); }.bind(this)); }; ChatRoom.prototype.sendPresence = function (fromJoin) { if (!this.presMap['to'] || (!this.joined && !fromJoin)) { // Too early to send presence - not initialized return; } var pres = $pres({to: this.presMap['to'] }); pres.c('x', 
{xmlns: this.presMap['xns']}); if (this.password) { pres.c('password').t(this.password).up(); } pres.up(); // Send XEP-0115 'c' stanza that contains our capabilities info if (this.connection.caps) { this.connection.caps.node = this.xmpp.options.clientNode; pres.c('c', this.connection.caps.generateCapsAttrs()).up(); } parser.JSON2packet(this.presMap.nodes, pres); this.connection.send(pres); }; ChatRoom.prototype.doLeave = function () { logger.log("do leave", this.myroomjid); var pres = $pres({to: this.myroomjid, type: 'unavailable' }); this.presMap.length = 0; // XXX Strophe is asynchronously sending by default. Unfortunately, that // means that there may not be enough time to send the unavailable presence. // Switching Strophe to synchronous sending is not much of an option because // it may lead to a noticeable delay in navigating away from the current // location. As a compromise, we will try to increase the chances of sending // the unavailable presence within the short time span that we have upon // unloading by invoking flush() on the connection. We flush() once before // sending/queuing the unavailable presence in order to attemtp to have the // unavailable presence at the top of the send queue. We flush() once more // after sending/queuing the unavailable presence in order to attempt to // have it sent as soon as possible. this.connection.flush(); this.connection.send(pres); this.connection.flush(); }; ChatRoom.prototype.createNonAnonymousRoom = function () { // http://xmpp.org/extensions/xep-0045.html#createroom-reserved var getForm = $iq({type: 'get', to: this.roomjid}) .c('query', {xmlns: 'http://jabber.org/protocol/muc#owner'}) .c('x', {xmlns: 'jabber:x:data', type: 'submit'}); var self = this; this.connection.sendIQ(getForm, function (form) { if (!$(form).find( '>query>x[xmlns="jabber:x:data"]' + '>field[var="muc#roomconfig_whois"]').length) { logger.error('non-anonymous rooms not supported'); return; } var formSubmit = $iq({to: this.roomjid, type: 'set'}) .c('query', {xmlns: 'http://jabber.org/protocol/muc#owner'}); formSubmit.c('x', {xmlns: 'jabber:x:data', type: 'submit'}); formSubmit.c('field', {'var': 'FORM_TYPE'}) .c('value') .t('http://jabber.org/protocol/muc#roomconfig').up().up(); formSubmit.c('field', {'var': 'muc#roomconfig_whois'}) .c('value').t('anyone').up().up(); self.connection.sendIQ(formSubmit); }, function (error) { logger.error("Error getting room configuration form"); }); }; ChatRoom.prototype.onPresence = function (pres) { var from = pres.getAttribute('from'); // Parse roles. 
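    // Illustrative sketch of the node objects produced by parser.packet2JSON
    // (invoked further below); the value shown is a placeholder:
    //
    //     { tagName: 'user-agent',
    //       attributes: { xmlns: 'http://jitsi.org/jitmeet/user-agent' },
    //       value: 'Mozilla/5.0 ...',
    //       children: [] }
    //
    // The loops below switch on node.tagName and read node.value and
    // node.attributes from entries of this shape.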
var member = {}; member.show = $(pres).find('>show').text(); member.status = $(pres).find('>status').text(); var tmp = $(pres).find('>x[xmlns="http://jabber.org/protocol/muc#user"]>item'); member.affiliation = tmp.attr('affiliation'); member.role = tmp.attr('role'); // Focus recognition member.jid = tmp.attr('jid'); member.isFocus = false; if (member.jid && member.jid.indexOf(this.moderator.getFocusUserJid() + "/") === 0) { member.isFocus = true; } $(pres).find(">x").remove(); var nodes = []; parser.packet2JSON(pres, nodes); this.lastPresences[from] = nodes; var jibri = null; // process nodes to extract data needed for MUC_JOINED and MUC_MEMBER_JOINED // events for(var i = 0; i < nodes.length; i++) { var node = nodes[i]; switch(node.tagName) { case "nick": member.nick = node.value; break; case "userId": member.id = node.value; break; } } if (from == this.myroomjid) { if (member.affiliation == 'owner' && this.role !== member.role) { this.role = member.role; this.eventEmitter.emit( XMPPEvents.LOCAL_ROLE_CHANGED, this.role); } if (!this.joined) { this.joined = true; console.log("(TIME) MUC joined:\t", window.performance.now()); this.eventEmitter.emit(XMPPEvents.MUC_JOINED); } } else if (this.members[from] === undefined) { // new participant this.members[from] = member; logger.log('entered', from, member); if (member.isFocus) { this.focusMucJid = from; if(!this.recording) { this.recording = new Recorder(this.options.recordingType, this.eventEmitter, this.connection, this.focusMucJid, this.options.jirecon, this.roomjid); if(this.lastJibri) this.recording.handleJibriPresence(this.lastJibri); } logger.info("Ignore focus: " + from + ", real JID: " + member.jid); } else { this.eventEmitter.emit( XMPPEvents.MUC_MEMBER_JOINED, from, member.nick, member.role); } } else { // Presence update for existing participant // Watch role change: if (this.members[from].role != member.role) { this.members[from].role = member.role; this.eventEmitter.emit( XMPPEvents.MUC_ROLE_CHANGED, from, member.role); } // store the new display name if(member.displayName) this.members[from].displayName = member.displayName; } // after we had fired member or room joined events, lets fire events // for the rest info we got in presence for(var i = 0; i < nodes.length; i++) { var node = nodes[i]; switch(node.tagName) { case "nick": if(!member.isFocus) { var displayName = !this.xmpp.options.displayJids ? 
member.nick : Strophe.getResourceFromJid(from); if (displayName && displayName.length > 0) { this.eventEmitter.emit( XMPPEvents.DISPLAY_NAME_CHANGED, from, displayName); } } break; case "bridgeIsDown": if(!this.bridgeIsDown) { this.bridgeIsDown = true; this.eventEmitter.emit(XMPPEvents.BRIDGE_DOWN); } break; case "jibri-recording-status": var jibri = node; break; case "call-control": var att = node.attributes; if(!att) break; this.phoneNumber = att.phone || null; this.phonePin = att.pin || null; this.eventEmitter.emit(XMPPEvents.PHONE_NUMBER_CHANGED); break; default : this.processNode(node, from); } } // Trigger status message update if (member.status) { this.eventEmitter.emit(XMPPEvents.PRESENCE_STATUS, from, member.status); } if(jibri) { this.lastJibri = jibri; if(this.recording) this.recording.handleJibriPresence(jibri); } }; ChatRoom.prototype.processNode = function (node, from) { if(this.presHandlers[node.tagName]) this.presHandlers[node.tagName](node, Strophe.getResourceFromJid(from)); }; ChatRoom.prototype.sendMessage = function (body, nickname) { var msg = $msg({to: this.roomjid, type: 'groupchat'}); msg.c('body', body).up(); if (nickname) { msg.c('nick', {xmlns: 'http://jabber.org/protocol/nick'}).t(nickname).up().up(); } this.connection.send(msg); this.eventEmitter.emit(XMPPEvents.SENDING_CHAT_MESSAGE, body); }; ChatRoom.prototype.setSubject = function (subject) { var msg = $msg({to: this.roomjid, type: 'groupchat'}); msg.c('subject', subject); this.connection.send(msg); }; ChatRoom.prototype.onParticipantLeft = function (jid) { delete this.lastPresences[jid]; this.eventEmitter.emit(XMPPEvents.MUC_MEMBER_LEFT, jid); this.moderator.onMucMemberLeft(jid); }; ChatRoom.prototype.onPresenceUnavailable = function (pres, from) { // room destroyed ? if ($(pres).find('>x[xmlns="http://jabber.org/protocol/muc#user"]' + '>destroy').length) { var reason; var reasonSelect = $(pres).find( '>x[xmlns="http://jabber.org/protocol/muc#user"]' + '>destroy>reason'); if (reasonSelect.length) { reason = reasonSelect.text(); } this.xmpp.leaveRoom(this.roomjid); this.eventEmitter.emit(XMPPEvents.MUC_DESTROYED, reason); delete this.connection.emuc.rooms[Strophe.getBareJidFromJid(from)]; return true; } // Status code 110 indicates that this notification is "self-presence". if (!$(pres).find('>x[xmlns="http://jabber.org/protocol/muc#user"]>status[code="110"]').length) { delete this.members[from]; this.onParticipantLeft(from); } // If the status code is 110 this means we're leaving and we would like // to remove everyone else from our view, so we trigger the event. 
else if (Object.keys(this.members).length > 1) { for (var i in this.members) { var member = this.members[i]; delete this.members[i]; this.onParticipantLeft(member); } } if ($(pres).find('>x[xmlns="http://jabber.org/protocol/muc#user"]>status[code="307"]').length) { if (this.myroomjid === from) { this.xmpp.leaveRoom(this.roomjid); this.eventEmitter.emit(XMPPEvents.KICKED); } } }; ChatRoom.prototype.onMessage = function (msg, from) { var nick = $(msg).find('>nick[xmlns="http://jabber.org/protocol/nick"]') .text() || Strophe.getResourceFromJid(from); var txt = $(msg).find('>body').text(); var type = msg.getAttribute("type"); if (type == "error") { this.eventEmitter.emit(XMPPEvents.CHAT_ERROR_RECEIVED, $(msg).find('>text').text(), txt); return true; } var subject = $(msg).find('>subject'); if (subject.length) { var subjectText = subject.text(); if (subjectText || subjectText === "") { this.eventEmitter.emit(XMPPEvents.SUBJECT_CHANGED, subjectText); logger.log("Subject is changed to " + subjectText); } } // xep-0203 delay var stamp = $(msg).find('>delay').attr('stamp'); if (!stamp) { // or xep-0091 delay, UTC timestamp stamp = $(msg).find('>[xmlns="jabber:x:delay"]').attr('stamp'); if (stamp) { // the format is CCYYMMDDThh:mm:ss var dateParts = stamp.match(/(\d{4})(\d{2})(\d{2}T\d{2}:\d{2}:\d{2})/); stamp = dateParts[1] + "-" + dateParts[2] + "-" + dateParts[3] + "Z"; } } if (txt) { logger.log('chat', nick, txt); this.eventEmitter.emit(XMPPEvents.MESSAGE_RECEIVED, from, nick, txt, this.myroomjid, stamp); } }; ChatRoom.prototype.onPresenceError = function (pres, from) { if ($(pres).find('>error[type="auth"]>not-authorized[xmlns="urn:ietf:params:xml:ns:xmpp-stanzas"]').length) { logger.log('on password required', from); this.eventEmitter.emit(XMPPEvents.PASSWORD_REQUIRED); } else if ($(pres).find( '>error[type="cancel"]>not-allowed[xmlns="urn:ietf:params:xml:ns:xmpp-stanzas"]').length) { var toDomain = Strophe.getDomainFromJid(pres.getAttribute('to')); if (toDomain === this.xmpp.options.hosts.anonymousdomain) { // enter the room by replying with 'not-authorized'. This would // result in reconnection from authorized domain. // We're either missing Jicofo/Prosody config for anonymous // domains or something is wrong. 
this.eventEmitter.emit(XMPPEvents.ROOM_JOIN_ERROR, pres); } else { logger.warn('onPresError ', pres); this.eventEmitter.emit(XMPPEvents.ROOM_CONNECT_ERROR, pres); } } else { logger.warn('onPresError ', pres); this.eventEmitter.emit(XMPPEvents.ROOM_CONNECT_ERROR, pres); } }; ChatRoom.prototype.kick = function (jid) { var kickIQ = $iq({to: this.roomjid, type: 'set'}) .c('query', {xmlns: 'http://jabber.org/protocol/muc#admin'}) .c('item', {nick: Strophe.getResourceFromJid(jid), role: 'none'}) .c('reason').t('You have been kicked.').up().up().up(); this.connection.sendIQ( kickIQ, function (result) { logger.log('Kick participant with jid: ', jid, result); }, function (error) { logger.log('Kick participant error: ', error); }); }; ChatRoom.prototype.lockRoom = function (key, onSuccess, onError, onNotSupported) { //http://xmpp.org/extensions/xep-0045.html#roomconfig var ob = this; this.connection.sendIQ($iq({to: this.roomjid, type: 'get'}).c('query', {xmlns: 'http://jabber.org/protocol/muc#owner'}), function (res) { if ($(res).find('>query>x[xmlns="jabber:x:data"]>field[var="muc#roomconfig_roomsecret"]').length) { var formsubmit = $iq({to: ob.roomjid, type: 'set'}).c('query', {xmlns: 'http://jabber.org/protocol/muc#owner'}); formsubmit.c('x', {xmlns: 'jabber:x:data', type: 'submit'}); formsubmit.c('field', {'var': 'FORM_TYPE'}).c('value').t('http://jabber.org/protocol/muc#roomconfig').up().up(); formsubmit.c('field', {'var': 'muc#roomconfig_roomsecret'}).c('value').t(key).up().up(); // Fixes a bug in prosody 0.9.+ https://code.google.com/p/lxmppd/issues/detail?id=373 formsubmit.c('field', {'var': 'muc#roomconfig_whois'}).c('value').t('anyone').up().up(); // FIXME: is muc#roomconfig_passwordprotectedroom required? ob.connection.sendIQ(formsubmit, onSuccess, onError); } else { onNotSupported(); } }, onError); }; ChatRoom.prototype.addToPresence = function (key, values) { values.tagName = key; this.presMap["nodes"].push(values); }; ChatRoom.prototype.removeFromPresence = function (key) { for(var i = 0; i < this.presMap.nodes.length; i++) { if(key === this.presMap.nodes[i].tagName) this.presMap.nodes.splice(i, 1); } }; ChatRoom.prototype.addPresenceListener = function (name, handler) { this.presHandlers[name] = handler; }; ChatRoom.prototype.removePresenceListener = function (name) { delete this.presHandlers[name]; }; ChatRoom.prototype.isModerator = function () { return this.role === 'moderator'; }; ChatRoom.prototype.getMemberRole = function (peerJid) { if (this.members[peerJid]) { return this.members[peerJid].role; } return null; }; ChatRoom.prototype.setJingleSession = function(session){ this.session = session; this.session.room = this; }; ChatRoom.prototype.removeStream = function (stream, callback) { if(!this.session) return; this.session.removeStream(stream, callback); }; ChatRoom.prototype.switchStreams = function (stream, oldStream, callback, isAudio) { if(this.session) { // FIXME: will block switchInProgress on true value in case of exception this.session.switchStreams(stream, oldStream, callback, isAudio); } else { // We are done immediately logger.warn("No conference handler or conference not started yet"); callback(); } }; ChatRoom.prototype.addStream = function (stream, callback) { if(this.session) { // FIXME: will block switchInProgress on true value in case of exception this.session.addStream(stream, callback); } else { // We are done immediately logger.warn("No conference handler or conference not started yet"); callback(); } }; ChatRoom.prototype.setVideoMute = function (mute, 
callback, options) { var self = this; var localCallback = function (mute) { self.sendVideoInfoPresence(mute); if(callback) callback(mute); }; if(this.session) { this.session.setVideoMute( mute, localCallback, options); } else { localCallback(mute); } }; ChatRoom.prototype.setAudioMute = function (mute, callback) { //This will be for remote streams only // if (this.forceMuted && !mute) { // logger.info("Asking focus for unmute"); // this.connection.moderate.setMute(this.connection.emuc.myroomjid, mute); // // FIXME: wait for result before resetting muted status // this.forceMuted = false; // } return this.sendAudioInfoPresence(mute, callback); }; ChatRoom.prototype.addAudioInfoToPresence = function (mute) { this.removeFromPresence("audiomuted"); this.addToPresence("audiomuted", {attributes: {"audions": "http://jitsi.org/jitmeet/audio"}, value: mute.toString()}); }; ChatRoom.prototype.sendAudioInfoPresence = function(mute, callback) { this.addAudioInfoToPresence(mute); if(this.connection) { this.sendPresence(); } if(callback) callback(); }; ChatRoom.prototype.addVideoInfoToPresence = function (mute) { this.removeFromPresence("videomuted"); this.addToPresence("videomuted", {attributes: {"videons": "http://jitsi.org/jitmeet/video"}, value: mute.toString()}); }; ChatRoom.prototype.sendVideoInfoPresence = function (mute) { this.addVideoInfoToPresence(mute); if(!this.connection) return; this.sendPresence(); }; ChatRoom.prototype.addListener = function(type, listener) { this.eventEmitter.on(type, listener); }; ChatRoom.prototype.removeListener = function (type, listener) { this.eventEmitter.removeListener(type, listener); }; ChatRoom.prototype.remoteStreamAdded = function(data, sid, thessrc) { if(this.lastPresences[data.peerjid]) { var pres = this.lastPresences[data.peerjid]; var audiomuted = filterNodeFromPresenceJSON(pres, "audiomuted"); var videomuted = filterNodeFromPresenceJSON(pres, "videomuted"); data.videomuted = ((videomuted.length > 0 && videomuted[0] && videomuted[0]["value"] === "true")? true : false); data.audiomuted = ((audiomuted.length > 0 && audiomuted[0] && audiomuted[0]["value"] === "true")? true : false); } this.eventEmitter.emit(XMPPEvents.REMOTE_STREAM_RECEIVED, data, sid, thessrc); }; ChatRoom.prototype.getJidBySSRC = function (ssrc) { if (!this.session) return null; return this.session.getSsrcOwner(ssrc); }; /** * Returns true if the recording is supproted and false if not. */ ChatRoom.prototype.isRecordingSupported = function () { if(this.recording) return this.recording.isSupported(); return false; }; /** * Returns null if the recording is not supported, "on" if the recording started * and "off" if the recording is not started. */ ChatRoom.prototype.getRecordingState = function () { if(this.recording) return this.recording.getState(); return "off"; } /** * Returns the url of the recorded video. */ ChatRoom.prototype.getRecordingURL = function () { if(this.recording) return this.recording.getURL(); return null; } /** * Starts/stops the recording * @param token token for authentication * @param statusChangeHandler {function} receives the new status as argument. 
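 *
 * Usage sketch (the contents of the options object depend on the configured
 * recording backend; the token property corresponds to the "token" parameter
 * described above, and "room" stands for a joined ChatRoom instance):
 *
 *     room.toggleRecording({ token: 'recorder-token' }, function (status, err) {
 *         // status is e.g. "on", "off" or "error"; err is set on failure
 *     });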
*/ ChatRoom.prototype.toggleRecording = function (options, statusChangeHandler) { if(this.recording) return this.recording.toggleRecording(options, statusChangeHandler); return statusChangeHandler("error", new Error("The conference is not created yet!")); } /** * Returns true if the SIP calls are supported and false otherwise */ ChatRoom.prototype.isSIPCallingSupported = function () { if(this.moderator) return this.moderator.isSipGatewayEnabled(); return false; } /** * Dials a number. * @param number the number */ ChatRoom.prototype.dial = function (number) { return this.connection.rayo.dial(number, "fromnumber", Strophe.getNodeFromJid(this.myroomjid), this.password, this.focusMucJid); } /** * Hangup an existing call */ ChatRoom.prototype.hangup = function () { return this.connection.rayo.hangup(); } /** * Returns the phone number for joining the conference. */ ChatRoom.prototype.getPhoneNumber = function () { return this.phoneNumber; } /** * Returns the pin for joining the conference with phone. */ ChatRoom.prototype.getPhonePin = function () { return this.phonePin; } /** * Returns the connection state for the current session. */ ChatRoom.prototype.getConnectionState = function () { if(!this.session) return null; return this.session.getIceConnectionState(); } /** * Mutes remote participant. * @param jid of the participant * @param mute */ ChatRoom.prototype.muteParticipant = function (jid, mute) { logger.info("set mute", mute); var iqToFocus = $iq( {to: this.focusMucJid, type: 'set'}) .c('mute', { xmlns: 'http://jitsi.org/jitmeet/audio', jid: jid }) .t(mute.toString()) .up(); this.connection.sendIQ( iqToFocus, function (result) { logger.log('set mute', result); }, function (error) { logger.log('set mute error', error); }); } ChatRoom.prototype.onMute = function (iq) { var from = iq.getAttribute('from'); if (from !== this.focusMucJid) { logger.warn("Ignored mute from non focus peer"); return false; } var mute = $(iq).find('mute'); if (mute.length) { var doMuteAudio = mute.text() === "true"; this.eventEmitter.emit(XMPPEvents.AUDIO_MUTED_BY_FOCUS, doMuteAudio); } return true; } module.exports = ChatRoom; }).call(this,"/modules/xmpp/ChatRoom.js") },{"../../service/xmpp/XMPPEvents":137,"./moderator":36,"./recording":37,"events":51,"jitsi-meet-logger":79}],30:[function(require,module,exports){ (function (__filename){ /* * JingleSession provides an API to manage a single Jingle session. We will * have different implementations depending on the underlying interface used * (i.e. WebRTC and ORTC) and here we hold the code common to all of them. */ var logger = require("jitsi-meet-logger").getLogger(__filename); function JingleSession(me, sid, connection, service, eventEmitter) { /** * Our JID. */ this.me = me; /** * The Jingle session identifier. */ this.sid = sid; /** * The XMPP connection. */ this.connection = connection; /** * The XMPP service. */ this.service = service; /** * The event emitter. */ this.eventEmitter = eventEmitter; /** * Whether to use dripping or not. Dripping is sending trickle candidates * not one-by-one. * Note: currently we do not support 'false'. */ this.usedrip = true; /** * When dripping is used, stores ICE candidates which are to be sent. */ this.drip_container = []; // Media constraints. Is this WebRTC only? this.media_constraints = null; // ICE servers config (RTCConfiguration?). this.ice_config = {}; // The chat room instance associated with the session. this.room = null; } /** * Prepares this object to initiate a session. * @param peerjid the JID of the remote peer. 
* @param isInitiator whether we will be the Jingle initiator. * @param media_constraints * @param ice_config */ JingleSession.prototype.initialize = function(peerjid, isInitiator, media_constraints, ice_config) { this.media_constraints = media_constraints; this.ice_config = ice_config; if (this.state !== null) { logger.error('attempt to initiate on session ' + this.sid + 'in state ' + this.state); return; } this.state = 'pending'; this.initiator = isInitiator ? this.me : peerjid; this.responder = !isInitiator ? this.me : peerjid; this.peerjid = peerjid; this.doInitialize(); }; /** * Finishes initialization. */ JingleSession.prototype.doInitialize = function() {}; /** * Adds the ICE candidates found in the 'contents' array as remote candidates? * Note: currently only used on transport-info */ JingleSession.prototype.addIceCandidates = function(contents) {}; /** * Handles an 'add-source' event. * * @param contents an array of Jingle 'content' elements. */ JingleSession.prototype.addSources = function(contents) {}; /** * Handles a 'remove-source' event. * * @param contents an array of Jingle 'content' elements. */ JingleSession.prototype.removeSources = function(contents) {}; /** * Terminates this Jingle session (stops sending media and closes the streams?) */ JingleSession.prototype.terminate = function() {}; /** * Sends a Jingle session-terminate message to the peer and terminates the * session. * @param reason * @param text */ JingleSession.prototype.sendTerminate = function(reason, text) {}; /** * Handles an offer from the remote peer (prepares to accept a session). * @param jingle the 'jingle' XML element. */ JingleSession.prototype.setOffer = function(jingle) {}; /** * Handles an answer from the remote peer (prepares to accept a session). * @param jingle the 'jingle' XML element. */ JingleSession.prototype.setAnswer = function(jingle) {}; module.exports = JingleSession; }).call(this,"/modules/xmpp/JingleSession.js") },{"jitsi-meet-logger":79}],31:[function(require,module,exports){ (function (__filename){ /* jshint -W117 */ var logger = require("jitsi-meet-logger").getLogger(__filename); var JingleSession = require("./JingleSession"); var TraceablePeerConnection = require("./TraceablePeerConnection"); var SDPDiffer = require("./SDPDiffer"); var SDPUtil = require("./SDPUtil"); var SDP = require("./SDP"); var async = require("async"); var transform = require("sdp-transform"); var XMPPEvents = require("../../service/xmpp/XMPPEvents"); var RTCBrowserType = require("../RTC/RTCBrowserType"); var RTC = require("../RTC/RTC"); // Jingle stuff function JingleSessionPC(me, sid, connection, service) { JingleSession.call(this, me, sid, connection, service); this.initiator = null; this.responder = null; this.peerjid = null; this.state = null; this.localSDP = null; this.remoteSDP = null; this.relayedStreams = []; this.usetrickle = true; this.usepranswer = false; // early transport warmup -- mind you, this might fail. 
depends on webrtc issue 1718 this.hadstuncandidate = false; this.hadturncandidate = false; this.lasticecandidate = false; this.statsinterval = null; this.reason = null; this.addssrc = []; this.removessrc = []; this.pendingop = null; this.switchstreams = false; this.addingStreams = false; this.wait = true; /** * A map that stores SSRCs of local streams * @type {{}} maps media type('audio' or 'video') to SSRC number */ this.localStreamsSSRC = {}; this.ssrcOwners = {}; this.ssrcVideoTypes = {}; this.webrtcIceUdpDisable = !!this.service.options.webrtcIceUdpDisable; this.webrtcIceTcpDisable = !!this.service.options.webrtcIceTcpDisable; /** * The indicator which determines whether the (local) video has been muted * in response to a user command in contrast to an automatic decision made * by the application logic. */ this.videoMuteByUser = false; this.modifySourcesQueue = async.queue(this._modifySources.bind(this), 1); // We start with the queue paused. We resume it when the signaling state is // stable and the ice connection state is connected. this.modifySourcesQueue.pause(); } //XXX this is badly broken... JingleSessionPC.prototype = JingleSession.prototype; JingleSessionPC.prototype.constructor = JingleSessionPC; JingleSessionPC.prototype.setOffer = function(offer) { this.setRemoteDescription(offer, 'offer'); }; JingleSessionPC.prototype.setAnswer = function(answer) { this.setRemoteDescription(answer, 'answer'); }; JingleSessionPC.prototype.updateModifySourcesQueue = function() { var signalingState = this.peerconnection.signalingState; var iceConnectionState = this.peerconnection.iceConnectionState; if (signalingState === 'stable' && iceConnectionState === 'connected') { this.modifySourcesQueue.resume(); } else { this.modifySourcesQueue.pause(); } }; JingleSessionPC.prototype.doInitialize = function () { var self = this; this.hadstuncandidate = false; this.hadturncandidate = false; this.lasticecandidate = false; // True if reconnect is in progress this.isreconnect = false; // Set to true if the connection was ever stable this.wasstable = false; this.peerconnection = new TraceablePeerConnection( this.connection.jingle.ice_config, RTC.getPCConstraints(), this); this.peerconnection.onicecandidate = function (event) { var protocol; if (event && event.candidate) { protocol = (typeof event.candidate.protocol === 'string') ? event.candidate.protocol.toLowerCase() : ''; if ((self.webrtcIceTcpDisable && protocol == 'tcp') || (self.webrtcIceUdpDisable && protocol == 'udp')) { return; } } self.sendIceCandidate(event.candidate); }; this.peerconnection.onaddstream = function (event) { if (event.stream.id !== 'default') { logger.log("REMOTE STREAM ADDED: ", event.stream , event.stream.id); self.remoteStreamAdded(event); } else { // This is a recvonly stream. Clients that implement Unified Plan, // such as Firefox use recvonly "streams/channels/tracks" for // receiving remote stream/tracks, as opposed to Plan B where there // are only 3 channels: audio, video and data. 
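                // Such 'default' streams carry no usable remote media for the
                // conference; they are only logged below and are never handed
                // to remoteStreamAdded.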
logger.log("RECVONLY REMOTE STREAM IGNORED: " + event.stream + " - " + event.stream.id); } }; this.peerconnection.onremovestream = function (event) { // Remove the stream from remoteStreams // FIXME: remotestreamremoved.jingle not defined anywhere(unused) $(document).trigger('remotestreamremoved.jingle', [event, self.sid]); }; this.peerconnection.onsignalingstatechange = function (event) { if (!(self && self.peerconnection)) return; if (self.peerconnection.signalingState === 'stable') { self.wasstable = true; } self.updateModifySourcesQueue(); }; /** * The oniceconnectionstatechange event handler contains the code to execute when the iceconnectionstatechange event, * of type Event, is received by this RTCPeerConnection. Such an event is sent when the value of * RTCPeerConnection.iceConnectionState changes. * * @param event the event containing information about the change */ this.peerconnection.oniceconnectionstatechange = function (event) { if (!(self && self.peerconnection)) return; logger.log("(TIME) ICE " + self.peerconnection.iceConnectionState + ":\t", window.performance.now()); self.updateModifySourcesQueue(); switch (self.peerconnection.iceConnectionState) { case 'connected': // Informs interested parties that the connection has been restored. if (self.peerconnection.signalingState === 'stable' && self.isreconnect) self.room.eventEmitter.emit(XMPPEvents.CONNECTION_RESTORED); self.isreconnect = false; break; case 'disconnected': self.isreconnect = true; // Informs interested parties that the connection has been interrupted. if (self.wasstable) self.room.eventEmitter.emit(XMPPEvents.CONNECTION_INTERRUPTED); break; case 'failed': self.room.eventEmitter.emit(XMPPEvents.CONFERENCE_SETUP_FAILED); break; } onIceConnectionStateChange(self.sid, self); }; this.peerconnection.onnegotiationneeded = function (event) { self.room.eventEmitter.emit(XMPPEvents.PEERCONNECTION_READY, self); }; this.relayedStreams.forEach(function(stream) { self.peerconnection.addStream(stream); }); }; function onIceConnectionStateChange(sid, session) { switch (session.peerconnection.iceConnectionState) { case 'checking': session.timeChecking = (new Date()).getTime(); session.firstconnect = true; break; case 'completed': // on caller side case 'connected': if (session.firstconnect) { session.firstconnect = false; var metadata = {}; metadata.setupTime = (new Date()).getTime() - session.timeChecking; session.peerconnection.getStats(function (res) { if(res && res.result) { res.result().forEach(function (report) { if (report.type == 'googCandidatePair' && report.stat('googActiveConnection') == 'true') { metadata.localCandidateType = report.stat('googLocalCandidateType'); metadata.remoteCandidateType = report.stat('googRemoteCandidateType'); // log pair as well so we can get nice pie // charts metadata.candidatePair = report.stat('googLocalCandidateType') + ';' + report.stat('googRemoteCandidateType'); if (report.stat('googRemoteAddress').indexOf('[') === 0) { metadata.ipv6 = true; } } }); } }); } break; } } JingleSessionPC.prototype.accept = function () { this.state = 'active'; var pranswer = this.peerconnection.localDescription; if (!pranswer || pranswer.type != 'pranswer') { return; } logger.log('going from pranswer to answer'); if (this.usetrickle) { // remove candidates already sent from session-accept var lines = SDPUtil.find_lines(pranswer.sdp, 'a=candidate:'); for (var i = 0; i < lines.length; i++) { pranswer.sdp = pranswer.sdp.replace(lines[i] + '\r\n', ''); } } while (SDPUtil.find_line(pranswer.sdp, 
'a=inactive')) { // FIXME: change any inactive to sendrecv or whatever they were originally pranswer.sdp = pranswer.sdp.replace('a=inactive', 'a=sendrecv'); } var prsdp = new SDP(pranswer.sdp); if (this.webrtcIceTcpDisable) { prsdp.removeTcpCandidates = true; } if (this.webrtcIceUdpDisable) { prsdp.removeUdpCandidates = true; } var accept = $iq({to: this.peerjid, type: 'set'}) .c('jingle', {xmlns: 'urn:xmpp:jingle:1', action: 'session-accept', initiator: this.initiator, responder: this.responder, sid: this.sid }); // FIXME why do we generate session-accept in 3 different places ? prsdp.toJingle( accept, this.initiator == this.me ? 'initiator' : 'responder'); var sdp = this.peerconnection.localDescription.sdp; while (SDPUtil.find_line(sdp, 'a=inactive')) { // FIXME: change any inactive to sendrecv or whatever they were originally sdp = sdp.replace('a=inactive', 'a=sendrecv'); } var self = this; this.peerconnection.setLocalDescription(new RTCSessionDescription({type: 'answer', sdp: sdp}), function () { //logger.log('setLocalDescription success'); self.setLocalDescription(); self.connection.sendIQ(accept, function () { var ack = {}; ack.source = 'answer'; $(document).trigger('ack.jingle', [self.sid, ack]); }, function (stanza) { var error = ($(stanza).find('error').length) ? { code: $(stanza).find('error').attr('code'), reason: $(stanza).find('error :first')[0].tagName }:{}; error.source = 'answer'; JingleSessionPC.onJingleError(self.sid, error); }, 10000); }, function (e) { logger.error('setLocalDescription failed', e); self.room.eventEmitter.emit(XMPPEvents.CONFERENCE_SETUP_FAILED); } ); }; JingleSessionPC.prototype.terminate = function (reason) { this.state = 'ended'; this.reason = reason; this.peerconnection.close(); if (this.statsinterval !== null) { window.clearInterval(this.statsinterval); this.statsinterval = null; } }; JingleSessionPC.prototype.active = function () { return this.state == 'active'; }; JingleSessionPC.prototype.sendIceCandidate = function (candidate) { var self = this; if (candidate && !this.lasticecandidate) { var ice = SDPUtil.iceparams(this.localSDP.media[candidate.sdpMLineIndex], this.localSDP.session); var jcand = SDPUtil.candidateToJingle(candidate.candidate); if (!(ice && jcand)) { logger.error('failed to get ice && jcand'); return; } ice.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1'; if (jcand.type === 'srflx') { this.hadstuncandidate = true; } else if (jcand.type === 'relay') { this.hadturncandidate = true; } if (this.usetrickle) { if (this.usedrip) { if (this.drip_container.length === 0) { // start 20ms callout window.setTimeout(function () { if (self.drip_container.length === 0) return; self.sendIceCandidates(self.drip_container); self.drip_container = []; }, 20); } this.drip_container.push(candidate); return; } else { self.sendIceCandidate([candidate]); } } } else { //logger.log('sendIceCandidate: last candidate.'); if (!this.usetrickle) { //logger.log('should send full offer now...'); //FIXME why do we generate session-accept in 3 different places ? var init = $iq({to: this.peerjid, type: 'set'}) .c('jingle', {xmlns: 'urn:xmpp:jingle:1', action: this.peerconnection.localDescription.type == 'offer' ? 
'session-initiate' : 'session-accept', initiator: this.initiator, sid: this.sid}); this.localSDP = new SDP(this.peerconnection.localDescription.sdp); if (self.webrtcIceTcpDisable) { this.localSDP.removeTcpCandidates = true; } if (self.webrtcIceUdpDisable) { this.localSDP.removeUdpCandidates = true; } var sendJingle = function (ssrc) { if(!ssrc) ssrc = {}; self.localSDP.toJingle( init, self.initiator == self.me ? 'initiator' : 'responder', ssrc); self.connection.sendIQ(init, function () { //logger.log('session initiate ack'); var ack = {}; ack.source = 'offer'; $(document).trigger('ack.jingle', [self.sid, ack]); }, function (stanza) { self.state = 'error'; self.peerconnection.close(); var error = ($(stanza).find('error').length) ? { code: $(stanza).find('error').attr('code'), reason: $(stanza).find('error :first')[0].tagName, }:{}; error.source = 'offer'; JingleSessionPC.onJingleError(self.sid, error); }, 10000); }; sendJingle(); } this.lasticecandidate = true; logger.log('Have we encountered any srflx candidates? ' + this.hadstuncandidate); logger.log('Have we encountered any relay candidates? ' + this.hadturncandidate); if (!(this.hadstuncandidate || this.hadturncandidate) && this.peerconnection.signalingState != 'closed') { $(document).trigger('nostuncandidates.jingle', [this.sid]); } } }; JingleSessionPC.prototype.sendIceCandidates = function (candidates) { logger.log('sendIceCandidates', candidates); var cand = $iq({to: this.peerjid, type: 'set'}) .c('jingle', {xmlns: 'urn:xmpp:jingle:1', action: 'transport-info', initiator: this.initiator, sid: this.sid}); for (var mid = 0; mid < this.localSDP.media.length; mid++) { var cands = candidates.filter(function (el) { return el.sdpMLineIndex == mid; }); var mline = SDPUtil.parse_mline(this.localSDP.media[mid].split('\r\n')[0]); if (cands.length > 0) { var ice = SDPUtil.iceparams(this.localSDP.media[mid], this.localSDP.session); ice.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1'; cand.c('content', {creator: this.initiator == this.me ? 'initiator' : 'responder', name: (cands[0].sdpMid? cands[0].sdpMid : mline.media) }).c('transport', ice); for (var i = 0; i < cands.length; i++) { cand.c('candidate', SDPUtil.candidateToJingle(cands[i].candidate)).up(); } // add fingerprint if (SDPUtil.find_line(this.localSDP.media[mid], 'a=fingerprint:', this.localSDP.session)) { var tmp = SDPUtil.parse_fingerprint(SDPUtil.find_line(this.localSDP.media[mid], 'a=fingerprint:', this.localSDP.session)); tmp.required = true; cand.c( 'fingerprint', {xmlns: 'urn:xmpp:jingle:apps:dtls:0'}) .t(tmp.fingerprint); delete tmp.fingerprint; cand.attrs(tmp); cand.up(); } cand.up(); // transport cand.up(); // content } } // might merge last-candidate notification into this, but it is called alot later. See webrtc issue #2340 //logger.log('was this the last candidate', this.lasticecandidate); this.connection.sendIQ(cand, function () { var ack = {}; ack.source = 'transportinfo'; $(document).trigger('ack.jingle', [this.sid, ack]); }, function (stanza) { var error = ($(stanza).find('error').length) ? 
{ code: $(stanza).find('error').attr('code'), reason: $(stanza).find('error :first')[0].tagName, }:{}; error.source = 'transportinfo'; JingleSessionPC.onJingleError(this.sid, error); }, 10000); }; JingleSessionPC.prototype.sendOffer = function () { //logger.log('sendOffer...'); var self = this; this.peerconnection.createOffer(function (sdp) { self.createdOffer(sdp); }, function (e) { logger.error('createOffer failed', e); }, this.media_constraints ); }; // FIXME createdOffer is never used in jitsi-meet JingleSessionPC.prototype.createdOffer = function (sdp) { //logger.log('createdOffer', sdp); var self = this; this.localSDP = new SDP(sdp.sdp); //this.localSDP.mangle(); var sendJingle = function () { var init = $iq({to: this.peerjid, type: 'set'}) .c('jingle', {xmlns: 'urn:xmpp:jingle:1', action: 'session-initiate', initiator: this.initiator, sid: this.sid}); self.localSDP.toJingle( init, this.initiator == this.me ? 'initiator' : 'responder'); self.connection.sendIQ(init, function () { var ack = {}; ack.source = 'offer'; $(document).trigger('ack.jingle', [self.sid, ack]); }, function (stanza) { self.state = 'error'; self.peerconnection.close(); var error = ($(stanza).find('error').length) ? { code: $(stanza).find('error').attr('code'), reason: $(stanza).find('error :first')[0].tagName, }:{}; error.source = 'offer'; JingleSessionPC.onJingleError(self.sid, error); }, 10000); } sdp.sdp = this.localSDP.raw; this.peerconnection.setLocalDescription(sdp, function () { if(self.usetrickle) { sendJingle(); } self.setLocalDescription(); //logger.log('setLocalDescription success'); }, function (e) { logger.error('setLocalDescription failed', e); self.room.eventEmitter.emit(XMPPEvents.CONFERENCE_SETUP_FAILED); } ); var cands = SDPUtil.find_lines(this.localSDP.raw, 'a=candidate:'); for (var i = 0; i < cands.length; i++) { var cand = SDPUtil.parse_icecandidate(cands[i]); if (cand.type == 'srflx') { this.hadstuncandidate = true; } else if (cand.type == 'relay') { this.hadturncandidate = true; } } }; JingleSessionPC.prototype.readSsrcInfo = function (contents) { var self = this; $(contents).each(function (idx, content) { var name = $(content).attr('name'); var mediaType = this.getAttribute('name'); var ssrcs = $(content).find('description>source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]'); ssrcs.each(function () { var ssrc = this.getAttribute('ssrc'); $(this).find('>ssrc-info[xmlns="http://jitsi.org/jitmeet"]').each( function () { var owner = this.getAttribute('owner'); self.ssrcOwners[ssrc] = owner; } ); }); }); }; /** * Returns the SSRC of local audio stream. * @param mediaType 'audio' or 'video' media type * @returns {*} the SSRC number of local audio or video stream. */ JingleSessionPC.prototype.getLocalSSRC = function (mediaType) { return this.localStreamsSSRC[mediaType]; }; JingleSessionPC.prototype.getSsrcOwner = function (ssrc) { return this.ssrcOwners[ssrc]; }; JingleSessionPC.prototype.setRemoteDescription = function (elem, desctype) { //logger.log('setting remote description... 
', desctype); this.remoteSDP = new SDP(''); if (this.webrtcIceTcpDisable) { this.remoteSDP.removeTcpCandidates = true; } if (this.webrtcIceUdpDisable) { this.remoteSDP.removeUdpCandidates = true; } this.remoteSDP.fromJingle(elem); this.readSsrcInfo($(elem).find(">content")); if (this.peerconnection.remoteDescription) { logger.log('setRemoteDescription when remote description is not null, should be pranswer', this.peerconnection.remoteDescription); if (this.peerconnection.remoteDescription.type == 'pranswer') { var pranswer = new SDP(this.peerconnection.remoteDescription.sdp); for (var i = 0; i < pranswer.media.length; i++) { // make sure we have ice ufrag and pwd if (!SDPUtil.find_line(this.remoteSDP.media[i], 'a=ice-ufrag:', this.remoteSDP.session)) { if (SDPUtil.find_line(pranswer.media[i], 'a=ice-ufrag:', pranswer.session)) { this.remoteSDP.media[i] += SDPUtil.find_line(pranswer.media[i], 'a=ice-ufrag:', pranswer.session) + '\r\n'; } else { logger.warn('no ice ufrag?'); } if (SDPUtil.find_line(pranswer.media[i], 'a=ice-pwd:', pranswer.session)) { this.remoteSDP.media[i] += SDPUtil.find_line(pranswer.media[i], 'a=ice-pwd:', pranswer.session) + '\r\n'; } else { logger.warn('no ice pwd?'); } } // copy over candidates var lines = SDPUtil.find_lines(pranswer.media[i], 'a=candidate:'); for (var j = 0; j < lines.length; j++) { this.remoteSDP.media[i] += lines[j] + '\r\n'; } } this.remoteSDP.raw = this.remoteSDP.session + this.remoteSDP.media.join(''); } } var remotedesc = new RTCSessionDescription({type: desctype, sdp: this.remoteSDP.raw}); this.peerconnection.setRemoteDescription(remotedesc, function () { //logger.log('setRemoteDescription success'); }, function (e) { logger.error('setRemoteDescription error', e); JingleSessionPC.onJingleFatalError(self, e); } ); }; /** * Adds remote ICE candidates to this Jingle session. * @param elem An array of Jingle "content" elements? 
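 * Candidates that arrive before the remote description has been set (i.e.
 * before session-accept) are handled by cobbling together a provisional
 * answer (pranswer) from the local description, as can be seen below.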
*/ JingleSessionPC.prototype.addIceCandidate = function (elem) { var self = this; if (this.peerconnection.signalingState == 'closed') { return; } if (!this.peerconnection.remoteDescription && this.peerconnection.signalingState == 'have-local-offer') { logger.log('trickle ice candidate arriving before session accept...'); // create a PRANSWER for setRemoteDescription if (!this.remoteSDP) { var cobbled = 'v=0\r\n' + 'o=- 1923518516 2 IN IP4 0.0.0.0\r\n' +// FIXME 's=-\r\n' + 't=0 0\r\n'; // first, take some things from the local description for (var i = 0; i < this.localSDP.media.length; i++) { cobbled += SDPUtil.find_line(this.localSDP.media[i], 'm=') + '\r\n'; cobbled += SDPUtil.find_lines(this.localSDP.media[i], 'a=rtpmap:').join('\r\n') + '\r\n'; if (SDPUtil.find_line(this.localSDP.media[i], 'a=mid:')) { cobbled += SDPUtil.find_line(this.localSDP.media[i], 'a=mid:') + '\r\n'; } cobbled += 'a=inactive\r\n'; } this.remoteSDP = new SDP(cobbled); } // then add things like ice and dtls from remote candidate elem.each(function () { for (var i = 0; i < self.remoteSDP.media.length; i++) { if (SDPUtil.find_line(self.remoteSDP.media[i], 'a=mid:' + $(this).attr('name')) || self.remoteSDP.media[i].indexOf('m=' + $(this).attr('name')) === 0) { if (!SDPUtil.find_line(self.remoteSDP.media[i], 'a=ice-ufrag:')) { var tmp = $(this).find('transport'); self.remoteSDP.media[i] += 'a=ice-ufrag:' + tmp.attr('ufrag') + '\r\n'; self.remoteSDP.media[i] += 'a=ice-pwd:' + tmp.attr('pwd') + '\r\n'; tmp = $(this).find('transport>fingerprint'); if (tmp.length) { self.remoteSDP.media[i] += 'a=fingerprint:' + tmp.attr('hash') + ' ' + tmp.text() + '\r\n'; } else { logger.log('no dtls fingerprint (webrtc issue #1718?)'); self.remoteSDP.media[i] += 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:BAADBAADBAADBAADBAADBAADBAADBAADBAADBAAD\r\n'; } break; } } } }); this.remoteSDP.raw = this.remoteSDP.session + this.remoteSDP.media.join(''); // we need a complete SDP with ice-ufrag/ice-pwd in all parts // this makes the assumption that the PRANSWER is constructed such that the ice-ufrag is in all mediaparts // but it could be in the session part as well. since the code above constructs this sdp this can't happen however var iscomplete = this.remoteSDP.media.filter(function (mediapart) { return SDPUtil.find_line(mediapart, 'a=ice-ufrag:'); }).length == this.remoteSDP.media.length; if (iscomplete) { logger.log('setting pranswer'); try { this.peerconnection.setRemoteDescription(new RTCSessionDescription({type: 'pranswer', sdp: this.remoteSDP.raw }), function() { }, function(e) { logger.log('setRemoteDescription pranswer failed', e.toString()); }); } catch (e) { logger.error('setting pranswer failed', e); } } else { //logger.log('not yet setting pranswer'); } } // operate on each content element elem.each(function () { // would love to deactivate this, but firefox still requires it var idx = -1; var i; for (i = 0; i < self.remoteSDP.media.length; i++) { if (SDPUtil.find_line(self.remoteSDP.media[i], 'a=mid:' + $(this).attr('name')) || self.remoteSDP.media[i].indexOf('m=' + $(this).attr('name')) === 0) { idx = i; break; } } if (idx == -1) { // fall back to localdescription for (i = 0; i < self.localSDP.media.length; i++) { if (SDPUtil.find_line(self.localSDP.media[i], 'a=mid:' + $(this).attr('name')) || self.localSDP.media[i].indexOf('m=' + $(this).attr('name')) === 0) { idx = i; break; } } } var name = $(this).attr('name'); // TODO: check ice-pwd and ice-ufrag? 
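        // Each Jingle <candidate/> child below is translated back into an
        // "a=candidate:..." SDP line and fed to the browser. Roughly, as an
        // illustrative sketch only (attribute names per XEP-0176, values
        // made up):
        //
        //   <candidate foundation="1" component="1" protocol="udp"
        //              ip="10.0.0.1" port="10000" type="host" .../>
        //   --> line = "a=candidate:1 1 udp ... 10.0.0.1 10000 typ host ..."
        //   --> new RTCIceCandidate({sdpMLineIndex: idx, sdpMid: name,
        //                            candidate: line})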
        $(this).find('transport>candidate').each(function () {
            var line, candidate;
            var protocol = this.getAttribute('protocol');
            protocol = (typeof protocol === 'string') ? protocol.toLowerCase() : '';
            if ((self.webrtcIceTcpDisable && protocol == 'tcp') ||
                (self.webrtcIceUdpDisable && protocol == 'udp')) {
                return;
            }
            line = SDPUtil.candidateFromJingle(this);
            candidate = new RTCIceCandidate({sdpMLineIndex: idx,
                                             sdpMid: name,
                                             candidate: line});
            try {
                self.peerconnection.addIceCandidate(candidate);
            } catch (e) {
                logger.error('addIceCandidate failed', e.toString(), line);
                self.room.eventEmitter.emit(XMPPEvents.ADD_ICE_CANDIDATE_FAILED,
                    e, self.peerconnection);
            }
        });
    });
};

JingleSessionPC.prototype.sendAnswer = function (provisional) {
    //logger.log('createAnswer', provisional);
    var self = this;
    this.peerconnection.createAnswer(
        function (sdp) {
            self.createdAnswer(sdp, provisional);
        },
        function (e) {
            logger.error('createAnswer failed', e);
            self.room.eventEmitter.emit(XMPPEvents.CONFERENCE_SETUP_FAILED);
        },
        this.media_constraints
    );
};

JingleSessionPC.prototype.createdAnswer = function (sdp, provisional) {
    //logger.log('createAnswer callback');
    var self = this;
    this.localSDP = new SDP(sdp.sdp);
    //this.localSDP.mangle();
    this.usepranswer = provisional === true;
    if (this.usetrickle) {
        if (this.usepranswer) {
            sdp.type = 'pranswer';
            for (var i = 0; i < this.localSDP.media.length; i++) {
                this.localSDP.media[i] = this.localSDP.media[i].replace('a=sendrecv\r\n', 'a=inactive\r\n');
            }
            this.localSDP.raw = this.localSDP.session + '\r\n' + this.localSDP.media.join('');
        }
    }
    var sendJingle = function (ssrcs) {
        // FIXME why do we generate session-accept in 3 different places ?
        var accept = $iq({to: self.peerjid, type: 'set'})
            .c('jingle', {xmlns: 'urn:xmpp:jingle:1',
                action: 'session-accept',
                initiator: self.initiator,
                responder: self.responder,
                sid: self.sid });
        if (self.webrtcIceTcpDisable) {
            self.localSDP.removeTcpCandidates = true;
        }
        if (self.webrtcIceUdpDisable) {
            self.localSDP.removeUdpCandidates = true;
        }
        self.localSDP.toJingle(
            accept,
            self.initiator == self.me ? 'initiator' : 'responder',
            ssrcs);
        self.connection.sendIQ(accept,
            function () {
                var ack = {};
                ack.source = 'answer';
                $(document).trigger('ack.jingle', [self.sid, ack]);
            },
            function (stanza) {
                var error = ($(stanza).find('error').length) ?
{ code: $(stanza).find('error').attr('code'), reason: $(stanza).find('error :first')[0].tagName, }:{}; error.source = 'answer'; JingleSessionPC.onJingleError(self.sid, error); }, 10000); } sdp.sdp = this.localSDP.raw; this.peerconnection.setLocalDescription(sdp, function () { //logger.log('setLocalDescription success'); if (self.usetrickle && !self.usepranswer) { sendJingle(); } self.setLocalDescription(); }, function (e) { logger.error('setLocalDescription failed', e); self.room.eventEmitter.emit(XMPPEvents.CONFERENCE_SETUP_FAILED); } ); var cands = SDPUtil.find_lines(this.localSDP.raw, 'a=candidate:'); for (var j = 0; j < cands.length; j++) { var cand = SDPUtil.parse_icecandidate(cands[j]); if (cand.type == 'srflx') { this.hadstuncandidate = true; } else if (cand.type == 'relay') { this.hadturncandidate = true; } } }; JingleSessionPC.prototype.sendTerminate = function (reason, text) { var self = this, term = $iq({to: this.peerjid, type: 'set'}) .c('jingle', {xmlns: 'urn:xmpp:jingle:1', action: 'session-terminate', initiator: this.initiator, sid: this.sid}) .c('reason') .c(reason || 'success'); if (text) { term.up().c('text').t(text); } this.connection.sendIQ(term, function () { self.peerconnection.close(); self.peerconnection = null; self.terminate(); var ack = {}; ack.source = 'terminate'; $(document).trigger('ack.jingle', [self.sid, ack]); }, function (stanza) { var error = ($(stanza).find('error').length) ? { code: $(stanza).find('error').attr('code'), reason: $(stanza).find('error :first')[0].tagName, }:{}; $(document).trigger('ack.jingle', [self.sid, error]); }, 10000); if (this.statsinterval !== null) { window.clearInterval(this.statsinterval); this.statsinterval = null; } }; /** * Handles a Jingle source-add message for this Jingle session. * @param elem An array of Jingle "content" elements. */ JingleSessionPC.prototype.addSource = function (elem) { var self = this; // FIXME: dirty waiting if (!this.peerconnection.localDescription) { logger.warn("addSource - localDescription not ready yet") setTimeout(function() { self.addSource(elem); }, 200 ); return; } logger.log('addssrc', new Date().getTime()); logger.log('ice', this.peerconnection.iceConnectionState); this.readSsrcInfo(elem); var sdp = new SDP(this.peerconnection.remoteDescription.sdp); var mySdp = new SDP(this.peerconnection.localDescription.sdp); $(elem).each(function (idx, content) { var name = $(content).attr('name'); var lines = ''; $(content).find('ssrc-group[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]').each(function() { var semantics = this.getAttribute('semantics'); var ssrcs = $(this).find('>source').map(function () { return this.getAttribute('ssrc'); }).get(); if (ssrcs.length) { lines += 'a=ssrc-group:' + semantics + ' ' + ssrcs.join(' ') + '\r\n'; } }); var tmp = $(content).find('source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]'); // can handle both >source and >description>source tmp.each(function () { var ssrc = $(this).attr('ssrc'); if(mySdp.containsSSRC(ssrc)){ /** * This happens when multiple participants change their streams at the same time and * ColibriFocus.modifySources have to wait for stable state. In the meantime multiple * addssrc are scheduled for update IQ. 
See */ logger.warn("Got add stream request for my own ssrc: "+ssrc); return; } if (sdp.containsSSRC(ssrc)) { logger.warn("Source-add request for existing SSRC: " + ssrc); return; } $(this).find('>parameter').each(function () { lines += 'a=ssrc:' + ssrc + ' ' + $(this).attr('name'); if ($(this).attr('value') && $(this).attr('value').length) lines += ':' + $(this).attr('value'); lines += '\r\n'; }); }); sdp.media.forEach(function(media, idx) { if (!SDPUtil.find_line(media, 'a=mid:' + name)) return; sdp.media[idx] += lines; if (!self.addssrc[idx]) self.addssrc[idx] = ''; self.addssrc[idx] += lines; }); sdp.raw = sdp.session + sdp.media.join(''); }); this.modifySourcesQueue.push(function() { // When a source is added and if this is FF, a new channel is allocated // for receiving the added source. We need to diffuse the SSRC of this // new recvonly channel to the rest of the peers. logger.log('modify sources done'); var newSdp = new SDP(self.peerconnection.localDescription.sdp); logger.log("SDPs", mySdp, newSdp); self.notifyMySSRCUpdate(mySdp, newSdp); }); }; /** * Handles a Jingle source-remove message for this Jingle session. * @param elem An array of Jingle "content" elements. */ JingleSessionPC.prototype.removeSource = function (elem) { var self = this; // FIXME: dirty waiting if (!this.peerconnection.localDescription) { logger.warn("removeSource - localDescription not ready yet"); setTimeout(function() { self.removeSource(elem); }, 200 ); return; } logger.log('removessrc', new Date().getTime()); logger.log('ice', this.peerconnection.iceConnectionState); var sdp = new SDP(this.peerconnection.remoteDescription.sdp); var mySdp = new SDP(this.peerconnection.localDescription.sdp); $(elem).each(function (idx, content) { var name = $(content).attr('name'); var lines = ''; $(content).find('ssrc-group[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]').each(function() { var semantics = this.getAttribute('semantics'); var ssrcs = $(this).find('>source').map(function () { return this.getAttribute('ssrc'); }).get(); if (ssrcs.length) { lines += 'a=ssrc-group:' + semantics + ' ' + ssrcs.join(' ') + '\r\n'; } }); var tmp = $(content).find('source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]'); // can handle both >source and >description>source tmp.each(function () { var ssrc = $(this).attr('ssrc'); // This should never happen, but can be useful for bug detection if(mySdp.containsSSRC(ssrc)){ logger.error("Got remove stream request for my own ssrc: "+ssrc); return; } $(this).find('>parameter').each(function () { lines += 'a=ssrc:' + ssrc + ' ' + $(this).attr('name'); if ($(this).attr('value') && $(this).attr('value').length) lines += ':' + $(this).attr('value'); lines += '\r\n'; }); }); sdp.media.forEach(function(media, idx) { if (!SDPUtil.find_line(media, 'a=mid:' + name)) return; sdp.media[idx] += lines; if (!self.removessrc[idx]) self.removessrc[idx] = ''; self.removessrc[idx] += lines; }); sdp.raw = sdp.session + sdp.media.join(''); }); this.modifySourcesQueue.push(function() { // When a source is removed and if this is FF, the recvonly channel that // receives the remote stream is deactivated . We need to diffuse the // recvonly SSRC removal to the rest of the peers. 
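            // Once the queued renegotiation finishes, the local SDP from
            // before the change (mySdp) is diffed against the new one so that
            // notifyMySSRCUpdate can send source-add / source-remove IQs to
            // the other side.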
logger.log('modify sources done'); var newSdp = new SDP(self.peerconnection.localDescription.sdp); logger.log("SDPs", mySdp, newSdp); self.notifyMySSRCUpdate(mySdp, newSdp); }); }; JingleSessionPC.prototype._modifySources = function (successCallback, queueCallback) { var self = this; if (this.peerconnection.signalingState == 'closed') return; if (!(this.addssrc.length || this.removessrc.length || this.pendingop !== null || this.switchstreams || this.addingStreams)){ // There is nothing to do since scheduled job might have been executed by another succeeding call this.setLocalDescription(); if(successCallback){ successCallback(); } queueCallback(); return; } // Reset switch streams flags this.switchstreams = false; this.addingStreams = false; var sdp = new SDP(this.peerconnection.remoteDescription.sdp); // add sources this.addssrc.forEach(function(lines, idx) { sdp.media[idx] += lines; }); this.addssrc = []; // remove sources this.removessrc.forEach(function(lines, idx) { lines = lines.split('\r\n'); lines.pop(); // remove empty last element; lines.forEach(function(line) { sdp.media[idx] = sdp.media[idx].replace(line + '\r\n', ''); }); }); this.removessrc = []; sdp.raw = sdp.session + sdp.media.join(''); this.peerconnection.setRemoteDescription(new RTCSessionDescription({type: 'offer', sdp: sdp.raw}), function() { if(self.signalingState == 'closed') { logger.error("createAnswer attempt on closed state"); queueCallback("createAnswer attempt on closed state"); return; } self.peerconnection.createAnswer( function(modifiedAnswer) { // change video direction, see https://github.com/jitsi/jitmeet/issues/41 if (self.pendingop !== null) { var sdp = new SDP(modifiedAnswer.sdp); if (sdp.media.length > 1) { switch(self.pendingop) { case 'mute': sdp.media[1] = sdp.media[1].replace('a=sendrecv', 'a=recvonly'); break; case 'unmute': sdp.media[1] = sdp.media[1].replace('a=recvonly', 'a=sendrecv'); break; } sdp.raw = sdp.session + sdp.media.join(''); modifiedAnswer.sdp = sdp.raw; } self.pendingop = null; } // FIXME: pushing down an answer while ice connection state // is still checking is bad... //logger.log(self.peerconnection.iceConnectionState); // trying to work around another chrome bug //modifiedAnswer.sdp = modifiedAnswer.sdp.replace(/a=setup:active/g, 'a=setup:actpass'); self.peerconnection.setLocalDescription(modifiedAnswer, function() { //logger.log('modified setLocalDescription ok'); self.setLocalDescription(); if(successCallback){ successCallback(); } queueCallback(); }, function(error) { logger.error('modified setLocalDescription failed', error); queueCallback(error); } ); }, function(error) { logger.error('modified answer failed', error); queueCallback(error); } ); }, function(error) { logger.error('modify failed', error); queueCallback(error); } ); }; /** * Switches video streams. * @param newStream new stream that will be used as video of this session. * @param oldStream old video stream of this session. * @param successCallback callback executed after successful stream switch. * @param isAudio whether the streams are audio (if true) or video (if false). */ JingleSessionPC.prototype.switchStreams = function (newStream, oldStream, successCallback, isAudio) { var self = this; var sender, newTrack; var senderKind = isAudio ? 
'audio' : 'video'; // Remember SDP to figure out added/removed SSRCs var oldSdp = null; if (self.peerconnection) { if (self.peerconnection.localDescription) { oldSdp = new SDP(self.peerconnection.localDescription.sdp); } if (RTCBrowserType.getBrowserType() === RTCBrowserType.RTC_BROWSER_FIREFOX) { // On Firefox we don't replace MediaStreams as this messes up the // m-lines (which can't be removed in Plan Unified) and brings a lot // of complications. Instead, we use the RTPSender and replace just // the track. // Find the right sender (for audio or video) self.peerconnection.peerconnection.getSenders().some(function (s) { if (s.track && s.track.kind === senderKind) { sender = s; return true; } }); if (sender) { // We assume that our streams have a single track, either audio // or video. newTrack = isAudio ? newStream.getAudioTracks()[0] : newStream.getVideoTracks()[0]; sender.replaceTrack(newTrack) .then(function() { console.log("Replaced a track, isAudio=" + isAudio); }) .catch(function(err) { console.log("Failed to replace a track: " + err); }); } else { console.log("Cannot switch tracks: no RTPSender."); } } else { self.peerconnection.removeStream(oldStream, true); if (newStream) { self.peerconnection.addStream(newStream); } } } // Conference is not active if (!oldSdp) { successCallback(); return; } self.switchstreams = true; self.modifySourcesQueue.push(function() { logger.log('modify sources done'); successCallback(); var newSdp = new SDP(self.peerconnection.localDescription.sdp); logger.log("SDPs", oldSdp, newSdp); self.notifyMySSRCUpdate(oldSdp, newSdp); }); }; /** * Adds streams. * @param stream new stream that will be added. * @param success_callback callback executed after successful stream addition. */ JingleSessionPC.prototype.addStream = function (stream, callback) { var self = this; // Remember SDP to figure out added/removed SSRCs var oldSdp = null; if(this.peerconnection) { if(this.peerconnection.localDescription) { oldSdp = new SDP(this.peerconnection.localDescription.sdp); } if(stream) this.peerconnection.addStream(stream); } // Conference is not active if(!oldSdp || !this.peerconnection) { callback(); return; } this.addingStreams = true; this.modifySourcesQueue.push(function() { logger.log('modify sources done'); callback(); var newSdp = new SDP(self.peerconnection.localDescription.sdp); logger.log("SDPs", oldSdp, newSdp); self.notifyMySSRCUpdate(oldSdp, newSdp); }); } /** * Remove streams. * @param stream stream that will be removed. * @param success_callback callback executed after successful stream addition. */ JingleSessionPC.prototype.removeStream = function (stream, callback) { var self = this; // Remember SDP to figure out added/removed SSRCs var oldSdp = null; if(this.peerconnection) { if(this.peerconnection.localDescription) { oldSdp = new SDP(this.peerconnection.localDescription.sdp); } if (RTCBrowserType.getBrowserType() === RTCBrowserType.RTC_BROWSER_FIREFOX) { var sender = null; // On Firefox we don't replace MediaStreams as this messes up the // m-lines (which can't be removed in Plan Unified) and brings a lot // of complications. Instead, we use the RTPSender and replace just // the track. 
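            // Below, the stream's first audio or video track is located and
            // the RTCRtpSender currently sending it is looked up via
            // getSenders(); that sender is then removed instead of removing
            // the whole stream.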
var track = null; if(stream.getAudioTracks() && stream.getAudioTracks().length) { track = stream.getAudioTracks()[0]; } else if(stream.getVideoTracks() && stream.getVideoTracks().length) { track = stream.getVideoTracks()[0]; } if(!track) { console.log("Cannot switch tracks: no tracks."); return; } // Find the right sender (for audio or video) self.peerconnection.peerconnection.getSenders().some(function (s) { if (s.track === track) { sender = s; return true; } }); if (sender) { self.peerconnection.peerconnection.removeTrack(sender); // .then(function() { // console.log("Replaced a track, isAudio=" + isAudio); // }) // .catch(function(err) { // console.log("Failed to replace a track: " + err); // }); } else { console.log("Cannot switch tracks: no RTPSender."); } } else if(stream) this.peerconnection.removeStream(stream); } // Conference is not active if(!oldSdp || !this.peerconnection) { callback(); return; } this.addingStreams = true; this.modifySourcesQueue.push(function() { logger.log('modify sources done'); callback(); var newSdp = new SDP(self.peerconnection.localDescription.sdp); logger.log("SDPs", oldSdp, newSdp); self.notifyMySSRCUpdate(oldSdp, newSdp); }); } /** * Figures out added/removed ssrcs and send update IQs. * @param old_sdp SDP object for old description. * @param new_sdp SDP object for new description. */ JingleSessionPC.prototype.notifyMySSRCUpdate = function (old_sdp, new_sdp) { if (!(this.peerconnection.signalingState == 'stable' && this.peerconnection.iceConnectionState == 'connected')){ logger.log("Too early to send updates"); return; } // send source-remove IQ. sdpDiffer = new SDPDiffer(new_sdp, old_sdp); var remove = $iq({to: this.peerjid, type: 'set'}) .c('jingle', { xmlns: 'urn:xmpp:jingle:1', action: 'source-remove', initiator: this.initiator, sid: this.sid } ); var removed = sdpDiffer.toJingle(remove); if (removed && remove) { logger.info("Sending source-remove", remove); this.connection.sendIQ(remove, function (res) { logger.info('got remove result', res); }, function (err) { logger.error('got remove error', err); } ); } else { logger.log('removal not necessary'); } // send source-add IQ. var sdpDiffer = new SDPDiffer(old_sdp, new_sdp); var add = $iq({to: this.peerjid, type: 'set'}) .c('jingle', { xmlns: 'urn:xmpp:jingle:1', action: 'source-add', initiator: this.initiator, sid: this.sid } ); var added = sdpDiffer.toJingle(add); if (added && add) { logger.info("Sending source-add", add); this.connection.sendIQ(add, function (res) { logger.info('got add result', res); }, function (err) { logger.error('got add error', err); } ); } else { logger.log('addition not necessary'); } }; /** * Mutes/unmutes the (local) video i.e. enables/disables all video tracks. * * @param mute true to mute the (local) video i.e. to disable all video * tracks; otherwise, false * @param callback a function to be invoked with mute after all video * tracks have been enabled/disabled. The function may, optionally, return * another function which is to be invoked after the whole mute/unmute operation * has completed successfully. 
* @param options an object which specifies optional arguments such as the * boolean key byUser with default value true which * specifies whether the method was initiated in response to a user command (in * contrast to an automatic decision made by the application logic) */ JingleSessionPC.prototype.setVideoMute = function (mute, callback, options) { var byUser; if (options) { byUser = options.byUser; if (typeof byUser === 'undefined') { byUser = true; } } else { byUser = true; } // The user's command to mute the (local) video takes precedence over any // automatic decision made by the application logic. if (byUser) { this.videoMuteByUser = mute; } else if (this.videoMuteByUser) { return; } this.hardMuteVideo(mute); var self = this; var oldSdp = null; if(self.peerconnection) { if(self.peerconnection.localDescription) { oldSdp = new SDP(self.peerconnection.localDescription.sdp); } } this.modifySourcesQueue.push(function() { logger.log('modify sources done'); callback(mute); var newSdp = new SDP(self.peerconnection.localDescription.sdp); logger.log("SDPs", oldSdp, newSdp); self.notifyMySSRCUpdate(oldSdp, newSdp); }); }; JingleSessionPC.prototype.hardMuteVideo = function (muted) { this.pendingop = muted ? 'mute' : 'unmute'; }; JingleSessionPC.prototype.sendMute = function (muted, content) { var info = $iq({to: this.peerjid, type: 'set'}) .c('jingle', {xmlns: 'urn:xmpp:jingle:1', action: 'session-info', initiator: this.initiator, sid: this.sid }); info.c(muted ? 'mute' : 'unmute', {xmlns: 'urn:xmpp:jingle:apps:rtp:info:1'}); info.attrs({'creator': this.me == this.initiator ? 'creator' : 'responder'}); if (content) { info.attrs({'name': content}); } this.connection.send(info); }; JingleSessionPC.prototype.sendRinging = function () { var info = $iq({to: this.peerjid, type: 'set'}) .c('jingle', {xmlns: 'urn:xmpp:jingle:1', action: 'session-info', initiator: this.initiator, sid: this.sid }); info.c('ringing', {xmlns: 'urn:xmpp:jingle:apps:rtp:info:1'}); this.connection.send(info); }; JingleSessionPC.prototype.getStats = function (interval) { var self = this; var recv = {audio: 0, video: 0}; var lost = {audio: 0, video: 0}; var lastrecv = {audio: 0, video: 0}; var lastlost = {audio: 0, video: 0}; var loss = {audio: 0, video: 0}; var delta = {audio: 0, video: 0}; this.statsinterval = window.setInterval(function () { if (self && self.peerconnection && self.peerconnection.getStats) { self.peerconnection.getStats(function (stats) { var results = stats.result(); // TODO: there are so much statistics you can get from this.. for (var i = 0; i < results.length; ++i) { if (results[i].type == 'ssrc') { var packetsrecv = results[i].stat('packetsReceived'); var packetslost = results[i].stat('packetsLost'); if (packetsrecv && packetslost) { packetsrecv = parseInt(packetsrecv, 10); packetslost = parseInt(packetslost, 10); if (results[i].stat('googFrameRateReceived')) { lastlost.video = lost.video; lastrecv.video = recv.video; recv.video = packetsrecv; lost.video = packetslost; } else { lastlost.audio = lost.audio; lastrecv.audio = recv.audio; recv.audio = packetsrecv; lost.audio = packetslost; } } } } delta.audio = recv.audio - lastrecv.audio; delta.video = recv.video - lastrecv.video; loss.audio = (delta.audio > 0) ? Math.ceil(100 * (lost.audio - lastlost.audio) / delta.audio) : 0; loss.video = (delta.video > 0) ? 
                    Math.ceil(100 * (lost.video - lastlost.video) / delta.video) : 0;
                $(document).trigger('packetloss.jingle', [self.sid, loss]);
            });
        }
    }, interval || 3000);
    return this.statsinterval;
};

JingleSessionPC.onJingleError = function (session, error) {
    logger.error("Jingle error", error);
};

JingleSessionPC.onJingleFatalError = function (session, error) {
    // Called as a static helper with the affected session passed in explicitly.
    session.room.eventEmitter.emit(XMPPEvents.CONFERENCE_SETUP_FAILED);
    session.room.eventEmitter.emit(XMPPEvents.JINGLE_FATAL_ERROR, session, error);
};

JingleSessionPC.prototype.setLocalDescription = function () {
    var self = this;
    var newssrcs = [];
    if (!this.peerconnection.localDescription)
        return;
    var session = transform.parse(this.peerconnection.localDescription.sdp);
    var i;
    session.media.forEach(function (media) {
        if (media.ssrcs && media.ssrcs.length > 0) {
            // TODO(gp) maybe exclude FID streams?
            media.ssrcs.forEach(function (ssrc) {
                if (ssrc.attribute !== 'cname') {
                    return;
                }
                newssrcs.push({
                    'ssrc': ssrc.id,
                    'type': media.type
                });
                // FIXME allows for only one SSRC per media type
                self.localStreamsSSRC[media.type] = ssrc.id;
            });
        }
    });
    logger.log('new ssrcs', newssrcs);
    // Bind us as local SSRCs owner
    if (newssrcs.length > 0) {
        for (i = 0; i < newssrcs.length; i++) {
            var ssrc = newssrcs[i].ssrc;
            var myJid = self.me;
            self.ssrcOwners[ssrc] = myJid;
        }
    }
};

// an attempt to work around https://github.com/jitsi/jitmeet/issues/32
JingleSessionPC.prototype.sendKeyframe = function () {
    var pc = this.peerconnection;
    logger.log('sendkeyframe', pc.iceConnectionState);
    if (pc.iceConnectionState !== 'connected')
        return; // safe...
    var self = this;
    pc.setRemoteDescription(
        pc.remoteDescription,
        function () {
            pc.createAnswer(
                function (modifiedAnswer) {
                    pc.setLocalDescription(
                        modifiedAnswer,
                        function () {
                            // noop
                        },
                        function (error) {
                            logger.log('triggerKeyframe setLocalDescription failed', error);
                            self.room.eventEmitter.emit(XMPPEvents.SET_LOCAL_DESCRIPTION_ERROR);
                        }
                    );
                },
                function (error) {
                    logger.log('triggerKeyframe createAnswer failed', error);
                    self.room.eventEmitter.emit(XMPPEvents.CREATE_ANSWER_ERROR);
                }
            );
        },
        function (error) {
            logger.log('triggerKeyframe setRemoteDescription failed', error);
            self.room.eventEmitter.emit(XMPPEvents.SET_REMOTE_DESCRIPTION_ERROR);
        }
    );
};

JingleSessionPC.prototype.remoteStreamAdded = function (data, times) {
    var self = this;
    var thessrc;
    var streamId = RTC.getStreamID(data.stream);
    // look up an associated JID for a stream id
    if (!streamId) {
        logger.error("No stream ID for", data.stream);
    } else if (streamId && streamId.indexOf('mixedmslabel') === -1) {
        // look only at a=ssrc: and _not_ at a=ssrc-group: lines
        var ssrclines = this.peerconnection.remoteDescription ?
            SDPUtil.find_lines(this.peerconnection.remoteDescription.sdp, 'a=ssrc:') : [];
        ssrclines = ssrclines.filter(function (line) {
            // NOTE(gp) previously we filtered on the mslabel, but that property
            // is not always present.
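            // The remote stream is matched to its a=ssrc: lines via its msid
            // (or mslabel when the Temasys plugin is in use); the matching
            // SSRC is then resolved to the sending participant's JID through
            // ssrcOwners.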
// return line.indexOf('mslabel:' + data.stream.label) !== -1; if (RTCBrowserType.isTemasysPluginUsed()) { return ((line.indexOf('mslabel:' + streamId) !== -1)); } else { return ((line.indexOf('msid:' + streamId) !== -1)); } }); if (ssrclines.length) { thessrc = ssrclines[0].substring(7).split(' ')[0]; if (!self.ssrcOwners[thessrc]) { logger.error("No SSRC owner known for: " + thessrc); return; } data.peerjid = self.ssrcOwners[thessrc]; logger.log('associated jid', self.ssrcOwners[thessrc]); } else { logger.error("No SSRC lines for ", streamId); } } this.room.remoteStreamAdded(data, this.sid, thessrc); var isVideo = data.stream.getVideoTracks().length > 0; // an attempt to work around https://github.com/jitsi/jitmeet/issues/32 if (isVideo && data.peerjid && this.peerjid === data.peerjid && data.stream.getVideoTracks().length === 0 && RTC.localVideo.getTracks().length > 0) { window.setTimeout(function () { self.sendKeyframe(); }, 3000); } } /** * Returns the ice connection state for the peer connection. * @returns the ice connection state for the peer connection. */ JingleSessionPC.prototype.getIceConnectionState = function () { return this.peerconnection.iceConnectionState; } module.exports = JingleSessionPC; }).call(this,"/modules/xmpp/JingleSessionPC.js") },{"../../service/xmpp/XMPPEvents":137,"../RTC/RTC":16,"../RTC/RTCBrowserType":17,"./JingleSession":30,"./SDP":32,"./SDPDiffer":33,"./SDPUtil":34,"./TraceablePeerConnection":35,"async":47,"jitsi-meet-logger":79,"sdp-transform":115}],32:[function(require,module,exports){ (function (__filename){ /* jshint -W117 */ var logger = require("jitsi-meet-logger").getLogger(__filename); var SDPUtil = require("./SDPUtil"); // SDP STUFF function SDP(sdp) { /** * Whether or not to remove TCP ice candidates when translating from/to jingle. * @type {boolean} */ this.removeTcpCandidates = false; /** * Whether or not to remove UDP ice candidates when translating from/to jingle. * @type {boolean} */ this.removeUdpCandidates = false; this.media = sdp.split('\r\nm='); for (var i = 1; i < this.media.length; i++) { this.media[i] = 'm=' + this.media[i]; if (i != this.media.length - 1) { this.media[i] += '\r\n'; } } this.session = this.media.shift() + '\r\n'; this.raw = this.session + this.media.join(''); } /** * Returns map of MediaChannel mapped per channel idx. */ SDP.prototype.getMediaSsrcMap = function() { var self = this; var media_ssrcs = {}; var tmp; for (var mediaindex = 0; mediaindex < self.media.length; mediaindex++) { tmp = SDPUtil.find_lines(self.media[mediaindex], 'a=ssrc:'); var mid = SDPUtil.parse_mid(SDPUtil.find_line(self.media[mediaindex], 'a=mid:')); var media = { mediaindex: mediaindex, mid: mid, ssrcs: {}, ssrcGroups: [] }; media_ssrcs[mediaindex] = media; tmp.forEach(function (line) { var linessrc = line.substring(7).split(' ')[0]; // allocate new ChannelSsrc if(!media.ssrcs[linessrc]) { media.ssrcs[linessrc] = { ssrc: linessrc, lines: [] }; } media.ssrcs[linessrc].lines.push(line); }); tmp = SDPUtil.find_lines(self.media[mediaindex], 'a=ssrc-group:'); tmp.forEach(function(line){ var idx = line.indexOf(' '); var semantics = line.substr(0, idx).substr(13); var ssrcs = line.substr(14 + semantics.length).split(' '); if (ssrcs.length) { media.ssrcGroups.push({ semantics: semantics, ssrcs: ssrcs }); } }); } return media_ssrcs; }; /** * Returns true if this SDP contains given SSRC. * @param ssrc the ssrc to check. * @returns {boolean} true if this SDP contains given SSRC. 
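 * @example
 * // Illustrative sketch only: assumes 'peerconnection' is an
 * // RTCPeerConnection with a remote description set; the SSRC value is
 * // made up.
 * var sdp = new SDP(peerconnection.remoteDescription.sdp);
 * sdp.containsSSRC('3735928559'); // true only if an a=ssrc:3735928559 line exists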
*/ SDP.prototype.containsSSRC = function (ssrc) { // FIXME this code is really strange - improve it if you can var medias = this.getMediaSsrcMap(); var result = false; Object.keys(medias).forEach(function (mediaindex) { if (result) return; if (medias[mediaindex].ssrcs[ssrc]) { result = true; } }); return result; }; // remove iSAC and CN from SDP SDP.prototype.mangle = function () { var i, j, mline, lines, rtpmap, newdesc; for (i = 0; i < this.media.length; i++) { lines = this.media[i].split('\r\n'); lines.pop(); // remove empty last element mline = SDPUtil.parse_mline(lines.shift()); if (mline.media != 'audio') continue; newdesc = ''; mline.fmt.length = 0; for (j = 0; j < lines.length; j++) { if (lines[j].substr(0, 9) == 'a=rtpmap:') { rtpmap = SDPUtil.parse_rtpmap(lines[j]); if (rtpmap.name == 'CN' || rtpmap.name == 'ISAC') continue; mline.fmt.push(rtpmap.id); newdesc += lines[j] + '\r\n'; } else { newdesc += lines[j] + '\r\n'; } } this.media[i] = SDPUtil.build_mline(mline) + '\r\n'; this.media[i] += newdesc; } this.raw = this.session + this.media.join(''); }; // remove lines matching prefix from session section SDP.prototype.removeSessionLines = function(prefix) { var self = this; var lines = SDPUtil.find_lines(this.session, prefix); lines.forEach(function(line) { self.session = self.session.replace(line + '\r\n', ''); }); this.raw = this.session + this.media.join(''); return lines; } // remove lines matching prefix from a media section specified by mediaindex // TODO: non-numeric mediaindex could match mid SDP.prototype.removeMediaLines = function(mediaindex, prefix) { var self = this; var lines = SDPUtil.find_lines(this.media[mediaindex], prefix); lines.forEach(function(line) { self.media[mediaindex] = self.media[mediaindex].replace(line + '\r\n', ''); }); this.raw = this.session + this.media.join(''); return lines; } // add content's to a jingle element SDP.prototype.toJingle = function (elem, thecreator) { // logger.log("SSRC" + ssrcs["audio"] + " - " + ssrcs["video"]); var self = this; var i, j, k, mline, ssrc, rtpmap, tmp, lines; // new bundle plan if (SDPUtil.find_line(this.session, 'a=group:')) { lines = SDPUtil.find_lines(this.session, 'a=group:'); for (i = 0; i < lines.length; i++) { tmp = lines[i].split(' '); var semantics = tmp.shift().substr(8); elem.c('group', {xmlns: 'urn:xmpp:jingle:apps:grouping:0', semantics:semantics}); for (j = 0; j < tmp.length; j++) { elem.c('content', {name: tmp[j]}).up(); } elem.up(); } } for (i = 0; i < this.media.length; i++) { mline = SDPUtil.parse_mline(this.media[i].split('\r\n')[0]); if (!(mline.media === 'audio' || mline.media === 'video' || mline.media === 'application')) { continue; } if (SDPUtil.find_line(this.media[i], 'a=ssrc:')) { ssrc = SDPUtil.find_line(this.media[i], 'a=ssrc:').substring(7).split(' ')[0]; // take the first } else { ssrc = false; } elem.c('content', {creator: thecreator, name: mline.media}); if (SDPUtil.find_line(this.media[i], 'a=mid:')) { // prefer identifier from a=mid if present var mid = SDPUtil.parse_mid(SDPUtil.find_line(this.media[i], 'a=mid:')); elem.attrs({ name: mid }); } if (SDPUtil.find_line(this.media[i], 'a=rtpmap:').length) { elem.c('description', {xmlns: 'urn:xmpp:jingle:apps:rtp:1', media: mline.media }); if (ssrc) { elem.attrs({ssrc: ssrc}); } for (j = 0; j < mline.fmt.length; j++) { rtpmap = SDPUtil.find_line(this.media[i], 'a=rtpmap:' + mline.fmt[j]); elem.c('payload-type', SDPUtil.parse_rtpmap(rtpmap)); // put any 'a=fmtp:' + mline.fmt[j] lines into if (SDPUtil.find_line(this.media[i], 
'a=fmtp:' + mline.fmt[j])) { tmp = SDPUtil.parse_fmtp(SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j])); for (k = 0; k < tmp.length; k++) { elem.c('parameter', tmp[k]).up(); } } this.rtcpFbToJingle(i, elem, mline.fmt[j]); // XEP-0293 -- map a=rtcp-fb elem.up(); } if (SDPUtil.find_line(this.media[i], 'a=crypto:', this.session)) { elem.c('encryption', {required: 1}); var crypto = SDPUtil.find_lines(this.media[i], 'a=crypto:', this.session); crypto.forEach(function(line) { elem.c('crypto', SDPUtil.parse_crypto(line)).up(); }); elem.up(); // end of encryption } if (ssrc) { // new style mapping elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' }); // FIXME: group by ssrc and support multiple different ssrcs var ssrclines = SDPUtil.find_lines(this.media[i], 'a=ssrc:'); if(ssrclines.length > 0) { ssrclines.forEach(function (line) { var idx = line.indexOf(' '); var linessrc = line.substr(0, idx).substr(7); if (linessrc != ssrc) { elem.up(); ssrc = linessrc; elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' }); } var kv = line.substr(idx + 1); elem.c('parameter'); if (kv.indexOf(':') == -1) { elem.attrs({ name: kv }); } else { var k = kv.split(':', 2)[0]; elem.attrs({ name: k }); var v = kv.split(':', 2)[1]; v = SDPUtil.filter_special_chars(v); elem.attrs({ value: v }); } elem.up(); }); } else { elem.up(); elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' }); elem.c('parameter'); elem.attrs({name: "cname", value:Math.random().toString(36).substring(7)}); elem.up(); var msid = null; if(mline.media == "audio") { msid = APP.RTC.localAudio._getId(); } else { msid = APP.RTC.localVideo._getId(); } if(msid != null) { msid = SDPUtil.filter_special_chars(msid); elem.c('parameter'); elem.attrs({name: "msid", value:msid}); elem.up(); elem.c('parameter'); elem.attrs({name: "mslabel", value:msid}); elem.up(); elem.c('parameter'); elem.attrs({name: "label", value:msid}); elem.up(); } } elem.up(); // XEP-0339 handle ssrc-group attributes var ssrc_group_lines = SDPUtil.find_lines(this.media[i], 'a=ssrc-group:'); ssrc_group_lines.forEach(function(line) { var idx = line.indexOf(' '); var semantics = line.substr(0, idx).substr(13); var ssrcs = line.substr(14 + semantics.length).split(' '); if (ssrcs.length) { elem.c('ssrc-group', { semantics: semantics, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' }); ssrcs.forEach(function(ssrc) { elem.c('source', { ssrc: ssrc }) .up(); }); elem.up(); } }); } if (SDPUtil.find_line(this.media[i], 'a=rtcp-mux')) { elem.c('rtcp-mux').up(); } // XEP-0293 -- map a=rtcp-fb:* this.rtcpFbToJingle(i, elem, '*'); // XEP-0294 if (SDPUtil.find_line(this.media[i], 'a=extmap:')) { lines = SDPUtil.find_lines(this.media[i], 'a=extmap:'); for (j = 0; j < lines.length; j++) { tmp = SDPUtil.parse_extmap(lines[j]); elem.c('rtp-hdrext', { xmlns: 'urn:xmpp:jingle:apps:rtp:rtp-hdrext:0', uri: tmp.uri, id: tmp.value }); if (tmp.hasOwnProperty('direction')) { switch (tmp.direction) { case 'sendonly': elem.attrs({senders: 'responder'}); break; case 'recvonly': elem.attrs({senders: 'initiator'}); break; case 'sendrecv': elem.attrs({senders: 'both'}); break; case 'inactive': elem.attrs({senders: 'none'}); break; } } // TODO: handle params elem.up(); } } elem.up(); // end of description } // map ice-ufrag/pwd, dtls fingerprint, candidates this.transportToJingle(i, elem); if (SDPUtil.find_line(this.media[i], 'a=sendrecv', this.session)) { elem.attrs({senders: 'both'}); } else if (SDPUtil.find_line(this.media[i], 'a=sendonly', this.session)) 
{ elem.attrs({senders: 'initiator'}); } else if (SDPUtil.find_line(this.media[i], 'a=recvonly', this.session)) { elem.attrs({senders: 'responder'}); } else if (SDPUtil.find_line(this.media[i], 'a=inactive', this.session)) { elem.attrs({senders: 'none'}); } if (mline.port == '0') { // estos hack to reject an m-line elem.attrs({senders: 'rejected'}); } elem.up(); // end of content } elem.up(); return elem; }; SDP.prototype.transportToJingle = function (mediaindex, elem) { var tmp, sctpmap, sctpAttrs, fingerprints; var self = this; elem.c('transport'); // XEP-0343 DTLS/SCTP if (SDPUtil.find_line(this.media[mediaindex], 'a=sctpmap:').length) { sctpmap = SDPUtil.find_line( this.media[mediaindex], 'a=sctpmap:', self.session); if (sctpmap) { sctpAttrs = SDPUtil.parse_sctpmap(sctpmap); elem.c('sctpmap', { xmlns: 'urn:xmpp:jingle:transports:dtls-sctp:1', number: sctpAttrs[0], /* SCTP port */ protocol: sctpAttrs[1] /* protocol */ }); // Optional stream count attribute if (sctpAttrs.length > 2) elem.attrs({ streams: sctpAttrs[2]}); elem.up(); } } // XEP-0320 fingerprints = SDPUtil.find_lines(this.media[mediaindex], 'a=fingerprint:', this.session); fingerprints.forEach(function(line) { tmp = SDPUtil.parse_fingerprint(line); tmp.xmlns = 'urn:xmpp:jingle:apps:dtls:0'; elem.c('fingerprint').t(tmp.fingerprint); delete tmp.fingerprint; line = SDPUtil.find_line(self.media[mediaindex], 'a=setup:', self.session); if (line) { tmp.setup = line.substr(8); } elem.attrs(tmp); elem.up(); // end of fingerprint }); tmp = SDPUtil.iceparams(this.media[mediaindex], this.session); if (tmp) { tmp.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1'; elem.attrs(tmp); // XEP-0176 if (SDPUtil.find_line(this.media[mediaindex], 'a=candidate:', this.session)) { // add any a=candidate lines var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=candidate:', this.session); lines.forEach(function (line) { var candidate = SDPUtil.candidateToJingle(line); var protocol = (candidate && typeof candidate.protocol === 'string') ? 
candidate.protocol.toLowerCase() : ''; if ((self.removeTcpCandidates && protocol === 'tcp') || (self.removeUdpCandidates && protocol === 'udp')) { return; } elem.c('candidate', candidate).up(); }); } } elem.up(); // end of transport } SDP.prototype.rtcpFbToJingle = function (mediaindex, elem, payloadtype) { // XEP-0293 var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=rtcp-fb:' + payloadtype); lines.forEach(function (line) { var tmp = SDPUtil.parse_rtcpfb(line); if (tmp.type == 'trr-int') { elem.c('rtcp-fb-trr-int', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', value: tmp.params[0]}); elem.up(); } else { elem.c('rtcp-fb', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', type: tmp.type}); if (tmp.params.length > 0) { elem.attrs({'subtype': tmp.params[0]}); } elem.up(); } }); }; SDP.prototype.rtcpFbFromJingle = function (elem, payloadtype) { // XEP-0293 var media = ''; var tmp = elem.find('>rtcp-fb-trr-int[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]'); if (tmp.length) { media += 'a=rtcp-fb:' + '*' + ' ' + 'trr-int' + ' '; if (tmp.attr('value')) { media += tmp.attr('value'); } else { media += '0'; } media += '\r\n'; } tmp = elem.find('>rtcp-fb[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]'); tmp.each(function () { media += 'a=rtcp-fb:' + payloadtype + ' ' + $(this).attr('type'); if ($(this).attr('subtype')) { media += ' ' + $(this).attr('subtype'); } media += '\r\n'; }); return media; }; // construct an SDP from a jingle stanza SDP.prototype.fromJingle = function (jingle) { var self = this; this.raw = 'v=0\r\n' + 'o=- 1923518516 2 IN IP4 0.0.0.0\r\n' +// FIXME 's=-\r\n' + 't=0 0\r\n'; // http://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-04#section-8 if ($(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').length) { $(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').each(function (idx, group) { var contents = $(group).find('>content').map(function (idx, content) { return content.getAttribute('name'); }).get(); if (contents.length > 0) { self.raw += 'a=group:' + (group.getAttribute('semantics') || group.getAttribute('type')) + ' ' + contents.join(' ') + '\r\n'; } }); } this.session = this.raw; jingle.find('>content').each(function () { var m = self.jingle2media($(this)); self.media.push(m); }); // reconstruct msid-semantic -- apparently not necessary /* var msid = SDPUtil.parse_ssrc(this.raw); if (msid.hasOwnProperty('mslabel')) { this.session += "a=msid-semantic: WMS " + msid.mslabel + "\r\n"; } */ this.raw = this.session + this.media.join(''); }; // translate a jingle content element into an an SDP media part SDP.prototype.jingle2media = function (content) { var media = '', desc = content.find('description'), ssrc = desc.attr('ssrc'), self = this, tmp; var sctp = content.find( '>transport>sctpmap[xmlns="urn:xmpp:jingle:transports:dtls-sctp:1"]'); tmp = { media: desc.attr('media') }; tmp.port = '1'; if (content.attr('senders') == 'rejected') { // estos hack to reject an m-line. 
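/*
 * Illustrative sketch (not part of the library): jingle2media rebuilds the SDP
 * m-line from the <description/> element via SDPUtil.build_mline. The port is
 * a placeholder ('1'), or '0' when the content was rejected:
 *
 *   var mline = { media: 'audio', port: '1', proto: 'RTP/SAVPF',
 *                 fmt: ['111', '103'] };        // hypothetical payload types
 *   SDPUtil.build_mline(mline);                 // -> 'm=audio 1 RTP/SAVPF 111 103'
 *   mline.port = '0';                           // senders === 'rejected'
 *   SDPUtil.build_mline(mline);                 // -> 'm=audio 0 RTP/SAVPF 111 103'
 */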
tmp.port = '0'; } if (content.find('>transport>fingerprint').length || desc.find('encryption').length) { if (sctp.length) tmp.proto = 'DTLS/SCTP'; else tmp.proto = 'RTP/SAVPF'; } else { tmp.proto = 'RTP/AVPF'; } if (!sctp.length) { tmp.fmt = desc.find('payload-type').map( function () { return this.getAttribute('id'); }).get(); media += SDPUtil.build_mline(tmp) + '\r\n'; } else { media += 'm=application 1 DTLS/SCTP ' + sctp.attr('number') + '\r\n'; media += 'a=sctpmap:' + sctp.attr('number') + ' ' + sctp.attr('protocol'); var streamCount = sctp.attr('streams'); if (streamCount) media += ' ' + streamCount + '\r\n'; else media += '\r\n'; } media += 'c=IN IP4 0.0.0.0\r\n'; if (!sctp.length) media += 'a=rtcp:1 IN IP4 0.0.0.0\r\n'; tmp = content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]'); if (tmp.length) { if (tmp.attr('ufrag')) { media += SDPUtil.build_iceufrag(tmp.attr('ufrag')) + '\r\n'; } if (tmp.attr('pwd')) { media += SDPUtil.build_icepwd(tmp.attr('pwd')) + '\r\n'; } tmp.find('>fingerprint').each(function () { // FIXME: check namespace at some point media += 'a=fingerprint:' + this.getAttribute('hash'); media += ' ' + $(this).text(); media += '\r\n'; if (this.getAttribute('setup')) { media += 'a=setup:' + this.getAttribute('setup') + '\r\n'; } }); } switch (content.attr('senders')) { case 'initiator': media += 'a=sendonly\r\n'; break; case 'responder': media += 'a=recvonly\r\n'; break; case 'none': media += 'a=inactive\r\n'; break; case 'both': media += 'a=sendrecv\r\n'; break; } media += 'a=mid:' + content.attr('name') + '\r\n'; // // see http://code.google.com/p/libjingle/issues/detail?id=309 -- no spec though // and http://mail.jabber.org/pipermail/jingle/2011-December/001761.html if (desc.find('rtcp-mux').length) { media += 'a=rtcp-mux\r\n'; } if (desc.find('encryption').length) { desc.find('encryption>crypto').each(function () { media += 'a=crypto:' + this.getAttribute('tag'); media += ' ' + this.getAttribute('crypto-suite'); media += ' ' + this.getAttribute('key-params'); if (this.getAttribute('session-params')) { media += ' ' + this.getAttribute('session-params'); } media += '\r\n'; }); } desc.find('payload-type').each(function () { media += SDPUtil.build_rtpmap(this) + '\r\n'; if ($(this).find('>parameter').length) { media += 'a=fmtp:' + this.getAttribute('id') + ' '; media += $(this).find('parameter').map(function () { return (this.getAttribute('name') ? (this.getAttribute('name') + '=') : '') + this.getAttribute('value'); }).get().join('; '); media += '\r\n'; } // xep-0293 media += self.rtcpFbFromJingle($(this), this.getAttribute('id')); }); // xep-0293 media += self.rtcpFbFromJingle(desc, '*'); // xep-0294 tmp = desc.find('>rtp-hdrext[xmlns="urn:xmpp:jingle:apps:rtp:rtp-hdrext:0"]'); tmp.each(function () { media += 'a=extmap:' + this.getAttribute('id') + ' ' + this.getAttribute('uri') + '\r\n'; }); content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]>candidate').each(function () { var protocol = this.getAttribute('protocol'); protocol = (typeof protocol === 'string') ? 
protocol.toLowerCase(): ''; if ((self.removeTcpCandidates && protocol === 'tcp') || (self.removeUdpCandidates && protocol === 'udp')) { return; } media += SDPUtil.candidateFromJingle(this); }); // XEP-0339 handle ssrc-group attributes content.find('description>ssrc-group[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]').each(function() { var semantics = this.getAttribute('semantics'); var ssrcs = $(this).find('>source').map(function() { return this.getAttribute('ssrc'); }).get(); if (ssrcs.length) { media += 'a=ssrc-group:' + semantics + ' ' + ssrcs.join(' ') + '\r\n'; } }); tmp = content.find('description>source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]'); tmp.each(function () { var ssrc = this.getAttribute('ssrc'); $(this).find('>parameter').each(function () { var name = this.getAttribute('name'); var value = this.getAttribute('value'); value = SDPUtil.filter_special_chars(value); media += 'a=ssrc:' + ssrc + ' ' + name; if (value && value.length) media += ':' + value; media += '\r\n'; }); }); return media; }; module.exports = SDP; }).call(this,"/modules/xmpp/SDP.js") },{"./SDPUtil":34,"jitsi-meet-logger":79}],33:[function(require,module,exports){ var SDPUtil = require("./SDPUtil"); function SDPDiffer(mySDP, otherSDP) { this.mySDP = mySDP; this.otherSDP = otherSDP; } /** * Returns map of MediaChannel that contains media contained in * 'mySDP', but not contained in 'otherSdp'. Mapped by channel idx. */ SDPDiffer.prototype.getNewMedia = function() { // this could be useful in Array.prototype. function arrayEquals(array) { // if the other array is a falsy value, return if (!array) return false; // compare lengths - can save a lot of time if (this.length != array.length) return false; for (var i = 0, l=this.length; i < l; i++) { // Check if we have nested arrays if (this[i] instanceof Array && array[i] instanceof Array) { // recurse into the nested arrays if (!this[i].equals(array[i])) return false; } else if (this[i] != array[i]) { // Warning - two different object instances will never be // equal: {x:20} != {x:20} return false; } } return true; } var myMedias = this.mySDP.getMediaSsrcMap(); var othersMedias = this.otherSDP.getMediaSsrcMap(); var newMedia = {}; Object.keys(othersMedias).forEach(function(othersMediaIdx) { var myMedia = myMedias[othersMediaIdx]; var othersMedia = othersMedias[othersMediaIdx]; if(!myMedia && othersMedia) { // Add whole channel newMedia[othersMediaIdx] = othersMedia; return; } // Look for new ssrcs across the channel Object.keys(othersMedia.ssrcs).forEach(function(ssrc) { if(Object.keys(myMedia.ssrcs).indexOf(ssrc) === -1) { // Allocate channel if we've found ssrc that doesn't exist in // our channel if(!newMedia[othersMediaIdx]){ newMedia[othersMediaIdx] = { mediaindex: othersMedia.mediaindex, mid: othersMedia.mid, ssrcs: {}, ssrcGroups: [] }; } newMedia[othersMediaIdx].ssrcs[ssrc] = othersMedia.ssrcs[ssrc]; } }); // Look for new ssrc groups across the channels othersMedia.ssrcGroups.forEach(function(otherSsrcGroup){ // try to match the other ssrc-group with an ssrc-group of ours var matched = false; for (var i = 0; i < myMedia.ssrcGroups.length; i++) { var mySsrcGroup = myMedia.ssrcGroups[i]; if (otherSsrcGroup.semantics == mySsrcGroup.semantics && arrayEquals.apply(otherSsrcGroup.ssrcs, [mySsrcGroup.ssrcs])) { matched = true; break; } } if (!matched) { // Allocate channel if we've found an ssrc-group that doesn't // exist in our channel if(!newMedia[othersMediaIdx]){ newMedia[othersMediaIdx] = { mediaindex: othersMedia.mediaindex, mid: othersMedia.mid, ssrcs: {}, 
ssrcGroups: [] }; } newMedia[othersMediaIdx].ssrcGroups.push(otherSsrcGroup); } }); }); return newMedia; }; /** * TODO: document! */ SDPDiffer.prototype.toJingle = function(modify) { var sdpMediaSsrcs = this.getNewMedia(); var modified = false; Object.keys(sdpMediaSsrcs).forEach(function(mediaindex){ modified = true; var media = sdpMediaSsrcs[mediaindex]; modify.c('content', {name: media.mid}); modify.c('description', {xmlns:'urn:xmpp:jingle:apps:rtp:1', media: media.mid}); // FIXME: not completely sure this operates on blocks and / or handles // different ssrcs correctly // generate sources from lines Object.keys(media.ssrcs).forEach(function(ssrcNum) { var mediaSsrc = media.ssrcs[ssrcNum]; modify.c('source', { xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' }); modify.attrs({ssrc: mediaSsrc.ssrc}); // iterate over ssrc lines mediaSsrc.lines.forEach(function (line) { var idx = line.indexOf(' '); var kv = line.substr(idx + 1); modify.c('parameter'); if (kv.indexOf(':') == -1) { modify.attrs({ name: kv }); } else { var nv = kv.split(':', 2); var name = nv[0]; var value = SDPUtil.filter_special_chars(nv[1]); modify.attrs({ name: name }); modify.attrs({ value: value }); } modify.up(); // end of parameter }); modify.up(); // end of source }); // generate source groups from lines media.ssrcGroups.forEach(function(ssrcGroup) { if (ssrcGroup.ssrcs.length) { modify.c('ssrc-group', { semantics: ssrcGroup.semantics, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' }); ssrcGroup.ssrcs.forEach(function (ssrc) { modify.c('source', { ssrc: ssrc }) .up(); // end of source }); modify.up(); // end of ssrc-group } }); modify.up(); // end of description modify.up(); // end of content }); return modified; }; module.exports = SDPDiffer; },{"./SDPUtil":34}],34:[function(require,module,exports){ (function (__filename){ var logger = require("jitsi-meet-logger").getLogger(__filename); var RTCBrowserType = require("../RTC/RTCBrowserType"); SDPUtil = { filter_special_chars: function (text) { return text.replace(/[\\\/\{,\}\+]/g, ""); }, iceparams: function (mediadesc, sessiondesc) { var data = null; if (SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc) && SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc)) { data = { ufrag: SDPUtil.parse_iceufrag(SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc)), pwd: SDPUtil.parse_icepwd(SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc)) }; } return data; }, parse_iceufrag: function (line) { return line.substring(12); }, build_iceufrag: function (frag) { return 'a=ice-ufrag:' + frag; }, parse_icepwd: function (line) { return line.substring(10); }, build_icepwd: function (pwd) { return 'a=ice-pwd:' + pwd; }, parse_mid: function (line) { return line.substring(6); }, parse_mline: function (line) { var parts = line.substring(2).split(' '), data = {}; data.media = parts.shift(); data.port = parts.shift(); data.proto = parts.shift(); if (parts[parts.length - 1] === '') { // trailing whitespace parts.pop(); } data.fmt = parts; return data; }, build_mline: function (mline) { return 'm=' + mline.media + ' ' + mline.port + ' ' + mline.proto + ' ' + mline.fmt.join(' '); }, parse_rtpmap: function (line) { var parts = line.substring(9).split(' '), data = {}; data.id = parts.shift(); parts = parts[0].split('/'); data.name = parts.shift(); data.clockrate = parts.shift(); data.channels = parts.length ? parts.shift() : '1'; return data; }, /** * Parses SDP line "a=sctpmap:..." and extracts SCTP port from it. * @param line eg. 
"a=sctpmap:5000 webrtc-datachannel" * @returns [SCTP port number, protocol, streams] */ parse_sctpmap: function (line) { var parts = line.substring(10).split(' '); var sctpPort = parts[0]; var protocol = parts[1]; // Stream count is optional var streamCount = parts.length > 2 ? parts[2] : null; return [sctpPort, protocol, streamCount];// SCTP port }, build_rtpmap: function (el) { var line = 'a=rtpmap:' + el.getAttribute('id') + ' ' + el.getAttribute('name') + '/' + el.getAttribute('clockrate'); if (el.getAttribute('channels') && el.getAttribute('channels') != '1') { line += '/' + el.getAttribute('channels'); } return line; }, parse_crypto: function (line) { var parts = line.substring(9).split(' '), data = {}; data.tag = parts.shift(); data['crypto-suite'] = parts.shift(); data['key-params'] = parts.shift(); if (parts.length) { data['session-params'] = parts.join(' '); } return data; }, parse_fingerprint: function (line) { // RFC 4572 var parts = line.substring(14).split(' '), data = {}; data.hash = parts.shift(); data.fingerprint = parts.shift(); // TODO assert that fingerprint satisfies 2UHEX *(":" 2UHEX) ? return data; }, parse_fmtp: function (line) { var parts = line.split(' '), i, key, value, data = []; parts.shift(); parts = parts.join(' ').split(';'); for (i = 0; i < parts.length; i++) { key = parts[i].split('=')[0]; while (key.length && key[0] == ' ') { key = key.substring(1); } value = parts[i].split('=')[1]; if (key && value) { data.push({name: key, value: value}); } else if (key) { // rfc 4733 (DTMF) style stuff data.push({name: '', value: key}); } } return data; }, parse_icecandidate: function (line) { var candidate = {}, elems = line.split(' '); candidate.foundation = elems[0].substring(12); candidate.component = elems[1]; candidate.protocol = elems[2].toLowerCase(); candidate.priority = elems[3]; candidate.ip = elems[4]; candidate.port = elems[5]; // elems[6] => "typ" candidate.type = elems[7]; candidate.generation = 0; // default value, may be overwritten below for (var i = 8; i < elems.length; i += 2) { switch (elems[i]) { case 'raddr': candidate['rel-addr'] = elems[i + 1]; break; case 'rport': candidate['rel-port'] = elems[i + 1]; break; case 'generation': candidate.generation = elems[i + 1]; break; case 'tcptype': candidate.tcptype = elems[i + 1]; break; default: // TODO logger.log('parse_icecandidate not translating "' + elems[i] + '" = "' + elems[i + 1] + '"'); } } candidate.network = '1'; candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random return candidate; }, build_icecandidate: function (cand) { var line = ['a=candidate:' + cand.foundation, cand.component, cand.protocol, cand.priority, cand.ip, cand.port, 'typ', cand.type].join(' '); line += ' '; switch (cand.type) { case 'srflx': case 'prflx': case 'relay': if (cand.hasOwnAttribute('rel-addr') && cand.hasOwnAttribute('rel-port')) { line += 'raddr'; line += ' '; line += cand['rel-addr']; line += ' '; line += 'rport'; line += ' '; line += cand['rel-port']; line += ' '; } break; } if (cand.hasOwnAttribute('tcptype')) { line += 'tcptype'; line += ' '; line += cand.tcptype; line += ' '; } line += 'generation'; line += ' '; line += cand.hasOwnAttribute('generation') ? cand.generation : '0'; return line; }, parse_ssrc: function (desc) { // proprietary mapping of a=ssrc lines // TODO: see "Jingle RTP Source Description" by Juberti and P. 
Thatcher on google docs // and parse according to that var lines = desc.split('\r\n'), data = {}; for (var i = 0; i < lines.length; i++) { if (lines[i].substring(0, 7) == 'a=ssrc:') { var idx = lines[i].indexOf(' '); data[lines[i].substr(idx + 1).split(':', 2)[0]] = lines[i].substr(idx + 1).split(':', 2)[1]; } } return data; }, parse_rtcpfb: function (line) { var parts = line.substr(10).split(' '); var data = {}; data.pt = parts.shift(); data.type = parts.shift(); data.params = parts; return data; }, parse_extmap: function (line) { var parts = line.substr(9).split(' '); var data = {}; data.value = parts.shift(); if (data.value.indexOf('/') != -1) { data.direction = data.value.substr(data.value.indexOf('/') + 1); data.value = data.value.substr(0, data.value.indexOf('/')); } else { data.direction = 'both'; } data.uri = parts.shift(); data.params = parts; return data; }, find_line: function (haystack, needle, sessionpart) { var lines = haystack.split('\r\n'); for (var i = 0; i < lines.length; i++) { if (lines[i].substring(0, needle.length) == needle) { return lines[i]; } } if (!sessionpart) { return false; } // search session part lines = sessionpart.split('\r\n'); for (var j = 0; j < lines.length; j++) { if (lines[j].substring(0, needle.length) == needle) { return lines[j]; } } return false; }, find_lines: function (haystack, needle, sessionpart) { var lines = haystack.split('\r\n'), needles = []; for (var i = 0; i < lines.length; i++) { if (lines[i].substring(0, needle.length) == needle) needles.push(lines[i]); } if (needles.length || !sessionpart) { return needles; } // search session part lines = sessionpart.split('\r\n'); for (var j = 0; j < lines.length; j++) { if (lines[j].substring(0, needle.length) == needle) { needles.push(lines[j]); } } return needles; }, candidateToJingle: function (line) { // a=candidate:2979166662 1 udp 2113937151 192.168.2.100 57698 typ host generation 0 // if (line.indexOf('candidate:') === 0) { line = 'a=' + line; } else if (line.substring(0, 12) != 'a=candidate:') { logger.log('parseCandidate called with a line that is not a candidate line'); logger.log(line); return null; } if (line.substring(line.length - 2) == '\r\n') // chomp it line = line.substring(0, line.length - 2); var candidate = {}, elems = line.split(' '), i; if (elems[6] != 'typ') { logger.log('did not find typ in the right place'); logger.log(line); return null; } candidate.foundation = elems[0].substring(12); candidate.component = elems[1]; candidate.protocol = elems[2].toLowerCase(); candidate.priority = elems[3]; candidate.ip = elems[4]; candidate.port = elems[5]; // elems[6] => "typ" candidate.type = elems[7]; candidate.generation = '0'; // default, may be overwritten below for (i = 8; i < elems.length; i += 2) { switch (elems[i]) { case 'raddr': candidate['rel-addr'] = elems[i + 1]; break; case 'rport': candidate['rel-port'] = elems[i + 1]; break; case 'generation': candidate.generation = elems[i + 1]; break; case 'tcptype': candidate.tcptype = elems[i + 1]; break; default: // TODO logger.log('not translating "' + elems[i] + '" = "' + elems[i + 1] + '"'); } } candidate.network = '1'; candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random return candidate; }, candidateFromJingle: function (cand) { var line = 'a=candidate:'; line += cand.getAttribute('foundation'); line += ' '; line += cand.getAttribute('component'); line += ' '; var protocol = cand.getAttribute('protocol'); // use tcp candidates for FF if 
(RTCBrowserType.isFirefox() && protocol.toLowerCase() == 'ssltcp') { protocol = 'tcp'; } line += protocol; //.toUpperCase(); // chrome M23 doesn't like this line += ' '; line += cand.getAttribute('priority'); line += ' '; line += cand.getAttribute('ip'); line += ' '; line += cand.getAttribute('port'); line += ' '; line += 'typ'; line += ' ' + cand.getAttribute('type'); line += ' '; switch (cand.getAttribute('type')) { case 'srflx': case 'prflx': case 'relay': if (cand.getAttribute('rel-addr') && cand.getAttribute('rel-port')) { line += 'raddr'; line += ' '; line += cand.getAttribute('rel-addr'); line += ' '; line += 'rport'; line += ' '; line += cand.getAttribute('rel-port'); line += ' '; } break; } if (protocol.toLowerCase() == 'tcp') { line += 'tcptype'; line += ' '; line += cand.getAttribute('tcptype'); line += ' '; } line += 'generation'; line += ' '; line += cand.getAttribute('generation') || '0'; return line + '\r\n'; } }; module.exports = SDPUtil; }).call(this,"/modules/xmpp/SDPUtil.js") },{"../RTC/RTCBrowserType":17,"jitsi-meet-logger":79}],35:[function(require,module,exports){ (function (__filename){ /* global $ */ var RTC = require('../RTC/RTC'); var logger = require("jitsi-meet-logger").getLogger(__filename); var RTCBrowserType = require("../RTC/RTCBrowserType.js"); var XMPPEvents = require("../../service/xmpp/XMPPEvents"); function TraceablePeerConnection(ice_config, constraints, session) { var self = this; this.session = session; var RTCPeerConnectionType = null; if (RTCBrowserType.isFirefox()) { RTCPeerConnectionType = mozRTCPeerConnection; } else if (RTCBrowserType.isTemasysPluginUsed()) { RTCPeerConnectionType = RTCPeerConnection; } else { RTCPeerConnectionType = webkitRTCPeerConnection; } this.peerconnection = new RTCPeerConnectionType(ice_config, constraints); this.updateLog = []; this.stats = {}; this.statsinterval = null; this.maxstats = 0; // limit to 300 values, i.e. 
5 minutes; set to 0 to disable var Interop = require('sdp-interop').Interop; this.interop = new Interop(); var Simulcast = require('sdp-simulcast'); this.simulcast = new Simulcast({numOfLayers: 3, explodeRemoteSimulcast: false}); this.eventEmitter = this.session.room.eventEmitter; // override as desired this.trace = function (what, info) { /*logger.warn('WTRACE', what, info); if (info && RTCBrowserType.isIExplorer()) { if (info.length > 1024) { logger.warn('WTRACE', what, info.substr(1024)); } if (info.length > 2048) { logger.warn('WTRACE', what, info.substr(2048)); } }*/ self.updateLog.push({ time: new Date(), type: what, value: info || "" }); }; this.onicecandidate = null; this.peerconnection.onicecandidate = function (event) { // FIXME: this causes stack overflow with Temasys Plugin if (!RTCBrowserType.isTemasysPluginUsed()) self.trace('onicecandidate', JSON.stringify(event.candidate, null, ' ')); if (self.onicecandidate !== null) { self.onicecandidate(event); } }; this.onaddstream = null; this.peerconnection.onaddstream = function (event) { self.trace('onaddstream', event.stream.id); if (self.onaddstream !== null) { self.onaddstream(event); } }; this.onremovestream = null; this.peerconnection.onremovestream = function (event) { self.trace('onremovestream', event.stream.id); if (self.onremovestream !== null) { self.onremovestream(event); } }; this.onsignalingstatechange = null; this.peerconnection.onsignalingstatechange = function (event) { self.trace('onsignalingstatechange', self.signalingState); if (self.onsignalingstatechange !== null) { self.onsignalingstatechange(event); } }; this.oniceconnectionstatechange = null; this.peerconnection.oniceconnectionstatechange = function (event) { self.trace('oniceconnectionstatechange', self.iceConnectionState); if (self.oniceconnectionstatechange !== null) { self.oniceconnectionstatechange(event); } }; this.onnegotiationneeded = null; this.peerconnection.onnegotiationneeded = function (event) { self.trace('onnegotiationneeded'); if (self.onnegotiationneeded !== null) { self.onnegotiationneeded(event); } }; self.ondatachannel = null; this.peerconnection.ondatachannel = function (event) { self.trace('ondatachannel', event); if (self.ondatachannel !== null) { self.ondatachannel(event); } }; // XXX: do all non-firefox browsers which we support also support this? if (!RTCBrowserType.isFirefox() && this.maxstats) { this.statsinterval = window.setInterval(function() { self.peerconnection.getStats(function(stats) { var results = stats.result(); var now = new Date(); for (var i = 0; i < results.length; ++i) { results[i].names().forEach(function (name) { var id = results[i].id + '-' + name; if (!self.stats[id]) { self.stats[id] = { startTime: now, endTime: now, values: [], times: [] }; } self.stats[id].values.push(results[i].stat(name)); self.stats[id].times.push(now.getTime()); if (self.stats[id].values.length > self.maxstats) { self.stats[id].values.shift(); self.stats[id].times.shift(); } self.stats[id].endTime = now; }); } }); }, 1000); } } /** * Returns a string representation of a SessionDescription object. 
*/ var dumpSDP = function(description) { if (typeof description === 'undefined' || description == null) { return ''; } return 'type: ' + description.type + '\r\n' + description.sdp; }; var insertRecvOnlySSRC = function (desc) { if (typeof desc !== 'object' || desc === null || typeof desc.sdp !== 'string') { console.warn('An empty description was passed as an argument.'); return desc; } var transform = require('sdp-transform'); var RandomUtil = require('../util/RandomUtil'); var session = transform.parse(desc.sdp); if (!Array.isArray(session.media)) { return; } var modded = false; session.media.forEach(function (bLine) { if (bLine.direction != 'recvonly') { return; } modded = true; if (!Array.isArray(bLine.ssrcs) || bLine.ssrcs.length === 0) { var ssrc = RandomUtil.randomInt(1, 0xffffffff); bLine.ssrcs = [{ id: ssrc, attribute: 'cname', value: ['recvonly-', ssrc].join('') }]; } }); return (!modded) ? desc : new RTCSessionDescription({ type: desc.type, sdp: transform.write(session), }); }; /** * Takes a SessionDescription object and returns a "normalized" version. * Currently it only takes care of ordering the a=ssrc lines. */ var normalizePlanB = function(desc) { if (typeof desc !== 'object' || desc === null || typeof desc.sdp !== 'string') { logger.warn('An empty description was passed as an argument.'); return desc; } var transform = require('sdp-transform'); var session = transform.parse(desc.sdp); if (typeof session !== 'undefined' && typeof session.media !== 'undefined' && Array.isArray(session.media)) { session.media.forEach(function (mLine) { // Chrome appears to be picky about the order in which a=ssrc lines // are listed in an m-line when rtx is enabled (and thus there are // a=ssrc-group lines with FID semantics). Specifically if we have // "a=ssrc-group:FID S1 S2" and the "a=ssrc:S2" lines appear before // the "a=ssrc:S1" lines, SRD fails. // So, put SSRC which appear as the first SSRC in an FID ssrc-group // first. var firstSsrcs = []; var newSsrcLines = []; if (typeof mLine.ssrcGroups !== 'undefined' && Array.isArray(mLine.ssrcGroups)) { mLine.ssrcGroups.forEach(function (group) { if (typeof group.semantics !== 'undefined' && group.semantics === 'FID') { if (typeof group.ssrcs !== 'undefined') { firstSsrcs.push(Number(group.ssrcs.split(' ')[0])); } } }); } if (typeof mLine.ssrcs !== 'undefined' && Array.isArray(mLine.ssrcs)) { var i; for (i = 0; i 0) { // start gathering stats } */ }; TraceablePeerConnection.prototype.close = function () { this.trace('stop'); if (this.statsinterval !== null) { window.clearInterval(this.statsinterval); this.statsinterval = null; } this.peerconnection.close(); }; TraceablePeerConnection.prototype.createOffer = function (successCallback, failureCallback, constraints) { var self = this; this.trace('createOffer', JSON.stringify(constraints, null, ' ')); this.peerconnection.createOffer( function (offer) { self.trace('createOfferOnSuccess::preTransform', dumpSDP(offer)); // NOTE this is not tested because in meet the focus generates the // offer. // if we're running on FF, transform to Plan B first. 
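/*
 * Illustrative sketch (not part of the library): the freshly created offer is
 * munged in the order shown below before being handed to successCallback
 * (assuming a Unified Plan browser, Chrome-style recvonly handling and
 * simulcast enabled; rawOffer is a placeholder for the browser-generated
 * offer, the other names are the ones used in this file):
 *
 *   var offer = rawOffer;                                 // result of createOffer
 *   offer = self.interop.toPlanB(offer);                  // sdp-interop: Unified Plan -> Plan B
 *   offer = insertRecvOnlySSRC(offer);                    // add a cname SSRC to recvonly m-lines
 *   offer = self.simulcast.mungeLocalDescription(offer);  // sdp-simulcast layers
 *   successCallback(offer);
 */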
if (RTCBrowserType.usesUnifiedPlan()) { offer = self.interop.toPlanB(offer); self.trace('createOfferOnSuccess::postTransform (Plan B)', dumpSDP(offer)); } if (RTCBrowserType.isChrome()) { offer = insertRecvOnlySSRC(offer); self.trace('createOfferOnSuccess::mungeLocalVideoSSRC', dumpSDP(offer)); } if (!self.session.room.options.disableSimulcast && self.simulcast.isSupported()) { offer = self.simulcast.mungeLocalDescription(offer); self.trace('createOfferOnSuccess::postTransform (simulcast)', dumpSDP(offer)); } successCallback(offer); }, function(err) { self.trace('createOfferOnFailure', err); self.eventEmitter.emit(XMPPEvents.CREATE_OFFER_FAILED, err, self.peerconnection); failureCallback(err); }, constraints ); }; TraceablePeerConnection.prototype.createAnswer = function (successCallback, failureCallback, constraints) { var self = this; this.trace('createAnswer', JSON.stringify(constraints, null, ' ')); this.peerconnection.createAnswer( function (answer) { self.trace('createAnswerOnSuccess::preTransform', dumpSDP(answer)); // if we're running on FF, transform to Plan A first. if (RTCBrowserType.usesUnifiedPlan()) { answer = self.interop.toPlanB(answer); self.trace('createAnswerOnSuccess::postTransform (Plan B)', dumpSDP(answer)); } if (RTCBrowserType.isChrome()) { answer = insertRecvOnlySSRC(answer); self.trace('createAnswerOnSuccess::mungeLocalVideoSSRC', dumpSDP(answer)); } if (!self.session.room.options.disableSimulcast && self.simulcast.isSupported()) { answer = self.simulcast.mungeLocalDescription(answer); self.trace('createAnswerOnSuccess::postTransform (simulcast)', dumpSDP(answer)); } successCallback(answer); }, function(err) { self.trace('createAnswerOnFailure', err); self.eventEmitter.emit(XMPPEvents.CREATE_ANSWER_FAILED, err, self.peerconnection); failureCallback(err); }, constraints ); }; TraceablePeerConnection.prototype.addIceCandidate = function (candidate, successCallback, failureCallback) { //var self = this; this.trace('addIceCandidate', JSON.stringify(candidate, null, ' ')); this.peerconnection.addIceCandidate(candidate); /* maybe later this.peerconnection.addIceCandidate(candidate, function () { self.trace('addIceCandidateOnSuccess'); successCallback(); }, function (err) { self.trace('addIceCandidateOnFailure', err); failureCallback(err); } ); */ }; TraceablePeerConnection.prototype.getStats = function(callback, errback) { // TODO: Is this the correct way to handle Opera, Temasys? if (RTCBrowserType.isFirefox()) { // ignore for now... 
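/*
 * Illustrative sketch (not part of the library): the two legacy getStats call
 * shapes bridged below (callback-based APIs of the era; pc stands for a
 * native RTCPeerConnection):
 *
 *   // Firefox: (selector, successCallback, errorCallback)
 *   pc.getStats(null, function (report) { ... }, function (err) { ... });
 *
 *   // Chrome/webkit: single callback receiving an RTCStatsResponse
 *   pc.getStats(function (response) { var results = response.result(); ... });
 */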
if(!errback) errback = function () {}; this.peerconnection.getStats(null, callback, errback); } else { this.peerconnection.getStats(callback); } }; module.exports = TraceablePeerConnection; }).call(this,"/modules/xmpp/TraceablePeerConnection.js") },{"../../service/xmpp/XMPPEvents":137,"../RTC/RTC":16,"../RTC/RTCBrowserType.js":17,"../util/RandomUtil":26,"jitsi-meet-logger":79,"sdp-interop":105,"sdp-simulcast":112,"sdp-transform":115}],36:[function(require,module,exports){ (function (__filename){ /* global $, $iq, Promise, Strophe */ var logger = require("jitsi-meet-logger").getLogger(__filename); var XMPPEvents = require("../../service/xmpp/XMPPEvents"); var AuthenticationEvents = require("../../service/authentication/AuthenticationEvents"); function createExpBackoffTimer(step) { var count = 1; return function (reset) { // Reset call if (reset) { count = 1; return; } // Calculate next timeout var timeout = Math.pow(2, count - 1); count += 1; return timeout * step; }; } function Moderator(roomName, xmpp, emitter, settings) { this.roomName = roomName; this.xmppService = xmpp; this.getNextTimeout = createExpBackoffTimer(1000); this.getNextErrorTimeout = createExpBackoffTimer(1000); // External authentication stuff this.externalAuthEnabled = false; this.settings = settings; // Sip gateway can be enabled by configuring Jigasi host in config.js or // it will be enabled automatically if focus detects the component through // service discovery. this.sipGatewayEnabled = this.xmppService.options.hosts && this.xmppService.options.hosts.call_control !== undefined; this.eventEmitter = emitter; this.connection = this.xmppService.connection; this.focusUserJid; //FIXME: // Message listener that talks to POPUP window function listener(event) { if (event.data && event.data.sessionId) { if (event.origin !== window.location.origin) { logger.warn("Ignoring sessionId from different origin: " + event.origin); return; } localStorage.setItem('sessionId', event.data.sessionId); // After popup is closed we will authenticate } } // Register if (window.addEventListener) { window.addEventListener("message", listener, false); } else { window.attachEvent("onmessage", listener); } } Moderator.prototype.isExternalAuthEnabled = function () { return this.externalAuthEnabled; }; Moderator.prototype.isSipGatewayEnabled = function () { return this.sipGatewayEnabled; }; Moderator.prototype.onMucMemberLeft = function (jid) { logger.info("Someone left is it focus ? " + jid); var resource = Strophe.getResourceFromJid(jid); if (resource === 'focus' && !this.xmppService.sessionTerminated) { logger.info( "Focus has left the room - leaving conference"); //hangUp(); // We'd rather reload to have everything re-initialized //FIXME: show some message before reload this.eventEmitter.emit(XMPPEvents.FOCUS_LEFT); } }; Moderator.prototype.setFocusUserJid = function (focusJid) { if (!this.focusUserJid) { this.focusUserJid = focusJid; logger.info("Focus jid set to: " + this.focusUserJid); } }; Moderator.prototype.getFocusUserJid = function () { return this.focusUserJid; }; Moderator.prototype.getFocusComponent = function () { // Get focus component address var focusComponent = this.xmppService.options.hosts.focus; // If not specified use default: 'focus.domain' if (!focusComponent) { focusComponent = 'focus.' 
+ this.xmppService.options.hosts.domain; } return focusComponent; }; Moderator.prototype.createConferenceIq = function () { // Generate create conference IQ var elem = $iq({to: this.getFocusComponent(), type: 'set'}); // Session Id used for authentication var sessionId = localStorage.getItem('sessionId'); var machineUID = this.settings.getSettings().uid; logger.info( "Session ID: " + sessionId + " machine UID: " + machineUID); elem.c('conference', { xmlns: 'http://jitsi.org/protocol/focus', room: this.roomName, 'machine-uid': machineUID }); if (sessionId) { elem.attrs({ 'session-id': sessionId}); } if (this.xmppService.options.hosts.bridge !== undefined) { elem.c( 'property', { name: 'bridge', value: this.xmppService.options.hosts.bridge }).up(); } if (this.xmppService.options.enforcedBridge !== undefined) { elem.c( 'property', { name: 'enforcedBridge', value: this.xmppService.options.enforcedBridge }).up(); } // Tell the focus we have Jigasi configured if (this.xmppService.options.hosts.call_control !== undefined) { elem.c( 'property', { name: 'call_control', value: this.xmppService.options.hosts.call_control }).up(); } if (this.xmppService.options.channelLastN !== undefined) { elem.c( 'property', { name: 'channelLastN', value: this.xmppService.options.channelLastN }).up(); } if (this.xmppService.options.adaptiveLastN !== undefined) { elem.c( 'property', { name: 'adaptiveLastN', value: this.xmppService.options.adaptiveLastN }).up(); } if (this.xmppService.options.adaptiveSimulcast !== undefined) { elem.c( 'property', { name: 'adaptiveSimulcast', value: this.xmppService.options.adaptiveSimulcast }).up(); } if (this.xmppService.options.openSctp !== undefined) { elem.c( 'property', { name: 'openSctp', value: this.xmppService.options.openSctp }).up(); } if (this.xmppService.options.startAudioMuted !== undefined) { elem.c( 'property', { name: 'startAudioMuted', value: this.xmppService.options.startAudioMuted }).up(); } if (this.xmppService.options.startVideoMuted !== undefined) { elem.c( 'property', { name: 'startVideoMuted', value: this.xmppService.options.startVideoMuted }).up(); } elem.c( 'property', { name: 'simulcastMode', value: 'rewriting' }).up(); elem.up(); return elem; }; Moderator.prototype.parseSessionId = function (resultIq) { var sessionId = $(resultIq).find('conference').attr('session-id'); if (sessionId) { logger.info('Received sessionId: ' + sessionId); localStorage.setItem('sessionId', sessionId); } }; Moderator.prototype.parseConfigOptions = function (resultIq) { this.setFocusUserJid( $(resultIq).find('conference').attr('focusjid')); var authenticationEnabled = $(resultIq).find( '>conference>property' + '[name=\'authentication\'][value=\'true\']').length > 0; logger.info("Authentication enabled: " + authenticationEnabled); this.externalAuthEnabled = $(resultIq).find( '>conference>property' + '[name=\'externalAuth\'][value=\'true\']').length > 0; console.info( 'External authentication enabled: ' + this.externalAuthEnabled); if (!this.externalAuthEnabled) { // We expect to receive sessionId in 'internal' authentication mode this.parseSessionId(resultIq); } var authIdentity = $(resultIq).find('>conference').attr('identity'); this.eventEmitter.emit(AuthenticationEvents.IDENTITY_UPDATED, authenticationEnabled, authIdentity); // Check if focus has auto-detected Jigasi component(this will be also // included if we have passed our host from the config) if ($(resultIq).find( '>conference>property' + '[name=\'sipGatewayEnabled\'][value=\'true\']').length) { this.sipGatewayEnabled = 
true; } logger.info("Sip gateway enabled: " + this.sipGatewayEnabled); }; // FIXME = we need to show the fact that we're waiting for the focus // to the user(or that focus is not available) Moderator.prototype.allocateConferenceFocus = function (callback) { // Try to use focus user JID from the config this.setFocusUserJid(this.xmppService.options.focusUserJid); // Send create conference IQ var iq = this.createConferenceIq(); var self = this; this.connection.sendIQ( iq, function (result) { // Setup config options self.parseConfigOptions(result); if ('true' === $(result).find('conference').attr('ready')) { // Reset both timers self.getNextTimeout(true); self.getNextErrorTimeout(true); // Exec callback callback(); } else { var waitMs = self.getNextTimeout(); logger.info("Waiting for the focus... " + waitMs); // Reset error timeout self.getNextErrorTimeout(true); window.setTimeout( function () { self.allocateConferenceFocus(callback); }, waitMs); } }, function (error) { // Invalid session ? remove and try again // without session ID to get a new one var invalidSession = $(error).find('>error>session-invalid').length; if (invalidSession) { logger.info("Session expired! - removing"); localStorage.removeItem("sessionId"); } if ($(error).find('>error>graceful-shutdown').length) { self.eventEmitter.emit(XMPPEvents.GRACEFUL_SHUTDOWN); return; } // Check for error returned by the reservation system var reservationErr = $(error).find('>error>reservation-error'); if (reservationErr.length) { // Trigger error event var errorCode = reservationErr.attr('error-code'); var errorMsg; if ($(error).find('>error>text')) { errorMsg = $(error).find('>error>text').text(); } self.eventEmitter.emit( XMPPEvents.RESERVATION_ERROR, errorCode, errorMsg); return; } // Not authorized to create new room if ($(error).find('>error>not-authorized').length) { logger.warn("Unauthorized to start the conference", error); var toDomain = Strophe.getDomainFromJid(error.getAttribute('to')); if (toDomain !== this.xmppService.options.hosts.anonymousdomain) { //FIXME: "is external" should come either from // the focus or config.js self.externalAuthEnabled = true; } self.eventEmitter.emit( XMPPEvents.AUTHENTICATION_REQUIRED, function () { self.allocateConferenceFocus( callback); }); return; } var waitMs = self.getNextErrorTimeout(); logger.error("Focus error, retry after " + waitMs, error); // Show message var focusComponent = self.getFocusComponent(); var retrySec = waitMs / 1000; //FIXME: message is duplicated ? 
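/*
 * Illustrative sketch (not part of the library): retry delays come from the
 * createExpBackoffTimer(1000) helpers defined above, so successive waits grow
 * as powers of two until the timer is reset with a truthy argument:
 *
 *   var next = createExpBackoffTimer(1000);   // step of 1000 ms
 *   next();      // -> 1000
 *   next();      // -> 2000
 *   next();      // -> 4000
 *   next(true);  // reset (returns undefined)
 *   next();      // -> 1000 again
 */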
// Do not show in case of session invalid // which means just a retry if (!invalidSession) { self.eventEmitter.emit(XMPPEvents.FOCUS_DISCONNECTED, focusComponent, retrySec); } // Reset response timeout self.getNextTimeout(true); window.setTimeout( function () { self.allocateConferenceFocus(callback); }, waitMs); } ); }; Moderator.prototype.authenticate = function () { var self = this; return new Promise(function (resolve, reject) { self.connection.sendIQ( self.createConferenceIq(), function (result) { self.parseSessionId(result); resolve(); }, function (error) { var code = $(error).find('>error').attr('code'); reject(error, code); } ); }); }; Moderator.prototype.getLoginUrl = function (urlCallback, failureCallback) { var iq = $iq({to: this.getFocusComponent(), type: 'get'}); iq.c('login-url', { xmlns: 'http://jitsi.org/protocol/focus', room: this.roomName, 'machine-uid': this.settings.getSettings().uid }); this.connection.sendIQ( iq, function (result) { var url = $(result).find('login-url').attr('url'); url = url = decodeURIComponent(url); if (url) { logger.info("Got auth url: " + url); urlCallback(url); } else { logger.error( "Failed to get auth url from the focus", result); failureCallback(result); } }, function (error) { logger.error("Get auth url error", error); failureCallback(error); } ); }; Moderator.prototype.getPopupLoginUrl = function (urlCallback, failureCallback) { var iq = $iq({to: this.getFocusComponent(), type: 'get'}); iq.c('login-url', { xmlns: 'http://jitsi.org/protocol/focus', room: this.roomName, 'machine-uid': this.settings.getSettings().uid, popup: true }); this.connection.sendIQ( iq, function (result) { var url = $(result).find('login-url').attr('url'); url = url = decodeURIComponent(url); if (url) { logger.info("Got POPUP auth url: " + url); urlCallback(url); } else { logger.error( "Failed to get POPUP auth url from the focus", result); failureCallback(result); } }, function (error) { logger.error('Get POPUP auth url error', error); failureCallback(error); } ); }; Moderator.prototype.logout = function (callback) { var iq = $iq({to: this.getFocusComponent(), type: 'set'}); var sessionId = localStorage.getItem('sessionId'); if (!sessionId) { callback(); return; } iq.c('logout', { xmlns: 'http://jitsi.org/protocol/focus', 'session-id': sessionId }); this.connection.sendIQ( iq, function (result) { var logoutUrl = $(result).find('logout').attr('logout-url'); if (logoutUrl) { logoutUrl = decodeURIComponent(logoutUrl); } logger.info("Log out OK, url: " + logoutUrl, result); localStorage.removeItem('sessionId'); callback(logoutUrl); }, function (error) { logger.error("Logout error", error); } ); }; module.exports = Moderator; }).call(this,"/modules/xmpp/moderator.js") },{"../../service/authentication/AuthenticationEvents":133,"../../service/xmpp/XMPPEvents":137,"jitsi-meet-logger":79}],37:[function(require,module,exports){ (function (__filename){ /* global $, $iq, config, connection, focusMucJid, messageHandler, Toolbar, Util, Promise */ var XMPPEvents = require("../../service/xmpp/XMPPEvents"); var logger = require("jitsi-meet-logger").getLogger(__filename); function Recording(type, eventEmitter, connection, focusMucJid, jirecon, roomjid) { this.eventEmitter = eventEmitter; this.connection = connection; this.state = "off"; this.focusMucJid = focusMucJid; this.jirecon = jirecon; this.url = null; this.type = type; this._isSupported = ((type === Recording.types.JIBRI) || (type === Recording.types.JIRECON && !this.jirecon))? 
false : true; /** * The ID of the jirecon recording session. Jirecon generates it when we * initially start recording, and it needs to be used in subsequent requests * to jirecon. */ this.jireconRid = null; this.roomjid = roomjid; } Recording.types = { COLIBRI: "colibri", JIRECON: "jirecon", JIBRI: "jibri" }; Recording.prototype.handleJibriPresence = function (jibri) { var attributes = jibri.attributes; if(!attributes) return; this._isSupported = (attributes.status && attributes.status !== "undefined"); if(this._isSupported) { this.url = attributes.url || null; this.state = attributes.status || "off"; } this.eventEmitter.emit(XMPPEvents.RECORDING_STATE_CHANGED); }; Recording.prototype.setRecordingJibri = function (state, callback, errCallback, options) { if (state == this.state){ errCallback(new Error("Invalid state!")); } options = options || {}; // FIXME jibri does not accept IQ without 'url' attribute set ? var iq = $iq({to: this.focusMucJid, type: 'set'}) .c('jibri', { "xmlns": 'http://jitsi.org/protocol/jibri', "action": (state === 'on') ? 'start' : 'stop', "streamid": options.streamId, "follow-entity": options.followEntity }).up(); logger.log('Set jibri recording: '+state, iq.nodeTree); console.log(iq.nodeTree); this.connection.sendIQ( iq, function (result) { callback($(result).find('jibri').attr('state'), $(result).find('jibri').attr('url')); }, function (error) { logger.log('Failed to start recording, error: ', error); errCallback(error); }); }; Recording.prototype.setRecordingJirecon = function (state, callback, errCallback, options) { if (state == this.state){ errCallback(new Error("Invalid state!")); } var iq = $iq({to: this.jirecon, type: 'set'}) .c('recording', {xmlns: 'http://jitsi.org/protocol/jirecon', action: (state === 'on') ? 'start' : 'stop', mucjid: this.roomjid}); if (state === 'off'){ iq.attrs({rid: this.jireconRid}); } console.log('Start recording'); var self = this; this.connection.sendIQ( iq, function (result) { // TODO wait for an IQ with the real status, since this is // provisional? self.jireconRid = $(result).find('recording').attr('rid'); console.log('Recording ' + ((state === 'on') ? 'started' : 'stopped') + '(jirecon)' + result); self.state = state; if (state === 'off'){ self.jireconRid = null; } callback(state); }, function (error) { console.log('Failed to start recording, error: ', error); errCallback(error); }); }; // Sends a COLIBRI message which enables or disables (according to 'state') // the recording on the bridge. Waits for the result IQ and calls 'callback' // with the new recording state, according to the IQ. Recording.prototype.setRecordingColibri = function (state, callback, errCallback, options) { var elem = $iq({to: this.focusMucJid, type: 'set'}); elem.c('conference', { xmlns: 'http://jitsi.org/protocol/colibri' }); elem.c('recording', {state: state, token: options.token}); var self = this; this.connection.sendIQ(elem, function (result) { console.log('Set recording "', state, '". 
Result:', result); var recordingElem = $(result).find('>conference>recording'); var newState = recordingElem.attr('state'); self.state = newState; callback(newState); if (newState === 'pending') { self.connection.addHandler(function(iq){ var state = $(iq).find('recording').attr('state'); if (state) { self.state = newState; callback(state); } }, 'http://jitsi.org/protocol/colibri', 'iq', null, null, null); } }, function (error) { console.warn(error); errCallback(error); } ); }; Recording.prototype.setRecording = function (state, callback, errCallback, options) { switch(this.type){ case Recording.types.JIRECON: this.setRecordingJirecon(state, callback, errCallback, options); break; case Recording.types.COLIBRI: this.setRecordingColibri(state, callback, errCallback, options); break; case Recording.types.JIBRI: this.setRecordingJibri(state, callback, errCallback, options); break; default: console.error("Unknown recording type!"); return; } }; /** *Starts/stops the recording * @param token token for authentication * @param statusChangeHandler {function} receives the new status as argument. */ Recording.prototype.toggleRecording = function (options, statusChangeHandler) { if ((!options.token && this.type === Recording.types.COLIBRI) || (!options.streamId && this.type === Recording.types.JIBRI)){ statusChangeHandler("error", new Error("No token passed!")); logger.error("No token passed!"); return; } var oldState = this.state; var newState = (oldState === 'off' || !oldState) ? 'on' : 'off'; var self = this; this.setRecording(newState, function (state, url) { logger.log("New recording state: ", state); if (state && state !== oldState) { self.state = state; self.url = url; statusChangeHandler(state); } else { statusChangeHandler("error", new Error("Status not changed!")); } }, function (error) { statusChangeHandler("error", error); }, options); }; /** * Returns true if the recording is supproted and false if not. */ Recording.prototype.isSupported = function () { return this._isSupported; }; /** * Returns null if the recording is not supported, "on" if the recording started * and "off" if the recording is not started. */ Recording.prototype.getState = function () { return this.state; }; /** * Returns the url of the recorded video. 
*/ Recording.prototype.getURL = function () { return this.url; }; module.exports = Recording; }).call(this,"/modules/xmpp/recording.js") },{"../../service/xmpp/XMPPEvents":137,"jitsi-meet-logger":79}],38:[function(require,module,exports){ (function (__filename){ /* jshint -W117 */ /* a simple MUC connection plugin * can only handle a single MUC room */ var logger = require("jitsi-meet-logger").getLogger(__filename); var ChatRoom = require("./ChatRoom"); module.exports = function(XMPP) { Strophe.addConnectionPlugin('emuc', { connection: null, rooms: {},//map with the rooms init: function (conn) { this.connection = conn; // add handlers (just once) this.connection.addHandler(this.onPresence.bind(this), null, 'presence', null, null, null, null); this.connection.addHandler(this.onPresenceUnavailable.bind(this), null, 'presence', 'unavailable', null); this.connection.addHandler(this.onPresenceError.bind(this), null, 'presence', 'error', null); this.connection.addHandler(this.onMessage.bind(this), null, 'message', null, null); this.connection.addHandler(this.onMute.bind(this), 'http://jitsi.org/jitmeet/audio', 'iq', 'set',null,null); }, createRoom: function (jid, password, options, settings) { var roomJid = Strophe.getBareJidFromJid(jid); if (this.rooms[roomJid]) { logger.error("You are already in the room!"); return; } this.rooms[roomJid] = new ChatRoom(this.connection, jid, password, XMPP, options, settings); return this.rooms[roomJid]; }, doLeave: function (jid) { this.rooms[jid].doLeave(); delete this.rooms[jid]; }, onPresence: function (pres) { var from = pres.getAttribute('from'); // What is this for? A workaround for something? if (pres.getAttribute('type')) { return true; } var room = this.rooms[Strophe.getBareJidFromJid(from)]; if(!room) return; // Parse status. if ($(pres).find('>x[xmlns="http://jabber.org/protocol/muc#user"]>status[code="201"]').length) { room.createNonAnonymousRoom(); } room.onPresence(pres); return true; }, onPresenceUnavailable: function (pres) { var from = pres.getAttribute('from'); var room = this.rooms[Strophe.getBareJidFromJid(from)]; if(!room) return; room.onPresenceUnavailable(pres, from); return true; }, onPresenceError: function (pres) { var from = pres.getAttribute('from'); var room = this.rooms[Strophe.getBareJidFromJid(from)]; if(!room) return; room.onPresenceError(pres, from); return true; }, onMessage: function (msg) { // FIXME: this is a hack. 
but jingle on muc makes nickchanges hard var from = msg.getAttribute('from'); var room = this.rooms[Strophe.getBareJidFromJid(from)]; if(!room) return; room.onMessage(msg, from); return true; }, setJingleSession: function (from, session) { var room = this.rooms[Strophe.getBareJidFromJid(from)]; if(!room) return; room.setJingleSession(session); }, onMute: function(iq) { var from = iq.getAttribute('from'); var room = this.rooms[Strophe.getBareJidFromJid(from)]; if(!room) return; room.onMute(iq); return true; } }); }; }).call(this,"/modules/xmpp/strophe.emuc.js") },{"./ChatRoom":29,"jitsi-meet-logger":79}],39:[function(require,module,exports){ (function (__filename){ /* jshint -W117 */ var logger = require("jitsi-meet-logger").getLogger(__filename); var JingleSession = require("./JingleSessionPC"); var XMPPEvents = require("../../service/xmpp/XMPPEvents"); var RTCBrowserType = require("../RTC/RTCBrowserType"); module.exports = function(XMPP, eventEmitter) { Strophe.addConnectionPlugin('jingle', { connection: null, sessions: {}, jid2session: {}, ice_config: {iceServers: []}, media_constraints: { mandatory: { 'OfferToReceiveAudio': true, 'OfferToReceiveVideo': true } // MozDontOfferDataChannel: true when this is firefox }, init: function (conn) { this.connection = conn; if (this.connection.disco) { // http://xmpp.org/extensions/xep-0167.html#support // http://xmpp.org/extensions/xep-0176.html#support this.connection.disco.addFeature('urn:xmpp:jingle:1'); this.connection.disco.addFeature('urn:xmpp:jingle:apps:rtp:1'); this.connection.disco.addFeature('urn:xmpp:jingle:transports:ice-udp:1'); this.connection.disco.addFeature('urn:xmpp:jingle:apps:dtls:0'); this.connection.disco.addFeature('urn:xmpp:jingle:transports:dtls-sctp:1'); this.connection.disco.addFeature('urn:xmpp:jingle:apps:rtp:audio'); this.connection.disco.addFeature('urn:xmpp:jingle:apps:rtp:video'); if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera() || RTCBrowserType.isTemasysPluginUsed()) { this.connection.disco.addFeature('urn:ietf:rfc:4588'); } // this is dealt with by SDP O/A so we don't need to announce this //this.connection.disco.addFeature('urn:xmpp:jingle:apps:rtp:rtcp-fb:0'); // XEP-0293 //this.connection.disco.addFeature('urn:xmpp:jingle:apps:rtp:rtp-hdrext:0'); // XEP-0294 this.connection.disco.addFeature('urn:ietf:rfc:5761'); // rtcp-mux this.connection.disco.addFeature('urn:ietf:rfc:5888'); // a=group, e.g. 
bundle //this.connection.disco.addFeature('urn:ietf:rfc:5576'); // a=ssrc } this.connection.addHandler(this.onJingle.bind(this), 'urn:xmpp:jingle:1', 'iq', 'set', null, null); }, onJingle: function (iq) { var sid = $(iq).find('jingle').attr('sid'); var action = $(iq).find('jingle').attr('action'); var fromJid = iq.getAttribute('from'); // send ack first var ack = $iq({type: 'result', to: fromJid, id: iq.getAttribute('id') }); logger.log('on jingle ' + action + ' from ' + fromJid, iq); var sess = this.sessions[sid]; if ('session-initiate' != action) { if (sess === null) { ack.type = 'error'; ack.c('error', {type: 'cancel'}) .c('item-not-found', {xmlns: 'urn:ietf:params:xml:ns:xmpp-stanzas'}).up() .c('unknown-session', {xmlns: 'urn:xmpp:jingle:errors:1'}); this.connection.send(ack); return true; } // local jid is not checked if (fromJid != sess.peerjid) { logger.warn('jid mismatch for session id', sid, fromJid, sess.peerjid); ack.type = 'error'; ack.c('error', {type: 'cancel'}) .c('item-not-found', {xmlns: 'urn:ietf:params:xml:ns:xmpp-stanzas'}).up() .c('unknown-session', {xmlns: 'urn:xmpp:jingle:errors:1'}); this.connection.send(ack); return true; } } else if (sess !== undefined) { // existing session with same session id // this might be out-of-order if the sess.peerjid is the same as from ack.type = 'error'; ack.c('error', {type: 'cancel'}) .c('service-unavailable', {xmlns: 'urn:ietf:params:xml:ns:xmpp-stanzas'}).up(); logger.warn('duplicate session id', sid); this.connection.send(ack); return true; } // FIXME: check for a defined action this.connection.send(ack); // see http://xmpp.org/extensions/xep-0166.html#concepts-session switch (action) { case 'session-initiate': console.log("(TIME) received session-initiate:\t", window.performance.now()); var startMuted = $(iq).find('jingle>startmuted'); if (startMuted && startMuted.length > 0) { var audioMuted = startMuted.attr("audio"); var videoMuted = startMuted.attr("video"); eventEmitter.emit(XMPPEvents.START_MUTED_FROM_FOCUS, audioMuted === "true", videoMuted === "true"); } sess = new JingleSession( $(iq).attr('to'), $(iq).find('jingle').attr('sid'), this.connection, XMPP); // configure session var fromBareJid = Strophe.getBareJidFromJid(fromJid); this.connection.emuc.setJingleSession(fromBareJid, sess); sess.media_constraints = this.media_constraints; sess.ice_config = this.ice_config; sess.initialize(fromJid, false); eventEmitter.emit(XMPPEvents.CALL_INCOMING, sess); // FIXME: setRemoteDescription should only be done when this call is to be accepted sess.setOffer($(iq).find('>jingle')); this.sessions[sess.sid] = sess; this.jid2session[sess.peerjid] = sess; // the callback should either // .sendAnswer and .accept // or .sendTerminate -- not necessarily synchronous sess.sendAnswer(); sess.accept(); break; case 'session-accept': sess.setAnswer($(iq).find('>jingle')); sess.accept(); break; case 'session-terminate': // If this is not the focus sending the terminate, we have // nothing more to do here. 
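/*
 * Illustrative sketch (not part of the library): a terminate request of the
 * rough XEP-0166 shape handled by this branch (the exact stanza a given
 * deployment sends may differ; addresses and ids are placeholders):
 *
 *   <iq type="set" from="focus@example.com/focus" id="...">
 *     <jingle xmlns="urn:xmpp:jingle:1" action="session-terminate" sid="...">
 *       <reason><success/><text>Call ended</text></reason>
 *     </jingle>
 *   </iq>
 *
 * The code below reads the tagName of the first child of <reason/> and the
 * <text/> body when triggering 'callterminated.jingle'.
 */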
if (Object.keys(this.sessions).length < 1 || !(this.sessions[Object.keys(this.sessions)[0]] instanceof JingleSession)) { break; } logger.log('terminating...', sess.sid); sess.terminate(); this.terminate(sess.sid); if ($(iq).find('>jingle>reason').length) { $(document).trigger('callterminated.jingle', [ sess.sid, sess.peerjid, $(iq).find('>jingle>reason>:first')[0].tagName, $(iq).find('>jingle>reason>text').text() ]); } else { $(document).trigger('callterminated.jingle', [sess.sid, sess.peerjid]); } break; case 'transport-info': sess.addIceCandidate($(iq).find('>jingle>content')); break; case 'session-info': var affected; if ($(iq).find('>jingle>ringing[xmlns="urn:xmpp:jingle:apps:rtp:info:1"]').length) { $(document).trigger('ringing.jingle', [sess.sid]); } else if ($(iq).find('>jingle>mute[xmlns="urn:xmpp:jingle:apps:rtp:info:1"]').length) { affected = $(iq).find('>jingle>mute[xmlns="urn:xmpp:jingle:apps:rtp:info:1"]').attr('name'); $(document).trigger('mute.jingle', [sess.sid, affected]); } else if ($(iq).find('>jingle>unmute[xmlns="urn:xmpp:jingle:apps:rtp:info:1"]').length) { affected = $(iq).find('>jingle>unmute[xmlns="urn:xmpp:jingle:apps:rtp:info:1"]').attr('name'); $(document).trigger('unmute.jingle', [sess.sid, affected]); } break; case 'addsource': // FIXME: proprietary, un-jingleish case 'source-add': // FIXME: proprietary sess.addSource($(iq).find('>jingle>content')); break; case 'removesource': // FIXME: proprietary, un-jingleish case 'source-remove': // FIXME: proprietary sess.removeSource($(iq).find('>jingle>content')); break; default: logger.warn('jingle action not implemented', action); break; } return true; }, terminate: function (sid, reason, text) { // terminate by sessionid (or all sessions) if (sid === null || sid === undefined) { for (sid in this.sessions) { if (this.sessions[sid].state != 'ended') { this.sessions[sid].sendTerminate(reason || (!this.sessions[sid].active()) ? 'cancel' : null, text); this.sessions[sid].terminate(); } delete this.jid2session[this.sessions[sid].peerjid]; delete this.sessions[sid]; } } else if (this.sessions.hasOwnProperty(sid)) { if (this.sessions[sid].state != 'ended') { this.sessions[sid].sendTerminate(reason || (!this.sessions[sid].active()) ? 'cancel' : null, text); this.sessions[sid].terminate(); } delete this.jid2session[this.sessions[sid].peerjid]; delete this.sessions[sid]; } }, getStunAndTurnCredentials: function () { // get stun and turn configuration from server via xep-0215 // uses time-limited credentials as described in // http://tools.ietf.org/html/draft-uberti-behave-turn-rest-00 // // see https://code.google.com/p/prosody-modules/source/browse/mod_turncredentials/mod_turncredentials.lua // for a prosody module which implements this // // currently, this doesn't work with updateIce and therefore credentials with a long // validity have to be fetched before creating the peerconnection // TODO: implement refresh via updateIce as described in // https://code.google.com/p/webrtc/issues/detail?id=1650 var self = this; this.connection.sendIQ( $iq({type: 'get', to: this.connection.domain}) .c('services', {xmlns: 'urn:xmpp:extdisco:1'}).c('service', {host: 'turn.' 
+ this.connection.domain}), function (res) { var iceservers = []; $(res).find('>services>service').each(function (idx, el) { el = $(el); var dict = {}; var type = el.attr('type'); switch (type) { case 'stun': dict.url = 'stun:' + el.attr('host'); if (el.attr('port')) { dict.url += ':' + el.attr('port'); } iceservers.push(dict); break; case 'turn': case 'turns': dict.url = type + ':'; if (el.attr('username')) { // https://code.google.com/p/webrtc/issues/detail?id=1508 if (navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./) && parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10) < 28) { dict.url += el.attr('username') + '@'; } else { dict.username = el.attr('username'); // only works in M28 } } dict.url += el.attr('host'); if (el.attr('port') && el.attr('port') != '3478') { dict.url += ':' + el.attr('port'); } if (el.attr('transport') && el.attr('transport') != 'udp') { dict.url += '?transport=' + el.attr('transport'); } if (el.attr('password')) { dict.credential = el.attr('password'); } iceservers.push(dict); break; } }); self.ice_config.iceServers = iceservers; }, function (err) { logger.warn('getting turn credentials failed', err); logger.warn('is mod_turncredentials or similar installed?'); } ); // implement push? }, /** * Returns the data saved in 'updateLog' in a format to be logged. */ getLog: function () { var data = {}; var self = this; Object.keys(this.sessions).forEach(function (sid) { var session = self.sessions[sid]; if (session.peerconnection && session.peerconnection.updateLog) { // FIXME: should probably be a .dump call data["jingle_" + session.sid] = { updateLog: session.peerconnection.updateLog, stats: session.peerconnection.stats, url: window.location.href }; } }); return data; } }); }; }).call(this,"/modules/xmpp/strophe.jingle.js") },{"../../service/xmpp/XMPPEvents":137,"../RTC/RTCBrowserType":17,"./JingleSessionPC":31,"jitsi-meet-logger":79}],40:[function(require,module,exports){ /* global Strophe */ module.exports = function () { Strophe.addConnectionPlugin('logger', { // logs raw stanzas and makes them available for download as JSON connection: null, log: [], init: function (conn) { this.connection = conn; this.connection.rawInput = this.log_incoming.bind(this); this.connection.rawOutput = this.log_outgoing.bind(this); }, log_incoming: function (stanza) { this.log.push([new Date().getTime(), 'incoming', stanza]); }, log_outgoing: function (stanza) { this.log.push([new Date().getTime(), 'outgoing', stanza]); } }); }; },{}],41:[function(require,module,exports){ (function (__filename){ /* global $, $iq, Strophe */ var logger = require("jitsi-meet-logger").getLogger(__filename); var XMPPEvents = require("../../service/xmpp/XMPPEvents"); /** * Ping every 10 sec */ var PING_INTERVAL = 10000; /** * Ping timeout error after 15 sec of waiting. */ var PING_TIMEOUT = 15000; /** * Will close the connection after 3 consecutive ping errors. */ var PING_THRESHOLD = 3; /** * XEP-0199 ping plugin. * * Registers "urn:xmpp:ping" namespace under Strophe.NS.PING. */ module.exports = function (XMPP, eventEmitter) { Strophe.addConnectionPlugin('ping', { connection: null, failedPings: 0, /** * Initializes the plugin. Method called by Strophe. * @param connection Strophe connection instance. */ init: function (connection) { this.connection = connection; Strophe.addNamespace('PING', "urn:xmpp:ping"); }, /** * Sends "ping" to given jid * @param jid the JID to which ping request will be sent. * @param success callback called on success. 
* @param error callback called on error. * @param timeout ms how long are we going to wait for the response. On * timeout error callback is called with undefined error * argument. */ ping: function (jid, success, error, timeout) { var iq = $iq({type: 'get', to: jid}); iq.c('ping', {xmlns: Strophe.NS.PING}); this.connection.sendIQ(iq, success, error, timeout); }, /** * Checks if given jid has XEP-0199 ping support. * @param jid the JID to be checked for ping support. * @param callback function with boolean argument which will be * true if XEP-0199 ping is supported by given jid */ hasPingSupport: function (jid, callback) { this.connection.disco.info( jid, null, function (result) { var ping = $(result).find('>>feature[var="urn:xmpp:ping"]'); callback(ping.length > 0); }, function (error) { logger.error("Ping feature discovery error", error); callback(false); } ); }, /** * Starts to send ping in given interval to specified remote JID. * This plugin supports only one such task and stopInterval * must be called before starting a new one. * @param remoteJid remote JID to which ping requests will be sent to. * @param interval task interval in ms. */ startInterval: function (remoteJid, interval) { if (this.intervalId) { logger.error("Ping task scheduled already"); return; } if (!interval) interval = PING_INTERVAL; var self = this; this.intervalId = window.setInterval(function () { self.ping(remoteJid, function (result) { // Ping OK self.failedPings = 0; }, function (error) { self.failedPings += 1; logger.error( "Ping " + (error ? "error" : "timeout"), error); if (self.failedPings >= PING_THRESHOLD) { self.connection.disconnect(); } }, PING_TIMEOUT); }, interval); logger.info("XMPP pings will be sent every " + interval + " ms"); }, /** * Stops current "ping" interval task. 
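 * Usage sketch (illustrative, the JID is assumed): pair this with startInterval,
 * e.g. connection.ping.startInterval('example.org'); ... connection.ping.stopInterval();
 * since only one ping task may be scheduled at a time, as noted above.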
*/ stopInterval: function () { if (this.intervalId) { window.clearInterval(this.intervalId); this.intervalId = null; this.failedPings = 0; logger.info("Ping interval cleared"); } } }); }; }).call(this,"/modules/xmpp/strophe.ping.js") },{"../../service/xmpp/XMPPEvents":137,"jitsi-meet-logger":79}],42:[function(require,module,exports){ (function (__filename){ /* jshint -W117 */ var logger = require("jitsi-meet-logger").getLogger(__filename); module.exports = function() { Strophe.addConnectionPlugin('rayo', { RAYO_XMLNS: 'urn:xmpp:rayo:1', connection: null, init: function (conn) { this.connection = conn; if (this.connection.disco) { this.connection.disco.addFeature('urn:xmpp:rayo:client:1'); } this.connection.addHandler( this.onRayo.bind(this), this.RAYO_XMLNS, 'iq', 'set', null, null); }, onRayo: function (iq) { logger.info("Rayo IQ", iq); }, dial: function (to, from, roomName, roomPass, focusMucJid) { var self = this; return new Promise(function (resolve, reject) { if(!focusMucJid) { reject(new Error("Internal error!")); return; } var req = $iq( { type: 'set', to: focusMucJid } ); req.c('dial', { xmlns: self.RAYO_XMLNS, to: to, from: from }); req.c('header', { name: 'JvbRoomName', value: roomName }).up(); if (roomPass !== null && roomPass.length) { req.c('header', { name: 'JvbRoomPassword', value: roomPass }).up(); } self.connection.sendIQ( req, function (result) { logger.info('Dial result ', result); var resource = $(result).find('ref').attr('uri'); self.call_resource = resource.substr('xmpp:'.length); logger.info( "Received call resource: " + self.call_resource); resolve(); }, function (error) { logger.info('Dial error ', error); reject(error); } ); }); }, hangup: function () { var self = this; return new Promise(function (resolve, reject) { if (!self.call_resource) { reject(new Error("No call in progress")); logger.warn("No call in progress"); return; } var req = $iq( { type: 'set', to: self.call_resource } ); req.c('hangup', { xmlns: self.RAYO_XMLNS }); self.connection.sendIQ( req, function (result) { logger.info('Hangup result ', result); self.call_resource = null; resolve(); }, function (error) { logger.info('Hangup error ', error); self.call_resource = null; reject(new Error('Hangup error ')); } ); }); } } ); }; }).call(this,"/modules/xmpp/strophe.rayo.js") },{"jitsi-meet-logger":79}],43:[function(require,module,exports){ (function (__filename){ /* global Strophe */ /** * Strophe logger implementation. Logs from level WARN and above. 
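 *
 * For illustration (a sketch, not invoked here): once this module has run, a call
 * such as Strophe.log(Strophe.LogLevel.WARN, "went away") is routed to logger.warn,
 * ERROR and FATAL go to logger.error, and lower levels are dropped by the switch below.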
*/ var logger = require("jitsi-meet-logger").getLogger(__filename); module.exports = function () { Strophe.log = function (level, msg) { switch (level) { case Strophe.LogLevel.WARN: logger.warn("Strophe: " + msg); break; case Strophe.LogLevel.ERROR: case Strophe.LogLevel.FATAL: logger.error("Strophe: " + msg); break; } }; Strophe.getStatusString = function (status) { switch (status) { case Strophe.Status.ERROR: return "ERROR"; case Strophe.Status.CONNECTING: return "CONNECTING"; case Strophe.Status.CONNFAIL: return "CONNFAIL"; case Strophe.Status.AUTHENTICATING: return "AUTHENTICATING"; case Strophe.Status.AUTHFAIL: return "AUTHFAIL"; case Strophe.Status.CONNECTED: return "CONNECTED"; case Strophe.Status.DISCONNECTED: return "DISCONNECTED"; case Strophe.Status.DISCONNECTING: return "DISCONNECTING"; case Strophe.Status.ATTACHED: return "ATTACHED"; default: return "unknown"; } }; }; }).call(this,"/modules/xmpp/strophe.util.js") },{"jitsi-meet-logger":79}],44:[function(require,module,exports){ (function (__filename){ /* global $, APP, config, Strophe*/ var logger = require("jitsi-meet-logger").getLogger(__filename); var EventEmitter = require("events"); var Pako = require("pako"); var RTCEvents = require("../../service/RTC/RTCEvents"); var XMPPEvents = require("../../service/xmpp/XMPPEvents"); var JitsiConnectionErrors = require("../../JitsiConnectionErrors"); var JitsiConnectionEvents = require("../../JitsiConnectionEvents"); var RTC = require("../RTC/RTC"); var authenticatedUser = false; function createConnection(bosh) { bosh = bosh || '/http-bind'; // Append token as URL param if (this.token) { bosh += bosh.indexOf('?') == -1 ? '?token=' + this.token : '&token=' + this.token; } return new Strophe.Connection(bosh); }; //!!!!!!!!!! FIXME: ... function initStrophePlugins(XMPP) { require("./strophe.emuc")(XMPP); require("./strophe.jingle")(XMPP, XMPP.eventEmitter); // require("./strophe.moderate")(XMPP, eventEmitter); require("./strophe.util")(); require("./strophe.ping")(XMPP, XMPP.eventEmitter); require("./strophe.rayo")(); require("./strophe.logger")(); } //!!!!!!!!!! FIXME: ... ///** // * If given localStream is video one this method will advertise it's // * video type in MUC presence. // * @param localStream new or modified LocalStream. // */ //function broadcastLocalVideoType(localStream) { // if (localStream.videoType) // XMPP.addToPresence('videoType', localStream.videoType); //} // //function registerListeners() { // RTC.addStreamListener( // broadcastLocalVideoType, // StreamEventTypes.EVENT_TYPE_LOCAL_CHANGED // ); //} function XMPP(options) { this.eventEmitter = new EventEmitter(); this.connection = null; this.disconnectInProgress = false; this.forceMuted = false; this.options = options; initStrophePlugins(this); // registerListeners(); this.connection = createConnection(options.bosh); } XMPP.prototype.getConnection = function(){ return connection; }; XMPP.prototype._connect = function (jid, password) { var self = this; // connection.connect() starts the connection process. // // As the connection process proceeds, the user supplied callback will // be triggered multiple times with status updates. The callback should // take two arguments - the status code and the error condition. // // The status code will be one of the values in the Strophe.Status // constants. The error condition will be one of the conditions defined // in RFC 3920 or the condition ‘strophe-parsererror’. // // The Parameters wait, hold and route are optional and only relevant // for BOSH connections. 
Please see XEP 124 for a more detailed // explanation of the optional parameters. // // Connection status constants for use by the connection handler // callback. // // Status.ERROR - An error has occurred (websockets specific) // Status.CONNECTING - The connection is currently being made // Status.CONNFAIL - The connection attempt failed // Status.AUTHENTICATING - The connection is authenticating // Status.AUTHFAIL - The authentication attempt failed // Status.CONNECTED - The connection has succeeded // Status.DISCONNECTED - The connection has been terminated // Status.DISCONNECTING - The connection is currently being terminated // Status.ATTACHED - The connection has been attached var anonymousConnectionFailed = false; var connectionFailed = false; var lastErrorMsg; this.connection.connect(jid, password, function (status, msg) { logger.log("(TIME) Strophe " + Strophe.getStatusString(status) + (msg ? "[" + msg + "]" : "") + "\t:" + window.performance.now()); if (status === Strophe.Status.CONNECTED) { if (self.options.useStunTurn) { self.connection.jingle.getStunAndTurnCredentials(); } logger.info("My Jabber ID: " + self.connection.jid); // Schedule ping ? var pingJid = self.connection.domain; self.connection.ping.hasPingSupport( pingJid, function (hasPing) { if (hasPing) self.connection.ping.startInterval(pingJid); else logger.warn("Ping NOT supported by " + pingJid); } ); if (password) authenticatedUser = true; if (self.connection && self.connection.connected && Strophe.getResourceFromJid(self.connection.jid)) { // .connected is true while connecting? // self.connection.send($pres()); self.eventEmitter.emit(JitsiConnectionEvents.CONNECTION_ESTABLISHED, Strophe.getResourceFromJid(self.connection.jid)); } } else if (status === Strophe.Status.CONNFAIL) { if (msg === 'x-strophe-bad-non-anon-jid') { anonymousConnectionFailed = true; } else { connectionFailed = true; } lastErrorMsg = msg; } else if (status === Strophe.Status.DISCONNECTED) { // Stop ping interval self.connection.ping.stopInterval(); self.disconnectInProgress = false; if (anonymousConnectionFailed) { // prompt user for username and password self.eventEmitter.emit(JitsiConnectionEvents.CONNECTION_FAILED, JitsiConnectionErrors.PASSWORD_REQUIRED); } else if(connectionFailed) { self.eventEmitter.emit(JitsiConnectionEvents.CONNECTION_FAILED, JitsiConnectionErrors.OTHER_ERROR, msg ? msg : lastErrorMsg); } else { self.eventEmitter.emit(JitsiConnectionEvents.CONNECTION_DISCONNECTED, msg ? 
msg : lastErrorMsg); } } else if (status === Strophe.Status.AUTHFAIL) { // wrong password or username, prompt user self.eventEmitter.emit(JitsiConnectionEvents.CONNECTION_FAILED, JitsiConnectionErrors.PASSWORD_REQUIRED); } }); } XMPP.prototype.connect = function (jid, password) { if(!jid) { var configDomain = this.options.hosts.anonymousdomain || this.options.hosts.domain; // Force authenticated domain if room is appended with '?login=true' if (this.options.hosts.anonymousdomain && window.location.search.indexOf("login=true") !== -1) { configDomain = this.options.hosts.domain; } jid = configDomain || window.location.hostname; } return this._connect(jid, password); }; XMPP.prototype.createRoom = function (roomName, options, settings) { var roomjid = roomName + '@' + this.options.hosts.muc; if (options.useNicks) { if (options.nick) { roomjid += '/' + options.nick; } else { roomjid += '/' + Strophe.getNodeFromJid(this.connection.jid); } } else { var tmpJid = Strophe.getNodeFromJid(this.connection.jid); if(!authenticatedUser) tmpJid = tmpJid.substr(0, 8); roomjid += '/' + tmpJid; } return this.connection.emuc.createRoom(roomjid, null, options, settings); } XMPP.prototype.addListener = function(type, listener) { this.eventEmitter.on(type, listener); }; XMPP.prototype.removeListener = function (type, listener) { this.eventEmitter.removeListener(type, listener); }; //FIXME: this should work with the room XMPP.prototype.leaveRoom = function (jid) { var handler = this.connection.jingle.jid2session[jid]; if (handler && handler.peerconnection) { // FIXME: probably removing streams is not required and close() should // be enough if (RTC.localAudio) { handler.peerconnection.removeStream( RTC.localAudio.getOriginalStream(), true); } if (RTC.localVideo) { handler.peerconnection.removeStream( RTC.localVideo.getOriginalStream(), true); } handler.peerconnection.close(); } this.eventEmitter.emit(XMPPEvents.DISPOSE_CONFERENCE); this.connection.emuc.doLeave(jid); }; /** * Sends 'data' as a log message to the focus. Returns true iff a message * was sent. * @param data * @returns {boolean} true iff a message was sent. */ XMPP.prototype.sendLogs = function (data) { if(!this.connection.emuc.focusMucJid) return false; var deflate = true; var content = JSON.stringify(data); if (deflate) { content = String.fromCharCode.apply(null, Pako.deflateRaw(content)); } content = Base64.encode(content); // XEP-0337-ish var message = $msg({to: this.connection.emuc.focusMucJid, type: 'normal'}); message.c('log', { xmlns: 'urn:xmpp:eventlog', id: 'PeerConnectionStats'}); message.c('message').t(content).up(); if (deflate) { message.c('tag', {name: "deflated", value: "true"}).up(); } message.up(); this.connection.send(message); return true; }; // Gets the logs from strophe.jingle. XMPP.prototype.getJingleLog = function () { return this.connection.jingle ? this.connection.jingle.getLog() : {}; }; // Gets the logs from strophe. XMPP.prototype.getXmppLog = function () { return this.connection.logger ? this.connection.logger.log : null; }; XMPP.prototype.dial = function (to, from, roomName,roomPass) { this.connection.rayo.dial(to, from, roomName,roomPass); }; XMPP.prototype.setMute = function (jid, mute) { this.connection.moderate.setMute(jid, mute); }; XMPP.prototype.eject = function (jid) { this.connection.moderate.eject(jid); }; XMPP.prototype.getSessions = function () { return this.connection.jingle.sessions; }; /** * Disconnects this from the XMPP server (if this is connected). 
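 *
 * Usage sketch (illustrative; the xmpp variable is assumed to be an instance of this
 * module): window.addEventListener('beforeunload', function (ev) { xmpp.disconnect(ev); });
 * Passing the unload event lets this method switch Strophe to synchronous sending, as
 * explained in the comments inside the function body.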
* * @param ev optionally, the event which triggered the necessity to disconnect * from the XMPP server (e.g. beforeunload, unload) */ XMPP.prototype.disconnect = function (ev) { if (this.disconnectInProgress || !this.connection || !this.connection.connected) { this.eventEmitter.emit(JitsiConnectionEvents.WRONG_STATE); return; } this.disconnectInProgress = true; // XXX Strophe is asynchronously sending by default. Unfortunately, that // means that there may not be enough time to send an unavailable presence // or disconnect at all. Switching Strophe to synchronous sending is not // much of an option because it may lead to a noticeable delay in navigating // away from the current location. As a compromise, we will try to increase // the chances of sending an unavailable presence and/or disconecting within // the short time span that we have upon unloading by invoking flush() on // the connection. We flush() once before disconnect() in order to attemtp // to have its unavailable presence at the top of the send queue. We flush() // once more after disconnect() in order to attempt to have its unavailable // presence sent as soon as possible. this.connection.flush(); if (ev !== null && typeof ev !== 'undefined') { var evType = ev.type; if (evType == 'beforeunload' || evType == 'unload') { // XXX Whatever we said above, synchronous sending is the best // (known) way to properly disconnect from the XMPP server. // Consequently, it may be fine to have the source code and comment // it in or out depending on whether we want to run with it for some // time. this.connection.options.sync = true; } } this.connection.disconnect(); if (this.connection.options.sync !== true) { this.connection.flush(); } }; module.exports = XMPP; }).call(this,"/modules/xmpp/xmpp.js") },{"../../JitsiConnectionErrors":5,"../../JitsiConnectionEvents":6,"../../service/RTC/RTCEvents":131,"../../service/xmpp/XMPPEvents":137,"../RTC/RTC":16,"./strophe.emuc":38,"./strophe.jingle":39,"./strophe.logger":40,"./strophe.ping":41,"./strophe.rayo":42,"./strophe.util":43,"events":51,"jitsi-meet-logger":79,"pako":84}],45:[function(require,module,exports){ module.exports = after function after(count, callback, err_cb) { var bail = false err_cb = err_cb || noop proxy.count = count return (count === 0) ? callback() : proxy function proxy(err, result) { if (proxy.count <= 0) { throw new Error('after called too many times') } --proxy.count // after first error, rest are passed to err_cb if (err) { bail = true callback(err) // future error callbacks will go to error handler callback = err_cb } else if (proxy.count === 0 && !bail) { callback(null, result) } } } function noop() {} },{}],46:[function(require,module,exports){ /** * An abstraction for slicing an arraybuffer even when * ArrayBuffer.prototype.slice is not supported * * @api public */ module.exports = function(arraybuffer, start, end) { var bytes = arraybuffer.byteLength; start = start || 0; end = end || bytes; if (arraybuffer.slice) { return arraybuffer.slice(start, end); } if (start < 0) { start += bytes; } if (end < 0) { end += bytes; } if (end > bytes) { end = bytes; } if (start >= bytes || start >= end || bytes === 0) { return new ArrayBuffer(0); } var abv = new Uint8Array(arraybuffer); var result = new Uint8Array(end - start); for (var i = start, ii = 0; i < end; i++, ii++) { result[ii] = abv[i]; } return result.buffer; }; },{}],47:[function(require,module,exports){ (function (process){ /*! 
* async * https://github.com/caolan/async * * Copyright 2010-2014 Caolan McMahon * Released under the MIT license */ /*jshint onevar: false, indent:4 */ /*global setImmediate: false, setTimeout: false, console: false */ (function () { var async = {}; // global on the server, window in the browser var root, previous_async; root = this; if (root != null) { previous_async = root.async; } async.noConflict = function () { root.async = previous_async; return async; }; function only_once(fn) { var called = false; return function() { if (called) throw new Error("Callback was already called."); called = true; fn.apply(root, arguments); } } //// cross-browser compatiblity functions //// var _toString = Object.prototype.toString; var _isArray = Array.isArray || function (obj) { return _toString.call(obj) === '[object Array]'; }; var _each = function (arr, iterator) { if (arr.forEach) { return arr.forEach(iterator); } for (var i = 0; i < arr.length; i += 1) { iterator(arr[i], i, arr); } }; var _map = function (arr, iterator) { if (arr.map) { return arr.map(iterator); } var results = []; _each(arr, function (x, i, a) { results.push(iterator(x, i, a)); }); return results; }; var _reduce = function (arr, iterator, memo) { if (arr.reduce) { return arr.reduce(iterator, memo); } _each(arr, function (x, i, a) { memo = iterator(memo, x, i, a); }); return memo; }; var _keys = function (obj) { if (Object.keys) { return Object.keys(obj); } var keys = []; for (var k in obj) { if (obj.hasOwnProperty(k)) { keys.push(k); } } return keys; }; //// exported async module functions //// //// nextTick implementation with browser-compatible fallback //// if (typeof process === 'undefined' || !(process.nextTick)) { if (typeof setImmediate === 'function') { async.nextTick = function (fn) { // not a direct alias for IE10 compatibility setImmediate(fn); }; async.setImmediate = async.nextTick; } else { async.nextTick = function (fn) { setTimeout(fn, 0); }; async.setImmediate = async.nextTick; } } else { async.nextTick = process.nextTick; if (typeof setImmediate !== 'undefined') { async.setImmediate = function (fn) { // not a direct alias for IE10 compatibility setImmediate(fn); }; } else { async.setImmediate = async.nextTick; } } async.each = function (arr, iterator, callback) { callback = callback || function () {}; if (!arr.length) { return callback(); } var completed = 0; _each(arr, function (x) { iterator(x, only_once(done) ); }); function done(err) { if (err) { callback(err); callback = function () {}; } else { completed += 1; if (completed >= arr.length) { callback(); } } } }; async.forEach = async.each; async.eachSeries = function (arr, iterator, callback) { callback = callback || function () {}; if (!arr.length) { return callback(); } var completed = 0; var iterate = function () { iterator(arr[completed], function (err) { if (err) { callback(err); callback = function () {}; } else { completed += 1; if (completed >= arr.length) { callback(); } else { iterate(); } } }); }; iterate(); }; async.forEachSeries = async.eachSeries; async.eachLimit = function (arr, limit, iterator, callback) { var fn = _eachLimit(limit); fn.apply(null, [arr, iterator, callback]); }; async.forEachLimit = async.eachLimit; var _eachLimit = function (limit) { return function (arr, iterator, callback) { callback = callback || function () {}; if (!arr.length || limit <= 0) { return callback(); } var completed = 0; var started = 0; var running = 0; (function replenish () { if (completed >= arr.length) { return callback(); } while (running < limit && 
started < arr.length) { started += 1; running += 1; iterator(arr[started - 1], function (err) { if (err) { callback(err); callback = function () {}; } else { completed += 1; running -= 1; if (completed >= arr.length) { callback(); } else { replenish(); } } }); } })(); }; }; var doParallel = function (fn) { return function () { var args = Array.prototype.slice.call(arguments); return fn.apply(null, [async.each].concat(args)); }; }; var doParallelLimit = function(limit, fn) { return function () { var args = Array.prototype.slice.call(arguments); return fn.apply(null, [_eachLimit(limit)].concat(args)); }; }; var doSeries = function (fn) { return function () { var args = Array.prototype.slice.call(arguments); return fn.apply(null, [async.eachSeries].concat(args)); }; }; var _asyncMap = function (eachfn, arr, iterator, callback) { arr = _map(arr, function (x, i) { return {index: i, value: x}; }); if (!callback) { eachfn(arr, function (x, callback) { iterator(x.value, function (err) { callback(err); }); }); } else { var results = []; eachfn(arr, function (x, callback) { iterator(x.value, function (err, v) { results[x.index] = v; callback(err); }); }, function (err) { callback(err, results); }); } }; async.map = doParallel(_asyncMap); async.mapSeries = doSeries(_asyncMap); async.mapLimit = function (arr, limit, iterator, callback) { return _mapLimit(limit)(arr, iterator, callback); }; var _mapLimit = function(limit) { return doParallelLimit(limit, _asyncMap); }; // reduce only has a series version, as doing reduce in parallel won't // work in many situations. async.reduce = function (arr, memo, iterator, callback) { async.eachSeries(arr, function (x, callback) { iterator(memo, x, function (err, v) { memo = v; callback(err); }); }, function (err) { callback(err, memo); }); }; // inject alias async.inject = async.reduce; // foldl alias async.foldl = async.reduce; async.reduceRight = function (arr, memo, iterator, callback) { var reversed = _map(arr, function (x) { return x; }).reverse(); async.reduce(reversed, memo, iterator, callback); }; // foldr alias async.foldr = async.reduceRight; var _filter = function (eachfn, arr, iterator, callback) { var results = []; arr = _map(arr, function (x, i) { return {index: i, value: x}; }); eachfn(arr, function (x, callback) { iterator(x.value, function (v) { if (v) { results.push(x); } callback(); }); }, function (err) { callback(_map(results.sort(function (a, b) { return a.index - b.index; }), function (x) { return x.value; })); }); }; async.filter = doParallel(_filter); async.filterSeries = doSeries(_filter); // select alias async.select = async.filter; async.selectSeries = async.filterSeries; var _reject = function (eachfn, arr, iterator, callback) { var results = []; arr = _map(arr, function (x, i) { return {index: i, value: x}; }); eachfn(arr, function (x, callback) { iterator(x.value, function (v) { if (!v) { results.push(x); } callback(); }); }, function (err) { callback(_map(results.sort(function (a, b) { return a.index - b.index; }), function (x) { return x.value; })); }); }; async.reject = doParallel(_reject); async.rejectSeries = doSeries(_reject); var _detect = function (eachfn, arr, iterator, main_callback) { eachfn(arr, function (x, callback) { iterator(x, function (result) { if (result) { main_callback(x); main_callback = function () {}; } else { callback(); } }); }, function (err) { main_callback(); }); }; async.detect = doParallel(_detect); async.detectSeries = doSeries(_detect); async.some = function (arr, iterator, main_callback) { 
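        // Usage sketch (illustrative only): in this version of async, the final
        // callback of async.some receives a single boolean, without an error argument.
        //
        //   async.some([1, 2, 3], function (x, cb) { cb(x > 2); }, function (result) {
        //       // result === true because the iterator reported truthy for 3
        //   });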
async.each(arr, function (x, callback) { iterator(x, function (v) { if (v) { main_callback(true); main_callback = function () {}; } callback(); }); }, function (err) { main_callback(false); }); }; // any alias async.any = async.some; async.every = function (arr, iterator, main_callback) { async.each(arr, function (x, callback) { iterator(x, function (v) { if (!v) { main_callback(false); main_callback = function () {}; } callback(); }); }, function (err) { main_callback(true); }); }; // all alias async.all = async.every; async.sortBy = function (arr, iterator, callback) { async.map(arr, function (x, callback) { iterator(x, function (err, criteria) { if (err) { callback(err); } else { callback(null, {value: x, criteria: criteria}); } }); }, function (err, results) { if (err) { return callback(err); } else { var fn = function (left, right) { var a = left.criteria, b = right.criteria; return a < b ? -1 : a > b ? 1 : 0; }; callback(null, _map(results.sort(fn), function (x) { return x.value; })); } }); }; async.auto = function (tasks, callback) { callback = callback || function () {}; var keys = _keys(tasks); var remainingTasks = keys.length if (!remainingTasks) { return callback(); } var results = {}; var listeners = []; var addListener = function (fn) { listeners.unshift(fn); }; var removeListener = function (fn) { for (var i = 0; i < listeners.length; i += 1) { if (listeners[i] === fn) { listeners.splice(i, 1); return; } } }; var taskComplete = function () { remainingTasks-- _each(listeners.slice(0), function (fn) { fn(); }); }; addListener(function () { if (!remainingTasks) { var theCallback = callback; // prevent final callback from calling itself if it errors callback = function () {}; theCallback(null, results); } }); _each(keys, function (k) { var task = _isArray(tasks[k]) ? tasks[k]: [tasks[k]]; var taskCallback = function (err) { var args = Array.prototype.slice.call(arguments, 1); if (args.length <= 1) { args = args[0]; } if (err) { var safeResults = {}; _each(_keys(results), function(rkey) { safeResults[rkey] = results[rkey]; }); safeResults[k] = args; callback(err, safeResults); // stop subsequent errors hitting callback multiple times callback = function () {}; } else { results[k] = args; async.setImmediate(taskComplete); } }; var requires = task.slice(0, Math.abs(task.length - 1)) || []; var ready = function () { return _reduce(requires, function (a, x) { return (a && results.hasOwnProperty(x)); }, true) && !results.hasOwnProperty(k); }; if (ready()) { task[task.length - 1](taskCallback, results); } else { var listener = function () { if (ready()) { removeListener(listener); task[task.length - 1](taskCallback, results); } }; addListener(listener); } }); }; async.retry = function(times, task, callback) { var DEFAULT_TIMES = 5; var attempts = []; // Use defaults if times not passed if (typeof times === 'function') { callback = task; task = times; times = DEFAULT_TIMES; } // Make sure times is a number times = parseInt(times, 10) || DEFAULT_TIMES; var wrappedTask = function(wrappedCallback, wrappedResults) { var retryAttempt = function(task, finalAttempt) { return function(seriesCallback) { task(function(err, result){ seriesCallback(!err || finalAttempt, {err: err, result: result}); }, wrappedResults); }; }; while (times) { attempts.push(retryAttempt(task, !(times-=1))); } async.series(attempts, function(done, data){ data = data[data.length - 1]; (wrappedCallback || callback)(data.err, data.result); }); } // If a callback is passed, run this as a controll flow return callback ? 
wrappedTask() : wrappedTask }; async.waterfall = function (tasks, callback) { callback = callback || function () {}; if (!_isArray(tasks)) { var err = new Error('First argument to waterfall must be an array of functions'); return callback(err); } if (!tasks.length) { return callback(); } var wrapIterator = function (iterator) { return function (err) { if (err) { callback.apply(null, arguments); callback = function () {}; } else { var args = Array.prototype.slice.call(arguments, 1); var next = iterator.next(); if (next) { args.push(wrapIterator(next)); } else { args.push(callback); } async.setImmediate(function () { iterator.apply(null, args); }); } }; }; wrapIterator(async.iterator(tasks))(); }; var _parallel = function(eachfn, tasks, callback) { callback = callback || function () {}; if (_isArray(tasks)) { eachfn.map(tasks, function (fn, callback) { if (fn) { fn(function (err) { var args = Array.prototype.slice.call(arguments, 1); if (args.length <= 1) { args = args[0]; } callback.call(null, err, args); }); } }, callback); } else { var results = {}; eachfn.each(_keys(tasks), function (k, callback) { tasks[k](function (err) { var args = Array.prototype.slice.call(arguments, 1); if (args.length <= 1) { args = args[0]; } results[k] = args; callback(err); }); }, function (err) { callback(err, results); }); } }; async.parallel = function (tasks, callback) { _parallel({ map: async.map, each: async.each }, tasks, callback); }; async.parallelLimit = function(tasks, limit, callback) { _parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback); }; async.series = function (tasks, callback) { callback = callback || function () {}; if (_isArray(tasks)) { async.mapSeries(tasks, function (fn, callback) { if (fn) { fn(function (err) { var args = Array.prototype.slice.call(arguments, 1); if (args.length <= 1) { args = args[0]; } callback.call(null, err, args); }); } }, callback); } else { var results = {}; async.eachSeries(_keys(tasks), function (k, callback) { tasks[k](function (err) { var args = Array.prototype.slice.call(arguments, 1); if (args.length <= 1) { args = args[0]; } results[k] = args; callback(err); }); }, function (err) { callback(err, results); }); } }; async.iterator = function (tasks) { var makeCallback = function (index) { var fn = function () { if (tasks.length) { tasks[index].apply(null, arguments); } return fn.next(); }; fn.next = function () { return (index < tasks.length - 1) ? 
makeCallback(index + 1): null; }; return fn; }; return makeCallback(0); }; async.apply = function (fn) { var args = Array.prototype.slice.call(arguments, 1); return function () { return fn.apply( null, args.concat(Array.prototype.slice.call(arguments)) ); }; }; var _concat = function (eachfn, arr, fn, callback) { var r = []; eachfn(arr, function (x, cb) { fn(x, function (err, y) { r = r.concat(y || []); cb(err); }); }, function (err) { callback(err, r); }); }; async.concat = doParallel(_concat); async.concatSeries = doSeries(_concat); async.whilst = function (test, iterator, callback) { if (test()) { iterator(function (err) { if (err) { return callback(err); } async.whilst(test, iterator, callback); }); } else { callback(); } }; async.doWhilst = function (iterator, test, callback) { iterator(function (err) { if (err) { return callback(err); } var args = Array.prototype.slice.call(arguments, 1); if (test.apply(null, args)) { async.doWhilst(iterator, test, callback); } else { callback(); } }); }; async.until = function (test, iterator, callback) { if (!test()) { iterator(function (err) { if (err) { return callback(err); } async.until(test, iterator, callback); }); } else { callback(); } }; async.doUntil = function (iterator, test, callback) { iterator(function (err) { if (err) { return callback(err); } var args = Array.prototype.slice.call(arguments, 1); if (!test.apply(null, args)) { async.doUntil(iterator, test, callback); } else { callback(); } }); }; async.queue = function (worker, concurrency) { if (concurrency === undefined) { concurrency = 1; } function _insert(q, data, pos, callback) { if (!q.started){ q.started = true; } if (!_isArray(data)) { data = [data]; } if(data.length == 0) { // call drain immediately if there are no tasks return async.setImmediate(function() { if (q.drain) { q.drain(); } }); } _each(data, function(task) { var item = { data: task, callback: typeof callback === 'function' ? 
callback : null }; if (pos) { q.tasks.unshift(item); } else { q.tasks.push(item); } if (q.saturated && q.tasks.length === q.concurrency) { q.saturated(); } async.setImmediate(q.process); }); } var workers = 0; var q = { tasks: [], concurrency: concurrency, saturated: null, empty: null, drain: null, started: false, paused: false, push: function (data, callback) { _insert(q, data, false, callback); }, kill: function () { q.drain = null; q.tasks = []; }, unshift: function (data, callback) { _insert(q, data, true, callback); }, process: function () { if (!q.paused && workers < q.concurrency && q.tasks.length) { var task = q.tasks.shift(); if (q.empty && q.tasks.length === 0) { q.empty(); } workers += 1; var next = function () { workers -= 1; if (task.callback) { task.callback.apply(task, arguments); } if (q.drain && q.tasks.length + workers === 0) { q.drain(); } q.process(); }; var cb = only_once(next); worker(task.data, cb); } }, length: function () { return q.tasks.length; }, running: function () { return workers; }, idle: function() { return q.tasks.length + workers === 0; }, pause: function () { if (q.paused === true) { return; } q.paused = true; q.process(); }, resume: function () { if (q.paused === false) { return; } q.paused = false; q.process(); } }; return q; }; async.priorityQueue = function (worker, concurrency) { function _compareTasks(a, b){ return a.priority - b.priority; }; function _binarySearch(sequence, item, compare) { var beg = -1, end = sequence.length - 1; while (beg < end) { var mid = beg + ((end - beg + 1) >>> 1); if (compare(item, sequence[mid]) >= 0) { beg = mid; } else { end = mid - 1; } } return beg; } function _insert(q, data, priority, callback) { if (!q.started){ q.started = true; } if (!_isArray(data)) { data = [data]; } if(data.length == 0) { // call drain immediately if there are no tasks return async.setImmediate(function() { if (q.drain) { q.drain(); } }); } _each(data, function(task) { var item = { data: task, priority: priority, callback: typeof callback === 'function' ? callback : null }; q.tasks.splice(_binarySearch(q.tasks, item, _compareTasks) + 1, 0, item); if (q.saturated && q.tasks.length === q.concurrency) { q.saturated(); } async.setImmediate(q.process); }); } // Start with a normal queue var q = async.queue(worker, concurrency); // Override push to accept second parameter representing priority q.push = function (data, priority, callback) { _insert(q, data, priority, callback); }; // Remove unshift function delete q.unshift; return q; }; async.cargo = function (worker, payload) { var working = false, tasks = []; var cargo = { tasks: tasks, payload: payload, saturated: null, empty: null, drain: null, drained: true, push: function (data, callback) { if (!_isArray(data)) { data = [data]; } _each(data, function(task) { tasks.push({ data: task, callback: typeof callback === 'function' ? callback : null }); cargo.drained = false; if (cargo.saturated && tasks.length === payload) { cargo.saturated(); } }); async.setImmediate(cargo.process); }, process: function process() { if (working) return; if (tasks.length === 0) { if(cargo.drain && !cargo.drained) cargo.drain(); cargo.drained = true; return; } var ts = typeof payload === 'number' ? 
tasks.splice(0, payload) : tasks.splice(0, tasks.length); var ds = _map(ts, function (task) { return task.data; }); if(cargo.empty) cargo.empty(); working = true; worker(ds, function () { working = false; var args = arguments; _each(ts, function (data) { if (data.callback) { data.callback.apply(null, args); } }); process(); }); }, length: function () { return tasks.length; }, running: function () { return working; } }; return cargo; }; var _console_fn = function (name) { return function (fn) { var args = Array.prototype.slice.call(arguments, 1); fn.apply(null, args.concat([function (err) { var args = Array.prototype.slice.call(arguments, 1); if (typeof console !== 'undefined') { if (err) { if (console.error) { console.error(err); } } else if (console[name]) { _each(args, function (x) { console[name](x); }); } } }])); }; }; async.log = _console_fn('log'); async.dir = _console_fn('dir'); /*async.info = _console_fn('info'); async.warn = _console_fn('warn'); async.error = _console_fn('error');*/ async.memoize = function (fn, hasher) { var memo = {}; var queues = {}; hasher = hasher || function (x) { return x; }; var memoized = function () { var args = Array.prototype.slice.call(arguments); var callback = args.pop(); var key = hasher.apply(null, args); if (key in memo) { async.nextTick(function () { callback.apply(null, memo[key]); }); } else if (key in queues) { queues[key].push(callback); } else { queues[key] = [callback]; fn.apply(null, args.concat([function () { memo[key] = arguments; var q = queues[key]; delete queues[key]; for (var i = 0, l = q.length; i < l; i++) { q[i].apply(null, arguments); } }])); } }; memoized.memo = memo; memoized.unmemoized = fn; return memoized; }; async.unmemoize = function (fn) { return function () { return (fn.unmemoized || fn).apply(null, arguments); }; }; async.times = function (count, iterator, callback) { var counter = []; for (var i = 0; i < count; i++) { counter.push(i); } return async.map(counter, iterator, callback); }; async.timesSeries = function (count, iterator, callback) { var counter = []; for (var i = 0; i < count; i++) { counter.push(i); } return async.mapSeries(counter, iterator, callback); }; async.seq = function (/* functions... */) { var fns = arguments; return function () { var that = this; var args = Array.prototype.slice.call(arguments); var callback = args.pop(); async.reduce(fns, args, function (newargs, fn, cb) { fn.apply(that, newargs.concat([function () { var err = arguments[0]; var nextargs = Array.prototype.slice.call(arguments, 1); cb(err, nextargs); }])) }, function (err, results) { callback.apply(that, [err].concat(results)); }); }; }; async.compose = function (/* functions... 
*/) {
        return async.seq.apply(null, Array.prototype.reverse.call(arguments));
    };

    var _applyEach = function (eachfn, fns /*args...*/) {
        var go = function () {
            var that = this;
            var args = Array.prototype.slice.call(arguments);
            var callback = args.pop();
            return eachfn(fns, function (fn, cb) {
                fn.apply(that, args.concat([cb]));
            },
            callback);
        };
        if (arguments.length > 2) {
            var args = Array.prototype.slice.call(arguments, 2);
            return go.apply(this, args);
        }
        else {
            return go;
        }
    };
    async.applyEach = doParallel(_applyEach);
    async.applyEachSeries = doSeries(_applyEach);

    async.forever = function (fn, callback) {
        function next(err) {
            if (err) {
                if (callback) {
                    return callback(err);
                }
                throw err;
            }
            fn(next);
        }
        next();
    };

    // Node.js
    if (typeof module !== 'undefined' && module.exports) {
        module.exports = async;
    }
    // AMD / RequireJS
    else if (typeof define !== 'undefined' && define.amd) {
        define([], function () {
            return async;
        });
    }
    // included directly via