Merge pull request #1802 from jitsi/start_in_audio_only
Start in audio only
commit 2525bb2805

conference.js | 149
@@ -25,6 +25,7 @@ import {
     conferenceFailed,
     conferenceJoined,
     conferenceLeft,
+    toggleAudioOnly,
     EMAIL_COMMAND,
     lockStateChanged
 } from './react/features/base/conference';
@@ -74,7 +75,7 @@ const eventEmitter = new EventEmitter();
 let room;
 let connection;
 let localAudio, localVideo;
-let initialAudioMutedState = false, initialVideoMutedState = false;
+let initialAudioMutedState = false;
 
 import {VIDEO_CONTAINER_TYPE} from "./modules/UI/videolayout/VideoContainer";
 
@@ -177,27 +178,40 @@ function getDisplayName(id) {
  * result of user interaction
  */
 function muteLocalAudio(muted) {
-    muteLocalMedia(localAudio, muted, 'Audio');
+    muteLocalMedia(localAudio, muted);
 }
 
-function muteLocalMedia(localMedia, muted, localMediaTypeString) {
-    if (!localMedia) {
-        return;
+/**
+ * Mute or unmute local media stream if it exists.
+ * @param {JitsiLocalTrack} localTrack
+ * @param {boolean} muted
+ *
+ * @returns {Promise} resolved in case mute/unmute operations succeeds or
+ * rejected with an error if something goes wrong. It is expected that often
+ * the error will be of the {@link JitsiTrackError} type, but it's not
+ * guaranteed.
+ */
+function muteLocalMedia(localTrack, muted) {
+    if (!localTrack) {
+        return Promise.resolve();
     }
 
     const method = muted ? 'mute' : 'unmute';
 
-    localMedia[method]().catch(reason => {
-        logger.warn(`${localMediaTypeString} ${method} was rejected:`, reason);
-    });
+    return localTrack[method]();
 }
 
 /**
  * Mute or unmute local video stream if it exists.
  * @param {boolean} muted if video stream should be muted or unmuted.
+ *
+ * @returns {Promise} resolved in case mute/unmute operations succeeds or
+ * rejected with an error if something goes wrong. It is expected that often
+ * the error will be of the {@link JitsiTrackError} type, but it's not
+ * guaranteed.
+ */
 function muteLocalVideo(muted) {
-    muteLocalMedia(localVideo, muted, 'Video');
+    return muteLocalMedia(localVideo, muted);
 }
 
 /**
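With this change muteLocalAudio() and muteLocalVideo() return the promise coming from the track's mute/unmute call instead of logging and swallowing rejections. A minimal caller sketch, not part of the diff (the log strings are illustrative):

    // Callers can now react to a failed mute themselves.
    muteLocalVideo(true)
        .then(() => logger.log('local video muted'))
        .catch(error => {
            // Often a JitsiTrackError, per the JSDoc above, but not guaranteed.
            logger.warn('video mute was rejected:', error);
        });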
@@ -424,6 +438,12 @@ function _connectionFailedHandler(error) {
 }
 
 export default {
+    /**
+     * Flag used to delay modification of the muted status of local media tracks
+     * until those are created (or not, but at that point it's certain that
+     * the tracks won't exist).
+     */
+    _localTracksInitialized: false,
     isModerator: false,
     audioMuted: false,
     videoMuted: false,
@@ -462,11 +482,14 @@ export default {
      * Creates local media tracks and connects to a room. Will show error
      * dialogs in case accessing the local microphone and/or camera failed. Will
      * show guidance overlay for users on how to give access to camera and/or
-     * microphone,
+     * microphone.
      * @param {string} roomName
      * @param {object} options
-     * @param {boolean} options.startScreenSharing - if <tt>true</tt> should
-     * start with screensharing instead of camera video.
+     * @param {boolean} options.startAudioOnly=false - if <tt>true</tt> then
+     * only audio track will be created and the audio only mode will be turned
+     * on.
+     * @param {boolean} options.startScreenSharing=false - if <tt>true</tt>
+     * should start with screensharing instead of camera video.
      * @returns {Promise.<JitsiLocalTrack[], JitsiConnection>}
      */
     createInitialLocalTracksAndConnect(roomName, options = {}) {
@@ -486,7 +509,20 @@ export default {
         let tryCreateLocalTracks;
 
         // FIXME the logic about trying to go audio only on error is duplicated
-        if (options.startScreenSharing) {
+        if (options.startAudioOnly) {
+            tryCreateLocalTracks
+                = createLocalTracks({ devices: ['audio'] }, true)
+                    .catch(err => {
+                        audioOnlyError = err;
+
+                        return [];
+                    });
+
+            // Enable audio only mode
+            if (config.startAudioOnly) {
+                APP.store.dispatch(toggleAudioOnly());
+            }
+        } else if (options.startScreenSharing) {
             tryCreateLocalTracks = this._createDesktopTrack()
                 .then(desktopStream => {
                     return createLocalTracks({ devices: ['audio'] }, true)
@@ -594,16 +630,19 @@ export default {
             analytics.init();
             return this.createInitialLocalTracksAndConnect(
                 options.roomName, {
+                    startAudioOnly: config.startAudioOnly,
                     startScreenSharing: config.startScreenSharing
                 });
         }).then(([tracks, con]) => {
             tracks.forEach(track => {
-                if((track.isAudioTrack() && initialAudioMutedState)
-                    || (track.isVideoTrack() && initialVideoMutedState)) {
+                if (track.isAudioTrack() && initialAudioMutedState) {
+                    track.mute();
+                } else if (track.isVideoTrack() && this.videoMuted) {
                     track.mute();
                 }
             });
             logger.log('initialized with %s local tracks', tracks.length);
+            this._localTracksInitialized = true;
             con.addEventListener(
                 ConnectionEvents.CONNECTION_FAILED,
                 _connectionFailedHandler);
@@ -695,6 +734,8 @@ export default {
      */
     toggleAudioMuted(force = false) {
         if(!localAudio && force) {
+            // NOTE this logic will be adjusted to the same one as for the video
+            // once 'startWithAudioMuted' option is added.
             initialAudioMutedState = !initialAudioMutedState;
             return;
         }
@@ -703,22 +744,60 @@ export default {
     /**
      * Simulates toolbar button click for video mute. Used by shortcuts and API.
      * @param mute true for mute and false for unmute.
+     * @param {boolean} [showUI] when set to false will not display any error
+     * dialogs in case of media permissions error.
      */
-    muteVideo(mute) {
-        muteLocalVideo(mute);
+    muteVideo(mute, showUI = true) {
+        // Not ready to modify track's state yet
+        if (!this._localTracksInitialized) {
+            this.videoMuted = mute;
+
+            return;
+        }
+
+        const maybeShowErrorDialog = (error) => {
+            if (showUI) {
+                APP.UI.showDeviceErrorDialog(null, error);
+            }
+        };
+
+        if (!localVideo && this.videoMuted && !mute) {
+            // Try to create local video if there wasn't any.
+            // This handles the case when user joined with no video
+            // (dismissed screen sharing screen or in audio only mode), but
+            // decided to add it later on by clicking on muted video icon or
+            // turning off the audio only mode.
+            //
+            // FIXME when local track creation is moved to react/redux
+            // it should take care of the use case described above
+            createLocalTracks({ devices: ['video'] }, false)
+                .then(([videoTrack]) => videoTrack)
+                .catch(error => {
+                    // FIXME should send some feedback to the API on error ?
+                    maybeShowErrorDialog(error);
+
+                    // Rollback the video muted status by using null track
+                    return null;
+                })
+                .then(videoTrack => this.useVideoStream(videoTrack));
+        } else {
+            const oldMutedStatus = this.videoMuted;
+
+            muteLocalVideo(mute)
+                .catch(error => {
+                    maybeShowErrorDialog(error);
+                    this.videoMuted = oldMutedStatus;
+                    APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
+                });
+        }
     },
     /**
      * Simulates toolbar button click for video mute. Used by shortcuts and API.
-     * @param {boolean} force - If the track is not created, the operation
-     * will be executed after the track is created. Otherwise the operation
-     * will be ignored.
+     * @param {boolean} [showUI] when set to false will not display any error
+     * dialogs in case of media permissions error.
      */
-    toggleVideoMuted(force = false) {
-        if(!localVideo && force) {
-            initialVideoMutedState = !initialVideoMutedState;
-            return;
-        }
-        this.muteVideo(!this.videoMuted);
+    toggleVideoMuted(showUI = true) {
+        this.muteVideo(!this.videoMuted, showUI);
     },
     /**
      * Retrieve list of conference participants (without local user).
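The new showUI flag separates the two callers named in the JSDoc. A sketch of the difference, with hypothetical call sites for illustration:

    // Toolbar/shortcut path: showUI defaults to true, so a failed mute surfaces
    // through APP.UI.showDeviceErrorDialog.
    APP.conference.toggleVideoMuted();

    // External API path (see the 'toggle-video' command further down): dialogs
    // are suppressed so the embedding page is not interrupted.
    APP.conference.toggleVideoMuted(false /* no UI */);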
@@ -1721,20 +1800,8 @@ export default {
         APP.UI.addListener(UIEvents.VIDEO_MUTED, muted => {
             if (this.isAudioOnly() && !muted) {
                 this._displayAudioOnlyTooltip('videoMute');
-            } else if (!localVideo && this.videoMuted && !muted) {
-                // Maybe try to create local video if there wasn't any ?
-                // This handles the case when user joined with no video
-                // (dismissed screen sharing screen), but decided to add it
-                // later on by clicking on muted video icon.
-                createLocalTracks({ devices: ['video'] }, false)
-                    .then(([videoTrack]) => {
-                        APP.conference.useVideoStream(videoTrack);
-                    })
-                    .catch(error => {
-                        APP.UI.showDeviceErrorDialog(null, error);
-                    });
             } else {
-                muteLocalVideo(muted);
+                this.muteVideo(muted);
             }
         });
 
@@ -1927,7 +1994,7 @@ export default {
         );
 
         APP.UI.addListener(UIEvents.TOGGLE_AUDIO_ONLY, audioOnly => {
-            muteLocalVideo(audioOnly);
+            this.muteVideo(audioOnly);
 
             // Immediately update the UI by having remote videos and the large
             // video update themselves instead of waiting for some other event
@@ -2038,7 +2105,7 @@ export default {
             JitsiMeetJS.mediaDevices.enumerateDevices(devices => {
                 // Ugly way to synchronize real device IDs with local
                 // storage and settings menu. This is a workaround until
                 // getConstraints() method will be implemented
                 // in browsers.
                 if (localAudio) {
                     APP.settings.setMicDeviceId(
@@ -76,6 +76,7 @@ var config = { // eslint-disable-line no-unused-vars
     // page redirection when call is hangup
     disableSimulcast: false,
     // requireDisplayName: true, // Forces the participants that doesn't have display name to enter it when they enter the room.
+    startAudioOnly: false, // Will start the conference in the audio only mode (no video is being received nor sent)
     startScreenSharing: false, // Will try to start with screensharing instead of camera
     // startAudioMuted: 10, // every participant after the Nth will start audio muted
     // startVideoMuted: 10, // every participant after the Nth will start video muted
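A sketch of how a deployment would turn the new option on in its config.js; every value except startAudioOnly is just context copied from the defaults above:

    var config = {
        // ...
        startAudioOnly: true, // join without sending or receiving video
        startScreenSharing: false,
        // ...
    };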
@@ -36,7 +36,9 @@ function initCommands() {
         'display-name':
             APP.conference.changeLocalDisplayName.bind(APP.conference),
         'toggle-audio': () => APP.conference.toggleAudioMuted(true),
-        'toggle-video': () => APP.conference.toggleVideoMuted(true),
+        'toggle-video': () => {
+            APP.conference.toggleVideoMuted(false /* no UI */);
+        },
         'toggle-film-strip': APP.UI.toggleFilmstrip,
         'toggle-chat': APP.UI.toggleChat,
         'toggle-contact-list': APP.UI.toggleContactList,
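For context, 'toggle-video' is the command handled here when the page is embedded. A hedged usage sketch, assuming the JitsiMeetExternalAPI wrapper from external_api.js and its toggleVideo command name (both are assumptions, not shown in this diff):

    // In the embedding page (assumes external_api.js is loaded).
    var api = new JitsiMeetExternalAPI('meet.example.com', 'myroom');

    // Ends up in the command map above, i.e.
    // APP.conference.toggleVideoMuted(false /* no UI */).
    api.executeCommand('toggleVideo');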
@@ -336,13 +336,11 @@ var VideoLayout = {
 
         remoteVideo.addRemoteStreamElement(stream);
 
-        // if track is muted make sure we reflect that
-        if(stream.isMuted())
-        {
-            if(stream.getType() === "audio")
-                this.onAudioMute(stream.getParticipantId(), true);
-            else
-                this.onVideoMute(stream.getParticipantId(), true);
+        // Make sure track's muted state is reflected
+        if (stream.getType() === "audio") {
+            this.onAudioMute(stream.getParticipantId(), stream.isMuted());
+        } else {
+            this.onVideoMute(stream.getParticipantId(), stream.isMuted());
         }
     },
 
@@ -353,6 +351,30 @@ var VideoLayout = {
         if (remoteVideo) {
             remoteVideo.removeRemoteStreamElement(stream);
         }
+        this.updateMutedForNoTracks(id, stream.getType());
+    },
+
+    /**
+     * FIXME get rid of this method once muted indicator are reactified (by
+     * making sure that user with no tracks is displayed as muted )
+     *
+     * If participant has no tracks will make the UI display muted status.
+     * @param {string} participantId
+     * @param {string} mediaType 'audio' or 'video'
+     */
+    updateMutedForNoTracks(participantId, mediaType) {
+        const participant = APP.conference.getParticipantById(participantId);
+
+        if (participant
+                && !participant.getTracksByMediaType(mediaType).length) {
+            if (mediaType === 'audio') {
+                APP.UI.setAudioMuted(participantId, true);
+            } else if (mediaType === 'video') {
+                APP.UI.setVideoMuted(participantId, true);
+            } else {
+                logger.error(`Unsupported media type: ${mediaType}`);
+            }
+        }
     },
 
     /**
@@ -446,6 +468,9 @@ var VideoLayout = {
         this._setRemoteControlProperties(user, remoteVideo);
         this.addRemoteVideoContainer(id, remoteVideo);
 
+        this.updateMutedForNoTracks(id, 'audio');
+        this.updateMutedForNoTracks(id, 'video');
+
         const remoteVideosCount = Object.keys(remoteVideos).length;
 
         if (remoteVideosCount === 1) {
 
@@ -15,7 +15,7 @@ import {
     _setAudioOnlyVideoMuted,
     setLastN
 } from './actions';
-import { SET_AUDIO_ONLY, SET_LASTN } from './actionTypes';
+import { CONFERENCE_JOINED, SET_AUDIO_ONLY, SET_LASTN } from './actionTypes';
 import {
     _addLocalTracksToConference,
     _handleParticipantError,
@@ -33,6 +33,9 @@ MiddlewareRegistry.register(store => next => action => {
     case CONNECTION_ESTABLISHED:
         return _connectionEstablished(store, next, action);
 
+    case CONFERENCE_JOINED:
+        return _conferenceJoined(store, next, action);
+
     case PIN_PARTICIPANT:
         return _pinParticipant(store, next, action);
 
@@ -76,6 +79,35 @@ function _connectionEstablished(store, next, action) {
     return result;
 }
 
+/**
+ * Does extra sync up on properties that may need to be updated, after
+ * the conference was joined.
+ *
+ * @param {Store} store - The Redux store in which the specified action is being
+ * dispatched.
+ * @param {Dispatch} next - The Redux dispatch function to dispatch the
+ * specified action to the specified store.
+ * @param {Action} action - The Redux action CONFERENCE_JOINED which is being
+ * dispatched in the specified store.
+ * @private
+ * @returns {Object} The new state that is the result of the reduction of the
+ * specified action.
+ */
+function _conferenceJoined(store, next, action) {
+    const result = next(action);
+    const { audioOnly, conference }
+        = store.getState()['features/base/conference'];
+
+    // FIXME On Web the audio only mode for "start audio only" is toggled before
+    // conference is added to the redux store ("on conference joined" action)
+    // and the LastN value needs to be synchronized here.
+    if (audioOnly && conference.getLastN() !== 0) {
+        store.dispatch(setLastN(0));
+    }
+
+    return result;
+}
+
 /**
  * Notifies the feature base/conference that the action PIN_PARTICIPANT is being
  * dispatched within a specific Redux store. Pins the specified remote
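Putting the pieces together, a sketch of the start-audio-only flow across the files above (the dispatch site is in createInitialLocalTracksAndConnect; the rest paraphrases _conferenceJoined):

    // conference.js, while creating initial tracks with config.startAudioOnly set:
    APP.store.dispatch(toggleAudioOnly());

    // base/conference middleware, on CONFERENCE_JOINED: audioOnly is already
    // true, so setLastN(0) is dispatched and no remote video is requested
    // while the conference stays in audio only mode.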