Compare commits

...

7 Commits

Author SHA1 Message Date
Hristo Terezov 61d483ce1a fix(trackOpQueue):setEffect &_turnScreenSharingOff 2022-12-12 15:53:18 -06:00
Hristo Terezov 1e2f9160b5 fix: review comments. 2022-12-07 18:27:26 -06:00
Hristo Terezov 4daddd341d docs(conference.js): Add JSDoc for startConference 2022-12-07 14:37:56 -06:00
Hristo Terezov 15cd83387a fix(conference):useAudioVideo remove unused promise 2022-12-07 10:04:29 -06:00
Hristo Terezov 8303e261b2 fix: review comments 2022-12-07 09:56:11 -06:00
Hristo Terezov d3c45a5dea fix(conference): started muted from jicofo 2022-12-07 09:56:11 -06:00
Hristo Terezov 0d6f00abf3 feat(tracks): Synchronize track operations. 2022-12-07 09:56:09 -06:00
33 changed files with 1190 additions and 993 deletions

File diff suppressed because it is too large Load Diff

View File

@ -23,6 +23,7 @@ import {
getVideoTrackByParticipant,
trackStreamingStatusChanged
} from '../../../react/features/base/tracks';
import { createDeferred } from '../../../react/features/base/util/helpers';
import { CHAT_SIZE } from '../../../react/features/chat';
import {
isTrackStreamingStatusActive,
@ -38,7 +39,6 @@ import { getParticipantsPaneOpen } from '../../../react/features/participants-pa
import { PresenceLabel } from '../../../react/features/presence-status';
import { shouldDisplayTileView } from '../../../react/features/video-layout';
/* eslint-enable no-unused-vars */
import { createDeferred } from '../../util/helpers';
import AudioLevels from '../audio_levels/AudioLevels';
import { VIDEO_CONTAINER_TYPE, VideoContainer } from './VideoContainer';

View File

@ -1,71 +0,0 @@
const logger = require('@jitsi/logger').getLogger(__filename);
/**
 * Manages a queue of functions where the current function in progress will
 * automatically execute the next queued function.
 */
export class TaskQueue {
    /**
     * Creates a new instance of {@link TaskQueue} with an empty queue and
     * no task in progress.
     */
    constructor() {
        this._queue = [];
        this._currentTask = null;
        this._onTaskComplete = this._onTaskComplete.bind(this);
    }

    /**
     * Adds a new function to the queue. It will be immediately invoked if no
     * other functions are queued.
     *
     * @param {Function} taskFunction - The function to be queued for execution.
     * @private
     * @returns {void}
     */
    enqueue(taskFunction) {
        this._queue.push(taskFunction);
        this._executeNext();
    }

    /**
     * If no queued task is currently executing, invokes the first task in the
     * queue if any.
     *
     * @private
     * @returns {void}
     */
    _executeNext() {
        // A task is still running - it will trigger the next one on completion.
        if (this._currentTask) {
            logger.warn('Task queued while a task is in progress.');

            return;
        }

        const nextTask = this._queue.shift();

        this._currentTask = nextTask || null;

        if (!nextTask) {
            return;
        }

        logger.debug('Executing a task.');

        try {
            // The task receives the completion callback and must invoke it to
            // unblock the queue.
            nextTask(this._onTaskComplete);
        } catch (error) {
            // A synchronously throwing task must not stall the queue forever.
            logger.error(`Task execution failed: ${error}`);
            this._onTaskComplete();
        }
    }

    /**
     * Prepares to invoke the next function in the queue.
     *
     * @private
     * @returns {void}
     */
    _onTaskComplete() {
        this._currentTask = null;
        logger.debug('Task completed.');
        this._executeNext();
    }
}

View File

@ -1,26 +0,0 @@
import { TaskQueue } from './TaskQueue';
/**
* Create deferred object.
*
* @returns {{promise, resolve, reject}}
*/
/**
 * Create deferred object - a promise bundled together with its resolve and
 * reject controls so callers can settle it from the outside.
 *
 * @returns {{promise, resolve, reject}}
 */
export function createDeferred() {
    let resolveFn, rejectFn;

    // The Promise executor runs synchronously, so both callbacks are
    // guaranteed to be assigned before the object below is built.
    const promise = new Promise((res, rej) => {
        resolveFn = res;
        rejectFn = rej;
    });

    return {
        promise,
        resolve: resolveFn,
        reject: rejectFn
    };
}
/**
 * Returns an instance of {@link TaskQueue}.
 *
 * Each call creates an independent queue; tasks enqueued on it run one at a
 * time, each receiving a completion callback that starts the next task.
 *
 * @returns {Object} - A new {@link TaskQueue} instance.
 */
export function createTaskQueue() {
    return new TaskQueue();
}

View File

@ -24,7 +24,7 @@ import { IResponsiveUIState } from '../base/responsive-ui/reducer';
import { ISettingsState } from '../base/settings/reducer';
import { ISoundsState } from '../base/sounds/reducer';
import { ITestingState } from '../base/testing/reducer';
import { INoSrcDataState, ITracksState } from '../base/tracks/reducer';
import { INoSrcDataState, ITrackOperations, ITracksState } from '../base/tracks/reducer';
import { IUserInteractionState } from '../base/user-interaction/reducer';
import { IBreakoutRoomsState } from '../breakout-rooms/reducer';
import { ICalendarSyncState } from '../calendar-sync/reducer';
@ -107,6 +107,7 @@ export interface IReduxState {
'features/base/responsive-ui': IResponsiveUIState;
'features/base/settings': ISettingsState;
'features/base/sounds': ISoundsState;
'features/base/track-operations': ITrackOperations;
'features/base/tracks': ITracksState;
'features/base/user-interaction': IUserInteractionState;
'features/breakout-rooms': IBreakoutRoomsState;

View File

@ -21,11 +21,13 @@ import { getNormalizedDisplayName } from '../participants/functions';
import { toState } from '../redux/functions';
import {
destroyLocalTracks,
replaceLocalTrack,
executeTrackOperation,
replaceStoredTracks,
trackAdded,
trackRemoved
} from '../tracks/actions.any';
} from '../tracks/actions';
import { getLocalTracks } from '../tracks/functions';
import { TrackOperationType } from '../tracks/types';
import { getBackendSafeRoomName } from '../util/uri';
import {
@ -137,34 +139,43 @@ function _addConferenceListeners(conference: IJitsiConference, dispatch: IStore[
conference.on(
JitsiConferenceEvents.STARTED_MUTED,
() => {
const audioMuted = Boolean(conference.isStartAudioMuted());
const videoMuted = Boolean(conference.isStartVideoMuted());
const localTracks = getLocalTracks(state['features/base/tracks']);
dispatch(executeTrackOperation(TrackOperationType.AudioVideo, () => {
const promises = [];
const audioMuted = Boolean(conference.isStartAudioMuted());
const videoMuted = Boolean(conference.isStartVideoMuted());
const localTracks = getLocalTracks(state['features/base/tracks']);
sendAnalytics(createStartMutedConfigurationEvent('remote', audioMuted, videoMuted));
logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${videoMuted ? 'video' : ''}`);
sendAnalytics(createStartMutedConfigurationEvent('remote', audioMuted, videoMuted));
logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${videoMuted ? 'video' : ''}`);
// XXX Jicofo tells lib-jitsi-meet to start with audio and/or video
// muted i.e. Jicofo expresses an intent. Lib-jitsi-meet has turned
// Jicofo's intent into reality by actually muting the respective
// tracks. The reality is expressed in base/tracks already so what
// is left is to express Jicofo's intent in base/media.
// TODO Maybe the app needs to learn about Jicofo's intent and
// transfer that intent to lib-jitsi-meet instead of lib-jitsi-meet
// acting on Jicofo's intent without the app's knowledge.
dispatch(setAudioMuted(audioMuted));
dispatch(setVideoMuted(videoMuted));
// XXX Jicofo tells lib-jitsi-meet to start with audio and/or video
// muted i.e. Jicofo expresses an intent. Lib-jitsi-meet has turned
// Jicofo's intent into reality by actually muting the respective
// tracks. The reality is expressed in base/tracks already so what
// is left is to express Jicofo's intent in base/media.
// TODO Maybe the app needs to learn about Jicofo's intent and
// transfer that intent to lib-jitsi-meet instead of lib-jitsi-meet
// acting on Jicofo's intent without the app's knowledge.
promises.push(
dispatch(setAudioMuted(audioMuted)).catch(e => logger.error(`Set audio muted failed: ${e}`)));
promises.push(
dispatch(setVideoMuted(videoMuted)).catch(e => logger.error(`Set video muted failed: ${e}`)));
// Remove the tracks from peerconnection as well.
for (const track of localTracks) {
const trackType = track.jitsiTrack.getType();
// Remove the tracks from peerconnection as well.
for (const track of localTracks) {
const trackType = track.jitsiTrack.getType();
// Do not remove the audio track on RN. Starting with iOS 15 it will fail to unmute otherwise.
if ((audioMuted && trackType === MEDIA_TYPE.AUDIO && navigator.product !== 'ReactNative')
|| (videoMuted && trackType === MEDIA_TYPE.VIDEO)) {
dispatch(replaceLocalTrack(track.jitsiTrack, null, conference));
// Do not remove the audio track on RN. Starting with iOS 15 it will fail to unmute otherwise.
if ((audioMuted && trackType === MEDIA_TYPE.AUDIO && navigator.product !== 'ReactNative')
|| (videoMuted && trackType === MEDIA_TYPE.VIDEO)) {
promises.push(
dispatch(replaceStoredTracks(track.jitsiTrack, null))
.catch(e => logger.error(`replaceLocalTrack failed: ${e}`)));
}
}
}
return Promise.all(promises);
}));
});
conference.on(

View File

@ -2,6 +2,11 @@ import { IStore } from '../../app/types';
import { showModeratedNotification } from '../../av-moderation/actions';
import { shouldShowModeratedNotification } from '../../av-moderation/functions';
import { isModerationNotificationDisplayed } from '../../notifications/functions';
import { isForceMuted } from '../../participants-pane/functions';
import { maybeStopMuteBecauseOfLocalRecording } from '../../recording/functions';
import { getLocalParticipant } from '../participants/functions';
import { setMuted } from '../tracks/actions.any';
import { isUserInteractionRequiredForUnmute } from '../tracks/functions';
import {
SET_AUDIO_AVAILABLE,
@ -53,10 +58,32 @@ export function setAudioAvailable(available: boolean) {
* }}
*/
export function setAudioMuted(muted: boolean, ensureTrack = false) {
return {
type: SET_AUDIO_MUTED,
ensureTrack,
muted
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
const participant = getLocalParticipant(state);
if (!muted && isForceMuted(participant, MEDIA_TYPE.AUDIO, state)) {
return Promise.resolve();
}
if (!muted
&& isUserInteractionRequiredForUnmute(state)) {
return Promise.resolve();
}
const mutePromise = dispatch(setMuted({
muted,
ensureTrack,
mediaType: MEDIA_TYPE.AUDIO
}));
dispatch({
type: SET_AUDIO_MUTED,
ensureTrack,
muted
});
return mutePromise;
};
}
@ -114,7 +141,7 @@ export function setScreenshareMuted(
ensureTrack && dispatch(showModeratedNotification(MEDIA_TYPE.SCREENSHARE));
}
return;
return Promise.resolve();
}
const oldValue = state['features/base/media'].screenshare.muted;
@ -122,13 +149,28 @@ export function setScreenshareMuted(
// eslint-disable-next-line no-bitwise
const newValue = muted ? oldValue | authority : oldValue & ~authority;
return dispatch({
const participant = getLocalParticipant(state);
if (!newValue && isForceMuted(participant, MEDIA_TYPE.SCREENSHARE, state)) {
return Promise.resolve();
}
const mutePromise = dispatch(setMuted({
authority,
mediaType,
ensureTrack,
muted: Boolean(newValue)
}));
dispatch({
type: SET_SCREENSHARE_MUTED,
authority,
mediaType,
ensureTrack,
muted: newValue
});
return mutePromise;
};
}
@ -163,7 +205,7 @@ export function setVideoAvailable(available: boolean) {
*/
export function setVideoMuted(
muted: boolean,
mediaType: string = MEDIA_TYPE.VIDEO,
mediaType: MediaType = MEDIA_TYPE.VIDEO,
authority: number = VIDEO_MUTISM_AUTHORITY.USER,
ensureTrack = false) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
@ -175,21 +217,43 @@ export function setVideoMuted(
ensureTrack && dispatch(showModeratedNotification(MEDIA_TYPE.VIDEO));
}
return;
return Promise.resolve();
}
const oldValue = state['features/base/media'].video.muted;
// eslint-disable-next-line no-bitwise
const newValue = muted ? oldValue | authority : oldValue & ~authority;
const participant = getLocalParticipant(state);
return dispatch({
if (!newValue && isForceMuted(participant, MEDIA_TYPE.VIDEO, state)) {
return Promise.resolve();
}
if (maybeStopMuteBecauseOfLocalRecording(Boolean(newValue), dispatch)) {
return Promise.resolve();
}
if (!newValue && isUserInteractionRequiredForUnmute(state)) {
return Promise.resolve();
}
const mutePromise = dispatch(setMuted({
ensureTrack,
authority,
muted: Boolean(newValue),
mediaType
}));
dispatch({
type: SET_VIDEO_MUTED,
authority,
mediaType,
ensureTrack,
muted: newValue
});
return mutePromise;
};
}

View File

@ -12,12 +12,10 @@ import {
NOTIFICATION_TIMEOUT_TYPE,
showWarningNotification
} from '../../notifications';
import { isForceMuted } from '../../participants-pane/functions';
import { isScreenMediaShared } from '../../screen-share/functions';
import { SET_AUDIO_ONLY, setAudioOnly } from '../audio-only';
import { SET_ROOM, isRoomValid } from '../conference';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config';
import { getLocalParticipant } from '../participants';
import { MiddlewareRegistry } from '../redux';
import { getPropertyValue } from '../settings';
import {
@ -27,12 +25,11 @@ import {
isLocalVideoTrackDesktop,
setTrackMuted
} from '../tracks';
import { executeTrackOperation } from '../tracks/actions';
import { TrackOperationType } from '../tracks/types';
import {
SET_AUDIO_MUTED,
SET_AUDIO_UNMUTE_PERMISSIONS,
SET_SCREENSHARE_MUTED,
SET_VIDEO_MUTED,
SET_VIDEO_UNMUTE_PERMISSIONS
} from './actionTypes';
import {
@ -54,6 +51,9 @@ import {
_VIDEO_INITIAL_MEDIA_STATE
} from './reducer';
import './subscriber';
/**
* Implements the entry point of the middleware of the feature base/media.
*
@ -83,16 +83,6 @@ MiddlewareRegistry.register(store => next => action => {
return result;
}
case SET_AUDIO_MUTED: {
const state = store.getState();
const participant = getLocalParticipant(state);
if (!action.muted && isForceMuted(participant, MEDIA_TYPE.AUDIO, state)) {
return;
}
break;
}
case SET_AUDIO_UNMUTE_PERMISSIONS: {
const { blocked, skipNotification } = action;
const state = store.getState();
@ -108,25 +98,6 @@ MiddlewareRegistry.register(store => next => action => {
break;
}
case SET_SCREENSHARE_MUTED: {
const state = store.getState();
const participant = getLocalParticipant(state);
if (!action.muted && isForceMuted(participant, MEDIA_TYPE.SCREENSHARE, state)) {
return;
}
break;
}
case SET_VIDEO_MUTED: {
const state = store.getState();
const participant = getLocalParticipant(state);
if (!action.muted && isForceMuted(participant, MEDIA_TYPE.VIDEO, state)) {
return;
}
break;
}
case SET_VIDEO_UNMUTE_PERMISSIONS: {
const { blocked, skipNotification } = action;
const state = store.getState();
@ -191,9 +162,12 @@ function _setAudioOnly({ dispatch, getState }, next, action) {
sendAnalytics(createTrackMutedEvent('video', 'audio-only mode', audioOnly));
// Make sure we mute both the desktop and video tracks.
dispatch(setVideoMuted(audioOnly, MEDIA_TYPE.VIDEO, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY));
dispatch(executeTrackOperation(TrackOperationType.Video,
() => dispatch(setVideoMuted(audioOnly, MEDIA_TYPE.VIDEO, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY))));
if (getMultipleVideoSendingSupportFeatureFlag(state)) {
dispatch(setScreenshareMuted(audioOnly, MEDIA_TYPE.SCREENSHARE, SCREENSHARE_MUTISM_AUTHORITY.AUDIO_ONLY));
dispatch(executeTrackOperation(TrackOperationType.Video,
() => dispatch(setScreenshareMuted(
audioOnly, MEDIA_TYPE.SCREENSHARE, SCREENSHARE_MUTISM_AUTHORITY.AUDIO_ONLY))));
}
return next(action);
@ -234,9 +208,9 @@ function _setRoom({ dispatch, getState }, next, action) {
// Unconditionally express the desires/expectations/intents of the app and
// the user i.e. the state of base/media. Eventually, practice/reality i.e.
// the state of base/tracks will or will not agree with the desires.
dispatch(setAudioMuted(audioMuted));
dispatch(executeTrackOperation(TrackOperationType.Audio, () => dispatch(setAudioMuted(audioMuted))));
dispatch(setCameraFacingMode(CAMERA_FACING_MODE.USER));
dispatch(setVideoMuted(videoMuted));
dispatch(executeTrackOperation(TrackOperationType.Video, () => dispatch(setVideoMuted(videoMuted))));
// startAudioOnly
//
@ -296,21 +270,29 @@ function _setRoom({ dispatch, getState }, next, action) {
* @private
* @returns {void}
*/
function _syncTrackMutedState({ getState }, track) {
const state = getState()['features/base/media'];
function _syncTrackMutedState({ dispatch, getState }, track) {
const mediaType = track.mediaType;
const muted = Boolean(state[mediaType].muted);
const trackOpType = mediaType === MEDIA_TYPE.AUDIO ? TrackOperationType.Audio : TrackOperationType.Video;
// XXX If muted state of track when it was added is different from our media
// muted state, we need to mute track and explicitly modify 'muted' property
// on track. This is because though TRACK_ADDED action was dispatched it's
// not yet in redux state and JitsiTrackEvents.TRACK_MUTE_CHANGED may be
// fired before track gets to state.
if (track.muted !== muted) {
sendAnalytics(createSyncTrackStateEvent(mediaType, muted));
logger.log(`Sync ${mediaType} track muted state to ${muted ? 'muted' : 'unmuted'}`);
dispatch(executeTrackOperation(trackOpType, () => {
const state = getState()['features/base/media'];
track.muted = muted;
setTrackMuted(track.jitsiTrack, muted, state);
}
const muted = Boolean(state[mediaType].muted);
// XXX If muted state of track when it was added is different from our media
// muted state, we need to mute track and explicitly modify 'muted' property
// on track. This is because though TRACK_ADDED action was dispatched it's
// not yet in redux state and JitsiTrackEvents.TRACK_MUTE_CHANGED may be
// fired before track gets to state.
if (track.muted !== muted) {
sendAnalytics(createSyncTrackStateEvent(track.mediaType, muted));
logger.log(`Sync ${track.mediaType} track muted state to ${muted ? 'muted' : 'unmuted'}`);
track.muted = muted;
return setTrackMuted(track.jitsiTrack, muted, state);
}
return Promise.resolve();
}));
}

View File

@ -1 +0,0 @@
import './middleware.any.js';

View File

@ -1,45 +0,0 @@
import './middleware.any.js';
import { IStore } from '../../app/types';
import { showNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import LocalRecordingManager from '../../recording/components/Recording/LocalRecordingManager.web';
// eslint-disable-next-line lines-around-comment
// @ts-ignore
import StopRecordingDialog from '../../recording/components/Recording/web/StopRecordingDialog';
import { openDialog } from '../dialog/actions';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import { SET_VIDEO_MUTED } from './actionTypes';
import './subscriber';
/**
 * Implements the entry point of the middleware of the feature base/media.
 *
 * Intercepts SET_VIDEO_MUTED while a local self-recording is active so the
 * user is either asked to stop the recording or warned that the recording
 * has no video.
 *
 * @param {IStore} store - The redux store.
 * @returns {Function}
 */
MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: any) => {
    const { dispatch } = store;

    switch (action.type) {
    case SET_VIDEO_MUTED: {
        // Only relevant when a local recording of the user's own streams is running.
        if (LocalRecordingManager.isRecordingLocally() && LocalRecordingManager.selfRecording.on) {
            if (action.muted && LocalRecordingManager.selfRecording.withVideo) {
                // Muting would drop video from the recording: ask the user to
                // stop recording instead, and swallow the action (note that
                // next(action) is intentionally NOT called on this path).
                dispatch(openDialog(StopRecordingDialog, { localRecordingVideoStop: true }));

                return;
            } else if (!action.muted && !LocalRecordingManager.selfRecording.withVideo) {
                // Unmuting while recording without video: warn that the camera
                // feed will not appear in the recording, but let the action pass.
                dispatch(showNotification({
                    titleKey: 'recording.localRecordingNoVideo',
                    descriptionKey: 'recording.localRecordingVideoWarning',
                    uid: 'recording.localRecordingNoVideo'
                }, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
            }
        }
    }
    }

    return next(action);
});

View File

@ -9,6 +9,17 @@
*/
export const SET_NO_SRC_DATA_NOTIFICATION_UID = 'SET_NO_SRC_DATA_NOTIFICATION_UID';
/**
* Sets the track operation promise.
*
* {
* type: SET_TRACK_OPERATIONS_PROMISE,
* audioTrackOperationsPromise: Promise<void>,
* videoTrackOperationsPromise: Promise<void>
* }
*/
export const SET_TRACK_OPERATIONS_PROMISE = 'SET_TRACK_OPERATIONS_PROMISE';
/**
* The type of redux action dispatched when a track has been (locally or
* remotely) added to the conference.

View File

@ -3,22 +3,22 @@ import { sendAnalytics } from '../../analytics/functions';
import { IStore } from '../../app/types';
import { showErrorNotification, showNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT, NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import { isPrejoinPageVisible } from '../../prejoin/functions';
import { getCurrentConference } from '../conference/functions';
import { IJitsiConference } from '../conference/reducer';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
import { createLocalTrack } from '../lib-jitsi-meet/functions.any';
import { setAudioMuted, setScreenshareMuted, setVideoMuted } from '../media/actions';
import {
CAMERA_FACING_MODE,
MEDIA_TYPE,
MediaType,
SCREENSHARE_MUTISM_AUTHORITY,
VIDEO_MUTISM_AUTHORITY,
VIDEO_TYPE,
VideoType
} from '../media/constants';
import { getLocalParticipant } from '../participants/functions';
import { updateSettings } from '../settings/actions';
import {
SET_NO_SRC_DATA_NOTIFICATION_UID,
@ -38,7 +38,8 @@ import {
getLocalTrack,
getLocalTracks,
getLocalVideoTrack,
getTrackByJitsiTrack
getTrackByJitsiTrack,
setTrackMuted
} from './functions';
import logger from './logger';
import { ITrackOptions } from './types';
@ -57,7 +58,7 @@ export function addLocalTrack(newTrack: any) {
await conference.addTrack(newTrack);
}
const setMuted = newTrack.isVideoTrack()
const setMutedA = newTrack.isVideoTrack()
? getMultipleVideoSendingSupportFeatureFlag(getState())
&& newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
? setScreenshareMuted
@ -66,7 +67,7 @@ export function addLocalTrack(newTrack: any) {
const isMuted = newTrack.isMuted();
logger.log(`Adding ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);
await dispatch(setMuted(isMuted));
await dispatch(setMutedA(isMuted));
return dispatch(_addTracks([ newTrack ]));
};
@ -139,6 +140,7 @@ export function createLocalTracksA(options: ITrackOptions = {}) {
dispatch,
getState
};
const promises: Promise<any>[] = [];
// The following executes on React Native only at the time of this
// writing. The effort to port Web's createInitialLocalTracksAndConnect
@ -216,7 +218,14 @@ export function createLocalTracksA(options: ITrackOptions = {}) {
mediaType: device
}
});
promises.push(gumProcess.catch(() => {
// ignore the error in the result promises so that the Promise.all resolves after all promises are
// settled.
}));
}
return Promise.all(promises);
};
}
@ -329,7 +338,7 @@ export function replaceLocalTrack(oldTrack: any, newTrack: any, conference?: IJi
* @param {JitsiLocalTrack|null} newTrack - The track to use instead.
* @returns {Function}
*/
function replaceStoredTracks(oldTrack: any, newTrack: any) {
export function replaceStoredTracks(oldTrack: any, newTrack: any) {
return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
// We call dispose after doing the replace because dispose will
// try and do a new o/a after the track removes itself. Doing it
@ -345,7 +354,7 @@ function replaceStoredTracks(oldTrack: any, newTrack: any) {
// should be falsey. As such, emit a mute event here to set up the app to reflect the track's mute
// state. If this is not done, the current mute state of the app will be reflected on the track,
// not vice-versa.
const setMuted = newTrack.isVideoTrack()
const setMutedA = newTrack.isVideoTrack()
? getMultipleVideoSendingSupportFeatureFlag(getState())
&& newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
? setScreenshareMuted
@ -356,7 +365,7 @@ function replaceStoredTracks(oldTrack: any, newTrack: any) {
sendAnalytics(createTrackMutedEvent(newTrack.getType(), 'track.replaced', isMuted));
logger.log(`Replace ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);
await dispatch(setMuted(isMuted));
await dispatch(setMutedA(isMuted));
await dispatch(_addTracks([ newTrack ]));
}
};
@ -817,37 +826,48 @@ export function updateLastTrackVideoMediaEvent(track: any, name: string): {
};
}
/**
* Toggles the facingMode constraint on the video stream.
* Mutes or unmutes a local track with a specific media type.
*
* @returns {Function}
* @param {Object} options - Parameters of the function.
* @private
* @returns {Promise}
*/
export function toggleCamera() {
return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
export function setMuted({ ensureTrack, authority, mediaType, muted }: {
authority?: number; ensureTrack: boolean; mediaType: MediaType; muted: boolean; }) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']): Promise<any> => {
const state = getState();
const tracks = state['features/base/tracks'];
const localVideoTrack = getLocalVideoTrack(tracks)?.jitsiTrack;
const currentFacingMode = localVideoTrack.getCameraFacingMode();
const localTrack = getLocalTrack(state['features/base/tracks'], mediaType, true);
/**
* FIXME: Ideally, we should be dispatching {@code replaceLocalTrack} here,
* but it seems to not trigger the re-rendering of the local video on Chrome;
* could be due to a plan B vs unified plan issue. Therefore, we use the legacy
* method defined in conference.js that manually takes care of updating the local
* video as well.
*/
await APP.conference.useVideoStream(null);
if (mediaType === MEDIA_TYPE.SCREENSHARE
&& getMultipleVideoSendingSupportFeatureFlag(state)
&& !muted) {
return Promise.resolve();
}
const targetFacingMode = currentFacingMode === CAMERA_FACING_MODE.USER
? CAMERA_FACING_MODE.ENVIRONMENT
: CAMERA_FACING_MODE.USER;
if (localTrack) {
// The `jitsiTrack` property will have a value only for a localTrack for which `getUserMedia` has
// already completed. If there's no `jitsiTrack`, then the `muted` state will be applied once the
// `jitsiTrack` is created.
const { jitsiTrack } = localTrack;
const isAudioOnly = (mediaType === MEDIA_TYPE.VIDEO && authority === VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY)
|| (mediaType === MEDIA_TYPE.SCREENSHARE && authority === SCREENSHARE_MUTISM_AUTHORITY.AUDIO_ONLY);
// Update the flipX value so the environment facing camera is not flipped, before the new track is created.
dispatch(updateSettings({ localFlipX: targetFacingMode === CAMERA_FACING_MODE.USER }));
// Screenshare cannot be unmuted using the video mute button unless it is muted by audioOnly in
// the legacy screensharing mode.
if (jitsiTrack && (
jitsiTrack.videoType !== 'desktop' || isAudioOnly || getMultipleVideoSendingSupportFeatureFlag(state))
) {
return setTrackMuted(jitsiTrack, muted, state).catch(
() => dispatch(trackMuteUnmuteFailed(localTrack, muted)));
}
} else if (!muted && ensureTrack && (typeof APP === 'undefined' || isPrejoinPageVisible(state))) {
// FIXME: This only runs on mobile now because web has its own way of
// creating local tracks. Adjust the check once they are unified.
return dispatch(createLocalTracksA({ devices: [ mediaType ] }));
}
const newVideoTrack = await createLocalTrack('video', null, null, { facingMode: targetFacingMode });
// FIXME: See above.
await APP.conference.useVideoStream(newVideoTrack);
return Promise.resolve();
};
}

View File

@ -6,7 +6,9 @@ import { setAudioOnly } from '../audio-only/actions';
import JitsiMeetJS from '../lib-jitsi-meet';
import { destroyLocalDesktopTrackIfExists, replaceLocalTrack } from './actions.any';
import { getLocalVideoTrack, isLocalVideoTrackDesktop } from './functions';
import { getLocalVideoTrack, isLocalVideoTrackDesktop } from './functions.native';
import { TrackOperationType } from './types';
/* eslint-enable lines-around-comment */
export * from './actions.any';
@ -70,3 +72,22 @@ function _startScreenSharing(dispatch: Function, state: IReduxState) {
setPictureInPictureEnabled(true);
});
}
/**
 * Executes a track operation.
 *
 * NOTE: This is dummy implementation for mobile. Currently we are not sure if we need to chain the track operations.
 * For now we are just executing the passed operation without chaining it.
 *
 * @param {TrackOperationType} type - The type of the operation ('audio', 'video' or 'audio-video').
 * Ignored by this mobile implementation.
 * @param {Function} operation - The operation.
 * @returns {Function} - A thunk that immediately invokes {@code operation}
 * and returns its promise. Unlike the web implementation, no
 * SET_TRACK_OPERATIONS_PROMISE action is dispatched and no chaining with
 * previously queued operations takes place.
 */
export function executeTrackOperation(type: TrackOperationType, operation: () => Promise<any>) {
    return () => operation();
}

View File

@ -17,10 +17,13 @@ import { AudioMixerEffect } from '../../stream-effects/audio-mixer/AudioMixerEff
import { setAudioOnly } from '../audio-only/actions';
import { getCurrentConference } from '../conference/functions';
import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
import { createLocalTrack } from '../lib-jitsi-meet/functions.any';
import { setScreenshareMuted } from '../media/actions';
import { MEDIA_TYPE, VIDEO_TYPE } from '../media/constants';
import { CAMERA_FACING_MODE, MEDIA_TYPE, VIDEO_TYPE } from '../media/constants';
import { updateSettings } from '../settings/actions';
/* eslint-enable lines-around-comment */
import { SET_TRACK_OPERATIONS_PROMISE } from './actionTypes';
import {
addLocalTrack,
replaceLocalTrack
@ -28,9 +31,10 @@ import {
import {
createLocalTracksF,
getLocalDesktopTrack,
getLocalJitsiAudioTrack
getLocalJitsiAudioTrack,
getLocalVideoTrack
} from './functions';
import { IShareOptions, IToggleScreenSharingOptions } from './types';
import { IShareOptions, IToggleScreenSharingOptions, TrackOperationType } from './types';
export * from './actions.any';
@ -148,8 +152,6 @@ async function _toggleScreenSharing(
const audioOnlySharing = isAudioOnlySharing(state);
const screenSharing = isScreenVideoShared(state);
const conference = getCurrentConference(state);
const localAudio = getLocalJitsiAudioTrack(state);
const localScreenshare = getLocalDesktopTrack(state['features/base/tracks']);
// Toggle screenshare or audio-only share if the new state is not passed. Happens in the following two cases.
// 1. ShareAudioDialog passes undefined when the user hits continue in the share audio demo modal.
@ -199,11 +201,16 @@ async function _toggleScreenSharing(
throw new Error(AUDIO_ONLY_SCREEN_SHARE_NO_TRACK);
}
} else if (desktopVideoTrack) {
if (localScreenshare) {
await dispatch(replaceLocalTrack(localScreenshare.jitsiTrack, desktopVideoTrack, conference));
} else {
await dispatch(addLocalTrack(desktopVideoTrack));
}
await dispatch(executeTrackOperation(TrackOperationType.Video, async () => {
const localScreenshare = getLocalDesktopTrack(getState()['features/base/tracks']);
if (localScreenshare) {
await dispatch(replaceLocalTrack(localScreenshare.jitsiTrack, desktopVideoTrack, conference));
} else {
await dispatch(addLocalTrack(desktopVideoTrack));
}
}));
if (isScreenshotCaptureEnabled(state, false, true)) {
dispatch(toggleScreenshotCaptureSummary(true));
}
@ -216,15 +223,23 @@ async function _toggleScreenSharing(
// Noise suppression doesn't work with desktop audio because we can't chain track effects yet, disable it
// first. We need to to wait for the effect to clear first or it might interfere with the audio mixer.
await dispatch(setNoiseSuppressionEnabled(false));
_maybeApplyAudioMixerEffect(desktopAudioTrack, state);
dispatch(setScreenshareAudioTrack(desktopAudioTrack));
// Handle the case where screen share was stopped from the browsers 'screen share in progress' window.
if (audioOnly) {
desktopAudioTrack?.on(
JitsiTrackEvents.LOCAL_TRACK_STOPPED,
() => dispatch(toggleScreensharing(undefined, true)));
}
dispatch(executeTrackOperation(TrackOperationType.Audio,
() => {
const result = _maybeApplyAudioMixerEffect(desktopAudioTrack, state);
dispatch(setScreenshareAudioTrack(desktopAudioTrack));
// Handle the case where screen share was stopped from the browsers 'screen share in progress'
// window.
if (audioOnly) {
desktopAudioTrack?.on(
JitsiTrackEvents.LOCAL_TRACK_STOPPED,
() => dispatch(toggleScreensharing(undefined, true)));
}
return result;
}));
}
// Disable audio-only or best performance mode if the user starts screensharing. This doesn't apply to
@ -241,16 +256,25 @@ async function _toggleScreenSharing(
dispatch(toggleScreenshotCaptureSummary(false));
// Mute the desktop track instead of removing it from the conference since we don't want the client to signal
// a source-remove to the remote peer for the screenshare track. Later when screenshare is enabled again, the
// same sender will be re-used without the need for signaling a new ssrc through source-add.
dispatch(setScreenshareMuted(true));
await dispatch(executeTrackOperation(TrackOperationType.Video, () => {
// Mute the desktop track instead of removing it from the conference since we don't want the client to
// signal a source-remove to the remote peer for the screenshare track. Later when screenshare is enabled
// again, the same sender will be re-used without the need for signaling a new ssrc through source-add.
dispatch(setScreenshareMuted(true));
return Promise.resolve();
}));
if (desktopAudioTrack) {
if (localAudio) {
localAudio.setEffect(undefined);
} else {
await conference.replaceTrack(desktopAudioTrack, null);
}
await dispatch(executeTrackOperation(TrackOperationType.Audio, async () => {
const localAudio = getLocalJitsiAudioTrack(state);
if (localAudio) {
await localAudio.setEffect(undefined);
} else {
await conference.replaceTrack(desktopAudioTrack, null);
}
}));
desktopAudioTrack.dispose();
dispatch(setScreenshareAudioTrack(null));
}
@ -263,3 +287,101 @@ async function _toggleScreenSharing(
APP.API.notifyScreenSharingStatusChanged(enable, screensharingDetails);
}
}
/**
 * Enqueues a track operation on the per-media-type serialization queue(s) kept
 * in the 'features/base/track-operations' redux state, so that operations on
 * the same media type never run concurrently.
 *
 * @param {TrackOperationType} type - The type of the operation ('audio', 'video' or 'audio-video').
 * @param {Function} operation - The operation; executed once the previous
 * operation(s) on the targeted queue(s) have settled, regardless of whether
 * they fulfilled or rejected.
 * @returns {Function} - A thunk resolving/rejecting with the result of
 * {@code operation}. Note: the promise stored back into the redux state is
 * shielded from rejections so that an abandoned queue tail can never surface
 * as an unhandled promise rejection; the promise returned to the caller still
 * rejects normally.
 */
export function executeTrackOperation(type: TrackOperationType, operation: () => Promise<any>) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const {
            audioTrackOperationsPromise,
            videoTrackOperationsPromise
        } = getState()['features/base/track-operations'];

        // Swallows rejections on the queue tail kept in redux; callers observe
        // failures through the promise returned below instead.
        const shield = (promise: Promise<any>) => promise.catch(() => { /* handled by the caller */ });

        switch (type) {
        case TrackOperationType.Audio: {
            // Run the operation whether the previous one fulfilled or rejected.
            const promise = audioTrackOperationsPromise.then(operation, operation);

            dispatch({
                type: SET_TRACK_OPERATIONS_PROMISE,
                audioTrackOperationsPromise: shield(promise)
            });

            return promise;
        }
        case TrackOperationType.Video: {
            const promise = videoTrackOperationsPromise.then(operation, operation);

            dispatch({
                type: SET_TRACK_OPERATIONS_PROMISE,
                videoTrackOperationsPromise: shield(promise)
            });

            return promise;
        }
        case TrackOperationType.AudioVideo: {
            // Wait for both queues to drain; allSettled never rejects, so the
            // operation runs no matter how the previous operations ended.
            const promise = Promise.allSettled([
                audioTrackOperationsPromise,
                videoTrackOperationsPromise
            ]).then(operation);
            const tail = shield(promise);

            dispatch({
                type: SET_TRACK_OPERATIONS_PROMISE,
                audioTrackOperationsPromise: tail,
                videoTrackOperationsPromise: tail
            });

            return promise;
        }
        default: {
            // Compile-time exhaustiveness check: adding a new TrackOperationType
            // without handling it here fails to type-check.
            const unexpectedType: never = type;

            return Promise.reject(new Error(`Unexpected track operation type: ${unexpectedType}`));
        }
        }
    };
}
/**
 * Toggles the facingMode constraint on the video stream (user <-> environment
 * camera). The work is queued as a video track operation so it cannot
 * interleave with other video track changes (mute, device switch, etc.).
 *
 * @returns {Function}
 */
export function toggleCamera() {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) =>
        dispatch(executeTrackOperation(TrackOperationType.Video, () =>
            /**
             * FIXME: Ideally, we should be dispatching {@code replaceLocalTrack} here,
             * but it seems to not trigger the re-rendering of the local video on Chrome;
             * could be due to a plan B vs unified plan issue. Therefore, we use the legacy
             * method defined in conference.js that manually takes care of updating the local
             * video as well.
             */
            APP.conference.useVideoStream(null).then(() => {
                const state = getState();
                const tracks = state['features/base/tracks'];
                const localVideoTrack = getLocalVideoTrack(tracks)?.jitsiTrack;

                // The local video track may be gone by now (e.g. released by the
                // useVideoStream(null) call above, or the camera never started).
                // Default to ENVIRONMENT so we switch to the user-facing camera
                // instead of crashing on an undefined track.
                const currentFacingMode
                    = localVideoTrack?.getCameraFacingMode() ?? CAMERA_FACING_MODE.ENVIRONMENT;
                const targetFacingMode = currentFacingMode === CAMERA_FACING_MODE.USER
                    ? CAMERA_FACING_MODE.ENVIRONMENT
                    : CAMERA_FACING_MODE.USER;

                // Update the flipX value so the environment facing camera is not flipped, before the new track is
                // created.
                dispatch(updateSettings({ localFlipX: targetFacingMode === CAMERA_FACING_MODE.USER }));

                return createLocalTrack('video', null, null, { facingMode: targetFacingMode });
            })
            .then((newVideoTrack: any) => APP.conference.useVideoStream(newVideoTrack))));
}

View File

@ -9,10 +9,12 @@ import {
getUserSelectedMicDeviceId
} from '../settings/functions.web';
import { executeTrackOperation } from './actions.web';
// eslint-disable-next-line lines-around-comment
// @ts-ignore
import loadEffects from './loadEffects';
import logger from './logger';
import { ITrackOptions } from './types';
import { ITrackOptions, TrackOperationType } from './types';
export * from './functions.any';
@ -107,14 +109,15 @@ export function createPrejoinTracks() {
const initialDevices = [ 'audio' ];
const requestedAudio = true;
let requestedVideo = false;
const { startAudioOnly, startWithAudioMuted, startWithVideoMuted } = APP.store.getState()['features/base/settings'];
const { dispatch, getState } = APP.store;
const { startAudioOnly, startWithAudioMuted, startWithVideoMuted } = getState()['features/base/settings'];
// Always get a handle on the audio input device so that we have statistics even if the user joins the
// conference muted. Previous implementation would only acquire the handle when the user first unmuted,
// which would results in statistics ( such as "No audio input" or "Are you trying to speak?") being available
// only after that point.
if (startWithAudioMuted) {
APP.store.dispatch(setAudioMuted(true));
dispatch(executeTrackOperation(TrackOperationType.Audio, () => dispatch(setAudioMuted(true))));
}
if (!startWithVideoMuted && !startAudioOnly) {
@ -128,10 +131,11 @@ export function createPrejoinTracks() {
// Resolve with no tracks
tryCreateLocalTracks = Promise.resolve([]);
} else {
tryCreateLocalTracks = createLocalTracksF({
devices: initialDevices,
firePermissionPromptIsShownEvent: true
}, APP.store)
tryCreateLocalTracks = dispatch(executeTrackOperation(TrackOperationType.AudioVideo, () =>
createLocalTracksF({
devices: initialDevices,
firePermissionPromptIsShownEvent: true
}, APP.store)
.catch((err: Error) => {
if (requestedAudio && requestedVideo) {
@ -177,7 +181,7 @@ export function createPrejoinTracks() {
errors.videoOnlyError = err;
return [];
});
})));
}
return {

View File

@ -1,42 +1,17 @@
import { batch } from 'react-redux';
import { IStore } from '../../app/types';
import { _RESET_BREAKOUT_ROOMS } from '../../breakout-rooms/actionTypes';
import { isPrejoinPageVisible } from '../../prejoin/functions';
import { getCurrentConference } from '../conference/functions';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import {
SET_AUDIO_MUTED,
SET_CAMERA_FACING_MODE,
SET_SCREENSHARE_MUTED,
SET_VIDEO_MUTED,
TOGGLE_CAMERA_FACING_MODE
} from '../media/actionTypes';
import { toggleCameraFacingMode } from '../media/actions';
import {
CAMERA_FACING_MODE,
MEDIA_TYPE,
MediaType,
SCREENSHARE_MUTISM_AUTHORITY,
VIDEO_MUTISM_AUTHORITY
MediaType
} from '../media/constants';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import StateListenerRegistry from '../redux/StateListenerRegistry';
import {
TRACK_UPDATED
} from './actionTypes';
import {
createLocalTracksA,
destroyLocalTracks,
trackMuteUnmuteFailed,
trackRemoved
} from './actions';
import {
getLocalTrack,
isUserInteractionRequiredForUnmute,
setTrackMuted
} from './functions';
import { TRACK_UPDATED } from './actionTypes';
import { getLocalTrack } from './functions';
import './subscriber';
/**
@ -49,15 +24,6 @@ import './subscriber';
*/
MiddlewareRegistry.register(store => next => action => {
switch (action.type) {
case SET_AUDIO_MUTED:
if (!action.muted
&& isUserInteractionRequiredForUnmute(store.getState())) {
return;
}
_setMuted(store, action, MEDIA_TYPE.AUDIO);
break;
case SET_CAMERA_FACING_MODE: {
// XXX The camera facing mode of a MediaStreamTrack can be specified
// only at initialization time and then it can only be toggled. So in
@ -78,19 +44,6 @@ MiddlewareRegistry.register(store => next => action => {
break;
}
case SET_SCREENSHARE_MUTED:
_setMuted(store, action, action.mediaType);
break;
case SET_VIDEO_MUTED:
if (!action.muted
&& isUserInteractionRequiredForUnmute(store.getState())) {
return;
}
_setMuted(store, action, action.mediaType);
break;
case TOGGLE_CAMERA_FACING_MODE: {
const localTrack = _getLocalTrack(store, MEDIA_TYPE.VIDEO);
let jitsiTrack;
@ -121,31 +74,6 @@ MiddlewareRegistry.register(store => next => action => {
return next(action);
});
/**
* Set up state change listener to perform maintenance tasks when the conference
* is left or failed, remove all tracks from the store.
*/
StateListenerRegistry.register(
state => getCurrentConference(state),
(conference, { dispatch, getState }, prevConference) => {
const { authRequired, error } = getState()['features/base/conference'];
// conference keep flipping while we are authenticating, skip clearing while we are in that process
if (prevConference && !conference && !authRequired && !error) {
// Clear all tracks.
const remoteTracks = getState()['features/base/tracks'].filter(t => !t.local);
batch(() => {
dispatch(destroyLocalTracks());
for (const track of remoteTracks) {
dispatch(trackRemoved(track.jitsiTrack));
}
dispatch({ type: _RESET_BREAKOUT_ROOMS });
});
}
});
/**
* Gets the local track associated with a specific {@code MEDIA_TYPE} in a
* specific redux store.
@ -173,48 +101,3 @@ function _getLocalTrack(
mediaType,
includePending));
}
/**
 * Mutes or unmutes a local track with a specific media type.
 *
 * @param {Store} store - The redux store in which the specified action is
 * dispatched.
 * @param {Action} action - The redux action dispatched in the specified store.
 * @param {MEDIA_TYPE} mediaType - The {@link MEDIA_TYPE} of the local track
 * which is being muted or unmuted.
 * @private
 * @returns {void}
 */
async function _setMuted(store: IStore, { ensureTrack, authority, muted }: {
    authority: number; ensureTrack: boolean; muted: boolean; }, mediaType: MediaType) {
    const { dispatch, getState } = store;
    const localTrack = _getLocalTrack(store, mediaType, /* includePending */ true);
    const state = getState();

    // NOTE(review): in multi-stream mode a screenshare unmute is skipped here —
    // presumably it is handled by the screenshare toggle flow, which re-adds
    // the track; confirm against the callers dispatching SET_SCREENSHARE_MUTED.
    if (mediaType === MEDIA_TYPE.SCREENSHARE
        && getMultipleVideoSendingSupportFeatureFlag(state)
        && !muted) {
        return;
    }

    if (localTrack) {
        // The `jitsiTrack` property will have a value only for a localTrack for which `getUserMedia` has already
        // completed. If there's no `jitsiTrack`, then the `muted` state will be applied once the `jitsiTrack` is
        // created.
        const { jitsiTrack } = localTrack;

        // Whether this (un)mute was issued by the audio-only mode authority
        // rather than directly by the user.
        const isAudioOnly = (mediaType === MEDIA_TYPE.VIDEO && authority === VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY)
            || (mediaType === MEDIA_TYPE.SCREENSHARE && authority === SCREENSHARE_MUTISM_AUTHORITY.AUDIO_ONLY);

        // Screenshare cannot be unmuted using the video mute button unless it is muted by audioOnly in the legacy
        // screensharing mode.
        if (jitsiTrack && (
            jitsiTrack.videoType !== 'desktop' || isAudioOnly || getMultipleVideoSendingSupportFeatureFlag(state))
        ) {
            // On failure, surface the error through the dedicated redux action
            // instead of letting the rejection propagate.
            setTrackMuted(jitsiTrack, muted, state).catch(() => dispatch(trackMuteUnmuteFailed(localTrack, muted)));
        }
    } else if (!muted && ensureTrack && (typeof APP === 'undefined' || isPrejoinPageVisible(state))) {
        // FIXME: This only runs on mobile now because web has its own way of
        // creating local tracks. Adjust the check once they are unified.
        dispatch(createLocalTracksA({ devices: [ mediaType ] }));
    }
}

View File

@ -18,6 +18,7 @@ import {
TRACK_UPDATED
} from './actionTypes';
import {
executeTrackOperation,
showNoDataFromSourceVideoError,
toggleScreensharing,
trackNoDataFromSourceNotificationInfoChanged
@ -25,7 +26,7 @@ import {
import {
getTrackByJitsiTrack
} from './functions.web';
import { ITrack } from './types';
import { ITrack, TrackOperationType } from './types';
import './middleware.any';
@ -67,9 +68,10 @@ MiddlewareRegistry.register(store => next => action => {
const { jitsiTrack } = action.track;
const muted = action.wasMuted;
const isVideoTrack = jitsiTrack.getType() !== MEDIA_TYPE.AUDIO;
const { dispatch } = store;
if (isVideoTrack && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
store.dispatch(setScreenshareMuted(!muted));
dispatch(executeTrackOperation(TrackOperationType.Video, () => dispatch(setScreenshareMuted(!muted))));
} else if (isVideoTrack) {
APP.conference.setVideoMuteStatus();
} else {

View File

@ -4,6 +4,7 @@ import { set } from '../redux/functions';
import {
SET_NO_SRC_DATA_NOTIFICATION_UID,
SET_TRACK_OPERATIONS_PROMISE,
TRACK_ADDED,
TRACK_CREATE_CANCELED,
TRACK_CREATE_ERROR,
@ -152,3 +153,31 @@ ReducerRegistry.register<INoSrcDataState>('features/base/no-src-data', (state =
}
});
/**
 * Shape of the 'features/base/track-operations' redux state: the current tails
 * of the per-media-type track operation queues.
 */
export interface ITrackOperations {

    // Settles once the most recently queued audio track operation finishes.
    audioTrackOperationsPromise: Promise<void>;

    // Settles once the most recently queued video track operation finishes.
    videoTrackOperationsPromise: Promise<void>;
}

// Both queues start out empty, i.e. already settled.
const DEFAULT_TRACK_OPERATIONS_STATE = {
    audioTrackOperationsPromise: Promise.resolve(),
    videoTrackOperationsPromise: Promise.resolve()
};

/**
 * Reduces actions which update the track operation queue promises. A falsy
 * value in the action leaves the corresponding queue untouched, so an action
 * may update only one of the two queues.
 */
ReducerRegistry.register<ITrackOperations>(
    'features/base/track-operations',
    (state = DEFAULT_TRACK_OPERATIONS_STATE, action): ITrackOperations => {
        switch (action.type) {
        case SET_TRACK_OPERATIONS_PROMISE:
            return {
                ...state,
                audioTrackOperationsPromise: action.audioTrackOperationsPromise || state.audioTrackOperationsPromise,
                videoTrackOperationsPromise: action.videoTrackOperationsPromise || state.videoTrackOperationsPromise
            };
        default:
            return state;
        }
    });

View File

@ -1,9 +1,13 @@
import _ from 'lodash';
import { batch } from 'react-redux';
import { _RESET_BREAKOUT_ROOMS } from '../../breakout-rooms/actionTypes';
import { getCurrentConference } from '../conference/functions';
import { MEDIA_TYPE } from '../media/constants';
import { getScreenshareParticipantIds } from '../participants/functions';
import StateListenerRegistry from '../redux/StateListenerRegistry';
import { destroyLocalTracks, trackRemoved } from './actions.any';
import { isLocalTrackMuted } from './functions';
/**
@ -38,3 +42,28 @@ StateListenerRegistry.register(
}
}
);
/**
 * Set up state change listener to perform maintenance tasks when the conference
 * is left or failed: remove all tracks from the store.
 */
StateListenerRegistry.register(
    state => getCurrentConference(state),
    (conference, { dispatch, getState }, prevConference) => {
        const { authRequired, error } = getState()['features/base/conference'];

        // The conference keeps flipping while we are authenticating; skip
        // clearing tracks while we are in that process.
        if (prevConference && !conference && !authRequired && !error) {
            // Clear all tracks.
            const remoteTracks = getState()['features/base/tracks'].filter(t => !t.local);

            // Batch so subscribers observe one consistent update instead of one
            // re-render per removed track.
            batch(() => {
                dispatch(destroyLocalTracks());
                for (const track of remoteTracks) {
                    dispatch(trackRemoved(track.jitsiTrack));
                }
                dispatch({ type: _RESET_BREAKOUT_ROOMS });
            });
        }
    });

View File

@ -72,3 +72,9 @@ export interface IShareOptions {
desktopSharingSources?: string[];
desktopStream?: any;
}
/**
 * The serialization queue(s) a track operation targets: operations on the same
 * queue are executed one at a time, in the order they were enqueued.
 */
export enum TrackOperationType {

    // Serialized with other audio track operations only.
    Audio = 'audio',

    // Serialized with both the audio and the video operation queues.
    AudioVideo = 'audio-video',

    // Serialized with other video track operations only.
    Video = 'video'
}

View File

@ -6,8 +6,7 @@ import type { Dispatch } from 'redux';
import { v4 as uuidV4 } from 'uuid';
import { findWindows } from 'windows-iana';
import { createDeferred } from '../../../../modules/util/helpers';
import { parseStandardURIString, parseURLParams } from '../../base/util';
import { createDeferred, parseStandardURIString, parseURLParams } from '../../base/util';
import { getShareInfoText } from '../../invite';
import { setCalendarAPIAuthState } from '../actions';

View File

@ -1,5 +1,7 @@
import { IStore } from '../app/types';
import { executeTrackOperation } from '../base/tracks/actions';
import { getLocalJitsiAudioTrack } from '../base/tracks/functions';
import { TrackOperationType } from '../base/tracks/types';
import { showErrorNotification } from '../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../notifications/constants';
import { NoiseSuppressionEffect } from '../stream-effects/noise-suppression/NoiseSuppressionEffect';
@ -48,30 +50,32 @@ export function toggleNoiseSuppression(): any {
*/
export function setNoiseSuppressionEnabled(enabled: boolean): any {
return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
const localAudio = getLocalJitsiAudioTrack(state);
const noiseSuppressionEnabled = isNoiseSuppressionEnabled(state);
logger.info(`Attempting to set noise suppression enabled state: ${enabled}`);
try {
if (enabled && !noiseSuppressionEnabled) {
if (!canEnableNoiseSuppression(state, dispatch, localAudio)) {
return;
await dispatch(executeTrackOperation(TrackOperationType.Audio, async () => {
const state = getState();
const localAudio = getLocalJitsiAudioTrack(state);
const noiseSuppressionEnabled = isNoiseSuppressionEnabled(state);
if (enabled && !noiseSuppressionEnabled) {
if (!canEnableNoiseSuppression(state, dispatch, localAudio)) {
return;
}
await localAudio.setEffect(new NoiseSuppressionEffect());
dispatch(setNoiseSuppressionEnabledState(true));
logger.info('Noise suppression enabled.');
} else if (!enabled && noiseSuppressionEnabled) {
await localAudio.setEffect(undefined);
dispatch(setNoiseSuppressionEnabledState(false));
logger.info('Noise suppression disabled.');
} else {
logger.warn(`Noise suppression enabled state already: ${enabled}`);
}
await localAudio.setEffect(new NoiseSuppressionEffect());
dispatch(setNoiseSuppressionEnabledState(true));
logger.info('Noise suppression enabled.');
} else if (!enabled && noiseSuppressionEnabled) {
await localAudio.setEffect(undefined);
dispatch(setNoiseSuppressionEnabledState(false));
logger.info('Noise suppression disabled.');
} else {
logger.warn(`Noise suppression enabled state already: ${enabled}`);
}
}));
} catch (error) {
logger.error(
`Failed to set noise suppression enabled to: ${enabled}`,

View File

@ -9,12 +9,14 @@ import { MEDIA_TYPE } from '../base/media/constants';
import { isVideoMutedByUser } from '../base/media/functions';
import { updateSettings } from '../base/settings/actions';
import { replaceLocalTrack, trackAdded } from '../base/tracks/actions';
import { executeTrackOperation } from '../base/tracks/actions.web';
import {
createLocalTracksF,
getLocalAudioTrack,
getLocalTracks,
getLocalVideoTrack
} from '../base/tracks/functions';
import { TrackOperationType } from '../base/tracks/types';
import { openURLInBrowser } from '../base/util/openURLInBrowser';
// eslint-disable-next-line lines-around-comment
// @ts-ignore
@ -228,31 +230,35 @@ export function joinConference(options?: Object, ignoreJoiningInProgress = false
dispatch(setJoiningInProgress(true));
}
const state = getState();
let localTracks = getLocalTracks(state['features/base/tracks']);
options && dispatch(updateConfig(options));
// Do not signal audio/video tracks if the user joins muted.
for (const track of localTracks) {
// Always add the audio track on Safari because of a known issue where audio playout doesn't happen
// if the user joins audio and video muted.
if (track.muted
&& !(browser.isWebKitBased() && track.jitsiTrack && track.jitsiTrack.getType() === MEDIA_TYPE.AUDIO)) {
try {
await dispatch(replaceLocalTrack(track.jitsiTrack, null));
} catch (error) {
logger.error(`Failed to replace local track (${track.jitsiTrack}) with null: ${error}`);
const jitsiTracks = await dispatch(executeTrackOperation(TrackOperationType.AudioVideo, async () => {
const state = getState();
let localTracks = getLocalTracks(state['features/base/tracks']);
// Do not signal audio/video tracks if the user joins muted.
for (const track of localTracks) {
// Always add the audio track on Safari because of a known issue where audio playout doesn't happen
// if the user joins audio and video muted.
if (track.muted
&& !(browser.isWebKitBased()
&& track.jitsiTrack
&& track.jitsiTrack.getType() === MEDIA_TYPE.AUDIO)) {
try {
await dispatch(replaceLocalTrack(track.jitsiTrack, null));
} catch (error) {
logger.error(`Failed to replace local track (${track.jitsiTrack}) with null: ${error}`);
}
}
}
}
// Re-fetch the local tracks after muted tracks have been removed above.
// This is needed, because the tracks are effectively disposed by the replaceLocalTrack and should not be used
// anymore.
localTracks = getLocalTracks(getState()['features/base/tracks']);
// Re-fetch the local tracks after muted tracks have been removed above.
// This is needed, because the tracks are effectively disposed by the
// replaceLocalTrack and should not be used anymore.
localTracks = getLocalTracks(getState()['features/base/tracks']);
const jitsiTracks = localTracks.map((t: any) => t.jitsiTrack);
return localTracks.map((t: any) => t.jitsiTrack);
}));
APP.conference.prejoinStart(jitsiTracks);
};
@ -288,16 +294,19 @@ export function joinConferenceWithoutAudio() {
}
dispatch(setJoiningInProgress(true));
const tracks = state['features/base/tracks'];
const audioTrack = getLocalAudioTrack(tracks)?.jitsiTrack;
if (audioTrack) {
try {
await dispatch(replaceLocalTrack(audioTrack, null));
} catch (error) {
logger.error(`Failed to replace local audio with null: ${error}`);
await dispatch(executeTrackOperation(TrackOperationType.Audio, async () => {
const tracks = getState()['features/base/tracks'];
const audioTrack = getLocalAudioTrack(tracks)?.jitsiTrack;
if (audioTrack) {
try {
await dispatch(replaceLocalTrack(audioTrack, null));
} catch (error) {
logger.error(`Failed to replace local audio with null: ${error}`);
}
}
}
}));
dispatch(joinConference({
startSilent: true
@ -357,17 +366,19 @@ function prejoinInitialized() {
export function replaceAudioTrackById(deviceId: string) {
return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
try {
const tracks = getState()['features/base/tracks'];
const newTrack = await createLocalTrack('audio', deviceId);
const oldTrack = getLocalAudioTrack(tracks)?.jitsiTrack;
const micDeviceId = newTrack.getDeviceId();
await dispatch(executeTrackOperation(TrackOperationType.Audio, async () => {
const tracks = getState()['features/base/tracks'];
const newTrack = await createLocalTrack('audio', deviceId);
const oldTrack = getLocalAudioTrack(tracks)?.jitsiTrack;
const micDeviceId = newTrack.getDeviceId();
logger.info(`Switching audio input device to ${micDeviceId}`);
dispatch(replaceLocalTrack(oldTrack, newTrack)).then(() => {
dispatch(updateSettings({
micDeviceId
}));
});
logger.info(`Switching audio input device to ${micDeviceId}`);
await dispatch(replaceLocalTrack(oldTrack, newTrack)).then(() => {
dispatch(updateSettings({
micDeviceId
}));
});
}));
} catch (err) {
dispatch(setDeviceStatusWarning('prejoin.audioTrackError'));
logger.log('Error replacing audio track', err);
@ -384,24 +395,28 @@ export function replaceAudioTrackById(deviceId: string) {
export function replaceVideoTrackById(deviceId: string) {
return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
try {
const tracks = getState()['features/base/tracks'];
const wasVideoMuted = isVideoMutedByUser(getState());
const [ newTrack ] = await createLocalTracksF(
{ cameraDeviceId: deviceId,
devices: [ 'video' ] },
{ dispatch,
getState }
);
const oldTrack = getLocalVideoTrack(tracks)?.jitsiTrack;
const cameraDeviceId = newTrack.getDeviceId();
await dispatch(executeTrackOperation(TrackOperationType.Video, async () => {
const tracks = getState()['features/base/tracks'];
const wasVideoMuted = isVideoMutedByUser(getState());
const [ newTrack ] = await createLocalTracksF(
{ cameraDeviceId: deviceId,
devices: [ 'video' ] },
{ dispatch,
getState }
);
const oldTrack = getLocalVideoTrack(tracks)?.jitsiTrack;
const cameraDeviceId = newTrack.getDeviceId();
logger.info(`Switching camera to ${cameraDeviceId}`);
dispatch(replaceLocalTrack(oldTrack, newTrack)).then(() => {
dispatch(updateSettings({
cameraDeviceId
}));
});
wasVideoMuted && newTrack.mute();
logger.info(`Switching camera to ${cameraDeviceId}`);
await dispatch(replaceLocalTrack(oldTrack, newTrack)).then(() => {
dispatch(updateSettings({
cameraDeviceId
}));
});
if (wasVideoMuted) {
await newTrack.mute();
}
}));
} catch (err) {
dispatch(setDeviceStatusWarning('prejoin.videoTrackError'));
logger.log('Error replacing video track', err);

View File

@ -6,8 +6,10 @@ import { IReduxState } from '../../../app/types';
import { IJitsiConference } from '../../../base/conference/reducer';
import { JitsiRecordingConstants } from '../../../base/lib-jitsi-meet';
import { setVideoMuted } from '../../../base/media/actions';
import { executeTrackOperation } from '../../../base/tracks/actions';
import { TrackOperationType } from '../../../base/tracks/types';
import { stopLocalVideoRecording } from '../../actions';
import { getActiveSession } from '../../functions';
import { getActiveSession } from '../../functions.any';
import LocalRecordingManager from './LocalRecordingManager';
@ -78,18 +80,16 @@ export default class AbstractStopRecordingDialog<P extends IProps>
_onSubmit() {
sendAnalytics(createRecordingDialogEvent('stop', 'confirm.button'));
if (this.props._localRecording) {
this.props.dispatch(stopLocalVideoRecording());
if (this.props.localRecordingVideoStop) {
this.props.dispatch(setVideoMuted(true));
}
} else {
const { _fileRecordingSession } = this.props;
const { _conference, _fileRecordingSession, _localRecording, dispatch, localRecordingVideoStop } = this.props;
if (_fileRecordingSession) { // @ts-ignore
this.props._conference.stopRecording(_fileRecordingSession.id);
this._toggleScreenshotCapture();
if (_localRecording) {
dispatch(stopLocalVideoRecording());
if (localRecordingVideoStop) {
dispatch(executeTrackOperation(TrackOperationType.Video, () => dispatch(setVideoMuted(true))));
}
} else if (_fileRecordingSession) { // @ts-ignore
_conference.stopRecording(_fileRecordingSession.id);
this._toggleScreenshotCapture();
}
return true;

View File

@ -0,0 +1,14 @@
import { IStore } from '../app/types';
export * from './functions.any';
/**
 * Checks whether muting the video needs to be interrupted because of an active
 * local recording. Local self-recording is not available on this platform, so
 * the mute never needs to be stopped.
 *
 * @param {boolean} _muted - The new mute state (unused on this platform).
 * @param {Function} _dispatch - The redux dispatch function (unused on this platform).
 * @returns {boolean} - Always false.
 */
export function maybeStopMuteBecauseOfLocalRecording(_muted: boolean, _dispatch: IStore['dispatch']): boolean {
    return false;
}

View File

@ -0,0 +1,37 @@
export * from './functions.any';
import { IStore } from '../app/types';
import { openDialog } from '../base/dialog/actions';
import { showNotification } from '../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../notifications/constants';
import LocalRecordingManager from './components/Recording/LocalRecordingManager.web';
// eslint-disable-next-line lines-around-comment
// @ts-ignore
import StopRecordingDialog from './components/Recording/web/StopRecordingDialog';
/**
 * Checks if the video mute operation needs to be stopped and opens the stop
 * local recording dialog or shows the localRecordingNoVideo notification.
 *
 * @param {boolean} muted - The new mute state.
 * @param {Function} dispatch - The redux dispatch function.
 * @returns {boolean} - True when the mute must be stopped (the user is asked to
 * confirm via the stop-recording dialog first), false otherwise.
 */
export function maybeStopMuteBecauseOfLocalRecording(muted: boolean, dispatch: IStore['dispatch']) {
    const selfRecordingActive
        = LocalRecordingManager.isRecordingLocally() && LocalRecordingManager.selfRecording.on;

    if (!selfRecordingActive) {
        return false;
    }

    const { withVideo } = LocalRecordingManager.selfRecording;

    if (muted && withVideo) {
        // Muting would remove the video from the ongoing self recording, so ask
        // the user whether to stop the recording first.
        dispatch(openDialog(StopRecordingDialog, { localRecordingVideoStop: true }));

        return true;
    }

    if (!muted && !withVideo) {
        // Unmuting will not add video to an audio-only self recording; let the
        // user know.
        dispatch(showNotification({
            titleKey: 'recording.localRecordingNoVideo',
            descriptionKey: 'recording.localRecordingVideoWarning',
            uid: 'recording.localRecordingNoVideo'
        }, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
    }

    return false;
}

View File

@ -2,7 +2,7 @@
import { JitsiRecordingConstants } from '../base/lib-jitsi-meet';
import { toState } from '../base/redux';
import { getActiveSession } from '../recording/functions';
import { getActiveSession } from '../recording/functions.any';
import { isScreenVideoShared } from '../screen-share/functions';
import ScreenshotCaptureSummary from './ScreenshotCaptureSummary';

View File

@ -7,6 +7,8 @@ import { raiseHand } from '../base/participants/actions';
import { getLocalParticipant } from '../base/participants/functions';
import MiddlewareRegistry from '../base/redux/MiddlewareRegistry';
import { playSound, registerSound, unregisterSound } from '../base/sounds/actions';
import { executeTrackOperation } from '../base/tracks/actions';
import { TrackOperationType } from '../base/tracks/types';
import { hideNotification, showNotification } from '../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../notifications/constants';
import { isForceMuted } from '../participants-pane/functions';
@ -51,7 +53,15 @@ MiddlewareRegistry.register(store => next => action => {
const notification = await dispatch(showNotification({
titleKey: 'toolbar.talkWhileMutedPopup',
customActionNameKey: [ forceMuted ? 'notify.raiseHandAction' : 'notify.unmute' ],
customActionHandler: [ () => dispatch(forceMuted ? raiseHand(true) : setAudioMuted(false)) ]
customActionHandler: [ () => {
if (forceMuted) {
dispatch(raiseHand(true));
} else {
dispatch(
executeTrackOperation(
TrackOperationType.Audio, () => dispatch(setAudioMuted(false))));
}
} ]
}, NOTIFICATION_TIMEOUT_TYPE.LONG));
const { soundsTalkWhileMuted } = getState()['features/base/settings'];

View File

@ -6,6 +6,8 @@ import { IStore } from '../app/types';
import { setAudioOnly } from '../base/audio-only/actions';
import { setVideoMuted } from '../base/media/actions';
import { MEDIA_TYPE, VIDEO_MUTISM_AUTHORITY } from '../base/media/constants';
import { executeTrackOperation } from '../base/tracks/actions';
import { TrackOperationType } from '../base/tracks/types';
import {
SET_TOOLBOX_ENABLED,
@ -85,20 +87,21 @@ export function toggleToolboxVisible() {
*/
export function handleToggleVideoMuted(muted: boolean, showUI: boolean, ensureTrack: boolean) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
const { enabled: audioOnly } = state['features/base/audio-only'];
const { enabled: audioOnly } = getState()['features/base/audio-only'];
sendAnalytics(createToolbarEvent(VIDEO_MUTE, { enable: muted }));
if (audioOnly) {
dispatch(setAudioOnly(false));
}
dispatch(
setVideoMuted(
muted,
MEDIA_TYPE.VIDEO,
VIDEO_MUTISM_AUTHORITY.USER,
ensureTrack));
dispatch(executeTrackOperation(TrackOperationType.Video, () =>
dispatch(
setVideoMuted(
muted,
MEDIA_TYPE.VIDEO,
VIDEO_MUTISM_AUTHORITY.USER,
ensureTrack))
));
// FIXME: The old conference logic still relies on this event being
// emitted.

View File

@ -14,7 +14,8 @@ import { setAudioMuted, setVideoMuted } from '../base/media/actions';
import { MEDIA_TYPE, MediaType, VIDEO_MUTISM_AUTHORITY } from '../base/media/constants';
import { muteRemoteParticipant } from '../base/participants/actions';
import { getLocalParticipant, getRemoteParticipants } from '../base/participants/functions';
import { toggleScreensharing } from '../base/tracks/actions';
import { executeTrackOperation, toggleScreensharing } from '../base/tracks/actions';
import { TrackOperationType } from '../base/tracks/types';
import { isModerationNotificationDisplayed } from '../notifications/functions';
import logger from './logger';
@ -51,8 +52,12 @@ export function muteLocal(enable: boolean, mediaType: MediaType, stopScreenShari
}
sendAnalytics(createToolbarEvent(isAudio ? AUDIO_MUTE : VIDEO_MUTE, { enable }));
dispatch(isAudio ? setAudioMuted(enable, /* ensureTrack */ true)
: setVideoMuted(enable, mediaType, VIDEO_MUTISM_AUTHORITY.USER, /* ensureTrack */ true));
const trackOpType = isAudio ? TrackOperationType.Audio : TrackOperationType.Video;
dispatch(executeTrackOperation(trackOpType, () =>
dispatch(isAudio ? setAudioMuted(enable, /* ensureTrack */ true)
: setVideoMuted(enable, mediaType, VIDEO_MUTISM_AUTHORITY.USER, /* ensureTrack */ true))));
// FIXME: The old conference logic still relies on this event being emitted.
typeof APP === 'undefined'

View File

@ -1,4 +1,7 @@
import { IStore } from '../app/types';
import { executeTrackOperation } from '../base/tracks/actions';
import { getLocalJitsiVideoTrack } from '../base/tracks/functions.any';
import { TrackOperationType } from '../base/tracks/types';
// eslint-disable-next-line lines-around-comment
// @ts-ignore
import { createVirtualBackgroundEffect } from '../stream-effects/virtual-background';
@ -37,6 +40,23 @@ export function toggleBackgroundEffect(options: IVirtualBackgroundOptions, jitsi
};
}
/**
 * Adds a track operation to enable/disable the virtual background for the
 * local video.
 *
 * @param {Object} options - Represents the virtual background set options.
 * @returns {Function}
 */
export function toggleBackgroundEffectForTheLocalTrack(options: IVirtualBackgroundOptions) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) =>
        dispatch(executeTrackOperation(TrackOperationType.Video, () => {
            // Resolve the track inside the queued operation so it is the
            // current local video track at the moment the operation runs.
            const track = getLocalJitsiVideoTrack(getState());

            return dispatch(toggleBackgroundEffect(options, track));
        }));
}
/**
* Sets the selected virtual background image object.
*

View File

@ -18,9 +18,8 @@ import { connect } from '../../base/redux/functions';
import { updateSettings } from '../../base/settings/actions';
// @ts-ignore
import { Tooltip } from '../../base/tooltip';
import { getLocalVideoTrack } from '../../base/tracks/functions';
import Dialog from '../../base/ui/components/web/Dialog';
import { toggleBackgroundEffect } from '../actions';
import { toggleBackgroundEffectForTheLocalTrack } from '../actions';
import { BACKGROUNDS_LIMIT, IMAGES, type Image, VIRTUAL_BACKGROUND_TYPE } from '../constants';
import { toDataURL } from '../functions';
import logger from '../logger';
@ -36,11 +35,6 @@ interface IProps extends WithTranslation {
*/
_images: Array<Image>;
/**
 * Returns the jitsi track that will have background effect applied.
*/
_jitsiTrack: Object;
/**
* The current local flip x status.
*/
@ -104,7 +98,6 @@ function _mapStateToProps(state: IReduxState): Object {
_virtualBackground: state['features/virtual-background'],
_selectedThumbnail: state['features/virtual-background'].selectedThumbnail,
_showUploadButton: !(hasBrandingImages || state['features/base/config'].disableAddingBackgroundImages),
_jitsiTrack: getLocalVideoTrack(state['features/base/tracks'])?.jitsiTrack,
_multiStreamModeEnabled: getMultipleVideoSendingSupportFeatureFlag(state)
};
}
@ -272,7 +265,6 @@ const useStyles = makeStyles()(theme => {
*/
function VirtualBackground({
_images,
_jitsiTrack,
_localFlipX,
_selectedThumbnail,
_showUploadButton,
@ -422,7 +414,7 @@ function VirtualBackground({
const applyVirtualBackground = useCallback(async () => {
setLoading(true);
await dispatch(toggleBackgroundEffect(options, _jitsiTrack));
await dispatch(toggleBackgroundEffectForTheLocalTrack(options));
await setLoading(false);
// Set x scale to default value.