feat(multi-stream-support) Add screenshare as a second video track to the call.
* feat(multi-stream-support) Add screenshare as a second video track to the call. This feature is behind a sendMultipleVideoStreams config.js flag. The sourceNameSignaling flag also needs to be enabled. Sending multiple tracks is currently supported only on endpoints running in unified plan mode. However, clients with source-name signaling enabled and running in plan-b can still receive multiple streams. * squash: check if there is an existing track before adding camera/desktop * squash: enable multi-stream only on unified plan endpoints.
This commit is contained in:
parent
5f1a4f189c
commit
9f72c318d6
|
@ -52,7 +52,7 @@ import {
|
|||
sendLocalParticipant,
|
||||
nonParticipantMessageReceived
|
||||
} from './react/features/base/conference';
|
||||
import { getReplaceParticipant } from './react/features/base/config/functions';
|
||||
import { getReplaceParticipant, getMultipleVideoSupportFeatureFlag } from './react/features/base/config/functions';
|
||||
import {
|
||||
checkAndNotifyForNewDevice,
|
||||
getAvailableDevices,
|
||||
|
@ -106,6 +106,7 @@ import {
|
|||
updateSettings
|
||||
} from './react/features/base/settings';
|
||||
import {
|
||||
addLocalTrack,
|
||||
createLocalPresenterTrack,
|
||||
createLocalTracksF,
|
||||
destroyLocalTracks,
|
||||
|
@ -1444,11 +1445,13 @@ export default {
|
|||
* @returns {Promise}
|
||||
*/
|
||||
useVideoStream(newTrack) {
|
||||
const state = APP.store.getState();
|
||||
|
||||
logger.debug(`useVideoStream: ${newTrack}`);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
_replaceLocalVideoTrackQueue.enqueue(onFinish => {
|
||||
const oldTrack = getLocalJitsiVideoTrack(APP.store.getState());
|
||||
const oldTrack = getLocalJitsiVideoTrack(state);
|
||||
|
||||
logger.debug(`useVideoStream: Replacing ${oldTrack} with ${newTrack}`);
|
||||
|
||||
|
@ -1459,6 +1462,26 @@ export default {
|
|||
return;
|
||||
}
|
||||
|
||||
// In the multi-stream mode, add the track to the conference if there is no existing track, replace it
|
||||
// otherwise.
|
||||
if (getMultipleVideoSupportFeatureFlag(state)) {
|
||||
const trackAction = oldTrack
|
||||
? replaceLocalTrack(oldTrack, newTrack, room)
|
||||
: addLocalTrack(newTrack);
|
||||
|
||||
APP.store.dispatch(trackAction)
|
||||
.then(() => {
|
||||
this.setVideoMuteStatus();
|
||||
})
|
||||
.then(resolve)
|
||||
.catch(error => {
|
||||
logger.error(`useVideoStream failed: ${error}`);
|
||||
reject(error);
|
||||
})
|
||||
.then(onFinish);
|
||||
|
||||
return;
|
||||
}
|
||||
APP.store.dispatch(
|
||||
replaceLocalTrack(oldTrack, newTrack, room))
|
||||
.then(() => {
|
||||
|
|
|
@ -26,6 +26,7 @@ export const CS_MODERATION_NOTIFICATION_ID = 'screensharing-moderation';
|
|||
|
||||
/**
 * Maps each media type to the id of the A/V moderation notification shown for
 * it. Screenshare and presenter both map to the content-sharing notification.
 */
export const MODERATION_NOTIFICATIONS = {
    [MEDIA_TYPE.AUDIO]: AUDIO_MODERATION_NOTIFICATION_ID,
    [MEDIA_TYPE.PRESENTER]: CS_MODERATION_NOTIFICATION_ID,
    [MEDIA_TYPE.SCREENSHARE]: CS_MODERATION_NOTIFICATION_ID,
    [MEDIA_TYPE.VIDEO]: VIDEO_MODERATION_NOTIFICATION_ID
};
|
||||
|
|
|
@ -1,13 +1,30 @@
|
|||
// @flow
|
||||
|
||||
import { AUDIO_ONLY_SCREEN_SHARE_NO_TRACK } from '../../../../modules/UI/UIErrors';
|
||||
import { showNotification, NOTIFICATION_TIMEOUT_TYPE } from '../../notifications';
|
||||
import { setSkipPrejoinOnReload } from '../../prejoin';
|
||||
import { JitsiConferenceErrors } from '../lib-jitsi-meet';
|
||||
import { setScreenAudioShareState, setScreenshareAudioTrack } from '../../screen-share';
|
||||
import { AudioMixerEffect } from '../../stream-effects/audio-mixer/AudioMixerEffect';
|
||||
import { setAudioOnly } from '../audio-only';
|
||||
import { getMultipleVideoSupportFeatureFlag } from '../config/functions.any';
|
||||
import { JitsiConferenceErrors, JitsiTrackErrors } from '../lib-jitsi-meet';
|
||||
import { MEDIA_TYPE, setScreenshareMuted, VIDEO_TYPE } from '../media';
|
||||
import { MiddlewareRegistry } from '../redux';
|
||||
import {
|
||||
addLocalTrack,
|
||||
createLocalTracksF,
|
||||
getLocalDesktopTrack,
|
||||
getLocalJitsiAudioTrack,
|
||||
replaceLocalTrack,
|
||||
TOGGLE_SCREENSHARING
|
||||
} from '../tracks';
|
||||
|
||||
import { CONFERENCE_FAILED, CONFERENCE_JOINED } from './actionTypes';
|
||||
import { getCurrentConference } from './functions';
|
||||
import './middleware.any';
|
||||
|
||||
MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
|
||||
MiddlewareRegistry.register(store => next => action => {
|
||||
const { dispatch, getState } = store;
|
||||
const { enableForcedReload } = getState()['features/base/config'];
|
||||
|
||||
switch (action.type) {
|
||||
|
@ -25,7 +42,153 @@ MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
|
|||
|
||||
break;
|
||||
}
|
||||
case TOGGLE_SCREENSHARING: {
|
||||
getMultipleVideoSupportFeatureFlag(getState()) && _toggleScreenSharing(action, store);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return next(action);
|
||||
});
|
||||
|
||||
/**
 * Displays a UI notification for screensharing failure based on the error passed.
 *
 * @private
 * @param {Object} error - The error (a JitsiTrackError or the
 * {@code AUDIO_ONLY_SCREEN_SHARE_NO_TRACK} sentinel).
 * @param {Object} store - The redux store.
 * @returns {void}
 */
function _handleScreensharingError(error, { dispatch }) {
    // The user dismissing the screen picker is not a failure - stay silent.
    if (error.name === JitsiTrackErrors.SCREENSHARING_USER_CANCELED) {
        return;
    }

    // Default to the generic screensharing-failure strings so that an
    // unrecognized error never produces a notification with undefined
    // title/description keys (previously such errors showed an empty
    // notification).
    let descriptionKey = 'dialog.screenSharingFailed';
    let titleKey = 'dialog.screenSharingFailedTitle';

    if (error.name === JitsiTrackErrors.PERMISSION_DENIED) {
        descriptionKey = 'dialog.screenSharingPermissionDeniedError';
        titleKey = 'dialog.screenSharingFailedTitle';
    } else if (error.name === JitsiTrackErrors.CONSTRAINT_FAILED) {
        descriptionKey = 'dialog.cameraConstraintFailedError';
        titleKey = 'deviceError.cameraError';
    } else if (error === AUDIO_ONLY_SCREEN_SHARE_NO_TRACK) {
        descriptionKey = 'notify.screenShareNoAudio';
        titleKey = 'notify.screenShareNoAudioTitle';
    }

    dispatch(showNotification({
        titleKey,
        descriptionKey
    }, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
}
|
||||
|
||||
/**
 * Applies the AudioMixer effect on the local audio track if one exists;
 * otherwise publishes the desktop audio track to the conference directly.
 *
 * @private
 * @param {JitsiLocalTrack} desktopAudioTrack - The desktop audio track captured
 * by the screen-sharing API.
 * @param {*} state - The redux state.
 * @returns {Promise<void>}
 */
async function _maybeApplyAudioMixerEffect(desktopAudioTrack, state) {
    const micTrack = getLocalJitsiAudioTrack(state);

    if (micTrack) {
        // A microphone track exists: mix the desktop audio stream into it.
        await micTrack.setEffect(new AudioMixerEffect(desktopAudioTrack));

        return;
    }

    // No local audio input (i.e. no input audio devices) - use the screen
    // share audio stream as we would use a regular audio track.
    await getCurrentConference(state).replaceTrack(null, desktopAudioTrack);
}
|
||||
|
||||
/**
 * Toggles screen sharing.
 *
 * @private
 * @param {boolean} enabled - The state to toggle screen sharing to.
 * @param {Store} store - The redux store.
 * @returns {Promise<void>}
 */
async function _toggleScreenSharing({ enabled, audioOnly = false }, store) {
    const { dispatch, getState } = store;
    const state = getState();
    const conference = getCurrentConference(state);
    const localAudio = getLocalJitsiAudioTrack(state);
    const localScreenshare = getLocalDesktopTrack(state['features/base/tracks']);

    if (enabled) {
        let tracks;

        try {
            tracks = await createLocalTracksF({ devices: [ VIDEO_TYPE.DESKTOP ] });
        } catch (error) {
            _handleScreensharingError(error, store);

            return;
        }

        const desktopAudioTrack = tracks.find(track => track.getType() === MEDIA_TYPE.AUDIO);
        const desktopVideoTrack = tracks.find(track => track.getType() === MEDIA_TYPE.VIDEO);

        if (audioOnly) {
            // Dispose the desktop video track for audio-only screensharing.
            // Fix: await the dispose so a failure surfaces here instead of
            // becoming an unhandled promise rejection.
            await desktopVideoTrack.dispose();

            if (!desktopAudioTrack) {
                _handleScreensharingError(AUDIO_ONLY_SCREEN_SHARE_NO_TRACK, store);

                return;
            }
        } else if (desktopVideoTrack) {
            // Replace the existing screenshare sender if there is one,
            // otherwise add the desktop track as a new (second) video track.
            if (localScreenshare) {
                await dispatch(replaceLocalTrack(localScreenshare.jitsiTrack, desktopVideoTrack, conference));
            } else {
                await dispatch(addLocalTrack(desktopVideoTrack));
            }
        }

        // Apply the AudioMixer effect if there is a local audio track, add the desktop track to the conference
        // otherwise without unmuting the microphone.
        // Fix: await the async helper so failures propagate and the audio track
        // is published before it is recorded in the store.
        if (desktopAudioTrack) {
            await _maybeApplyAudioMixerEffect(desktopAudioTrack, state);
            dispatch(setScreenshareAudioTrack(desktopAudioTrack));
        }

        // Disable audio-only or best performance mode if the user starts screensharing. This doesn't apply to
        // audio-only screensharing.
        const { enabled: bestPerformanceMode } = state['features/base/audio-only'];

        if (bestPerformanceMode && !audioOnly) {
            dispatch(setAudioOnly(false));
        }
    } else {
        const { desktopAudioTrack } = state['features/screen-share'];

        // Mute the desktop track instead of removing it from the conference since we don't want the client to signal
        // a source-remove to the remote peer for the screenshare track. Later when screenshare is enabled again, the
        // same sender will be re-used without the need for signaling a new ssrc through source-add.
        dispatch(setScreenshareMuted(true));

        if (desktopAudioTrack) {
            if (localAudio) {
                // Fix: await the effect removal so the mixer is detached before
                // the desktop audio track is disposed below.
                await localAudio.setEffect(undefined);
            } else {
                await conference.replaceTrack(desktopAudioTrack, null);
            }

            // Fix: await the dispose so cleanup errors are surfaced instead of
            // becoming unhandled rejections.
            await desktopAudioTrack.dispose();
            dispatch(setScreenshareAudioTrack(null));
        }
    }

    if (audioOnly) {
        dispatch(setScreenAudioShareState(enabled));
    }
}
|
||||
|
|
|
@ -67,5 +67,6 @@ export const THIRD_PARTY_PREJOIN_BUTTONS = [ 'microphone', 'camera', 'select-bac
|
|||
*/
|
||||
|
||||
/**
 * The config.js keys backing client feature flags.
 */
export const FEATURE_FLAGS = {
    // Enables sending the screenshare as a second, separate video track.
    MULTIPLE_VIDEO_STREAMS_SUPPORT: 'sendMultipleVideoStreams',

    // Enables source-name based signaling.
    SOURCE_NAME_SIGNALING: 'sourceNameSignaling'
};
|
||||
|
|
|
@ -4,6 +4,7 @@ import Bourne from '@hapi/bourne';
|
|||
import { jitsiLocalStorage } from '@jitsi/js-utils';
|
||||
import _ from 'lodash';
|
||||
|
||||
import { browser } from '../lib-jitsi-meet';
|
||||
import { parseURLParams } from '../util';
|
||||
|
||||
import CONFIG_WHITELIST from './configWhitelist';
|
||||
|
@ -49,6 +50,18 @@ export function getMeetingRegion(state: Object) {
|
|||
return state['features/base/config']?.deploymentInfo?.region || '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Selector used to get the sendMultipleVideoStreams feature flag.
|
||||
*
|
||||
* @param {Object} state - The global state.
|
||||
* @returns {boolean}
|
||||
*/
|
||||
export function getMultipleVideoSupportFeatureFlag(state: Object) {
|
||||
return getFeatureFlag(state, FEATURE_FLAGS.MULTIPLE_VIDEO_STREAMS_SUPPORT)
|
||||
&& getSourceNameSignalingFeatureFlag(state)
|
||||
&& isUnifiedPlanEnabled(state);
|
||||
}
|
||||
|
||||
/**
|
||||
* Selector used to get the sourceNameSignaling feature flag.
|
||||
*
|
||||
|
@ -196,6 +209,19 @@ export function isDisplayNameVisible(state: Object): boolean {
|
|||
return !state['features/base/config'].hideDisplayName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Selector for determining if Unified plan support is enabled.
|
||||
*
|
||||
* @param {Object} state - The state of the app.
|
||||
* @returns {boolean}
|
||||
*/
|
||||
export function isUnifiedPlanEnabled(state: Object): boolean {
|
||||
const { enableUnifiedOnChrome = true } = state['features/base/config'];
|
||||
|
||||
return browser.supportsUnifiedPlan()
|
||||
&& (!browser.isChromiumBased() || (browser.isChromiumBased() && enableUnifiedOnChrome));
|
||||
}
|
||||
|
||||
/**
|
||||
* Restores a Jitsi Meet config.js from {@code localStorage} if it was
|
||||
* previously downloaded from a specific {@code baseURL} and stored with
|
||||
|
|
|
@ -40,6 +40,16 @@ export const SET_AUDIO_UNMUTE_PERMISSIONS = 'SET_AUDIO_UNMUTE_PERMISSIONS';
|
|||
*/
|
||||
export const SET_CAMERA_FACING_MODE = 'SET_CAMERA_FACING_MODE';
|
||||
|
||||
/**
 * The type of (redux) action to set the muted state of the local screenshare
 * track.
 *
 * {
 *     type: SET_SCREENSHARE_MUTED,
 *     muted: boolean
 * }
 */
export const SET_SCREENSHARE_MUTED = 'SET_SCREENSHARE_MUTED';
|
||||
|
||||
/**
|
||||
* The type of (redux) action to adjust the availability of the local video.
|
||||
*
|
||||
|
|
|
@ -11,6 +11,7 @@ import {
|
|||
SET_AUDIO_AVAILABLE,
|
||||
SET_AUDIO_UNMUTE_PERMISSIONS,
|
||||
SET_CAMERA_FACING_MODE,
|
||||
SET_SCREENSHARE_MUTED,
|
||||
SET_VIDEO_AVAILABLE,
|
||||
SET_VIDEO_MUTED,
|
||||
SET_VIDEO_UNMUTE_PERMISSIONS,
|
||||
|
@ -20,6 +21,7 @@ import {
|
|||
import {
|
||||
MEDIA_TYPE,
|
||||
type MediaType,
|
||||
SCREENSHARE_MUTISM_AUTHORITY,
|
||||
VIDEO_MUTISM_AUTHORITY
|
||||
} from './constants';
|
||||
|
||||
|
@ -92,6 +94,47 @@ export function setCameraFacingMode(cameraFacingMode: string) {
|
|||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Action to set the muted state of the local screenshare.
|
||||
*
|
||||
* @param {boolean} muted - True if the local screenshare is to be enabled or false otherwise.
|
||||
* @param {MEDIA_TYPE} mediaType - The type of media.
|
||||
* @param {number} authority - The {@link SCREENSHARE_MUTISM_AUTHORITY} which is muting/unmuting the local screenshare.
|
||||
* @param {boolean} ensureTrack - True if we want to ensure that a new track is created if missing.
|
||||
* @returns {Function}
|
||||
*/
|
||||
export function setScreenshareMuted(
|
||||
muted: boolean,
|
||||
mediaType: MediaType = MEDIA_TYPE.SCREENSHARE,
|
||||
authority: number = SCREENSHARE_MUTISM_AUTHORITY.USER,
|
||||
ensureTrack: boolean = false) {
|
||||
return (dispatch: Dispatch<any>, getState: Function) => {
|
||||
const state = getState();
|
||||
|
||||
// check for A/V Moderation when trying to unmute
|
||||
if (!muted && shouldShowModeratedNotification(MEDIA_TYPE.SCREENSHARE, state)) {
|
||||
if (!isModerationNotificationDisplayed(MEDIA_TYPE.SCREENSHARE, state)) {
|
||||
ensureTrack && dispatch(showModeratedNotification(MEDIA_TYPE.SCREENSHARE));
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const oldValue = state['features/base/media'].screenshare.muted;
|
||||
|
||||
// eslint-disable-next-line no-bitwise
|
||||
const newValue = muted ? oldValue | authority : oldValue & ~authority;
|
||||
|
||||
return dispatch({
|
||||
type: SET_SCREENSHARE_MUTED,
|
||||
authority,
|
||||
mediaType,
|
||||
ensureTrack,
|
||||
muted: newValue
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Action to adjust the availability of the local video.
|
||||
*
|
||||
|
|
|
@ -10,7 +10,7 @@ export const CAMERA_FACING_MODE = {
|
|||
USER: 'user'
|
||||
};
|
||||
|
||||
export type MediaType = 'audio' | 'video' | 'presenter';
|
||||
export type MediaType = 'audio' | 'video' | 'presenter' | 'screenshare';
|
||||
|
||||
/**
|
||||
* The set of media types.
|
||||
|
@ -20,12 +20,23 @@ export type MediaType = 'audio' | 'video' | 'presenter';
|
|||
export const MEDIA_TYPE = {
    AUDIO: 'audio',
    PRESENTER: 'presenter',

    // Dedicated type for the desktop track sent as a second video track in
    // multi-stream mode.
    SCREENSHARE: 'screenshare',
    VIDEO: 'video'
};
|
||||
|
||||
|
||||
/* eslint-disable no-bitwise */
|
||||
|
||||
/**
 * The types of authorities which may mute/unmute the local screenshare.
 * Each authority owns one bit of the screenshare mute bitmask.
 *
 * @enum {number}
 */
export const SCREENSHARE_MUTISM_AUTHORITY = {
    AUDIO_ONLY: 1, // i.e. 1 << 0
    USER: 4 // i.e. 1 << 2; NOTE(review): bit 1 is unused here - presumably kept for parity with VIDEO_MUTISM_AUTHORITY, confirm.
};
|
||||
|
||||
/**
|
||||
* The types of authorities which may mute/unmute the local video.
|
||||
*
|
||||
|
|
|
@ -16,6 +16,7 @@ import { isForceMuted } from '../../participants-pane/functions';
|
|||
import { isScreenMediaShared } from '../../screen-share/functions';
|
||||
import { SET_AUDIO_ONLY, setAudioOnly } from '../audio-only';
|
||||
import { isRoomValid, SET_ROOM } from '../conference';
|
||||
import { getMultipleVideoSupportFeatureFlag } from '../config';
|
||||
import { getLocalParticipant } from '../participants';
|
||||
import { MiddlewareRegistry } from '../redux';
|
||||
import { getPropertyValue } from '../settings';
|
||||
|
@ -30,13 +31,20 @@ import {
|
|||
import {
|
||||
SET_AUDIO_MUTED,
|
||||
SET_AUDIO_UNMUTE_PERMISSIONS,
|
||||
SET_SCREENSHARE_MUTED,
|
||||
SET_VIDEO_MUTED,
|
||||
SET_VIDEO_UNMUTE_PERMISSIONS
|
||||
} from './actionTypes';
|
||||
import { setAudioMuted, setCameraFacingMode, setVideoMuted } from './actions';
|
||||
import {
|
||||
setAudioMuted,
|
||||
setCameraFacingMode,
|
||||
setScreenshareMuted,
|
||||
setVideoMuted
|
||||
} from './actions';
|
||||
import {
|
||||
CAMERA_FACING_MODE,
|
||||
MEDIA_TYPE,
|
||||
SCREENSHARE_MUTISM_AUTHORITY,
|
||||
VIDEO_MUTISM_AUTHORITY
|
||||
} from './constants';
|
||||
import { getStartWithAudioMuted, getStartWithVideoMuted } from './functions';
|
||||
|
@ -100,6 +108,15 @@ MiddlewareRegistry.register(store => next => action => {
|
|||
break;
|
||||
}
|
||||
|
||||
case SET_SCREENSHARE_MUTED: {
|
||||
const state = store.getState();
|
||||
const participant = getLocalParticipant(state);
|
||||
|
||||
if (!action.muted && isForceMuted(participant, MEDIA_TYPE.SCREENSHARE, state)) {
|
||||
return;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case SET_VIDEO_MUTED: {
|
||||
const state = store.getState();
|
||||
const participant = getLocalParticipant(state);
|
||||
|
@ -167,15 +184,17 @@ function _appStateChanged({ dispatch, getState }, next, action) {
|
|||
* @private
|
||||
* @returns {Object} The value returned by {@code next(action)}.
|
||||
*/
|
||||
function _setAudioOnly({ dispatch }, next, action) {
|
||||
function _setAudioOnly({ dispatch, getState }, next, action) {
|
||||
const { audioOnly, ensureVideoTrack } = action;
|
||||
const state = getState();
|
||||
|
||||
sendAnalytics(createTrackMutedEvent('video', 'audio-only mode', audioOnly));
|
||||
|
||||
// Make sure we mute both the desktop and video tracks.
|
||||
dispatch(setVideoMuted(audioOnly, MEDIA_TYPE.VIDEO, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY, ensureVideoTrack));
|
||||
|
||||
if (navigator.product !== 'ReactNative') {
|
||||
if (getMultipleVideoSupportFeatureFlag(state)) {
|
||||
dispatch(setScreenshareMuted(audioOnly, MEDIA_TYPE.SCREENSHARE, SCREENSHARE_MUTISM_AUTHORITY.AUDIO_ONLY));
|
||||
} else if (navigator.product !== 'ReactNative') {
|
||||
dispatch(setVideoMuted(audioOnly, MEDIA_TYPE.PRESENTER, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY, ensureVideoTrack));
|
||||
}
|
||||
|
||||
|
@ -292,11 +311,9 @@ function _syncTrackMutedState({ getState }, track) {
|
|||
// fired before track gets to state.
|
||||
if (track.muted !== muted) {
|
||||
sendAnalytics(createSyncTrackStateEvent(track.mediaType, muted));
|
||||
logger.log(
|
||||
`Sync ${track.mediaType} track muted state to ${
|
||||
muted ? 'muted' : 'unmuted'}`);
|
||||
logger.log(`Sync ${track.mediaType} track muted state to ${muted ? 'muted' : 'unmuted'}`);
|
||||
|
||||
track.muted = muted;
|
||||
setTrackMuted(track.jitsiTrack, muted);
|
||||
setTrackMuted(track.jitsiTrack, muted, state);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,13 +9,14 @@ import {
|
|||
SET_AUDIO_MUTED,
|
||||
SET_AUDIO_UNMUTE_PERMISSIONS,
|
||||
SET_CAMERA_FACING_MODE,
|
||||
SET_SCREENSHARE_MUTED,
|
||||
SET_VIDEO_AVAILABLE,
|
||||
SET_VIDEO_MUTED,
|
||||
SET_VIDEO_UNMUTE_PERMISSIONS,
|
||||
STORE_VIDEO_TRANSFORM,
|
||||
TOGGLE_CAMERA_FACING_MODE
|
||||
} from './actionTypes';
|
||||
import { CAMERA_FACING_MODE } from './constants';
|
||||
import { CAMERA_FACING_MODE, SCREENSHARE_MUTISM_AUTHORITY } from './constants';
|
||||
|
||||
/**
|
||||
* Media state object for local audio.
|
||||
|
@ -73,6 +74,54 @@ function _audio(state = _AUDIO_INITIAL_MEDIA_STATE, action) {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Media state object for local screenshare.
|
||||
*
|
||||
* @typedef {Object} ScreenshareMediaState
|
||||
* @property {boolean} available=true - Screenshare available state.
|
||||
* @property {boolean} muted=true - Screenshare muted state.
|
||||
* @property {boolean} unmuteBlocked=false - Screenshare unmute blocked state.
|
||||
*/
|
||||
|
||||
/**
 * Initial state for screenshare (the original comment said "video" - fixed).
 *
 * @type {ScreenshareMediaState}
 */
export const _SCREENSHARE_INITIAL_MEDIA_STATE = {
    available: true,

    // Screenshare starts out muted by the USER authority bit.
    muted: SCREENSHARE_MUTISM_AUTHORITY.USER,
    unmuteBlocked: false
};
|
||||
|
||||
/**
 * Reducer for screenshare media state.
 *
 * @param {ScreenshareMediaState} state - Media state of local screenshare.
 * @param {Object} action - Action object.
 * @param {string} action.type - Type of action.
 * @private
 * @returns {ScreenshareMediaState}
 */
function _screenshare(state = _SCREENSHARE_INITIAL_MEDIA_STATE, action) {
    if (action.type === SET_SCREENSHARE_MUTED) {
        return {
            ...state,
            muted: action.muted
        };
    }

    // Screenshare shares the video unmute-permission action.
    if (action.type === SET_VIDEO_UNMUTE_PERMISSIONS) {
        return {
            ...state,
            unmuteBlocked: action.blocked
        };
    }

    return state;
}
|
||||
|
||||
/**
|
||||
* Media state object for local video.
|
||||
*
|
||||
|
@ -179,6 +228,7 @@ function _video(state = _VIDEO_INITIAL_MEDIA_STATE, action) {
|
|||
*/
|
||||
ReducerRegistry.register('features/base/media', combineReducers({
|
||||
audio: _audio,
|
||||
screenshare: _screenshare,
|
||||
video: _video
|
||||
}));
|
||||
|
||||
|
|
|
@ -54,6 +54,18 @@ export const TRACK_CREATE_CANCELED = 'TRACK_CREATE_CANCELED';
|
|||
*/
|
||||
export const TRACK_CREATE_ERROR = 'TRACK_CREATE_ERROR';
|
||||
|
||||
/**
 * The type of redux action dispatched when a track mute/unmute operation fails
 * at the conference level, e.g. because of {@code getUserMedia} errors during
 * unmute or replace-track errors at the peerconnection level.
 *
 * {
 *     type: TRACK_MUTE_UNMUTE_FAILED,
 *     track: Track,
 *     wasMuting: Boolean
 * }
 */
export const TRACK_MUTE_UNMUTE_FAILED = 'TRACK_MUTE_UNMUTE_FAILED';
|
||||
|
||||
/**
|
||||
* The type of redux action dispatched when a track has triggered no data from source event.
|
||||
*
|
||||
|
|
|
@ -5,11 +5,14 @@ import {
|
|||
sendAnalytics
|
||||
} from '../../analytics';
|
||||
import { NOTIFICATION_TIMEOUT_TYPE, showErrorNotification, showNotification } from '../../notifications';
|
||||
import { getCurrentConference } from '../conference';
|
||||
import { getMultipleVideoSupportFeatureFlag } from '../config';
|
||||
import { JitsiTrackErrors, JitsiTrackEvents, createLocalTrack } from '../lib-jitsi-meet';
|
||||
import {
|
||||
CAMERA_FACING_MODE,
|
||||
MEDIA_TYPE,
|
||||
setAudioMuted,
|
||||
setScreenshareMuted,
|
||||
setVideoMuted,
|
||||
VIDEO_MUTISM_AUTHORITY,
|
||||
VIDEO_TYPE
|
||||
|
@ -23,6 +26,7 @@ import {
|
|||
TRACK_ADDED,
|
||||
TRACK_CREATE_CANCELED,
|
||||
TRACK_CREATE_ERROR,
|
||||
TRACK_MUTE_UNMUTE_FAILED,
|
||||
TRACK_NO_DATA_FROM_SOURCE,
|
||||
TRACK_REMOVED,
|
||||
TRACK_STOPPED,
|
||||
|
@ -39,6 +43,35 @@ import {
|
|||
} from './functions';
|
||||
import logger from './logger';
|
||||
|
||||
/**
 * Add a given local track to the conference.
 *
 * @param {JitsiLocalTrack} newTrack - The local track to be added to the conference.
 * @returns {Function}
 */
export function addLocalTrack(newTrack) {
    return async (dispatch, getState) => {
        const conference = getCurrentConference(getState());

        if (conference) {
            await conference.addTrack(newTrack);
        }

        // Pick the mute action matching the track type so the app's mute state
        // mirrors the track's own state.
        let setMuted;

        if (!newTrack.isVideoTrack()) {
            setMuted = setAudioMuted;
        } else if (getMultipleVideoSupportFeatureFlag(getState())
                && newTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
            setMuted = setScreenshareMuted;
        } else {
            setMuted = setVideoMuted;
        }

        const isMuted = newTrack.isMuted();

        logger.log(`Adding ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);
        await dispatch(setMuted(isMuted));

        return dispatch(_addTracks([ newTrack ]));
    };
}
|
||||
|
||||
/**
|
||||
* Requests the creating of the desired media type tracks. Desire is expressed
|
||||
* by base/media unless the function caller specifies desired media types
|
||||
|
@ -320,49 +353,34 @@ export function replaceLocalTrack(oldTrack, newTrack, conference) {
|
|||
* @returns {Function}
|
||||
*/
|
||||
function replaceStoredTracks(oldTrack, newTrack) {
|
||||
return dispatch => {
|
||||
return async (dispatch, getState) => {
|
||||
// We call dispose after doing the replace because dispose will
|
||||
// try and do a new o/a after the track removes itself. Doing it
|
||||
// after means the JitsiLocalTrack.conference is already
|
||||
// cleared, so it won't try and do the o/a.
|
||||
const disposePromise
|
||||
= oldTrack
|
||||
? dispatch(_disposeAndRemoveTracks([ oldTrack ]))
|
||||
: Promise.resolve();
|
||||
if (oldTrack) {
|
||||
await dispatch(_disposeAndRemoveTracks([ oldTrack ]));
|
||||
}
|
||||
|
||||
return disposePromise
|
||||
.then(() => {
|
||||
if (newTrack) {
|
||||
// The mute state of the new track should be
|
||||
// reflected in the app's mute state. For example,
|
||||
// if the app is currently muted and changing to a
|
||||
// new track that is not muted, the app's mute
|
||||
// state should be falsey. As such, emit a mute
|
||||
// event here to set up the app to reflect the
|
||||
// track's mute state. If this is not done, the
|
||||
// current mute state of the app will be reflected
|
||||
// on the track, not vice-versa.
|
||||
const setMuted
|
||||
= newTrack.isVideoTrack()
|
||||
? setVideoMuted
|
||||
// The mute state of the new track should be reflected in the app's mute state. For example, if the
|
||||
// app is currently muted and changing to a new track that is not muted, the app's mute state
|
||||
// should be falsey. As such, emit a mute event here to set up the app to reflect the track's mute
|
||||
// state. If this is not done, the current mute state of the app will be reflected on the track,
|
||||
// not vice-versa.
|
||||
const setMuted = newTrack.isVideoTrack()
|
||||
? getMultipleVideoSupportFeatureFlag(getState()) && newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
|
||||
? setScreenshareMuted
|
||||
: setVideoMuted
|
||||
: setAudioMuted;
|
||||
const isMuted = newTrack.isMuted();
|
||||
|
||||
sendAnalytics(createTrackMutedEvent(
|
||||
newTrack.getType(),
|
||||
'track.replaced',
|
||||
isMuted));
|
||||
logger.log(`Replace ${newTrack.getType()} track - ${
|
||||
isMuted ? 'muted' : 'unmuted'}`);
|
||||
sendAnalytics(createTrackMutedEvent(newTrack.getType(), 'track.replaced', isMuted));
|
||||
logger.log(`Replace ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);
|
||||
|
||||
return dispatch(setMuted(isMuted));
|
||||
await dispatch(setMuted(isMuted));
|
||||
await dispatch(_addTracks([ newTrack ]));
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
if (newTrack) {
|
||||
return dispatch(_addTracks([ newTrack ]));
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -384,7 +402,9 @@ export function trackAdded(track) {
|
|||
|
||||
// participantId
|
||||
const local = track.isLocal();
|
||||
const mediaType = track.getType();
|
||||
const mediaType = getMultipleVideoSupportFeatureFlag(getState()) && track.getVideoType() === VIDEO_TYPE.DESKTOP
|
||||
? MEDIA_TYPE.SCREENSHARE
|
||||
: track.getType();
|
||||
let isReceivingData, noDataFromSourceNotificationInfo, participantId;
|
||||
|
||||
if (local) {
|
||||
|
@ -471,6 +491,25 @@ export function trackMutedChanged(track) {
|
|||
};
|
||||
}
|
||||
|
||||
/**
 * Create an action for when a track's muted state change action has failed. This could happen because of
 * {@code getUserMedia} errors during unmute or replace track errors at the peerconnection level.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {boolean} wasMuting - Whether the failed operation was a mute (true) or an unmute (false).
 * @returns {{
 *     type: TRACK_MUTE_UNMUTE_FAILED,
 *     track: Track
 * }}
 */
export function trackMuteUnmuteFailed(track, wasMuting) {
    return {
        track,
        type: TRACK_MUTE_UNMUTE_FAILED,
        wasMuting
    };
}
|
||||
|
||||
/**
|
||||
* Create an action for when a track's no data from source notification information changes.
|
||||
*
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
/* global APP */
|
||||
|
||||
import { getMultipleVideoSupportFeatureFlag } from '../config/functions.any';
|
||||
import { isMobileBrowser } from '../environment/utils';
|
||||
import JitsiMeetJS, { JitsiTrackErrors, browser } from '../lib-jitsi-meet';
|
||||
import { MEDIA_TYPE, VIDEO_TYPE, setAudioMuted } from '../media';
|
||||
|
@ -295,6 +296,33 @@ export function getLocalAudioTrack(tracks) {
|
|||
return getLocalTrack(tracks, MEDIA_TYPE.AUDIO);
|
||||
}
|
||||
|
||||
/**
 * Returns the local desktop track.
 *
 * @param {Track[]} tracks - List of all tracks.
 * @param {boolean} [includePending] - Indicates whether a local track is to be returned if it is still pending.
 * A local track is pending if {@code getUserMedia} is still executing to create it and, consequently, its
 * {@code jitsiTrack} property is {@code undefined}. By default a pending local track is not returned.
 * @returns {(Track|undefined)}
 */
export function getLocalDesktopTrack(tracks, includePending = false) {
    // A desktop track is identified either by the dedicated screenshare media
    // type (multi-stream mode) or by its desktop video type (legacy mode).
    const isDesktop = t => t.mediaType === MEDIA_TYPE.SCREENSHARE || t.videoType === VIDEO_TYPE.DESKTOP;

    return getLocalTracks(tracks, includePending).find(isDesktop);
}
||||
|
||||
/**
 * Returns the stored local desktop jitsiLocalTrack.
 *
 * @param {Object} state - The redux state.
 * @returns {JitsiLocalTrack|undefined}
 */
export function getLocalJitsiDesktopTrack(state) {
    return getLocalDesktopTrack(getTrackState(state))?.jitsiTrack;
}
|
||||
|
||||
/**
|
||||
* Returns local track by media type.
|
||||
*
|
||||
|
@ -524,20 +552,22 @@ export function isUserInteractionRequiredForUnmute(state) {
|
|||
}
|
||||
|
||||
/**
|
||||
* Mutes or unmutes a specific {@code JitsiLocalTrack}. If the muted state of
|
||||
* the specified {@code track} is already in accord with the specified
|
||||
* {@code muted} value, then does nothing.
|
||||
* Mutes or unmutes a specific {@code JitsiLocalTrack}. If the muted state of the specified {@code track} is already in
|
||||
* accord with the specified {@code muted} value, then does nothing.
|
||||
*
|
||||
* @param {JitsiLocalTrack} track - The {@code JitsiLocalTrack} to mute or
|
||||
* unmute.
|
||||
* @param {boolean} muted - If the specified {@code track} is to be muted, then
|
||||
* {@code true}; otherwise, {@code false}.
|
||||
* @param {JitsiLocalTrack} track - The {@code JitsiLocalTrack} to mute or unmute.
|
||||
* @param {boolean} muted - If the specified {@code track} is to be muted, then {@code true}; otherwise, {@code false}.
|
||||
* @param {Object} state - The redux state.
|
||||
* @returns {Promise}
|
||||
*/
|
||||
export function setTrackMuted(track, muted) {
|
||||
export function setTrackMuted(track, muted, state) {
|
||||
muted = Boolean(muted); // eslint-disable-line no-param-reassign
|
||||
|
||||
if (track.isMuted() === muted) {
|
||||
// Ignore the check for desktop track muted operation. When the screenshare is terminated by clicking on the
|
||||
// browser's 'Stop sharing' button, the local stream is stopped before the inactive stream handler is fired.
|
||||
// We still need to proceed here and remove the track from the peerconnection.
|
||||
if (track.isMuted() === muted
|
||||
&& !(track.getVideoType() === VIDEO_TYPE.DESKTOP && getMultipleVideoSupportFeatureFlag(state))) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
|
@ -546,8 +576,9 @@ export function setTrackMuted(track, muted) {
|
|||
return track[f]().catch(error => {
|
||||
// Track might be already disposed so ignore such an error.
|
||||
if (error.name !== JitsiTrackErrors.TRACK_IS_DISPOSED) {
|
||||
// FIXME Emit mute failed, so that the app can show error dialog.
|
||||
logger.error(`set track ${f} failed`, error);
|
||||
|
||||
return Promise.reject(error);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ import { shouldShowModeratedNotification } from '../../av-moderation/functions';
|
|||
import { hideNotification, isModerationNotificationDisplayed } from '../../notifications';
|
||||
import { isPrejoinPageVisible } from '../../prejoin/functions';
|
||||
import { getCurrentConference } from '../conference/functions';
|
||||
import { getMultipleVideoSupportFeatureFlag } from '../config';
|
||||
import { getAvailableDevices } from '../devices/actions';
|
||||
import {
|
||||
CAMERA_FACING_MODE,
|
||||
|
@ -18,15 +19,20 @@ import {
|
|||
VIDEO_MUTISM_AUTHORITY,
|
||||
TOGGLE_CAMERA_FACING_MODE,
|
||||
toggleCameraFacingMode,
|
||||
VIDEO_TYPE
|
||||
SET_SCREENSHARE_MUTED,
|
||||
VIDEO_TYPE,
|
||||
setScreenshareMuted,
|
||||
SCREENSHARE_MUTISM_AUTHORITY
|
||||
} from '../media';
|
||||
import { MiddlewareRegistry, StateListenerRegistry } from '../redux';
|
||||
|
||||
import {
|
||||
TRACK_ADDED,
|
||||
TOGGLE_SCREENSHARING,
|
||||
TRACK_ADDED,
|
||||
TRACK_MUTE_UNMUTE_FAILED,
|
||||
TRACK_NO_DATA_FROM_SOURCE,
|
||||
TRACK_REMOVED,
|
||||
TRACK_STOPPED,
|
||||
TRACK_UPDATED
|
||||
} from './actionTypes';
|
||||
import {
|
||||
|
@ -34,6 +40,7 @@ import {
|
|||
destroyLocalTracks,
|
||||
showNoDataFromSourceVideoError,
|
||||
toggleScreensharing,
|
||||
trackMuteUnmuteFailed,
|
||||
trackRemoved,
|
||||
trackNoDataFromSourceNotificationInfoChanged
|
||||
} from './actions';
|
||||
|
@ -107,6 +114,10 @@ MiddlewareRegistry.register(store => next => action => {
|
|||
break;
|
||||
}
|
||||
|
||||
case SET_SCREENSHARE_MUTED:
|
||||
_setMuted(store, action, action.mediaType);
|
||||
break;
|
||||
|
||||
case SET_VIDEO_MUTED:
|
||||
if (!action.muted
|
||||
&& isUserInteractionRequiredForUnmute(store.getState())) {
|
||||
|
@ -156,19 +167,54 @@ MiddlewareRegistry.register(store => next => action => {
|
|||
|
||||
const { enabled, audioOnly, ignoreDidHaveVideo } = action;
|
||||
|
||||
APP.UI.emitEvent(UIEvents.TOGGLE_SCREENSHARING, { enabled,
|
||||
if (!getMultipleVideoSupportFeatureFlag(store.getState())) {
|
||||
APP.UI.emitEvent(UIEvents.TOGGLE_SCREENSHARING,
|
||||
{
|
||||
enabled,
|
||||
audioOnly,
|
||||
ignoreDidHaveVideo });
|
||||
ignoreDidHaveVideo
|
||||
});
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case TRACK_MUTE_UNMUTE_FAILED: {
|
||||
const { jitsiTrack } = action.track;
|
||||
const muted = action.wasMuted;
|
||||
const isVideoTrack = jitsiTrack.getType() !== MEDIA_TYPE.AUDIO;
|
||||
|
||||
if (typeof APP !== 'undefined') {
|
||||
if (isVideoTrack && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP
|
||||
&& getMultipleVideoSupportFeatureFlag(store.getState())) {
|
||||
store.dispatch(setScreenshareMuted(!muted));
|
||||
} else if (isVideoTrack) {
|
||||
APP.conference.setVideoMuteStatus();
|
||||
} else {
|
||||
APP.conference.setAudioMuteStatus(!muted);
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case TRACK_STOPPED: {
|
||||
const { jitsiTrack } = action.track;
|
||||
|
||||
if (typeof APP !== 'undefined'
|
||||
&& getMultipleVideoSupportFeatureFlag(store.getState())
|
||||
&& jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
|
||||
store.dispatch(toggleScreensharing(false));
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case TRACK_UPDATED: {
|
||||
// TODO Remove the following calls to APP.UI once components interested
|
||||
// in track mute changes are moved into React and/or redux.
|
||||
if (typeof APP !== 'undefined') {
|
||||
const result = next(action);
|
||||
const state = store.getState();
|
||||
|
||||
if (isPrejoinPageVisible(store.getState())) {
|
||||
if (isPrejoinPageVisible(state)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -181,10 +227,11 @@ MiddlewareRegistry.register(store => next => action => {
|
|||
// Do not change the video mute state for local presenter tracks.
|
||||
if (jitsiTrack.type === MEDIA_TYPE.PRESENTER) {
|
||||
APP.conference.mutePresenter(muted);
|
||||
} else if (jitsiTrack.isLocal() && !(jitsiTrack.videoType === VIDEO_TYPE.DESKTOP)) {
|
||||
} else if (jitsiTrack.isLocal() && !(jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP)) {
|
||||
APP.conference.setVideoMuteStatus();
|
||||
} else if (jitsiTrack.isLocal() && muted && jitsiTrack.videoType === VIDEO_TYPE.DESKTOP) {
|
||||
store.dispatch(toggleScreensharing(false, false, true));
|
||||
} else if (jitsiTrack.isLocal() && muted && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
|
||||
!getMultipleVideoSupportFeatureFlag(state)
|
||||
&& store.dispatch(toggleScreensharing(false, false, true));
|
||||
} else {
|
||||
APP.UI.setVideoMuted(participantID);
|
||||
}
|
||||
|
@ -335,25 +382,34 @@ function _removeNoDataFromSourceNotification({ getState, dispatch }, track) {
|
|||
* @private
|
||||
* @returns {void}
|
||||
*/
|
||||
function _setMuted(store, { ensureTrack, authority, muted }, mediaType: MEDIA_TYPE) {
|
||||
const localTrack
|
||||
= _getLocalTrack(store, mediaType, /* includePending */ true);
|
||||
async function _setMuted(store, { ensureTrack, authority, muted }, mediaType: MEDIA_TYPE) {
|
||||
const { dispatch, getState } = store;
|
||||
const localTrack = _getLocalTrack(store, mediaType, /* includePending */ true);
|
||||
const state = getState();
|
||||
|
||||
if (mediaType === MEDIA_TYPE.SCREENSHARE
|
||||
&& getMultipleVideoSupportFeatureFlag(state)
|
||||
&& !muted) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (localTrack) {
|
||||
// The `jitsiTrack` property will have a value only for a localTrack for
|
||||
// which `getUserMedia` has already completed. If there's no
|
||||
// `jitsiTrack`, then the `muted` state will be applied once the
|
||||
// `jitsiTrack` is created.
|
||||
// The `jitsiTrack` property will have a value only for a localTrack for which `getUserMedia` has already
|
||||
// completed. If there's no `jitsiTrack`, then the `muted` state will be applied once the `jitsiTrack` is
|
||||
// created.
|
||||
const { jitsiTrack } = localTrack;
|
||||
const isAudioOnly = authority === VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY;
|
||||
const isAudioOnly = (mediaType === MEDIA_TYPE.VIDEO && authority === VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY)
|
||||
|| (mediaType === MEDIA_TYPE.SCREENSHARE && authority === SCREENSHARE_MUTISM_AUTHORITY.AUDIO_ONLY);
|
||||
|
||||
// screenshare cannot be muted or unmuted using the video mute button
|
||||
// anymore, unless it is muted by audioOnly.
|
||||
jitsiTrack && (jitsiTrack.videoType !== 'desktop' || isAudioOnly)
|
||||
&& setTrackMuted(jitsiTrack, muted);
|
||||
} else if (!muted && ensureTrack && (typeof APP === 'undefined' || isPrejoinPageVisible(store.getState()))) {
|
||||
// Screenshare cannot be unmuted using the video mute button unless it is muted by audioOnly in the legacy
|
||||
// screensharing mode.
|
||||
if (jitsiTrack
|
||||
&& (jitsiTrack.videoType !== 'desktop' || isAudioOnly || getMultipleVideoSupportFeatureFlag(state))) {
|
||||
setTrackMuted(jitsiTrack, muted, state).catch(() => dispatch(trackMuteUnmuteFailed(localTrack, muted)));
|
||||
}
|
||||
} else if (!muted && ensureTrack && (typeof APP === 'undefined' || isPrejoinPageVisible(state))) {
|
||||
// FIXME: This only runs on mobile now because web has its own way of
|
||||
// creating local tracks. Adjust the check once they are unified.
|
||||
store.dispatch(createLocalTracksA({ devices: [ mediaType ] }));
|
||||
dispatch(createLocalTracksA({ devices: [ mediaType ] }));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,3 +18,12 @@ export const SET_SCREEN_AUDIO_SHARE_STATE = 'SET_SCREEN_AUDIO_SHARE_STATE';
|
|||
* }
|
||||
*/
|
||||
export const SET_SCREENSHARE_CAPTURE_FRAME_RATE = 'SET_SCREENSHARE_CAPTURE_FRAME_RATE';
|
||||
|
||||
/**
|
||||
* Type of action which sets the current audio track captured from the screenshare.
|
||||
* {
|
||||
* type: SET_SCREENSHARE_TRACKS,
|
||||
* desktopAudioTrack: JitsiTrack
|
||||
* }
|
||||
*/
|
||||
export const SET_SCREENSHARE_TRACKS = 'SET_SCREENSHARE_TRACKS';
|
||||
|
|
|
@ -1,11 +1,16 @@
|
|||
// @flow
|
||||
|
||||
import { getMultipleVideoSupportFeatureFlag } from '../base/config/functions.any';
|
||||
import { openDialog } from '../base/dialog/actions';
|
||||
import { browser } from '../base/lib-jitsi-meet';
|
||||
import { shouldHideShareAudioHelper } from '../base/settings';
|
||||
import { toggleScreensharing } from '../base/tracks';
|
||||
|
||||
import { SET_SCREEN_AUDIO_SHARE_STATE, SET_SCREENSHARE_CAPTURE_FRAME_RATE } from './actionTypes';
|
||||
import {
|
||||
SET_SCREEN_AUDIO_SHARE_STATE,
|
||||
SET_SCREENSHARE_CAPTURE_FRAME_RATE,
|
||||
SET_SCREENSHARE_TRACKS
|
||||
} from './actionTypes';
|
||||
import { ShareAudioDialog } from './components';
|
||||
import ShareMediaWarningDialog from './components/ShareScreenWarningDialog';
|
||||
import { isAudioOnlySharing, isScreenVideoShared } from './functions';
|
||||
|
@ -42,6 +47,22 @@ export function setScreenshareFramerate(captureFrameRate: number) {
|
|||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the audio track associated with the screenshare.
|
||||
*
|
||||
* @param {JitsiLocalTrack} desktopAudioTrack - The audio track captured from the screenshare.
|
||||
* @returns {{
|
||||
* type: SET_SCREENSHARE_TRACKS,
|
||||
* desktopAudioTrack: JitsiTrack
|
||||
* }}
|
||||
*/
|
||||
export function setScreenshareAudioTrack(desktopAudioTrack) {
|
||||
return {
|
||||
type: SET_SCREENSHARE_TRACKS,
|
||||
desktopAudioTrack
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the audio only screen sharing flow. Function will switch between off and on states depending on the context.
|
||||
*
|
||||
|
@ -65,6 +86,12 @@ export function startAudioScreenShareFlow() {
|
|||
// available for audio screen sharing, namely full window audio.
|
||||
// If we're already sharing audio, toggle off.
|
||||
if (shouldHideShareAudioHelper(state) || browser.isElectron() || audioOnlySharing) {
|
||||
if (getMultipleVideoSupportFeatureFlag(state)) {
|
||||
dispatch(toggleScreensharing(!audioOnlySharing, true));
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// We don't want to explicity set the screens share state, by passing undefined we let the
|
||||
// underlying logic decide if it's on or off.
|
||||
dispatch(toggleScreensharing(undefined, true));
|
||||
|
@ -80,8 +107,7 @@ export function startAudioScreenShareFlow() {
|
|||
* Start normal screen sharing flow.Function will switch between off and on states depending on the context, and if
|
||||
* not explicity told otherwise.
|
||||
*
|
||||
* @param {boolean} enabled - Explicitly set the screen sharing state. This has been kept for backward compatibility
|
||||
* with the external API exposed by the iframe, even though it might not be used.
|
||||
* @param {boolean} enabled - Explicitly set the screen sharing state.
|
||||
* @returns {void}
|
||||
*/
|
||||
export function startScreenShareFlow(enabled: boolean) {
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
// @flow
|
||||
|
||||
import { getMultipleVideoSupportFeatureFlag } from '../base/config';
|
||||
import { isWindows } from '../base/environment';
|
||||
import { isMobileBrowser } from '../base/environment/utils';
|
||||
import { browser } from '../base/lib-jitsi-meet';
|
||||
import { VIDEO_TYPE } from '../base/media';
|
||||
import { getLocalVideoTrack } from '../base/tracks';
|
||||
import { getLocalDesktopTrack, getLocalVideoTrack } from '../base/tracks';
|
||||
|
||||
/**
|
||||
* Is the current screen sharing session audio only.
|
||||
|
@ -53,7 +54,14 @@ export function isScreenMediaShared(state: Object) {
|
|||
* @returns {boolean}
|
||||
*/
|
||||
export function isScreenVideoShared(state: Object) {
|
||||
const localVideo = getLocalVideoTrack(state['features/base/tracks']);
|
||||
const tracks = state['features/base/tracks'];
|
||||
const localScreenshare = getLocalDesktopTrack(tracks);
|
||||
|
||||
if (getMultipleVideoSupportFeatureFlag(state)) {
|
||||
|
||||
return localScreenshare && localScreenshare.jitsiTrack && !localScreenshare.jitsiTrack.isMuted();
|
||||
}
|
||||
const localVideo = getLocalVideoTrack(tracks);
|
||||
|
||||
// $FlowFixMe - No support for optional chain method calls in flow atm.
|
||||
return localVideo?.jitsiTrack?.getVideoType() === VIDEO_TYPE.DESKTOP;
|
||||
|
|
|
@ -1,13 +1,17 @@
|
|||
|
||||
import { ReducerRegistry } from '../base/redux';
|
||||
|
||||
import { SET_SCREEN_AUDIO_SHARE_STATE, SET_SCREENSHARE_CAPTURE_FRAME_RATE } from './actionTypes';
|
||||
import {
|
||||
SET_SCREEN_AUDIO_SHARE_STATE,
|
||||
SET_SCREENSHARE_CAPTURE_FRAME_RATE,
|
||||
SET_SCREENSHARE_TRACKS
|
||||
} from './actionTypes';
|
||||
|
||||
/**
|
||||
* Reduces the Redux actions of the feature features/screen-share.
|
||||
*/
|
||||
ReducerRegistry.register('features/screen-share', (state = {}, action) => {
|
||||
const { captureFrameRate, isSharingAudio } = action;
|
||||
const { captureFrameRate, isSharingAudio, desktopAudioTrack } = action;
|
||||
|
||||
switch (action.type) {
|
||||
case SET_SCREEN_AUDIO_SHARE_STATE:
|
||||
|
@ -22,6 +26,12 @@ ReducerRegistry.register('features/screen-share', (state = {}, action) => {
|
|||
captureFrameRate
|
||||
};
|
||||
|
||||
case SET_SCREENSHARE_TRACKS:
|
||||
return {
|
||||
...state,
|
||||
desktopAudioTrack
|
||||
};
|
||||
|
||||
default:
|
||||
return state;
|
||||
}
|
||||
|
|
|
@ -579,6 +579,7 @@ class Toolbox extends Component<Props> {
|
|||
_desktopSharingButtonDisabled,
|
||||
_desktopSharingEnabled,
|
||||
_localVideo,
|
||||
_screenSharing,
|
||||
_virtualSource,
|
||||
dispatch
|
||||
} = this.props;
|
||||
|
@ -599,7 +600,7 @@ class Toolbox extends Component<Props> {
|
|||
}
|
||||
|
||||
if (_desktopSharingEnabled && !_desktopSharingButtonDisabled) {
|
||||
dispatch(startScreenShareFlow());
|
||||
dispatch(startScreenShareFlow(!_screenSharing));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in New Issue