Compare commits

...

23 Commits

Author SHA1 Message Date
damencho f79aa5cf3b chore(deps) update lib-jitsi-meet.
fix(TPC): Switch media direction correctly on all m-lines associated with local tracks during p2p->jvb switch.

Fixes cases where SS stream is not being sent when p2p->jvb switch happens.

fix(ProxyConnection) add more necessary stubs
These have become necessary with the introduction of multi-stream.
2022-09-01 10:02:16 -05:00
Jaya Allamsetty f47dc797fe fix(audio-only) Do not enable video automatically when audio-only is disabled (#12056)
* fix(audio-only) Do not enable video automatically when audio-only is disabled.
2022-08-31 10:30:18 -05:00
Hristo Terezov c9fd3e12cc fix(follow-me): SS not visible
The remote screen shares stored in redux were not updated when Follow Me
was active. Therefore the receiver constraints were not correct.
2022-08-24 16:45:32 -05:00
Jaya Allamsetty 3ea01911d6 fix(filmstrip) Push dominant speaker to the top of the active speaker list.
The active speaker list in redux is alpha sorted, so we need to ensure the dominant speaker is at the top; otherwise it can get truncated based on the number of visible slots available in the filmstrip.
2022-08-16 14:06:51 -04:00
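In essence, the dominant speaker is spliced to the front of the alphabetically sorted speaker list before truncation. A minimal sketch of that step (a hypothetical helper, not the actual getActiveSpeakersToBeDisplayed selector shown in the diff further down):

// Sketch only: `speakers` mirrors the alpha-sorted speakersList Map from redux.
// Moving the dominant speaker to index 0 keeps it from being truncated when the
// filmstrip has fewer visible slots than there are active speakers.
function promoteDominantSpeaker(
        speakers: Map<string, string>,
        dominantSpeakerId: string | undefined,
        dominantSpeakerName: string,
        localId: string): Map<string, string> {
    if (!dominantSpeakerId || dominantSpeakerId === localId) {
        return speakers;
    }

    const entries = Array.from(speakers).filter(([ id ]) => id !== dominantSpeakerId);

    // A Map preserves insertion order, so rebuilding it from the re-ordered entries is enough.
    entries.splice(0, 0, [ dominantSpeakerId, dominantSpeakerName ]);

    return new Map(entries);
}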
Andrei Gavrilescu 12751c42cf fix(noise-suppression): ignore ts on noise-suppression button (#12025) 2022-08-16 15:13:47 +03:00
Andrei Gavrilescu 34df40933c fix(noise-suppression): fix muted state, update icons (#11936) 2022-08-12 15:10:45 -05:00
Andrei Gavrilescu f18952a8d6 fix(noise-suppression): remove no track warning on share audio flow (#11966)
* remove no track warning on share audio flow

* remove unused import
2022-08-12 15:04:48 -05:00
Jaya Allamsetty 81a40139d2 fix(filmstrip): Do not re-sort the active speakers if the dominant speaker is visible.
Fixes a case where re-sorting doesn't happen even though the dominant speaker is currently not visible.
2022-08-12 14:48:01 -04:00
Jaya Allamsetty 55e354e721 chore(deps) update lib-jitsi-meet.
Possibly fixes a bug where SS appears black when the 'unmute' and forwarded-sources events for the track are received before a large-video update is scheduled.
2022-08-12 13:59:46 -04:00
Jaya Allamsetty 73e2ac8730 fix(audio-only-ss): Attach a track stopped handler to the audioDesktop track only for the audio-only sharing case.
There will be a desktop audio track for regular screensharing when a tab is selected. We do not have to toggle screensharing again when that track is killed.
2022-08-12 12:05:08 -04:00
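The handler attachment this commit describes boils down to a conditional subscription; a hedged sketch follows (the constant below is assumed to mirror lib-jitsi-meet's JitsiTrackEvents.LOCAL_TRACK_STOPPED, which the actual diff uses):

// Assumed to mirror JitsiTrackEvents.LOCAL_TRACK_STOPPED from lib-jitsi-meet.
const LOCAL_TRACK_STOPPED = 'track.stopped';

// Sketch: only audio-only screen sharing needs to react to the desktop audio track dying.
// Regular tab sharing also produces a desktop audio track, but its end must not toggle
// screensharing off a second time.
function maybeWatchDesktopAudio(
        desktopAudioTrack: { on: (event: string, handler: () => void) => void } | undefined,
        audioOnly: boolean,
        stopAudioScreenShare: () => void): void {
    if (audioOnly && desktopAudioTrack) {
        desktopAudioTrack.on(LOCAL_TRACK_STOPPED, stopAudioScreenShare);
    }
}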
Jaya Allamsetty ec504057bd Add debugging for SS streaming issues 2022-08-12 12:04:11 -04:00
Jaya Allamsetty 7598f30ddd fix(audio-only SS) Stop audio-only SS when user stops share from the browser's share in progress window. 2022-08-05 14:34:56 -04:00
Hristo Terezov 6712761b7f fix(remote-control): when multistream is enabled
The remote control controller events were sent for the camera
participant instead of the screen sharing one.
2022-08-05 12:27:05 -04:00
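Conceptually, the controller now resolves a selected virtual screenshare tile back to its owner before deciding whether the controlled participant is on stage. A sketch with stand-in helpers (the diff below uses getVirtualScreenshareParticipantOwnerId and getVirtualScreenshareParticipantByOwnerId):

// Sketch: `isVirtualScreenshare`, `getOwnerId` and `ownerHasVirtualScreenshare` stand in
// for the participant selectors used in the actual state listener.
function isControlledParticipantOnStage(
        selectedId: string,
        controlledId: string,
        isVirtualScreenshare: (id: string) => boolean,
        getOwnerId: (id: string) => string,
        ownerHasVirtualScreenshare: (id: string) => boolean): boolean {
    if (isVirtualScreenshare(selectedId)) {
        // The screen-share tile is on stage: compare against its owner, not the tile id.
        return getOwnerId(selectedId) === controlledId;
    }

    if (ownerHasVirtualScreenshare(selectedId)) {
        // The camera tile of a screensharing participant is on stage: not the controlled surface.
        return false;
    }

    return selectedId === controlledId;
}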
Jaya Allamsetty 7e7bc618a7 fix(remote-control): fix remote-control when multi-stream is enabled. 2022-08-05 12:26:50 -04:00
Дамян Минков 484c3ec023 fix: Applies AV moderation checks on screen sharing with multistream on.
Previously, guests trying to screenshare saw the moderation notification but also the dialog for choosing the content to share.
2022-08-04 14:30:36 +03:00
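The added guard runs before the screen picker opens: if the action would start sharing while video is still force-muted by A/V moderation, only the notification is shown. A minimal sketch of that check, simplified from the TOGGLE_SCREENSHARING middleware in the diff below:

// Sketch: returns true when the screenshare flow may continue into the content picker.
function guardScreenshareStart(
        enabled: boolean | undefined,
        videoModerationActive: boolean,
        showModeratedNotification: () => void): boolean {
    // `enabled === undefined` means "toggle", which may also start sharing.
    const wantsToStart = enabled || enabled === undefined;

    if (wantsToStart && videoModerationActive) {
        showModeratedNotification();

        return false; // bail out before the browser's share-content dialog appears
    }

    return true;
}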
Robert Pintilii 89fa51fd74 fix(config) Fix recording config backwards compatibility (#11953)
Overwrite the new flag with the old one only if the new one is not set
Fix hideStorageWarning config
2022-08-04 10:44:23 +03:00
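In effect, the legacy fileRecordingsService* flags are only translated when the corresponding recordingService options are not set explicitly; roughly (a sketch, not the _translateLegacyConfig code from the diff below):

// Sketch: apply the old flags without clobbering an explicitly configured recordingService.
interface RecordingService {
    enabled?: boolean;
    sharingEnabled?: boolean;
}

function translateLegacyRecordingConfig(
        recordingService: RecordingService,
        fileRecordingsServiceEnabled?: boolean,
        fileRecordingsServiceSharingEnabled?: boolean): RecordingService {
    return {
        ...recordingService,
        enabled: recordingService.enabled ?? fileRecordingsServiceEnabled,
        sharingEnabled: recordingService.sharingEnabled ?? fileRecordingsServiceSharingEnabled
    };
}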
Jaya Allamsetty f46ff3ebe9 fix(external-api) Fix toggleShareScreen in multi-stream mode.
Fixes https://github.com/jitsi/jitsi-meet/issues/11916.
2022-08-03 15:00:15 -04:00
Дамян Минков e112ee80a5 fix(moderation): media not working
When a participant joins after A/V moderation has been enabled, they are auto-muted via the startmuted node in the session-initiate packet. We were not marking those participants as muted by the focus, and therefore on unmute we were not sending the <muted>false</muted> IQ to jicofo.
The result was that the bridge wasn't forwarding the media to the remote participants.
2022-08-03 11:07:50 +03:00
Saúl Ibarra Corretgé 35b21bbea8 fix(lobby) fix not being able to type password
Autofocus Strikes Back...
2022-08-02 17:12:26 -04:00
Jaya Allamsetty 70c7e15543 chore(deps) Update lib-jitsi-meet.
This fixes an issue where the bitrates for screenshare were much higher than before for VP9, causing the JVB to suspend SS streams more often.
2022-08-02 15:07:58 -04:00
Jaya Allamsetty de6477a0be fix(audio-share): Fix audio-only SS in multi-stream mode.
ShareAudioDialog passes undefined when the user hits continue in the share-audio demo modal. In that case, toggle audio share based on its current state.
2022-08-02 14:56:41 -04:00
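The resulting toggle semantics, also used by the external-API path in commit f46ff3ebe9, amount to a nullish-coalescing fallback on the current share state; a sketch:

// Sketch: when `enabled` is undefined the caller did not request a specific state,
// so flip whichever kind of share (audio-only or screen video) is being toggled.
function resolveShareTargetState(
        enabled: boolean | undefined,
        audioOnly: boolean,
        audioOnlySharing: boolean,
        screenVideoShared: boolean): boolean {
    return audioOnly
        ? enabled ?? !audioOnlySharing
        : enabled ?? !screenVideoShared;
}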
Jaya Allamsetty 14c4b19c9b fix(participants-pane): Get the correct participantCount for multi-stream.
Do not add virtual screenshare participants to participantCount.
2022-08-02 14:56:23 -04:00
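With multi-stream, every remote screen share is backed by an extra virtual participant in redux, so the pane count has to subtract those (and fake participants such as shared video); roughly:

// Sketch of the corrected count used for the participants pane.
function getVisibleParticipantCount(
        remoteCount: number,
        fakeParticipantCount: number,
        virtualScreenshareCount: number,
        hasLocalParticipant: boolean): number {
    return remoteCount
        - fakeParticipantCount
        - virtualScreenshareCount
        + (hasLocalParticipant ? 1 : 0);
}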
Jaya Allamsetty ffe66a1148 fix(screenshot-capture): Impl screenshot capture in multi-stream mode. 2022-08-02 14:56:04 -04:00
29 changed files with 245 additions and 162 deletions

View File

@ -2651,29 +2651,8 @@ export default {
}
);
APP.UI.addListener(UIEvents.TOGGLE_AUDIO_ONLY, audioOnly => {
// FIXME On web video track is stored both in redux and in
// 'localVideo' field, video is attempted to be unmuted twice when
// turning off the audio only mode. This will crash the app with
// 'unmute operation is already in progress'.
// Because there's no logic in redux about creating new track in
// case unmute when not track exists the things have to go through
// muteVideo logic in such case.
const tracks = APP.store.getState()['features/base/tracks'];
const isTrackInRedux
= Boolean(tracks.find(track => track.jitsiTrack && track.jitsiTrack.getType() === MEDIA_TYPE.VIDEO));
if (isTrackInRedux && !this.isSharingScreen) {
this.muteVideo(audioOnly);
}
// Immediately update the UI by having remote videos and the large
// video update themselves instead of waiting for some other event
// to cause the update, usually PARTICIPANT_CONN_STATUS_CHANGED.
// There is no guarantee another event will trigger the update
// immediately and in all situations, for example because a remote
// participant is having connection trouble so no status changes.
APP.UI.addListener(UIEvents.TOGGLE_AUDIO_ONLY, () => {
// Immediately update the UI by having remote videos and the large video update themselves.
const displayedUserId = APP.UI.getLargeVideoID();
if (displayedUserId) {

View File

@ -246,7 +246,7 @@ export default class LargeVideoManager {
this.newStreamData = null;
logger.info(`hover in ${id}`);
logger.debug(`Scheduled large video update for ${id}`);
this.state = videoType;
// eslint-disable-next-line no-shadow
const container = this.getCurrentContainer();
@ -288,12 +288,13 @@ export default class LargeVideoManager {
this.videoTrack.jitsiTrack.getTrackStreamingStatus()));
}
}
const streamingStatusActive = isTrackStreamingStatusActive(videoTrack);
isVideoRenderable = !isVideoMuted && (
APP.conference.isLocalId(id)
|| participant?.isLocalScreenShare
|| isTrackStreamingStatusActive(videoTrack)
);
isVideoRenderable = !isVideoMuted
&& (APP.conference.isLocalId(id) || participant?.isLocalScreenShare || streamingStatusActive);
this.videoTrack?.jitsiTrack?.getVideoType() === VIDEO_TYPE.DESKTOP
&& logger.debug(`Remote track ${videoTrack?.jitsiTrack}, isVideoMuted=${isVideoMuted},`
+ ` streamingStatusActive=${streamingStatusActive}, isVideoRenderable=${isVideoRenderable}`);
} else {
isVideoRenderable = !isVideoMuted
&& (APP.conference.isLocalId(id) || isParticipantConnectionStatusActive(participant));

package-lock.json (generated)
View File

@ -76,7 +76,7 @@
"js-md5": "0.6.1",
"js-sha512": "0.8.0",
"jwt-decode": "2.2.0",
"lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1468.0.0+634885b9/lib-jitsi-meet.tgz",
"lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet#634f69d148122452ffbf707281cf1f0b228c56bc",
"lodash": "4.17.21",
"moment": "2.29.4",
"moment-duration-format": "2.2.2",
@ -12889,8 +12889,8 @@
},
"node_modules/lib-jitsi-meet": {
"version": "0.0.0",
"resolved": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1468.0.0+634885b9/lib-jitsi-meet.tgz",
"integrity": "sha512-XxVIg/WlYHPi5J3DyJ/hq59YnM69fhLCrJCFVrSmkXiNb3VKNA7Fq53ktHEg2DKb+d4A0oRWNXcvwMUUceiJNQ==",
"resolved": "git+ssh://git@github.com/jitsi/lib-jitsi-meet.git#634f69d148122452ffbf707281cf1f0b228c56bc",
"integrity": "sha512-5mZMqZd8LdplVqeBER9OxDsHVA2PHw6RVGiEl5uX9Vi7WzfmCF90ECDVxZgRJvPAiBCj5v0Xf9sDXws7AoBRzg==",
"license": "Apache-2.0",
"dependencies": {
"@jitsi/js-utils": "2.0.0",
@ -30578,8 +30578,9 @@
}
},
"lib-jitsi-meet": {
"version": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1468.0.0+634885b9/lib-jitsi-meet.tgz",
"integrity": "sha512-XxVIg/WlYHPi5J3DyJ/hq59YnM69fhLCrJCFVrSmkXiNb3VKNA7Fq53ktHEg2DKb+d4A0oRWNXcvwMUUceiJNQ==",
"version": "git+ssh://git@github.com/jitsi/lib-jitsi-meet.git#634f69d148122452ffbf707281cf1f0b228c56bc",
"integrity": "sha512-5mZMqZd8LdplVqeBER9OxDsHVA2PHw6RVGiEl5uX9Vi7WzfmCF90ECDVxZgRJvPAiBCj5v0Xf9sDXws7AoBRzg==",
"from": "lib-jitsi-meet@https://github.com/jitsi/lib-jitsi-meet#634f69d148122452ffbf707281cf1f0b228c56bc",
"requires": {
"@jitsi/js-utils": "2.0.0",
"@jitsi/logger": "2.0.0",

View File

@ -81,7 +81,7 @@
"js-md5": "0.6.1",
"js-sha512": "0.8.0",
"jwt-decode": "2.2.0",
"lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1468.0.0+634885b9/lib-jitsi-meet.tgz",
"lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet#634f69d148122452ffbf707281cf1f0b228c56bc",
"lodash": "4.17.21",
"moment": "2.29.4",
"moment-duration-format": "2.2.2",

View File

@ -14,17 +14,13 @@ declare var APP: Object;
/**
* Sets the audio-only flag for the current JitsiConference.
*
* @param {boolean} audioOnly - True if the conference should be audio only;
* false, otherwise.
* @param {boolean} ensureVideoTrack - Define if conference should ensure
* to create a video track.
* @param {boolean} audioOnly - True if the conference should be audio only; false, otherwise.
* @returns {{
* type: SET_AUDIO_ONLY,
* audioOnly: boolean,
* ensureVideoTrack: boolean
* audioOnly: boolean
* }}
*/
export function setAudioOnly(audioOnly: boolean, ensureVideoTrack: boolean = false) {
export function setAudioOnly(audioOnly: boolean) {
return (dispatch: Dispatch<any>, getState: Function) => {
const { enabled: oldValue } = getState()['features/base/audio-only'];
@ -34,8 +30,7 @@ export function setAudioOnly(audioOnly: boolean, ensureVideoTrack: boolean = fal
dispatch({
type: SET_AUDIO_ONLY,
audioOnly,
ensureVideoTrack
audioOnly
});
if (typeof APP !== 'undefined') {
@ -56,6 +51,6 @@ export function toggleAudioOnly() {
return (dispatch: Dispatch<any>, getState: Function) => {
const { enabled } = getState()['features/base/audio-only'];
return dispatch(setAudioOnly(!enabled, true));
return dispatch(setAudioOnly(!enabled));
};
}

View File

@ -1,17 +1,30 @@
// @flow
import { AUDIO_ONLY_SCREEN_SHARE_NO_TRACK } from '../../../../modules/UI/UIErrors';
import UIEvents from '../../../../service/UI/UIEvents';
import { showModeratedNotification } from '../../av-moderation/actions';
import { shouldShowModeratedNotification } from '../../av-moderation/functions';
import { setNoiseSuppressionEnabled } from '../../noise-suppression/actions';
import { showNotification, NOTIFICATION_TIMEOUT_TYPE } from '../../notifications';
import {
showNotification,
NOTIFICATION_TIMEOUT_TYPE,
isModerationNotificationDisplayed
} from '../../notifications';
import {
setPrejoinPageVisibility,
setSkipPrejoinOnReload
} from '../../prejoin';
import { setScreenAudioShareState, setScreenshareAudioTrack } from '../../screen-share';
import {
isAudioOnlySharing,
isScreenVideoShared,
setScreenAudioShareState,
setScreenshareAudioTrack
} from '../../screen-share';
import { isScreenshotCaptureEnabled, toggleScreenshotCaptureSummary } from '../../screenshot-capture';
import { AudioMixerEffect } from '../../stream-effects/audio-mixer/AudioMixerEffect';
import { setAudioOnly } from '../audio-only';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import { JitsiConferenceErrors, JitsiTrackErrors } from '../lib-jitsi-meet';
import { JitsiConferenceErrors, JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
import { MEDIA_TYPE, setScreenshareMuted, VIDEO_TYPE } from '../media';
import { MiddlewareRegistry } from '../redux';
import {
@ -20,6 +33,7 @@ import {
getLocalDesktopTrack,
getLocalJitsiAudioTrack,
replaceLocalTrack,
toggleScreensharing,
TOGGLE_SCREENSHARING
} from '../tracks';
@ -27,6 +41,8 @@ import { CONFERENCE_FAILED, CONFERENCE_JOIN_IN_PROGRESS, CONFERENCE_JOINED } fro
import { getCurrentConference } from './functions';
import './middleware.any';
declare var APP: Object;
MiddlewareRegistry.register(store => next => action => {
const { dispatch, getState } = store;
const { enableForcedReload } = getState()['features/base/config'];
@ -53,12 +69,33 @@ MiddlewareRegistry.register(store => next => action => {
break;
}
case TOGGLE_SCREENSHARING: {
getMultipleVideoSendingSupportFeatureFlag(getState()) && _toggleScreenSharing(action, store);
case TOGGLE_SCREENSHARING:
if (typeof APP === 'object') {
// check for A/V Moderation when trying to start screen sharing
if ((action.enabled || action.enabled === undefined)
&& shouldShowModeratedNotification(MEDIA_TYPE.VIDEO, store.getState())) {
if (!isModerationNotificationDisplayed(MEDIA_TYPE.PRESENTER, store.getState())) {
store.dispatch(showModeratedNotification(MEDIA_TYPE.PRESENTER));
}
return;
}
const { enabled, audioOnly, ignoreDidHaveVideo } = action;
if (getMultipleVideoSendingSupportFeatureFlag(store.getState())) {
_toggleScreenSharing(action, store);
} else {
APP.UI.emitEvent(UIEvents.TOGGLE_SCREENSHARING,
{
enabled,
audioOnly,
ignoreDidHaveVideo
});
}
}
break;
}
}
return next(action);
});
@ -130,18 +167,32 @@ async function _maybeApplyAudioMixerEffect(desktopAudioTrack, state) {
* @param {Store} store - The redux store.
* @returns {void}
*/
async function _toggleScreenSharing({ enabled, audioOnly = false }, store) {
async function _toggleScreenSharing({ enabled, audioOnly = false, shareOptions = {} }, store) {
const { dispatch, getState } = store;
const state = getState();
const audioOnlySharing = isAudioOnlySharing(state);
const screenSharing = isScreenVideoShared(state);
const conference = getCurrentConference(state);
const localAudio = getLocalJitsiAudioTrack(state);
const localScreenshare = getLocalDesktopTrack(state['features/base/tracks']);
if (enabled) {
// Toggle screenshare or audio-only share if the new state is not passed. Happens in the following two cases.
// 1. ShareAudioDialog passes undefined when the user hits continue in the share audio demo modal.
// 2. Toggle screenshare called from the external API.
const enable = audioOnly
? enabled ?? !audioOnlySharing
: enabled ?? !screenSharing;
const screensharingDetails = {};
if (enable) {
let tracks;
const options = {
devices: [ VIDEO_TYPE.DESKTOP ],
...shareOptions
};
try {
tracks = await createLocalTracksF({ devices: [ VIDEO_TYPE.DESKTOP ] });
tracks = await createLocalTracksF(options);
} catch (error) {
_handleScreensharingError(error, store);
@ -150,8 +201,8 @@ async function _toggleScreenSharing({ enabled, audioOnly = false }, store) {
const desktopAudioTrack = tracks.find(track => track.getType() === MEDIA_TYPE.AUDIO);
const desktopVideoTrack = tracks.find(track => track.getType() === MEDIA_TYPE.VIDEO);
// Dispose the desktop track for audio-only screensharing.
if (audioOnly) {
// Dispose the desktop track for audio-only screensharing.
desktopVideoTrack.dispose();
if (!desktopAudioTrack) {
@ -165,17 +216,27 @@ async function _toggleScreenSharing({ enabled, audioOnly = false }, store) {
} else {
await dispatch(addLocalTrack(desktopVideoTrack));
}
if (isScreenshotCaptureEnabled(state, false, true)) {
dispatch(toggleScreenshotCaptureSummary(true));
}
screensharingDetails.sourceType = desktopVideoTrack.sourceType;
}
// Apply the AudioMixer effect if there is a local audio track, add the desktop track to the conference
// otherwise without unmuting the microphone.
if (desktopAudioTrack) {
// Noise suppression doesn't work with desktop audio because we can't chain
// track effects yet, disable it first.
// We need to to wait for the effect to clear first or it might interfere with the audio mixer.
// Noise suppression doesn't work with desktop audio because we can't chain track effects yet, disable it
// first. We need to wait for the effect to clear first or it might interfere with the audio mixer.
await dispatch(setNoiseSuppressionEnabled(false));
_maybeApplyAudioMixerEffect(desktopAudioTrack, state);
dispatch(setScreenshareAudioTrack(desktopAudioTrack));
// Handle the case where screen share was stopped from the browsers 'screen share in progress' window.
if (audioOnly) {
desktopAudioTrack?.on(
JitsiTrackEvents.LOCAL_TRACK_STOPPED,
() => dispatch(toggleScreensharing(undefined, true)));
}
}
// Disable audio-only or best performance mode if the user starts screensharing. This doesn't apply to
@ -188,6 +249,8 @@ async function _toggleScreenSharing({ enabled, audioOnly = false }, store) {
} else {
const { desktopAudioTrack } = state['features/screen-share'];
dispatch(toggleScreenshotCaptureSummary(false));
// Mute the desktop track instead of removing it from the conference since we don't want the client to signal
// a source-remove to the remote peer for the screenshare track. Later when screenshare is enabled again, the
// same sender will be re-used without the need for signaling a new ssrc through source-add.
@ -204,6 +267,9 @@ async function _toggleScreenSharing({ enabled, audioOnly = false }, store) {
}
if (audioOnly) {
dispatch(setScreenAudioShareState(enabled));
dispatch(setScreenAudioShareState(enable));
} else {
// Notify the external API.
APP.API.notifyScreenSharingStatusChanged(enable, screensharingDetails);
}
}

View File

@ -407,13 +407,15 @@ function _translateLegacyConfig(oldValue: IConfig) {
}
newValue.recordingService = newValue.recordingService || {};
if (oldValue.fileRecordingsServiceEnabled !== undefined) {
if (oldValue.fileRecordingsServiceEnabled !== undefined
&& newValue.recordingService.enabled === undefined) {
newValue.recordingService = {
...newValue.recordingService,
enabled: oldValue.fileRecordingsServiceEnabled
};
}
if (oldValue.fileRecordingsServiceSharingEnabled !== undefined) {
if (oldValue.fileRecordingsServiceSharingEnabled !== undefined
&& newValue.recordingService.sharingEnabled === undefined) {
newValue.recordingService = {
...newValue.recordingService,
sharingEnabled: oldValue.fileRecordingsServiceSharingEnabled

View File

@ -88,6 +88,8 @@ export { default as IconMuteEveryone } from './mute-everyone.svg';
export { default as IconMuteEveryoneElse } from './mute-everyone-else.svg';
export { default as IconMuteVideoEveryone } from './mute-video-everyone.svg';
export { default as IconMuteVideoEveryoneElse } from './mute-video-everyone-else.svg';
export { default as IconNoiseSuppressionOff } from './noise-suppression-off.svg';
export { default as IconNoiseSuppressionOn } from './noise-suppression-on.svg';
export { default as IconNotificationJoin } from './navigate_next.svg';
export { default as IconOpenInNew } from './open_in_new.svg';
export { default as IconOutlook } from './office365.svg';

View File

@ -0,0 +1,4 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.06204 5.25986C5.92029 5.40926 5.83333 5.61114 5.83333 5.83333V14.1667C5.83333 14.6269 6.20643 15 6.66667 15C7.1269 15 7.5 14.6269 7.5 14.1667V6.69782L6.06204 5.25986ZM9.16667 8.36449V16.6667C9.16667 17.1269 9.53976 17.5 10 17.5C10.4602 17.5 10.8333 17.1269 10.8333 16.6667V10.0312L9.16667 8.36449ZM13.3016 12.4994C12.8666 12.4831 12.5169 12.1334 12.5006 11.6984L13.3016 12.4994ZM14.1667 10.9688L12.5 9.30218V8.33333C12.5 7.8731 12.8731 7.5 13.3333 7.5C13.7936 7.5 14.1667 7.8731 14.1667 8.33333V10.9688ZM17.4905 14.2927L15.8333 12.6355V5.83333C15.8333 5.3731 16.2064 5 16.6667 5C17.1269 5 17.5 5.3731 17.5 5.83333V14.1667C17.5 14.2095 17.4968 14.2516 17.4905 14.2927ZM10.8333 7.63551L9.16667 5.96884V3.33333C9.16667 2.8731 9.53976 2.5 10 2.5C10.4602 2.5 10.8333 2.8731 10.8333 3.33333V7.63551ZM3.33333 7.5C2.8731 7.5 2.5 7.8731 2.5 8.33333V11.6667C2.5 12.1269 2.8731 12.5 3.33333 12.5C3.79357 12.5 4.16667 12.1269 4.16667 11.6667V8.33333C4.16667 7.8731 3.79357 7.5 3.33333 7.5Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M1.91469 1.91469C2.24546 1.58392 2.78183 1.58401 3.11271 1.91488L18.0851 16.8873C18.416 17.2182 18.4161 17.7545 18.0853 18.0853C17.7545 18.4161 17.2182 18.416 16.8873 18.0851L1.91488 3.11271C1.58401 2.78183 1.58392 2.24546 1.91469 1.91469Z" fill="white"/>
</svg>


View File

@ -0,0 +1,3 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M10 2.5C9.53976 2.5 9.16667 2.8731 9.16667 3.33333V16.6667C9.16667 17.1269 9.53976 17.5 10 17.5C10.4602 17.5 10.8333 17.1269 10.8333 16.6667V3.33333C10.8333 2.8731 10.4602 2.5 10 2.5ZM6.66667 5C6.20643 5 5.83333 5.3731 5.83333 5.83333V14.1667C5.83333 14.6269 6.20643 15 6.66667 15C7.1269 15 7.5 14.6269 7.5 14.1667V5.83333C7.5 5.3731 7.1269 5 6.66667 5ZM2.5 8.33333C2.5 7.8731 2.8731 7.5 3.33333 7.5C3.79357 7.5 4.16667 7.8731 4.16667 8.33333V11.6667C4.16667 12.1269 3.79357 12.5 3.33333 12.5C2.8731 12.5 2.5 12.1269 2.5 11.6667V8.33333ZM12.5 8.33333C12.5 7.8731 12.8731 7.5 13.3333 7.5C13.7936 7.5 14.1667 7.8731 14.1667 8.33333V11.6667C14.1667 12.1269 13.7936 12.5 13.3333 12.5C12.8731 12.5 12.5 12.1269 12.5 11.6667V8.33333ZM15.8333 5.83333C15.8333 5.3731 16.2064 5 16.6667 5C17.1269 5 17.5 5.3731 17.5 5.83333V14.1667C17.5 14.6269 17.1269 15 16.6667 15C16.2064 15 15.8333 14.6269 15.8333 14.1667V5.83333Z" fill="white"/>
</svg>


View File

@ -185,17 +185,17 @@ function _appStateChanged({ dispatch, getState }, next, action) {
* @returns {Object} The value returned by {@code next(action)}.
*/
function _setAudioOnly({ dispatch, getState }, next, action) {
const { audioOnly, ensureVideoTrack } = action;
const { audioOnly } = action;
const state = getState();
sendAnalytics(createTrackMutedEvent('video', 'audio-only mode', audioOnly));
// Make sure we mute both the desktop and video tracks.
dispatch(setVideoMuted(audioOnly, MEDIA_TYPE.VIDEO, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY, ensureVideoTrack));
dispatch(setVideoMuted(audioOnly, MEDIA_TYPE.VIDEO, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY));
if (getMultipleVideoSendingSupportFeatureFlag(state)) {
dispatch(setScreenshareMuted(audioOnly, MEDIA_TYPE.SCREENSHARE, SCREENSHARE_MUTISM_AUTHORITY.AUDIO_ONLY));
} else if (navigator.product !== 'ReactNative') {
dispatch(setVideoMuted(audioOnly, MEDIA_TYPE.PRESENTER, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY, ensureVideoTrack));
dispatch(setVideoMuted(audioOnly, MEDIA_TYPE.PRESENTER, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY));
}
return next(action);
@ -281,7 +281,7 @@ function _setRoom({ dispatch, getState }, next, action) {
sendAnalytics(createStartAudioOnlyEvent(audioOnly));
logger.log(`Start audio only set to ${audioOnly.toString()}`);
dispatch(setAudioOnly(audioOnly, false));
dispatch(setAudioOnly(audioOnly));
if (!roomIsValid) {
dispatch(destroyLocalTracks());

View File

@ -57,20 +57,34 @@ const AVATAR_CHECKER_FUNCTIONS = [
export function getActiveSpeakersToBeDisplayed(stateful: Object | Function) {
const state = toState(stateful);
const {
dominantSpeaker,
fakeParticipants,
sortedRemoteScreenshares,
sortedRemoteVirtualScreenshareParticipants,
speakersList
} = state['features/base/participants'];
const { visibleRemoteParticipants } = state['features/filmstrip'];
const activeSpeakers = new Map(speakersList);
let activeSpeakers = new Map(speakersList);
// Do not re-sort the active speakers if all of them are currently visible.
if (typeof visibleRemoteParticipants === 'undefined' || activeSpeakers.size <= visibleRemoteParticipants.size) {
// Do not re-sort the active speakers if dominant speaker is currently visible.
if (dominantSpeaker && visibleRemoteParticipants.has(dominantSpeaker)) {
return activeSpeakers;
}
let availableSlotsForActiveSpeakers = visibleRemoteParticipants.size;
if (activeSpeakers.has(dominantSpeaker)) {
activeSpeakers.delete(dominantSpeaker);
}
// Add dominant speaker to the beginning of the list (not including self) since the active speaker list is always
// alphabetically sorted.
if (dominantSpeaker && dominantSpeaker !== getLocalParticipant(state).id) {
const updatedSpeakers = Array.from(activeSpeakers);
updatedSpeakers.splice(0, 0, [ dominantSpeaker, getParticipantById(state, dominantSpeaker)?.name ]);
activeSpeakers = new Map(updatedSpeakers);
}
// Remove screenshares from the count.
if (getMultipleVideoSupportFeatureFlag(state)) {
if (sortedRemoteVirtualScreenshareParticipants) {
@ -244,7 +258,7 @@ export function getParticipantCount(stateful: Object | Function) {
sortedRemoteVirtualScreenshareParticipants
} = state['features/base/participants'];
if (getSourceNameSignalingFeatureFlag(state)) {
if (getMultipleVideoSupportFeatureFlag(state)) {
return remote.size - fakeParticipants.size - sortedRemoteVirtualScreenshareParticipants.size + (local ? 1 : 0);
}
@ -286,7 +300,7 @@ export function getFakeParticipants(stateful: Object | Function) {
export function getRemoteParticipantCount(stateful: Object | Function) {
const state = toState(stateful)['features/base/participants'];
if (getSourceNameSignalingFeatureFlag(state)) {
if (getMultipleVideoSupportFeatureFlag(state)) {
return state.remote.size - state.sortedRemoteVirtualScreenshareParticipants.size;
}
@ -306,7 +320,7 @@ export function getParticipantCountWithFake(stateful: Object | Function) {
const state = toState(stateful);
const { local, localScreenShare, remote } = state['features/base/participants'];
if (getSourceNameSignalingFeatureFlag(state)) {
if (getMultipleVideoSupportFeatureFlag(state)) {
return remote.size + (local ? 1 : 0) + (localScreenShare ? 1 : 0);
}

View File

@ -297,25 +297,27 @@ export function showNoDataFromSourceVideoError(jitsiTrack) {
}
/**
* Signals that the local participant is ending screensharing or beginning the
* screensharing flow.
* Signals that the local participant is ending screensharing or beginning the screensharing flow.
*
* @param {boolean} enabled - The state to toggle screen sharing to.
* @param {boolean} audioOnly - Only share system audio.
* @param {boolean} ignoreDidHaveVideo - Whether or not to ignore if video was on when sharing started.
* @param {Object} shareOptions - The options to be passed for capturing screenshare.
* @returns {{
* type: TOGGLE_SCREENSHARING,
* on: boolean,
* audioOnly: boolean,
* ignoreDidHaveVideo: boolean
* ignoreDidHaveVideo: boolean,
* shareOptions: Object
* }}
*/
export function toggleScreensharing(enabled, audioOnly = false, ignoreDidHaveVideo = false) {
export function toggleScreensharing(enabled, audioOnly = false, ignoreDidHaveVideo = false, shareOptions = {}) {
return {
type: TOGGLE_SCREENSHARING,
enabled,
audioOnly,
ignoreDidHaveVideo
ignoreDidHaveVideo,
shareOptions
};
}

View File

@ -2,11 +2,8 @@
import { batch } from 'react-redux';
import UIEvents from '../../../../service/UI/UIEvents';
import { showModeratedNotification } from '../../av-moderation/actions';
import { shouldShowModeratedNotification } from '../../av-moderation/functions';
import { _RESET_BREAKOUT_ROOMS } from '../../breakout-rooms/actionTypes';
import { hideNotification, isModerationNotificationDisplayed } from '../../notifications';
import { hideNotification } from '../../notifications';
import { isPrejoinPageVisible } from '../../prejoin/functions';
import { getCurrentConference } from '../conference/functions';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config';
@ -28,7 +25,6 @@ import {
import { MiddlewareRegistry, StateListenerRegistry } from '../redux';
import {
TOGGLE_SCREENSHARING,
TRACK_ADDED,
TRACK_MUTE_UNMUTE_FAILED,
TRACK_NO_DATA_FROM_SOURCE,
@ -157,31 +153,6 @@ MiddlewareRegistry.register(store => next => action => {
break;
}
case TOGGLE_SCREENSHARING:
if (typeof APP === 'object') {
// check for A/V Moderation when trying to start screen sharing
if ((action.enabled || action.enabled === undefined)
&& shouldShowModeratedNotification(MEDIA_TYPE.VIDEO, store.getState())) {
if (!isModerationNotificationDisplayed(MEDIA_TYPE.PRESENTER, store.getState())) {
store.dispatch(showModeratedNotification(MEDIA_TYPE.PRESENTER));
}
return;
}
const { enabled, audioOnly, ignoreDidHaveVideo } = action;
if (!getMultipleVideoSendingSupportFeatureFlag(store.getState())) {
APP.UI.emitEvent(UIEvents.TOGGLE_SCREENSHARING,
{
enabled,
audioOnly,
ignoreDidHaveVideo
});
}
}
break;
case TRACK_MUTE_UNMUTE_FAILED: {
const { jitsiTrack } = action.track;
const muted = action.wasMuted;

View File

@ -4,7 +4,7 @@ import { Dispatch } from 'redux';
// @ts-ignore
import { getLocalJitsiAudioTrack } from '../base/tracks';
// @ts-ignore
import { NOTIFICATION_TIMEOUT_TYPE, showErrorNotification, showWarningNotification } from '../notifications';
import { NOTIFICATION_TIMEOUT_TYPE, showErrorNotification } from '../notifications';
// @ts-ignore
import { NoiseSuppressionEffect } from '../stream-effects/noise-suppression/NoiseSuppressionEffect';
@ -59,16 +59,6 @@ export function setNoiseSuppressionEnabled(enabled: boolean) : any {
logger.info(`Attempting to set noise suppression enabled state: ${enabled}`);
if (!localAudio) {
logger.warn('Can not apply noise suppression without any local track active.');
dispatch(showWarningNotification({
titleKey: 'notify.noiseSuppressionFailedTitle',
descriptionKey: 'notify.noiseSuppressionNoTrackDescription'
}, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
return;
}
try {
if (enabled && !noiseSuppressionEnabled) {
if (!canEnableNoiseSuppression(state, dispatch, localAudio)) {

View File

@ -4,13 +4,10 @@ import { IState } from '../../app/types';
import { translate } from '../../base/i18n';
// @ts-ignore
import {
IconShareAudio,
IconStopAudioShare
// @ts-ignore
} from '../../base/icons';
// @ts-ignore
import { connect } from '../../base/redux';
// @ts-ignore
IconNoiseSuppressionOn,
IconNoiseSuppressionOff
} from '../../base/icons/svg/index';
import { connect } from '../../base/redux/functions';
import {
AbstractButton,
type AbstractButtonProps
@ -35,10 +32,10 @@ type Props = AbstractButtonProps & {
*/
class NoiseSuppressionButton extends AbstractButton<Props, any, any> {
accessibilityLabel = 'toolbar.accessibilityLabel.noiseSuppression';
icon = IconShareAudio;
icon = IconNoiseSuppressionOn;
label = 'toolbar.noiseSuppression';
tooltip = 'toolbar.noiseSuppression';
toggledIcon = IconStopAudioShare;
toggledIcon = IconNoiseSuppressionOff;
toggledLabel = 'toolbar.disableNoiseSuppression';
private props: Props;
@ -81,4 +78,5 @@ function _mapStateToProps(state: IState): Object {
};
}
// @ts-ignore
export default translate(connect(_mapStateToProps)(NoiseSuppressionButton));

View File

@ -24,6 +24,15 @@ export function isNoiseSuppressionEnabled(state: IState): boolean {
* @returns {boolean}
*/
export function canEnableNoiseSuppression(state: IState, dispatch: Function, localAudio: any) : boolean {
if (!localAudio) {
dispatch(showWarningNotification({
titleKey: 'notify.noiseSuppressionFailedTitle',
descriptionKey: 'notify.noiseSuppressionNoTrackDescription'
}, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
return false;
}
const { channelCount } = localAudio.track.getSettings();
// Sharing screen audio implies an effect being applied to the local track, because currently we don't support

View File

@ -10,10 +10,7 @@ import useContextMenu from '../../../base/components/context-menu/useContextMenu
import participantsPaneTheme from '../../../base/components/themes/participantsPaneTheme.json';
import { isToolbarButtonEnabled } from '../../../base/config/functions.web';
import { MEDIA_TYPE } from '../../../base/media';
import {
getParticipantById,
getParticipantCountWithFake
} from '../../../base/participants';
import { getParticipantById } from '../../../base/participants';
import { connect } from '../../../base/redux';
import { normalizeAccents } from '../../../base/util/strings';
import { getBreakoutRooms, getCurrentRoomId, isInBreakoutRoom } from '../../../breakout-rooms/functions';
@ -161,14 +158,9 @@ function _mapStateToProps(state): Object {
return !participant.isVirtualScreenshareParticipant;
});
// This is very important as getRemoteParticipants is not changing its reference object
// and we will not re-render on change, but if count changes we will do
const participantsCount = getParticipantCountWithFake(state);
const participantsCount = sortedParticipantIds.length;
const showInviteButton = shouldRenderInviteButton(state) && isToolbarButtonEnabled('invite', state);
const overflowDrawer = showOverflowDrawer(state);
const currentRoomId = getCurrentRoomId(state);
const currentRoom = getBreakoutRooms(state)[currentRoomId];

View File

@ -16,6 +16,7 @@ import { ActionButton, InputField, PreMeetingScreen } from '../../base/premeetin
import { connect } from '../../base/redux';
import { getDisplayName, updateSettings } from '../../base/settings';
import { getLocalJitsiVideoTrack } from '../../base/tracks';
import { getIsLobbyVisible } from '../../lobby/functions';
import { PasswordRequiredPrompt } from '../../room-lock/components';
import {
joinConference as joinConferenceAction,
@ -459,8 +460,10 @@ function mapStateToProps(state): Object {
const name = getDisplayName(state);
const showErrorOnJoin = isDisplayNameRequired(state) && !name;
const { id: participantId } = getLocalParticipant(state);
const isLobbyVisible = getIsLobbyVisible(state);
const isAuthInProgress = isDialogOpen(state, WaitForOwnerDialog)
|| isDialogOpen(state, LoginDialog) || isDialogOpen(state, PasswordRequiredPrompt);
|| isDialogOpen(state, LoginDialog) || isDialogOpen(state, PasswordRequiredPrompt)
|| isLobbyVisible;
return {
canEditDisplayName: isPrejoinDisplayNameVisible(state),

View File

@ -763,7 +763,7 @@ function _mapStateToProps(state) {
return {
..._abstractMapStateToProps(state),
isVpaas: isVpaasMeeting(state),
_hideStorageWarning: state['features/base/config'].recording?.hideStorageWarning,
_hideStorageWarning: state['features/base/config'].recordingService?.hideStorageWarning,
_localRecordingEnabled: !state['features/base/config'].localRecording?.disable,
_localRecordingSelfEnabled: !state['features/base/config'].localRecording?.disableSelfRecording,
_localRecordingNoNotification: !state['features/base/config'].localRecording?.notifyAllParticipants,

View File

@ -1,9 +1,10 @@
// @flow
import { getMultipleVideoSendingSupportFeatureFlag } from '../base/config/functions.any';
import { openDialog } from '../base/dialog';
import { JitsiConferenceEvents } from '../base/lib-jitsi-meet';
import { getParticipantDisplayName, getPinnedParticipant, pinParticipant } from '../base/participants';
import { getLocalVideoTrack } from '../base/tracks';
import { getLocalDesktopTrack, getLocalVideoTrack, toggleScreensharing } from '../base/tracks';
import { NOTIFICATION_TIMEOUT_TYPE, showNotification } from '../notifications';
import {
@ -500,7 +501,9 @@ export function sendStartRequest() {
return (dispatch: Function, getState: Function) => {
const state = getState();
const tracks = state['features/base/tracks'];
const track = getLocalVideoTrack(tracks);
const track = getMultipleVideoSendingSupportFeatureFlag(state)
? getLocalDesktopTrack(tracks)
: getLocalVideoTrack(tracks);
const { sourceId } = track?.jitsiTrack || {};
const { transport } = state['features/remote-control'].receiver;
@ -530,12 +533,21 @@ export function grant(participantId: string) {
let promise;
const state = getState();
const tracks = state['features/base/tracks'];
const track = getLocalVideoTrack(tracks);
const isMultiStreamSupportEnabled = getMultipleVideoSendingSupportFeatureFlag(state);
const track = isMultiStreamSupportEnabled ? getLocalDesktopTrack(tracks) : getLocalVideoTrack(tracks);
const isScreenSharing = track?.videoType === 'desktop';
const { sourceType } = track?.jitsiTrack || {};
if (isScreenSharing && sourceType === 'screen') {
promise = dispatch(sendStartRequest());
} else if (isMultiStreamSupportEnabled) {
promise = dispatch(toggleScreensharing(
true,
false,
true,
{ desktopSharingSources: [ 'screen' ] }
))
.then(() => dispatch(sendStartRequest()));
} else {
// FIXME: Use action here once toggleScreenSharing is moved to redux.
promise = APP.conference.toggleScreenSharing(

View File

@ -1,5 +1,10 @@
// @flow
import {
getParticipantById,
getVirtualScreenshareParticipantByOwnerId,
getVirtualScreenshareParticipantOwnerId
} from '../base/participants';
import { StateListenerRegistry } from '../base/redux';
import { resume, pause } from './actions';
@ -17,6 +22,22 @@ StateListenerRegistry.register(
return undefined;
}
const participant = getParticipantById(state, participantId);
if (participant?.isVirtualScreenshareParticipant) {
// multistream support is enabled and the user has selected the desktop sharing thumbnail.
const id = getVirtualScreenshareParticipantOwnerId(participantId);
return id === controlled;
}
const virtualParticipant = getVirtualScreenshareParticipantByOwnerId(state, participantId);
if (virtualParticipant) { // multistream is enabled and the user has selected the camera thumbnail.
return false;
}
return controlled === participantId;
},
/* listener */ (isControlledParticipantOnStage, { dispatch }) => {

View File

@ -1,6 +1,5 @@
// @flow
import { getMultipleVideoSendingSupportFeatureFlag } from '../base/config/functions.any';
import { openDialog } from '../base/dialog/actions';
import { browser } from '../base/lib-jitsi-meet';
import { shouldHideShareAudioHelper } from '../base/settings';
@ -86,12 +85,6 @@ export function startAudioScreenShareFlow() {
// available for audio screen sharing, namely full window audio.
// If we're already sharing audio, toggle off.
if (shouldHideShareAudioHelper(state) || browser.isElectron() || audioOnlySharing) {
if (getMultipleVideoSendingSupportFeatureFlag(state)) {
dispatch(toggleScreensharing(!audioOnlySharing, true));
return;
}
// We don't want to explicitly set the screens share state, by passing undefined we let the
// underlying logic decide if it's on or off.
dispatch(toggleScreensharing(undefined, true));

View File

@ -1,6 +1,6 @@
// @flow
import { getMultipleVideoSupportFeatureFlag } from '../base/config';
import { getMultipleVideoSendingSupportFeatureFlag } from '../base/config';
import { isWindows } from '../base/environment';
import { isMobileBrowser } from '../base/environment/utils';
import { browser } from '../base/lib-jitsi-meet';
@ -57,8 +57,7 @@ export function isScreenVideoShared(state: Object) {
const tracks = state['features/base/tracks'];
const localScreenshare = getLocalDesktopTrack(tracks);
if (getMultipleVideoSupportFeatureFlag(state)) {
if (getMultipleVideoSendingSupportFeatureFlag(state)) {
return localScreenshare && localScreenshare.jitsiTrack && !localScreenshare.jitsiTrack.isMuted();
}
const localVideo = getLocalVideoTrack(tracks);

View File

@ -1,7 +1,7 @@
// @flow
import { getLocalVideoTrack } from '../../features/base/tracks';
import { getLocalJitsiDesktopTrack, getLocalJitsiVideoTrack } from '../../features/base/tracks';
import { getMultipleVideoSendingSupportFeatureFlag } from '../base/config';
import { SET_SCREENSHOT_CAPTURE } from './actionTypes';
import { createScreenshotCaptureSummary } from './functions';
@ -46,12 +46,13 @@ export function toggleScreenshotCaptureSummary(enabled: boolean) {
if (enabled) {
try {
const { jitsiTrack } = getLocalVideoTrack(state['features/base/tracks']);
const jitsiTrack = getMultipleVideoSendingSupportFeatureFlag(state)
? getLocalJitsiDesktopTrack(state)
: getLocalJitsiVideoTrack(state);
await screenshotSummary.start(jitsiTrack);
dispatch(setScreenshotCapture(enabled));
} catch {
// Handle promise rejection from {@code start} due to stream type not being desktop.
logger.error('Unsupported stream type.');
}

View File

@ -1 +1,2 @@
export * from './actions';
export * from './functions';

View File

@ -29,6 +29,16 @@ export class NoiseSuppressionEffect {
*/
private _noiseSuppressorNode: AudioWorkletNode;
/**
* Audio track extracted from the original MediaStream to which the effect is applied.
*/
private _originalMediaTrack: MediaStreamTrack;
/**
* Noise suppressed audio track extracted from the media destination node.
*/
private _outputMediaTrack: MediaStreamTrack;
/**
* Effect interface called by source JitsiLocalTrack.
* Applies effect that uses a {@code NoiseSuppressor} service initialized with {@code RnnoiseProcessor}
@ -38,10 +48,11 @@ export class NoiseSuppressionEffect {
* @returns {MediaStream} - MediaStream containing both audio tracks mixed together.
*/
startEffect(audioStream: MediaStream) : MediaStream {
this._originalMediaTrack = audioStream.getAudioTracks()[0];
this._audioContext = new AudioContext();
this._audioSource = this._audioContext.createMediaStreamSource(audioStream);
this._audioDestination = this._audioContext.createMediaStreamDestination();
this._outputMediaTrack = this._audioDestination.stream.getAudioTracks()[0];
const baseUrl = `${getBaseUrl()}libs/`;
const workletUrl = `${baseUrl}noise-suppressor-worklet.min.js`;
@ -57,6 +68,13 @@ export class NoiseSuppressionEffect {
logger.error('Error while adding audio worklet module: ', error);
});
// Sync the effect track muted state with the original track state.
this._outputMediaTrack.enabled = this._originalMediaTrack.enabled;
// We enable the audio on the original track because mute/unmute action will only affect the audio destination
// output track from this point on.
this._originalMediaTrack.enabled = true;
return this._audioDestination.stream;
}
@ -77,6 +95,9 @@ export class NoiseSuppressionEffect {
* @returns {void}
*/
stopEffect(): void {
// Sync original track muted state with effect state before removing the effect.
this._originalMediaTrack.enabled = this._outputMediaTrack.enabled;
// Technically after this process the Audio Worklet along with it's resources should be garbage collected,
// however on chrome there seems to be a problem as described here:
// https://bugs.chromium.org/p/chromium/issues/detail?id=1298955

View File

@ -160,8 +160,7 @@ class VideoMuteButton extends AbstractVideoMuteButton<Props, *> {
_setVideoMuted(videoMuted: boolean) {
sendAnalytics(createToolbarEvent(VIDEO_MUTE, { enable: videoMuted }));
if (this.props._audioOnly) {
this.props.dispatch(
setAudioOnly(false, /* ensureTrack */ true));
this.props.dispatch(setAudioOnly(false));
}
const mediaType = this.props._videoMediaType;

View File

@ -12,7 +12,7 @@ import { getAutoPinSetting, updateAutoPinnedParticipant } from './functions';
StateListenerRegistry.register(
/* selector */ state => state['features/base/participants'].sortedRemoteVirtualScreenshareParticipants,
/* listener */ (sortedRemoteVirtualScreenshareParticipants, store) => {
if (!getAutoPinSetting() || isFollowMeActive(store) || !getMultipleVideoSupportFeatureFlag(store.getState())) {
if (!getMultipleVideoSupportFeatureFlag(store.getState())) {
return;
}
@ -36,7 +36,9 @@ StateListenerRegistry.register(
if (!equals(oldScreenSharesOrder, newScreenSharesOrder)) {
store.dispatch(virtualScreenshareParticipantsUpdated(newScreenSharesOrder));
updateAutoPinnedParticipant(oldScreenSharesOrder, store);
if (getAutoPinSetting() && !isFollowMeActive(store)) {
updateAutoPinnedParticipant(oldScreenSharesOrder, store);
}
}
});
@ -53,7 +55,7 @@ StateListenerRegistry.register(
// possible to have screen sharing participant that has already left in the remoteScreenShares array.
// This can lead to rendering a thumbnails for already left participants since the remoteScreenShares
// array is used for building the ordered list of remote participants.
if (!getAutoPinSetting() || isFollowMeActive(store) || getMultipleVideoSupportFeatureFlag(store.getState())) {
if (getMultipleVideoSupportFeatureFlag(store.getState())) {
return;
}
@ -88,6 +90,8 @@ StateListenerRegistry.register(
store.dispatch(
setRemoteParticipantsWithScreenShare(newScreenSharesOrder));
updateAutoPinnedParticipant(oldScreenSharesOrder, store);
if (getAutoPinSetting() && !isFollowMeActive(store)) {
updateAutoPinnedParticipant(oldScreenSharesOrder, store);
}
}
}, 100));