Remove legacy signaling and legacy SS mode. (#12499)

* fix(connection-status): remove unused participant connectionStatus.
Always use trackStreamingStatus (sketched below) now that legacy endpoint-based signaling has been removed.

* Remove the check for source-name signaling.
Always default to source-name signaling.

* Remove the check for multi-stream mode.
Make that the default mode and remove support for the legacy screen share (SS) mode.

* Remove presenter mode.

* update lib-jitsi-meet@latest
Authored by Jaya Allamsetty on 2022-11-08 14:15:49 -05:00; committed by GitHub
parent 1731d5188d
commit f3e4c57036
61 changed files with 298 additions and 1697 deletions
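
The pattern this commit converges on, in sketch form: instead of reading a participant-level connectionStatus (the removed endpoint-based signal), consumers resolve the participant's video track and check its per-track streaming status. The selector names are taken from the hunks below; the standalone helper and its wiring are a hypothetical sketch, not code from this commit.

import { getParticipantById } from './react/features/base/participants';
import { getVideoTrackByParticipant } from './react/features/base/tracks';
import { isTrackStreamingStatusActive } from './react/features/connection-indicator/functions';

// Hypothetical helper: a remote participant's video is renderable when its
// track (not its endpoint) reports an active streaming status.
function isRemoteVideoRenderable(state: any, id: string): boolean {
    const participant = getParticipantById(state, id);
    const videoTrack = getVideoTrackByParticipant(state, participant);

    return Boolean(videoTrack) && isTrackStreamingStatusActive(videoTrack);
}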

View File

@ -27,7 +27,6 @@ import {
} from './react/features/app/actions';
import { showModeratedNotification } from './react/features/av-moderation/actions';
import { shouldShowModeratedNotification } from './react/features/av-moderation/functions';
import { setAudioOnly } from './react/features/base/audio-only';
import {
AVATAR_URL_COMMAND,
CONFERENCE_LEAVE_REASONS,
@ -77,7 +76,6 @@ import {
JitsiConnectionEvents,
JitsiE2ePingEvents,
JitsiMediaDevicesEvents,
JitsiParticipantConnectionStatus,
JitsiTrackErrors,
JitsiTrackEvents,
browser
@ -101,9 +99,7 @@ import {
getNormalizedDisplayName,
getVirtualScreenshareParticipantByOwnerId,
localParticipantAudioLevelChanged,
localParticipantConnectionStatusChanged,
localParticipantRoleChanged,
participantConnectionStatusChanged,
participantKicked,
participantMutedUs,
participantPresenceChanged,
@ -112,20 +108,15 @@ import {
screenshareParticipantDisplayNameChanged,
updateRemoteParticipantFeatures
} from './react/features/base/participants';
import {
getUserSelectedCameraDeviceId,
updateSettings
} from './react/features/base/settings';
import { updateSettings } from './react/features/base/settings';
import {
addLocalTrack,
createLocalPresenterTrack,
createLocalTracksF,
destroyLocalTracks,
getLocalJitsiAudioTrack,
getLocalJitsiVideoTrack,
getLocalTracks,
getLocalVideoTrack,
isLocalCameraTrackMuted,
isLocalTrackMuted,
isUserInteractionRequiredForUnmute,
replaceLocalTrack,
@ -154,9 +145,7 @@ import { isPrejoinPageVisible } from './react/features/prejoin/functions';
import { disableReceiver, stopReceiver } from './react/features/remote-control';
import { isScreenAudioShared, setScreenAudioShareState } from './react/features/screen-share/';
import { toggleScreenshotCaptureSummary } from './react/features/screenshot-capture';
import { isScreenshotCaptureEnabled } from './react/features/screenshot-capture/functions';
import { AudioMixerEffect } from './react/features/stream-effects/audio-mixer/AudioMixerEffect';
import { createPresenterEffect } from './react/features/stream-effects/presenter';
import { createRnnoiseProcessor } from './react/features/stream-effects/rnnoise';
import { endpointMessageReceived } from './react/features/subtitles';
import { handleToggleVideoMuted } from './react/features/toolbox/actions.any';
@ -188,15 +177,6 @@ let _connectionPromise;
*/
let _onConnectionPromiseCreated;
/**
* This promise is used for chaining mutePresenterVideo calls in order to avoid calling GUM multiple times if it takes
* a while to finish.
*
* @type {Promise<void>}
* @private
*/
let _prevMutePresenterVideo = Promise.resolve();
/*
* Logic to open a desktop picker put on the window global for
* lib-jitsi-meet to detect and invoke
@ -480,12 +460,6 @@ export default {
isSharingScreen: false,
/**
* The local presenter video track (if any).
* @type {JitsiLocalTrack|null}
*/
localPresenterVideo: null,
/**
* Returns an object containing a promise which resolves with the created tracks &
* the errors resulting from that process.
@ -530,22 +504,10 @@ export default {
firePermissionPromptIsShownEvent: true
};
// FIXME is there any simpler way to rewrite this spaghetti below ?
if (options.startScreenSharing) {
// This option has been deprecated since it is no longer supported as per the w3c spec.
// https://w3c.github.io/mediacapture-screen-share/#dom-mediadevices-getdisplaymedia. If the user has not
// interacted with the webpage before the getDisplayMedia call, the promise will be rejected by the
// browser. This has already been implemented in Firefox and Safari and will be implemented in Chrome soon.
// https://bugs.chromium.org/p/chromium/issues/detail?id=1198918
// Please note that Spot uses the same config option to use an external video input device label as
// screenshare and calls getUserMedia instead of getDisplayMedia for capturing the media. Therefore it
// needs to be supported here if _desktopSharingSourceDevice is provided.
const errMessage = new Error('startScreenSharing config option is no longer supported for web browsers');
const desktopPromise = config._desktopSharingSourceDevice
? this._createDesktopTrack()
: Promise.reject(errMessage);
tryCreateLocalTracks = desktopPromise
// Spot uses the _desktopSharingSourceDevice config option to use an external video input device label as
// screenshare and calls getUserMedia instead of getDisplayMedia for capturing the media.
if (options.startScreenSharing && config._desktopSharingSourceDevice) {
tryCreateLocalTracks = this._createDesktopTrack()
.then(([ desktopStream ]) => {
if (!requestedAudio) {
return [ desktopStream ];
@ -910,8 +872,7 @@ export default {
isLocalVideoMuted() {
// If the tracks are not ready, read from base/media state
return this._localTracksInitialized
? isLocalCameraTrackMuted(
APP.store.getState()['features/base/tracks'])
? isLocalTrackMuted(APP.store.getState()['features/base/tracks'], MEDIA_TYPE.VIDEO)
: isVideoMutedByUser(APP.store);
},
@ -1031,36 +992,6 @@ export default {
this.muteAudio(!this.isLocalAudioMuted(), showUI);
},
/**
* Simulates toolbar button click for presenter video mute. Used by
* shortcuts and API.
* @param mute true for mute and false for unmute.
* @param {boolean} [showUI] when set to false will not display any error
* dialogs in case of media permissions error.
*/
async mutePresenter(mute, showUI = true) {
const maybeShowErrorDialog = error => {
showUI && APP.store.dispatch(notifyCameraError(error));
};
const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
if (mute) {
try {
await localVideo.setEffect(undefined);
} catch (err) {
logger.error('Failed to remove the presenter effect', err);
maybeShowErrorDialog(err);
}
} else {
try {
await localVideo.setEffect(await this._createPresenterStreamEffect());
} catch (err) {
logger.error('Failed to apply the presenter effect', err);
maybeShowErrorDialog(err);
}
}
},
/**
* Simulates toolbar button click for video mute. Used by shortcuts and API.
* @param mute true for mute and false for unmute.
@ -1069,8 +1000,6 @@ export default {
*/
muteVideo(mute, showUI = true) {
if (this.videoSwitchInProgress) {
// Turning the camera on while the screen sharing mode is being turned off is causing issues around
// the presenter mode handling. It should be okay for the user to click the button again once that's done.
console.warn('muteVideo - unable to perform operations while video switch is in progress');
return;
@ -1083,13 +1012,6 @@ export default {
return;
}
if (this.isSharingScreen) {
// Chain _mutePresenterVideo calls
_prevMutePresenterVideo = _prevMutePresenterVideo.then(() => this._mutePresenterVideo(mute));
return;
}
// If not ready to modify track's state yet adjust the base/media
if (!this._localTracksInitialized) {
// This will only modify base/media.video.muted which is then synced
@ -1401,8 +1323,6 @@ export default {
// Restore initial state.
this._localTracksInitialized = false;
this.isSharingScreen = false;
this.localPresenterVideo = null;
this.roomName = roomName;
const { tryCreateLocalTracks, errors } = this.createInitialLocalTracks(options);
@ -1534,33 +1454,6 @@ export default {
});
},
/**
* Sets `this.isSharingScreen` depending on provided video stream.
* In case new screen sharing status is not equal previous one
* it updates desktop sharing buttons in UI
* and notifies external application.
*
* @param {JitsiLocalTrack} [newStream] new stream to use or null
* @private
* @returns {void}
*/
_setSharingScreen(newStream) {
const wasSharingScreen = this.isSharingScreen;
this.isSharingScreen = newStream && newStream.videoType === 'desktop';
if (wasSharingScreen !== this.isSharingScreen) {
const details = {};
if (this.isSharingScreen) {
details.sourceType = newStream.sourceType;
}
APP.API.notifyScreenSharingStatusChanged(
this.isSharingScreen, details);
}
},
/**
* Start using provided audio stream.
* Stops previous audio stream.
@ -1640,35 +1533,6 @@ export default {
const tracks = APP.store.getState()['features/base/tracks'];
const duration = getLocalVideoTrack(tracks)?.jitsiTrack.getDuration() ?? 0;
// It can happen that presenter GUM is in progress while screensharing is being turned off. Here it needs to
// wait for that GUM to be resolved in order to prevent leaking the presenter track(this.localPresenterVideo
// will be null when SS is being turned off, but it will initialize once GUM resolves).
let promise = _prevMutePresenterVideo = _prevMutePresenterVideo.then(() => {
// mute the presenter track if it exists.
if (this.localPresenterVideo) {
return (
this.localPresenterVideo.dispose().then(() => {
APP.store.dispatch(trackRemoved(this.localPresenterVideo));
this.localPresenterVideo = null;
})
.then(() => {
// This is needed only for setting the correct muted state in features/base/media.
// NOTE: It is important to be executed after we have disposed and removed the presenter track.
// This way all the side effects won't be executed and we won't start additional O/A cycle for
// replacing the track with video with the one without video. This O/A cycle is not needed since
// we are trying to destroy all tracks. Also due to the current async nature of muting the
// presenter, the final removal of the screen sharing track (see the code at the end of the
// function) can be executed between the removal of the stream with video and adding the
// original screen sharing stream to the peer connection. This will lead to a failure to remove
// the screen sharing track, compromising the screen sharing state in jitsi-meet and the user
// won't be able to turn off the screen sharing.
APP.store.dispatch(setVideoMuted(true, MEDIA_TYPE.PRESENTER));
})
);
}
});
// If system audio was also shared stop the AudioMixerEffect and dispose of the desktop audio track.
if (this._mixerEffect) {
const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
@ -1687,9 +1551,10 @@ export default {
}
APP.store.dispatch(setScreenAudioShareState(false));
let promise;
if (didHaveVideo && !ignoreDidHaveVideo) {
promise = promise.then(() => createLocalTracksF({ devices: [ 'video' ] }))
promise = createLocalTracksF({ devices: [ 'video' ] })
.then(([ stream ]) => {
logger.debug(`_turnScreenSharingOff using ${stream} for useVideoStream`);
@ -1705,11 +1570,7 @@ export default {
);
});
} else {
promise = promise.then(() => {
logger.debug('_turnScreenSharingOff using null for useVideoStream');
return this.useVideoStream(null);
});
promise = this.useVideoStream(null);
}
return promise.then(
@ -1727,56 +1588,6 @@ export default {
});
},
/**
* Toggles between screen sharing and camera video if the toggle parameter
* is not specified and starts the procedure for obtaining new screen
* sharing/video track otherwise.
*
* NOTE: this is currently ONLY used in the non-multi-stream case.
*
* @param {boolean} [toggle] - If true - new screen sharing track will be
* obtained. If false - new video track will be obtain. If not specified -
* toggles between screen sharing and camera video.
* @param {Object} [options] - Screen sharing options that will be passed to
* createLocalTracks.
* @param {boolean} [options.audioOnly] - Whether or not audioOnly is enabled.
* @param {Array<string>} [options.desktopSharingSources] - Array with the
* sources that have to be displayed in the desktop picker window ('screen',
* 'window', etc.).
* @param {Object} [options.desktopStream] - An existing desktop stream to
* use instead of creating a new desktop stream.
* @param {boolean} ignoreDidHaveVideo - if true ignore if video was on when sharing started.
* @return {Promise.<T>}
*/
async toggleScreenSharing(toggle = !this._untoggleScreenSharing, options = {}, ignoreDidHaveVideo) {
logger.debug(`toggleScreenSharing: ${toggle}`);
if (this.videoSwitchInProgress) {
return Promise.reject(`toggleScreenSharing: ${toggle} aborted - video switch in progress.`);
}
if (!JitsiMeetJS.isDesktopSharingEnabled()) {
return Promise.reject('Cannot toggle screen sharing: not supported.');
}
if (toggle) {
try {
await this._switchToScreenSharing(options);
if (this.isAudioOnly()) {
APP.store.dispatch(setAudioOnly(false));
}
return;
} catch (err) {
logger.error('Failed to switch to screensharing', err);
return;
}
}
return this._untoggleScreenSharing
? this._untoggleScreenSharing(ignoreDidHaveVideo)
: Promise.resolve();
},
/**
* Creates desktop (screensharing) {@link JitsiLocalTrack}
*
@ -1849,228 +1660,6 @@ export default {
});
},
/**
* Creates a new instance of presenter effect. A new video track is created
* using the new set of constraints that are calculated based on
* the height of the desktop that is being currently shared.
*
* @param {number} height - The height of the desktop stream that is being
* currently shared.
* @param {string} cameraDeviceId - The device id of the camera to be used.
* @return {Promise<JitsiStreamPresenterEffect>} - A promise resolved with
* {@link JitsiStreamPresenterEffect} if it succeeds.
*/
async _createPresenterStreamEffect(height = null, cameraDeviceId = null) {
if (!this.localPresenterVideo) {
const camera = cameraDeviceId ?? getUserSelectedCameraDeviceId(APP.store.getState());
try {
this.localPresenterVideo = await createLocalPresenterTrack({ cameraDeviceId: camera }, height);
} catch (err) {
logger.error('Failed to create a camera track for presenter', err);
return;
}
APP.store.dispatch(trackAdded(this.localPresenterVideo));
}
try {
const effect = await createPresenterEffect(this.localPresenterVideo.stream);
return effect;
} catch (err) {
logger.error('Failed to create the presenter effect', err);
}
},
/**
* Tries to turn the presenter video track on or off. If a presenter track
* doesn't exist, a new video track is created.
*
* @param mute - true for mute and false for unmute.
*
* @private
*/
async _mutePresenterVideo(mute) {
const maybeShowErrorDialog = error => {
APP.store.dispatch(notifyCameraError(error));
};
// Check for NO-OP
if (mute && (!this.localPresenterVideo || this.localPresenterVideo.isMuted())) {
return;
} else if (!mute && this.localPresenterVideo && !this.localPresenterVideo.isMuted()) {
return;
}
// Create a new presenter track and apply the presenter effect.
if (!this.localPresenterVideo && !mute) {
const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
const { height, width } = localVideo.track.getSettings() ?? localVideo.track.getConstraints();
const isPortrait = height >= width;
const DESKTOP_STREAM_CAP = 720;
const highResolutionTrack
= (isPortrait && width > DESKTOP_STREAM_CAP) || (!isPortrait && height > DESKTOP_STREAM_CAP);
// Resizing the desktop track for presenter is causing blurriness of the desktop share on chrome.
// Disable resizing by default, enable it only when config.js setting is enabled.
const resizeDesktopStream = highResolutionTrack && config.videoQuality?.resizeDesktopForPresenter;
if (resizeDesktopStream) {
let desktopResizeConstraints = {};
if (height && width) {
const advancedConstraints = [ { aspectRatio: (width / height).toPrecision(4) } ];
const constraint = isPortrait ? { width: DESKTOP_STREAM_CAP } : { height: DESKTOP_STREAM_CAP };
advancedConstraints.push(constraint);
desktopResizeConstraints.advanced = advancedConstraints;
} else {
desktopResizeConstraints = {
width: 1280,
height: 720
};
}
// Apply the constraints on the desktop track.
try {
await localVideo.track.applyConstraints(desktopResizeConstraints);
} catch (err) {
logger.error('Failed to apply constraints on the desktop stream for presenter mode', err);
return;
}
}
const trackHeight = resizeDesktopStream
? localVideo.track.getSettings().height ?? DESKTOP_STREAM_CAP
: height;
let effect;
try {
effect = await this._createPresenterStreamEffect(trackHeight);
} catch (err) {
logger.error('Failed to unmute Presenter Video', err);
maybeShowErrorDialog(err);
return;
}
// Replace the desktop track on the peerconnection.
try {
await localVideo.setEffect(effect);
APP.store.dispatch(setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
this.setVideoMuteStatus();
} catch (err) {
logger.error('Failed to apply the Presenter effect', err);
}
} else {
APP.store.dispatch(setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
}
},
/**
* Tries to switch to the screensharing mode by disposing camera stream and
* replacing it with a desktop one.
*
* @param {Object} [options] - Screen sharing options that will be passed to
* createLocalTracks.
*
* @return {Promise} - A Promise resolved if the operation succeeds or
* rejected with some unknown type of error in case it fails. Promise will
* be rejected immediately if {@link videoSwitchInProgress} is true.
*
* @private
*/
_switchToScreenSharing(options = {}) {
if (this.videoSwitchInProgress) {
return Promise.reject('Switch in progress.');
}
this.videoSwitchInProgress = true;
return this._createDesktopTrack(options)
.then(async streams => {
let desktopVideoStream = streams.find(stream => stream.getType() === MEDIA_TYPE.VIDEO);
this._desktopAudioStream = streams.find(stream => stream.getType() === MEDIA_TYPE.AUDIO);
const { audioOnly = false } = options;
// If we're in audio only mode dispose of the video track otherwise the screensharing state will be
// inconsistent.
if (audioOnly) {
desktopVideoStream.dispose();
desktopVideoStream = undefined;
if (!this._desktopAudioStream) {
return Promise.reject(AUDIO_ONLY_SCREEN_SHARE_NO_TRACK);
}
}
if (desktopVideoStream) {
logger.debug(`_switchToScreenSharing is using ${desktopVideoStream} for useVideoStream`);
await this.useVideoStream(desktopVideoStream);
}
if (this._desktopAudioStream) {
// Noise suppression doesn't work with desktop audio because we can't chain
// track effects yet, disable it first.
// We need to to wait for the effect to clear first or it might interfere with the audio mixer.
await APP.store.dispatch(setNoiseSuppressionEnabled(false));
const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
// If there is a localAudio stream, mix in the desktop audio stream captured by the screen sharing
// api.
if (localAudio) {
this._mixerEffect = new AudioMixerEffect(this._desktopAudioStream);
logger.debug(`_switchToScreenSharing is mixing ${this._desktopAudioStream} and ${localAudio}`
+ ' as a single audio stream');
await localAudio.setEffect(this._mixerEffect);
} else {
// If no local stream is present ( i.e. no input audio devices) we use the screen share audio
// stream as we would use a regular stream.
logger.debug(`_switchToScreenSharing is using ${this._desktopAudioStream} for replacing it as`
+ ' the only audio track on the conference');
await room.replaceTrack(null, this._desktopAudioStream);
}
APP.store.dispatch(setScreenAudioShareState(true));
}
})
.then(() => {
this.videoSwitchInProgress = false;
if (isScreenshotCaptureEnabled(APP.store.getState(), false, true)) {
APP.store.dispatch(toggleScreenshotCaptureSummary(true));
}
sendAnalytics(createScreenSharingEvent('started'));
logger.log('Screen sharing started');
})
.catch(error => {
this.videoSwitchInProgress = false;
// Pawel: With this call I'm trying to preserve the original
// behaviour although it is not clear why would we "untoggle"
// on failure. I suppose it was to restore video in case there
// was some problem during "this.useVideoStream(desktopStream)".
// It's important to note that the handler will not be available
// if we fail early on trying to get desktop media (which makes
// sense, because the camera video is still being used, so
// nothing to "untoggle").
if (this._untoggleScreenSharing) {
this._untoggleScreenSharing();
}
// FIXME the code inside of _handleScreenSharingError is
// asynchronous, but does not return a Promise and is not part
// of the current Promise chain.
this._handleScreenSharingError(error);
return Promise.reject(error);
});
},
/**
* Handles {@link JitsiTrackError} returned by the lib-jitsi-meet when
* trying to create screensharing track. It will either do nothing if
@ -2275,11 +1864,6 @@ export default {
(jitsiConference, p2p) =>
APP.store.dispatch(p2pStatusChanged(p2p)));
room.on(
JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED,
(id, connectionStatus) => APP.store.dispatch(
participantConnectionStatusChanged(id, connectionStatus)));
room.on(
JitsiConferenceEvents.DOMINANT_SPEAKER_CHANGED,
(dominant, previous, silence) => {
@ -2290,16 +1874,6 @@ export default {
JitsiConferenceEvents.CONFERENCE_CREATED_TIMESTAMP,
conferenceTimestamp => APP.store.dispatch(conferenceTimestampChanged(conferenceTimestamp)));
room.on(JitsiConferenceEvents.CONNECTION_INTERRUPTED, () => {
APP.store.dispatch(localParticipantConnectionStatusChanged(
JitsiParticipantConnectionStatus.INTERRUPTED));
});
room.on(JitsiConferenceEvents.CONNECTION_RESTORED, () => {
APP.store.dispatch(localParticipantConnectionStatusChanged(
JitsiParticipantConnectionStatus.ACTIVE));
});
room.on(
JitsiConferenceEvents.DISPLAY_NAME_CHANGED,
(id, displayName) => {
@ -2519,51 +2093,13 @@ export default {
APP.UI.addListener(
UIEvents.VIDEO_DEVICE_CHANGED,
cameraDeviceId => {
const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
const videoWasMuted = this.isLocalVideoMuted();
sendAnalytics(createDeviceChangedEvent('video', 'input'));
// If both screenshare and video are in progress, restart the
// presenter mode with the new camera device.
if (this.isSharingScreen && !videoWasMuted) {
const { height } = localVideo.track.getSettings();
// dispose the existing presenter track and create a new
// camera track.
// FIXME JitsiLocalTrack.dispose is async and should be waited for
this.localPresenterVideo && this.localPresenterVideo.dispose();
this.localPresenterVideo = null;
return this._createPresenterStreamEffect(height, cameraDeviceId)
.then(effect => localVideo.setEffect(effect))
.then(() => {
this.setVideoMuteStatus();
logger.log('Switched local video device while screen sharing and the video is unmuted');
this._updateVideoDeviceId();
})
.catch(err => APP.store.dispatch(notifyCameraError(err)));
// If screenshare is in progress but video is muted, update the default device
// id for video, dispose the existing presenter track and create a new effect
// that can be applied on un-mute.
} else if (this.isSharingScreen && videoWasMuted) {
logger.log('Switched local video device: while screen sharing and the video is muted');
const { height } = localVideo.track.getSettings();
this._updateVideoDeviceId();
// FIXME JitsiLocalTrack.dispose is async and should be waited for
this.localPresenterVideo && this.localPresenterVideo.dispose();
this.localPresenterVideo = null;
this._createPresenterStreamEffect(height, cameraDeviceId);
// if there is only video, switch to the new camera stream.
} else {
createLocalTracksF({
devices: [ 'video' ],
cameraDeviceId,
micDeviceId: null
cameraDeviceId
})
.then(([ stream ]) => {
// if we are in audio only mode or video was muted before
@ -2590,7 +2126,6 @@ export default {
return APP.store.dispatch(notifyCameraError(error));
});
}
}
);
APP.UI.addListener(
@ -2613,7 +2148,6 @@ export default {
sendAnalytics(createDeviceChangedEvent('audio', 'input'));
createLocalTracksF({
devices: [ 'audio' ],
cameraDeviceId: null,
micDeviceId: selectedDeviceId
})
.then(([ stream ]) => {
@ -2756,13 +2290,6 @@ export default {
cameraDeviceId: localVideo.getDeviceId()
}));
}
// If screenshare is in progress, get the device id from the presenter track.
if (this.localPresenterVideo) {
APP.store.dispatch(updateSettings({
cameraDeviceId: this.localPresenterVideo.getDeviceId()
}));
}
},
/**
@ -3211,7 +2738,7 @@ export default {
return;
}
APP.store.dispatch(toggleScreensharingA(undefined, false, false, { desktopStream }));
APP.store.dispatch(toggleScreensharingA(undefined, false, { desktopStream }));
}
});
}
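
A note on the one startScreenSharing path left in the createInitialLocalTracks hunk above: it now applies only when config._desktopSharingSourceDevice names an external video input (the Spot case, captured via getUserMedia rather than getDisplayMedia, so no user gesture is required). A hypothetical config sketch; the device label is made up.

// Hypothetical Spot-style config: the screenshare comes from a physical
// capture device, so getDisplayMedia is never called.
const config = {
    startScreenSharing: true,
    _desktopSharingSourceDevice: 'HDMI Capture Device' // device label (assumed)
};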

View File

@ -79,15 +79,6 @@ var config = {
// This is useful when the client runs on a host with limited resources.
// noAutoPlayVideo: false,
// Enable / disable 500 Kbps bitrate cap on desktop tracks. When enabled,
// simulcast is turned off for the desktop share. If presenter is turned
// on while screensharing is in progress, the max bitrate is automatically
// adjusted to 2.5 Mbps. This takes a value between 0 and 1 which determines
// the probability for this to be enabled. This setting has been deprecated.
// desktopSharingFrameRate.max now determines whether simulcast will be enabled
// or disabled for the screenshare.
// capScreenshareBitrate: 1, // 0 to disable - deprecated.
// Whether to use fake constraints (height: 99999, width: 99999) when calling getDisplayMedia on
// Chromium based browsers. This is intended as a workaround for
// https://bugs.chromium.org/p/chromium/issues/detail?id=1056311
@ -99,20 +90,6 @@ var config = {
// callStatsThreshold: 5, // enable callstats for 5% of the users.
},
// Feature Flags.
flags: {
// Enables source names in the signaling.
// sourceNameSignaling: false,
// Enables sending multiple video streams, i.e., camera and desktop tracks can be shared in the conference
// separately as two different streams instead of one composite stream.
// sendMultipleVideoStreams: false,
// Signal that this client supports receiving multiple video streams. Without this flag jicofo will enable
// multi-stream backward compatibility.
// receiveMultipleVideoStreams: true,
},
// Disables moderator indicators.
// disableModeratorIndicator: false,
@ -523,9 +500,6 @@ var config = {
// 720: 'high',
// },
//
// // Provides a way to resize the desktop track to 720p (if it is greater than 720p) before creating a canvas
// // for the presenter mode (camera picture-in-picture mode with screenshare).
// resizeDesktopForPresenter: false,
// },
// Notification timeouts

View File

@ -10,10 +10,6 @@ import { Provider } from 'react-redux';
import { createScreenSharingIssueEvent, sendAnalytics } from '../../../react/features/analytics';
import { Avatar } from '../../../react/features/base/avatar';
import theme from '../../../react/features/base/components/themes/participantsPaneTheme.json';
import {
getMultipleVideoSupportFeatureFlag,
getSourceNameSignalingFeatureFlag
} from '../../../react/features/base/config';
import { i18next } from '../../../react/features/base/i18n';
import { JitsiTrackEvents } from '../../../react/features/base/lib-jitsi-meet';
import { VIDEO_TYPE } from '../../../react/features/base/media';
@ -29,9 +25,6 @@ import {
} from '../../../react/features/base/tracks';
import { CHAT_SIZE } from '../../../react/features/chat';
import {
isParticipantConnectionStatusActive,
isParticipantConnectionStatusInactive,
isParticipantConnectionStatusInterrupted,
isTrackStreamingStatusActive,
isTrackStreamingStatusInactive,
isTrackStreamingStatusInterrupted
@ -161,10 +154,8 @@ export default class LargeVideoManager {
* @returns {void}
*/
destroy() {
this.videoContainer.removeResizeListener(
this._onVideoResolutionUpdate);
this.videoContainer.removeResizeListener(this._onVideoResolutionUpdate);
if (getSourceNameSignalingFeatureFlag(APP.store.getState())) {
// Remove track streaming status listener.
// TODO: when this class is converted to a function react component,
// use a custom hook to update a local track streaming status.
@ -174,7 +165,6 @@ export default class LargeVideoManager {
APP.store.dispatch(trackStreamingStatusChanged(this.videoTrack.jitsiTrack,
this.videoTrack.jitsiTrack.getTrackStreamingStatus()));
}
}
this.removePresenceLabel();
@ -263,11 +253,6 @@ export default class LargeVideoManager {
const isVideoMuted = !stream || stream.isMuted();
const state = APP.store.getState();
const participant = getParticipantById(state, id);
const connectionStatus = participant?.connectionStatus;
let isVideoRenderable;
if (getSourceNameSignalingFeatureFlag(state)) {
const videoTrack = getVideoTrackByParticipant(state, participant);
// Remove track streaming status listener from the old track and add it to the new track,
@ -292,19 +277,15 @@ export default class LargeVideoManager {
}
}
const streamingStatusActive = isTrackStreamingStatusActive(videoTrack);
isVideoRenderable = !isVideoMuted
const isVideoRenderable = !isVideoMuted
&& (APP.conference.isLocalId(id)
|| isLocalScreenshareParticipant(participant)
|| streamingStatusActive
);
this.videoTrack?.jitsiTrack?.getVideoType() === VIDEO_TYPE.DESKTOP
&& logger.debug(`Remote track ${videoTrack?.jitsiTrack}, isVideoMuted=${isVideoMuted},`
+ ` streamingStatusActive=${streamingStatusActive}, isVideoRenderable=${isVideoRenderable}`);
} else {
isVideoRenderable = !isVideoMuted
&& (APP.conference.isLocalId(id) || isParticipantConnectionStatusActive(participant));
}
const isAudioOnly = APP.conference.isAudioOnly();
@ -312,9 +293,7 @@ export default class LargeVideoManager {
// screenshare tile is still created when a remote endpoint starts screenshare to keep the behavior
// consistent and an avatar is displayed on the original participant thumbnail as long as screenshare is in
// progress.
const legacyScreenshare = getMultipleVideoSupportFeatureFlag(state)
&& videoType === VIDEO_TYPE.DESKTOP
&& !isScreenShareParticipant(participant);
const legacyScreenshare = videoType === VIDEO_TYPE.DESKTOP && !isScreenShareParticipant(participant);
const showAvatar
= isVideoContainer
@ -345,7 +324,6 @@ export default class LargeVideoManager {
// send the event
sendAnalytics(createScreenSharingIssueEvent({
source: 'large-video',
connectionStatus,
isVideoMuted,
isAudioOnly,
isVideoContainer,
@ -366,15 +344,7 @@ export default class LargeVideoManager {
this.updateLargeVideoAudioLevel(0);
}
let messageKey;
if (getSourceNameSignalingFeatureFlag(state)) {
const videoTrack = getVideoTrackByParticipant(state, participant);
messageKey = isTrackStreamingStatusInactive(videoTrack) ? 'connection.LOW_BANDWIDTH' : null;
} else {
messageKey = isParticipantConnectionStatusInactive(participant) ? 'connection.LOW_BANDWIDTH' : null;
}
const messageKey = isTrackStreamingStatusInactive(videoTrack) ? 'connection.LOW_BANDWIDTH' : null;
// Do not show connection status message in the audio only mode,
// because it's based on the video playback status.
@ -620,20 +590,11 @@ export default class LargeVideoManager {
if (typeof show !== 'boolean') {
const participant = getParticipantById(APP.store.getState(), this.id);
const state = APP.store.getState();
if (getSourceNameSignalingFeatureFlag(state)) {
const videoTrack = getVideoTrackByParticipant(state, participant);
// eslint-disable-next-line no-param-reassign
show = !APP.conference.isLocalId(this.id)
&& (isTrackStreamingStatusInterrupted(videoTrack)
|| isTrackStreamingStatusInactive(videoTrack));
} else {
// eslint-disable-next-line no-param-reassign
show = !APP.conference.isLocalId(this.id)
&& (isParticipantConnectionStatusInterrupted(participant)
|| isParticipantConnectionStatusInactive(participant));
}
&& (isTrackStreamingStatusInterrupted(videoTrack) || isTrackStreamingStatusInactive(videoTrack));
}
if (show) {

View File

@ -2,17 +2,15 @@
import Logger from '@jitsi/logger';
import { getMultipleVideoSupportFeatureFlag } from '../../../react/features/base/config';
import { MEDIA_TYPE, VIDEO_TYPE } from '../../../react/features/base/media';
import {
getParticipantById,
getPinnedParticipant,
isScreenShareParticipant,
isScreenShareParticipantById
} from '../../../react/features/base/participants';
import {
getTrackByMediaTypeAndParticipant,
getVirtualScreenshareParticipantTrack
getVideoTrackByParticipant
} from '../../../react/features/base/tracks';
import LargeVideoManager from './LargeVideoManager';
@ -98,7 +96,7 @@ const VideoLayout = {
return VIDEO_TYPE.CAMERA;
}
if (getMultipleVideoSupportFeatureFlag(state) && isScreenShare) {
if (isScreenShare) {
return VIDEO_TYPE.DESKTOP;
}
@ -113,23 +111,6 @@ const VideoLayout = {
return id || null;
},
/**
* Shows/hides warning about a user's connectivity issues.
*
* @param {string} id - The ID of the remote participant(MUC nickname).
* @returns {void}
*/
onParticipantConnectionStatusChanged(id) {
if (APP.conference.isLocalId(id)) {
return;
}
// We have to trigger full large video update to transition from
// avatar to video on connectivity restored.
this._updateLargeVideoIfDisplayed(id, true);
},
/**
* On last N change event.
*
@ -189,16 +170,7 @@ const VideoLayout = {
const isOnLarge = this.isCurrentlyOnLarge(id);
const state = APP.store.getState();
const participant = getParticipantById(state, id);
const tracks = state['features/base/tracks'];
let videoTrack;
if (getMultipleVideoSupportFeatureFlag(state) && isScreenShareParticipant(participant)) {
videoTrack = getVirtualScreenshareParticipantTrack(tracks, id);
} else {
videoTrack = getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, id);
}
const videoTrack = getVideoTrackByParticipant(state, participant);
const videoStream = videoTrack?.jitsiTrack;
if (videoStream && forceStreamToReattach) {

package-lock.json (generated)
View File

@ -74,7 +74,7 @@
"js-md5": "0.6.1",
"js-sha512": "0.8.0",
"jwt-decode": "2.2.0",
"lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1530.0.0+f2af389e/lib-jitsi-meet.tgz",
"lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1533.0.0+7b257686/lib-jitsi-meet.tgz",
"lodash": "4.17.21",
"moment": "2.29.4",
"moment-duration-format": "2.2.2",
@ -13497,8 +13497,8 @@
},
"node_modules/lib-jitsi-meet": {
"version": "0.0.0",
"resolved": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1530.0.0+f2af389e/lib-jitsi-meet.tgz",
"integrity": "sha512-gqsNJblQ5wgYZJzhbkI7iBbg5Ddn9/EyfiCOwYdB9lHe07yDYco7H/vUH/TxTFTurEHtyV8LKb5KMEhJIKVhpw==",
"resolved": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1533.0.0+7b257686/lib-jitsi-meet.tgz",
"integrity": "sha512-AOsGOXwuZJrdaJPSBCkLtoDUrg/JKWZOdcR1Sw/ZGjlszcQ+gyfT8UbM9NI+b5hC3h39K9gmnGVcI8acNxpBrQ==",
"license": "Apache-2.0",
"dependencies": {
"@jitsi/js-utils": "2.0.0",
@ -30510,8 +30510,8 @@
}
},
"lib-jitsi-meet": {
"version": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1530.0.0+f2af389e/lib-jitsi-meet.tgz",
"integrity": "sha512-gqsNJblQ5wgYZJzhbkI7iBbg5Ddn9/EyfiCOwYdB9lHe07yDYco7H/vUH/TxTFTurEHtyV8LKb5KMEhJIKVhpw==",
"version": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1533.0.0+7b257686/lib-jitsi-meet.tgz",
"integrity": "sha512-AOsGOXwuZJrdaJPSBCkLtoDUrg/JKWZOdcR1Sw/ZGjlszcQ+gyfT8UbM9NI+b5hC3h39K9gmnGVcI8acNxpBrQ==",
"requires": {
"@jitsi/js-utils": "2.0.0",
"@jitsi/logger": "2.0.0",

View File

@ -79,7 +79,7 @@
"js-md5": "0.6.1",
"js-sha512": "0.8.0",
"jwt-decode": "2.2.0",
"lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1530.0.0+f2af389e/lib-jitsi-meet.tgz",
"lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1533.0.0+7b257686/lib-jitsi-meet.tgz",
"lodash": "4.17.21",
"moment": "2.29.4",
"moment-duration-format": "2.2.2",

View File

@ -159,10 +159,9 @@ MiddlewareRegistry.register(store => next => action => {
const state = getState();
const { localTracksDuration } = state['features/analytics'];
if (localTracksDuration.conference.startedTime === -1 || action.mediaType === 'presenter') {
if (localTracksDuration.conference.startedTime === -1) {
// We don't want to track the media duration if the conference is not joined yet because otherwise we won't
// be able to compare them with the conference duration (from conference join to conference will leave).
// Also, do not track media duration for presenter tracks.
break;
}
dispatch({

View File

@ -1,7 +1,7 @@
import { IStateful } from '../base/app/types';
import { MEDIA_TYPE } from '../base/media/constants';
import { toState } from '../base/redux/functions';
import { isLocalCameraTrackMuted, isLocalTrackMuted } from '../base/tracks/functions';
import { isLocalTrackMuted } from '../base/tracks/functions';
import { addHashParamsToURL } from '../base/util/uri';
/**
@ -14,7 +14,7 @@ import { addHashParamsToURL } from '../base/util/uri';
export function addTrackStateToURL(url: string, stateful: IStateful) {
const state = toState(stateful);
const tracks = state['features/base/tracks'];
const isVideoMuted = isLocalCameraTrackMuted(tracks);
const isVideoMuted = isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO);
const isAudioMuted = isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO);
return addHashParamsToURL(new URL(url), { // use new URL object in order to not pollute the passed parameter.
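
The same generic helper replaces isLocalCameraTrackMuted everywhere in this commit; a minimal sketch of the call shape, assuming the redux state layout used above.

import { MEDIA_TYPE } from './react/features/base/media/constants';
import { isLocalTrackMuted } from './react/features/base/tracks/functions';

// Hypothetical selector: with presenter tracks gone there is only one local
// video track, so the generic mute check is sufficient.
const isLocalVideoMuted = (state: any) =>
    isLocalTrackMuted(state['features/base/tracks'], MEDIA_TYPE.VIDEO);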

View File

@ -26,6 +26,5 @@ export const CS_MODERATION_NOTIFICATION_ID = 'screensharing-moderation';
export const MODERATION_NOTIFICATIONS = {
[MEDIA_TYPE.AUDIO]: AUDIO_MODERATION_NOTIFICATION_ID,
[MEDIA_TYPE.SCREENSHARE]: CS_MODERATION_NOTIFICATION_ID,
[MEDIA_TYPE.VIDEO]: VIDEO_MODERATION_NOTIFICATION_ID,
[MEDIA_TYPE.PRESENTER]: CS_MODERATION_NOTIFICATION_ID
[MEDIA_TYPE.VIDEO]: VIDEO_MODERATION_NOTIFICATION_ID
};

View File

@ -48,7 +48,6 @@ import {
ASKED_TO_UNMUTE_NOTIFICATION_ID,
ASKED_TO_UNMUTE_SOUND_ID,
AUDIO_MODERATION_NOTIFICATION_ID,
CS_MODERATION_NOTIFICATION_ID,
VIDEO_MODERATION_NOTIFICATION_ID
} from './constants';
import {
@ -89,11 +88,6 @@ MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
uid = VIDEO_MODERATION_NOTIFICATION_ID;
break;
}
case MEDIA_TYPE.PRESENTER: {
titleKey = 'notify.moderationInEffectCSTitle';
uid = CS_MODERATION_NOTIFICATION_ID;
break;
}
}
dispatch(showNotification({

View File

@ -12,7 +12,6 @@ import { setAudioMuted, setAudioUnmutePermissions, setVideoMuted, setVideoUnmute
import { MEDIA_TYPE } from '../media/constants';
import {
dominantSpeakerChanged,
participantConnectionStatusChanged,
participantKicked,
participantMutedUs,
participantPresenceChanged,
@ -220,10 +219,6 @@ function _addConferenceListeners(conference: IJitsiConference, dispatch: IStore[
JitsiConferenceEvents.NON_PARTICIPANT_MESSAGE_RECEIVED, // @ts-ignore
(...args: any[]) => dispatch(nonParticipantMessageReceived(...args)));
conference.on(
JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED, // @ts-ignore
(...args: any[]) => dispatch(participantConnectionStatusChanged(...args)));
conference.on(
JitsiConferenceEvents.USER_JOINED,
(_id: string, user: any) => commonUserJoinedHandling({ dispatch }, conference, user));

View File

@ -97,7 +97,6 @@ export function commonUserJoinedHandling(
dispatch(participantJoined({
botType: user.getBotType(),
connectionStatus: user.getConnectionStatus(),
conference,
id,
name: displayName,

View File

@ -15,7 +15,6 @@ import { NOTIFICATION_TIMEOUT_TYPE, showErrorNotification, showWarningNotificati
import { CONNECTION_ESTABLISHED, CONNECTION_FAILED, connectionDisconnected } from '../connection';
import { validateJwt } from '../jwt';
import { JitsiConferenceErrors } from '../lib-jitsi-meet';
import { MEDIA_TYPE } from '../media';
import {
PARTICIPANT_ROLE,
PARTICIPANT_UPDATED,
@ -547,9 +546,7 @@ function _trackAddedOrRemoved(store, next, action) {
const track = action.track;
// TODO All track swapping should happen here instead of conference.js.
// Since we swap the tracks for the web client in conference.js, ignore
// presenter tracks here and do not add/remove them to/from the conference.
if (track && track.local && track.mediaType !== MEDIA_TYPE.PRESENTER) {
if (track?.local) {
return (
_syncConferenceLocalTracksWithState(store, action)
.then(() => next(action)));

View File

@ -494,7 +494,6 @@ export interface IConfig {
};
persist?: boolean;
preferredCodec?: string;
resizeDesktopForPresenter?: boolean;
};
webhookProxyUrl?: string;
webrtcIceTcpDisable?: boolean;

View File

@ -11,7 +11,7 @@ import { parseURLParams } from '../util/parseURLParams';
import { IConfig } from './configType';
import CONFIG_WHITELIST from './configWhitelist';
import { FEATURE_FLAGS, _CONFIG_STORE_PREFIX } from './constants';
import { _CONFIG_STORE_PREFIX } from './constants';
import INTERFACE_CONFIG_WHITELIST from './interfaceConfigWhitelist';
import logger from './logger';
@ -53,17 +53,6 @@ export function getMeetingRegion(state: IReduxState) {
return state['features/base/config']?.deploymentInfo?.region || '';
}
/**
* Selector for determining if receiving multiple stream support is enabled.
*
* @param {Object} state - The global state.
* @returns {boolean}
*/
export function getMultipleVideoSupportFeatureFlag(state: IReduxState) {
return (getFeatureFlag(state, FEATURE_FLAGS.MULTIPLE_VIDEO_STREAMS_SUPPORT)
&& getSourceNameSignalingFeatureFlag(state)) ?? true;
}
/**
* Selector for determining if sending multiple stream support is enabled.
*
@ -71,18 +60,7 @@ export function getMultipleVideoSupportFeatureFlag(state: IReduxState) {
* @returns {boolean}
*/
export function getMultipleVideoSendingSupportFeatureFlag(state: IReduxState) {
return navigator.product !== 'ReactNative'
&& ((getMultipleVideoSupportFeatureFlag(state) ?? true) && isUnifiedPlanEnabled(state));
}
/**
* Selector used to get the sourceNameSignaling feature flag.
*
* @param {Object} state - The global state.
* @returns {boolean}
*/
export function getSourceNameSignalingFeatureFlag(state: IReduxState) {
return getFeatureFlag(state, FEATURE_FLAGS.SOURCE_NAME_SIGNALING) ?? true;
return navigator.product !== 'ReactNative' && isUnifiedPlanEnabled(state);
}
/**

View File

@ -18,8 +18,6 @@ export const JitsiConnectionQualityEvents
export const JitsiDetectionEvents = JitsiMeetJS.events.detection;
export const JitsiE2ePingEvents = JitsiMeetJS.events.e2eping;
export const JitsiMediaDevicesEvents = JitsiMeetJS.events.mediaDevices;
export const JitsiParticipantConnectionStatus
= JitsiMeetJS.constants.participantConnectionStatus;
export const JitsiTrackStreamingStatus = JitsiMeetJS.constants.trackStreamingStatus;
export const JitsiRecordingConstants = JitsiMeetJS.constants.recording;
export const JitsiSIPVideoGWStatus = JitsiMeetJS.constants.sipVideoGW;

View File

@ -8,7 +8,7 @@ export const CAMERA_FACING_MODE = {
USER: 'user'
};
export type MediaType = 'audio' | 'video' | 'presenter' | 'screenshare';
export type MediaType = 'audio' | 'video' | 'screenshare';
/**
* The set of media types.
@ -17,7 +17,6 @@ export type MediaType = 'audio' | 'video' | 'presenter' | 'screenshare';
*/
export const MEDIA_TYPE: { [key: string]: MediaType; } = {
AUDIO: 'audio',
PRESENTER: 'presenter',
SCREENSHARE: 'screenshare',
VIDEO: 'video'
};
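
Since MediaType no longer admits 'presenter', any code still branching on it fails to type-check; a hypothetical illustration.

import { MEDIA_TYPE, MediaType } from './react/features/base/media/constants';

// 'audio' | 'video' | 'screenshare' are the only remaining values.
const ok: MediaType = MEDIA_TYPE.SCREENSHARE;

// const stale: MediaType = 'presenter'; // now a TS2322 type error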

View File

@ -194,8 +194,6 @@ function _setAudioOnly({ dispatch, getState }, next, action) {
dispatch(setVideoMuted(audioOnly, MEDIA_TYPE.VIDEO, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY));
if (getMultipleVideoSendingSupportFeatureFlag(state)) {
dispatch(setScreenshareMuted(audioOnly, MEDIA_TYPE.SCREENSHARE, SCREENSHARE_MUTISM_AUTHORITY.AUDIO_ONLY));
} else if (navigator.product !== 'ReactNative') {
dispatch(setVideoMuted(audioOnly, MEDIA_TYPE.PRESENTER, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY));
}
return next(action);
@ -300,8 +298,7 @@ function _setRoom({ dispatch, getState }, next, action) {
*/
function _syncTrackMutedState({ getState }, track) {
const state = getState()['features/base/media'];
const mediaType = track.mediaType === MEDIA_TYPE.PRESENTER
? MEDIA_TYPE.VIDEO : track.mediaType;
const mediaType = track.mediaType;
const muted = Boolean(state[mediaType].muted);
// XXX If muted state of track when it was added is different from our media
@ -310,8 +307,8 @@ function _syncTrackMutedState({ getState }, track) {
// not yet in redux state and JitsiTrackEvents.TRACK_MUTE_CHANGED may be
// fired before track gets to state.
if (track.muted !== muted) {
sendAnalytics(createSyncTrackStateEvent(track.mediaType, muted));
logger.log(`Sync ${track.mediaType} track muted state to ${muted ? 'muted' : 'unmuted'}`);
sendAnalytics(createSyncTrackStateEvent(mediaType, muted));
logger.log(`Sync ${mediaType} track muted state to ${muted ? 'muted' : 'unmuted'}`);
track.muted = muted;
setTrackMuted(track.jitsiTrack, muted, state);

View File

@ -103,27 +103,6 @@ export function kickParticipant(id: string) {
};
}
/**
* Creates an action to signal the connection status of the local participant
* has changed.
*
* @param {string} connectionStatus - The current connection status of the local
* participant, as enumerated by the library's participantConnectionStatus
* constants.
* @returns {Function}
*/
export function localParticipantConnectionStatusChanged(connectionStatus: string) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const participant = getLocalParticipant(getState);
if (participant) {
return dispatch(participantConnectionStatusChanged(
participant.id,
connectionStatus));
}
};
}
/**
* Action to signal that the ID of local participant has changed. It happens
* when the local participant joins a new conference or leaves an existing
@ -227,30 +206,6 @@ export function muteRemoteParticipant(id: string, mediaType: string) {
};
}
/**
* Action to update a participant's connection status.
*
* @param {string} id - Participant's ID.
* @param {string} connectionStatus - The new connection status of the
* participant.
* @returns {{
* type: PARTICIPANT_UPDATED,
* participant: {
* connectionStatus: string,
* id: string
* }
* }}
*/
export function participantConnectionStatusChanged(id: string, connectionStatus: string) {
return {
type: PARTICIPANT_UPDATED,
participant: {
connectionStatus,
id
}
};
}
/**
* Action to signal that a participant has joined.
*

View File

@ -2,14 +2,11 @@ import React, { Component } from 'react';
import { Text, View } from 'react-native';
import {
isParticipantConnectionStatusActive,
isParticipantConnectionStatusInactive,
isTrackStreamingStatusActive,
isTrackStreamingStatusInactive
} from '../../../connection-indicator/functions';
import { SharedVideo } from '../../../shared-video/components/native';
import Avatar from '../../avatar/components/Avatar';
import { getSourceNameSignalingFeatureFlag } from '../../config/functions.any';
import { translate } from '../../i18n/functions';
import VideoTrack from '../../media/components/native/VideoTrack';
import { shouldRenderVideoTrack } from '../../media/functions';
@ -231,8 +228,7 @@ function _mapStateToProps(state, ownProps) {
const videoTrack = getVideoTrackByParticipant(state, participant);
return {
_isConnectionInactive: getSourceNameSignalingFeatureFlag(state)
? isTrackStreamingStatusInactive(videoTrack) : isParticipantConnectionStatusInactive(participant),
_isConnectionInactive: isTrackStreamingStatusInactive(videoTrack),
_isSharedVideoParticipant: isSharedVideoParticipant(participant),
_participantName: getParticipantDisplayName(state, participantId),
_renderVideo: shouldRenderParticipantVideo(state, participantId) && !disableVideo,
@ -268,15 +264,9 @@ function shouldRenderParticipantVideo(stateful, id) {
}
/* Then check if the participant connection or track streaming status is active. */
if (getSourceNameSignalingFeatureFlag(state)) {
// Note that this will work only if a listener is registered for the track's TrackStreamingStatus.
// The associated TrackStreamingStatusImpl instance is not created or disposed when there are zero listeners.
if (!videoTrack.local && !isTrackStreamingStatusActive(videoTrack)) {
return false;
}
} else if (!isParticipantConnectionStatusActive(participant)) {
return false;
}
/* Then check if audio-only mode is not active. */
const audioOnly = state['features/base/audio-only'].enabled;

View File

@ -8,7 +8,6 @@ import { isStageFilmstripAvailable } from '../../filmstrip/functions';
import { IStateful } from '../app/types';
import { GRAVATAR_BASE_URL } from '../avatar/constants';
import { isCORSAvatarURL } from '../avatar/functions';
import { getMultipleVideoSupportFeatureFlag } from '../config/functions.any';
import i18next from '../i18n/i18next';
import { VIDEO_TYPE } from '../media/constants';
import { toState } from '../redux/functions';
@ -71,7 +70,6 @@ export function getActiveSpeakersToBeDisplayed(stateful: IStateful) {
const {
dominantSpeaker,
fakeParticipants,
sortedRemoteScreenshares,
sortedRemoteVirtualScreenshareParticipants,
speakersList
} = state['features/base/participants'];
@ -98,7 +96,6 @@ export function getActiveSpeakersToBeDisplayed(stateful: IStateful) {
}
// Remove screenshares from the count.
if (getMultipleVideoSupportFeatureFlag(state)) {
if (sortedRemoteVirtualScreenshareParticipants) {
availableSlotsForActiveSpeakers -= sortedRemoteVirtualScreenshareParticipants.size * 2;
for (const screenshare of Array.from(sortedRemoteVirtualScreenshareParticipants.keys())) {
@ -107,12 +104,6 @@ export function getActiveSpeakersToBeDisplayed(stateful: IStateful) {
activeSpeakers.delete(ownerId);
}
}
} else if (sortedRemoteScreenshares) {
availableSlotsForActiveSpeakers -= sortedRemoteScreenshares.size;
for (const id of Array.from(sortedRemoteScreenshares.keys())) {
activeSpeakers.delete(id);
}
}
// Remove fake participants from the count.
if (fakeParticipants) {
@ -193,16 +184,11 @@ export function getLocalScreenShareParticipant(stateful: IStateful) {
*/
export function getVirtualScreenshareParticipantByOwnerId(stateful: IStateful, id: string) {
const state = toState(stateful);
if (getMultipleVideoSupportFeatureFlag(state)) {
const track = getScreenShareTrack(state['features/base/tracks'], id);
return getParticipantById(stateful, track?.jitsiTrack.getSourceName());
}
return;
}
/**
* Normalizes a display name so then no invalid values (padding, length...etc)
* can be set.
@ -269,14 +255,9 @@ export function getParticipantCount(stateful: IStateful) {
sortedRemoteVirtualScreenshareParticipants
} = state['features/base/participants'];
if (getMultipleVideoSupportFeatureFlag(state)) {
return remote.size - fakeParticipants.size - sortedRemoteVirtualScreenshareParticipants.size + (local ? 1 : 0);
}
return remote.size - fakeParticipants.size + (local ? 1 : 0);
}
/**
* Returns participant ID of the owner of a virtual screenshare participant.
*
@ -385,13 +366,9 @@ export function getRemoteParticipantCount(stateful: IStateful) {
const state = toState(stateful);
const participantsState = state['features/base/participants'];
if (getMultipleVideoSupportFeatureFlag(state)) {
return participantsState.remote.size - participantsState.sortedRemoteVirtualScreenshareParticipants.size;
}
return participantsState.remote.size;
}
/**
* Returns a count of the known participants in the passed in redux state,
* including fake participants.
@ -405,13 +382,9 @@ export function getParticipantCountWithFake(stateful: IStateful) {
const state = toState(stateful);
const { local, localScreenShare, remote } = state['features/base/participants'];
if (getMultipleVideoSupportFeatureFlag(state)) {
return remote.size + (local ? 1 : 0) + (localScreenShare ? 1 : 0);
}
return remote.size + (local ? 1 : 0);
}
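
The count selectors above now subtract virtual screenshare participants unconditionally; a hypothetical distillation of the arithmetic.

// Each remote virtual screenshare participant is a synthetic entry backed by
// a real participant, so it is excluded from the visible participant count.
function visibleParticipantCount(remote: number, fakes: number, virtualScreenshares: number, hasLocal: boolean): number {
    return remote - fakes - virtualScreenshares + (hasLocal ? 1 : 0);
}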
/**
* Returns participant's display name.
*

View File

@ -512,7 +512,6 @@ function _participantJoined({ participant }: { participant: IParticipant; }) {
const {
avatarURL,
botType,
connectionStatus,
dominantSpeaker,
email,
fakeParticipant,
@ -542,7 +541,6 @@ function _participantJoined({ participant }: { participant: IParticipant; }) {
avatarURL,
botType,
conference,
connectionStatus,
dominantSpeaker: dominantSpeaker || false,
email,
fakeParticipant,

View File

@ -3,8 +3,7 @@ import _ from 'lodash';
import { IStore } from '../../app/types';
import { getCurrentConference } from '../conference/functions';
import {
getMultipleVideoSendingSupportFeatureFlag,
getMultipleVideoSupportFeatureFlag
getMultipleVideoSendingSupportFeatureFlag
} from '../config/functions.any';
import StateListenerRegistry from '../redux/StateListenerRegistry';
@ -24,11 +23,6 @@ StateListenerRegistry.register(
*/
function _updateScreenshareParticipants({ getState, dispatch }: IStore) {
const state = getState();
if (!getMultipleVideoSupportFeatureFlag(state)) {
return;
}
const conference = getCurrentConference(state);
const tracks = state['features/base/tracks'];
const { sortedRemoteVirtualScreenshareParticipants, localScreenShare } = state['features/base/participants'];

View File

@ -10,7 +10,6 @@ export interface IParticipant {
avatarURL?: string;
botType?: string;
conference?: Object;
connectionStatus?: string;
displayName?: string;
dominantSpeaker?: boolean;
e2eeEnabled?: boolean;

View File

@ -1,8 +1,7 @@
import { IReduxState, IStore } from '../../app/types';
import { getMultipleVideoSupportFeatureFlag } from '../config/functions.any';
import { MEDIA_TYPE, VIDEO_TYPE } from '../media/constants';
import { getParticipantById, isScreenShareParticipant } from '../participants/functions';
import { getTrackByMediaTypeAndParticipant, getVirtualScreenshareParticipantTrack } from '../tracks/functions';
import { getTrackByMediaTypeAndParticipant, getVideoTrackByParticipant } from '../tracks/functions';
/**
* Indicates whether the test mode is enabled. When it's enabled
@ -29,7 +28,7 @@ export function getRemoteVideoType({ getState }: IStore, id: string) {
const state = getState();
const participant = getParticipantById(state, id);
if (getMultipleVideoSupportFeatureFlag(state) && isScreenShareParticipant(participant)) {
if (isScreenShareParticipant(participant)) {
return VIDEO_TYPE.DESKTOP;
}
@ -46,15 +45,7 @@ export function isLargeVideoReceived({ getState }: IStore): boolean {
const state = getState();
const largeVideoParticipantId = state['features/large-video'].participantId ?? '';
const largeVideoParticipant = getParticipantById(state, largeVideoParticipantId ?? '');
const tracks = state['features/base/tracks'];
let videoTrack;
if (getMultipleVideoSupportFeatureFlag(state) && isScreenShareParticipant(largeVideoParticipant)) {
videoTrack = getVirtualScreenshareParticipantTrack(tracks, largeVideoParticipantId);
} else {
videoTrack = getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, largeVideoParticipantId);
}
const videoTrack = getVideoTrackByParticipant(state, largeVideoParticipant);
const lastMediaEvent = state['features/large-video']?.lastMediaEvent;
return Boolean(videoTrack && !videoTrack.muted
@ -70,15 +61,8 @@ export function isLargeVideoReceived({ getState }: IStore): boolean {
*/
export function isRemoteVideoReceived({ getState }: IStore, id: string): boolean {
const state = getState();
const tracks = state['features/base/tracks'];
const participant = getParticipantById(state, id);
let videoTrack;
if (getMultipleVideoSupportFeatureFlag(state) && isScreenShareParticipant(participant)) {
videoTrack = getVirtualScreenshareParticipantTrack(tracks, id);
} else {
videoTrack = getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, id);
}
const videoTrack = getVideoTrackByParticipant(state, participant);
const lastMediaEvent = videoTrack?.lastMediaEvent;
return Boolean(videoTrack && !videoTrack.muted

View File

@ -5,7 +5,7 @@ import { showErrorNotification, showNotification } from '../../notifications/act
import { NOTIFICATION_TIMEOUT, NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import { getCurrentConference } from '../conference/functions';
import { IJitsiConference } from '../conference/reducer';
import { getMultipleVideoSendingSupportFeatureFlag, getMultipleVideoSupportFeatureFlag } from '../config/functions.any';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
import { createLocalTrack } from '../lib-jitsi-meet/functions.any';
import { setAudioMuted, setScreenshareMuted, setVideoMuted } from '../media/actions';
@ -379,9 +379,7 @@ export function trackAdded(track: any) {
(type: VideoType) => dispatch(trackVideoTypeChanged(track, type)));
const local = track.isLocal();
const isVirtualScreenshareParticipantCreated = local
? getMultipleVideoSendingSupportFeatureFlag(getState())
: getMultipleVideoSupportFeatureFlag(getState());
const isVirtualScreenshareParticipantCreated = !local || getMultipleVideoSendingSupportFeatureFlag(getState());
const mediaType = track.getVideoType() === VIDEO_TYPE.DESKTOP && isVirtualScreenshareParticipantCreated
? MEDIA_TYPE.SCREENSHARE
: track.getType();
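
Distilling the mediaType derivation above into a standalone function (hypothetical; names from the hunk), with the flag check reduced to the sending-support case for local tracks.

// Desktop-typed tracks surface as MEDIA_TYPE.SCREENSHARE whenever a virtual
// screenshare participant is created: always for remote tracks, and for local
// tracks when multi-stream sending is supported.
function deriveMediaType(
        track: { isLocal: () => boolean; getVideoType: () => string; getType: () => string; },
        multiStreamSendingSupported: boolean): string {
    const isVirtualScreenshareParticipantCreated = !track.isLocal() || multiStreamSendingSupported;

    return track.getVideoType() === 'desktop' && isVirtualScreenshareParticipantCreated
        ? 'screenshare'
        : track.getType();
}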

View File

@ -19,11 +19,10 @@ export * from './actions.any';
*
* @param {boolean} enabled - The state to toggle screen sharing to.
* @param {boolean} _ignore1 - Ignored.
* @param {boolean} _ignore2 - Ignored.
* @param {Object} _ignore3 - Ignored.
* @param {any} _ignore2 - Ignored.
* @returns {Function}
*/
export function toggleScreensharing(enabled: boolean, _ignore1?: boolean, _ignore2?: boolean, _ignore3?: any) {
export function toggleScreensharing(enabled: boolean, _ignore1?: boolean, _ignore2?: any) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();

View File

@ -2,12 +2,10 @@
// @ts-expect-error
import { AUDIO_ONLY_SCREEN_SHARE_NO_TRACK } from '../../../../modules/UI/UIErrors';
import { IReduxState, IStore } from '../../app/types';
import { showModeratedNotification } from '../../av-moderation/actions';
import { shouldShowModeratedNotification } from '../../av-moderation/functions';
import { setNoiseSuppressionEnabled } from '../../noise-suppression/actions';
import { showNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import { isModerationNotificationDisplayed } from '../../notifications/functions';
// @ts-ignore
import { stopReceiver } from '../../remote-control/actions';
// @ts-ignore
@ -19,7 +17,6 @@ import { isScreenshotCaptureEnabled, toggleScreenshotCaptureSummary } from '../.
import { AudioMixerEffect } from '../../stream-effects/audio-mixer/AudioMixerEffect';
import { setAudioOnly } from '../audio-only/actions';
import { getCurrentConference } from '../conference/functions';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
import { setScreenshareMuted } from '../media/actions';
import { MEDIA_TYPE, VIDEO_TYPE } from '../media/constants';
@ -43,27 +40,20 @@ export * from './actions.any';
*
* @param {boolean} enabled - The state to toggle screen sharing to.
* @param {boolean} audioOnly - Only share system audio.
* @param {boolean} ignoreDidHaveVideo - Whether or not to ignore if video was on when sharing started.
* @param {Object} shareOptions - The options to be passed for capturing screenshare.
* @returns {Function}
*/
export function toggleScreensharing(
enabled?: boolean,
audioOnly = false,
ignoreDidHaveVideo = false,
shareOptions: IShareOptions = {}) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
// check for A/V Moderation when trying to start screen sharing
if ((enabled || enabled === undefined)
&& shouldShowModeratedNotification(MEDIA_TYPE.VIDEO, getState())) {
if (!isModerationNotificationDisplayed(MEDIA_TYPE.PRESENTER, getState())) {
dispatch(showModeratedNotification(MEDIA_TYPE.PRESENTER));
}
if ((enabled || enabled === undefined) && shouldShowModeratedNotification(MEDIA_TYPE.VIDEO, getState())) {
return Promise.reject();
}
if (getMultipleVideoSendingSupportFeatureFlag(getState())) {
return _toggleScreenSharing({
enabled,
audioOnly,
@ -72,12 +62,6 @@ export function toggleScreensharing(
dispatch,
getState
});
}
return APP.conference.toggleScreenSharing(enabled, {
audioOnly,
desktopStream: shareOptions?.desktopStream
}, ignoreDidHaveVideo);
};
}
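
With ignoreDidHaveVideo gone, the share options move up to the third argument. A caller now looks like the remote-control grant flow further down in this diff:

dispatch(toggleScreensharing(
    true,   // enabled
    false,  // audioOnly: capture video, not just system audio
    { desktopSharingSources: [ 'screen' ] }));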

View File

@ -1,7 +1,6 @@
import { IReduxState } from '../../app/types';
import {
getMultipleVideoSendingSupportFeatureFlag,
getMultipleVideoSupportFeatureFlag
getMultipleVideoSendingSupportFeatureFlag
} from '../config/functions.any';
import { JitsiTrackErrors, browser } from '../lib-jitsi-meet';
import { MEDIA_TYPE, MediaType, VIDEO_TYPE } from '../media/constants';
@ -156,18 +155,6 @@ export function getLocalVideoTrack(tracks: ITrack[]) {
return getLocalTrack(tracks, MEDIA_TYPE.VIDEO);
}
/**
* Returns the media type of the local video track: presenter if one exists, video otherwise.
*
* @param {ITrack[]} tracks - List of all tracks.
* @returns {MEDIA_TYPE}
*/
export function getLocalVideoType(tracks: ITrack[]) {
const presenterTrack = getLocalTrack(tracks, MEDIA_TYPE.PRESENTER);
return presenterTrack ? MEDIA_TYPE.PRESENTER : MEDIA_TYPE.VIDEO;
}
/**
* Returns the stored local video track.
*
@ -246,29 +233,6 @@ export function getVirtualScreenshareParticipantTrack(tracks: ITrack[], virtualS
return getScreenShareTrack(tracks, ownderId);
}
/**
* Returns track source names of given screen share participant ids.
*
* @param {IReduxState} state - The entire redux state.
* @param {string[]} screenShareParticipantIds - The participant IDs.
* @returns {(string[])}
*/
export function getRemoteScreenSharesSourceNames(state: IReduxState, screenShareParticipantIds: string[] = []) {
const tracks = state['features/base/tracks'];
return getMultipleVideoSupportFeatureFlag(state)
? screenShareParticipantIds
: screenShareParticipantIds.reduce((acc: string[], id) => {
const sourceName = getScreenShareTrack(tracks, id)?.jitsiTrack.getSourceName();
if (sourceName) {
acc.push(sourceName);
}
return acc;
}, []);
}
/**
* Returns screenshare track of given owner ID.
*
@ -327,29 +291,6 @@ export function getTracksByMediaType(tracks: ITrack[], mediaType: MediaType) {
return tracks.filter(t => t.mediaType === mediaType);
}
/**
* Checks if the local video camera track in the given set of tracks is muted.
*
* @param {ITrack[]} tracks - List of all tracks.
* @returns {boolean}
*/
export function isLocalCameraTrackMuted(tracks: ITrack[]) {
const presenterTrack = getLocalTrack(tracks, MEDIA_TYPE.PRESENTER);
const videoTrack = getLocalTrack(tracks, MEDIA_TYPE.VIDEO);
// Make sure we check the mute status of only camera tracks, i.e.,
// presenter track when it exists, camera track when the presenter
// track doesn't exist.
if (presenterTrack) {
return isLocalTrackMuted(tracks, MEDIA_TYPE.PRESENTER);
} else if (videoTrack) {
return videoTrack.videoType === 'camera'
? isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO) : true;
}
return true;
}
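
With the presenter track gone there is no camera-specific wrapper left; callers query the generic helper directly (the subscriber and breakout-rooms hunks below show the pattern):

const tracks = state['features/base/tracks'];
const isVideoMuted = isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO);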
/**
* Checks if the first local track in the given tracks set is muted.
*

View File

@ -3,7 +3,6 @@ import { IStateful } from '../app/types';
import { isMobileBrowser } from '../environment/utils';
import JitsiMeetJS from '../lib-jitsi-meet';
import { setAudioMuted } from '../media/actions';
import { MEDIA_TYPE } from '../media/constants';
import { toState } from '../redux/functions';
import {
getUserSelectedCameraDeviceId,
@ -95,47 +94,6 @@ export function createLocalTracksF(options: ITrackOptions = {}, store?: IStore)
}));
}
/**
* Creates a local video track for presenter. The constraints are computed based
* on the height of the desktop that is being shared.
*
* @param {Object} options - The options with which the local presenter track
* is to be created.
* @param {string|null} [options.cameraDeviceId] - Camera device id or
* {@code undefined} to use app's settings.
* @param {number} desktopHeight - The height of the desktop that is being
* shared.
* @returns {Promise<JitsiLocalTrack>}
*/
export async function createLocalPresenterTrack(options: ITrackOptions, desktopHeight: number) {
const { cameraDeviceId } = options;
// compute the constraints of the camera track based on the resolution
// of the desktop screen that is being shared.
const cameraHeights = [ 180, 270, 360, 540, 720 ];
const proportion = 5;
const result = cameraHeights.find(
height => (desktopHeight / proportion) < height);
const constraints = {
video: {
aspectRatio: 4 / 3,
height: {
ideal: result
}
}
};
const [ videoTrack ] = await JitsiMeetJS.createLocalTracks(
{
cameraDeviceId,
constraints,
devices: [ 'video' ]
});
videoTrack.type = MEDIA_TYPE.PRESENTER;
return videoTrack;
}
/**
* Returns an object containing a promise which resolves with the created tracks &
* the errors resulting from that process.
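
For context on what is removed here: the presenter constraints picked the smallest preset camera height above one fifth of the shared desktop height. For example, when sharing a 1080p desktop:

// 1080 / 5 = 216 -> the first preset above 216 is 270,
// so the camera overlay was captured at an ideal height of 270 (4:3).
const ideal = [ 180, 270, 360, 540, 720 ].find(height => (1080 / 5) < height); // 270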

View File

@ -1,7 +1,6 @@
import { IStore } from '../../app/types';
import { hideNotification } from '../../notifications/actions';
import { isPrejoinPageVisible } from '../../prejoin/functions';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import { getAvailableDevices } from '../devices/actions.web';
import { setScreenshareMuted } from '../media/actions';
import {
@ -69,8 +68,7 @@ MiddlewareRegistry.register(store => next => action => {
const muted = action.wasMuted;
const isVideoTrack = jitsiTrack.getType() !== MEDIA_TYPE.AUDIO;
if (isVideoTrack && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP
&& getMultipleVideoSendingSupportFeatureFlag(store.getState())) {
if (isVideoTrack && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
store.dispatch(setScreenshareMuted(!muted));
} else if (isVideoTrack) {
APP.conference.setVideoMuteStatus();
@ -84,8 +82,7 @@ MiddlewareRegistry.register(store => next => action => {
case TRACK_STOPPED: {
const { jitsiTrack } = action.track;
if (getMultipleVideoSendingSupportFeatureFlag(store.getState())
&& jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
if (jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
store.dispatch(toggleScreensharing(false));
}
break;
@ -108,15 +105,9 @@ MiddlewareRegistry.register(store => next => action => {
const isVideoTrack = jitsiTrack.type !== MEDIA_TYPE.AUDIO;
if (isVideoTrack) {
// Do not change the video mute state for local presenter tracks.
if (jitsiTrack.type === MEDIA_TYPE.PRESENTER) {
APP.conference.mutePresenter(muted);
} else if (jitsiTrack.isLocal() && !(jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP)) {
if (jitsiTrack.isLocal() && !(jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP)) {
APP.conference.setVideoMuteStatus();
} else if (jitsiTrack.isLocal() && muted && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
!getMultipleVideoSendingSupportFeatureFlag(state)
&& store.dispatch(toggleScreensharing(false, false, true));
} else {
} else if (!jitsiTrack.isLocal()) {
APP.UI.setVideoMuted(participantID);
}
} else if (jitsiTrack.isLocal()) {

View File

@ -1,9 +1,10 @@
import _ from 'lodash';
import { MEDIA_TYPE } from '../media/constants';
import { getScreenshareParticipantIds } from '../participants/functions';
import StateListenerRegistry from '../redux/StateListenerRegistry';
import { isLocalCameraTrackMuted } from './functions';
import { isLocalTrackMuted } from './functions';
/**
* Notifies when the list of currently sharing participants changes.
@ -26,7 +27,7 @@ StateListenerRegistry.register(
* Notifies when the local video mute state changes.
*/
StateListenerRegistry.register(
/* selector */ state => isLocalCameraTrackMuted(state['features/base/tracks']),
/* selector */ state => isLocalTrackMuted(state['features/base/tracks'], MEDIA_TYPE.VIDEO),
/* listener */ (muted, store, previousMuted) => {
if (typeof APP !== 'object') {
return;

View File

@ -17,7 +17,6 @@ import { getRemoteParticipants } from '../base/participants/functions';
import { createDesiredLocalTracks } from '../base/tracks/actions';
import {
getLocalTracks,
isLocalCameraTrackMuted,
isLocalTrackMuted
} from '../base/tracks/functions';
import { clearNotifications, showNotification } from '../notifications/actions';
@ -225,7 +224,7 @@ export function moveToRoom(roomId?: string) {
} else {
const localTracks = getLocalTracks(getState()['features/base/tracks']);
const isAudioMuted = isLocalTrackMuted(localTracks, MEDIA_TYPE.AUDIO);
const isVideoMuted = isLocalCameraTrackMuted(localTracks);
const isVideoMuted = isLocalTrackMuted(localTracks, MEDIA_TYPE.VIDEO);
try {
// all places we fire notifyConferenceLeft we pass the room name from APP.conference

View File

@ -8,7 +8,6 @@ import { WithTranslation } from 'react-i18next';
import { connect } from 'react-redux';
import { IReduxState, IStore } from '../../../app/types';
import { getSourceNameSignalingFeatureFlag } from '../../../base/config/functions.any';
import { translate } from '../../../base/i18n/functions';
import { MEDIA_TYPE } from '../../../base/media/constants';
import {
@ -22,8 +21,6 @@ import {
getVirtualScreenshareParticipantTrack
} from '../../../base/tracks/functions';
import {
isParticipantConnectionStatusInactive,
isParticipantConnectionStatusInterrupted,
isTrackStreamingStatusInactive,
isTrackStreamingStatusInterrupted
} from '../../functions';
@ -86,12 +83,6 @@ type Props = AbstractProps & WithTranslation & {
*/
_connectionIndicatorInactiveDisabled: boolean;
/**
* The current condition of the user's connection, matching one of the
* enumerated values in the library.
*/
_connectionStatus: string;
/**
* Whether the indicator popover is disabled.
*/
@ -394,33 +385,24 @@ class ConnectionIndicator extends AbstractConnectionIndicator<Props, IState> {
export function _mapStateToProps(state: IReduxState, ownProps: Props) {
const { participantId } = ownProps;
const tracks = state['features/base/tracks'];
const sourceNameSignalingEnabled = getSourceNameSignalingFeatureFlag(state);
const participant = participantId ? getParticipantById(state, participantId) : getLocalParticipant(state);
let _videoTrack = getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, participantId);
let firstVideoTrack;
if (sourceNameSignalingEnabled && isScreenShareParticipant(participant)) {
firstVideoTrack = getVirtualScreenshareParticipantTrack(tracks, participantId);
} else {
firstVideoTrack = getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, participantId);
if (isScreenShareParticipant(participant)) {
_videoTrack = getVirtualScreenshareParticipantTrack(tracks, participantId);
}
const _isConnectionStatusInactive = sourceNameSignalingEnabled
? isTrackStreamingStatusInactive(firstVideoTrack)
: isParticipantConnectionStatusInactive(participant);
const _isConnectionStatusInterrupted = sourceNameSignalingEnabled
? isTrackStreamingStatusInterrupted(firstVideoTrack)
: isParticipantConnectionStatusInterrupted(participant);
const _isConnectionStatusInactive = isTrackStreamingStatusInactive(_videoTrack);
const _isConnectionStatusInterrupted = isTrackStreamingStatusInterrupted(_videoTrack);
return {
_connectionIndicatorInactiveDisabled:
Boolean(state['features/base/config'].connectionIndicators?.inactiveDisabled),
_isVirtualScreenshareParticipant: sourceNameSignalingEnabled && isScreenShareParticipant(participant),
_isVirtualScreenshareParticipant: isScreenShareParticipant(participant),
_popoverDisabled: state['features/base/config'].connectionIndicators?.disableDetails,
_videoTrack: firstVideoTrack,
_isConnectionStatusInactive,
_isConnectionStatusInterrupted
_isConnectionStatusInterrupted,
_videoTrack
};
}

View File

@ -3,7 +3,6 @@
import React from 'react';
import type { Dispatch } from 'redux';
import { getSourceNameSignalingFeatureFlag } from '../../../base/config';
import { translate } from '../../../base/i18n';
import { MEDIA_TYPE } from '../../../base/media';
import { getLocalParticipant, getParticipantById, isScreenShareParticipant } from '../../../base/participants';
@ -15,8 +14,6 @@ import {
import { ConnectionStatsTable } from '../../../connection-stats';
import { saveLogs } from '../../actions';
import {
isParticipantConnectionStatusInactive,
isParticipantConnectionStatusInterrupted,
isTrackStreamingStatusInactive,
isTrackStreamingStatusInterrupted
} from '../../functions';
@ -72,12 +69,6 @@ type Props = AbstractProps & {
*/
_audioSsrc: number,
/**
* The current condition of the user's connection, matching one of the
* enumerated values in the library.
*/
_connectionStatus: string,
/**
* Whether or not the "Show More" link should be displayed in the local
* video stats table.
@ -320,32 +311,24 @@ export function _mapStateToProps(state: Object, ownProps: Props) {
const conference = state['features/base/conference'].conference;
const participant
= participantId ? getParticipantById(state, participantId) : getLocalParticipant(state);
const sourceNameSignalingEnabled = getSourceNameSignalingFeatureFlag(state);
const tracks = state['features/base/tracks'];
const audioTrack = getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.AUDIO, participantId);
let videoTrack = getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, participantId);
if (sourceNameSignalingEnabled && isScreenShareParticipant(participant)) {
if (isScreenShareParticipant(participant)) {
videoTrack = getVirtualScreenshareParticipantTrack(tracks, participant?.id);
}
const _isConnectionStatusInactive = sourceNameSignalingEnabled
? isTrackStreamingStatusInactive(videoTrack)
: isParticipantConnectionStatusInactive(participant);
const _isConnectionStatusInterrupted = sourceNameSignalingEnabled
? isTrackStreamingStatusInterrupted(videoTrack)
: isParticipantConnectionStatusInterrupted(participant);
const _isConnectionStatusInactive = isTrackStreamingStatusInactive(videoTrack);
const _isConnectionStatusInterrupted = isTrackStreamingStatusInterrupted(videoTrack);
return {
_audioSsrc: audioTrack ? conference?.getSsrcByTrack(audioTrack.jitsiTrack) : undefined,
_connectionStatus: participant?.connectionStatus,
_enableSaveLogs: state['features/base/config'].enableSaveLogs,
_disableShowMoreStats: state['features/base/config'].disableShowMoreStats,
_isConnectionStatusInactive,
_isConnectionStatusInterrupted,
_isVirtualScreenshareParticipant: sourceNameSignalingEnabled && isScreenShareParticipant(participant),
_isVirtualScreenshareParticipant: isScreenShareParticipant(participant),
_isLocalVideo: participant?.local,
_region: participant?.region,
_videoSsrc: videoTrack ? conference?.getSsrcByTrack(videoTrack.jitsiTrack) : undefined

View File

@ -2,9 +2,8 @@
import clsx from 'clsx';
import React, { useEffect } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useDispatch } from 'react-redux';
import { getSourceNameSignalingFeatureFlag } from '../../../base/config';
import { Icon, IconConnection, IconConnectionInactive } from '../../../base/icons';
import { JitsiTrackEvents } from '../../../base/lib-jitsi-meet';
import { trackStreamingStatusChanged } from '../../../base/tracks';
@ -50,7 +49,6 @@ export const ConnectionIndicatorIcon = ({
isConnectionStatusInterrupted,
track
}: Props) => {
const sourceNameSignalingEnabled = useSelector(state => getSourceNameSignalingFeatureFlag(state));
const dispatch = useDispatch();
const sourceName = track?.jitsiTrack?.getSourceName();
@ -61,14 +59,14 @@ export const ConnectionIndicatorIcon = ({
// TODO: replace this with a custom hook to be reused where track streaming status is needed.
// TODO: Under the hood, the listener should update a local track streaming status instead of the one in the redux store.
useEffect(() => {
if (track && !track.local && sourceNameSignalingEnabled) {
if (track && !track.local) {
track.jitsiTrack.on(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED, handleTrackStreamingStatusChanged);
dispatch(trackStreamingStatusChanged(track.jitsiTrack, track.jitsiTrack.getTrackStreamingStatus?.()));
}
return () => {
if (track && !track.local && sourceNameSignalingEnabled) {
if (track && !track.local) {
track.jitsiTrack.off(
JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
handleTrackStreamingStatusChanged
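
This subscribe/unsubscribe pair now recurs verbatim in every component that renders streaming status (see the Thumbnail and LargeVideo hunks below). A hypothetical hook along the lines the TODOs suggest; the name and exact shape are assumed, not part of this change:

import { useEffect } from 'react';
import { useDispatch } from 'react-redux';

// Sketch only; reuses the event and action already imported above.
function useTrackStreamingStatus(track?: ITrack) {
    const dispatch = useDispatch();
    const sourceName = track?.jitsiTrack?.getSourceName();

    useEffect(() => {
        if (!track || track.local) {
            return;
        }

        const onStatusChanged = (jitsiTrack: any, status: string) =>
            dispatch(trackStreamingStatusChanged(jitsiTrack, status));

        track.jitsiTrack.on(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED, onStatusChanged);
        dispatch(trackStreamingStatusChanged(track.jitsiTrack, track.jitsiTrack.getTrackStreamingStatus?.()));

        return () => {
            track.jitsiTrack.off(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED, onStatusChanged);
            dispatch(trackStreamingStatusChanged(track.jitsiTrack, track.jitsiTrack.getTrackStreamingStatus?.()));
        };
    }, [ sourceName ]);
}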

View File

@ -1,5 +1,4 @@
import { JitsiParticipantConnectionStatus, JitsiTrackStreamingStatus } from '../base/lib-jitsi-meet';
import { IParticipant } from '../base/participants/types';
import { JitsiTrackStreamingStatus } from '../base/lib-jitsi-meet';
import { ITrack } from '../base/tracks/types';
/**
@ -37,39 +36,3 @@ export function isTrackStreamingStatusInterrupted(videoTrack?: ITrack) {
return streamingStatus === JitsiTrackStreamingStatus.INTERRUPTED;
}
/**
* Checks if the passed participant's connection status is active.
*
* @param {Object} participant - Participant reference.
* @returns {boolean} - Is connection status active.
*/
export function isParticipantConnectionStatusActive(participant: IParticipant) {
const connectionStatus = participant?.connectionStatus;
return connectionStatus === JitsiParticipantConnectionStatus.ACTIVE;
}
/**
* Checks if the passed participant's connection status is inactive.
*
* @param {Object} participant - Participant reference.
* @returns {boolean} - Is connection status inactive.
*/
export function isParticipantConnectionStatusInactive(participant?: IParticipant) {
const connectionStatus = participant?.connectionStatus;
return connectionStatus === JitsiParticipantConnectionStatus.INACTIVE;
}
/**
* Checks if the passed participant's connection status is interrupted.
*
* @param {Object} participant - Participant reference.
* @returns {boolean} - Is connection status interrupted.
*/
export function isParticipantConnectionStatusInterrupted(participant?: IParticipant) {
const connectionStatus = participant?.connectionStatus;
return connectionStatus === JitsiParticipantConnectionStatus.INTERRUPTED;
}
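
The surviving track-based checks all follow one pattern; the active variant used by isVideoPlayable later in this diff is presumably shaped the same way (sketch):

export function isTrackStreamingStatusActive(videoTrack?: ITrack) {
    const streamingStatus = videoTrack?.streamingStatus;

    return streamingStatus === JitsiTrackStreamingStatus.ACTIVE;
}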

View File

@ -1,7 +1,6 @@
// @flow
import type { Dispatch } from 'redux';
import { getSourceNameSignalingFeatureFlag } from '../base/config';
import {
getLocalParticipant,
getParticipantById,
@ -216,7 +215,6 @@ export function setVerticalViewDimensions() {
remoteVideosContainerHeight
= clientHeight - (disableSelfView ? 0 : thumbnails?.local?.height) - VERTICAL_FILMSTRIP_VERTICAL_MARGIN;
if (getSourceNameSignalingFeatureFlag(state)) {
// Account for the height of the local screen share thumbnail when calculating the height of the remote
// videos container.
const localCameraThumbnailHeight = thumbnails?.local?.height;
@ -227,7 +225,6 @@ export function setVerticalViewDimensions() {
- localCameraThumbnailHeight
- localScreenShareThumbnailHeight
- VERTICAL_FILMSTRIP_VERTICAL_MARGIN;
}
hasScroll
= remoteVideosContainerHeight

View File

@ -4,7 +4,6 @@ import React, { PureComponent } from 'react';
import { Image, View } from 'react-native';
import type { Dispatch } from 'redux';
import { getMultipleVideoSupportFeatureFlag, getSourceNameSignalingFeatureFlag } from '../../../base/config';
import { JitsiTrackEvents } from '../../../base/lib-jitsi-meet';
import { MEDIA_TYPE, VIDEO_TYPE } from '../../../base/media';
import {
@ -111,11 +110,6 @@ type Props = {
*/
_renderModeratorIndicator: boolean,
/**
* Whether source name signaling is enabled.
*/
_sourceNameSignalingEnabled: boolean,
/**
* The video track that will be displayed in the thumbnail.
*/
@ -266,9 +260,9 @@ class Thumbnail extends PureComponent<Props> {
// Listen to track streaming status changed event to keep it updated.
// TODO: after converting this component to a react function component,
// use a custom hook to update local track streaming status.
const { _videoTrack, dispatch, _sourceNameSignalingEnabled } = this.props;
const { _videoTrack, dispatch } = this.props;
if (_sourceNameSignalingEnabled && _videoTrack && !_videoTrack.local) {
if (_videoTrack && !_videoTrack.local) {
_videoTrack.jitsiTrack.on(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
this.handleTrackStreamingStatusChanged);
dispatch(trackStreamingStatusChanged(_videoTrack.jitsiTrack,
@ -286,10 +280,9 @@ class Thumbnail extends PureComponent<Props> {
componentDidUpdate(prevProps: Props) {
// TODO: after converting this component to a react function component,
// use a custom hook to update local track streaming status.
const { _videoTrack, dispatch, _sourceNameSignalingEnabled } = this.props;
const { _videoTrack, dispatch } = this.props;
if (_sourceNameSignalingEnabled
&& prevProps._videoTrack?.jitsiTrack?.getSourceName() !== _videoTrack?.jitsiTrack?.getSourceName()) {
if (prevProps._videoTrack?.jitsiTrack?.getSourceName() !== _videoTrack?.jitsiTrack?.getSourceName()) {
if (prevProps._videoTrack && !prevProps._videoTrack.local) {
prevProps._videoTrack.jitsiTrack.off(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
this.handleTrackStreamingStatusChanged);
@ -314,9 +307,9 @@ class Thumbnail extends PureComponent<Props> {
componentWillUnmount() {
// TODO: after converting this component to a react function component,
// use a custom hook to update local track streaming status.
const { _videoTrack, dispatch, _sourceNameSignalingEnabled } = this.props;
const { _videoTrack, dispatch } = this.props;
if (_sourceNameSignalingEnabled && _videoTrack && !_videoTrack.local) {
if (_videoTrack && !_videoTrack.local) {
_videoTrack.jitsiTrack.off(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
this.handleTrackStreamingStatusChanged);
dispatch(trackStreamingStatusChanged(_videoTrack.jitsiTrack,
@ -409,7 +402,6 @@ function _mapStateToProps(state, ownProps) {
const id = participant?.id;
const audioTrack = getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.AUDIO, id);
const videoTrack = getVideoTrackByParticipant(state, participant);
const isMultiStreamSupportEnabled = getMultipleVideoSupportFeatureFlag(state);
const isScreenShare = videoTrack?.videoType === VIDEO_TYPE.DESKTOP;
const participantCount = getParticipantCount(state);
const renderDominantSpeakerIndicator = participant && participant.dominantSpeaker && participantCount > 2;
@ -424,7 +416,7 @@ function _mapStateToProps(state, ownProps) {
_fakeParticipant: participant?.fakeParticipant,
_gifSrc: mode === 'chat' ? null : gifSrc,
_isScreenShare: isScreenShare,
_isVirtualScreenshare: isMultiStreamSupportEnabled && isScreenShareParticipant(participant),
_isVirtualScreenshare: isScreenShareParticipant(participant),
_local: participant?.local,
_localVideoOwner: Boolean(ownerId === localParticipantId),
_participantId: id,
@ -432,7 +424,6 @@ function _mapStateToProps(state, ownProps) {
_raisedHand: hasRaisedHand(participant),
_renderDominantSpeakerIndicator: renderDominantSpeakerIndicator,
_renderModeratorIndicator: renderModeratorIndicator,
_sourceNameSignalingEnabled: getSourceNameSignalingFeatureFlag(state),
_videoTrack: videoTrack
};
}

View File

@ -9,7 +9,7 @@ import { FixedSizeGrid, FixedSizeList } from 'react-window';
import { ACTION_SHORTCUT_TRIGGERED, createShortcutEvent, createToolbarEvent } from '../../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../../analytics/functions';
import { IReduxState, IStore } from '../../../app/types';
import { getSourceNameSignalingFeatureFlag, getToolbarButtons } from '../../../base/config/functions.web';
import { getToolbarButtons } from '../../../base/config/functions.web';
import { isMobileBrowser } from '../../../base/environment/utils';
import { translate } from '../../../base/i18n/functions';
import Icon from '../../../base/icons/components/Icon';
@ -915,7 +915,7 @@ function _mapStateToProps(state: IReduxState, ownProps: Partial<IProps>) {
_isFilmstripButtonEnabled: isButtonEnabled('filmstrip', state),
_isToolboxVisible: isToolboxVisible(state),
_isVerticalFilmstrip,
_localScreenShare: getSourceNameSignalingFeatureFlag(state) && localScreenShare,
_localScreenShare: localScreenShare,
_mainFilmstripVisible: visible,
_maxFilmstripWidth: clientWidth - MIN_STAGE_VIEW_WIDTH,
_maxTopPanelHeight: clientHeight - MIN_STAGE_VIEW_HEIGHT,

View File

@ -11,10 +11,6 @@ import { sendAnalytics } from '../../../analytics/functions';
import { IReduxState } from '../../../app/types';
// @ts-ignore
import { Avatar } from '../../../base/avatar';
import {
getMultipleVideoSupportFeatureFlag,
getSourceNameSignalingFeatureFlag
} from '../../../base/config/functions.web';
import { isMobileBrowser } from '../../../base/environment/utils';
import { JitsiTrackEvents } from '../../../base/lib-jitsi-meet';
// @ts-ignore
@ -35,9 +31,8 @@ import { isTestModeEnabled } from '../../../base/testing/functions';
import { trackStreamingStatusChanged, updateLastTrackVideoMediaEvent } from '../../../base/tracks/actions';
import {
getLocalAudioTrack,
getLocalVideoTrack,
getTrackByMediaTypeAndParticipant,
getVirtualScreenshareParticipantTrack
getVideoTrackByParticipant
} from '../../../base/tracks/functions';
import { getVideoObjectPosition } from '../../../face-landmarks/functions';
import { hideGif, showGif } from '../../../gifs/actions';
@ -203,11 +198,6 @@ export interface IProps {
*/
_raisedHand: boolean;
/**
* Whether source name signaling is enabled.
*/
_sourceNameSignalingEnabled: boolean;
/**
* Whether or not the current layout is stage filmstrip layout.
*/
@ -445,9 +435,9 @@ class Thumbnail extends Component<IProps, IState> {
// Listen to track streaming status changed event to keep it updated.
// TODO: after converting this component to a react function component,
// use a custom hook to update local track streaming status.
const { _videoTrack, dispatch, _sourceNameSignalingEnabled } = this.props;
const { _videoTrack, dispatch } = this.props;
if (_sourceNameSignalingEnabled && _videoTrack && !_videoTrack.local) {
if (_videoTrack && !_videoTrack.local) {
_videoTrack.jitsiTrack.on(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
this.handleTrackStreamingStatusChanged);
dispatch(trackStreamingStatusChanged(_videoTrack.jitsiTrack,
@ -464,9 +454,9 @@ class Thumbnail extends Component<IProps, IState> {
componentWillUnmount() {
// TODO: after converting this component to a react function component,
// use a custom hook to update local track streaming status.
const { _videoTrack, dispatch, _sourceNameSignalingEnabled } = this.props;
const { _videoTrack, dispatch } = this.props;
if (_sourceNameSignalingEnabled && _videoTrack && !_videoTrack.local) {
if (_videoTrack && !_videoTrack.local) {
_videoTrack.jitsiTrack.off(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
this.handleTrackStreamingStatusChanged);
dispatch(trackStreamingStatusChanged(_videoTrack.jitsiTrack,
@ -488,10 +478,9 @@ class Thumbnail extends Component<IProps, IState> {
// TODO: after converting this component to a react function component,
// use a custom hook to update local track streaming status.
const { _videoTrack, dispatch, _sourceNameSignalingEnabled } = this.props;
const { _videoTrack, dispatch } = this.props;
if (_sourceNameSignalingEnabled
&& prevProps._videoTrack?.jitsiTrack?.getSourceName() !== _videoTrack?.jitsiTrack?.getSourceName()) {
if (prevProps._videoTrack?.jitsiTrack?.getSourceName() !== _videoTrack?.jitsiTrack?.getSourceName()) {
if (prevProps._videoTrack && !prevProps._videoTrack.local) {
prevProps._videoTrack.jitsiTrack.off(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
this.handleTrackStreamingStatusChanged);
@ -1170,21 +1159,12 @@ function _mapStateToProps(state: IReduxState, ownProps: any): Object {
const participant = getParticipantByIdOrUndefined(state, participantID);
const id = participant?.id ?? '';
const isLocal = participant?.local ?? true;
const multipleVideoSupportEnabled = getMultipleVideoSupportFeatureFlag(state);
const sourceNameSignalingEnabled = getSourceNameSignalingFeatureFlag(state);
const _isVirtualScreenshareParticipant = multipleVideoSupportEnabled && isScreenShareParticipant(participant);
const _isVirtualScreenshareParticipant = isScreenShareParticipant(participant);
const tracks = state['features/base/tracks'];
let _videoTrack;
if (_isVirtualScreenshareParticipant) {
_videoTrack = getVirtualScreenshareParticipantTrack(tracks, id);
} else {
_videoTrack = isLocal
? getLocalVideoTrack(tracks) : getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, participantID);
}
const _videoTrack = getVideoTrackByParticipant(state, participant);
const _audioTrack = isLocal
? getLocalAudioTrack(tracks) : getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.AUDIO, participantID);
? getLocalAudioTrack(tracks)
: getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.AUDIO, id);
const _currentLayout = getCurrentLayout(state);
let size: any = {};
let _isMobilePortrait = false;
@ -1302,10 +1282,8 @@ function _mapStateToProps(state: IReduxState, ownProps: any): Object {
_isVideoPlayable: id && isVideoPlayable(state, id),
_isVirtualScreenshareParticipant,
_localFlipX: Boolean(localFlipX),
_multipleVideoSupport: multipleVideoSupportEnabled,
_participant: participant,
_raisedHand: hasRaisedHand(participant),
_sourceNameSignalingEnabled: sourceNameSignalingEnabled,
_stageFilmstripLayout: isStageFilmstripAvailable(state),
_stageParticipantsVisible: _currentLayout === LAYOUTS.STAGE_FILMSTRIP_VIEW,
_thumbnailType: tileType,

View File

@ -6,13 +6,11 @@ import { makeStyles } from 'tss-react/mui';
import { IReduxState } from '../../../app/types';
import {
getMultipleVideoSupportFeatureFlag,
isDisplayNameVisible,
isNameReadOnly
} from '../../../base/config/functions.any';
import { isScreenShareParticipantById } from '../../../base/participants/functions';
import DisplayName from '../../../display-name/components/web/DisplayName';
import { THUMBNAIL_TYPE } from '../../constants';
// @ts-ignore
import StatusIndicators from './StatusIndicators';
@ -73,7 +71,6 @@ const ThumbnailBottomIndicators = ({
const { classes: styles } = useStyles();
const _allowEditing = !useSelector(isNameReadOnly);
const _defaultLocalDisplayName = interfaceConfig.DEFAULT_LOCAL_DISPLAY_NAME;
const _isMultiStreamEnabled = useSelector(getMultipleVideoSupportFeatureFlag);
const _showDisplayName = useSelector(isDisplayNameVisible);
const isVirtualScreenshareParticipant = useSelector(
(state: IReduxState) => isScreenShareParticipantById(state, participantId)
@ -85,9 +82,7 @@ const ThumbnailBottomIndicators = ({
audio = { !isVirtualScreenshareParticipant }
moderator = { true }
participantID = { participantId }
screenshare = { _isMultiStreamEnabled
? isVirtualScreenshareParticipant
: thumbnailType === THUMBNAIL_TYPE.TILE }
screenshare = { isVirtualScreenshareParticipant }
thumbnailType = { thumbnailType } />
}
{

View File

@ -5,7 +5,6 @@ import { useSelector } from 'react-redux';
import { makeStyles } from 'tss-react/mui';
import { IReduxState } from '../../../app/types';
import { getMultipleVideoSupportFeatureFlag } from '../../../base/config/functions.any';
import { isMobileBrowser } from '../../../base/environment/utils';
import { isScreenShareParticipantById } from '../../../base/participants/functions';
import ConnectionIndicator from '../../../connection-indicator/components/web/ConnectionIndicator';
@ -99,13 +98,12 @@ const ThumbnailTopIndicators = ({
useSelector((state: IReduxState) => state['features/base/config'].connectionIndicators?.autoHide) ?? true);
const _connectionIndicatorDisabled = _isMobile || disableConnectionIndicator
|| Boolean(useSelector((state: IReduxState) => state['features/base/config'].connectionIndicators?.disabled));
const _isMultiStreamEnabled = useSelector(getMultipleVideoSupportFeatureFlag);
const showConnectionIndicator = isHovered || !_connectionIndicatorAutoHideEnabled;
const isVirtualScreenshareParticipant = useSelector(
(state: IReduxState) => isScreenShareParticipantById(state, participantId)
);
if (_isMultiStreamEnabled && isVirtualScreenshareParticipant) {
if (isVirtualScreenshareParticipant) {
return (
<div className = { styles.container }>
{!_connectionIndicatorDisabled
@ -144,7 +142,7 @@ const ThumbnailTopIndicators = ({
<div className = { cx(indicatorsClassName, 'top-indicators') }>
<StatusIndicators
participantID = { participantId }
screenshare = { !_isMultiStreamEnabled } />
screenshare = { false } />
</div>
)}
</div>

View File

@ -2,7 +2,6 @@
import React, { Component } from 'react';
import { shouldComponentUpdate } from 'react-window';
import { getSourceNameSignalingFeatureFlag } from '../../../base/config';
import { getLocalParticipant } from '../../../base/participants';
import { connect } from '../../../base/redux';
import { shouldHideSelfView } from '../../../base/settings/functions.any';
@ -154,7 +153,6 @@ function _mapStateToProps(state, ownProps) {
const { remoteParticipants: remote } = state['features/filmstrip'];
const activeParticipants = getActiveParticipantsIds(state);
const disableSelfView = shouldHideSelfView(state);
const sourceNameSignalingEnabled = getSourceNameSignalingFeatureFlag(state);
const _verticalViewGrid = showGridInVerticalView(state);
const filmstripType = ownProps.data?.filmstripType;
const stageFilmstrip = filmstripType === FILMSTRIP_TYPE.STAGE;
@ -189,7 +187,7 @@ function _mapStateToProps(state, ownProps) {
if (stageFilmstrip) {
// We use the length of activeParticipants in stage filmstrip which includes local participants.
participantsLength = remoteParticipantsLength;
} else if (sourceNameSignalingEnabled) {
} else {
// We need to include the local screenshare participant in tile view.
participantsLength = remoteParticipantsLength
@ -198,8 +196,6 @@ function _mapStateToProps(state, ownProps) {
// Removes iAmRecorder from the total participants count.
- (iAmRecorder ? 1 : 0);
} else {
participantsLength = remoteParticipantsLength + (iAmRecorder ? 0 : 1) - (disableSelfView ? 1 : 0);
}
if (rowIndex === rows - 1) { // center the last row
@ -246,15 +242,9 @@ function _mapStateToProps(state, ownProps) {
// Local screen share is inserted at index 1 after the local camera.
const localScreenShareIndex = disableSelfView ? remoteParticipantsLength : 1;
let remoteIndex;
if (sourceNameSignalingEnabled) {
remoteIndex = !iAmRecorder && !disableSelfView
? index - localParticipantsLength : index;
} else {
remoteIndex = !iAmRecorder && !disableSelfView ? index - 1 : index;
}
const remoteIndex = !iAmRecorder && !disableSelfView
? index - localParticipantsLength
: index;
if (!iAmRecorder && index === localIndex) {
return {
@ -266,7 +256,7 @@ function _mapStateToProps(state, ownProps) {
};
}
if (sourceNameSignalingEnabled && !iAmRecorder && localScreenShare && index === localScreenShareIndex) {
if (!iAmRecorder && localScreenShare && index === localScreenShareIndex) {
return {
_disableSelfView: disableSelfView,
_filmstripType: filmstripType,

View File

@ -1,6 +1,3 @@
// @flow
import { getMultipleVideoSupportFeatureFlag } from '../base/config';
import { getActiveSpeakersToBeDisplayed, getVirtualScreenshareParticipantOwnerId } from '../base/participants';
import { setRemoteParticipants } from './actions';
@ -32,17 +29,14 @@ export function updateRemoteParticipants(store: Object, participantId: ?number)
const {
fakeParticipants,
sortedRemoteParticipants,
sortedRemoteScreenshares
sortedRemoteParticipants
} = state['features/base/participants'];
const remoteParticipants = new Map(sortedRemoteParticipants);
const screenShares = new Map(sortedRemoteScreenshares);
const screenShareParticipants = sortedRemoteVirtualScreenshareParticipants
? [ ...sortedRemoteVirtualScreenshareParticipants.keys() ] : [];
const sharedVideos = fakeParticipants ? Array.from(fakeParticipants.keys()) : [];
const speakers = getActiveSpeakersToBeDisplayed(state);
if (getMultipleVideoSupportFeatureFlag(state)) {
for (const screenshare of screenShareParticipants) {
const ownerId = getVirtualScreenshareParticipantOwnerId(screenshare);
@ -50,12 +44,6 @@ export function updateRemoteParticipants(store: Object, participantId: ?number)
remoteParticipants.delete(screenshare);
speakers.delete(ownerId);
}
} else {
for (const screenshare of screenShares.keys()) {
remoteParticipants.delete(screenshare);
speakers.delete(screenshare);
}
}
for (const sharedVideo of sharedVideos) {
remoteParticipants.delete(sharedVideo);
@ -64,7 +52,6 @@ export function updateRemoteParticipants(store: Object, participantId: ?number)
remoteParticipants.delete(speaker);
}
if (getMultipleVideoSupportFeatureFlag(state)) {
// Always update the order of the thumbnails.
const participantsWithScreenShare = screenShareParticipants.reduce((acc, screenshare) => {
const ownerId = getVirtualScreenshareParticipantOwnerId(screenshare);
@ -81,15 +68,6 @@ export function updateRemoteParticipants(store: Object, participantId: ?number)
...Array.from(speakers.keys()),
...Array.from(remoteParticipants.keys())
];
} else {
// Always update the order of the thumbnails.
reorderedParticipants = [
...Array.from(screenShares.keys()),
...sharedVideos,
...Array.from(speakers.keys()),
...Array.from(remoteParticipants.keys())
];
}
store.dispatch(setRemoteParticipants(Array.from(new Set(reorderedParticipants))));
}
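
Only the multi-stream ordering survives: judging from the owner lookup above, each remote screenshare is slotted next to its owner, followed by shared videos, active speakers and the remaining participants. A sketch of the outcome:

// Hypothetical state: 'p1' shares a screen (virtual participant 'p1-ss'),
// 'v1' is a shared video, 'p2' is an active speaker, 'p3' is idle.
// participantsWithScreenShare -> [ 'p1', 'p1-ss' ]
// reorderedParticipants       -> [ 'p1', 'p1-ss', 'v1', 'p2', 'p3' ]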

View File

@ -1,6 +1,5 @@
// @flow
import { getSourceNameSignalingFeatureFlag } from '../base/config';
import { isMobileBrowser } from '../base/environment/utils';
import { MEDIA_TYPE } from '../base/media';
import {
@ -14,12 +13,11 @@ import {
import { toState } from '../base/redux';
import { shouldHideSelfView } from '../base/settings/functions.any';
import {
getLocalVideoTrack,
getTrackByMediaTypeAndParticipant,
getVideoTrackByParticipant,
isLocalTrackMuted,
isRemoteTrackMuted
} from '../base/tracks/functions';
import { isParticipantConnectionStatusActive, isTrackStreamingStatusActive } from '../connection-indicator/functions';
import { isTrackStreamingStatusActive } from '../connection-indicator/functions';
import { isSharingStatus } from '../shared-video/functions';
import {
LAYOUTS,
@ -120,9 +118,7 @@ export function isVideoPlayable(stateful: Object | Function, id: String) {
const tracks = state['features/base/tracks'];
const participant = id ? getParticipantById(state, id) : getLocalParticipant(state);
const isLocal = participant?.local ?? true;
const videoTrack
= isLocal ? getLocalVideoTrack(tracks) : getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, id);
const videoTrack = getVideoTrackByParticipant(state, participant);
const isAudioOnly = Boolean(state['features/base/audio-only'].enabled);
let isPlayable = false;
@ -134,13 +130,7 @@ export function isVideoPlayable(stateful: Object | Function, id: String) {
// remote participants excluding shared video
const isVideoMuted = isRemoteTrackMuted(tracks, MEDIA_TYPE.VIDEO, id);
if (getSourceNameSignalingFeatureFlag(state)) {
isPlayable = Boolean(videoTrack) && !isVideoMuted && !isAudioOnly
&& isTrackStreamingStatusActive(videoTrack);
} else {
isPlayable = Boolean(videoTrack) && !isVideoMuted && !isAudioOnly
&& isParticipantConnectionStatusActive(participant);
}
isPlayable = Boolean(videoTrack) && !isVideoMuted && !isAudioOnly && isTrackStreamingStatusActive(videoTrack);
}
return isPlayable;
@ -235,7 +225,7 @@ export function getNumberOfPartipantsForTileView(state) {
const { iAmRecorder } = state['features/base/config'];
const disableSelfView = shouldHideSelfView(state);
const { localScreenShare } = state['features/base/participants'];
const localParticipantsCount = getSourceNameSignalingFeatureFlag(state) && localScreenShare ? 2 : 1;
const localParticipantsCount = localScreenShare ? 2 : 1;
const numberOfParticipants = getParticipantCountWithFake(state)
- (iAmRecorder ? 1 : 0)
- (disableSelfView ? localParticipantsCount : 0);
@ -521,13 +511,11 @@ export function computeDisplayModeFromInput(input: Object) {
isScreenSharing,
canPlayEventReceived,
isRemoteParticipant,
multipleVideoSupport,
stageParticipantsVisible,
tileViewActive
} = input;
const adjustedIsVideoPlayable = input.isVideoPlayable && (!isRemoteParticipant || canPlayEventReceived);
if (multipleVideoSupport) {
// Display video for virtual screen share participants in all layouts.
if (isVirtualScreenshareParticipant) {
return DISPLAY_VIDEO;
@ -539,7 +527,6 @@ export function computeDisplayModeFromInput(input: Object) {
if (isScreenSharing) {
return DISPLAY_AVATAR;
}
}
if (!tileViewActive && filmstripType === FILMSTRIP_TYPE.MAIN && ((isScreenSharing && isRemoteParticipant)
|| (stageParticipantsVisible && isActiveParticipant))) {
@ -572,7 +559,6 @@ export function getDisplayModeInput(props: Object, state: Object) {
_isVirtualScreenshareParticipant,
_isScreenSharing,
_isVideoPlayable,
_multipleVideoSupport,
_participant,
_stageParticipantsVisible,
_videoTrack,
@ -588,13 +574,11 @@ export function getDisplayModeInput(props: Object, state: Object) {
isAudioOnly: _isAudioOnly,
tileViewActive,
isVideoPlayable: _isVideoPlayable,
connectionStatus: _participant?.connectionStatus,
canPlayEventReceived,
videoStream: Boolean(_videoTrack),
isRemoteParticipant: !_participant?.fakeParticipant && !_participant?.local,
isScreenSharing: _isScreenSharing,
isVirtualScreenshareParticipant: _isVirtualScreenshareParticipant,
multipleVideoSupport: _multipleVideoSupport,
stageParticipantsVisible: _stageParticipantsVisible,
videoStreamMuted: _videoTrack ? _videoTrack.muted : 'no stream'
};
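
With the multipleVideoSupport guard removed, the branches above apply unconditionally. The literal mapping (sketch):

// isVirtualScreenshareParticipant         -> DISPLAY_VIDEO, in every layout
// otherwise, isScreenSharing on this tile -> DISPLAY_AVATAR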

View File

@ -2,7 +2,6 @@
import type { Dispatch } from 'redux';
import { getMultipleVideoSupportFeatureFlag } from '../base/config';
import { MEDIA_TYPE } from '../base/media';
import {
getDominantSpeakerParticipant,
@ -163,16 +162,12 @@ function _electParticipantInLargeVideo(state) {
participant = getDominantSpeakerParticipant(state);
if (participant && !participant.local) {
// Return the screensharing participant id associated with this endpoint if multi-stream is enabled and
// auto pin latest screenshare is disabled.
if (getMultipleVideoSupportFeatureFlag(state)) {
// auto_pin_latest_screen_share setting is disabled.
const screenshareParticipant = getVirtualScreenshareParticipantByOwnerId(state, participant.id);
return screenshareParticipant?.id ?? participant.id;
}
return participant.id;
}
// In case this is the local participant.
participant = undefined;

View File

@ -3,7 +3,6 @@
import React, { PureComponent } from 'react';
import type { Dispatch } from 'redux';
import { getSourceNameSignalingFeatureFlag } from '../../base/config/functions.any';
import { JitsiTrackEvents } from '../../base/lib-jitsi-meet';
import ParticipantView from '../../base/participants/components/ParticipantView.native';
import { getParticipantById, isLocalScreenshareParticipant } from '../../base/participants/functions';
@ -38,11 +37,6 @@ type Props = {
*/
_participantId: string,
/**
* Whether source name signaling is enabled.
*/
_sourceNameSignalingEnabled: boolean,
/**
* The video track that will be displayed in the thumbnail.
*/
@ -144,9 +138,9 @@ class LargeVideo extends PureComponent<Props, State> {
// Listen to track streaming status changed event to keep it updated.
// TODO: after converting this component to a react function component,
// use a custom hook to update local track streaming status.
const { _videoTrack, dispatch, _sourceNameSignalingEnabled } = this.props;
const { _videoTrack, dispatch } = this.props;
if (_sourceNameSignalingEnabled && _videoTrack && !_videoTrack.local) {
if (_videoTrack && !_videoTrack.local) {
_videoTrack.jitsiTrack.on(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
this.handleTrackStreamingStatusChanged);
dispatch(trackStreamingStatusChanged(_videoTrack.jitsiTrack,
@ -164,10 +158,9 @@ class LargeVideo extends PureComponent<Props, State> {
componentDidUpdate(prevProps: Props) {
// TODO: after converting this component to a react function component,
// use a custom hook to update local track streaming status.
const { _videoTrack, dispatch, _sourceNameSignalingEnabled } = this.props;
const { _videoTrack, dispatch } = this.props;
if (_sourceNameSignalingEnabled
&& prevProps._videoTrack?.jitsiTrack?.getSourceName() !== _videoTrack?.jitsiTrack?.getSourceName()) {
if (prevProps._videoTrack?.jitsiTrack?.getSourceName() !== _videoTrack?.jitsiTrack?.getSourceName()) {
if (prevProps._videoTrack && !prevProps._videoTrack.local) {
prevProps._videoTrack.jitsiTrack.off(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
this.handleTrackStreamingStatusChanged);
@ -192,9 +185,9 @@ class LargeVideo extends PureComponent<Props, State> {
componentWillUnmount() {
// TODO: after converting this component to a react function component,
// use a custom hook to update local track streaming status.
const { _videoTrack, dispatch, _sourceNameSignalingEnabled } = this.props;
const { _videoTrack, dispatch } = this.props;
if (_sourceNameSignalingEnabled && _videoTrack && !_videoTrack.local) {
if (_videoTrack && !_videoTrack.local) {
_videoTrack.jitsiTrack.off(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
this.handleTrackStreamingStatusChanged);
dispatch(trackStreamingStatusChanged(_videoTrack.jitsiTrack,
@ -269,7 +262,6 @@ function _mapStateToProps(state) {
_disableVideo: disableVideo,
_height: height,
_participantId: participantId,
_sourceNameSignalingEnabled: getSourceNameSignalingFeatureFlag(state),
_videoTrack: videoTrack,
_width: width
};

View File

@ -3,12 +3,11 @@
import React, { Component } from 'react';
import VideoLayout from '../../../../modules/UI/videolayout/VideoLayout';
import { getMultipleVideoSupportFeatureFlag } from '../../base/config';
import { MEDIA_TYPE, VIDEO_TYPE } from '../../base/media';
import { getLocalParticipant, isScreenShareParticipant } from '../../base/participants';
import { VIDEO_TYPE } from '../../base/media';
import { getLocalParticipant } from '../../base/participants';
import { Watermarks } from '../../base/react';
import { connect } from '../../base/redux';
import { getTrackByMediaTypeAndParticipant, getVirtualScreenshareParticipantTrack } from '../../base/tracks';
import { getVideoTrackByParticipant } from '../../base/tracks';
import { setColorAlpha } from '../../base/util';
import { StageParticipantNameLabel } from '../../display-name';
import { FILMSTRIP_BREAKPOINT, isFilmstripResizable } from '../../filmstrip';
@ -340,20 +339,11 @@ function _mapStateToProps(state) {
const { width: verticalFilmstripWidth, visible } = state['features/filmstrip'];
const { defaultLocalDisplayName, hideDominantSpeakerBadge } = state['features/base/config'];
const { seeWhatIsBeingShared } = state['features/large-video'];
const tracks = state['features/base/tracks'];
const localParticipantId = getLocalParticipant(state)?.id;
const largeVideoParticipant = getLargeVideoParticipant(state);
let videoTrack;
if (getMultipleVideoSupportFeatureFlag(state) && isScreenShareParticipant(largeVideoParticipant)) {
videoTrack = getVirtualScreenshareParticipantTrack(tracks, largeVideoParticipant?.id);
} else {
videoTrack = getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, largeVideoParticipant?.id);
}
const videoTrack = getVideoTrackByParticipant(state, largeVideoParticipant);
const isLocalScreenshareOnLargeVideo = largeVideoParticipant?.id?.includes(localParticipantId)
&& videoTrack?.videoType === VIDEO_TYPE.DESKTOP;
const isOnSpot = defaultLocalDisplayName === SPOT_DISPLAY_NAME;
return {

View File

@ -1,11 +1,8 @@
// @flow
import VideoLayout from '../../../modules/UI/videolayout/VideoLayout';
import { getMultipleVideoSupportFeatureFlag } from '../base/config';
import { MEDIA_TYPE } from '../base/media';
import { isScreenShareParticipant } from '../base/participants';
import { StateListenerRegistry } from '../base/redux';
import { getTrackByMediaTypeAndParticipant, getVirtualScreenshareParticipantTrack } from '../base/tracks';
import { getVideoTrackByParticipant } from '../base/tracks';
import { getLargeVideoParticipant } from './functions';
@ -25,14 +22,7 @@ StateListenerRegistry.register(
StateListenerRegistry.register(
/* selector */ state => {
const largeVideoParticipant = getLargeVideoParticipant(state);
const tracks = state['features/base/tracks'];
let videoTrack;
if (getMultipleVideoSupportFeatureFlag(state) && isScreenShareParticipant(largeVideoParticipant)) {
videoTrack = getVirtualScreenshareParticipantTrack(tracks, largeVideoParticipant?.id);
} else {
videoTrack = getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, largeVideoParticipant?.id);
}
const videoTrack = getVideoTrackByParticipant(state, largeVideoParticipant);
return {
participantId: largeVideoParticipant?.id,

View File

@ -2,7 +2,6 @@
import $ from 'jquery';
import { getMultipleVideoSendingSupportFeatureFlag } from '../base/config/functions.any';
import { openDialog } from '../base/dialog';
import { JitsiConferenceEvents } from '../base/lib-jitsi-meet';
import {
@ -11,7 +10,7 @@ import {
getVirtualScreenshareParticipantByOwnerId,
pinParticipant
} from '../base/participants';
import { getLocalDesktopTrack, getLocalVideoTrack, toggleScreensharing } from '../base/tracks';
import { getLocalDesktopTrack, toggleScreensharing } from '../base/tracks';
import { NOTIFICATION_TIMEOUT_TYPE, showNotification } from '../notifications';
import { isScreenVideoShared } from '../screen-share/functions';
@ -511,9 +510,7 @@ export function sendStartRequest() {
return (dispatch: Function, getState: Function) => {
const state = getState();
const tracks = state['features/base/tracks'];
const track = getMultipleVideoSendingSupportFeatureFlag(state)
? getLocalDesktopTrack(tracks)
: getLocalVideoTrack(tracks);
const track = getLocalDesktopTrack(tracks);
const { sourceId } = track?.jitsiTrack || {};
const { transport } = state['features/remote-control'].receiver;
@ -547,29 +544,19 @@ export function grant(participantId: string) {
let promise;
const state = getState();
const tracks = state['features/base/tracks'];
const isMultiStreamSupportEnabled = getMultipleVideoSendingSupportFeatureFlag(state);
const track = isMultiStreamSupportEnabled ? getLocalDesktopTrack(tracks) : getLocalVideoTrack(tracks);
const track = getLocalDesktopTrack(tracks);
const isScreenSharing = isScreenVideoShared(state);
const { sourceType } = track?.jitsiTrack || {};
if (isScreenSharing && sourceType === 'screen') {
promise = dispatch(sendStartRequest());
} else if (isMultiStreamSupportEnabled) {
} else {
promise = dispatch(toggleScreensharing(
true,
false,
true,
{ desktopSharingSources: [ 'screen' ] }
))
.then(() => dispatch(sendStartRequest()));
} else {
// FIXME: Use action here once toggleScreenSharing is moved to redux.
promise = APP.conference.toggleScreenSharing(
true,
{
desktopSharingSources: [ 'screen' ]
})
.then(() => dispatch(sendStartRequest()));
}
const { conference } = state['features/base/conference'];

View File

@ -1,166 +0,0 @@
// @flow
import {
CLEAR_INTERVAL,
INTERVAL_TIMEOUT,
SET_INTERVAL,
timerWorkerScript
} from './TimeWorker';
/**
* Represents a modified MediaStream that adds video as a picture-in-picture overlay on a desktop stream.
* <tt>JitsiStreamPresenterEffect</tt> does the processing of the original
* desktop stream.
*/
export default class JitsiStreamPresenterEffect {
_canvas: HTMLCanvasElement;
_ctx: CanvasRenderingContext2D;
_desktopElement: HTMLVideoElement;
_desktopStream: MediaStream;
_frameRate: number;
_onVideoFrameTimer: Function;
_onVideoFrameTimerWorker: Function;
_renderVideo: Function;
_videoFrameTimerWorker: Worker;
_videoElement: HTMLVideoElement;
isEnabled: Function;
startEffect: Function;
stopEffect: Function;
/**
* Represents a modified MediaStream that adds a camera track at the
* bottom right corner of the desktop track using an HTML canvas.
* <tt>JitsiStreamPresenterEffect</tt> does the processing of the original
* video stream.
*
* @param {MediaStream} videoStream - The video stream which is used for
* creating the canvas.
*/
constructor(videoStream: MediaStream) {
const videoDiv = document.createElement('div');
const firstVideoTrack = videoStream.getVideoTracks()[0];
const { height, width, frameRate } = firstVideoTrack.getSettings() ?? firstVideoTrack.getConstraints();
this._canvas = document.createElement('canvas');
this._ctx = this._canvas.getContext('2d');
this._desktopElement = document.createElement('video');
this._videoElement = document.createElement('video');
videoDiv.appendChild(this._videoElement);
videoDiv.appendChild(this._desktopElement);
if (document.body !== null) {
document.body.appendChild(videoDiv);
}
// Set the video element properties
this._frameRate = parseInt(frameRate, 10);
this._videoElement.width = parseInt(width, 10);
this._videoElement.height = parseInt(height, 10);
this._videoElement.autoplay = true;
this._videoElement.srcObject = videoStream;
// autoplay is not enough to start the video on Safari; it's fine to call play() on other platforms as well
this._videoElement.play();
// set the style attribute of the div to make it invisible
videoDiv.style.display = 'none';
// Bind event handler so it is only bound once for every instance.
this._onVideoFrameTimer = this._onVideoFrameTimer.bind(this);
}
/**
* EventHandler onmessage for the videoFrameTimerWorker WebWorker.
*
* @private
* @param {EventHandler} response - The onmessage EventHandler parameter.
* @returns {void}
*/
_onVideoFrameTimer(response) {
if (response.data.id === INTERVAL_TIMEOUT) {
this._renderVideo();
}
}
/**
* Loop function to render the video frame input and draw the presenter effect.
*
* @private
* @returns {void}
*/
_renderVideo() {
// adjust the canvas width/height on every frame in case the window has been resized.
const [ track ] = this._desktopStream.getVideoTracks();
const { height, width } = track.getSettings() ?? track.getConstraints();
this._canvas.width = parseInt(width, 10);
this._canvas.height = parseInt(height, 10);
this._ctx.drawImage(this._desktopElement, 0, 0, this._canvas.width, this._canvas.height);
this._ctx.drawImage(this._videoElement, this._canvas.width - this._videoElement.width, this._canvas.height
- this._videoElement.height, this._videoElement.width, this._videoElement.height);
// draw a border around the video element.
this._ctx.beginPath();
this._ctx.lineWidth = 2;
this._ctx.strokeStyle = '#A9A9A9'; // dark grey
this._ctx.rect(this._canvas.width - this._videoElement.width, this._canvas.height - this._videoElement.height,
this._videoElement.width, this._videoElement.height);
this._ctx.stroke();
}
/**
* Checks if the local track supports this effect.
*
* @param {JitsiLocalTrack} jitsiLocalTrack - Track to apply effect.
* @returns {boolean} - Returns true if this effect can run on the
* specified track, false otherwise.
*/
isEnabled(jitsiLocalTrack: Object) {
return jitsiLocalTrack.isVideoTrack() && jitsiLocalTrack.videoType === 'desktop';
}
/**
* Starts loop to capture video frame and render presenter effect.
*
* @param {MediaStream} desktopStream - Stream to be used for processing.
* @returns {MediaStream} - The stream with the applied effect.
*/
startEffect(desktopStream: MediaStream) {
const firstVideoTrack = desktopStream.getVideoTracks()[0];
const { height, width } = firstVideoTrack.getSettings() ?? firstVideoTrack.getConstraints();
// set the desktop element properties.
this._desktopStream = desktopStream;
this._desktopElement.width = parseInt(width, 10);
this._desktopElement.height = parseInt(height, 10);
this._desktopElement.autoplay = true;
this._desktopElement.srcObject = desktopStream;
// autoplay is not enough to start the video on Safari; it's fine to call play() on other platforms as well
this._desktopElement.play();
this._canvas.width = parseInt(width, 10);
this._canvas.height = parseInt(height, 10);
this._videoFrameTimerWorker = new Worker(timerWorkerScript, { name: 'Presenter effect worker' });
this._videoFrameTimerWorker.onmessage = this._onVideoFrameTimer;
this._videoFrameTimerWorker.postMessage({
id: SET_INTERVAL,
timeMs: 1000 / this._frameRate
});
return this._canvas.captureStream(this._frameRate);
}
/**
* Stops the capture and render loop.
*
* @returns {void}
*/
stopEffect() {
this._videoFrameTimerWorker.postMessage({
id: CLEAR_INTERVAL
});
this._videoFrameTimerWorker.terminate();
}
}

View File

@@ -1,62 +0,0 @@
// @flow
/**
* The SET_INTERVAL constant is used to set an interval; it is passed in the
* id property of request.data. The timeMs property must also be set.
* Example request.data:
*
* {
* id: SET_INTERVAL,
* timeMs: 33
* }.
*/
export const SET_INTERVAL = 1;
/**
* The CLEAR_INTERVAL constant is used to clear the interval; it is passed in
* the id property of request.data. Example request.data:
*
* {
* id: CLEAR_INTERVAL
* }.
*/
export const CLEAR_INTERVAL = 2;
/**
* The INTERVAL_TIMEOUT constant is used in responses from the worker; it is
* set in the id property. Example response data:
*
* {
* id: INTERVAL_TIMEOUT
* }.
*/
export const INTERVAL_TIMEOUT = 3;
/**
* The following code is kept as a string so that a URL can be created from a
* Blob and passed to a WebWorker. This makes it possible to use a setInterval
* that is not throttled when the tab is inactive.
*/
const code = `
var timer;
onmessage = function(request) {
switch (request.data.id) {
case ${SET_INTERVAL}: {
timer = setInterval(() => {
postMessage({ id: ${INTERVAL_TIMEOUT} });
}, request.data.timeMs);
break;
}
case ${CLEAR_INTERVAL}: {
if (timer) {
clearInterval(timer);
}
break;
}
}
};
`;
export const timerWorkerScript
= URL.createObjectURL(new Blob([ code ], { type: 'application/javascript' }));
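
For reference, a minimal sketch of how this worker-timer pattern is consumed; it mirrors the startEffect/stopEffect calls in the deleted effect above, and the ~30 fps rate is an arbitrary example:

const worker = new Worker(timerWorkerScript, { name: 'Example timer' });

worker.onmessage = ({ data }) => {
    if (data.id === INTERVAL_TIMEOUT) {
        // One tick per timeMs; ticks keep firing even while the tab is in
        // the background, unlike a page-level setInterval.
    }
};

// Start ticking at roughly 30 fps.
worker.postMessage({ id: SET_INTERVAL, timeMs: 1000 / 30 });

// ...later, stop and clean up:
worker.postMessage({ id: CLEAR_INTERVAL });
worker.terminate();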

View File

@@ -1,19 +0,0 @@
// @flow
import JitsiStreamPresenterEffect from './JitsiStreamPresenterEffect';
/**
* Creates a new instance of JitsiStreamPresenterEffect.
*
* @param {MediaStream} stream - The video stream which will be used for
* creating the presenter effect.
* @returns {Promise<JitsiStreamPresenterEffect>}
*/
export function createPresenterEffect(stream: MediaStream) {
if (!MediaStreamTrack.prototype.getSettings
&& !MediaStreamTrack.prototype.getConstraints) {
return Promise.reject(new Error('JitsiStreamPresenterEffect not supported!'));
}
return Promise.resolve(new JitsiStreamPresenterEffect(stream));
}
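
A minimal sketch of how the removed factory and effect fit together, assuming lib-jitsi-meet's JitsiLocalTrack.setEffect() API; the cameraStream and desktopJitsiTrack names are illustrative:

const effect = await createPresenterEffect(cameraStream);

if (effect.isEnabled(desktopJitsiTrack)) {
    // setEffect() hands the underlying desktop MediaStream to startEffect()
    // and publishes the returned canvas capture stream in its place.
    await desktopJitsiTrack.setEffect(effect);
}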

View File

@@ -5,8 +5,7 @@ import { sendAnalytics } from '../analytics/functions';
import { IStore } from '../app/types';
import { setAudioOnly } from '../base/audio-only/actions';
import { setVideoMuted } from '../base/media/actions';
import { VIDEO_MUTISM_AUTHORITY } from '../base/media/constants';
import { getLocalVideoType } from '../base/tracks/functions';
import { MEDIA_TYPE, VIDEO_MUTISM_AUTHORITY } from '../base/media/constants';
import {
SET_TOOLBOX_ENABLED,
@@ -88,18 +87,16 @@ export function handleToggleVideoMuted(muted: boolean, showUI: boolean, ensureTr
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
const { enabled: audioOnly } = state['features/base/audio-only'];
const tracks = state['features/base/tracks'];
sendAnalytics(createToolbarEvent(VIDEO_MUTE, { enable: muted }));
if (audioOnly) {
dispatch(setAudioOnly(false));
}
const mediaType = getLocalVideoType(tracks);
dispatch(
setVideoMuted(
muted,
mediaType,
MEDIA_TYPE.VIDEO,
VIDEO_MUTISM_AUTHORITY.USER,
ensureTrack));
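
For context, a sketch of how this thunk is typically dispatched; the argument values are illustrative:

// Mute the camera, allow UI feedback, and do not force track creation.
dispatch(handleToggleVideoMuted(/* muted */ true, /* showUI */ true, /* ensureTrack */ false));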

View File

@@ -8,10 +8,11 @@ import {
} from '../../analytics';
import { VIDEO_MUTE_BUTTON_ENABLED, getFeatureFlag } from '../../base/flags';
import { translate } from '../../base/i18n';
import { MEDIA_TYPE } from '../../base/media';
import { connect } from '../../base/redux';
import { AbstractButton, AbstractVideoMuteButton } from '../../base/toolbox/components';
import type { AbstractButtonProps } from '../../base/toolbox/components';
import { isLocalCameraTrackMuted } from '../../base/tracks';
import { isLocalTrackMuted } from '../../base/tracks';
import { handleToggleVideoMuted } from '../actions.any';
import { isVideoMuteButtonDisabled } from '../functions';
@@ -163,7 +164,7 @@ function _mapStateToProps(state): Object {
return {
_videoDisabled: isVideoMuteButtonDisabled(state),
_videoMuted: isLocalCameraTrackMuted(tracks),
_videoMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO),
visible: enabledFlag
};
}
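
isLocalTrackMuted, which replaces the camera-specific selector here, is generic over the media type. A rough sketch of its assumed shape (the real implementation lives in base/tracks):

function isLocalTrackMuted(tracks, mediaType) {
    // A missing local track of the given media type counts as muted.
    const track = tracks.find(t => t.local && t.mediaType === mediaType);

    return !track || track.muted;
}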

View File

@@ -2,9 +2,10 @@
import { translate } from '../../../base/i18n';
import { IconCameraRefresh } from '../../../base/icons';
import { MEDIA_TYPE } from '../../../base/media';
import { connect } from '../../../base/redux';
import { AbstractButton, type AbstractButtonProps } from '../../../base/toolbox/components';
import { isLocalCameraTrackMuted, isToggleCameraEnabled, toggleCamera } from '../../../base/tracks';
import { isLocalTrackMuted, isToggleCameraEnabled, toggleCamera } from '../../../base/tracks';
/**
* The type of the React {@code Component} props of {@link ToggleCameraButton}.
@@ -69,7 +70,7 @@ function mapStateToProps(state): Object {
return {
_audioOnly: Boolean(audioOnly),
_videoMuted: isLocalCameraTrackMuted(tracks),
_videoMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO),
visible: isToggleCameraEnabled(state)
};
}

View File

@@ -2,7 +2,7 @@
import VideoLayout from '../../../modules/UI/videolayout/VideoLayout.js';
import { CONFERENCE_WILL_LEAVE } from '../base/conference/actionTypes';
import { MEDIA_TYPE } from '../base/media/constants';
import { PARTICIPANT_JOINED, PARTICIPANT_UPDATED } from '../base/participants/actionTypes';
import { PARTICIPANT_JOINED } from '../base/participants/actionTypes';
import { getLocalParticipant } from '../base/participants/functions';
import MiddlewareRegistry from '../base/redux/MiddlewareRegistry';
import { TRACK_ADDED, TRACK_REMOVED, TRACK_STOPPED } from '../base/tracks/actionTypes';
@@ -35,18 +35,6 @@ MiddlewareRegistry.register(store => next => action => {
}
break;
case PARTICIPANT_UPDATED: {
// Look for actions that changed the participant's connectionStatus. This is
// done here, instead of introducing an explicit connection-status-change
// action, in order to minimize changes to other code.
if (typeof action.participant.connectionStatus !== 'undefined') {
VideoLayout.onParticipantConnectionStatusChanged(
action.participant.id,
action.participant.connectionStatus);
}
break;
}
case PARTICIPANTS_PANE_CLOSE:
case PARTICIPANTS_PANE_OPEN:
VideoLayout.resizeVideoArea();

View File

@@ -1,21 +1,13 @@
import debounce from 'lodash/debounce';
import { getMultipleVideoSupportFeatureFlag } from '../base/config/functions';
import StateListenerRegistry from '../base/redux/StateListenerRegistry';
import { equals } from '../base/redux/functions';
import { ITrack } from '../base/tracks/types';
import { isFollowMeActive } from '../follow-me/functions';
import { setRemoteParticipantsWithScreenShare, virtualScreenshareParticipantsUpdated } from './actions.web';
import { virtualScreenshareParticipantsUpdated } from './actions.web';
import { getAutoPinSetting, updateAutoPinnedParticipant } from './functions.web';
StateListenerRegistry.register(
/* selector */ state => state['features/base/participants'].sortedRemoteVirtualScreenshareParticipants,
/* listener */ (sortedRemoteVirtualScreenshareParticipants, store) => {
if (!getMultipleVideoSupportFeatureFlag(store.getState())) {
return;
}
const oldScreenSharesOrder = store.getState()['features/video-layout'].remoteScreenShares || [];
const knownSharingParticipantIds = [ ...sortedRemoteVirtualScreenshareParticipants.keys() ];
@@ -41,57 +33,3 @@ StateListenerRegistry.register(
}
}
});
/**
* For auto-pin mode, listen for changes to the known media tracks and look
* for updates to screen shares. The listener is debounced to avoid state
* thrashing that might occur, especially when switching in or out of p2p.
*/
StateListenerRegistry.register(
/* selector */ state => state['features/base/tracks'],
/* listener */ debounce((tracks, store) => {
// Because of the debounce, removal of screen shares needs to be handled in the middleware. Otherwise a
// screen-sharing participant that has already left could remain in the remoteScreenShares array, which
// can lead to rendering thumbnails for participants who have already left, since the remoteScreenShares
// array is used for building the ordered list of remote participants.
if (getMultipleVideoSupportFeatureFlag(store.getState())) {
return;
}
const oldScreenSharesOrder = store.getState()['features/video-layout'].remoteScreenShares || [];
const knownSharingParticipantIds = tracks.reduce((acc: string[], track: ITrack) => {
if (track.mediaType === 'video' && track.videoType === 'desktop') {
const skipTrack = getAutoPinSetting() === 'remote-only' && track.local;
if (!skipTrack) {
acc.push(track.participantId);
}
}
return acc;
}, []);
// Filter out any participants who are no longer screen sharing by keeping
// only the participant IDs that are still known to be sharing.
const newScreenSharesOrder = oldScreenSharesOrder.filter(
(participantId: string) => knownSharingParticipantIds.includes(participantId));
// Make sure all newly sharing participants get added to the end of the
// known screen shares.
knownSharingParticipantIds.forEach((participantId: string) => {
if (!newScreenSharesOrder.includes(participantId)) {
newScreenSharesOrder.push(participantId);
}
});
if (!equals(oldScreenSharesOrder, newScreenSharesOrder)) {
store.dispatch(
setRemoteParticipantsWithScreenShare(newScreenSharesOrder));
if (getAutoPinSetting() && !isFollowMeActive(store)) {
updateAutoPinnedParticipant(oldScreenSharesOrder, store);
}
}
}, 100));

View File

@@ -47,7 +47,7 @@ export function muteLocal(enable: boolean, mediaType: MediaType, stopScreenShari
}
if (enable && stopScreenSharing) {
dispatch(toggleScreensharing(false, false, true));
dispatch(toggleScreensharing(false, false));
}
sendAnalytics(createToolbarEvent(isAudio ? AUDIO_MUTE : VIDEO_MUTE, { enable }));

View File

@@ -2,14 +2,10 @@ import debounce from 'lodash/debounce';
import { IStore } from '../app/types';
import { _handleParticipantError } from '../base/conference/functions';
import { getSourceNameSignalingFeatureFlag } from '../base/config/functions';
import { MEDIA_TYPE } from '../base/media/constants';
import { getLocalParticipant } from '../base/participants/functions';
import StateListenerRegistry from '../base/redux/StateListenerRegistry';
import {
getRemoteScreenSharesSourceNames,
getTrackSourceNameByMediaTypeAndParticipant
} from '../base/tracks/functions';
import { getTrackSourceNameByMediaTypeAndParticipant } from '../base/tracks/functions';
import { reportError } from '../base/util/helpers';
import {
getActiveParticipantsIds,
@@ -355,7 +351,6 @@ function _updateReceiverVideoConstraints({ getState }: IStore) {
const { remoteScreenShares } = state['features/video-layout'];
const { visibleRemoteParticipants } = state['features/filmstrip'];
const tracks = state['features/base/tracks'];
const sourceNameSignaling = getSourceNameSignalingFeatureFlag(state);
const localParticipantId = getLocalParticipant(state)?.id;
const activeParticipantsIds = getActiveParticipantsIds(state);
const screenshareFilmstripParticipantId = isTopPanelEnabled(state) && getScreenshareFilmstripParticipantId(state);
@@ -366,22 +361,18 @@ function _updateReceiverVideoConstraints({ getState }: IStore) {
lastN
};
let remoteScreenSharesSourceNames: string[];
let visibleRemoteTrackSourceNames: string[] = [];
const activeParticipantsSources: string[] = [];
const visibleRemoteTrackSourceNames: string[] = [];
let largeVideoSourceName: string | undefined;
let activeParticipantsSources: string[] = [];
if (sourceNameSignaling) {
receiverConstraints.onStageSources = [];
receiverConstraints.selectedSources = [];
remoteScreenSharesSourceNames = getRemoteScreenSharesSourceNames(state, remoteScreenShares);
if (visibleRemoteParticipants?.size) {
visibleRemoteParticipants.forEach(participantId => {
let sourceName;
if (remoteScreenSharesSourceNames.includes(participantId)) {
if (remoteScreenShares.includes(participantId)) {
sourceName = participantId;
} else {
sourceName = getTrackSourceNameByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, participantId);
@@ -397,7 +388,7 @@ function _updateReceiverVideoConstraints({ getState }: IStore) {
activeParticipantsIds.forEach((participantId: string) => {
let sourceName;
if (remoteScreenSharesSourceNames.includes(participantId)) {
if (remoteScreenShares.includes(participantId)) {
sourceName = participantId;
} else {
sourceName = getTrackSourceNameByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, participantId);
@@ -411,7 +402,7 @@ function _updateReceiverVideoConstraints({ getState }: IStore) {
}
if (localParticipantId !== largeVideoParticipantId) {
if (remoteScreenSharesSourceNames.includes(largeVideoParticipantId)) {
if (remoteScreenShares.includes(largeVideoParticipantId)) {
largeVideoSourceName = largeVideoParticipantId;
} else {
largeVideoSourceName = getTrackSourceNameByMediaTypeAndParticipant(
@@ -419,15 +410,6 @@ function _updateReceiverVideoConstraints({ getState }: IStore) {
);
}
}
} else {
receiverConstraints.onStageEndpoints = [];
receiverConstraints.selectedEndpoints = [];
remoteScreenSharesSourceNames = remoteScreenShares;
visibleRemoteTrackSourceNames = [ ...visibleRemoteParticipants ];
largeVideoSourceName = largeVideoParticipantId;
activeParticipantsSources = activeParticipantsIds;
}
// Tile view.
if (shouldDisplayTileView(state)) {
@@ -440,9 +422,8 @@ function _updateReceiverVideoConstraints({ getState }: IStore) {
});
// Prioritize screenshare in tile view.
if (remoteScreenSharesSourceNames?.length) {
receiverConstraints[sourceNameSignaling ? 'selectedSources' : 'selectedEndpoints']
= remoteScreenSharesSourceNames;
if (remoteScreenShares?.length) {
receiverConstraints.selectedSources = remoteScreenShares;
}
// Stage view.
@@ -479,7 +460,7 @@ function _updateReceiverVideoConstraints({ getState }: IStore) {
};
}
receiverConstraints[sourceNameSignaling ? 'onStageSources' : 'onStageEndpoints'] = onStageSources;
receiverConstraints.onStageSources = onStageSources;
} else if (largeVideoSourceName) {
let quality = VIDEO_QUALITY_UNLIMITED;
@@ -488,8 +469,7 @@ function _updateReceiverVideoConstraints({ getState }: IStore) {
quality = maxFrameHeightForLargeVideo;
}
receiverConstraints.constraints[largeVideoSourceName] = { 'maxHeight': quality };
receiverConstraints[sourceNameSignaling ? 'onStageSources' : 'onStageEndpoints']
= [ largeVideoSourceName ];
receiverConstraints.onStageSources = [ largeVideoSourceName ];
}
}
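
After this change the receiver constraints message always uses source names. A sketch of the resulting shape; the source names and quality values are illustrative, and the defaultConstraints field is assumed from the surrounding code rather than shown in this excerpt:

const receiverConstraints = {
    constraints: {
        // Per-source caps, e.g. the on-stage source at the computed quality.
        'abcd1234-v0': { 'maxHeight': 720 }
    },
    defaultConstraints: { 'maxHeight': 180 },
    lastN: 20,
    onStageSources: [ 'abcd1234-v0' ],
    selectedSources: []
};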