Use redux for local tracks instead of conference.js (#9920)

* do not use this.localVideo

* move tracks initialized flag around

* do not use this.localAudio

* untangle use audio/video stream methods

It should be safe to call setVideoMuteStatus and
setAudioMuteStatus regardless of the prejoin page
visibility state.

* add NO-OP to the useAudioStream/useVideoStream methods and fix a crash
in _setLocalAudioVideoStreams when a returned value was not a promise

* use allSettled
This commit is contained in:
Paweł Domas 2021-09-13 12:33:04 -05:00 committed by GitHub
parent 6711801c3b
commit 1db52354fb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 78 additions and 91 deletions

View File

@ -454,27 +454,12 @@ export default {
isSharingScreen: false, isSharingScreen: false,
/**
* The local audio track (if any).
* FIXME tracks from redux store should be the single source of truth
* @type {JitsiLocalTrack|null}
*/
localAudio: null,
/** /**
* The local presenter video track (if any). * The local presenter video track (if any).
* @type {JitsiLocalTrack|null} * @type {JitsiLocalTrack|null}
*/ */
localPresenterVideo: null, localPresenterVideo: null,
/**
* The local video track (if any).
* FIXME tracks from redux store should be the single source of truth, but
* more refactoring is required around screen sharing ('localVideo' usages).
* @type {JitsiLocalTrack|null}
*/
localVideo: null,
/** /**
* Returns an object containing a promise which resolves with the created tracks & * Returns an object containing a promise which resolves with the created tracks &
* the errors resulting from that process. * the errors resulting from that process.
@ -728,9 +713,7 @@ export default {
track.mute(); track.mute();
} }
}); });
logger.log(`Initialized with ${tracks.length} local tracks`);
this._localTracksInitialized = true;
con.addEventListener(JitsiConnectionEvents.CONNECTION_FAILED, _connectionFailedHandler); con.addEventListener(JitsiConnectionEvents.CONNECTION_FAILED, _connectionFailedHandler);
APP.connection = connection = con; APP.connection = connection = con;
@ -907,7 +890,9 @@ export default {
return; return;
} }
if (!this.localAudio && !mute) { const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
if (!localAudio && !mute) {
const maybeShowErrorDialog = error => { const maybeShowErrorDialog = error => {
showUI && APP.store.dispatch(notifyMicError(error)); showUI && APP.store.dispatch(notifyMicError(error));
}; };
@ -961,17 +946,18 @@ export default {
const maybeShowErrorDialog = error => { const maybeShowErrorDialog = error => {
showUI && APP.store.dispatch(notifyCameraError(error)); showUI && APP.store.dispatch(notifyCameraError(error));
}; };
const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
if (mute) { if (mute) {
try { try {
await this.localVideo.setEffect(undefined); await localVideo.setEffect(undefined);
} catch (err) { } catch (err) {
logger.error('Failed to remove the presenter effect', err); logger.error('Failed to remove the presenter effect', err);
maybeShowErrorDialog(err); maybeShowErrorDialog(err);
} }
} else { } else {
try { try {
await this.localVideo.setEffect(await this._createPresenterStreamEffect()); await localVideo.setEffect(await this._createPresenterStreamEffect());
} catch (err) { } catch (err) {
logger.error('Failed to apply the presenter effect', err); logger.error('Failed to apply the presenter effect', err);
maybeShowErrorDialog(err); maybeShowErrorDialog(err);
@ -1013,7 +999,9 @@ export default {
return; return;
} }
if (!this.localVideo && !mute) { const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
if (!localVideo && !mute) {
const maybeShowErrorDialog = error => { const maybeShowErrorDialog = error => {
showUI && APP.store.dispatch(notifyCameraError(error)); showUI && APP.store.dispatch(notifyCameraError(error));
}; };
@ -1347,7 +1335,7 @@ export default {
* @private * @private
*/ */
_setLocalAudioVideoStreams(tracks = []) { _setLocalAudioVideoStreams(tracks = []) {
return tracks.map(track => { const promises = tracks.map(track => {
if (track.isAudioTrack()) { if (track.isAudioTrack()) {
return this.useAudioStream(track); return this.useAudioStream(track);
} else if (track.isVideoTrack()) { } else if (track.isVideoTrack()) {
@ -1356,12 +1344,16 @@ export default {
return this.useVideoStream(track); return this.useVideoStream(track);
} }
logger.error( logger.error('Ignored not an audio nor a video track: ', track);
'Ignored not an audio nor a video track: ', track);
return Promise.resolve(); return Promise.resolve();
}); });
return Promise.allSettled(promises).then(() => {
this._localTracksInitialized = true;
logger.log(`Initialized with ${tracks.length} local tracks`);
});
}, },
_getConferenceOptions() { _getConferenceOptions() {
@ -1383,29 +1375,20 @@ export default {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
_replaceLocalVideoTrackQueue.enqueue(onFinish => { _replaceLocalVideoTrackQueue.enqueue(onFinish => {
const state = APP.store.getState(); const oldTrack = getLocalJitsiVideoTrack(APP.store.getState());
// When the prejoin page is displayed localVideo is not set logger.debug(`useVideoStream: Replacing ${oldTrack} with ${newTrack}`);
// so just replace the video track from the store with the new one.
if (isPrejoinPageVisible(state)) {
const oldTrack = getLocalJitsiVideoTrack(state);
logger.debug(`useVideoStream on the prejoin screen: Replacing ${oldTrack} with ${newTrack}`); if (oldTrack === newTrack) {
resolve();
onFinish();
return APP.store.dispatch(replaceLocalTrack(oldTrack, newTrack)) return;
.then(resolve)
.catch(error => {
logger.error(`useVideoStream failed on the prejoin screen: ${error}`);
reject(error);
})
.then(onFinish);
} }
logger.debug(`useVideoStream: Replacing ${this.localVideo} with ${newTrack}`);
APP.store.dispatch( APP.store.dispatch(
replaceLocalTrack(this.localVideo, newTrack, room)) replaceLocalTrack(oldTrack, newTrack, room))
.then(() => { .then(() => {
this.localVideo = newTrack;
this._setSharingScreen(newTrack); this._setSharingScreen(newTrack);
this.setVideoMuteStatus(); this.setVideoMuteStatus();
}) })
@ -1455,23 +1438,18 @@ export default {
useAudioStream(newTrack) { useAudioStream(newTrack) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
_replaceLocalAudioTrackQueue.enqueue(onFinish => { _replaceLocalAudioTrackQueue.enqueue(onFinish => {
const state = APP.store.getState(); const oldTrack = getLocalJitsiAudioTrack(APP.store.getState());
// When the prejoin page is displayed localAudio is not set if (oldTrack === newTrack) {
// so just replace the audio track from the store with the new one. resolve();
if (isPrejoinPageVisible(state)) { onFinish();
const oldTrack = getLocalJitsiAudioTrack(state);
return APP.store.dispatch(replaceLocalTrack(oldTrack, newTrack)) return;
.then(resolve)
.catch(reject)
.then(onFinish);
} }
APP.store.dispatch( APP.store.dispatch(
replaceLocalTrack(this.localAudio, newTrack, room)) replaceLocalTrack(oldTrack, newTrack, room))
.then(() => { .then(() => {
this.localAudio = newTrack;
this.setAudioMuteStatus(this.isLocalAudioMuted()); this.setAudioMuteStatus(this.isLocalAudioMuted());
}) })
.then(resolve) .then(resolve)
@ -1546,7 +1524,9 @@ export default {
// If system audio was also shared stop the AudioMixerEffect and dispose of the desktop audio track. // If system audio was also shared stop the AudioMixerEffect and dispose of the desktop audio track.
if (this._mixerEffect) { if (this._mixerEffect) {
await this.localAudio.setEffect(undefined); const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
await localAudio.setEffect(undefined);
await this._desktopAudioStream.dispose(); await this._desktopAudioStream.dispose();
this._mixerEffect = undefined; this._mixerEffect = undefined;
this._desktopAudioStream = undefined; this._desktopAudioStream = undefined;
@ -1772,7 +1752,8 @@ export default {
// Create a new presenter track and apply the presenter effect. // Create a new presenter track and apply the presenter effect.
if (!this.localPresenterVideo && !mute) { if (!this.localPresenterVideo && !mute) {
const { height, width } = this.localVideo.track.getSettings() ?? this.localVideo.track.getConstraints(); const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
const { height, width } = localVideo.track.getSettings() ?? localVideo.track.getConstraints();
const isPortrait = height >= width; const isPortrait = height >= width;
const DESKTOP_STREAM_CAP = 720; const DESKTOP_STREAM_CAP = 720;
@ -1801,7 +1782,7 @@ export default {
// Apply the constraints on the desktop track. // Apply the constraints on the desktop track.
try { try {
await this.localVideo.track.applyConstraints(desktopResizeConstraints); await localVideo.track.applyConstraints(desktopResizeConstraints);
} catch (err) { } catch (err) {
logger.error('Failed to apply constraints on the desktop stream for presenter mode', err); logger.error('Failed to apply constraints on the desktop stream for presenter mode', err);
@ -1809,7 +1790,7 @@ export default {
} }
} }
const trackHeight = resizeDesktopStream const trackHeight = resizeDesktopStream
? this.localVideo.track.getSettings().height ?? DESKTOP_STREAM_CAP ? localVideo.track.getSettings().height ?? DESKTOP_STREAM_CAP
: height; : height;
let effect; let effect;
@ -1824,7 +1805,7 @@ export default {
// Replace the desktop track on the peerconnection. // Replace the desktop track on the peerconnection.
try { try {
await this.localVideo.setEffect(effect); await localVideo.setEffect(effect);
APP.store.dispatch(setVideoMuted(mute, MEDIA_TYPE.PRESENTER)); APP.store.dispatch(setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
this.setVideoMuteStatus(); this.setVideoMuteStatus();
} catch (err) { } catch (err) {
@ -1880,12 +1861,14 @@ export default {
} }
if (this._desktopAudioStream) { if (this._desktopAudioStream) {
const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
// If there is a localAudio stream, mix in the desktop audio stream captured by the screen sharing // If there is a localAudio stream, mix in the desktop audio stream captured by the screen sharing
// api. // api.
if (this.localAudio) { if (localAudio) {
this._mixerEffect = new AudioMixerEffect(this._desktopAudioStream); this._mixerEffect = new AudioMixerEffect(this._desktopAudioStream);
await this.localAudio.setEffect(this._mixerEffect); await localAudio.setEffect(this._mixerEffect);
} else { } else {
// If no local stream is present ( i.e. no input audio devices) we use the screen share audio // If no local stream is present ( i.e. no input audio devices) we use the screen share audio
// stream as we would use a regular stream. // stream as we would use a regular stream.
@ -2066,10 +2049,10 @@ export default {
}); });
room.on(JitsiConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED, (id, lvl) => { room.on(JitsiConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED, (id, lvl) => {
const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
let newLvl = lvl; let newLvl = lvl;
if (this.isLocalId(id) if (this.isLocalId(id) && localAudio?.isMuted()) {
&& this.localAudio && this.localAudio.isMuted()) {
newLvl = 0; newLvl = 0;
} }
@ -2311,6 +2294,7 @@ export default {
APP.UI.addListener( APP.UI.addListener(
UIEvents.VIDEO_DEVICE_CHANGED, UIEvents.VIDEO_DEVICE_CHANGED,
cameraDeviceId => { cameraDeviceId => {
const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
const videoWasMuted = this.isLocalVideoMuted(); const videoWasMuted = this.isLocalVideoMuted();
sendAnalytics(createDeviceChangedEvent('video', 'input')); sendAnalytics(createDeviceChangedEvent('video', 'input'));
@ -2318,7 +2302,7 @@ export default {
// If both screenshare and video are in progress, restart the // If both screenshare and video are in progress, restart the
// presenter mode with the new camera device. // presenter mode with the new camera device.
if (this.isSharingScreen && !videoWasMuted) { if (this.isSharingScreen && !videoWasMuted) {
const { height } = this.localVideo.track.getSettings(); const { height } = localVideo.track.getSettings();
// dispose the existing presenter track and create a new // dispose the existing presenter track and create a new
// camera track. // camera track.
@ -2327,7 +2311,7 @@ export default {
this.localPresenterVideo = null; this.localPresenterVideo = null;
return this._createPresenterStreamEffect(height, cameraDeviceId) return this._createPresenterStreamEffect(height, cameraDeviceId)
.then(effect => this.localVideo.setEffect(effect)) .then(effect => localVideo.setEffect(effect))
.then(() => { .then(() => {
this.setVideoMuteStatus(); this.setVideoMuteStatus();
logger.log('Switched local video device while screen sharing and the video is unmuted'); logger.log('Switched local video device while screen sharing and the video is unmuted');
@ -2340,7 +2324,7 @@ export default {
// that can be applied on un-mute. // that can be applied on un-mute.
} else if (this.isSharingScreen && videoWasMuted) { } else if (this.isSharingScreen && videoWasMuted) {
logger.log('Switched local video device: while screen sharing and the video is muted'); logger.log('Switched local video device: while screen sharing and the video is muted');
const { height } = this.localVideo.track.getSettings(); const { height } = localVideo.track.getSettings();
this._updateVideoDeviceId(); this._updateVideoDeviceId();
@ -2426,13 +2410,15 @@ export default {
return this.useAudioStream(stream); return this.useAudioStream(stream);
}) })
.then(() => { .then(() => {
if (this.localAudio && hasDefaultMicChanged) { const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
if (localAudio && hasDefaultMicChanged) {
// workaround for the default device to be shown as selected in the // workaround for the default device to be shown as selected in the
// settings even when the real device id was passed to gUM because of the // settings even when the real device id was passed to gUM because of the
// above mentioned chrome bug. // above mentioned chrome bug.
this.localAudio._realDeviceId = this.localAudio.deviceId = 'default'; localAudio._realDeviceId = localAudio.deviceId = 'default';
} }
logger.log(`switched local audio device: ${this.localAudio?.getDeviceId()}`); logger.log(`switched local audio device: ${localAudio?.getDeviceId()}`);
this._updateAudioDeviceId(); this._updateAudioDeviceId();
}) })
@ -2498,9 +2484,6 @@ export default {
JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED, JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED,
this.deviceChangeListener); this.deviceChangeListener);
} }
this.localVideo = null;
this.localAudio = null;
}, },
/** /**
@ -2563,10 +2546,11 @@ export default {
* @private * @private
*/ */
_updateVideoDeviceId() { _updateVideoDeviceId() {
if (this.localVideo const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
&& this.localVideo.videoType === 'camera') {
if (localVideo && localVideo.videoType === 'camera') {
APP.store.dispatch(updateSettings({ APP.store.dispatch(updateSettings({
cameraDeviceId: this.localVideo.getDeviceId() cameraDeviceId: localVideo.getDeviceId()
})); }));
} }
@ -2584,9 +2568,11 @@ export default {
* @private * @private
*/ */
_updateAudioDeviceId() { _updateAudioDeviceId() {
if (this.localAudio) { const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
if (localAudio) {
APP.store.dispatch(updateSettings({ APP.store.dispatch(updateSettings({
micDeviceId: this.localAudio.getDeviceId() micDeviceId: localAudio.getDeviceId()
})); }));
} }
}, },
@ -2600,6 +2586,8 @@ export default {
*/ */
_onDeviceListChanged(devices) { _onDeviceListChanged(devices) {
const oldDevices = APP.store.getState()['features/base/devices'].availableDevices; const oldDevices = APP.store.getState()['features/base/devices'].availableDevices;
const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
APP.store.dispatch(updateDeviceList(devices)); APP.store.dispatch(updateDeviceList(devices));
@ -2607,8 +2595,8 @@ export default {
= mediaDeviceHelper.getNewMediaDevicesAfterDeviceListChanged( = mediaDeviceHelper.getNewMediaDevicesAfterDeviceListChanged(
devices, devices,
this.isSharingScreen, this.isSharingScreen,
this.localVideo, localVideo,
this.localAudio); localAudio);
const promises = []; const promises = [];
const audioWasMuted = this.isLocalAudioMuted(); const audioWasMuted = this.isLocalAudioMuted();
const videoWasMuted = this.isLocalVideoMuted(); const videoWasMuted = this.isLocalVideoMuted();
@ -2631,12 +2619,12 @@ export default {
// simpler): // simpler):
// If the default device is changed we need to first stop the local streams and then call GUM. Otherwise GUM // If the default device is changed we need to first stop the local streams and then call GUM. Otherwise GUM
// will return a stream using the old default device. // will return a stream using the old default device.
if (requestedInput.audio && this.localAudio) { if (requestedInput.audio && localAudio) {
this.localAudio.stopStream(); localAudio.stopStream();
} }
if (requestedInput.video && this.localVideo) { if (requestedInput.video && localVideo) {
this.localVideo.stopStream(); localVideo.stopStream();
} }
// Let's handle unknown/non-preferred devices // Let's handle unknown/non-preferred devices
@ -2716,15 +2704,16 @@ export default {
= mediaType === 'audio' = mediaType === 'audio'
? this.useAudioStream.bind(this) ? this.useAudioStream.bind(this)
: this.useVideoStream.bind(this); : this.useVideoStream.bind(this);
const track = tracks.find(t => t.getType() === mediaType) || null;
// Use the new stream or null if we failed to obtain it. // Use the new stream or null if we failed to obtain it.
return useStream(tracks.find(track => track.getType() === mediaType) || null) return useStream(track)
.then(() => { .then(() => {
if (this.localAudio && hasDefaultMicChanged) { if (track?.isAudioTrack() && hasDefaultMicChanged) {
// workaround for the default device to be shown as selected in the // workaround for the default device to be shown as selected in the
// settings even when the real device id was passed to gUM because of // settings even when the real device id was passed to gUM because of
// the above mentioned chrome bug. // the above mentioned chrome bug.
this.localAudio._realDeviceId = this.localAudio.deviceId = 'default'; track._realDeviceId = track.deviceId = 'default';
} }
mediaType === 'audio' mediaType === 'audio'
? this._updateAudioDeviceId() ? this._updateAudioDeviceId()
@ -2764,14 +2753,13 @@ export default {
* Determines whether or not the audio button should be enabled. * Determines whether or not the audio button should be enabled.
*/ */
updateAudioIconEnabled() { updateAudioIconEnabled() {
const audioMediaDevices const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
= APP.store.getState()['features/base/devices'].availableDevices.audioInput; const audioMediaDevices = APP.store.getState()['features/base/devices'].availableDevices.audioInput;
const audioDeviceCount const audioDeviceCount = audioMediaDevices ? audioMediaDevices.length : 0;
= audioMediaDevices ? audioMediaDevices.length : 0;
// The audio functionality is considered available if there are any // The audio functionality is considered available if there are any
// audio devices detected or if the local audio stream already exists. // audio devices detected or if the local audio stream already exists.
const available = audioDeviceCount > 0 || Boolean(this.localAudio); const available = audioDeviceCount > 0 || Boolean(localAudio);
APP.store.dispatch(setAudioAvailable(available)); APP.store.dispatch(setAudioAvailable(available));
APP.API.notifyAudioAvailabilityChanged(available); APP.API.notifyAudioAvailabilityChanged(available);
@ -2785,13 +2773,14 @@ export default {
= APP.store.getState()['features/base/devices'].availableDevices.videoInput; = APP.store.getState()['features/base/devices'].availableDevices.videoInput;
const videoDeviceCount const videoDeviceCount
= videoMediaDevices ? videoMediaDevices.length : 0; = videoMediaDevices ? videoMediaDevices.length : 0;
const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
// The video functionality is considered available if there are any // The video functionality is considered available if there are any
// video devices detected or if there is local video stream already // video devices detected or if there is local video stream already
// active which could be either screensharing stream or a video track // active which could be either screensharing stream or a video track
// created before the permissions were rejected (through browser // created before the permissions were rejected (through browser
// config). // config).
const available = videoDeviceCount > 0 || Boolean(this.localVideo); const available = videoDeviceCount > 0 || Boolean(localVideo);
APP.store.dispatch(setVideoAvailable(available)); APP.store.dispatch(setVideoAvailable(available));
APP.API.notifyVideoAvailabilityChanged(available); APP.API.notifyVideoAvailabilityChanged(available);
@ -2809,8 +2798,6 @@ export default {
APP.store.dispatch(destroyLocalTracks()); APP.store.dispatch(destroyLocalTracks());
this._localTracksInitialized = false; this._localTracksInitialized = false;
this.localVideo = null;
this.localAudio = null;
// Remove unnecessary event listeners from firing callbacks. // Remove unnecessary event listeners from firing callbacks.
if (this.deviceChangeListener) { if (this.deviceChangeListener) {