fix: Reuse the existing JitsiLocalTrack on presenter unmute
parent 4d0cbff5a1
commit 0b25e62c5c

conference.js
@@ -815,7 +815,7 @@ export default {
      * @param {boolean} [showUI] when set to false will not display any error
      * dialogs in case of media permissions error.
      */
-    async mutePresenterVideo(mute, showUI = true) {
+    async mutePresenter(mute, showUI = true) {
         const maybeShowErrorDialog = error => {
             showUI && APP.store.dispatch(notifyCameraError(error));
         };
@@ -823,33 +823,17 @@ export default {
         if (mute) {
             try {
                 await this.localVideo.setEffect(undefined);
-                APP.store.dispatch(
-                    setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
             } catch (err) {
-                logger.error('Failed to mute the Presenter video');
+                logger.error('Failed to remove the presenter effect', err);
+                maybeShowErrorDialog(err);
+            }
+        } else {
+            try {
+                await this.localVideo.setEffect(await this._createPresenterStreamEffect());
+            } catch (err) {
+                logger.error('Failed to apply the presenter effect', err);
+                maybeShowErrorDialog(err);
             }
-
-            return;
-        }
-        const { height } = this.localVideo.track.getSettings();
-        const defaultCamera
-            = getUserSelectedCameraDeviceId(APP.store.getState());
-        let effect;
-
-        try {
-            effect = await this._createPresenterStreamEffect(height,
-                defaultCamera);
-        } catch (err) {
-            logger.error('Failed to unmute Presenter Video');
-            maybeShowErrorDialog(err);
-
-            return;
-        }
-        try {
-            await this.localVideo.setEffect(effect);
-            APP.store.dispatch(setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
-        } catch (err) {
-            logger.error('Failed to apply the Presenter effect', err);
-        }
         }
     },
 
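Note: after this hunk, mutePresenter() (renamed from mutePresenterVideo()) only attaches or detaches the presenter effect on the existing desktop track; the Redux dispatch of setVideoMuted(mute, MEDIA_TYPE.PRESENTER) moves into the new _mutePresenterVideo() added further below, and the tracks middleware (last hunk) calls mutePresenter() when a presenter track's mute state changes. A minimal sketch of the intended toggle behaviour, where fakeLocalVideo, createEffect and mutePresenterSketch are invented stand-ins, not names from the patch:

    // Hypothetical stand-in for the JitsiLocalTrack carrying the desktop stream.
    const fakeLocalVideo = {
        effect: undefined,
        async setEffect(effect) {
            // The real track re-renders its stream through the effect here.
            this.effect = effect;
        }
    };

    // Simplified shape of the new mutePresenter(): toggle the effect only.
    async function mutePresenterSketch(mute, createEffect) {
        if (mute) {
            await fakeLocalVideo.setEffect(undefined);
        } else {
            await fakeLocalVideo.setEffect(await createEffect());
        }
    }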
@@ -868,7 +852,7 @@ export default {
         }
 
         if (this.isSharingScreen) {
-            return this.mutePresenterVideo(mute);
+            return this._mutePresenterVideo(mute);
         }
 
         // If not ready to modify track's state yet adjust the base/media
@@ -1612,31 +1596,63 @@ export default {
      * @return {Promise<JitsiStreamPresenterEffect>} - A promise resolved with
      * {@link JitsiStreamPresenterEffect} if it succeeds.
      */
-    async _createPresenterStreamEffect(height, cameraDeviceId = null) {
-        let presenterTrack;
-
-        try {
-            presenterTrack = await createLocalPresenterTrack({
-                cameraDeviceId
-            },
-            height);
-        } catch (err) {
-            logger.error('Failed to create a camera track for presenter', err);
-
-            return;
-        }
-        this.localPresenterVideo = presenterTrack;
-        try {
-            const effect = await createPresenterEffect(presenterTrack.stream);
-
-            APP.store.dispatch(trackAdded(this.localPresenterVideo));
-
-            return effect;
-        } catch (err) {
-            logger.error('Failed to create the presenter effect', err);
-            APP.store.dispatch(
-                setVideoMuted(true, MEDIA_TYPE.PRESENTER));
-            APP.store.dispatch(notifyCameraError(err));
+    async _createPresenterStreamEffect(height = null, cameraDeviceId = null) {
+        if (!this.localPresenterVideo) {
+            try {
+                this.localPresenterVideo = await createLocalPresenterTrack({ cameraDeviceId }, height);
+            } catch (err) {
+                logger.error('Failed to create a camera track for presenter', err);
+
+                return;
+            }
+            APP.store.dispatch(trackAdded(this.localPresenterVideo));
+        }
+        try {
+            const effect = await createPresenterEffect(this.localPresenterVideo.stream);
+
+            return effect;
+        } catch (err) {
+            logger.error('Failed to create the presenter effect', err);
+        }
+    },
+
+    /**
+     * Tries to turn the presenter video track on or off. If a presenter track
+     * doesn't exist, a new video track is created.
+     *
+     * @param mute - true for mute and false for unmute.
+     *
+     * @private
+     */
+    async _mutePresenterVideo(mute) {
+        const maybeShowErrorDialog = error => {
+            APP.store.dispatch(notifyCameraError(error));
+        };
+
+        if (!this.localPresenterVideo && !mute) {
+            // create a new presenter track and apply the presenter effect.
+            const { height } = this.localVideo.track.getSettings();
+            const defaultCamera
+                = getUserSelectedCameraDeviceId(APP.store.getState());
+            let effect;
+
+            try {
+                effect = await this._createPresenterStreamEffect(height,
+                    defaultCamera);
+            } catch (err) {
+                logger.error('Failed to unmute Presenter Video');
+                maybeShowErrorDialog(err);
+
+                return;
+            }
+            try {
+                await this.localVideo.setEffect(effect);
+                APP.store.dispatch(setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
+            } catch (err) {
+                logger.error('Failed to apply the Presenter effect', err);
+            }
+        } else {
+            APP.store.dispatch(setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
         }
     },
 
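Note: _createPresenterStreamEffect() now caches the camera track on this.localPresenterVideo and only calls createLocalPresenterTrack() when no track exists yet, so repeated presenter unmutes reuse the existing JitsiLocalTrack instead of opening the camera again. A self-contained sketch of that reuse pattern; PresenterEffectCache and its constructor arguments are illustrative names, not part of the patch:

    // Assumes createTrack returns an object exposing stream and dispose(),
    // like a JitsiLocalTrack, and createEffect builds the presenter effect from a stream.
    class PresenterEffectCache {
        constructor(createTrack, createEffect) {
            this._createTrack = createTrack;   // e.g. createLocalPresenterTrack
            this._createEffect = createEffect; // e.g. createPresenterEffect
            this.localPresenterVideo = null;
        }

        async getEffect(height, cameraDeviceId) {
            // The camera track is created once; later unmutes reuse it.
            if (!this.localPresenterVideo) {
                this.localPresenterVideo = await this._createTrack({ cameraDeviceId }, height);
            }

            return this._createEffect(this.localPresenterVideo.stream);
        }
    }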
@@ -2108,23 +2124,29 @@ export default {
 
                 // dispose the existing presenter track and create a new
                 // camera track.
-                APP.store.dispatch(setVideoMuted(true, MEDIA_TYPE.PRESENTER));
+                this.localPresenterVideo.dispose();
+                this.localPresenterVideo = null;
 
                 return this._createPresenterStreamEffect(height, cameraDeviceId)
                     .then(effect => this.localVideo.setEffect(effect))
                     .then(() => {
-                        muteLocalVideo(false);
                         this.setVideoMuteStatus(false);
                         logger.log('switched local video device');
                         this._updateVideoDeviceId();
                     })
                     .catch(err => APP.store.dispatch(notifyCameraError(err)));
 
-            // If screenshare is in progress but video is muted,
-            // update the default device id for video.
+            // If screenshare is in progress but video is muted, update the default device
+            // id for video, dispose the existing presenter track and create a new effect
+            // that can be applied on un-mute.
             } else if (this.isSharingScreen && videoWasMuted) {
                 logger.log('switched local video device');
+                const { height } = this.localVideo.track.getSettings();
+
                 this._updateVideoDeviceId();
+                this.localPresenterVideo.dispose();
+                this.localPresenterVideo = null;
+                this._createPresenterStreamEffect(height, cameraDeviceId);
 
             // if there is only video, switch to the new camera stream.
             } else {
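Note: switching cameras while presenting is the one case where the cached track must not be reused, so both branches above dispose this.localPresenterVideo and rebuild the effect for the new device. Expressed against the PresenterEffectCache sketch above (switchPresenterCamera is an illustrative helper, not code from the patch; it assumes the cached track exposes dispose()):

    async function switchPresenterCamera(cache, localVideo, height, cameraDeviceId) {
        // Drop the cached presenter track so the next effect is built from the new camera.
        cache.localPresenterVideo.dispose();
        cache.localPresenterVideo = null;

        const effect = await cache.getEffect(height, cameraDeviceId);

        return localVideo.setEffect(effect);
    }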
@@ -2379,6 +2401,13 @@ export default {
                 cameraDeviceId: this.localVideo.getDeviceId()
             }));
         }
+
+        // If screenshare is in progress, get the device id from the presenter track.
+        if (this.localPresenterVideo) {
+            APP.store.dispatch(updateSettings({
+                cameraDeviceId: this.localPresenterVideo.getDeviceId()
+            }));
+        }
     },
 
     /**
@@ -10931,8 +10931,8 @@
       }
     },
     "lib-jitsi-meet": {
-      "version": "github:jitsi/lib-jitsi-meet#4a87f342858963c36bb64a8a0e89d8d7e6e06060",
-      "from": "github:jitsi/lib-jitsi-meet#4a87f342858963c36bb64a8a0e89d8d7e6e06060",
+      "version": "github:jitsi/lib-jitsi-meet#ea2114ca92b80bf27a04c9e3c124f80eb91c924f",
+      "from": "github:jitsi/lib-jitsi-meet#ea2114ca92b80bf27a04c9e3c124f80eb91c924f",
       "requires": {
         "@jitsi/sdp-interop": "0.1.14",
         "@jitsi/sdp-simulcast": "0.2.2",
@@ -57,7 +57,7 @@
     "js-utils": "github:jitsi/js-utils#192b1c996e8c05530eb1f19e82a31069c3021e31",
     "jsrsasign": "8.0.12",
     "jwt-decode": "2.2.0",
-    "lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#4a87f342858963c36bb64a8a0e89d8d7e6e06060",
+    "lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#ea2114ca92b80bf27a04c9e3c124f80eb91c924f",
     "libflacjs": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
     "lodash": "4.17.13",
     "moment": "2.19.4",
@@ -35,7 +35,7 @@ export async function createLocalPresenterTrack(options, desktopHeight) {
         video: {
             aspectRatio: 4 / 3,
             height: {
-                exact: result
+                ideal: result
             }
         }
     };
@@ -135,7 +135,12 @@ MiddlewareRegistry.register(store => next => action => {
         const isVideoTrack = jitsiTrack.type !== MEDIA_TYPE.AUDIO;
 
         if (isVideoTrack) {
-            if (jitsiTrack.isLocal()) {
+            if (jitsiTrack.type === MEDIA_TYPE.PRESENTER) {
+                APP.conference.mutePresenter(muted);
+            }
+
+            // Make sure we change the video mute state only for camera tracks.
+            if (jitsiTrack.isLocal() && jitsiTrack.videoType !== 'desktop') {
                 APP.conference.setVideoMuteStatus(muted);
             } else {
                 APP.UI.setVideoMuted(participantID, muted);
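Note: the middleware now forwards mute changes of PRESENTER tracks to APP.conference.mutePresenter() and updates the camera mute state only for local, non-desktop tracks. Restated as a standalone function with a plain-object track; the object shape and the 'presenter' string (mirroring MEDIA_TYPE.PRESENTER) are assumed for illustration:

    function routeVideoMuteChange(track, muted, conference, ui, participantID) {
        if (track.type === 'presenter') {
            conference.mutePresenter(muted);
        }

        // Only camera tracks drive the local video-mute state.
        if (track.isLocal && track.videoType !== 'desktop') {
            conference.setVideoMuteStatus(muted);
        } else {
            ui.setVideoMuted(participantID, muted);
        }
    }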