feat(presenter): add Presenter Mode

- Add the ability to share the camera as a picture-in-picture ("PiP") overlay while a screenshare is in progress.
- Add a method for creating a local presenter track.
- Ensure isLocalVideoTrackMuted returns the correct mute state when only a screenshare is present.
- Ensure we fetch the updated size of the shared window before painting it onto the canvas.
- Check whether the shared window has been resized.
Jaya Allamsetty 2019-11-26 05:57:03 -05:00 committed by Saúl Ibarra Corretgé
parent db6a2673de
commit 0a64bf2068
12 changed files with 578 additions and 59 deletions

conference.js

@ -93,10 +93,15 @@ import {
participantRoleChanged,
participantUpdated
} from './react/features/base/participants';
import { updateSettings } from './react/features/base/settings';
import {
getUserSelectedCameraDeviceId,
updateSettings
} from './react/features/base/settings';
import {
createLocalPresenterTrack,
createLocalTracksF,
destroyLocalTracks,
isLocalVideoTrackMuted,
isLocalTrackMuted,
isUserInteractionRequiredForUnmute,
replaceLocalTrack,
@ -113,6 +118,7 @@ import {
import { mediaPermissionPromptVisibilityChanged } from './react/features/overlay';
import { suspendDetected } from './react/features/power-monitor';
import { setSharedVideoStatus } from './react/features/shared-video';
import { createPresenterEffect } from './react/features/stream-effects/presenter';
import { endpointMessageReceived } from './react/features/subtitles';
const logger = require('jitsi-meet-logger').getLogger(__filename);
@ -437,6 +443,11 @@ export default {
*/
localAudio: null,
/**
* The local presenter video track (if any).
*/
localPresenterVideo: null,
/**
* The local video track (if any).
* FIXME tracks from redux store should be the single source of truth, but
@ -722,9 +733,8 @@ export default {
isLocalVideoMuted() {
// If the tracks are not ready, read from base/media state
return this._localTracksInitialized
? isLocalTrackMuted(
APP.store.getState()['features/base/tracks'],
MEDIA_TYPE.VIDEO)
? isLocalVideoTrackMuted(
APP.store.getState()['features/base/tracks'])
: isVideoMutedByUser(APP.store);
},
@ -798,6 +808,55 @@ export default {
this.muteAudio(!this.isLocalAudioMuted(), showUI);
},
/**
* Simulates toolbar button click for presenter video mute. Used by
* shortcuts and API.
* @param {boolean} mute - True to mute, false to unmute.
* @param {boolean} [showUI] when set to false will not display any error
* dialogs in case of media permissions error.
*/
async mutePresenterVideo(mute, showUI = true) {
const maybeShowErrorDialog = error => {
showUI && APP.store.dispatch(notifyCameraError(error));
};
if (mute) {
try {
await this.localVideo.setEffect(undefined);
APP.store.dispatch(
setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
this._untoggleScreenSharing
= this._turnScreenSharingOff.bind(this, false);
} catch (err) {
logger.error('Failed to mute the presenter video', err);
}
return;
}
const { height } = this.localVideo.track.getSettings();
const defaultCamera
= getUserSelectedCameraDeviceId(APP.store.getState());
let effect;
try {
effect = await this._createPresenterStreamEffect(height,
defaultCamera);
} catch (err) {
logger.error('Failed to unmute the presenter video', err);
maybeShowErrorDialog(err);
return;
}
try {
await this.localVideo.setEffect(effect);
APP.store.dispatch(setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
this._untoggleScreenSharing
= this._turnScreenSharingOff.bind(this, true);
} catch (err) {
logger.error('Failed to apply the presenter effect', err);
}
},
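Note that presenter "mute" above is an effect swap rather than a real track operation: muting strips the PiP effect off the desktop track, and unmuting rebuilds the effect at the desktop's current height. A minimal sketch of that pattern, using the names from the method above (the standalone helper is illustrative, not part of the commit):

// Sketch: presenter mute/unmute as an effect swap on the desktop track.
async function setPresenterMuted(conference, mute) {
    if (mute) {
        // Removing the effect leaves the plain desktop stream in place.
        await conference.localVideo.setEffect(undefined);

        return;
    }

    // Rebuild the effect sized to the desktop currently being shared.
    const { height } = conference.localVideo.track.getSettings();
    const effect = await conference._createPresenterStreamEffect(height);

    await conference.localVideo.setEffect(effect);
}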
/**
* Simulates toolbar button click for video mute. Used by shortcuts and API.
* @param {boolean} mute - True to mute, false to unmute.
@ -812,6 +871,10 @@ export default {
return;
}
if (this.isSharingScreen) {
return this.mutePresenterVideo(mute);
}
// If not ready to modify track's state yet adjust the base/media
if (!this._localTracksInitialized) {
// This will only modify base/media.video.muted which is then synced
@ -1351,7 +1414,7 @@ export default {
* in case it fails.
* @private
*/
_turnScreenSharingOff(didHaveVideo, wasVideoMuted) {
_turnScreenSharingOff(didHaveVideo) {
this._untoggleScreenSharing = null;
this.videoSwitchInProgress = true;
const { receiver } = APP.remoteControl;
@ -1369,13 +1432,7 @@ export default {
.then(([ stream ]) => this.useVideoStream(stream))
.then(() => {
sendAnalytics(createScreenSharingEvent('stopped'));
logger.log('Screen sharing stopped, switching to video.');
if (!this.localVideo && wasVideoMuted) {
return Promise.reject('No local video to be muted!');
} else if (wasVideoMuted && this.localVideo) {
return this.localVideo.mute();
}
logger.log('Screen sharing stopped.');
})
.catch(error => {
logger.error('failed to switch back to local video', error);
@ -1390,6 +1447,16 @@ export default {
promise = this.useVideoStream(null);
}
// mute the presenter track if it exists.
if (this.localPresenterVideo) {
APP.store.dispatch(
setVideoMuted(true, MEDIA_TYPE.PRESENTER));
this.localPresenterVideo.dispose();
APP.store.dispatch(
trackRemoved(this.localPresenterVideo));
this.localPresenterVideo = null;
}
return promise.then(
() => {
this.videoSwitchInProgress = false;
@ -1415,7 +1482,7 @@ export default {
* 'window', etc.).
* @return {Promise.<T>}
*/
toggleScreenSharing(toggle = !this._untoggleScreenSharing, options = {}) {
async toggleScreenSharing(toggle = !this._untoggleScreenSharing, options = {}) {
if (this.videoSwitchInProgress) {
return Promise.reject('Switch in progress.');
}
@ -1429,7 +1496,41 @@ export default {
}
if (toggle) {
return this._switchToScreenSharing(options);
const wasVideoMuted = this.isLocalVideoMuted();
try {
await this._switchToScreenSharing(options);
} catch (err) {
logger.error('Failed to switch to screensharing', err);
return;
}
if (wasVideoMuted) {
return;
}
const { height } = this.localVideo.track.getSettings();
const defaultCamera
= getUserSelectedCameraDeviceId(APP.store.getState());
let effect;
try {
effect = await this._createPresenterStreamEffect(
height, defaultCamera);
} catch (err) {
logger.error('Failed to create the presenter effect', err);
return;
}
try {
await this.localVideo.setEffect(effect);
muteLocalVideo(false);
return;
} catch (err) {
logger.error('Failed to apply the presenter effect', err);
return;
}
}
return this._untoggleScreenSharing
@ -1455,7 +1556,6 @@ export default {
let externalInstallation = false;
let DSExternalInstallationInProgress = false;
const didHaveVideo = Boolean(this.localVideo);
const wasVideoMuted = this.isLocalVideoMuted();
const getDesktopStreamPromise = options.desktopStream
? Promise.resolve([ options.desktopStream ])
@ -1506,8 +1606,7 @@ export default {
// Stores the "untoggle" handler which remembers whether was
// there any video before and whether was it muted.
this._untoggleScreenSharing
= this._turnScreenSharingOff
.bind(this, didHaveVideo, wasVideoMuted);
= this._turnScreenSharingOff.bind(this, didHaveVideo);
desktopStream.on(
JitsiTrackEvents.LOCAL_TRACK_STOPPED,
() => {
@ -1532,6 +1631,45 @@ export default {
});
},
/**
* Creates a new instance of presenter effect. A new video track is created
* using a new set of constraints calculated from the height of the
* desktop that is currently being shared.
*
* @param {number} height - The height of the desktop stream that is
* currently being shared.
* @param {string} cameraDeviceId - The device id of the camera to be used.
* @return {Promise<JitsiStreamPresenterEffect>} - A promise resolved with
* {@link JitsiStreamPresenterEffect} if it succeeds.
*/
async _createPresenterStreamEffect(height, cameraDeviceId = null) {
let presenterTrack;
try {
presenterTrack = await createLocalPresenterTrack({
cameraDeviceId
},
height);
} catch (err) {
logger.error('Failed to create a camera track for presenter', err);
return;
}
this.localPresenterVideo = presenterTrack;
try {
const effect = await createPresenterEffect(presenterTrack.stream);
APP.store.dispatch(trackAdded(this.localPresenterVideo));
return effect;
} catch (err) {
logger.error('Failed to create the presenter effect', err);
APP.store.dispatch(
setVideoMuted(true, MEDIA_TYPE.PRESENTER));
APP.store.dispatch(notifyCameraError(err));
}
},
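One detail worth noting: on a camera failure the method above logs and resolves with undefined rather than rejecting, so callers should guard before applying the result. A usage sketch (localVideo stands in for the local desktop track):

// Sketch: create and apply the presenter effect, tolerating failure.
const { height } = localVideo.track.getSettings();
const effect = await this._createPresenterStreamEffect(height, cameraDeviceId);

// A failed camera track creation resolves with undefined; applying
// undefined as an effect would strip the existing one.
if (effect) {
    await localVideo.setEffect(effect);
}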
/**
* Tries to switch to the screensharing mode by disposing camera stream and
* replacing it with a desktop one.
@ -1992,36 +2130,56 @@ export default {
const videoWasMuted = this.isLocalVideoMuted();
sendAnalytics(createDeviceChangedEvent('video', 'input'));
createLocalTracksF({
devices: [ 'video' ],
cameraDeviceId,
micDeviceId: null
})
.then(([ stream ]) => {
// if we are in audio only mode or video was muted before
// changing device, then mute
if (this.isAudioOnly() || videoWasMuted) {
return stream.mute()
.then(() => stream);
}
return stream;
})
.then(stream => {
// if we are screen sharing we do not want to stop it
if (this.isSharingScreen) {
return Promise.resolve();
}
// If both screenshare and video are in progress, restart the
// presenter mode with the new camera device.
if (this.isSharingScreen && !videoWasMuted) {
const { height } = this.localVideo.track.getSettings();
return this.useVideoStream(stream);
})
.then(() => {
// dispose the existing presenter track and create a new
// camera track.
APP.store.dispatch(setVideoMuted(true, MEDIA_TYPE.PRESENTER));
return this._createPresenterStreamEffect(height, cameraDeviceId)
.then(effect => this.localVideo.setEffect(effect))
.then(() => {
muteLocalVideo(false);
this.setVideoMuteStatus(false);
logger.log('switched local video device');
this._updateVideoDeviceId();
})
.catch(err => APP.store.dispatch(notifyCameraError(err)));
// If screenshare is in progress but video is muted,
// update the default device id for video.
} else if (this.isSharingScreen && videoWasMuted) {
logger.log('switched local video device');
this._updateVideoDeviceId();
})
.catch(err => {
APP.store.dispatch(notifyCameraError(err));
});
// if there is only video, switch to the new camera stream.
} else {
createLocalTracksF({
devices: [ 'video' ],
cameraDeviceId,
micDeviceId: null
})
.then(([ stream ]) => {
// if we are in audio only mode or video was muted before
// changing device, then mute
if (this.isAudioOnly() || videoWasMuted) {
return stream.mute()
.then(() => stream);
}
return stream;
})
.then(stream => this.useVideoStream(stream))
.then(() => {
logger.log('switched local video device');
this._updateVideoDeviceId();
})
.catch(err => APP.store.dispatch(notifyCameraError(err)));
}
}
);

react/features/analytics/middleware.js

@ -147,9 +147,10 @@ MiddlewareRegistry.register(store => next => action => {
const state = getState();
const { localTracksDuration } = state['features/analytics'];
if (localTracksDuration.conference.startedTime === -1) {
if (localTracksDuration.conference.startedTime === -1 || action.mediaType === 'presenter') {
// We don't want to track the media duration if the conference is not joined yet because otherwise we won't
// be able to compare them with the conference duration (from conference join to conference leave).
// Also, do not track media duration for presenter tracks.
break;
}
dispatch({

react/features/base/conference/middleware.js

@ -46,6 +46,7 @@ import {
getCurrentConference
} from './functions';
import logger from './logger';
import { MEDIA_TYPE } from '../media';
declare var APP: Object;
@ -589,7 +590,10 @@ function _syncReceiveVideoQuality({ getState }, next, action) {
function _trackAddedOrRemoved(store, next, action) {
const track = action.track;
if (track && track.local) {
// TODO All track swapping should happen here instead of conference.js.
// Since we swap the tracks for the web client in conference.js, ignore
// presenter tracks here and do not add/remove them to/from the conference.
if (track && track.local && track.mediaType !== MEDIA_TYPE.PRESENTER) {
return (
_syncConferenceLocalTracksWithState(store, action)
.then(() => next(action)));

react/features/base/media/actions.js

@ -11,7 +11,11 @@ import {
STORE_VIDEO_TRANSFORM,
TOGGLE_CAMERA_FACING_MODE
} from './actionTypes';
import { CAMERA_FACING_MODE, VIDEO_MUTISM_AUTHORITY } from './constants';
import {
CAMERA_FACING_MODE,
MEDIA_TYPE,
VIDEO_MUTISM_AUTHORITY
} from './constants';
/**
* Action to adjust the availability of the local audio.
@ -89,6 +93,7 @@ export function setVideoAvailable(available: boolean) {
*
* @param {boolean} muted - True if the local video is to be muted or false if
* the local video is to be unmuted.
* @param {MEDIA_TYPE} mediaType - The type of media.
* @param {number} authority - The {@link VIDEO_MUTISM_AUTHORITY} which is
* muting/unmuting the local video.
* @param {boolean} ensureTrack - True if we want to ensure that a new track is
@ -97,6 +102,7 @@ export function setVideoAvailable(available: boolean) {
*/
export function setVideoMuted(
muted: boolean,
mediaType: MEDIA_TYPE = MEDIA_TYPE.VIDEO,
authority: number = VIDEO_MUTISM_AUTHORITY.USER,
ensureTrack: boolean = false) {
return (dispatch: Dispatch<any>, getState: Function) => {
@ -107,6 +113,8 @@ export function setVideoMuted(
return dispatch({
type: SET_VIDEO_MUTED,
authority,
mediaType,
ensureTrack,
muted: newValue
});

react/features/base/media/constants.js

@ -15,6 +15,7 @@ export const CAMERA_FACING_MODE = {
*/
export const MEDIA_TYPE = {
AUDIO: 'audio',
PRESENTER: 'presenter',
VIDEO: 'video'
};

react/features/base/media/middleware.js

@ -17,7 +17,11 @@ import { getPropertyValue } from '../settings';
import { setTrackMuted, TRACK_ADDED } from '../tracks';
import { setAudioMuted, setCameraFacingMode, setVideoMuted } from './actions';
import { CAMERA_FACING_MODE, VIDEO_MUTISM_AUTHORITY } from './constants';
import {
CAMERA_FACING_MODE,
MEDIA_TYPE,
VIDEO_MUTISM_AUTHORITY
} from './constants';
import logger from './logger';
import {
_AUDIO_INITIAL_MEDIA_STATE,
@ -45,7 +49,10 @@ MiddlewareRegistry.register(store => next => action => {
const result = next(action);
const { track } = action;
track.local && _syncTrackMutedState(store, track);
// Don't sync track mute state with the redux store for screenshare
// since video mute state represents local camera mute state only.
track.local && track.videoType !== 'desktop'
&& _syncTrackMutedState(store, track);
return result;
}
@ -72,7 +79,7 @@ function _appStateChanged({ dispatch }, next, action) {
sendAnalytics(createTrackMutedEvent('video', 'background mode', mute));
dispatch(setVideoMuted(mute, VIDEO_MUTISM_AUTHORITY.BACKGROUND));
dispatch(setVideoMuted(mute, MEDIA_TYPE.VIDEO, VIDEO_MUTISM_AUTHORITY.BACKGROUND));
return next(action);
}
@ -94,7 +101,11 @@ function _setAudioOnly({ dispatch }, next, action) {
sendAnalytics(createTrackMutedEvent('video', 'audio-only mode', audioOnly));
dispatch(setVideoMuted(audioOnly, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY, ensureVideoTrack));
// Make sure we mute both the desktop and video tracks.
dispatch(setVideoMuted(
audioOnly, MEDIA_TYPE.VIDEO, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY, ensureVideoTrack));
dispatch(setVideoMuted(
audioOnly, MEDIA_TYPE.PRESENTER, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY, ensureVideoTrack));
return next(action);
}
@ -231,7 +242,9 @@ function _setRoom({ dispatch, getState }, next, action) {
*/
function _syncTrackMutedState({ getState }, track) {
const state = getState()['features/base/media'];
const muted = Boolean(state[track.mediaType].muted);
const mediaType = track.mediaType === MEDIA_TYPE.PRESENTER
? MEDIA_TYPE.VIDEO : track.mediaType;
const muted = Boolean(state[mediaType].muted);
// XXX If muted state of track when it was added is different from our media
// muted state, we need to mute track and explicitly modify 'muted' property

react/features/base/tracks/functions.js

@ -10,6 +10,47 @@ import {
import logger from './logger';
/**
* Creates a local video track for the presenter. The constraints are computed
* based on the height of the desktop that is being shared.
*
* @param {Object} options - The options with which the local presenter track
* is to be created.
* @param {string|null} [options.cameraDeviceId] - Camera device id or
* {@code undefined} to use app's settings.
* @param {number} desktopHeight - The height of the desktop that is being
* shared.
* @returns {Promise<JitsiLocalTrack>}
*/
export async function createLocalPresenterTrack(options, desktopHeight) {
const { cameraDeviceId } = options;
// compute the constraints of the camera track based on the resolution
// of the desktop screen that is being shared.
const cameraHeights = [ 180, 270, 360, 540, 720 ];
const proportion = 4;
const result = cameraHeights.find(
height => (desktopHeight / proportion) < height);
const constraints = {
video: {
aspectRatio: 4 / 3,
height: {
exact: result
}
}
};
const [ videoTrack ] = await JitsiMeetJS.createLocalTracks(
{
cameraDeviceId,
constraints,
devices: [ 'video' ]
});
videoTrack.type = MEDIA_TYPE.PRESENTER;
return videoTrack;
}
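The constraint computation above targets a camera height of roughly a quarter of the shared desktop's height, rounded up to the next supported step. A worked sketch of that selection (the helper name is illustrative, not part of the commit):

// Pick the camera capture height for a given desktop height.
const pickPresenterHeight = desktopHeight => {
    const cameraHeights = [ 180, 270, 360, 540, 720 ];

    return cameraHeights.find(height => (desktopHeight / 4) < height);
};

pickPresenterHeight(720);  // => 270 (720 / 4 = 180; next step up is 270)
pickPresenterHeight(1080); // => 360 (1080 / 4 = 270; next step up is 360)
pickPresenterHeight(2160); // => 720 (2160 / 4 = 540; next step up is 720)
// Note: at desktop heights of 2880 and above nothing matches and the
// 'exact' height constraint ends up undefined.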
/**
* Create local tracks of specific types.
*
@ -53,11 +94,15 @@ export function createLocalTracksF(
const state = store.getState();
const {
constraints,
desktopSharingFrameRate,
firefox_fake_device, // eslint-disable-line camelcase
resolution
} = state['features/base/config'];
const constraints = options.constraints
?? state['features/base/config'].constraints;
// Do not load blur effect if option for ignoring effects is present.
// This is needed when we are creating a video track for presenter mode.
const loadEffectsPromise = state['features/blur'].blurEnabled
? getBlurEffect()
.then(blurEffect => [ blurEffect ])
@ -157,6 +202,18 @@ export function getLocalVideoTrack(tracks) {
return getLocalTrack(tracks, MEDIA_TYPE.VIDEO);
}
/**
* Returns the media type of the local video, presenter or video.
*
* @param {Track[]} tracks - List of all tracks.
* @returns {MEDIA_TYPE}
*/
export function getLocalVideoType(tracks) {
const presenterTrack = getLocalTrack(tracks, MEDIA_TYPE.PRESENTER);
return presenterTrack ? MEDIA_TYPE.PRESENTER : MEDIA_TYPE.VIDEO;
}
/**
* Returns track of specified media type for specified participant id.
*
@ -197,6 +254,29 @@ export function getTracksByMediaType(tracks, mediaType) {
return tracks.filter(t => t.mediaType === mediaType);
}
/**
* Checks if the local video track in the given set of tracks is muted.
*
* @param {Track[]} tracks - List of all tracks.
* @returns {boolean}
*/
export function isLocalVideoTrackMuted(tracks) {
const presenterTrack = getLocalTrack(tracks, MEDIA_TYPE.PRESENTER);
const videoTrack = getLocalTrack(tracks, MEDIA_TYPE.VIDEO);
// Make sure we check the mute status of only camera tracks, i.e.,
// presenter track when it exists, camera track when the presenter
// track doesn't exist.
if (presenterTrack) {
return isLocalTrackMuted(tracks, MEDIA_TYPE.PRESENTER);
} else if (videoTrack) {
return videoTrack.videoType === 'camera'
? isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO) : true;
}
return true;
}
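The precedence above is: presenter track first, then a real camera track, with a lone desktop track reporting as muted. An illustrative check, using redux track objects whose shape ({ local, mediaType, videoType, muted }) is assumed here:

const camera = { local: true, mediaType: 'video', videoType: 'camera', muted: false };
const desktop = { local: true, mediaType: 'video', videoType: 'desktop', muted: false };
const presenter = { local: true, mediaType: 'presenter', muted: true };

isLocalVideoTrackMuted([ camera ]);             // false - unmuted camera
isLocalVideoTrackMuted([ desktop ]);            // true  - screenshare alone counts as camera-muted
isLocalVideoTrackMuted([ desktop, presenter ]); // true  - presenter state wins when present
isLocalVideoTrackMuted([]);                     // true  - no local video at all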
/**
* Checks if the first local track in the given tracks set is muted.
*

react/features/base/tracks/middleware.js

@ -6,6 +6,7 @@ import {
SET_AUDIO_MUTED,
SET_CAMERA_FACING_MODE,
SET_VIDEO_MUTED,
VIDEO_MUTISM_AUTHORITY,
TOGGLE_CAMERA_FACING_MODE,
toggleCameraFacingMode
} from '../media';
@ -89,7 +90,7 @@ MiddlewareRegistry.register(store => next => action => {
return;
}
_setMuted(store, action, MEDIA_TYPE.VIDEO);
_setMuted(store, action, action.mediaType);
break;
case TOGGLE_CAMERA_FACING_MODE: {
@ -131,7 +132,7 @@ MiddlewareRegistry.register(store => next => action => {
const { jitsiTrack } = action.track;
const muted = jitsiTrack.isMuted();
const participantID = jitsiTrack.getParticipantId();
const isVideoTrack = jitsiTrack.isVideoTrack();
const isVideoTrack = jitsiTrack.type !== MEDIA_TYPE.AUDIO;
if (isVideoTrack) {
if (jitsiTrack.isLocal()) {
@ -255,7 +256,7 @@ function _removeNoDataFromSourceNotification({ getState, dispatch }, track) {
* @private
* @returns {void}
*/
function _setMuted(store, { ensureTrack, muted }, mediaType: MEDIA_TYPE) {
function _setMuted(store, { ensureTrack, authority, muted }, mediaType: MEDIA_TYPE) {
const localTrack
= _getLocalTrack(store, mediaType, /* includePending */ true);
@ -265,8 +266,12 @@ function _setMuted(store, { ensureTrack, muted }, mediaType: MEDIA_TYPE) {
// `jitsiTrack`, then the `muted` state will be applied once the
// `jitsiTrack` is created.
const { jitsiTrack } = localTrack;
const isAudioOnly = authority === VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY;
jitsiTrack && setTrackMuted(jitsiTrack, muted);
// Screenshare can no longer be muted or unmuted with the video mute
// button, unless it is being muted by the audio-only mode.
jitsiTrack && (jitsiTrack.videoType !== 'desktop' || isAudioOnly)
&& setTrackMuted(jitsiTrack, muted);
} else if (!muted && ensureTrack && typeof APP === 'undefined') {
// FIXME: This only runs on mobile now because web has its own way of
// creating local tracks. Adjust the check once they are unified.

react/features/stream-effects/presenter/JitsiStreamPresenterEffect.js

@ -0,0 +1,161 @@
// @flow
import {
CLEAR_INTERVAL,
INTERVAL_TIMEOUT,
SET_INTERVAL,
timerWorkerScript
} from './TimeWorker';
/**
* Represents a modified MediaStream that adds the camera video as a PiP on a desktop stream.
* <tt>JitsiStreamPresenterEffect</tt> does the processing of the original
* desktop stream.
*/
export default class JitsiStreamPresenterEffect {
_canvas: HTMLCanvasElement;
_ctx: CanvasRenderingContext2D;
_desktopElement: HTMLVideoElement;
_desktopStream: MediaStream;
_frameRate: number;
_onVideoFrameTimer: Function;
_onVideoFrameTimerWorker: Function;
_renderVideo: Function;
_videoFrameTimerWorker: Worker;
_videoElement: HTMLVideoElement;
isEnabled: Function;
startEffect: Function;
stopEffect: Function;
/**
* Represents a modified MediaStream that adds a camera track at the
* bottom-right corner of the desktop track using an HTML canvas.
* <tt>JitsiStreamPresenterEffect</tt> does the processing of the original
* video stream.
*
* @param {MediaStream} videoStream - The video stream which is used for
* creating the canvas.
*/
constructor(videoStream: MediaStream) {
const videoDiv = document.createElement('div');
const firstVideoTrack = videoStream.getVideoTracks()[0];
const { height, width, frameRate } = firstVideoTrack.getSettings() ?? firstVideoTrack.getConstraints();
this._canvas = document.createElement('canvas');
this._ctx = this._canvas.getContext('2d');
if (document.body !== null) {
document.body.appendChild(this._canvas);
}
this._desktopElement = document.createElement('video');
this._videoElement = document.createElement('video');
videoDiv.appendChild(this._videoElement);
videoDiv.appendChild(this._desktopElement);
if (document.body !== null) {
document.body.appendChild(videoDiv);
}
// Set the video element properties
this._frameRate = parseInt(frameRate, 10);
this._videoElement.width = parseInt(width, 10);
this._videoElement.height = parseInt(height, 10);
this._videoElement.autoplay = true;
this._videoElement.srcObject = videoStream;
// set the style attribute of the div to make it invisible
videoDiv.style.display = 'none';
// Bind event handler so it is only bound once for every instance.
this._onVideoFrameTimer = this._onVideoFrameTimer.bind(this);
this._videoFrameTimerWorker = new Worker(timerWorkerScript);
this._videoFrameTimerWorker.onmessage = this._onVideoFrameTimer;
}
/**
* EventHandler onmessage for the videoFrameTimerWorker WebWorker.
*
* @private
* @param {EventHandler} response - The onmessage EventHandler parameter.
* @returns {void}
*/
_onVideoFrameTimer(response) {
if (response.data.id === INTERVAL_TIMEOUT) {
this._renderVideo();
}
}
/**
* Loop function to render the video frame input and draw the presenter effect.
*
* @private
* @returns {void}
*/
_renderVideo() {
// Adjust the canvas width/height on every frame in case the window has been resized.
const [ track ] = this._desktopStream.getVideoTracks();
const { height, width } = track.getSettings() ?? track.getConstraints();
this._canvas.width = parseInt(width, 10);
this._canvas.height = parseInt(height, 10);
this._ctx.drawImage(this._desktopElement, 0, 0, this._canvas.width, this._canvas.height);
this._ctx.drawImage(this._videoElement, this._canvas.width - this._videoElement.width, this._canvas.height
- this._videoElement.height, this._videoElement.width, this._videoElement.height);
// draw a border around the video element.
this._ctx.beginPath();
this._ctx.lineWidth = 2;
this._ctx.strokeStyle = '#A9A9A9'; // dark grey
this._ctx.rect(this._canvas.width - this._videoElement.width, this._canvas.height - this._videoElement.height,
this._videoElement.width, this._videoElement.height);
this._ctx.stroke();
}
/**
* Checks if the local track supports this effect.
*
* @param {JitsiLocalTrack} jitsiLocalTrack - Track to apply effect.
* @returns {boolean} - Returns true if this effect can run on the
* specified track, false otherwise.
*/
isEnabled(jitsiLocalTrack: Object) {
return jitsiLocalTrack.isVideoTrack() && jitsiLocalTrack.videoType === 'desktop';
}
/**
* Starts loop to capture video frame and render presenter effect.
*
* @param {MediaStream} desktopStream - Stream to be used for processing.
* @returns {MediaStream} - The stream with the applied effect.
*/
startEffect(desktopStream: MediaStream) {
const firstVideoTrack = desktopStream.getVideoTracks()[0];
const { height, width } = firstVideoTrack.getSettings() ?? firstVideoTrack.getConstraints();
// set the desktop element properties.
this._desktopStream = desktopStream;
this._desktopElement.width = parseInt(width, 10);
this._desktopElement.height = parseInt(height, 10);
this._desktopElement.autoplay = true;
this._desktopElement.srcObject = desktopStream;
this._canvas.width = parseInt(width, 10);
this._canvas.height = parseInt(height, 10);
this._videoFrameTimerWorker.postMessage({
id: SET_INTERVAL,
timeMs: 1000 / this._frameRate
});
return this._canvas.captureStream(this._frameRate);
}
/**
* Stops the capture and render loop.
*
* @returns {void}
*/
stopEffect() {
this._videoFrameTimerWorker.postMessage({
id: CLEAR_INTERVAL
});
}
}
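End to end, the effect consumes a camera stream at construction time and a desktop stream in startEffect(), and returns a canvas capture carrying the composited frames. A minimal sketch, assuming cameraStream/desktopStream were obtained via getUserMedia/getDisplayMedia, and with an RTCPeerConnection as a purely illustrative consumer:

// Sketch: compose camera-over-desktop and consume the canvas stream.
const effect = new JitsiStreamPresenterEffect(cameraStream);

if (effect.isEnabled(desktopJitsiTrack)) { // requires videoType 'desktop'
    const mixedStream = effect.startEffect(desktopStream);

    // mixedStream comes from canvas.captureStream(): desktop frames with
    // the camera painted in the bottom-right corner.
    peerConnection.addTrack(mixedStream.getVideoTracks()[0], mixedStream);
}

// Later: stop the worker-driven render loop.
effect.stopEffect();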

react/features/stream-effects/presenter/TimeWorker.js

@ -0,0 +1,62 @@
// @flow
/**
* The SET_INTERVAL constant starts the interval; it is sent in the id
* property of request.data, and a timeMs property must also be set.
* Example request.data:
*
* {
* id: SET_INTERVAL,
* timeMs: 33
* }
*/
export const SET_INTERVAL = 1;
/**
* The CLEAR_INTERVAL constant clears the interval; it is sent in the id
* property of request.data.
*
* {
* id: CLEAR_INTERVAL
* }
*/
export const CLEAR_INTERVAL = 2;
/**
* The INTERVAL_TIMEOUT constant is used in responses; it is set in the id
* property of every tick message posted back by the worker.
*
* {
* id: INTERVAL_TIMEOUT
* }
*/
export const INTERVAL_TIMEOUT = 3;
/**
* The following code is kept as a string so that a URL can be created from
* a Blob and passed to a Web Worker. This allows the use of setInterval
* without the throttling browsers apply to timers in inactive tabs.
*/
const code = `
var timer;
onmessage = function(request) {
switch (request.data.id) {
case ${SET_INTERVAL}: {
timer = setInterval(() => {
postMessage({ id: ${INTERVAL_TIMEOUT} });
}, request.data.timeMs);
break;
}
case ${CLEAR_INTERVAL}: {
if (timer) {
clearInterval(timer);
}
break;
}
}
};
`;
export const timerWorkerScript
= URL.createObjectURL(new Blob([ code ], { type: 'application/javascript' }));
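Outside of the presenter effect, the worker can be driven directly; the protocol is just the three message ids above. A small usage sketch (the draw() callback is a placeholder):

const worker = new Worker(timerWorkerScript);

// Fires once per tick, even while the tab is in the background.
worker.onmessage = ({ data }) => {
    if (data.id === INTERVAL_TIMEOUT) {
        draw(); // placeholder render callback
    }
};

// Start a ~30fps tick.
worker.postMessage({ id: SET_INTERVAL, timeMs: 1000 / 30 });

// Later, tear the interval down.
worker.postMessage({ id: CLEAR_INTERVAL });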

react/features/stream-effects/presenter/index.js

@ -0,0 +1,19 @@
// @flow
import JitsiStreamPresenterEffect from './JitsiStreamPresenterEffect';
/**
* Creates a new instance of JitsiStreamPresenterEffect.
*
* @param {MediaStream} stream - The video stream which will be used for
* creating the presenter effect.
* @returns {Promise<JitsiStreamPresenterEffect>}
*/
export function createPresenterEffect(stream: MediaStream) {
if (!MediaStreamTrack.prototype.getSettings
&& !MediaStreamTrack.prototype.getConstraints) {
return Promise.reject(new Error('JitsiStreamPresenterEffect not supported!'));
}
return Promise.resolve(new JitsiStreamPresenterEffect(stream));
}
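The factory rejects up front on browsers where MediaStreamTrack supports neither getSettings() nor getConstraints(), since the effect needs one of them to size its canvas. A usage sketch (localVideo stands in for the local desktop track):

createPresenterEffect(cameraStream)
    .then(effect => localVideo.setEffect(effect))
    .catch(error => {
        // e.g. 'JitsiStreamPresenterEffect not supported!' on old browsers
        console.error(error);
    });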

react/features/toolbox/components/VideoMuteButton.js

@ -10,14 +10,13 @@ import {
import { setAudioOnly } from '../../base/audio-only';
import { translate } from '../../base/i18n';
import {
MEDIA_TYPE,
VIDEO_MUTISM_AUTHORITY,
setVideoMuted
} from '../../base/media';
import { connect } from '../../base/redux';
import { AbstractVideoMuteButton } from '../../base/toolbox';
import type { AbstractButtonProps } from '../../base/toolbox';
import { isLocalTrackMuted } from '../../base/tracks';
import { getLocalVideoType, isLocalVideoTrackMuted } from '../../base/tracks';
import UIEvents from '../../../../service/UI/UIEvents';
declare var APP: Object;
@ -32,6 +31,11 @@ type Props = AbstractButtonProps & {
*/
_audioOnly: boolean,
/**
* MEDIA_TYPE of the local video.
*/
_videoMediaType: string,
/**
* Whether video is currently muted or not.
*/
@ -136,10 +140,12 @@ class VideoMuteButton extends AbstractVideoMuteButton<Props, *> {
this.props.dispatch(
setAudioOnly(false, /* ensureTrack */ true));
}
const mediaType = this.props._videoMediaType;
this.props.dispatch(
setVideoMuted(
videoMuted,
mediaType,
VIDEO_MUTISM_AUTHORITY.USER,
/* ensureTrack */ true));
@ -167,7 +173,8 @@ function _mapStateToProps(state): Object {
return {
_audioOnly: Boolean(audioOnly),
_videoMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO)
_videoMediaType: getLocalVideoType(tracks),
_videoMuted: isLocalVideoTrackMuted(tracks)
};
}