Add video background blur

This commit is contained in:
Cristian Florin Ghita 2019-06-28 20:18:47 +03:00 committed by Hristo Terezov
parent 6383d000a9
commit 3b750ddd5a
23 changed files with 649 additions and 45 deletions

View File

@ -45,6 +45,8 @@ deploy-appbundle:
$(OUTPUT_DIR)/analytics-ga.js \
$(BUILD_DIR)/analytics-ga.min.js \
$(BUILD_DIR)/analytics-ga.min.map \
$(BUILD_DIR)/video-blur-effect.min.js \
$(BUILD_DIR)/video-blur-effect.min.map \
$(DEPLOY_DIR)
deploy-lib-jitsi-meet:

View File

@ -105,7 +105,10 @@ import {
trackAdded,
trackRemoved
} from './react/features/base/tracks';
import { getJitsiMeetGlobalNS } from './react/features/base/util';
import {
getJitsiMeetGlobalNS,
loadScript
} from './react/features/base/util';
import { addMessage } from './react/features/chat';
import { showDesktopPicker } from './react/features/desktop-picker';
import { appendSuffix } from './react/features/display-name';
@ -559,48 +562,74 @@ export default {
// Resolve with no tracks
tryCreateLocalTracks = Promise.resolve([]);
} else {
tryCreateLocalTracks = createLocalTracksF(
{ devices: initialDevices }, true)
.catch(err => {
if (requestedAudio && requestedVideo) {
// Try audio only...
audioAndVideoError = err;
const loadEffectsPromise = options.startWithBlurEnabled
? loadScript('libs/video-blur-effect.min.js')
.then(() =>
getJitsiMeetGlobalNS().effects.createBlurEffect()
.then(blurEffectInstance =>
Promise.resolve([ blurEffectInstance ])
)
.catch(error => {
logger.log('Failed to create JitsiStreamBlurEffect!', error);
return (
createLocalTracksF({ devices: [ 'audio' ] }, true));
} else if (requestedAudio && !requestedVideo) {
return Promise.resolve([]);
})
)
.catch(error => {
logger.error('loadScript failed with error: ', error);
return Promise.resolve([]);
})
: Promise.resolve([]);
tryCreateLocalTracks = loadEffectsPromise.then(trackEffects =>
createLocalTracksF(
{
devices: initialDevices,
effects: trackEffects
}, true)
.catch(err => {
if (requestedAudio && requestedVideo) {
// Try audio only...
audioAndVideoError = err;
return (
createLocalTracksF({ devices: [ 'audio' ] }, true));
} else if (requestedAudio && !requestedVideo) {
audioOnlyError = err;
return [];
} else if (requestedVideo && !requestedAudio) {
videoOnlyError = err;
return [];
}
logger.error('Should never happen');
})
.catch(err => {
// Log this just in case...
if (!requestedAudio) {
logger.error('The impossible just happened', err);
}
audioOnlyError = err;
return [];
} else if (requestedVideo && !requestedAudio) {
// Try video only...
return requestedVideo
? createLocalTracksF({ devices: [ 'video' ] }, true)
: [];
})
.catch(err => {
// Log this just in case...
if (!requestedVideo) {
logger.error('The impossible just happened', err);
}
videoOnlyError = err;
return [];
}
logger.error('Should never happen');
})
.catch(err => {
// Log this just in case...
if (!requestedAudio) {
logger.error('The impossible just happened', err);
}
audioOnlyError = err;
// Try video only...
return requestedVideo
? createLocalTracksF({ devices: [ 'video' ] }, true)
: [];
})
.catch(err => {
// Log this just in case...
if (!requestedVideo) {
logger.error('The impossible just happened', err);
}
videoOnlyError = err;
return [];
});
})
);
}
// Hide the permissions prompt/overlay as soon as the tracks are
@ -649,6 +678,7 @@ export default {
*/
init(options) {
this.roomName = options.roomName;
const videoBlurEffectEnabled = APP.store.getState()['features/blur'].blurEnabled;
return (
@ -662,6 +692,7 @@ export default {
'initial device list initialization failed', error))
.then(() => this.createInitialLocalTracksAndConnect(
options.roomName, {
startWithBlurEnabled: videoBlurEffectEnabled,
startAudioOnly: config.startAudioOnly,
startScreenSharing: config.startScreenSharing,
startWithAudioMuted: config.startWithAudioMuted || config.startSilent,

View File

@ -220,3 +220,7 @@
.icon-visibility-off:before {
content: "\e924";
}
.icon-blur-background:before {
content: "\e901";
color: #a4b8d1;
}

Binary file not shown.

View File

@ -28,16 +28,13 @@
<glyph unicode="&#xe8b3;" glyph-name="restore" d="M512 682h64v-180l150-90-32-52-182 110v212zM554 896c212 0 384-172 384-384s-172-384-384-384c-106 0-200 42-270 112l60 62c54-54 128-88 210-88 166 0 300 132 300 298s-134 298-300 298-298-132-298-298h128l-172-172-4 6-166 166h128c0 212 172 384 384 384z" />
<glyph unicode="&#xe8b6;" glyph-name="search" d="M406 426c106 0 192 86 192 192s-86 192-192 192-192-86-192-192 86-192 192-192zM662 426l212-212-64-64-212 212v34l-12 12c-48-42-112-66-180-66-154 0-278 122-278 276s124 278 278 278 276-124 276-278c0-68-24-132-66-180l12-12h34z" />
<glyph unicode="&#xe900;" glyph-name="AUD" d="M512 0c-282.77 0-512 229.23-512 512s229.23 512 512 512c282.77 0 512-229.23 512-512s-229.23-512-512-512zM308.25 387.3h57.225l-87.675 252.525h-62.125l-87.675-252.525h53.025l19.425 60.2h88.725l19.075-60.2zM461.9 639.825h-52.85v-165.375c0-56 41.125-93.625 105.7-93.625 64.75 0 105.875 37.625 105.875 93.625v165.375h-52.85v-159.95c0-31.85-19.075-52.15-53.025-52.15-33.775 0-52.85 20.3-52.85 52.15v159.95zM682.225 640v-252.7h99.4c75.6 0 118.475 46.025 118.475 128.1 0 79.1-43.4 124.6-118.475 124.6h-99.4zM735.075 594.85v-162.4h38.15c46.725 0 72.975 28.7 72.975 82.075 0 51.1-27.125 80.325-72.975 80.325h-38.15zM243.5 587.325l-31.675-99.050h66.15l-31.325 99.050h-3.15z" />
<glyph unicode="&#xe901;" glyph-name="signal_cellular_0" d="M938 938v-852h-852zM854 732l-562-562h562v562z" />
<glyph unicode="&#xe902;" glyph-name="signal_cellular_1" d="M86 86l852 852v-256h-170v-596h-682zM854 86v84h84v-84h-84zM854 256v342h84v-342h-84z" />
<glyph unicode="&#xe901;" glyph-name="blur-background" d="M469.333 640c0-47.128-38.205-85.333-85.333-85.333s-85.333 38.205-85.333 85.333c0 47.128 38.205 85.333 85.333 85.333s85.333-38.205 85.333-85.333zM725.333 640c0-47.128-38.205-85.333-85.333-85.333s-85.333 38.205-85.333 85.333c0 47.128 38.205 85.333 85.333 85.333s85.333-38.205 85.333-85.333zM469.333 384c0-47.128-38.205-85.333-85.333-85.333s-85.333 38.205-85.333 85.333c0 47.128 38.205 85.333 85.333 85.333s85.333-38.205 85.333-85.333zM426.667 170.667c0-23.564-19.103-42.667-42.667-42.667s-42.667 19.103-42.667 42.667c0 23.564 19.103 42.667 42.667 42.667s42.667-19.103 42.667-42.667zM682.667 170.667c0-23.564-19.103-42.667-42.667-42.667s-42.667 19.103-42.667 42.667c0 23.564 19.103 42.667 42.667 42.667s42.667-19.103 42.667-42.667zM213.333 384c0-23.564-19.103-42.667-42.667-42.667s-42.667 19.103-42.667 42.667c0 23.564 19.103 42.667 42.667 42.667s42.667-19.103 42.667-42.667zM213.333 640c0-23.564-19.103-42.667-42.667-42.667s-42.667 19.103-42.667 42.667c0 23.564 19.103 42.667 42.667 42.667s42.667-19.103 42.667-42.667zM896 384c0-23.564-19.103-42.667-42.667-42.667s-42.667 19.103-42.667 42.667c0 23.564 19.103 42.667 42.667 42.667s42.667-19.103 42.667-42.667zM896 640c0-23.564-19.103-42.667-42.667-42.667s-42.667 19.103-42.667 42.667c0 23.564 19.103 42.667 42.667 42.667s42.667-19.103 42.667-42.667zM426.667 853.333c0-23.564-19.103-42.667-42.667-42.667s-42.667 19.103-42.667 42.667c0 23.564 19.103 42.667 42.667 42.667s42.667-19.103 42.667-42.667zM682.667 853.333c0-23.564-19.103-42.667-42.667-42.667s-42.667 19.103-42.667 42.667c0 23.564 19.103 42.667 42.667 42.667s42.667-19.103 42.667-42.667zM725.333 384c0-47.128-38.205-85.333-85.333-85.333s-85.333 38.205-85.333 85.333c0 47.128 38.205 85.333 85.333 85.333s85.333-38.205 85.333-85.333z" />
<glyph unicode="&#xe903;" glyph-name="mic-camera-combined" d="M756.704 628.138l267.296 202.213v-635.075l-267.296 202.213v-191.923c0-12.085-11.296-21.863-25.216-21.863h-706.272c-13.92 0-25.216 9.777-25.216 21.863v612.25c0 12.085 11.296 21.863 25.216 21.863h706.272c13.92 0 25.216-9.777 25.216-21.863v-189.679zM371.338 376.228c47.817 0 86.529 40.232 86.529 89.811v184.835c0 49.651-38.713 89.883-86.529 89.883-47.788 0-86.515-40.232-86.515-89.883v-184.835c0-49.579 38.756-89.811 86.515-89.811v0zM356.754 314.070v-32.78h33.718v33.412c73.858 9.606 131.235 73.73 131.235 151.351v88.232h-30.636v-88.232c0-67.57-53.696-122.534-119.734-122.534-66.024 0-119.691 54.964-119.691 122.534v88.232h-30.636v-88.232c0-79.215 59.674-144.502 135.744-151.969v-0.014z" />
<glyph unicode="&#xe904;" glyph-name="kick" d="M512 810l284-426h-568zM214 298h596v-84h-596v84z" />
<glyph unicode="&#xe905;" glyph-name="hangup" d="M512 640c-68 0-134-10-196-30v-132c0-16-10-34-24-40-42-20-80-46-114-78-8-8-18-12-30-12s-22 4-30 12l-106 106c-8 8-12 18-12 30s4 22 12 30c130 124 306 200 500 200s370-76 500-200c8-8 12-18 12-30s-4-22-12-30l-106-106c-8-8-18-12-30-12s-22 4-30 12c-34 32-72 58-114 78-14 6-24 20-24 38v132c-62 20-128 32-196 32z" />
<glyph unicode="&#xe906;" glyph-name="chat" d="M854 342v512h-684v-598l86 86h598zM854 938c46 0 84-38 84-84v-512c0-46-38-86-84-86h-598l-170-170v768c0 46 38 84 84 84h684z" />
<glyph unicode="&#xe907;" glyph-name="signal_cellular_2" d="M86 86l852 852v-852h-852z" />
<glyph unicode="&#xe908;" glyph-name="share-doc" d="M554 640h236l-236 234v-234zM682 426v86h-340v-86h340zM682 256v86h-340v-86h340zM598 938l256-256v-512c0-46-40-84-86-84h-512c-46 0-86 38-86 84l2 684c0 46 38 84 84 84h342z" />
<glyph unicode="&#xe909;" glyph-name="ninja" d="M330.667 469.333c-0.427 14.933 6.4 29.44 17.92 39.253 32-6.827 61.867-20.053 88.747-39.253 0-29.013-23.893-52.907-53.333-52.907s-52.907 23.467-53.333 52.907zM586.667 469.333c26.88 18.773 56.747 32 88.747 38.827 11.52-9.813 18.347-24.32 17.92-38.827 0-29.867-23.893-53.76-53.333-53.76s-53.333 23.893-53.333 53.76v0zM512 640c-118.187 1.707-234.667-27.733-338.347-85.333l-2.987-42.667c0-52.48 12.373-104.107 35.84-151.040 101.12 15.36 203.093 23.040 305.493 23.040s204.373-7.68 305.493-23.040c23.467 46.933 35.84 98.56 35.84 151.040l-2.987 42.667c-103.68 57.6-220.16 87.040-338.347 85.333zM512 938.667c235.641 0 426.667-191.025 426.667-426.667s-191.025-426.667-426.667-426.667c-235.641 0-426.667 191.025-426.667 426.667s191.025 426.667 426.667 426.667z" />
<glyph unicode="&#xe90a;" glyph-name="enlarge" d="M896 212v600h-768v-600h768zM896 896q34 0 60-26t26-60v-596q0-34-26-60t-60-26h-768q-34 0-60 26t-26 60v596q0 34 26 60t60 26h768zM598 342l-86-108-86 108h172zM256 598v-172l-106 86zM768 598l106-86-106-86v172zM512 790l86-108h-172z" />
<glyph unicode="&#xe90b;" glyph-name="full-screen" d="M598 810h212v-212h-84v128h-128v84zM726 298v128h84v-212h-212v84h128zM214 598v212h212v-84h-128v-128h-84zM298 426v-128h128v-84h-212v212h84z" />
<glyph unicode="&#xe90c;" glyph-name="exit-full-screen" d="M682 682h128v-84h-212v212h84v-128zM598 214v212h212v-84h-128v-128h-84zM342 682v128h84v-212h-212v84h128zM214 342v84h212v-212h-84v128h-128z" />
<glyph unicode="&#xe90d;" glyph-name="security" d="M768 170v428h-512v-428h512zM768 682c46 0 86-38 86-84v-428c0-46-40-84-86-84h-512c-46 0-86 38-86 84v428c0 46 40 84 86 84h388v86c0 72-60 132-132 132s-132-60-132-132h-82c0 118 96 214 214 214s214-96 214-214v-86h42zM512 298c-46 0-86 40-86 86s40 86 86 86 86-40 86-86-40-86-86-86z" />

Before

Width:  |  Height:  |  Size: 22 KiB

After

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@ -50,7 +50,7 @@ var interfaceConfig = {
'fodeviceselection', 'hangup', 'profile', 'info', 'chat', 'recording',
'livestreaming', 'etherpad', 'sharedvideo', 'settings', 'raisehand',
'videoquality', 'filmstrip', 'invite', 'feedback', 'stats', 'shortcuts',
'tileview'
'tileview', 'videobackgroundblur'
],
SETTINGS_SECTIONS: [ 'devices', 'language', 'moderator', 'profile', 'calendar' ],

View File

@ -615,7 +615,8 @@
"speakerStats": "Toggle speaker statistics",
"tileView": "Toggle tile view",
"toggleCamera": "Toggle camera",
"videomute": "Toggle mute video"
"videomute": "Toggle mute video",
"videoblur": "Toggle video blur"
},
"addPeople": "Add people to your call",
"audioonly": "Enable / Disable audio only mode",
@ -668,7 +669,9 @@
"tileViewToggle": "Toggle tile view",
"toggleCamera": "Toggle camera",
"unableToUnmutePopup": "You cannot un-mute while the shared video is on.",
"videomute": "Start / Stop camera"
"videomute": "Start / Stop camera",
"startvideoblur": "Blur my background",
"stopvideoblur": "Disable background blur"
},
"transcribing": {
"ccButtonTooltip": "Start / Stop subtitles",

View File

@ -35,6 +35,8 @@
"@atlaskit/tooltip": "12.1.13",
"@microsoft/microsoft-graph-client": "1.1.0",
"@react-native-community/async-storage": "1.3.4",
"@tensorflow-models/body-pix": "^1.0.1",
"@tensorflow/tfjs": "^1.1.2",
"@webcomponents/url": "0.7.1",
"amplitude-js": "4.5.2",
"bc-css-flags": "3.0.0",

View File

@ -467,6 +467,21 @@ export function createRemoteVideoMenuButtonEvent(buttonName, attributes) {
};
}
/**
 * Creates an analytics event for an action related to the video blur
 * effect (e.g. it was started or stopped).
 *
 * @param {string} action - The action which occurred.
 * @returns {Object} The event in a format suitable for sending via
 * sendAnalytics.
 */
export function createVideoBlurEvent(action) {
    const event = {
        actionSubject: 'video.blur',
        action
    };

    return event;
}
/**
* Creates an event indicating that an action related to screen sharing
* occurred (e.g. It was started or stopped).

View File

@ -71,6 +71,7 @@ export function createLocalTracksF(
// Copy array to avoid mutations inside library.
devices: options.devices.slice(0),
effects: options.effects,
firefox_fake_device, // eslint-disable-line camelcase
micDeviceId,
resolution

View File

@ -0,0 +1,21 @@
// @flow

/**
 * The type of redux action dispatched to signal that the video blur
 * effect has been enabled.
 *
 * {
 *     type: BLUR_ENABLED
 * }
 */
export const BLUR_ENABLED = 'BLUR_ENABLED';

/**
 * The type of redux action dispatched to signal that the video blur
 * effect has been disabled.
 *
 * {
 *     type: BLUR_DISABLED
 * }
 */
export const BLUR_DISABLED = 'BLUR_DISABLED';

View File

@ -0,0 +1,69 @@
// @flow
import { getJitsiMeetGlobalNS } from '../base/util';
import { getLocalVideoTrack } from '../../features/base/tracks';
import {
BLUR_DISABLED,
BLUR_ENABLED
} from './actionTypes';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* Signals the local participant is switching between blurred or
* non blurred video.
*
* @param {boolean} enabled - If true enables video blur, false otherwise
*
* @returns {Promise}
*/
export function toggleBlurEffect(enabled: boolean) {
return function(dispatch: (Object) => Object, getState: () => any) {
if (getState()['features/blur'].blurEnabled !== enabled) {
const videoTrack = getLocalVideoTrack(getState()['features/base/tracks']).jitsiTrack;
return getJitsiMeetGlobalNS().effects.createBlurEffect()
.then(blurEffectInstance =>
videoTrack.enableEffect(enabled, blurEffectInstance)
.then(() => {
enabled ? dispatch(blurEnabled()) : dispatch(blurDisabled());
})
.catch(error => {
enabled ? dispatch(blurDisabled()) : dispatch(blurEnabled());
logger.log('enableEffect failed with error:', error);
})
)
.catch(error => {
dispatch(blurDisabled());
logger.log('createBlurEffect failed with error:', error);
});
}
};
}
/**
 * Creates the redux action which signals that the video blur effect has
 * been enabled.
 *
 * @returns {{
 *     type: BLUR_ENABLED
 * }}
 */
export function blurEnabled() {
    return { type: BLUR_ENABLED };
}
/**
 * Creates the redux action which signals that the video blur effect has
 * been disabled.
 *
 * @returns {{
 *     type: BLUR_DISABLED
 * }}
 */
export function blurDisabled() {
    return { type: BLUR_DISABLED };
}

View File

@ -0,0 +1,112 @@
// @flow
import { createVideoBlurEvent, sendAnalytics } from '../../analytics';
import { translate } from '../../base/i18n';
import { connect } from '../../base/redux';
import { AbstractButton } from '../../base/toolbox';
import type { AbstractButtonProps } from '../../base/toolbox';
import {
getJitsiMeetGlobalNS,
loadScript
} from '../../base/util';
import { toggleBlurEffect } from '../actions';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* The type of the React {@code Component} props of {@link VideoBlurButton}.
*/
type Props = AbstractButtonProps & {
/**
* True if the video background is blurred or false if it is not.
*/
_isVideoBlurred: boolean,
/**
* The redux {@code dispatch} function.
*/
dispatch: Function
};
/**
* An abstract implementation of a button that toggles the video blur effect.
*/
class VideoBlurButton extends AbstractButton<Props, *> {
accessibilityLabel = 'toolbar.accessibilityLabel.videoblur';
iconName = 'icon-blur-background';
label = 'toolbar.startvideoblur';
tooltip = 'toolbar.startvideoblur';
toggledLabel = 'toolbar.stopvideoblur';
/**
* Handles clicking / pressing the button, and toggles the blur effect
* state accordingly.
*
* @protected
* @returns {void}
*/
_handleClick() {
const {
_isVideoBlurred,
dispatch
} = this.props;
if (!getJitsiMeetGlobalNS().effects
|| !getJitsiMeetGlobalNS().effects.createBlurEffect) {
loadScript('libs/video-blur-effect.min.js')
.then(() => {
this._handleClick();
})
.catch(error => {
logger.error('Failed to load script with error: ', error);
});
} else {
sendAnalytics(createVideoBlurEvent(_isVideoBlurred ? 'started' : 'stopped'));
dispatch(toggleBlurEffect(!_isVideoBlurred));
}
}
/**
* Returns {@code boolean} value indicating if the blur effect is
* enabled or not.
*
* @protected
* @returns {boolean}
*/
_isToggled() {
const {
_isVideoBlurred
} = this.props;
if (!getJitsiMeetGlobalNS().effects
|| !getJitsiMeetGlobalNS().effects.createBlurEffect) {
return false;
}
return _isVideoBlurred;
}
}
/**
* Maps (parts of) the redux state to the associated props for the
* {@code VideoBlurButton} component.
*
* @param {Object} state - The Redux state.
* @private
* @returns {{
* _isVideoBlurred: boolean
* }}
*/
function _mapStateToProps(state): Object {
return {
_isVideoBlurred: Boolean(state['features/blur'].blurEnabled)
};
}
export default translate(connect(_mapStateToProps)(VideoBlurButton));

View File

@ -0,0 +1 @@
export { default as VideoBlurButton } from './VideoBlurButton';

View File

@ -0,0 +1,4 @@
export * from './actions';
export * from './components';
import './reducer';

View File

@ -0,0 +1,30 @@
// @flow
import { ReducerRegistry } from '../base/redux';
import { PersistenceRegistry } from '../base/storage';
import { BLUR_ENABLED, BLUR_DISABLED } from './actionTypes';
// Persist the blur toggle across sessions; the effect itself is re-applied
// from this flag on the next conference join.
PersistenceRegistry.register('features/blur', true, {
    blurEnabled: false
});

// Reduces the BLUR_ENABLED / BLUR_DISABLED actions into the feature state.
ReducerRegistry.register('features/blur', (state = {}, action) => {
    switch (action.type) {
    case BLUR_ENABLED:
        return {
            ...state,
            blurEnabled: true
        };

    case BLUR_DISABLED:
        return {
            ...state,
            blurEnabled: false
        };

    default:
        return state;
    }
});

View File

@ -0,0 +1,237 @@
import { getLogger } from 'jitsi-meet-logger';
import {
drawBokehEffect,
load
} from '@tensorflow-models/body-pix';
import {
CLEAR_INTERVAL,
INTERVAL_TIMEOUT,
SET_INTERVAL,
timerWorkerScript
} from './TimerWorker';
const logger = getLogger(__filename);
/**
 * This promise represents the loading of the BodyPix model that is used
 * to extract person segmentation. A multiplier of 0.25 is used for
 * improved performance on a larger range of CPUs.
 */
const bpModelPromise = load(0.25);
/**
 * Represents a modified MediaStream that adds blur to video background.
 * <tt>JitsiStreamBlurEffect</tt> does the processing of the original
 * video stream.
 */
class JitsiStreamBlurEffect {
    /**
     *
     * Represents a modified video MediaStream track.
     *
     * @class
     * @param {BodyPix} bpModel - BodyPix model used for person segmentation.
     */
    constructor(bpModel) {
        this._bpModel = bpModel;

        // Intermediate buffers for the capture -> segment -> blur pipeline.
        this._outputCanvasElement = document.createElement('canvas');
        this._maskCanvasElement = document.createElement('canvas');
        this._inputVideoElement = document.createElement('video');

        this._renderVideo = this._renderVideo.bind(this);
        this._renderMask = this._renderMask.bind(this);

        // Dedicated workers drive the render loops because setInterval on
        // the main thread is throttled while the tab is inactive.
        this._videoFrameTimerWorker = new Worker(timerWorkerScript);
        this._maskFrameTimerWorker = new Worker(timerWorkerScript);
        this._onMaskFrameTimer = this._onMaskFrameTimer.bind(this);
        this._onVideoFrameTimer = this._onVideoFrameTimer.bind(this);
        this._videoFrameTimerWorker.onmessage = this._onVideoFrameTimer;
        this._maskFrameTimerWorker.onmessage = this._onMaskFrameTimer;
    }

    /**
     * EventHandler onmessage for the videoFrameTimerWorker WebWorker.
     *
     * @private
     * @param {EventHandler} response - onmessage EventHandler parameter.
     * @returns {void}
     */
    _onVideoFrameTimer(response) {
        switch (response.data.id) {
        case INTERVAL_TIMEOUT: {
            this._renderVideo();
            break;
        }
        }
    }

    /**
     * EventHandler onmessage for the maskFrameTimerWorker WebWorker.
     *
     * @private
     * @param {EventHandler} response - onmessage EventHandler parameter.
     * @returns {void}
     */
    _onMaskFrameTimer(response) {
        switch (response.data.id) {
        case INTERVAL_TIMEOUT: {
            this._renderMask();
            break;
        }
        }
    }

    /**
     * Starts loop to capture video frame and render the segmentation mask.
     *
     * @param {MediaStream} stream - Stream to be used for processing.
     * @throws {Error} If the track exposes neither getSettings nor
     * getConstraints, i.e. the effect cannot determine frame dimensions.
     * @returns {void}
     */
    startEffect(stream) {
        this._stream = stream;

        const firstVideoTrack = this._stream.getVideoTracks()[0];

        // Check support BEFORE reading track settings - previously this
        // check came after the accessor call, so an unsupported track threw
        // a raw TypeError and the descriptive error below was unreachable.
        if (!firstVideoTrack.getSettings && !firstVideoTrack.getConstraints) {
            throw new Error('JitsiStreamBlurEffect not supported!');
        }

        const { height, frameRate, width } = firstVideoTrack.getSettings
            ? firstVideoTrack.getSettings() : firstVideoTrack.getConstraints();

        this._frameRate = frameRate;
        this._height = height;
        this._width = width;
        this._outputCanvasElement.width = width;
        this._outputCanvasElement.height = height;
        this._maskCanvasElement.width = this._width;
        this._maskCanvasElement.height = this._height;
        this._inputVideoElement.width = width;
        this._inputVideoElement.height = height;
        this._maskCanvasContext = this._maskCanvasElement.getContext('2d');
        this._inputVideoElement.autoplay = true;
        this._inputVideoElement.srcObject = this._stream;

        // Render the blurred video once per source frame...
        this._videoFrameTimerWorker.postMessage({
            id: SET_INTERVAL,
            timeMs: 1000 / this._frameRate
        });

        // ...but recompute the (expensive) segmentation mask only 5x/sec.
        this._maskFrameTimerWorker.postMessage({
            id: SET_INTERVAL,
            timeMs: 200
        });
    }

    /**
     * Stops the capture and render loop.
     *
     * @returns {void}
     */
    stopEffect() {
        this._videoFrameTimerWorker.postMessage({
            id: CLEAR_INTERVAL
        });
        this._maskFrameTimerWorker.postMessage({
            id: CLEAR_INTERVAL
        });
    }

    /**
     * Get the modified stream.
     *
     * @returns {MediaStream}
     */
    getStreamWithEffect() {
        return this._outputCanvasElement.captureStream(this._frameRate);
    }

    /**
     * Loop function to render the video frame input and draw blur effect.
     *
     * @private
     * @returns {void}
     */
    _renderVideo() {
        if (this._bpModel) {
            // Keep the mask canvas in sync with the current video frame so
            // _renderMask always segments a recent image.
            this._maskCanvasContext.drawImage(this._inputVideoElement,
                0,
                0,
                this._width,
                this._height);

            if (this._segmentationData) {
                drawBokehEffect(this._outputCanvasElement,
                    this._inputVideoElement,
                    this._segmentationData,
                    7, // Constant for background blur, integer values between 0-20
                    7); // Constant for edge blur, integer values between 0-20
            }
        } else {
            // No model available - pass the frame through unblurred.
            this._outputCanvasElement
                .getContext('2d')
                .drawImage(this._inputVideoElement,
                    0,
                    0,
                    this._width,
                    this._height);
        }
    }

    /**
     * Loop function to render the background mask.
     *
     * @private
     * @returns {void}
     */
    _renderMask() {
        if (this._bpModel) {
            this._bpModel.estimatePersonSegmentation(this._maskCanvasElement,
                32, // Chose 32 for better performance
                0.75) // Represents probability that a pixel belongs to a person
                .then(value => {
                    this._segmentationData = value;
                });
        }
    }

    /**
     * Checks if the local track supports this effect.
     *
     * @param {JitsiLocalTrack} jitsiLocalTrack - Track to apply effect.
     * @returns {boolean} Returns true if this effect can run on the
     * specified track, false otherwise.
     */
    isEnabled(jitsiLocalTrack) {
        return jitsiLocalTrack.isVideoTrack();
    }
}
/**
 * Creates a new instance of JitsiStreamBlurEffect.
 *
 * @returns {Promise<JitsiStreamBlurEffect>}
 */
export function createBlurEffect() {
    return bpModelPromise
        .then(bpModel => new JitsiStreamBlurEffect(bpModel))
        .catch(error => {
            logger.error('Failed to load BodyPix model. Fallback to original stream!', error);
            throw error;
        });
}

View File

@ -0,0 +1,59 @@
/**
 * SET_INTERVAL constant is used to set interval and it is set in
 * the id property of the request.data property. timeMs property must
 * also be set. request.data example:
 *
 * {
 *     id: SET_INTERVAL,
 *     timeMs: 33
 * }
 */
export const SET_INTERVAL = 2;

/**
 * CLEAR_INTERVAL constant is used to clear the interval and it is set in
 * the id property of the request.data property.
 *
 * {
 *     id: CLEAR_INTERVAL
 * }
 */
export const CLEAR_INTERVAL = 3;

/**
 * INTERVAL_TIMEOUT constant is used as response and it is set in the id
 * property of the message posted back by the worker.
 *
 * {
 *     id: INTERVAL_TIMEOUT
 * }
 */
export const INTERVAL_TIMEOUT = 22;

/**
 * The following code is needed as string to create a URL from a Blob.
 * The URL is then passed to a WebWorker. Reason for this is to enable
 * use of setInterval that is not throttled when tab is inactive.
 */
const code
    = ` let timer = null;
    onmessage = function(request) {
        switch (request.data.id) {
        case ${SET_INTERVAL}: {
            timer = setInterval(() => {
                postMessage({ id: ${INTERVAL_TIMEOUT} });
            }, request.data.timeMs);
            break;
        }
        case ${CLEAR_INTERVAL}: {
            clearInterval(timer);
            break;
        }
        }
    };
`;

// Package the worker source as a Blob URL so the script ships inside this
// bundle instead of as a separate file.
const blob = new Blob([ code ], { type: 'application/javascript' });

export const timerWorkerScript = URL.createObjectURL(blob);

View File

@ -18,6 +18,9 @@ import {
import { connect } from '../../../base/redux';
import { OverflowMenuItem } from '../../../base/toolbox';
import { getLocalVideoTrack, toggleScreensharing } from '../../../base/tracks';
import {
VideoBlurButton
} from '../../../blur';
import { ChatCounter, toggleChat } from '../../../chat';
import { toggleDocument } from '../../../etherpad';
import { openFeedbackDialog } from '../../../feedback';
@ -220,7 +223,6 @@ class Toolbox extends Component<Props, State> {
= this._onShortcutToggleRaiseHand.bind(this);
this._onShortcutToggleScreenshare
= this._onShortcutToggleScreenshare.bind(this);
this._onToolbarOpenFeedback
= this._onToolbarOpenFeedback.bind(this);
this._onToolbarOpenInvite = this._onToolbarOpenInvite.bind(this);
@ -970,6 +972,10 @@ class Toolbox extends Component<Props, State> {
text = { _editingDocument
? t('toolbar.documentClose')
: t('toolbar.documentOpen') } />,
<VideoBlurButton
key = 'videobackgroundblur'
showLabel = { true }
visible = { this._shouldShowButton('videobackgroundblur') } />,
<SettingsButton
key = 'settings'
showLabel = { true }

View File

@ -151,6 +151,16 @@ module.exports = [
'./react/features/analytics/handlers/GoogleAnalyticsHandler.js'
}
}),
Object.assign({}, config, {
entry: {
'video-blur-effect':
'./react/features/stream-effects/JitsiStreamBlurEffect.js'
},
output: Object.assign({}, config.output, {
library: [ 'JitsiMeetJS', 'app', 'effects' ],
libraryTarget: 'window'
})
}),
// The Webpack configuration to bundle external_api.js (aka
// JitsiMeetExternalAPI).