fix(blur): many small issues.

Hristo Terezov 2019-07-03 16:38:25 +01:00
parent 3b750ddd5a
commit 3b0c5d0b6a
12 changed files with 388 additions and 327 deletions

conference.js

@@ -105,10 +105,8 @@ import {
trackAdded,
trackRemoved
} from './react/features/base/tracks';
-import {
-getJitsiMeetGlobalNS,
-loadScript
-} from './react/features/base/util';
+import { getJitsiMeetGlobalNS } from './react/features/base/util';
+import { getBlurEffect } from './react/features/blur';
import { addMessage } from './react/features/chat';
import { showDesktopPicker } from './react/features/desktop-picker';
import { appendSuffix } from './react/features/display-name';
@@ -562,25 +560,14 @@ export default {
// Resolve with no tracks
tryCreateLocalTracks = Promise.resolve([]);
} else {
const loadEffectsPromise = options.startWithBlurEnabled
-? loadScript('libs/video-blur-effect.min.js')
-.then(() =>
-getJitsiMeetGlobalNS().effects.createBlurEffect()
-.then(blurEffectInstance =>
-Promise.resolve([ blurEffectInstance ])
-)
-.catch(error => {
-logger.log('Failed to create JitsiStreamBlurEffect!', error);
-return Promise.resolve([]);
-})
-)
-.catch(error => {
-logger.error('loadScript failed with error: ', error);
-return Promise.resolve([]);
-})
+? getBlurEffect()
+.then(blurEffect => [ blurEffect ])
+.catch(error => {
+logger.error('Failed to obtain the blur effect instance with error: ', error);
+return Promise.resolve([]);
+})
: Promise.resolve([]);
tryCreateLocalTracks = loadEffectsPromise.then(trackEffects =>
@@ -678,7 +665,6 @@ export default {
*/
init(options) {
this.roomName = options.roomName;
-const videoBlurEffectEnabled = APP.store.getState()['features/blur'].blurEnabled;
return (
@@ -692,7 +678,7 @@ export default {
'initial device list initialization failed', error))
.then(() => this.createInitialLocalTracksAndConnect(
options.roomName, {
-startWithBlurEnabled: videoBlurEffectEnabled,
+startWithBlurEnabled: APP.store.getState()['features/blur'].blurEnabled,
startAudioOnly: config.startAudioOnly,
startScreenSharing: config.startScreenSharing,
startWithAudioMuted: config.startWithAudioMuted || config.startSilent,
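The rewritten branch turns any blur failure into an empty effects array, so track creation is never blocked by the optional effect. A minimal sketch of that contract, using only names visible in the hunk above (the standalone loadEffects wrapper is illustrative, not part of the file):

// Sketch: blur is best-effort at startup. Both failure paths resolve rather than
// reject, so the later loadEffectsPromise.then(trackEffects => ...) always runs.
const loadEffects = startWithBlurEnabled =>
    startWithBlurEnabled
        ? getBlurEffect()
            .then(blurEffect => [ blurEffect ])
            .catch(() => []) // degrade to plain tracks instead of failing startup
        : Promise.resolve([]);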

package-lock.json (generated)

@@ -2563,6 +2563,102 @@
"component-url": "^0.2.1"
}
},
"@tensorflow-models/body-pix": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@tensorflow-models/body-pix/-/body-pix-1.1.1.tgz",
"integrity": "sha512-l9bd+b3QI7OzJjw/OuhEfeGRb5l2lRivgDHGMvQbT2Snn8nV7odHSRW55NzhU7Khl7vga00TWo5QDuVnkevQmQ=="
},
"@tensorflow/tfjs": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs/-/tfjs-1.2.2.tgz",
"integrity": "sha512-HfhSzL2eTWhlT0r/A5wmo+u3bHe+an16p5wsnFH3ujn21fQ8QtGpSfDHQZjWx1kVFaQnV6KBG+17MOrRHoHlLA==",
"requires": {
"@tensorflow/tfjs-converter": "1.2.2",
"@tensorflow/tfjs-core": "1.2.2",
"@tensorflow/tfjs-data": "1.2.2",
"@tensorflow/tfjs-layers": "1.2.2"
}
},
"@tensorflow/tfjs-converter": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-1.2.2.tgz",
"integrity": "sha512-NM2NcPRHpCNeJdBxHcYpmW9ZHTQ2lJFJgmgGpQ8CxSC9CtQB05bFONs3SKcwMNDE/69QBRVom5DYqLCVUg+A+g=="
},
"@tensorflow/tfjs-core": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-1.2.2.tgz",
"integrity": "sha512-2hCHMKjh3UNpLEjbAEaurrTGJyj/KpLtMSAraWgHA1vGY0kmk50BBSbgCDmXWUVm7lyh/SkCq4/GrGDZktEs3g==",
"requires": {
"@types/offscreencanvas": "~2019.3.0",
"@types/seedrandom": "2.4.27",
"@types/webgl-ext": "0.0.30",
"@types/webgl2": "0.0.4",
"node-fetch": "~2.1.2",
"rollup-plugin-visualizer": "~1.1.1",
"seedrandom": "2.4.3"
},
"dependencies": {
"node-fetch": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.1.2.tgz",
"integrity": "sha1-q4hOjn5X44qUR1POxwb3iNF2i7U="
}
}
},
"@tensorflow/tfjs-data": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-data/-/tfjs-data-1.2.2.tgz",
"integrity": "sha512-oHGBoGdnCl2RyouLKplQqo+iil0iJgPbi/aoHizhpO77UBuJXlKMblH8w5GbxVAw3hKxWlqzYpxPo6rVRgehNA==",
"requires": {
"@types/node-fetch": "^2.1.2",
"node-fetch": "~2.1.2"
},
"dependencies": {
"node-fetch": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.1.2.tgz",
"integrity": "sha1-q4hOjn5X44qUR1POxwb3iNF2i7U="
}
}
},
"@tensorflow/tfjs-layers": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-layers/-/tfjs-layers-1.2.2.tgz",
"integrity": "sha512-yzWZaZrCVpEyTkSrzMe4OOP4aGUfaaROE/zR9fPsPGGF8wLlbLNZUJjeYUmjy3G3pXGaM0mQUbLR5Vd707CVtQ=="
},
"@types/node": {
"version": "12.0.10",
"resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.10.tgz",
"integrity": "sha512-LcsGbPomWsad6wmMNv7nBLw7YYYyfdYcz6xryKYQhx89c3XXan+8Q6AJ43G5XDIaklaVkK3mE4fCb0SBvMiPSQ=="
},
"@types/node-fetch": {
"version": "2.3.7",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.3.7.tgz",
"integrity": "sha512-+bKtuxhj/TYSSP1r4CZhfmyA0vm/aDRQNo7vbAgf6/cZajn0SAniGGST07yvI4Q+q169WTa2/x9gEHfJrkcALw==",
"requires": {
"@types/node": "*"
}
},
"@types/offscreencanvas": {
"version": "2019.3.0",
"resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz",
"integrity": "sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q=="
},
"@types/seedrandom": {
"version": "2.4.27",
"resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.27.tgz",
"integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE="
},
"@types/webgl-ext": {
"version": "0.0.30",
"resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.30.tgz",
"integrity": "sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg=="
},
"@types/webgl2": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/@types/webgl2/-/webgl2-0.0.4.tgz",
"integrity": "sha512-PACt1xdErJbMUOUweSrbVM7gSIYm1vTncW2hF6Os/EeWi6TXYAYMPp+8v6rzHmypE5gHrxaxZNXgMkJVIdZpHw=="
},
"@webassemblyjs/ast": {
"version": "1.7.11",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.7.11.tgz",
@@ -13187,6 +13283,35 @@
"inherits": "^2.0.1"
}
},
"rollup-plugin-visualizer": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/rollup-plugin-visualizer/-/rollup-plugin-visualizer-1.1.1.tgz",
"integrity": "sha512-7xkSKp+dyJmSC7jg2LXqViaHuOnF1VvIFCnsZEKjrgT5ZVyiLLSbeszxFcQSfNJILphqgAEmWAUz0Z4xYScrRw==",
"optional": true,
"requires": {
"mkdirp": "^0.5.1",
"opn": "^5.4.0",
"source-map": "^0.7.3",
"typeface-oswald": "0.0.54"
},
"dependencies": {
"opn": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/opn/-/opn-5.5.0.tgz",
"integrity": "sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==",
"optional": true,
"requires": {
"is-wsl": "^1.1.0"
}
},
"source-map": {
"version": "0.7.3",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
"integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
"optional": true
}
}
},
"rsvp": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/rsvp/-/rsvp-3.6.2.tgz",
@@ -13775,6 +13900,11 @@
"resolved": "https://registry.npmjs.org/sdp-transform/-/sdp-transform-2.3.0.tgz",
"integrity": "sha1-V6lXWUIEHYV3qGnXx01MOgvYiPY="
},
"seedrandom": {
"version": "2.4.3",
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz",
"integrity": "sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw="
},
"select-hose": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz",
@@ -15363,6 +15493,12 @@
"resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz",
"integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c="
},
"typeface-oswald": {
"version": "0.0.54",
"resolved": "https://registry.npmjs.org/typeface-oswald/-/typeface-oswald-0.0.54.tgz",
"integrity": "sha512-U1WMNp4qfy4/3khIfHMVAIKnNu941MXUfs3+H9R8PFgnoz42Hh9pboSFztWr86zut0eXC8byalmVhfkiKON/8Q==",
"optional": true
},
"ua-parser-js": {
"version": "0.7.17",
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.17.tgz",

react/features/blur/actions.js

@@ -1,49 +1,48 @@
// @flow
-import { getJitsiMeetGlobalNS } from '../base/util';
import { getLocalVideoTrack } from '../../features/base/tracks';
-import {
-BLUR_DISABLED,
-BLUR_ENABLED
-} from './actionTypes';
+import { BLUR_DISABLED, BLUR_ENABLED } from './actionTypes';
+import { getBlurEffect } from './functions';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
-* Signals the local participant is switching between blurred or
-* non blurred video.
-*
-* @param {boolean} enabled - If true enables video blur, false otherwise
+* Signals the local participant is switching between blurred or non blurred video.
+*
+* @param {boolean} enabled - If true enables video blur, false otherwise.
* @returns {Promise}
*/
export function toggleBlurEffect(enabled: boolean) {
return function(dispatch: (Object) => Object, getState: () => any) {
-if (getState()['features/blur'].blurEnabled !== enabled) {
-const videoTrack = getLocalVideoTrack(getState()['features/base/tracks']).jitsiTrack;
+const state = getState();
+if (state['features/blur'].blurEnabled !== enabled) {
+const { jitsiTrack } = getLocalVideoTrack(state['features/base/tracks']);
-return getJitsiMeetGlobalNS().effects.createBlurEffect()
+return getBlurEffect()
.then(blurEffectInstance =>
-videoTrack.enableEffect(enabled, blurEffectInstance)
+jitsiTrack.setEffect(enabled ? blurEffectInstance : undefined)
.then(() => {
enabled ? dispatch(blurEnabled()) : dispatch(blurDisabled());
})
.catch(error => {
enabled ? dispatch(blurDisabled()) : dispatch(blurEnabled());
-logger.log('enableEffect failed with error:', error);
+logger.error('setEffect failed with error:', error);
})
)
.catch(error => {
dispatch(blurDisabled());
-logger.log('createBlurEffect failed with error:', error);
+logger.error('getBlurEffect failed with error:', error);
});
}
return Promise.resolve();
};
}
/**
-* Signals the local participant that the blur has been enabled
+* Signals the local participant that the blur has been enabled.
*
* @returns {{
* type: BLUR_ENABLED
@@ -56,7 +55,7 @@ export function blurEnabled() {
}
/**
-* Signals the local participant that the blur has been disabled
+* Signals the local participant that the blur has been disabled.
*
* @returns {{
* type: BLUR_DISABLED
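Because the state comparison now happens before any effect instance is created, dispatching the thunk twice with the same value is a cheap no-op. A usage sketch (these call sites are hypothetical, not part of this diff):

// Enable blur: resolves after setEffect() applies the effect and BLUR_ENABLED is dispatched.
APP.store.dispatch(toggleBlurEffect(true));
// Same value again: short-circuits to Promise.resolve() without touching the track.
APP.store.dispatch(toggleBlurEffect(true));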

react/features/blur/components/VideoBlurButton.js

@@ -5,15 +5,9 @@ import { translate } from '../../base/i18n';
import { connect } from '../../base/redux';
import { AbstractButton } from '../../base/toolbox';
import type { AbstractButtonProps } from '../../base/toolbox';
-import {
-getJitsiMeetGlobalNS,
-loadScript
-} from '../../base/util';
import { toggleBlurEffect } from '../actions';
-const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* The type of the React {@code Component} props of {@link VideoBlurButton}.
*/
@@ -49,27 +43,11 @@ class VideoBlurButton extends AbstractButton<Props, *> {
* @returns {void}
*/
_handleClick() {
-const {
-_isVideoBlurred,
-dispatch
-} = this.props;
+const { _isVideoBlurred, dispatch } = this.props;
+const value = !_isVideoBlurred;
-if (!getJitsiMeetGlobalNS().effects
-|| !getJitsiMeetGlobalNS().effects.createBlurEffect) {
-loadScript('libs/video-blur-effect.min.js')
-.then(() => {
-this._handleClick();
-})
-.catch(error => {
-logger.error('Failed to load script with error: ', error);
-});
-} else {
-sendAnalytics(createVideoBlurEvent(_isVideoBlurred ? 'started' : 'stopped'));
-dispatch(toggleBlurEffect(!_isVideoBlurred));
-}
+sendAnalytics(createVideoBlurEvent(value ? 'started' : 'stopped'));
+dispatch(toggleBlurEffect(value));
}
/**
@@ -80,16 +58,7 @@ class VideoBlurButton extends AbstractButton<Props, *> {
* @returns {boolean}
*/
_isToggled() {
-const {
-_isVideoBlurred
-} = this.props;
-if (!getJitsiMeetGlobalNS().effects
-|| !getJitsiMeetGlobalNS().effects.createBlurEffect) {
-return false;
-}
-return _isVideoBlurred;
+return this.props._isVideoBlurred;
}
}

react/features/blur/functions.js

@@ -0,0 +1,18 @@
// @flow
import { getJitsiMeetGlobalNS, loadScript } from '../base/util';
/**
* Returns a promise that resolves with the blur effect instance.
*
* @returns {Promise<JitsiStreamBlurEffect>} - Resolves with the blur effect instance.
*/
export function getBlurEffect() {
const ns = getJitsiMeetGlobalNS();
if (ns.effects && ns.effects.createBlurEffect) {
return ns.effects.createBlurEffect();
}
return loadScript('libs/video-blur-effect.min.js').then(() => ns.effects.createBlurEffect());
}
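The helper makes the lazy load transparent to callers: only the first call pays for the script download, assuming the loaded bundle assigns createBlurEffect onto the global effects namespace (the webpack change at the end of this commit arranges exactly that). A sketch:

// First call: ns.effects is not yet defined, so the bundle is fetched first.
getBlurEffect().then(blurEffect => { /* use the effect */ });
// Any later call: ns.effects.createBlurEffect already exists, no network round trip.
getBlurEffect().then(blurEffect => { /* same API, cached path */ });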

react/features/blur/index.js

@@ -1,4 +1,5 @@
export * from './actions';
export * from './components';
+export * from './functions';
import './reducer';

react/features/stream-effects/JitsiStreamBlurEffect.js (deleted)

@@ -1,237 +0,0 @@
import { getLogger } from 'jitsi-meet-logger';
import {
drawBokehEffect,
load
} from '@tensorflow-models/body-pix';
import {
CLEAR_INTERVAL,
INTERVAL_TIMEOUT,
SET_INTERVAL,
timerWorkerScript
} from './TimerWorker';
const logger = getLogger(__filename);
/**
* This promise represents the loading of the BodyPix model that is used
* to extract person segmentation. A multiplier of 0.25 is used to for
* improved performance on a larger range of CPUs.
*/
const bpModelPromise = load(0.25);
/**
* Represents a modified MediaStream that adds blur to video background.
* <tt>JitsiStreamBlurEffect</tt> does the processing of the original
* video stream.
*/
class JitsiStreamBlurEffect {
/**
*
* Represents a modified video MediaStream track.
*
* @class
* @param {BodyPix} bpModel - BodyPix model
*/
constructor(bpModel) {
this._bpModel = bpModel;
this._outputCanvasElement = document.createElement('canvas');
this._maskCanvasElement = document.createElement('canvas');
this._inputVideoElement = document.createElement('video');
this._renderVideo = this._renderVideo.bind(this);
this._renderMask = this._renderMask.bind(this);
this._videoFrameTimerWorker = new Worker(timerWorkerScript);
this._maskFrameTimerWorker = new Worker(timerWorkerScript);
this._onMaskFrameTimer = this._onMaskFrameTimer.bind(this);
this._onVideoFrameTimer = this._onVideoFrameTimer.bind(this);
this._videoFrameTimerWorker.onmessage = this._onVideoFrameTimer;
this._maskFrameTimerWorker.onmessage = this._onMaskFrameTimer;
}
/**
* EventHandler onmessage for the videoFrameTimerWorker WebWorker
*
* @private
* @param {EventHandler} response - onmessage EventHandler parameter
* @returns {void}
*/
_onVideoFrameTimer(response) {
switch (response.data.id) {
case INTERVAL_TIMEOUT: {
this._renderVideo();
break;
}
}
}
/**
* EventHandler onmessage for the maskFrameTimerWorker WebWorker
*
* @private
* @param {EventHandler} response - onmessage EventHandler parameter
* @returns {void}
*/
_onMaskFrameTimer(response) {
switch (response.data.id) {
case INTERVAL_TIMEOUT: {
this._renderMask();
break;
}
}
}
/**
* Starts loop to capture video frame and render the segmentation mask.
*
* @param {MediaStream} stream - Stream to be used for processing
*
* @returns {void}
*/
startEffect(stream) {
this._stream = stream;
const firstVideoTrack = this._stream.getVideoTracks()[0];
const { height, frameRate, width } = firstVideoTrack.getSettings
? firstVideoTrack.getSettings() : firstVideoTrack.getConstraints();
if (!firstVideoTrack.getSettings && !firstVideoTrack.getConstraints) {
throw new Error('JitsiStreamBlurEffect not supported!');
}
this._frameRate = frameRate;
this._height = height;
this._width = width;
this._outputCanvasElement.width = width;
this._outputCanvasElement.height = height;
this._maskCanvasElement.width = this._width;
this._maskCanvasElement.height = this._height;
this._inputVideoElement.width = width;
this._inputVideoElement.height = height;
this._maskCanvasContext = this._maskCanvasElement.getContext('2d');
this._inputVideoElement.autoplay = true;
this._inputVideoElement.srcObject = this._stream;
this._videoFrameTimerWorker.postMessage({
id: SET_INTERVAL,
timeMs: 1000 / this._frameRate
});
this._maskFrameTimerWorker.postMessage({
id: SET_INTERVAL,
timeMs: 200
});
}
/**
* Stops the capture and render loop.
*
* @returns {void}
*/
stopEffect() {
this._videoFrameTimerWorker.postMessage({
id: CLEAR_INTERVAL
});
this._maskFrameTimerWorker.postMessage({
id: CLEAR_INTERVAL
});
}
/**
* Get the modified stream.
*
* @returns {MediaStream}
*/
getStreamWithEffect() {
return this._outputCanvasElement.captureStream(this._frameRate);
}
/**
* Loop function to render the video frame input and draw blur effect.
*
* @private
* @returns {void}
*/
_renderVideo() {
if (this._bpModel) {
this._maskCanvasContext.drawImage(this._inputVideoElement,
0,
0,
this._width,
this._height);
if (this._segmentationData) {
drawBokehEffect(this._outputCanvasElement,
this._inputVideoElement,
this._segmentationData,
7, // Constant for background blur, integer values between 0-20
7); // Constant for edge blur, integer values between 0-20
}
} else {
this._outputCanvasElement
.getContext('2d')
.drawImage(this._inputVideoElement,
0,
0,
this._width,
this._height);
}
}
/**
* Loop function to render the background mask.
*
* @private
* @returns {void}
*/
_renderMask() {
if (this._bpModel) {
this._bpModel.estimatePersonSegmentation(this._maskCanvasElement,
32, // Chose 32 for better performance
0.75) // Represents probability that a pixel belongs to a person
.then(value => {
this._segmentationData = value;
});
}
}
/**
* Checks if the local track supports this effect.
*
* @param {JitsiLocalTrack} jitsiLocalTrack - Track to apply effect
*
* @returns {boolean} Returns true if this effect can run on the specified track
* false otherwise
*/
isEnabled(jitsiLocalTrack) {
return jitsiLocalTrack.isVideoTrack();
}
}
/**
* Creates a new instance of JitsiStreamBlurEffect.
*
* @returns {Promise<JitsiStreamBlurEffect>}
*/
export function createBlurEffect() {
return bpModelPromise
.then(bpmodel =>
Promise.resolve(new JitsiStreamBlurEffect(bpmodel))
)
.catch(error => {
logger.error('Failed to load BodyPix model. Fallback to original stream!', error);
throw error;
});
}

react/features/stream-effects/blur/JitsiStreamBlurEffect.js

@@ -0,0 +1,167 @@
import { drawBokehEffect } from '@tensorflow-models/body-pix';
import {
CLEAR_INTERVAL,
INTERVAL_TIMEOUT,
SET_INTERVAL,
timerWorkerScript
} from './TimerWorker';
/**
* Represents a modified MediaStream that adds blur to video background.
* <tt>JitsiStreamBlurEffect</tt> does the processing of the original
* video stream.
*/
export default class JitsiStreamBlurEffect {
/**
* Represents a modified video MediaStream track.
*
* @class
* @param {BodyPix} bpModel - BodyPix model.
*/
constructor(bpModel) {
this._bpModel = bpModel;
// Bind event handler so it is only bound once for every instance.
this._onMaskFrameTimer = this._onMaskFrameTimer.bind(this);
this._onVideoFrameTimer = this._onVideoFrameTimer.bind(this);
this._outputCanvasElement = document.createElement('canvas');
this._maskCanvasElement = document.createElement('canvas');
this._inputVideoElement = document.createElement('video');
this._videoFrameTimerWorker = new Worker(timerWorkerScript);
this._maskFrameTimerWorker = new Worker(timerWorkerScript);
this._videoFrameTimerWorker.onmessage = this._onVideoFrameTimer;
this._maskFrameTimerWorker.onmessage = this._onMaskFrameTimer;
}
/**
* EventHandler onmessage for the videoFrameTimerWorker WebWorker.
*
* @private
* @param {EventHandler} response - The onmessage EventHandler parameter.
* @returns {void}
*/
_onVideoFrameTimer(response) {
if (response.data.id === INTERVAL_TIMEOUT) {
this._renderVideo();
}
}
/**
* EventHandler onmessage for the maskFrameTimerWorker WebWorker.
*
* @private
* @param {EventHandler} response - The onmessage EventHandler parameter.
* @returns {void}
*/
_onMaskFrameTimer(response) {
if (response.data.id === INTERVAL_TIMEOUT) {
this._renderMask();
}
}
/**
* Starts loop to capture video frame and render the segmentation mask.
*
* @param {MediaStream} stream - Stream to be used for processing.
* @returns {MediaStream} - The stream with the applied effect.
*/
startEffect(stream) {
const firstVideoTrack = stream.getVideoTracks()[0];
const { height, frameRate, width }
= firstVideoTrack.getSettings ? firstVideoTrack.getSettings() : firstVideoTrack.getConstraints();
this._frameRate = frameRate;
this._height = height;
this._width = width;
this._outputCanvasElement.width = width;
this._outputCanvasElement.height = height;
this._maskCanvasElement.width = width;
this._maskCanvasElement.height = height;
this._maskCanvasContext = this._maskCanvasElement.getContext('2d');
this._inputVideoElement.width = width;
this._inputVideoElement.height = height;
this._inputVideoElement.autoplay = true;
this._inputVideoElement.srcObject = stream;
this._videoFrameTimerWorker.postMessage({
id: SET_INTERVAL,
timeMs: 1000 / this._frameRate
});
this._maskFrameTimerWorker.postMessage({
id: SET_INTERVAL,
timeMs: 200
});
return this._outputCanvasElement.captureStream(this._frameRate);
}
/**
* Stops the capture and render loop.
*
* @returns {void}
*/
stopEffect() {
this._videoFrameTimerWorker.postMessage({
id: CLEAR_INTERVAL
});
this._maskFrameTimerWorker.postMessage({
id: CLEAR_INTERVAL
});
}
/**
* Loop function to render the video frame input and draw blur effect.
*
* @private
* @returns {void}
*/
_renderVideo() {
this._maskCanvasContext.drawImage(this._inputVideoElement, 0, 0, this._width, this._height);
if (this._segmentationData) {
drawBokehEffect(
this._outputCanvasElement,
this._inputVideoElement,
this._segmentationData,
7, // Constant for background blur, integer values between 0-20
7 // Constant for edge blur, integer values between 0-20
);
}
}
/**
* Loop function to render the background mask.
*
* @private
* @returns {void}
*/
_renderMask() {
this._bpModel.estimatePersonSegmentation(
this._maskCanvasElement,
32, // Chosen for better performance
0.75 // Represents probability that a pixel belongs to a person
)
.then(value => {
this._segmentationData = value;
});
}
/**
* Checks if the local track supports this effect.
*
* @param {JitsiLocalTrack} jitsiLocalTrack - Track to apply effect.
* @returns {boolean} - Returns true if this effect can run on the specified track, false otherwise.
*/
isEnabled(jitsiLocalTrack) {
return jitsiLocalTrack.isVideoTrack();
}
}
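Note that startEffect() now returns the processed stream directly, replacing the old getStreamWithEffect() getter. A lifecycle sketch; cameraStream and videoElement are assumptions for illustration, only the constructor and methods come from the class above:

const effect = new JitsiStreamBlurEffect(bpModel); // bpModel: a loaded BodyPix model
const blurredStream = effect.startEffect(cameraStream); // captureStream() of the output canvas
videoElement.srcObject = blurredStream;
// ...when blur is switched off...
effect.stopEffect(); // clears both worker-driven render loops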

react/features/stream-effects/blur/TimerWorker.js

@@ -9,7 +9,7 @@
* timeMs: 33
* }
*/
-export const SET_INTERVAL = 2;
+export const SET_INTERVAL = 1;
/**
* CLEAR_INTERVAL constant is used to clear the interval and it is set in
@@ -19,7 +19,7 @@ export const SET_INTERVAL = 2;
* id: CLEAR_INTERVAL
* }
*/
-export const CLEAR_INTERVAL = 3;
+export const CLEAR_INTERVAL = 2;
/**
* INTERVAL_TIMEOUT constant is used as response and it is set in the id property.
@@ -28,15 +28,15 @@ export const CLEAR_INTERVAL = 3;
* id: INTERVAL_TIMEOUT
* }
*/
-export const INTERVAL_TIMEOUT = 22;
+export const INTERVAL_TIMEOUT = 3;
/**
* The following code is needed as string to create a URL from a Blob.
* The URL is then passed to a WebWorker. Reason for this is to enable
* use of setInterval that is not throttled when tab is inactive.
*/
-const code
-= ` let timer = null;
+const code = `
+var timer;
onmessage = function(request) {
switch (request.data.id) {
@@ -47,13 +47,13 @@ const code
break;
}
case ${CLEAR_INTERVAL}: {
-clearInterval(timer);
+if (timer) {
+clearInterval(timer);
+}
break;
}
}
};
`;
-const blob = new Blob([ code ], { type: 'application/javascript' });
-export const timerWorkerScript = URL.createObjectURL(blob);
+export const timerWorkerScript = URL.createObjectURL(new Blob([ code ], { type: 'application/javascript' }));
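The renumbered constants make the protocol easy to read: 1 and 2 are requests into the worker, 3 is its reply. A round-trip sketch (this standalone consumer is hypothetical; JitsiStreamBlurEffect does the same wiring in its constructor):

const worker = new Worker(timerWorkerScript);

worker.onmessage = ({ data }) => {
    if (data.id === INTERVAL_TIMEOUT) {
        // Fires every timeMs, without the throttling setInterval suffers in inactive tabs.
    }
};
worker.postMessage({ id: SET_INTERVAL, timeMs: 1000 / 30 }); // ~30 fps tick
worker.postMessage({ id: CLEAR_INTERVAL }); // stop; safe even if no timer is running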

react/features/stream-effects/blur/index.js

@@ -0,0 +1,25 @@
// @flow
import { load } from '@tensorflow-models/body-pix';
import JitsiStreamBlurEffect from './JitsiStreamBlurEffect';
/**
* This promise represents the loading of the BodyPix model that is used
* to extract person segmentation. A multiplier of 0.25 is used for
* improved performance on a larger range of CPUs.
*/
const bpModelPromise = load(0.25);
/**
* Creates a new instance of JitsiStreamBlurEffect.
*
* @returns {Promise<JitsiStreamBlurEffect>}
*/
export function createBlurEffect() {
if (!MediaStreamTrack.prototype.getSettings && !MediaStreamTrack.prototype.getConstraints) {
return Promise.reject(new Error('JitsiStreamBlurEffect not supported!'));
}
return bpModelPromise.then(bpmodel => new JitsiStreamBlurEffect(bpmodel));
}
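Hoisting the MediaStreamTrack capability check into the factory means unsupported browsers reject up front, instead of hitting the old throw inside startEffect mid-setup. A hedged usage sketch, pairing the factory with the setEffect call seen in actions.js above:

createBlurEffect()
    .then(blurEffect => jitsiTrack.setEffect(blurEffect)) // jitsiTrack: a local video track
    .catch(error => console.warn('Blur effect unavailable:', error));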

react/features/toolbox/components/web/Toolbox.js

@@ -18,9 +18,7 @@ import {
import { connect } from '../../../base/redux';
import { OverflowMenuItem } from '../../../base/toolbox';
import { getLocalVideoTrack, toggleScreensharing } from '../../../base/tracks';
-import {
-VideoBlurButton
-} from '../../../blur';
+import { VideoBlurButton } from '../../../blur';
import { ChatCounter, toggleChat } from '../../../chat';
import { toggleDocument } from '../../../etherpad';
import { openFeedbackDialog } from '../../../feedback';

webpack.config.js

@@ -153,8 +153,7 @@ module.exports = [
}),
Object.assign({}, config, {
entry: {
-'video-blur-effect':
-'./react/features/stream-effects/JitsiStreamBlurEffect.js'
+'video-blur-effect': './react/features/stream-effects/blur/index.js'
},
output: Object.assign({}, config.output, {
library: [ 'JitsiMeetJS', 'app', 'effects' ],