fix(blur-effect) enable blur effect on all platforms supporting canvas filters

That means all browsers except Safari, for now.

In addition, use the 96p segmentation model (instead of the 144p one) on browsers without WebAssembly SIMD support.
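
For reference, a minimal sketch (not part of the changeset) of the two capability checks this commit combines; the wasm-check API, the model file paths, and the canvas filter probe are all taken from the diffs below:

    import * as wasmCheck from 'wasm-check';

    // Canvas 2D filter support gates the blur feature itself; Safari's
    // CanvasRenderingContext2D had no `filter` property at the time.
    const ctx = document.createElement('canvas').getContext('2d');
    const blurSupported = typeof ctx.filter !== 'undefined';

    // WebAssembly SIMD support decides which segmentation model is loaded.
    const modelPath = wasmCheck.feature.simd
        ? 'libs/segm_full_v679.tflite' // 144p model
        : 'libs/segm_lite_v681.tflite'; // 96p model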
Tudor D. Pop 2021-02-25 14:21:03 +02:00 committed by GitHub
parent 159f59b665
commit dd1f8339b1
5 changed files with 60 additions and 39 deletions

View File

@@ -1,12 +1,10 @@
// @flow
import React from 'react';
import { createVideoBlurEvent, sendAnalytics } from '../../analytics';
import { translate } from '../../base/i18n';
import { IconBlurBackground } from '../../base/icons';
import { connect } from '../../base/redux';
- import { AbstractButton, BetaTag } from '../../base/toolbox/components';
+ import { AbstractButton } from '../../base/toolbox/components';
import type { AbstractButtonProps } from '../../base/toolbox/components';
import { toggleBlurEffect } from '../actions';
@@ -37,18 +35,6 @@ class VideoBlurButton extends AbstractButton<Props, *> {
tooltip = 'toolbar.startvideoblur';
toggledLabel = 'toolbar.stopvideoblur';
- /**
- * Helper function to be implemented by subclasses, which returns
- * a React Element to display (a beta tag) at the end of the button.
- *
- * @override
- * @protected
- * @returns {ReactElement}
- */
- _getElementAfter() {
- return <BetaTag />;
- }
/**
* Handles clicking / pressing the button, and toggles the blur effect
* state accordingly.

View File

@@ -2,6 +2,8 @@
import { getJitsiMeetGlobalNS, loadScript } from '../base/util';
+ let filterSupport;
/**
* Returns promise that resolves with the blur effect instance.
*
@@ -16,3 +18,21 @@ export function getBlurEffect() {
return loadScript('libs/video-blur-effect.min.js').then(() => ns.effects.createBlurEffect());
}
+ /**
+ * Checks context filter support.
+ *
+ * @returns {boolean} True if the filter is supported and false if the filter is not supported by the browser.
+ */
+ export function checkBlurSupport() {
+ if (typeof filterSupport === 'undefined') {
+ const canvas = document.createElement('canvas');
+ const ctx = canvas.getContext('2d');
+ filterSupport = typeof ctx.filter !== 'undefined';
+ canvas.remove();
+ }
+ return filterSupport;
+ }
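
For illustration, how the new helper behaves (assumption: run in a browser; the module-level filterSupport variable caches the result, so the probe canvas is created at most once):

    // On Chromium and Firefox ctx.filter is a string ('none' by default),
    // so checkBlurSupport() returns true; on Safari, at the time of this
    // commit, the property is undefined and the blur button stays hidden.
    if (checkBlurSupport()) {
        // Safe to offer the blur toggle and load the effect.
    }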

View File

@@ -6,9 +6,6 @@ import {
timerWorkerScript
} from './TimerWorker';
- const segmentationWidth = 256;
- const segmentationHeight = 144;
- const segmentationPixelCount = segmentationWidth * segmentationHeight;
const blurValue = '25px';
/**
@@ -18,6 +15,8 @@ const blurValue = '25px';
*/
export default class JitsiStreamBlurEffect {
_model: Object;
+ _options: Object;
+ _segmentationPixelCount: number;
_inputVideoElement: HTMLVideoElement;
_onMaskFrameTimer: Function;
_maskFrameTimerWorker: Worker;
@@ -35,10 +34,13 @@ export default class JitsiStreamBlurEffect {
* Represents a modified video MediaStream track.
*
* @class
- * @param {BodyPix} bpModel - BodyPix model.
+ * @param {Object} model - Meet model.
+ * @param {Object} options - Segmentation dimensions.
*/
- constructor(bpModel: Object) {
- this._model = bpModel;
+ constructor(model: Object, options: Object) {
+ this._model = model;
+ this._options = options;
+ this._segmentationPixelCount = this._options.width * this._options.height;
// Bind event handler so it is only bound once for every instance.
this._onMaskFrameTimer = this._onMaskFrameTimer.bind(this);
@@ -76,8 +78,8 @@ export default class JitsiStreamBlurEffect {
this._segmentationMaskCanvas,
0,
0,
- segmentationWidth,
- segmentationHeight,
+ this._options.width,
+ this._options.height,
0,
0,
this._inputVideoElement.width,
@@ -89,7 +91,7 @@ export default class JitsiStreamBlurEffect {
this._outputCanvasCtx.drawImage(this._inputVideoElement, 0, 0);
this._outputCanvasCtx.globalCompositeOperation = 'destination-over';
- this._outputCanvasCtx.filter = `blur(${blurValue})`; // FIXME Does not work on Safari.
+ this._outputCanvasCtx.filter = `blur(${blurValue})`;
this._outputCanvasCtx.drawImage(this._inputVideoElement, 0, 0);
}
@@ -102,7 +104,7 @@ export default class JitsiStreamBlurEffect {
this._model._runInference();
const outputMemoryOffset = this._model._getOutputMemoryOffset() / 4;
- for (let i = 0; i < segmentationPixelCount; i++) {
+ for (let i = 0; i < this._segmentationPixelCount; i++) {
const background = this._model.HEAPF32[outputMemoryOffset + (i * 2)];
const person = this._model.HEAPF32[outputMemoryOffset + (i * 2) + 1];
const shift = Math.max(background, person);
@@ -146,19 +148,19 @@ export default class JitsiStreamBlurEffect {
this._inputVideoElement.height,
0,
0,
- segmentationWidth,
- segmentationHeight
+ this._options.width,
+ this._options.height
);
const imageData = this._segmentationMaskCtx.getImageData(
0,
0,
- segmentationWidth,
- segmentationHeight
+ this._options.width,
+ this._options.height
);
const inputMemoryOffset = this._model._getInputMemoryOffset() / 4;
- for (let i = 0; i < segmentationPixelCount; i++) {
+ for (let i = 0; i < this._segmentationPixelCount; i++) {
this._model.HEAPF32[inputMemoryOffset + (i * 3)] = imageData.data[i * 4] / 255;
this._model.HEAPF32[inputMemoryOffset + (i * 3) + 1] = imageData.data[(i * 4) + 1] / 255;
this._model.HEAPF32[inputMemoryOffset + (i * 3) + 2] = imageData.data[(i * 4) + 2] / 255;
@@ -189,10 +191,10 @@ export default class JitsiStreamBlurEffect {
const { height, frameRate, width }
= firstVideoTrack.getSettings ? firstVideoTrack.getSettings() : firstVideoTrack.getConstraints();
- this._segmentationMask = new ImageData(segmentationWidth, segmentationHeight);
+ this._segmentationMask = new ImageData(this._options.width, this._options.height);
this._segmentationMaskCanvas = document.createElement('canvas');
- this._segmentationMaskCanvas.width = segmentationWidth;
- this._segmentationMaskCanvas.height = segmentationHeight;
+ this._segmentationMaskCanvas.width = this._options.width;
+ this._segmentationMaskCanvas.height = this._options.height;
this._segmentationMaskCtx = this._segmentationMaskCanvas.getContext('2d');
this._outputCanvasElement.width = parseInt(width, 10);
this._outputCanvasElement.height = parseInt(height, 10);
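
For reference, an illustrative example of the new constructor contract, using the 96p dimensions added in the next file's diff (values are from that diff; the variable names here are made up):

    const options = { height: 96, width: 160 };
    const effect = new JitsiStreamBlurEffect(tflite, options);

    // effect._segmentationPixelCount is 160 * 96 = 15360, replacing the
    // previously hard-coded 256 * 144 segmentation constants.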

View File

@@ -7,8 +7,19 @@ import createTFLiteModule from './vendor/tflite/tflite';
import createTFLiteSIMDModule from './vendor/tflite/tflite-simd';
const models = {
- '96': 'libs/segm_lite_v681.tflite',
- '144': 'libs/segm_full_v679.tflite'
+ 'model96': 'libs/segm_lite_v681.tflite',
+ 'model144': 'libs/segm_full_v679.tflite'
};
+ const segmentationDimensions = {
+ 'model96': {
+ 'height': 96,
+ 'width': 160
+ },
+ 'model144': {
+ 'height': 144,
+ 'width': 256
+ }
+ };
/**
@@ -31,7 +42,7 @@ export async function createBlurEffect() {
const modelBufferOffset = tflite._getModelBufferMemoryOffset();
const modelResponse = await fetch(
- models['144']
+ wasmCheck.feature.simd ? models.model144 : models.model96
);
if (!modelResponse.ok) {
@@ -44,5 +55,7 @@ export async function createBlurEffect() {
tflite._loadModel(model.byteLength);
- return new JitsiStreamBlurEffect(tflite);
+ const options = wasmCheck.feature.simd ? segmentationDimensions.model144 : segmentationDimensions.model96;
+ return new JitsiStreamBlurEffect(tflite, options);
}
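
To summarize the pairing the selection above relies on (values as defined in this file; illustrative comment only):

    // SIMD available:   models.model144 -> 'libs/segm_full_v679.tflite', 256x144 segmentation input
    // SIMD unavailable: models.model96  -> 'libs/segm_lite_v681.tflite', 160x96 segmentation input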

View File

@@ -1,7 +1,6 @@
// @flow
import React, { Component } from 'react';
- import * as wasmCheck from 'wasm-check';
import {
ACTION_SHORTCUT_TRIGGERED,
@@ -37,6 +36,7 @@ import { OverflowMenuItem } from '../../../base/toolbox/components';
import { getLocalVideoTrack, toggleScreensharing } from '../../../base/tracks';
import { isVpaasMeeting } from '../../../billing-counter/functions';
import { VideoBlurButton } from '../../../blur';
+ import { checkBlurSupport } from '../../../blur/functions';
import { CHAT_SIZE, ChatCounter, toggleChat } from '../../../chat';
import { EmbedMeetingDialog } from '../../../embed-meeting';
import { SharedDocumentButton } from '../../../etherpad';
@@ -1071,7 +1071,7 @@ class Toolbox extends Component<Props, State> {
&& <VideoBlurButton
key = 'videobackgroundblur'
showLabel = { true }
- visible = { !_screensharing && wasmCheck.feature.simd } />,
+ visible = { !_screensharing && checkBlurSupport() } />,
this._shouldShowButton('settings')
&& <SettingsButton
key = 'settings'