fix(facial-expressions): set screen size in worker and add constants for worker message types (#10678)

* fix(facial-expressions): set window screen size from worker with the value from main thread

* fix(facial-expressions): refactor worker and add constants for message types
This commit is contained in:
Gabriel Borlea 2022-02-04 09:14:52 +02:00 committed by GitHub
parent df6eff8804
commit fa9f4588c2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 74 additions and 37 deletions

View File

@ -13,8 +13,10 @@ import {
STOP_FACIAL_RECOGNITION
} from './actionTypes';
import {
CPU_TIME_INTERVAL,
WEBGL_TIME_INTERVAL,
CLEAR_TIMEOUT,
FACIAL_EXPRESSION_MESSAGE,
INIT_WORKER,
INTERVAL_MESSAGE,
WEBHOOK_SEND_TIME_INTERVAL
} from './constants';
import { sendDataToWorker, sendFacialExpressionsWebhook } from './functions';
@ -82,19 +84,12 @@ export function loadWorker() {
// receives a message indicating what type of backend tfjs decided to use.
// it is received as a response to the first message sent to the worker.
if (type === 'tf-backend' && value) {
let detectionTimeInterval = -1;
if (value === 'webgl') {
detectionTimeInterval = WEBGL_TIME_INTERVAL;
} else if (value === 'cpu') {
detectionTimeInterval = CPU_TIME_INTERVAL;
}
dispatch(setDetectionTimeInterval(detectionTimeInterval));
if (type === INTERVAL_MESSAGE) {
value && dispatch(setDetectionTimeInterval(value));
}
// receives a message with the predicted facial expression.
if (type === 'facial-expression') {
if (type === FACIAL_EXPRESSION_MESSAGE) {
sendDataToWorker(worker, imageCapture);
if (!value) {
return;
@ -118,8 +113,12 @@ export function loadWorker() {
}
};
worker.postMessage({
id: 'SET_MODELS_URL',
url: baseUrl
type: INIT_WORKER,
url: baseUrl,
windowScreenSize: window.screen ? {
width: window.screen.width,
height: window.screen.height
} : undefined
});
dispatch(startFacialRecognition());
};
@ -187,7 +186,7 @@ export function stopFacialRecognition() {
}
imageCapture = null;
worker.postMessage({
id: 'CLEAR_TIMEOUT'
type: CLEAR_TIMEOUT
});
if (lastFacialExpression && lastFacialExpressionTimestamp) {

View File

@ -27,3 +27,32 @@ export const CPU_TIME_INTERVAL = 6000;
* Time is ms used for sending expression.
*/
export const WEBHOOK_SEND_TIME_INTERVAL = 15000;
/**
 * Type of message sent from the main thread to the worker that contains
 * initialization information, such as the models directory URL and the
 * window screen size.
 */
export const INIT_WORKER = 'INIT_WORKER';
/**
 * Type of message sent from the main thread to the worker that contains image data;
 * the worker will set a timeout for sending back the expression, if one is detected.
 */
export const SET_TIMEOUT = 'SET_TIMEOUT';
/**
 * Type of message sent from the main thread to the worker that stops the recognition;
 * the worker will clear the timeout and then send nothing back.
 */
export const CLEAR_TIMEOUT = 'CLEAR_TIMEOUT';
/**
 * Type of message sent from the worker to the main thread that contains a detected
 * facial expression, or undefined when none was detected.
 */
export const FACIAL_EXPRESSION_MESSAGE = 'FACIAL_EXPRESSION_MESSAGE_TYPE';
/**
 * Type of message sent from the worker to the main thread that contains the detection
 * time interval chosen by the worker (based on the tfjs backend it selected).
 */
export const INTERVAL_MESSAGE = 'INTERVAL_MESSAGE_TYPE';

View File

@ -2,6 +2,16 @@
import './faceApiPatch';
import * as faceapi from 'face-api.js';
import {
CLEAR_TIMEOUT,
CPU_TIME_INTERVAL,
FACIAL_EXPRESSION_MESSAGE,
INIT_WORKER,
SET_TIMEOUT,
INTERVAL_MESSAGE,
WEBGL_TIME_INTERVAL
} from './constants';
/**
* A flag that indicates whether the tensorflow models were loaded or not.
*/
@ -28,34 +38,29 @@ let timer;
let timeoutDuration = -1;
/**
* Time used for detection interval when facial expressions worker uses webgl backend.
* A patch for having a global `window` object available in the worker.
*/
const WEBGL_TIME_INTERVAL = 1000;
/**
* Time used for detection interval when facial expression worker uses cpu backend.
*/
const CPU_TIME_INTERVAL = 6000;
// eslint-disable-next-line no-unused-vars
const window = {
screen: {
width: 1280,
height: 720
}
};
onmessage = async function(message) {
if (message.data.id === 'SET_MODELS_URL') {
switch (message.data.type) {
case INIT_WORKER : {
modelsURL = message.data.url;
if (message.data.windowScreenSize) {
window.screen = message.data.windowScreenSize;
}
break;
}
// Receives image data
if (message.data.id === 'SET_TIMEOUT') {
case SET_TIMEOUT : {
if (!message.data.imageData || !modelsURL) {
self.postMessage({
type: 'facial-expression',
type: FACIAL_EXPRESSION_MESSAGE,
value: null
});
}
@ -77,15 +82,15 @@ onmessage = async function(message) {
if (!backendSet) {
const backend = faceapi.tf.getBackend();
if (backend !== undefined) {
if (backend) {
if (backend === 'webgl') {
timeoutDuration = WEBGL_TIME_INTERVAL;
} else if (backend === 'cpu') {
timeoutDuration = CPU_TIME_INTERVAL;
}
self.postMessage({
type: 'tf-backend',
value: backend
type: INTERVAL_MESSAGE,
value: timeoutDuration
});
backendSet = true;
}
@ -98,15 +103,19 @@ onmessage = async function(message) {
}
timer = setTimeout(() => {
self.postMessage({
type: 'facial-expression',
type: FACIAL_EXPRESSION_MESSAGE,
value: facialExpression
});
}, timeoutDuration);
} else if (message.data.id === 'CLEAR_TIMEOUT') {
// Clear the timeout.
break;
}
case CLEAR_TIMEOUT: {
if (timer) {
clearTimeout(timer);
timer = null;
}
break;
}
}
};

View File

@ -2,6 +2,7 @@
import { getLocalParticipant } from '../base/participants';
import { extractFqnFromPath } from '../dynamic-branding';
import { SET_TIMEOUT } from './constants';
import logger from './logger';
/**
@ -120,7 +121,6 @@ export async function sendDataToWorker(
if (imageCapture === null || imageCapture === undefined) {
return;
}
let imageBitmap;
try {
@ -141,7 +141,7 @@ export async function sendDataToWorker(
const imageData = context.getImageData(0, 0, imageBitmap.width, imageBitmap.height);
worker.postMessage({
id: 'SET_TIMEOUT',
type: SET_TIMEOUT,
imageData
});
}