fix(facial-expressions): load worker as a blob

This commit is contained in:
Gabriel Borlea 2021-12-08 09:27:17 +02:00 committed by GitHub
parent b890f34a53
commit 0d5beb0c4e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 37 additions and 24 deletions

View File

@ -56,7 +56,20 @@ export function loadWorker() {
return;
}
worker = new Worker('libs/facial-expressions-worker.min.js', { name: 'Facial Expression Worker' });
let baseUrl = '';
const app: Object = document.querySelector('script[src*="app.bundle.min.js"]');
if (app) {
const idx = app.src.lastIndexOf('/');
baseUrl = `${app.src.substring(0, idx)}/`;
}
let workerUrl = `${baseUrl}facial-expressions-worker.min.js`;
const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });
workerUrl = window.URL.createObjectURL(workerBlob);
worker = new Worker(workerUrl, { name: 'Facial Expression Worker' });
worker.onmessage = function(e: Object) {
const { type, value } = e.data;
@ -89,6 +102,11 @@ export function loadWorker() {
}
}
};
worker.postMessage({
id: 'SET_MODELS_URL',
url: baseUrl
});
dispatch(startFacialRecognition());
};
}

View File

@ -7,6 +7,11 @@ import * as faceapi from 'face-api.js';
*/
let modelsLoaded = false;
/**
* The url where the models for the facial detection of expressions are located.
*/
let modelsURL;
/**
* A flag that indicates whether the tensorflow backend is set or not.
*/
@ -41,22 +46,26 @@ const window = {
};
onmessage = async function(message) {
if (message.data.id === 'SET_MODELS_URL') {
modelsURL = message.data.url;
}
// Receives image data
if (message.data.id === 'SET_TIMEOUT') {
if (message.data.imageData === null || message.data.imageData === undefined) {
return;
if (!message.data.imageData || !modelsURL) {
self.postMessage({
type: 'facial-expression',
value: null
});
}
// the models are loaded
if (!modelsLoaded) {
await faceapi.loadTinyFaceDetectorModel('.');
await faceapi.loadFaceExpressionModel('.');
await faceapi.loadTinyFaceDetectorModel(modelsURL);
await faceapi.loadFaceExpressionModel(modelsURL);
modelsLoaded = true;
}
faceapi.tf.engine().startScope();
const tensor = faceapi.tf.browser.fromPixels(message.data.imageData);
const detections = await faceapi.detectSingleFace(
@ -82,29 +91,17 @@ onmessage = async function(message) {
}
}
faceapi.tf.engine().endScope();
let facialExpression;
if (detections) {
facialExpression = detections.expressions.asSortedArray()[0].expression;
}
if (timeoutDuration === -1) {
timer = setTimeout(() => {
self.postMessage({
type: 'facial-expression',
value: facialExpression
});
} else {
timer = setTimeout(() => {
self.postMessage({
type: 'facial-expression',
value: facialExpression
});
}, timeoutDuration);
}
}, timeoutDuration);
} else if (message.data.id === 'CLEAR_TIMEOUT') {
// Clear the timeout.
if (timer) {
@ -112,5 +109,4 @@ onmessage = async function(message) {
timer = null;
}
}
};

View File

@ -29,7 +29,6 @@ MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
}
if (action.type === CONFERENCE_JOINED) {
dispatch(loadWorker());
dispatch(startFacialRecognition());
return next(action);
}