diff --git a/config.js b/config.js
index fd0bf412c..a61a591d6 100644
--- a/config.js
+++ b/config.js
@@ -795,10 +795,7 @@ var config = {
     //     faceCenteringThreshold: 10,

     //     // Milliseconds for processing a new image capture in order to detect face coordinates if they exist.
-    //     captureInterval: 1000,
-
-    //     // Maximum number of faces that can be detected from a video track.
-    //     maxFacesDetected: 4
+    //     captureInterval: 1000
     // },

     // Controls the percentage of automatic feedback shown to participants when callstats is enabled.
diff --git a/react/features/face-landmarks/FaceLandmarksHelper.ts b/react/features/face-landmarks/FaceLandmarksHelper.ts
index 0b90b369d..9ca0cee4b 100644
--- a/react/features/face-landmarks/FaceLandmarksHelper.ts
+++ b/react/features/face-landmarks/FaceLandmarksHelper.ts
@@ -3,11 +3,6 @@ import { Human, Config, FaceResult } from '@vladmandic/human';

 import { DETECTION_TYPES, FACE_DETECTION_SCORE_THRESHOLD, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';

-type Detection = {
-    detections: Array<FaceResult>,
-    threshold?: number
-};
-
 type DetectInput = {
     image: ImageBitmap | ImageData,
     threshold: number
@@ -21,20 +16,22 @@ type FaceBox = {

 type InitInput = {
     baseUrl: string,
-    detectionTypes: string[],
-    maxFacesDetected?: number
+    detectionTypes: string[]
 }

 type DetectOutput = {
     faceExpression?: string,
-    faceBox?: FaceBox
+    faceBox?: FaceBox,
+    faceCount: number
 };

 export interface FaceLandmarksHelper {
-    getFaceBox({ detections, threshold }: Detection): FaceBox | undefined;
-    getFaceExpression({ detections }: Detection): string | undefined;
+    getFaceBox(detections: Array<FaceResult>, threshold: number): FaceBox | undefined;
+    getFaceExpression(detections: Array<FaceResult>): string | undefined;
+    getFaceCount(detections: Array<FaceResult>): number;
+    getDetections(image: ImageBitmap | ImageData): Promise<Array<FaceResult>>;
     init(): Promise<void>;
-    detect({ image, threshold } : DetectInput): Promise<DetectOutput | undefined>;
+    detect({ image, threshold } : DetectInput): Promise<DetectOutput>;
     getDetectionInProgress(): boolean;
 }
@@ -45,7 +42,6 @@ export class HumanHelper implements FaceLandmarksHelper {
     protected human: Human | undefined;
     protected faceDetectionTypes: string[];
     protected baseUrl: string;
-    protected maxFacesDetected?: number;
     private detectionInProgress = false;
     private lastValidFaceBox: FaceBox | undefined;
     /**
@@ -66,7 +62,7 @@ export class HumanHelper implements FaceLandmarksHelper {
             enabled: false,
             rotation: false,
             modelPath: 'blazeface-front.json',
-            maxDetected: 4
+            maxDetected: 20
         },
         mesh: { enabled: false },
         iris: { enabled: false },
@@ -82,10 +78,9 @@ export class HumanHelper implements FaceLandmarksHelper {
         segmentation: { enabled: false }
     };

-    constructor({ baseUrl, detectionTypes, maxFacesDetected }: InitInput) {
+    constructor({ baseUrl, detectionTypes }: InitInput) {
         this.faceDetectionTypes = detectionTypes;
         this.baseUrl = baseUrl;
-        this.maxFacesDetected = maxFacesDetected;
         this.init();
     }
@@ -102,10 +97,6 @@ export class HumanHelper implements FaceLandmarksHelper {
         if (this.faceDetectionTypes.length > 0 && this.config.face) {
             this.config.face.enabled = true
         }
-
-        if (this.maxFacesDetected && this.config.face?.detector) {
-            this.config.face.detector.maxDetected = this.maxFacesDetected;
-        }

         if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX) && this.config.face?.detector) {
             this.config.face.detector.enabled = true;
@@ -126,15 +117,15 @@ export class HumanHelper implements FaceLandmarksHelper {
         }
     }

-    getFaceBox({ detections, threshold }: Detection): FaceBox | undefined {
-        if (!detections.length) {
+    getFaceBox(detections: Array<FaceResult>, threshold: number): FaceBox | undefined {
+        if (this.getFaceCount(detections) !== 1) {
             return;
         }

         const faceBox: FaceBox = {
             // normalize to percentage based
-            left: Math.round(Math.min(...detections.map(d => d.boxRaw[0])) * 100),
-            right: Math.round(Math.max(...detections.map(d => d.boxRaw[0] + d.boxRaw[2])) * 100)
+            left: Math.round(detections[0].boxRaw[0] * 100),
+            right: Math.round((detections[0].boxRaw[0] + detections[0].boxRaw[2]) * 100)
         };

         faceBox.width = Math.round(faceBox.right - faceBox.left);
@@ -148,15 +139,27 @@ export class HumanHelper implements FaceLandmarksHelper {
         return faceBox;
     }

-    getFaceExpression({ detections }: Detection): string | undefined {
-        if (detections[0]?.emotion) {
-            return FACE_EXPRESSIONS_NAMING_MAPPING[detections[0]?.emotion[0].emotion];
+    getFaceExpression(detections: Array<FaceResult>): string | undefined {
+        if (this.getFaceCount(detections) !== 1) {
+            return;
+        }
+
+        if (detections[0].emotion) {
+            return FACE_EXPRESSIONS_NAMING_MAPPING[detections[0].emotion[0].emotion];
         }
     }

-    async getDetections(image: ImageBitmap | ImageData) {
-        if (!this.human) {
-            return;
+    getFaceCount(detections: Array<FaceResult> | undefined): number {
+        if (detections) {
+            return detections.length;
+        }
+
+        return 0;
+    }
+
+    async getDetections(image: ImageBitmap | ImageData): Promise<Array<FaceResult>> {
+        if (!this.human || !this.faceDetectionTypes.length) {
+            return [];
         }

         this.human.tf.engine().startScope();
@@ -169,39 +172,42 @@ export class HumanHelper implements FaceLandmarksHelper {
         return detections.filter(detection => detection.score > FACE_DETECTION_SCORE_THRESHOLD);
     }

-    public async detect({ image, threshold } : DetectInput): Promise<DetectOutput | undefined> {
+    public async detect({ image, threshold } : DetectInput): Promise<DetectOutput> {
         let detections;
         let faceExpression;
         let faceBox;

         this.detectionInProgress = true;

-        if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
-            detections = await this.getDetections(image);
+        detections = await this.getDetections(image);

-            if (detections) {
-                faceExpression = this.getFaceExpression({ detections });
-            }
+        if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
+            faceExpression = this.getFaceExpression(detections);
         }

         if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
-            if (!detections) {
-                detections = await this.getDetections(image);
+            // if more than one face is detected the face centering will be disabled.
+            if (this.getFaceCount(detections) > 1) {
+                this.faceDetectionTypes.splice(this.faceDetectionTypes.indexOf(DETECTION_TYPES.FACE_BOX), 1);
+
+                // full-frame face-box (0-100%) sent once so the centering can be reset
+                faceBox = {
+                    left: 0,
+                    right: 100,
+                    width: 100
+                };
+            } else {
+                faceBox = this.getFaceBox(detections, threshold);
             }
-
-            if(detections) {
-                faceBox = this.getFaceBox({
-                    detections,
-                    threshold
-                });
-            }
         }

         this.detectionInProgress = false;

         return {
             faceExpression,
-            faceBox
+            faceBox,
+            faceCount: this.getFaceCount(detections)
         }
     }
diff --git a/react/features/face-landmarks/actions.js b/react/features/face-landmarks/actions.js
index 94ff60789..1263690ad 100644
--- a/react/features/face-landmarks/actions.js
+++ b/react/features/face-landmarks/actions.js
@@ -140,8 +140,7 @@ export function loadWorker() {
         worker.postMessage({
             type: INIT_WORKER,
             baseUrl,
-            detectionTypes,
-            maxFacesDetected: faceLandmarks?.maxFacesDetected
+            detectionTypes
         });

         dispatch(startFaceLandmarksDetection());
diff --git a/react/features/face-landmarks/faceLandmarksWorker.ts b/react/features/face-landmarks/faceLandmarksWorker.ts
index 47a42fc5e..aaa7d7158 100644
--- a/react/features/face-landmarks/faceLandmarksWorker.ts
+++ b/react/features/face-landmarks/faceLandmarksWorker.ts
@@ -13,7 +13,7 @@ onmessage = async function(message: MessageEvent) {

         const detections = await helper.detect(message.data);

-        if (detections && (detections.faceBox || detections.faceExpression)) {
+        if (detections && (detections.faceBox || detections.faceExpression || detections.faceCount)) {
             self.postMessage(detections);
         }
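
Note, not part of the patch: a minimal sketch of how a main-thread consumer might use
the new faceCount field on DetectOutput. The worker URL, the message handler, and the
logging are hypothetical placeholders; the real wiring lives in loadWorker() in
react/features/face-landmarks/actions.js.

    // Shape of the messages posted by faceLandmarksWorker.ts after this patch:
    // faceBox/faceExpression may be absent, faceCount is always present.
    type DetectOutput = {
        faceExpression?: string;
        faceBox?: { left: number; right: number; width?: number };
        faceCount: number;
    };

    // Hypothetical worker bootstrap (the real app builds the URL dynamically).
    const worker = new Worker('faceLandmarksWorker.js');

    worker.onmessage = (event: MessageEvent<DetectOutput>) => {
        const { faceBox, faceExpression, faceCount } = event.data;

        // When more than one face is seen, detect() removes FACE_BOX from the
        // active detection types and posts a single full-frame box
        // (left: 0, right: 100), which a consumer can treat as "reset centering".
        if (faceCount > 1) {
            console.log(`${faceCount} faces detected, face centering disabled`);
        }

        if (faceBox) {
            // In the app this feeds the face-centering logic.
            console.log('face box (percent of frame):', faceBox);
        }

        if (faceExpression) {
            console.log('dominant expression:', faceExpression);
        }
    };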