fix(face-landmarks): filter face detections based on detection score (#11658)

* fix(face-landmarks): filter face detections based on detection score

* fix: add blank line and semicolon
This commit is contained in:
Gabriel Borlea 2022-06-10 15:19:18 +03:00 committed by GitHub
parent 2f1fe637ca
commit 9383942cb9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 34 additions and 21 deletions

View File

@ -1,7 +1,7 @@
import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm'; import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
import { Human, Config, FaceResult } from '@vladmandic/human'; import { Human, Config, FaceResult } from '@vladmandic/human';
import { DETECTION_TYPES, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants'; import { DETECTION_TYPES, FACE_DETECTION_SCORE_THRESHOLD, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';
type Detection = { type Detection = {
detections: Array<FaceResult>, detections: Array<FaceResult>,
@ -130,7 +130,7 @@ export class HumanHelper implements FaceLandmarksHelper {
if (!detections.length) { if (!detections.length) {
return; return;
} }
const faceBox: FaceBox = { const faceBox: FaceBox = {
// normalize to percentage based // normalize to percentage based
left: Math.round(Math.min(...detections.map(d => d.boxRaw[0])) * 100), left: Math.round(Math.min(...detections.map(d => d.boxRaw[0])) * 100),
@ -154,41 +154,49 @@ export class HumanHelper implements FaceLandmarksHelper {
} }
} }
/**
 * Runs the detector on the supplied frame and keeps only the face
 * detections whose score exceeds {@code FACE_DETECTION_SCORE_THRESHOLD}.
 *
 * @param image - The frame to analyse.
 * @returns The filtered face detections, or undefined when the detector
 * has not been initialized yet.
 */
async getDetections(image: ImageBitmap | ImageData) {
    if (!this.human) {
        return;
    }

    // Scope tensor allocations so intermediate tensors are released
    // once detection finishes.
    this.human.tf.engine().startScope();

    const pixels = this.human.tf.browser.fromPixels(image);
    const { face } = await this.human.detect(pixels, this.config);

    this.human.tf.engine().endScope();

    // Drop low-confidence detections.
    return face.filter(({ score }) => score > FACE_DETECTION_SCORE_THRESHOLD);
}
public async detect({ image, threshold } : DetectInput): Promise<DetectOutput | undefined> { public async detect({ image, threshold } : DetectInput): Promise<DetectOutput | undefined> {
let detections; let detections;
let faceExpression; let faceExpression;
let faceBox; let faceBox;
if (!this.human){
return;
}
this.detectionInProgress = true; this.detectionInProgress = true;
this.human.tf.engine().startScope();
const imageTensor = this.human.tf.browser.fromPixels(image);
if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) { if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
const { face } = await this.human.detect(imageTensor, this.config); detections = await this.getDetections(image);
detections = face; if (detections) {
faceExpression = this.getFaceExpression({ detections }); faceExpression = this.getFaceExpression({ detections });
}
} }
if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) { if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
if (!detections) { if (!detections) {
const { face } = await this.human.detect(imageTensor, this.config); detections = await this.getDetections(image);
detections = face;
} }
faceBox = this.getFaceBox({ if(detections) {
detections, faceBox = this.getFaceBox({
threshold detections,
}); threshold
});
}
} }
this.human.tf.engine().endScope();
this.detectionInProgress = false; this.detectionInProgress = false;
return { return {

View File

@ -55,3 +55,8 @@ export const DETECTION_TYPES = {
FACE_BOX: 'face-box', FACE_BOX: 'face-box',
FACE_EXPRESSIONS: 'face-expressions' FACE_EXPRESSIONS: 'face-expressions'
}; };
/**
 * Minimum confidence score (exclusive) a face detection must have in order
 * to be kept; detections at or below this score are filtered out.
 */
export const FACE_DETECTION_SCORE_THRESHOLD = 0.6;