feat(face-centering) implement centering of faces in a video

Config options:
faceCoordinatesSharing.enabled
faceCoordinatesSharing.threshold
faceCoordinatesSharing.captureInterval

parent 2863b05f37
commit d718d9d8fb

Makefile (12 lines changed)
@@ -4,6 +4,7 @@ DEPLOY_DIR = libs
 LIBJITSIMEET_DIR = node_modules/lib-jitsi-meet
 LIBFLAC_DIR = node_modules/libflacjs/dist/min
 OLM_DIR = node_modules/@matrix-org/olm
+TF_WASM_DIR = node_modules/@tensorflow/tfjs-backend-wasm/dist/
 RNNOISE_WASM_DIR = node_modules/rnnoise-wasm/dist
 TFLITE_WASM = react/features/stream-effects/virtual-background/vendor/tflite
 MEET_MODELS_DIR = react/features/stream-effects/virtual-background/vendor/models

@@ -29,7 +30,7 @@ clean:
 	rm -fr $(BUILD_DIR)

 .NOTPARALLEL:
-deploy: deploy-init deploy-appbundle deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-libflac deploy-olm deploy-css deploy-local deploy-facial-expressions
+deploy: deploy-init deploy-appbundle deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-libflac deploy-olm deploy-tf-wasm deploy-css deploy-local deploy-facial-expressions

 deploy-init:
 	rm -fr $(DEPLOY_DIR)

@@ -52,6 +53,8 @@ deploy-appbundle:
 		$(OUTPUT_DIR)/analytics-ga.js \
 		$(BUILD_DIR)/analytics-ga.min.js \
 		$(BUILD_DIR)/analytics-ga.min.js.map \
+		$(BUILD_DIR)/face-centering-worker.min.js \
+		$(BUILD_DIR)/face-centering-worker.min.js.map \
 		$(BUILD_DIR)/facial-expressions-worker.min.js \
 		$(BUILD_DIR)/facial-expressions-worker.min.js.map \
 		$(DEPLOY_DIR)

@@ -80,6 +83,11 @@ deploy-olm:
 		$(OLM_DIR)/olm.wasm \
 		$(DEPLOY_DIR)

+deploy-tf-wasm:
+	cp \
+		$(TF_WASM_DIR)/*.wasm \
+		$(DEPLOY_DIR)
+
 deploy-rnnoise-binary:
 	cp \
 		$(RNNOISE_WASM_DIR)/rnnoise.wasm \

@@ -109,7 +117,7 @@ deploy-local:
 	([ ! -x deploy-local.sh ] || ./deploy-local.sh)

 .NOTPARALLEL:
-dev: deploy-init deploy-css deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-libflac deploy-olm deploy-facial-expressions
+dev: deploy-init deploy-css deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-libflac deploy-olm deploy-tf-wasm deploy-facial-expressions
 	$(WEBPACK_DEV_SERVER)

 source-package:
config.js (12 lines changed)

@@ -1,3 +1,4 @@
 /* eslint-disable no-unused-vars, no-var */

 var config = {

@@ -749,6 +750,17 @@ var config = {
     // Enables displaying facial expressions in speaker stats
     // enableDisplayFacialExpressions: true,

+    // faceCoordinatesSharing: {
+    //     // Enables sharing your face coordinates. Used for centering faces within a video.
+    //     enabled: false,
+
+    //     // Minimum required face movement percentage threshold for sending new face coordinates data.
+    //     threshold: 10,
+
+    //     // Milliseconds for processing a new image capture in order to detect face coordinates if they exist.
+    //     captureInterval: 100
+    // },
+
     // Controls the percentage of automatic feedback shown to participants when callstats is enabled.
     // The default value is 100%. If set to 0, no automatic feedback will be requested
     // feedbackPercentage: 100,
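
The block above is the feature's entire configuration surface. A minimal sketch of a config.js that turns the feature on (the values mirror the commented-out defaults shown above; tune them per deployment):

    var config = {
        faceCoordinatesSharing: {
            // Detect faces in the local video and share their bounding box
            // with the other participants.
            enabled: true,

            // Broadcast new coordinates only once the face has moved by at
            // least 10% relative to the last shared position.
            threshold: 10,

            // Capture and analyze a frame of the local video every 100 ms.
            captureInterval: 100
        }
    };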
@@ -48,6 +48,7 @@ canvas,
 progress,
 video {
     display: inline-block;
+    transition: object-position 0.5s ease 0s;
     vertical-align: baseline;
 }
 audio:not([controls]) {
@@ -2,7 +2,6 @@
  * CSS styles that are specific to the filmstrip that shows the thumbnail tiles.
  */
 .tile-view {

     .remote-videos {
         align-items: center;
         box-sizing: border-box;
package-lock.json

@@ -48,6 +48,10 @@
         "@react-navigation/native": "6.0.6",
         "@react-navigation/stack": "6.0.11",
         "@svgr/webpack": "4.3.2",
+        "@tensorflow-models/blazeface": "0.0.7",
+        "@tensorflow/tfjs-backend-wasm": "3.13.0",
+        "@tensorflow/tfjs-converter": "3.13.0",
+        "@tensorflow/tfjs-core": "3.13.0",
         "@vladmandic/face-api": "1.6.4",
         "@xmldom/xmldom": "0.7.5",
         "amplitude-js": "8.2.1",

@@ -4834,6 +4838,67 @@
         "node": ">=8"
       }
     },
+    "node_modules/@tensorflow-models/blazeface": {
+      "version": "0.0.7",
+      "resolved": "https://registry.npmjs.org/@tensorflow-models/blazeface/-/blazeface-0.0.7.tgz",
+      "integrity": "sha512-+hInPkvHJoubfiXlmNuF3SCucZvU6W1PMC25IV99NSAftJUpKvLokfF93iX8UkOFQCXkPFbnLKacGfGlbjgvMw==",
+      "peerDependencies": {
+        "@tensorflow/tfjs-converter": "^3.1.0",
+        "@tensorflow/tfjs-core": "^3.1.0"
+      }
+    },
+    "node_modules/@tensorflow/tfjs-backend-cpu": {
+      "version": "3.13.0",
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-3.13.0.tgz",
+      "integrity": "sha512-POmzUoAP8HooYYTZ72O1ZYkpVZB0f+8PeAkbTxIG0oahcJccj6a0Vovp1A6xWKfljUoPlJb3jWVC++S603ZL8w==",
+      "dependencies": {
+        "@types/seedrandom": "2.4.27",
+        "seedrandom": "2.4.3"
+      },
+      "engines": {
+        "yarn": ">= 1.3.2"
+      },
+      "peerDependencies": {
+        "@tensorflow/tfjs-core": "3.13.0"
+      }
+    },
+    "node_modules/@tensorflow/tfjs-backend-wasm": {
+      "version": "3.13.0",
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-wasm/-/tfjs-backend-wasm-3.13.0.tgz",
+      "integrity": "sha512-h5kNS4xvljoySzfcFwqbdFB6QZGR06IA9/Xq/PjBeZt18XEoJGqKHbOCYupmUlr5pxo/gnXzPhAC2h4SfZXPXw==",
+      "dependencies": {
+        "@tensorflow/tfjs-backend-cpu": "3.13.0",
+        "@types/emscripten": "~0.0.34"
+      },
+      "peerDependencies": {
+        "@tensorflow/tfjs-core": "3.13.0"
+      }
+    },
+    "node_modules/@tensorflow/tfjs-converter": {
+      "version": "3.13.0",
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-3.13.0.tgz",
+      "integrity": "sha512-H2VpDTv9Ve0HBt7ttzz46DmnsPaiT0B+yJjVH3NebGZbgY9C8boBgJIsdyqfiqEWBS3WxF8h4rh58Hv5XXMgaQ==",
+      "peerDependencies": {
+        "@tensorflow/tfjs-core": "3.13.0"
+      }
+    },
+    "node_modules/@tensorflow/tfjs-core": {
+      "version": "3.13.0",
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-3.13.0.tgz",
+      "integrity": "sha512-18qBEVIB/4u2OUK9nA5P1XT3e3LyarElD1UKNSNDpnMLxhLTUVZaCR71eHJcpl9wP2Q0cciaTJCTpJdPv1tNDQ==",
+      "dependencies": {
+        "@types/long": "^4.0.1",
+        "@types/offscreencanvas": "~2019.3.0",
+        "@types/seedrandom": "2.4.27",
+        "@types/webgl-ext": "0.0.30",
+        "long": "4.0.0",
+        "node-fetch": "~2.6.1",
+        "seedrandom": "2.4.3"
+      },
+      "engines": {
+        "yarn": ">= 1.3.2"
+      }
+    },
     "node_modules/@trysound/sax": {
       "version": "0.2.0",
       "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz",

@@ -4880,6 +4945,11 @@
         "@types/node": "*"
       }
     },
+    "node_modules/@types/emscripten": {
+      "version": "0.0.34",
+      "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-0.0.34.tgz",
+      "integrity": "sha512-QSb9ojDincskc+uKMI0KXp8e1NALFINCrMlp8VGKGcTSxeEyRTTKyjWw75NYrCZHUsVEEEpr1tYHpbtaC++/sQ=="
+    },
     "node_modules/@types/eslint": {
       "version": "8.4.1",
       "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz",

@@ -4984,6 +5054,11 @@
       "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=",
       "dev": true
     },
+    "node_modules/@types/long": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz",
+      "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w=="
+    },
     "node_modules/@types/mime": {
       "version": "1.3.2",
       "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz",

@@ -4995,6 +5070,11 @@
       "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.19.tgz",
       "integrity": "sha512-PfeQhvcMR4cPFVuYfBN4ifG7p9c+Dlh3yUZR6k+5yQK7wX3gDgVxBly4/WkBRs9x4dmcy1TVl08SY67wwtEvmA=="
     },
+    "node_modules/@types/offscreencanvas": {
+      "version": "2019.3.0",
+      "resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz",
+      "integrity": "sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q=="
+    },
     "node_modules/@types/parse-json": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz",

@@ -5051,6 +5131,11 @@
       "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz",
       "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew=="
     },
+    "node_modules/@types/seedrandom": {
+      "version": "2.4.27",
+      "resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.27.tgz",
+      "integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE="
+    },
     "node_modules/@types/serve-index": {
       "version": "1.9.1",
       "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz",

@@ -5079,6 +5164,11 @@
         "@types/node": "*"
       }
     },
+    "node_modules/@types/webgl-ext": {
+      "version": "0.0.30",
+      "resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.30.tgz",
+      "integrity": "sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg=="
+    },
     "node_modules/@types/ws": {
       "version": "8.2.3",
       "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.2.3.tgz",

@@ -11694,6 +11784,11 @@
         "logkitty": "bin/logkitty.js"
       }
     },
+    "node_modules/long": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
+      "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA=="
+    },
     "node_modules/loose-envify": {
       "version": "1.4.0",
       "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",

@@ -16270,6 +16365,11 @@
         "sdp-verify": "checker.js"
       }
     },
+    "node_modules/seedrandom": {
+      "version": "2.4.3",
+      "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz",
+      "integrity": "sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw="
+    },
     "node_modules/select-hose": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz",

@@ -22857,6 +22957,48 @@
        "loader-utils": "^1.2.3"
      }
    },
+    "@tensorflow-models/blazeface": {
+      "version": "0.0.7",
+      "resolved": "https://registry.npmjs.org/@tensorflow-models/blazeface/-/blazeface-0.0.7.tgz",
+      "integrity": "sha512-+hInPkvHJoubfiXlmNuF3SCucZvU6W1PMC25IV99NSAftJUpKvLokfF93iX8UkOFQCXkPFbnLKacGfGlbjgvMw=="
+    },
+    "@tensorflow/tfjs-backend-cpu": {
+      "version": "3.13.0",
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-3.13.0.tgz",
+      "integrity": "sha512-POmzUoAP8HooYYTZ72O1ZYkpVZB0f+8PeAkbTxIG0oahcJccj6a0Vovp1A6xWKfljUoPlJb3jWVC++S603ZL8w==",
+      "requires": {
+        "@types/seedrandom": "2.4.27",
+        "seedrandom": "2.4.3"
+      }
+    },
+    "@tensorflow/tfjs-backend-wasm": {
+      "version": "3.13.0",
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-wasm/-/tfjs-backend-wasm-3.13.0.tgz",
+      "integrity": "sha512-h5kNS4xvljoySzfcFwqbdFB6QZGR06IA9/Xq/PjBeZt18XEoJGqKHbOCYupmUlr5pxo/gnXzPhAC2h4SfZXPXw==",
+      "requires": {
+        "@tensorflow/tfjs-backend-cpu": "3.13.0",
+        "@types/emscripten": "~0.0.34"
+      }
+    },
+    "@tensorflow/tfjs-converter": {
+      "version": "3.13.0",
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-3.13.0.tgz",
+      "integrity": "sha512-H2VpDTv9Ve0HBt7ttzz46DmnsPaiT0B+yJjVH3NebGZbgY9C8boBgJIsdyqfiqEWBS3WxF8h4rh58Hv5XXMgaQ=="
+    },
+    "@tensorflow/tfjs-core": {
+      "version": "3.13.0",
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-3.13.0.tgz",
+      "integrity": "sha512-18qBEVIB/4u2OUK9nA5P1XT3e3LyarElD1UKNSNDpnMLxhLTUVZaCR71eHJcpl9wP2Q0cciaTJCTpJdPv1tNDQ==",
+      "requires": {
+        "@types/long": "^4.0.1",
+        "@types/offscreencanvas": "~2019.3.0",
+        "@types/seedrandom": "2.4.27",
+        "@types/webgl-ext": "0.0.30",
+        "long": "4.0.0",
+        "node-fetch": "~2.6.1",
+        "seedrandom": "2.4.3"
+      }
+    },
     "@trysound/sax": {
       "version": "0.2.0",
       "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz",

@@ -22900,6 +23042,11 @@
        "@types/node": "*"
      }
    },
+    "@types/emscripten": {
+      "version": "0.0.34",
+      "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-0.0.34.tgz",
+      "integrity": "sha512-QSb9ojDincskc+uKMI0KXp8e1NALFINCrMlp8VGKGcTSxeEyRTTKyjWw75NYrCZHUsVEEEpr1tYHpbtaC++/sQ=="
+    },
     "@types/eslint": {
       "version": "8.4.1",
       "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz",

@@ -23004,6 +23151,11 @@
       "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=",
       "dev": true
     },
+    "@types/long": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz",
+      "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w=="
+    },
     "@types/mime": {
       "version": "1.3.2",
       "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz",

@@ -23015,6 +23167,11 @@
       "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.19.tgz",
       "integrity": "sha512-PfeQhvcMR4cPFVuYfBN4ifG7p9c+Dlh3yUZR6k+5yQK7wX3gDgVxBly4/WkBRs9x4dmcy1TVl08SY67wwtEvmA=="
     },
+    "@types/offscreencanvas": {
+      "version": "2019.3.0",
+      "resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz",
+      "integrity": "sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q=="
+    },
     "@types/parse-json": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz",

@@ -23071,6 +23228,11 @@
       "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz",
       "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew=="
     },
+    "@types/seedrandom": {
+      "version": "2.4.27",
+      "resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.27.tgz",
+      "integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE="
+    },
     "@types/serve-index": {
       "version": "1.9.1",
       "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz",

@@ -23099,6 +23261,11 @@
        "@types/node": "*"
      }
    },
+    "@types/webgl-ext": {
+      "version": "0.0.30",
+      "resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.30.tgz",
+      "integrity": "sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg=="
+    },
     "@types/ws": {
       "version": "8.2.3",
       "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.2.3.tgz",

@@ -28240,6 +28407,11 @@
        "yargs": "^15.1.0"
      }
    },
+    "long": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
+      "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA=="
+    },
     "loose-envify": {
       "version": "1.4.0",
       "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",

@@ -31698,6 +31870,11 @@
       "resolved": "https://registry.npmjs.org/sdp-transform/-/sdp-transform-2.3.0.tgz",
       "integrity": "sha1-V6lXWUIEHYV3qGnXx01MOgvYiPY="
     },
+    "seedrandom": {
+      "version": "2.4.3",
+      "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz",
+      "integrity": "sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw="
+    },
     "select-hose": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz",
package.json

@@ -53,6 +53,10 @@
     "@react-navigation/native": "6.0.6",
     "@react-navigation/stack": "6.0.11",
     "@svgr/webpack": "4.3.2",
+    "@tensorflow-models/blazeface": "0.0.7",
+    "@tensorflow/tfjs-backend-wasm": "3.13.0",
+    "@tensorflow/tfjs-converter": "3.13.0",
+    "@tensorflow/tfjs-core": "3.13.0",
     "@vladmandic/face-api": "1.6.4",
     "@xmldom/xmldom": "0.7.5",
     "amplitude-js": "8.2.1",
@@ -7,6 +7,8 @@ module.exports = {
         '.eslintrc-react-native.js'
     ],
     'rules': {
+        'flowtype/no-types-missing-file-annotation': 0,
+
         // XXX remove this eventually.
         'react/jsx-indent-props': 0
     },
@@ -20,6 +20,7 @@ import '../shared-video/middleware';
 import '../settings/middleware';
 import '../talk-while-muted/middleware';
 import '../virtual-background/middleware';
+import '../face-centering/middleware';
 import '../facial-recognition/middleware';

 import './middlewares.any';
@@ -2,6 +2,7 @@

 import '../base/devices/reducer';
 import '../e2ee/reducer';
+import '../face-centering/reducer';
 import '../facial-recognition/reducer';
 import '../feedback/reducer';
 import '../local-recording/reducer';
@@ -153,6 +153,7 @@ export default [
     'enableTcc',
     'enableAutomaticUrlCopy',
     'etherpad_base',
+    'faceCoordinatesSharing',
     'failICE',
     'feedbackPercentage',
     'fileRecordingsEnabled',
react/features/face-centering/actionTypes.js (new file)

@@ -0,0 +1,39 @@
/**
 * Redux action type dispatched in order to set the time interval in which
 * the message to the face centering worker will be sent.
 *
 * {
 *     type: SET_DETECTION_TIME_INTERVAL,
 *     time: number
 * }
 */
export const SET_DETECTION_TIME_INTERVAL = 'SET_DETECTION_TIME_INTERVAL';

/**
 * Redux action type dispatched in order to set recognition active in the state.
 *
 * {
 *     type: START_FACE_RECOGNITION
 * }
 */
export const START_FACE_RECOGNITION = 'START_FACE_RECOGNITION';

/**
 * Redux action type dispatched in order to set recognition inactive in the state.
 *
 * {
 *     type: STOP_FACE_RECOGNITION
 * }
 */
export const STOP_FACE_RECOGNITION = 'STOP_FACE_RECOGNITION';

/**
 * Redux action type dispatched in order to update coordinates of a detected face.
 *
 * {
 *     type: UPDATE_FACE_COORDINATES,
 *     faceBox: Object({ left, bottom, right, top }),
 *     id: string
 * }
 */
export const UPDATE_FACE_COORDINATES = 'UPDATE_FACE_COORDINATES';
react/features/face-centering/actions.js (new file)

@@ -0,0 +1,139 @@
import 'image-capture';

import { getCurrentConference } from '../base/conference';
import { getLocalParticipant, getParticipantCount } from '../base/participants';
import { getLocalVideoTrack } from '../base/tracks';
import { getBaseUrl } from '../base/util';
import '../facial-recognition/createImageBitmap';

import {
    START_FACE_RECOGNITION,
    STOP_FACE_RECOGNITION,
    UPDATE_FACE_COORDINATES
} from './actionTypes';
import {
    FACE_BOX_MESSAGE,
    SEND_IMAGE_INTERVAL_MS
} from './constants';
import { sendDataToWorker, sendFaceBoxToParticipants } from './functions';
import logger from './logger';

/**
 * Interval object for sending new image data to the worker.
 */
let interval;

/**
 * Object containing an image capture of the local track.
 */
let imageCapture;

/**
 * Object where the face centering worker is stored.
 */
let worker;

/**
 * Loads the worker.
 *
 * @returns {Function}
 */
export function loadWorker() {
    return async function(dispatch: Function, getState: Function) {
        if (navigator.product === 'ReactNative') {
            logger.warn('Unsupported environment for face centering');

            return;
        }

        const baseUrl = getBaseUrl();
        let workerUrl = `${baseUrl}libs/face-centering-worker.min.js`;

        const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });

        workerUrl = window.URL.createObjectURL(workerBlob);
        worker = new Worker(workerUrl, { name: 'Face Centering Worker' });
        worker.onmessage = function(e: Object) {
            const { type, value } = e.data;

            // receives a message with the face(s) bounding box.
            if (type === FACE_BOX_MESSAGE) {
                const state = getState();
                const conference = getCurrentConference(state);
                const localParticipant = getLocalParticipant(state);

                if (getParticipantCount(state) > 1) {
                    sendFaceBoxToParticipants(conference, value);
                }

                dispatch({
                    type: UPDATE_FACE_COORDINATES,
                    faceBox: value,
                    id: localParticipant.id
                });
            }
        };

        dispatch(startFaceRecognition());
    };
}

/**
 * Starts the recognition and detection of face position.
 *
 * @param {Track | undefined} track - Track for which to start detecting faces.
 *
 * @returns {Function}
 */
export function startFaceRecognition(track) {
    return async function(dispatch: Function, getState: Function) {
        if (!worker) {
            return;
        }
        const state = getState();
        const { recognitionActive } = state['features/face-centering'];

        if (recognitionActive) {
            logger.log('Face centering already active.');

            return;
        }

        const localVideoTrack = track || getLocalVideoTrack(state['features/base/tracks']);

        if (!localVideoTrack) {
            logger.warn('Face centering is disabled due to missing local track.');

            return;
        }

        dispatch({ type: START_FACE_RECOGNITION });
        logger.log('Start face recognition');

        const stream = localVideoTrack.jitsiTrack.getOriginalStream();
        const firstVideoTrack = stream.getVideoTracks()[0];

        imageCapture = new ImageCapture(firstVideoTrack);
        const { disableLocalVideoFlip, faceCoordinatesSharing } = state['features/base/config'];

        interval = setInterval(() => {
            sendDataToWorker(worker, imageCapture, faceCoordinatesSharing?.threshold, !disableLocalVideoFlip);
        }, faceCoordinatesSharing?.captureInterval || SEND_IMAGE_INTERVAL_MS);
    };
}

/**
 * Stops the recognition and detection of face position.
 *
 * @returns {Function}
 */
export function stopFaceRecognition() {
    return function(dispatch: Function) {
        clearInterval(interval);
        interval = null;
        imageCapture = null;

        dispatch({ type: STOP_FACE_RECOGNITION });
        logger.log('Stop face recognition');
    };
}
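
A note on the Blob indirection in loadWorker() above: new Worker(url) requires a same-origin script URL, while importScripts() inside a worker may fetch cross-origin. Wrapping the real bundle URL in a same-origin blob: script therefore lets the worker load from wherever the deployment serves its libs. A minimal sketch of the pattern in isolation (the CDN URL is hypothetical):

    // Hypothetical deployment where the bundles live on a CDN.
    const scriptUrl = 'https://cdn.example.com/libs/face-centering-worker.min.js';

    // A same-origin blob: URL whose only job is to importScripts() the real file.
    const blob = new Blob([ `importScripts("${scriptUrl}");` ], { type: 'application/javascript' });
    const worker = new Worker(URL.createObjectURL(blob), { name: 'Face Centering Worker' });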
react/features/face-centering/constants.js (new file)

@@ -0,0 +1,20 @@
/**
 * Type of message sent from the main thread to the worker that contains image data and
 * will trigger a response message from the worker containing the detected face(s) bounding box if any.
 */
export const DETECT_FACE_BOX = 'DETECT_FACE_BOX';

/**
 * Type of event sent on the data channel.
 */
export const FACE_BOX_EVENT_TYPE = 'face-box';

/**
 * Type of message sent from the worker to the main thread that contains a face box or undefined.
 */
export const FACE_BOX_MESSAGE = 'face-box';

/**
 * Milliseconds interval value for sending new image data to the worker.
 */
export const SEND_IMAGE_INTERVAL_MS = 100;
react/features/face-centering/faceCenteringWorker.js (new file)

@@ -0,0 +1,107 @@
import * as blazeface from '@tensorflow-models/blazeface';
import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
import * as tf from '@tensorflow/tfjs-core';

import { FACE_BOX_MESSAGE, DETECT_FACE_BOX } from './constants';

/**
 * Indicates whether an init error occurred.
 */
let initError = false;

/**
 * The blazeface model.
 */
let model;

/**
 * A flag that indicates whether the tensorflow backend is set or not.
 */
let backendSet = false;

/**
 * Flag for indicating whether an init operation (e.g. setting the tf backend) is in progress.
 */
let initInProgress = false;

/**
 * Callbacks queue for avoiding overlapping executions of face detection.
 */
const queue = [];

/**
 * Contains the last valid face bounding box (passes threshold validation) which was sent to the main process.
 */
let lastValidFaceBox;

const detect = async message => {
    const { baseUrl, imageBitmap, isHorizontallyFlipped, threshold } = message.data;

    if (initInProgress || initError) {
        return;
    }

    if (!backendSet) {
        initInProgress = true;
        setWasmPaths(`${baseUrl}libs/`);

        try {
            await tf.setBackend('wasm');
        } catch (err) {
            initError = true;

            return;
        }

        backendSet = true;
        initInProgress = false;
    }

    // load face detection model
    if (!model) {
        try {
            model = await blazeface.load();
        } catch (err) {
            initError = true;

            return;
        }
    }

    tf.engine().startScope();

    const image = tf.browser.fromPixels(imageBitmap);
    const detections = await model.estimateFaces(image, false, isHorizontallyFlipped, false);

    tf.engine().endScope();

    let faceBox;

    if (detections.length) {
        faceBox = {
            // normalize to percentage based
            left: Math.round(Math.min(...detections.map(d => d.topLeft[0])) * 100 / imageBitmap.width),
            right: Math.round(Math.max(...detections.map(d => d.bottomRight[0])) * 100 / imageBitmap.width),
            top: Math.round(Math.min(...detections.map(d => d.topLeft[1])) * 100 / imageBitmap.height),
            bottom: Math.round(Math.max(...detections.map(d => d.bottomRight[1])) * 100 / imageBitmap.height)
        };

        if (lastValidFaceBox && Math.abs(lastValidFaceBox.left - faceBox.left) < threshold) {
            return;
        }

        lastValidFaceBox = faceBox;

        self.postMessage({
            type: FACE_BOX_MESSAGE,
            value: faceBox
        });
    }
};

onmessage = function(message) {
    if (message.data.id === DETECT_FACE_BOX) {
        queue.push(() => detect(message));
        queue.shift()();
    }
};
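
To make the percentage normalization above concrete, a worked example with made-up numbers, for a single detection on a 640x480 frame:

    // Hypothetical blazeface result: topLeft = [128, 96], bottomRight = [320, 288].
    const faceBox = {
        left: Math.round(128 * 100 / 640),  // 20 (% from the left edge)
        right: Math.round(320 * 100 / 640), // 50
        top: Math.round(96 * 100 / 480),    // 20 (% from the top edge)
        bottom: Math.round(288 * 100 / 480) // 60
    };

    // With threshold = 10, the next detection is only posted back to the main
    // thread once its `left` value differs from 20 by 10 points or more.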
react/features/face-centering/functions.js (new file)

@@ -0,0 +1,96 @@
import { getBaseUrl } from '../base/util';

import { FACE_BOX_EVENT_TYPE, DETECT_FACE_BOX } from './constants';
import logger from './logger';

/**
 * Sends the face box to all the other participants.
 *
 * @param {Object} conference - The current conference.
 * @param {Object} faceBox - Face box to be sent.
 * @returns {void}
 */
export function sendFaceBoxToParticipants(
        conference: Object,
        faceBox: Object
): void {
    try {
        conference.sendEndpointMessage('', {
            type: FACE_BOX_EVENT_TYPE,
            faceBox
        });
    } catch (err) {
        logger.warn('Could not broadcast the face box to the other participants', err);
    }
}

/**
 * Sends the image data from the track in the image capture to the face centering worker.
 *
 * @param {Worker} worker - Face centering worker.
 * @param {Object} imageCapture - Image capture that contains the current track.
 * @param {number} threshold - Movement threshold as percentage for sharing face coordinates.
 * @param {boolean} isHorizontallyFlipped - Indicates whether the image is horizontally flipped.
 * @returns {Promise<void>}
 */
export async function sendDataToWorker(
        worker: Worker,
        imageCapture: Object,
        threshold: number = 10,
        isHorizontallyFlipped = true
): Promise<void> {
    if (imageCapture === null || imageCapture === undefined) {
        return;
    }

    let imageBitmap;

    try {
        imageBitmap = await imageCapture.grabFrame();
    } catch (err) {
        logger.warn(err);

        return;
    }

    worker.postMessage({
        id: DETECT_FACE_BOX,
        baseUrl: getBaseUrl(),
        imageBitmap,
        threshold,
        isHorizontallyFlipped
    });
}

/**
 * Gets the face box for a participant id.
 *
 * @param {string} id - The participant id.
 * @param {Object} state - The redux state.
 * @returns {Object}
 */
export function getFaceBoxForId(id: string, state: Object) {
    return state['features/face-centering'].faceBoxes[id];
}

/**
 * Gets the video object position for a participant id.
 *
 * @param {Object} state - The redux state.
 * @param {string} id - The participant id.
 * @returns {string} - CSS object-position in the shape of '{horizontalPercentage}% {verticalPercentage}%'.
 */
export function getVideoObjectPosition(state: Object, id: string) {
    const faceBox = getFaceBoxForId(id, state);

    if (faceBox) {
        const { left, right, top, bottom } = faceBox;

        const horizontalPos = 100 - Math.round((left + right) / 2);
        const verticalPos = 100 - Math.round((top + bottom) / 2);

        return `${horizontalPos}% ${verticalPos}%`;
    }

    return '50% 50%';
}
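
Continuing the hypothetical face box from the worker example ({ left: 20, right: 50, top: 20, bottom: 60 }), getVideoObjectPosition works out as:

    const horizontalPos = 100 - Math.round((20 + 50) / 2); // 100 - 35 = 65
    const verticalPos = 100 - Math.round((20 + 60) / 2);   // 100 - 40 = 60

    // => 'object-position: 65% 60%' on the <video> element, panning the
    // cover-cropped video toward the detected face. The
    // `transition: object-position 0.5s ease 0s` rule added earlier in this
    // diff animates the pan between successive face boxes.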
react/features/face-centering/logger.js (new file)

@@ -0,0 +1,3 @@
import { getLogger } from '../base/logging/functions';

export default getLogger('features/face-centering');
react/features/face-centering/middleware.js (new file)

@@ -0,0 +1,103 @@
import {
    CONFERENCE_JOINED,
    CONFERENCE_WILL_LEAVE,
    getCurrentConference
} from '../base/conference';
import { JitsiConferenceEvents } from '../base/lib-jitsi-meet';
import { MiddlewareRegistry } from '../base/redux';
import { TRACK_UPDATED, TRACK_REMOVED, TRACK_ADDED } from '../base/tracks';

import { UPDATE_FACE_COORDINATES } from './actionTypes';
import {
    loadWorker,
    stopFaceRecognition,
    startFaceRecognition
} from './actions';
import { FACE_BOX_EVENT_TYPE } from './constants';

MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
    const state = getState();
    const { faceCoordinatesSharing } = state['features/base/config'];

    if (!getCurrentConference(state)) {
        return next(action);
    }

    if (action.type === CONFERENCE_JOINED) {
        if (faceCoordinatesSharing?.enabled) {
            dispatch(loadWorker());
        }

        // allow using remote face centering data when local face centering is not enabled
        action.conference.on(
            JitsiConferenceEvents.ENDPOINT_MESSAGE_RECEIVED,
            (participant, eventData) => {
                if (!participant || !eventData) {
                    return;
                }

                if (eventData.type === FACE_BOX_EVENT_TYPE) {
                    dispatch({
                        type: UPDATE_FACE_COORDINATES,
                        faceBox: eventData.faceBox,
                        id: participant.getId()
                    });
                }
            });

        return next(action);
    }

    if (!faceCoordinatesSharing?.enabled) {
        return next(action);
    }

    switch (action.type) {
    case CONFERENCE_WILL_LEAVE: {
        dispatch(stopFaceRecognition());

        return next(action);
    }
    case TRACK_ADDED: {
        const { jitsiTrack: { isLocal, videoType } } = action.track;

        if (videoType === 'camera' && isLocal()) {
            // need to pass this since the track is not yet added in the store
            dispatch(startFaceRecognition(action.track));
        }

        return next(action);
    }
    case TRACK_UPDATED: {
        const { jitsiTrack: { isLocal, videoType } } = action.track;

        if (videoType !== 'camera' || !isLocal()) {
            return next(action);
        }

        const { muted } = action.track;

        if (muted !== undefined) {
            // addresses video mute state changes
            if (muted) {
                dispatch(stopFaceRecognition());
            } else {
                dispatch(startFaceRecognition());
            }
        }

        return next(action);
    }
    case TRACK_REMOVED: {
        const { jitsiTrack: { isLocal, videoType } } = action.track;

        if (videoType === 'camera' && isLocal()) {
            dispatch(stopFaceRecognition());
        }

        return next(action);
    }
    }

    return next(action);
});
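
For reference, the data-channel payload that connects sendFaceBoxToParticipants() to the ENDPOINT_MESSAGE_RECEIVED handler above is just the event type plus the box (coordinate values are illustrative):

    const eventData = {
        type: 'face-box', // FACE_BOX_EVENT_TYPE
        faceBox: { left: 20, right: 50, top: 20, bottom: 60 }
    };

    // The handler keys the box by the sender's participant id, so receivers can
    // center remote videos even with their own faceCoordinatesSharing disabled.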
react/features/face-centering/reducer.js (new file)

@@ -0,0 +1,55 @@
import { ReducerRegistry } from '../base/redux';

import {
    START_FACE_RECOGNITION,
    STOP_FACE_RECOGNITION,
    UPDATE_FACE_COORDINATES
} from './actionTypes';

/**
 * The default state object.
 */
const defaultState = {
    /**
     * Map of participant ids to their respective face box, expressed as left, right, bottom, top percentages.
     * The percentages indicate the distance of the detected face starting edge (top or left) to the corresponding edge.
     *
     * Examples:
     * 70% left indicates a 70% distance from the left edge of the video to the left edge of the detected face.
     * 70% right indicates a 70% distance from the right edge of the video to the left edge of the detected face.
     * 30% top indicates a 30% distance from the top edge of the video to the top edge of the detected face.
     * 30% bottom indicates a 30% distance from the bottom edge of the video to the top edge of the detected face.
     */
    faceBoxes: {},

    /**
     * Flag indicating whether face recognition is currently running.
     */
    recognitionActive: false
};

ReducerRegistry.register('features/face-centering', (state = defaultState, action) => {
    switch (action.type) {
    case UPDATE_FACE_COORDINATES: {
        return {
            ...state,
            faceBoxes: {
                ...state.faceBoxes,
                [action.id]: action.faceBox
            }
        };
    }
    case START_FACE_RECOGNITION: {
        return {
            ...state,
            recognitionActive: true
        };
    }

    case STOP_FACE_RECOGNITION: {
        return defaultState;
    }
    }

    return state;
});
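
A sketch of the resulting state slice after coordinates have arrived for a local and a remote participant (ids and percentages are made up):

    // state['features/face-centering']
    const slice = {
        faceBoxes: {
            'abcd1234': { left: 20, right: 50, top: 20, bottom: 60 }, // local detection
            'ef567890': { left: 40, right: 70, top: 10, bottom: 50 }  // received via data channel
        },
        recognitionActive: true
    };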
react/features/facial-recognition/actions.js

@@ -1,9 +1,10 @@
 // @flow
-import { getLocalVideoTrack } from '../base/tracks';

 import 'image-capture';
 import './createImageBitmap';

+import { getLocalVideoTrack } from '../base/tracks';
+import { getBaseUrl } from '../base/util';

 import {
     ADD_FACIAL_EXPRESSION,
     ADD_TO_FACIAL_EXPRESSIONS_BUFFER,

@@ -65,15 +66,9 @@ export function loadWorker() {

             return;
         }
-        let baseUrl = '';
-        const app: Object = document.querySelector('script[src*="app.bundle.min.js"]');
-
-        if (app) {
-            const idx = app.src.lastIndexOf('/');
-
-            baseUrl = `${app.src.substring(0, idx)}/`;
-        }
-        let workerUrl = `${baseUrl}facial-expressions-worker.min.js`;
+        const baseUrl = getBaseUrl();
+        let workerUrl = `${baseUrl}libs/facial-expressions-worker.min.js`;

         const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });

@@ -132,9 +127,10 @@
  */
 export function startFacialRecognition() {
     return async function(dispatch: Function, getState: Function) {
-        if (worker === undefined || worker === null) {
+        if (!worker) {
             return;
         }

         const state = getState();
         const { recognitionActive } = state['features/facial-recognition'];
react/features/filmstrip/components/web/Thumbnail.js

@@ -23,6 +23,7 @@ import {
     getTrackByMediaTypeAndParticipant,
     updateLastTrackVideoMediaEvent
 } from '../../../base/tracks';
+import { getVideoObjectPosition } from '../../../face-centering/functions';
 import { PresenceLabel } from '../../../presence-status';
 import { getCurrentLayout, LAYOUTS } from '../../../video-layout';
 import {

@@ -165,6 +166,11 @@ export type Props = {|
      */
     _raisedHand: boolean,

+    /**
+     * The video object position for the participant.
+     */
+    _videoObjectPosition: string,
+
     /**
      * The video track that will be displayed in the thumbnail.
      */

@@ -479,6 +485,7 @@ class Thumbnail extends Component<Props, State> {
             _isHidden,
             _isScreenSharing,
             _participant,
+            _videoObjectPosition,
             _videoTrack,
             _width,
             horizontalOffset,

@@ -522,6 +529,10 @@ class Thumbnail extends Component<Props, State> {
             };
         }

+        if (videoStyles.objectFit === 'cover') {
+            videoStyles.objectPosition = _videoObjectPosition;
+        }
+
         styles = {
             thumbnail: {
                 ...style,

@@ -1010,6 +1021,7 @@ function _mapStateToProps(state, ownProps): Object {
         _localFlipX: Boolean(localFlipX),
         _participant: participant,
         _raisedHand: hasRaisedHand(participant),
+        _videoObjectPosition: getVideoObjectPosition(state, participant.id),
         _videoTrack,
         ...size
     };
webpack.config.js

@@ -384,6 +384,16 @@ module.exports = (_env, argv) => {
         ],
         performance: getPerformanceHints(perfHintOptions, 35 * 1024)
     }),
+    Object.assign({}, config, {
+        entry: {
+            'face-centering-worker': './react/features/face-centering/faceCenteringWorker.js'
+        },
+        plugins: [
+            ...config.plugins,
+            ...getBundleAnalyzerPlugin(analyzeBundle, 'face-centering-worker')
+        ],
+        performance: getPerformanceHints(perfHintOptions, 500 * 1024)
+    }),
     Object.assign({}, config, {
         entry: {
             'facial-expressions-worker': './react/features/facial-recognition/facialExpressionsWorker.js'