feat(face-landmarks): add face landmarks timeline (#12561)

* feat(face-landmarks): add face landmarks timeline

fixes after rebase

* fixes after rebase compiling and linting

* fix: change keyboard shorcut for participants stats

* fix: label for emotions switch

* fix: linting issues

* code review changes

* fix linting issues

* code review changes 2

* fix typo
This commit is contained in:
Gabriel Borlea 2022-11-22 15:56:37 +02:00 committed by GitHub
parent 3081b41d0d
commit 4b969cf4ab
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
51 changed files with 1628 additions and 800 deletions

2
globals.native.d.ts vendored
View File

@ -23,6 +23,8 @@ interface IWindow {
onerror: (event: string, source: any, lineno: any, colno: any, e: Error) => void;
onunhandledrejection: (event: any) => void;
setInterval: typeof setInterval;
clearInterval: typeof clearInterval;
setTimeout: typeof setTimeout;
clearTimeout: typeof clearTimeout;
setImmediate: typeof setImmediate;

View File

@ -365,7 +365,7 @@
"mute": "Mute or unmute your microphone",
"pushToTalk": "Press to transmit",
"raiseHand": "Raise or lower your hand",
"showSpeakerStats": "Show speaker stats",
"showSpeakerStats": "Show participants stats",
"toggleChat": "Open or close the chat",
"toggleFilmstrip": "Show or hide video thumbnails",
"toggleScreensharing": "Switch between camera and screen sharing",
@ -579,7 +579,7 @@
"minutes": "{{count}}m",
"name": "Name",
"seconds": "{{count}}s",
"speakerStats": "Speaker Stats",
"speakerStats": "Participants Stats",
"speakerTime": "Speaker Time"
},
"startupoverlay": {
@ -626,7 +626,7 @@
"sharedvideo": "Toggle video sharing",
"shortcuts": "Toggle shortcuts",
"show": "Show on stage",
"speakerStats": "Toggle speaker statistics",
"speakerStats": "Toggle participants statistics",
"tileView": "Toggle tile view",
"toggleCamera": "Toggle camera",
"videoblur": "",
@ -662,7 +662,7 @@
"shareRoom": "Invite someone",
"sharedvideo": "Share video",
"shortcuts": "View shortcuts",
"speakerStats": "Speaker stats",
"speakerStats": "Participants stats",
"startScreenSharing": "Start screen sharing",
"startSubtitles": "Start subtitles",
"startvideoblur": "",

View File

@ -511,7 +511,7 @@
"mute": "Mute or unmute your microphone",
"pushToTalk": "Push to talk",
"raiseHand": "Raise or lower your hand",
"showSpeakerStats": "Show speaker stats",
"showSpeakerStats": "Show participants stats",
"toggleChat": "Open or close the chat",
"toggleFilmstrip": "Show or hide video thumbnails",
"toggleParticipantsPane": "Show or hide the participants pane",
@ -1038,7 +1038,7 @@
"sad": "Sad",
"search": "Search",
"seconds": "{{count}}s",
"speakerStats": "Speaker Stats",
"speakerStats": "Participants Stats",
"speakerTime": "Speaker Time",
"surprised": "Surprised"
},
@ -1119,7 +1119,7 @@
"shortcuts": "Toggle shortcuts",
"show": "Show on stage",
"silence": "Silence",
"speakerStats": "Toggle speaker statistics",
"speakerStats": "Toggle participants statistics",
"surprised": "Surprised",
"tileView": "Toggle tile view",
"toggleCamera": "Toggle camera",
@ -1206,7 +1206,7 @@
"shortcuts": "View shortcuts",
"showWhiteboard": "Show whiteboard",
"silence": "Silence",
"speakerStats": "Speaker stats",
"speakerStats": "Participants stats",
"startScreenSharing": "Start screen sharing",
"startSubtitles": "Subtitles • {{language}}",
"stopAudioSharing": "Stop audio sharing",

View File

@ -1,4 +1,6 @@
import { FaceLandmarks } from '../../face-landmarks/types';
import { LOCKED_LOCALLY, LOCKED_REMOTELY } from '../../room-lock/constants';
import { ISpeakerStats } from '../../speaker-stats/reducer';
import { CONNECTION_WILL_CONNECT, SET_LOCATION_URL } from '../connection/actionTypes';
import { JitsiConferenceErrors } from '../lib-jitsi-meet';
import ReducerRegistry from '../redux/ReducerRegistry';
@ -53,6 +55,7 @@ export interface IJitsiConference {
getMeetingUniqueId: Function;
getParticipantById: Function;
getParticipants: Function;
getSpeakerStats: () => ISpeakerStats;
grantOwner: Function;
isAVModerationSupported: Function;
isCallstatsEnabled: Function;
@ -74,6 +77,7 @@ export interface IJitsiConference {
sendCommand: Function;
sendCommandOnce: Function;
sendEndpointMessage: Function;
sendFaceLandmarks: (faceLandmarks: FaceLandmarks) => void;
sendFeedback: Function;
sendLobbyMessage: Function;
sessionId: string;

View File

@ -0,0 +1,10 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="10" cy="10" r="10" fill="url(#paint0_radial_72_1897)"/>
<path opacity="0.8" fill-rule="evenodd" clip-rule="evenodd" d="M5.46845 5.14411C5.08781 4.88541 4.56951 4.98428 4.31081 5.36492C4.05212 5.74557 4.15098 6.26387 4.53163 6.52257L5.47766 7.16551C5.18224 7.46624 5.00002 7.87855 5.00002 8.33341C5.00002 9.25388 5.74622 10.0001 6.66669 10.0001C7.58716 10.0001 8.33336 9.25388 8.33336 8.33341C8.33336 8.23415 8.32468 8.13691 8.30804 8.04242C8.54426 7.66462 8.44124 7.16449 8.06956 6.91188L5.46845 5.14411ZM6.66305 14.7842C6.30373 14.5781 6.1795 14.1198 6.38556 13.7605C6.75032 13.1244 7.27645 12.5959 7.91081 12.2283C8.54518 11.8607 9.26532 11.6669 9.99852 11.6667C10.7317 11.6664 11.452 11.8596 12.0866 12.2268C12.7213 12.5939 13.2478 13.1221 13.613 13.7578C13.8193 14.117 13.6954 14.5754 13.3362 14.7818C12.9771 14.9881 12.5186 14.8642 12.3123 14.505C12.0786 14.0981 11.7416 13.7601 11.3354 13.5251C10.9293 13.2901 10.4683 13.1665 9.99906 13.1667C9.52981 13.1668 9.06892 13.2908 8.66293 13.5261C8.25693 13.7614 7.92021 14.0996 7.68677 14.5067C7.4807 14.866 7.02237 14.9902 6.66305 14.7842ZM15.7903 5.36492C15.5316 4.98428 15.0134 4.88541 14.6327 5.14411L12.0316 6.91188C11.7043 7.13434 11.5853 7.54876 11.7229 7.90254C11.6862 8.03998 11.6667 8.18441 11.6667 8.33341C11.6667 9.25388 12.4129 10.0001 13.3334 10.0001C14.2538 10.0001 15 9.25388 15 8.33341C15 7.89926 14.834 7.50388 14.562 7.20728L15.5695 6.52257C15.9502 6.26387 16.049 5.74557 15.7903 5.36492Z" fill="black"/>
<defs>
<radialGradient id="paint0_radial_72_1897" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(10 4.58333) rotate(90) scale(15.4167)">
<stop offset="0.359375" stop-color="#F26325"/>
<stop offset="1" stop-color="#F24A25"/>
</radialGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 1.8 KiB

View File

@ -0,0 +1,10 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="10" cy="10" r="10" fill="url(#paint0_radial_351_6183)"/>
<path opacity="0.8" fill-rule="evenodd" clip-rule="evenodd" d="M4.16669 7.50001C4.16669 7.03977 4.53978 6.66667 5.00002 6.66667H7.50002C7.96026 6.66667 8.33335 7.03977 8.33335 7.50001C8.33335 7.96024 7.96026 8.33334 7.50002 8.33334H5.00002C4.53978 8.33334 4.16669 7.96024 4.16669 7.50001ZM6.66669 15C6.66669 13.1591 8.15907 11.6667 10 11.6667C11.841 11.6667 13.3334 13.1591 13.3334 15H6.66669ZM12.5 6.66667C12.0398 6.66667 11.6667 7.03977 11.6667 7.50001C11.6667 7.96024 12.0398 8.33334 12.5 8.33334H15C15.4603 8.33334 15.8334 7.96024 15.8334 7.50001C15.8334 7.03977 15.4603 6.66667 15 6.66667H12.5Z" fill="black"/>
<defs>
<radialGradient id="paint0_radial_351_6183" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(10 4.58333) rotate(90) scale(15.4167)">
<stop offset="0.359375" stop-color="#98E791"/>
<stop offset="1" stop-color="#3C9845"/>
</radialGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@ -0,0 +1,10 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="10" cy="10" r="10" fill="url(#paint0_radial_72_1884)"/>
<path opacity="0.8" fill-rule="evenodd" clip-rule="evenodd" d="M8.33333 7.49999C8.33333 8.42047 7.58714 9.16666 6.66667 9.16666C5.74619 9.16666 5 8.42047 5 7.49999C5 6.57952 5.74619 5.83333 6.66667 5.83333C7.58714 5.83333 8.33333 6.57952 8.33333 7.49999ZM15 7.49999C15 8.42047 14.2538 9.16666 13.3333 9.16666C12.4129 9.16666 11.6667 8.42047 11.6667 7.49999C11.6667 6.57952 12.4129 5.83333 13.3333 5.83333C14.2538 5.83333 15 6.57952 15 7.49999ZM10 11.6667C8.15905 11.6667 6.66667 13.159 6.66667 15H13.3333C13.3333 13.159 11.8409 11.6667 10 11.6667Z" fill="black"/>
<defs>
<radialGradient id="paint0_radial_72_1884" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(10 4.58333) rotate(90) scale(15.4167)">
<stop offset="0.359375" stop-color="#6BEBD4"/>
<stop offset="1" stop-color="#077EA4"/>
</radialGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 1018 B

View File

@ -0,0 +1,10 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="10" cy="10" r="10" fill="url(#paint0_radial_72_1844)"/>
<path opacity="0.8" fill-rule="evenodd" clip-rule="evenodd" d="M8.33333 7.49999C8.33333 8.42047 7.58714 9.16666 6.66667 9.16666C5.74619 9.16666 5 8.42047 5 7.49999C5 6.57952 5.74619 5.83333 6.66667 5.83333C7.58714 5.83333 8.33333 6.57952 8.33333 7.49999ZM15 7.49999C15 8.42047 14.2538 9.16666 13.3333 9.16666C12.4129 9.16666 11.6667 8.42047 11.6667 7.49999C11.6667 6.57952 12.4129 5.83333 13.3333 5.83333C14.2538 5.83333 15 6.57952 15 7.49999ZM7.53238 12.6776C7.37535 12.2943 6.93734 12.1109 6.55404 12.2679C6.17075 12.4249 5.98732 12.8629 6.14435 13.2462C6.45676 14.0088 6.98828 14.6616 7.6717 15.1221C8.35513 15.5826 9.15976 15.8301 9.98384 15.8333C10.8079 15.8365 11.6144 15.5953 12.3014 15.1401C12.9884 14.6849 13.525 14.0362 13.8433 13.2761C14.0033 12.894 13.8233 12.4546 13.4412 12.2946C13.0591 12.1346 12.6197 12.3146 12.4597 12.6967C12.256 13.1832 11.9126 13.5983 11.4729 13.8896C11.0332 14.181 10.5171 14.3354 9.98966 14.3333C9.46224 14.3313 8.94728 14.1729 8.50989 13.8782C8.0725 13.5834 7.73232 13.1656 7.53238 12.6776Z" fill="black"/>
<defs>
<radialGradient id="paint0_radial_72_1844" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(10 4.58333) rotate(90) scale(15.4167)">
<stop offset="0.359375" stop-color="#F2AD25"/>
<stop offset="1" stop-color="#F27B25"/>
</radialGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@ -0,0 +1,10 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="10" cy="10" r="10" fill="url(#paint0_radial_72_1850)"/>
<path opacity="0.8" fill-rule="evenodd" clip-rule="evenodd" d="M8.33333 7.49999C8.33333 8.42047 7.58714 9.16666 6.66667 9.16666C5.74619 9.16666 5 8.42047 5 7.49999C5 6.57952 5.74619 5.83333 6.66667 5.83333C7.58714 5.83333 8.33333 6.57952 8.33333 7.49999ZM15 7.49999C15 8.42047 14.2538 9.16666 13.3333 9.16666C12.4129 9.16666 11.6667 8.42047 11.6667 7.49999C11.6667 6.57952 12.4129 5.83333 13.3333 5.83333C14.2538 5.83333 15 6.57952 15 7.49999ZM7.5 13.3333C7.03976 13.3333 6.66667 13.7064 6.66667 14.1667C6.66667 14.6269 7.03976 15 7.5 15H12.5C12.9602 15 13.3333 14.6269 13.3333 14.1667C13.3333 13.7064 12.9602 13.3333 12.5 13.3333H7.5Z" fill="black"/>
<defs>
<radialGradient id="paint0_radial_72_1850" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(10 4.58333) rotate(90) scale(15.4167)">
<stop offset="0.359375" stop-color="#AAAAAA"/>
<stop offset="1" stop-color="#5E5E5E"/>
</radialGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

View File

@ -0,0 +1,10 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="10" cy="10" r="10" fill="url(#paint0_radial_72_1862)"/>
<path opacity="0.8" fill-rule="evenodd" clip-rule="evenodd" d="M8.33333 7.49999C8.33333 8.42047 7.58714 9.16666 6.66667 9.16666C5.74619 9.16666 5 8.42047 5 7.49999C5 6.57952 5.74619 5.83333 6.66667 5.83333C7.58714 5.83333 8.33333 6.57952 8.33333 7.49999ZM15 7.49999C15 8.42047 14.2538 9.16666 13.3333 9.16666C12.4129 9.16666 11.6667 8.42047 11.6667 7.49999C11.6667 6.57952 12.4129 5.83333 13.3333 5.83333C14.2538 5.83333 15 6.57952 15 7.49999ZM6.38554 13.7605C6.17948 14.1198 6.30371 14.5781 6.66303 14.7842C7.02235 14.9902 7.48068 14.866 7.68675 14.5067C7.92019 14.0996 8.25691 13.7614 8.66291 13.5261C9.0689 13.2908 9.52979 13.1668 9.99904 13.1667C10.4683 13.1665 10.9293 13.2901 11.3354 13.5251C11.7416 13.7601 12.0786 14.0981 12.3123 14.505C12.5186 14.8642 12.977 14.9881 13.3362 14.7818C13.6954 14.5754 13.8193 14.117 13.613 13.7578C13.2477 13.1221 12.7212 12.5939 12.0866 12.2268C11.452 11.8596 10.7317 11.6664 9.9985 11.6667C9.2653 11.6669 8.54516 11.8607 7.91079 12.2283C7.27643 12.5959 6.7503 13.1244 6.38554 13.7605Z" fill="black"/>
<defs>
<radialGradient id="paint0_radial_72_1862" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(10 4.58333) rotate(90) scale(15.4167)">
<stop offset="0.359375" stop-color="#65B3FB"/>
<stop offset="1" stop-color="#256BF2"/>
</radialGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@ -0,0 +1,10 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="10" cy="10" r="10" fill="url(#paint0_radial_72_1873)"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.33333 7.49999C8.33333 8.42047 7.58714 9.16666 6.66667 9.16666C5.74619 9.16666 5 8.42047 5 7.49999C5 6.57952 5.74619 5.83333 6.66667 5.83333C7.58714 5.83333 8.33333 6.57952 8.33333 7.49999ZM15 7.49999C15 8.42047 14.2538 9.16666 13.3333 9.16666C12.4129 9.16666 11.6667 8.42047 11.6667 7.49999C11.6667 6.57952 12.4129 5.83333 13.3333 5.83333C14.2538 5.83333 15 6.57952 15 7.49999ZM10 15C11.3807 15 12.5 14.403 12.5 13.6667C12.5 12.9303 11.3807 11.6667 10 11.6667C8.61929 11.6667 7.5 12.9303 7.5 13.6667C7.5 14.403 8.61929 15 10 15Z" fill="black"/>
<defs>
<radialGradient id="paint0_radial_72_1873" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(10 4.58333) rotate(90) scale(15.4167)">
<stop offset="0.359375" stop-color="#CC86E4"/>
<stop offset="1" stop-color="#933CD8"/>
</radialGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@ -29,6 +29,13 @@ export { default as IconE2EE } from './e2ee.svg';
export { default as IconEnlarge } from './enlarge.svg';
export { default as IconEnterFullscreen } from './enter-fullscreen.svg';
export { default as IconEnvelope } from './envelope.svg';
export { default as IconEmotionsAngry } from './emotions-angry.svg';
export { default as IconEmotionsDisgusted } from './emotions-disgusted.svg';
export { default as IconEmotionsFearful } from './emotions-fearful.svg';
export { default as IconEmotionsHappy } from './emotions-happy.svg';
export { default as IconEmotionsNeutral } from './emotions-neutral.svg';
export { default as IconEmotionsSad } from './emotions-sad.svg';
export { default as IconEmotionsSurprised } from './emotions-surprised.svg';
export { default as IconExclamationSolid } from './exclamation-solid.svg';
export { default as IconExclamationTriangle } from './exclamation-triangle.svg';
export { default as IconExitFullscreen } from './exit-fullscreen.svg';

View File

@ -5,20 +5,21 @@ import { getLocalVideoTrack } from '../base/tracks/functions';
import { getBaseUrl } from '../base/util/helpers';
import {
addFaceExpression,
addFaceLandmarks,
clearFaceExpressionBuffer,
newFaceBox
} from './actions';
import {
DETECTION_TYPES,
DETECT_FACE,
FACE_LANDMARK_DETECTION_ERROR_THRESHOLD,
FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD,
INIT_WORKER,
NO_DETECTION,
NO_FACE_DETECTION_THRESHOLD,
WEBHOOK_SEND_TIME_INTERVAL
} from './constants';
import {
getDetectionInterval,
getFaceExpressionDuration,
sendFaceExpressionsWebhook
} from './functions';
import logger from './logger';
@ -33,13 +34,14 @@ class FaceLandmarksDetector {
private worker: Worker | null = null;
private lastFaceExpression: string | null = null;
private lastFaceExpressionTimestamp: number | null = null;
private duplicateConsecutiveExpressions = 0;
private webhookSendInterval: number | null = null;
private detectionInterval: number | null = null;
private recognitionActive = false;
private canvas?: HTMLCanvasElement;
private context?: CanvasRenderingContext2D | null;
private errorCount = 0;
private noDetectionCount = 0;
private noDetectionStartTimestamp: number | null = null;
/**
* Constructor for class, checks if the environment supports OffscreenCanvas.
@ -97,27 +99,48 @@ class FaceLandmarksDetector {
// @ts-ignore
const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });
const state = getState();
const addToBuffer = Boolean(state['features/base/config'].webhookProxyUrl);
// @ts-ignore
workerUrl = window.URL.createObjectURL(workerBlob);
this.worker = new Worker(workerUrl, { name: 'Face Recognition Worker' });
this.worker = new Worker(workerUrl, { name: 'Face Landmarks Worker' });
this.worker.onmessage = ({ data }: MessageEvent<any>) => {
const { faceExpression, faceBox } = data;
const { faceExpression, faceBox, faceCount } = data;
const messageTimestamp = Date.now();
if (faceExpression) {
if (faceExpression === this.lastFaceExpression) {
this.duplicateConsecutiveExpressions++;
} else {
if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
dispatch(addFaceExpression(
this.lastFaceExpression,
getFaceExpressionDuration(getState(), this.duplicateConsecutiveExpressions + 1),
this.lastFaceExpressionTimestamp
));
}
this.lastFaceExpression = faceExpression;
this.lastFaceExpressionTimestamp = Date.now();
this.duplicateConsecutiveExpressions = 0;
// if the number of faces detected is different from 1 we do not take into consideration that detection
if (faceCount !== 1) {
if (this.noDetectionCount === 0) {
this.noDetectionStartTimestamp = messageTimestamp;
}
this.noDetectionCount++;
if (this.noDetectionCount === NO_FACE_DETECTION_THRESHOLD && this.noDetectionStartTimestamp) {
this.addFaceLandmarks(
dispatch,
this.noDetectionStartTimestamp,
NO_DETECTION,
addToBuffer
);
}
return;
} else if (this.noDetectionCount > 0) {
this.noDetectionCount = 0;
this.noDetectionStartTimestamp = null;
}
if (faceExpression?.expression) {
const { expression } = faceExpression;
if (expression !== this.lastFaceExpression) {
this.addFaceLandmarks(
dispatch,
messageTimestamp,
expression,
addToBuffer
);
}
}
@ -128,7 +151,7 @@ class FaceLandmarksDetector {
APP.API.notifyFaceLandmarkDetected(faceBox, faceExpression);
};
const { faceLandmarks } = getState()['features/base/config'];
const { faceLandmarks } = state['features/base/config'];
const detectionTypes = [
faceLandmarks?.enableFaceCentering && DETECTION_TYPES.FACE_BOX,
faceLandmarks?.enableFaceExpressionsDetection && DETECTION_TYPES.FACE_EXPRESSIONS
@ -162,7 +185,7 @@ class FaceLandmarksDetector {
}
if (this.recognitionActive) {
logger.log('Face detection already active.');
logger.log('Face landmarks detection already active.');
return;
}
@ -179,7 +202,7 @@ class FaceLandmarksDetector {
this.imageCapture = new ImageCapture(firstVideoTrack);
this.recognitionActive = true;
logger.log('Start face detection');
logger.log('Start face landmarks detection');
const { faceLandmarks } = state['features/base/config'];
@ -191,7 +214,7 @@ class FaceLandmarksDetector {
).then(status => {
if (status) {
this.errorCount = 0;
} else if (++this.errorCount > FACE_LANDMARK_DETECTION_ERROR_THRESHOLD) {
} else if (++this.errorCount > FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD) {
/* this prevents the detection from stopping immediately after occurring an error
* sometimes due to the small detection interval when starting the detection some errors
* might occur due to the track not being ready
@ -228,18 +251,11 @@ class FaceLandmarksDetector {
if (!this.recognitionActive || !this.isInitialized()) {
return;
}
const stopTimestamp = Date.now();
const addToBuffer = Boolean(getState()['features/base/config'].webhookProxyUrl);
if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
dispatch(
addFaceExpression(
this.lastFaceExpression,
getFaceExpressionDuration(getState(), this.duplicateConsecutiveExpressions + 1),
this.lastFaceExpressionTimestamp
)
);
this.duplicateConsecutiveExpressions = 0;
this.lastFaceExpression = null;
this.lastFaceExpressionTimestamp = null;
this.addFaceLandmarks(dispatch, stopTimestamp, null, addToBuffer);
}
this.webhookSendInterval && window.clearInterval(this.webhookSendInterval);
@ -248,7 +264,36 @@ class FaceLandmarksDetector {
this.detectionInterval = null;
this.imageCapture = null;
this.recognitionActive = false;
logger.log('Stop face detection');
logger.log('Stop face landmarks detection');
}
/**
* Dispatches the action for adding new face landmarks and changes the state of the class.
*
* @param {IStore.dispatch} dispatch - The redux dispatch function.
* @param {number} endTimestamp - The timestamp when the face landmarks ended.
* @param {string} newFaceExpression - The new face expression.
* @param {boolean} addToBuffer - Flag for adding the face landmarks to the buffer.
* @returns {void}
*/
private addFaceLandmarks(
dispatch: IStore['dispatch'],
endTimestamp: number,
newFaceExpression: string | null,
addToBuffer = false) {
if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
dispatch(addFaceLandmarks(
{
duration: endTimestamp - this.lastFaceExpressionTimestamp,
faceExpression: this.lastFaceExpression,
timestamp: this.lastFaceExpressionTimestamp
},
addToBuffer
));
}
this.lastFaceExpression = newFaceExpression;
this.lastFaceExpressionTimestamp = endTimestamp;
}
/**

View File

@ -2,7 +2,7 @@ import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
import { Config, FaceResult, Human } from '@vladmandic/human';
import { DETECTION_TYPES, FACE_DETECTION_SCORE_THRESHOLD, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';
import { DetectInput, DetectOutput, FaceBox, InitInput } from './types';
import { DetectInput, DetectOutput, FaceBox, FaceExpression, InitInput } from './types';
export interface IFaceLandmarksHelper {
detect: ({ image, threshold }: DetectInput) => Promise<DetectOutput>;
@ -10,7 +10,7 @@ export interface IFaceLandmarksHelper {
getDetections: (image: ImageBitmap | ImageData) => Promise<Array<FaceResult>>;
getFaceBox: (detections: Array<FaceResult>, threshold: number) => FaceBox | undefined;
getFaceCount: (detections: Array<FaceResult>) => number;
getFaceExpression: (detections: Array<FaceResult>) => string | undefined;
getFaceExpression: (detections: Array<FaceResult>) => FaceExpression | undefined;
init: () => Promise<void>;
}
@ -144,13 +144,18 @@ export class HumanHelper implements IFaceLandmarksHelper {
* @param {Array<FaceResult>} detections - The array with the detections.
* @returns {string | undefined}
*/
getFaceExpression(detections: Array<FaceResult>): string | undefined {
getFaceExpression(detections: Array<FaceResult>): FaceExpression | undefined {
if (this.getFaceCount(detections) !== 1) {
return;
}
if (detections[0].emotion) {
return FACE_EXPRESSIONS_NAMING_MAPPING[detections[0].emotion[0].emotion];
const detection = detections[0];
if (detection.emotion) {
return {
expression: FACE_EXPRESSIONS_NAMING_MAPPING[detection.emotion[0].emotion],
score: detection.emotion[0].score
};
}
}

View File

@ -1,32 +1,21 @@
/**
* Redux action type dispatched in order to add a face expression.
* Redux action type dispatched in order to add real-time faceLandmarks to timeline.
*
* {
* type: ADD_FACE_EXPRESSION,
* faceExpression: string,
* duration: number
* type: ADD_FACE_LANDMARKS,
* faceLandmarks: FaceLandmarks
* }
*/
export const ADD_FACE_EXPRESSION = 'ADD_FACE_EXPRESSION';
export const ADD_FACE_LANDMARKS = 'ADD_FACE_LANDMARKS';
/**
* Redux action type dispatched in order to add a expression to the face expressions buffer.
* Redux action type dispatched in order to clear the faceLandmarks buffer for webhook in the state.
*
* {
* type: ADD_TO_FACE_EXPRESSIONS_BUFFER,
* faceExpression: string
* type: CLEAR_FACE_LANDMARKS_BUFFER
* }
*/
export const ADD_TO_FACE_EXPRESSIONS_BUFFER = 'ADD_TO_FACE_EXPRESSIONS_BUFFER';
/**
* Redux action type dispatched in order to clear the face expressions buffer in the state.
*
* {
* type: CLEAR_FACE_EXPRESSIONS_BUFFER
* }
*/
export const CLEAR_FACE_EXPRESSIONS_BUFFER = 'CLEAR_FACE_EXPRESSIONS_BUFFER';
export const CLEAR_FACE_LANDMARKS_BUFFER = 'CLEAR_FACE_LANDMARKS_BUFFER';
/**
* Redux action type dispatched in order to update coordinates of a detected face.

View File

@ -3,56 +3,35 @@ import './createImageBitmap';
import { AnyAction } from 'redux';
import {
ADD_FACE_EXPRESSION,
ADD_TO_FACE_EXPRESSIONS_BUFFER,
CLEAR_FACE_EXPRESSIONS_BUFFER,
ADD_FACE_LANDMARKS,
CLEAR_FACE_LANDMARKS_BUFFER,
NEW_FACE_COORDINATES
} from './actionTypes';
import { FaceBox } from './types';
import { FaceBox, FaceLandmarks } from './types';
/**
* Adds a new face expression and its duration.
* Adds new face landmarks to the timeline.
*
* @param {string} faceExpression - Face expression to be added.
* @param {number} duration - Duration in seconds of the face expression.
* @param {number} timestamp - Duration in seconds of the face expression.
* @param {FaceLandmarks} faceLandmarks - The new face landmarks to timeline.
* @param {boolean} addToBuffer - If true adds the face landmarks to a buffer in the reducer for webhook.
* @returns {AnyAction}
*/
export function addFaceExpression(faceExpression: string, duration: number, timestamp: number): AnyAction {
export function addFaceLandmarks(faceLandmarks: FaceLandmarks, addToBuffer: boolean): AnyAction {
return {
type: ADD_FACE_EXPRESSION,
faceExpression,
duration,
timestamp
type: ADD_FACE_LANDMARKS,
faceLandmarks,
addToBuffer
};
}
/**
* Adds a face expression with its timestamp to the face expression buffer.
* Clears the face landmarks array in the state.
*
* @param {Object} faceExpression - Object containing face expression string and its timestamp.
* @returns {AnyAction}
*/
export function addToFaceExpressionsBuffer(
faceExpression: {
emotion: string;
timestamp: number;
}
): AnyAction {
export function clearFaceExpressionBuffer(): AnyAction {
return {
type: ADD_TO_FACE_EXPRESSIONS_BUFFER,
faceExpression
};
}
/**
* Clears the face expressions array in the state.
*
* @returns {Object}
*/
export function clearFaceExpressionBuffer() {
return {
type: CLEAR_FACE_EXPRESSIONS_BUFFER
type: CLEAR_FACE_LANDMARKS_BUFFER
};
}

View File

@ -37,6 +37,11 @@ export const INIT_WORKER = 'INIT_WORKER';
*/
export const FACE_BOX_EVENT_TYPE = 'face-box';
/**
* Type of event sent on the data channel.
*/
export const FACE_LANDMARKS_EVENT_TYPE = 'face-landmarks';
/**
* Milliseconds interval value for sending new image data to the worker.
*/
@ -64,4 +69,15 @@ export const FACE_DETECTION_SCORE_THRESHOLD = 0.75;
/**
* Threshold for stopping detection after a certain number of consecutive errors have occurred.
*/
export const FACE_LANDMARK_DETECTION_ERROR_THRESHOLD = 4;
export const FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD = 4;
/**
* Threshold for number of consecutive detections with no face,
* so that when achieved there will be dispatched an action.
*/
export const NO_FACE_DETECTION_THRESHOLD = 5;
/**
* Constant type used for signaling that no valid face detection is found.
*/
export const NO_DETECTION = 'no-detection';

View File

@ -12,10 +12,9 @@ onmessage = async function({ data }: MessageEvent<any>) {
const detections = await helper.detect(data);
if (detections && (detections.faceBox || detections.faceExpression || detections.faceCount)) {
if (detections) {
self.postMessage(detections);
}
break;
}

View File

@ -1,40 +1,27 @@
import { IReduxState } from '../app/types';
import { IJitsiConference } from '../base/conference/reducer';
import { getLocalParticipant } from '../base/participants/functions';
import { extractFqnFromPath } from '../dynamic-branding/functions.any';
import { DETECT_FACE, FACE_BOX_EVENT_TYPE, SEND_IMAGE_INTERVAL_MS } from './constants';
import { FACE_BOX_EVENT_TYPE, FACE_LANDMARKS_EVENT_TYPE, SEND_IMAGE_INTERVAL_MS } from './constants';
import logger from './logger';
import { FaceBox } from './types';
let canvas: HTMLCanvasElement;
let context: CanvasRenderingContext2D | null;
if (typeof OffscreenCanvas === 'undefined') {
canvas = document.createElement('canvas');
context = canvas.getContext('2d');
}
import { FaceBox, FaceLandmarks } from './types';
/**
* Sends the face expression with its duration to all the other participants.
* Sends the face landmarks to other participants via the data channel.
*
* @param {any} conference - The current conference.
* @param {string} faceExpression - Face expression to be sent.
* @param {number} duration - The duration of the face expression in seconds.
* @param {FaceLandmarks} faceLandmarks - Face landmarks to be sent.
* @returns {void}
*/
export function sendFaceExpressionToParticipants(
conference: any,
faceExpression: string,
duration: number
): void {
export function sendFaceExpressionToParticipants(conference: any, faceLandmarks: FaceLandmarks): void {
try {
conference.sendEndpointMessage('', {
type: 'face_landmark',
faceExpression,
duration
type: FACE_LANDMARKS_EVENT_TYPE,
faceLandmarks
});
} catch (err) {
logger.warn('Could not broadcast the face expression to the other participants', err);
logger.warn('Could not broadcast the face landmarks to the other participants', err);
}
}
@ -61,30 +48,22 @@ export function sendFaceBoxToParticipants(
}
/**
* Sends the face expression with its duration to xmpp server.
* Sends the face landmarks to prosody.
*
* @param {any} conference - The current conference.
* @param {string} faceExpression - Face expression to be sent.
* @param {number} duration - The duration of the face expression in seconds.
* @param {FaceLandmarks} faceLandmarks - Face landmarks to be sent.
* @returns {void}
*/
export function sendFaceExpressionToServer(
conference: any,
faceExpression: string,
duration: number
): void {
export function sendFaceExpressionToServer(conference: IJitsiConference, faceLandmarks: FaceLandmarks): void {
try {
conference.sendFaceLandmarks({
faceExpression,
duration
});
conference.sendFaceLandmarks(faceLandmarks);
} catch (err) {
logger.warn('Could not send the face expression to xmpp server', err);
logger.warn('Could not send the face landmarks to prosody', err);
}
}
/**
* Sends face expression to backend.
* Sends face landmarks to backend.
*
* @param {Object} state - Redux state.
* @returns {boolean} - True if sent, false otherwise.
@ -96,9 +75,9 @@ export async function sendFaceExpressionsWebhook(state: IReduxState) {
const { connection } = state['features/base/connection'];
const jid = connection?.getJid();
const localParticipant = getLocalParticipant(state);
const { faceExpressionsBuffer } = state['features/face-landmarks'];
const { faceLandmarksBuffer } = state['features/face-landmarks'];
if (faceExpressionsBuffer.length === 0) {
if (faceLandmarksBuffer.length === 0) {
return false;
}
@ -111,7 +90,7 @@ export async function sendFaceExpressionsWebhook(state: IReduxState) {
meetingFqn: extractFqnFromPath(),
sessionId: conference?.sessionId,
submitted: Date.now(),
emotions: faceExpressionsBuffer,
emotions: faceLandmarksBuffer,
participantId: localParticipant?.jwtId,
participantName: localParticipant?.name,
participantJid: jid
@ -138,55 +117,6 @@ export async function sendFaceExpressionsWebhook(state: IReduxState) {
}
/**
 * Grabs the current frame from the image capture's track, converts it to a
 * form the face recognition worker can consume and posts it to the worker.
 *
 * @param {Worker} worker - Face recognition worker.
 * @param {Object} imageCapture - Image capture that contains the current track.
 * @param {number} threshold - Movement threshold as percentage for sharing face coordinates.
 * @returns {Promise<boolean>} - True if sent, false otherwise.
 */
export async function sendDataToWorker(
        worker: Worker,
        imageCapture: ImageCapture,
        threshold = 10
): Promise<boolean> {
    if (imageCapture === null || imageCapture === undefined) {
        return false;
    }

    let imageBitmap;
    let image;

    try {
        imageBitmap = await imageCapture.grabFrame();
    } catch (err) {
        // Grabbing a frame can fail (e.g. track ended or muted); treat it as
        // "nothing sent" rather than an error.
        logger.warn(err);

        return false;
    }

    if (typeof OffscreenCanvas === 'undefined') {
        // Fallback for environments without OffscreenCanvas: rasterize the
        // frame through the module-level 2D canvas so plain ImageData is
        // posted to the worker instead of an ImageBitmap.
        canvas.width = imageBitmap.width;
        canvas.height = imageBitmap.height;
        context?.drawImage(imageBitmap, 0, 0);
        image = context?.getImageData(0, 0, imageBitmap.width, imageBitmap.height);
    } else {
        image = imageBitmap;
    }

    worker.postMessage({
        type: DETECT_FACE,
        image,
        threshold
    });

    // Release the bitmap's resources now that its contents were posted.
    imageBitmap.close();

    return true;
}
/**
* Gets face box for a participant id.
*
@ -230,14 +160,3 @@ export function getDetectionInterval(state: IReduxState) {
return Math.max(faceLandmarks?.captureInterval || SEND_IMAGE_INTERVAL_MS);
}
/**
 * Computes how long, in seconds, a face expression was held, given the number
 * of consecutive detections of that expression.
 *
 * @param {IReduxState} state - The redux state.
 * @param {number} faceExpressionCount - The number of consecutive face expressions.
 * @returns {number} - Duration of face expression in seconds.
 */
export function getFaceExpressionDuration(state: IReduxState, faceExpressionCount: number) {
    const detectionIntervalSeconds = getDetectionInterval(state) / 1000;

    return faceExpressionCount * detectionIntervalSeconds;
}

View File

@ -11,18 +11,15 @@ import MiddlewareRegistry from '../base/redux/MiddlewareRegistry';
import { TRACK_ADDED, TRACK_REMOVED, TRACK_UPDATED } from '../base/tracks/actionTypes';
import FaceLandmarksDetector from './FaceLandmarksDetector';
import { ADD_FACE_EXPRESSION, NEW_FACE_COORDINATES, UPDATE_FACE_COORDINATES } from './actionTypes';
import {
addToFaceExpressionsBuffer
} from './actions';
import { ADD_FACE_LANDMARKS, NEW_FACE_COORDINATES, UPDATE_FACE_COORDINATES } from './actionTypes';
import { FACE_BOX_EVENT_TYPE } from './constants';
import { sendFaceBoxToParticipants, sendFaceExpressionToParticipants, sendFaceExpressionToServer } from './functions';
MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: any) => {
const { dispatch, getState } = store;
const { faceLandmarks } = getState()['features/base/config'];
const isEnabled = faceLandmarks?.enableFaceCentering || faceLandmarks?.enableFaceExpressionsDetection;
const { faceLandmarks: faceLandmarksConfig } = getState()['features/base/config'];
const isEnabled = faceLandmarksConfig?.enableFaceCentering || faceLandmarksConfig?.enableFaceExpressionsDetection;
if (action.type === CONFERENCE_JOINED) {
if (isEnabled) {
@ -99,19 +96,16 @@ MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: any)
return next(action);
}
case ADD_FACE_EXPRESSION: {
case ADD_FACE_LANDMARKS: {
const state = getState();
const { faceExpression, duration, timestamp } = action;
const { faceLandmarks } = action;
const conference = getCurrentConference(state);
if (getParticipantCount(state) > 1) {
sendFaceExpressionToParticipants(conference, faceExpression, duration);
sendFaceExpressionToParticipants(conference, faceLandmarks);
}
sendFaceExpressionToServer(conference, faceExpression, duration);
dispatch(addToFaceExpressionsBuffer({
emotion: faceExpression,
timestamp
}));
sendFaceExpressionToServer(conference, faceLandmarks);
return next(action);
}

View File

@ -1,42 +1,25 @@
import ReducerRegistry from '../base/redux/ReducerRegistry';
import {
ADD_FACE_EXPRESSION,
ADD_TO_FACE_EXPRESSIONS_BUFFER,
CLEAR_FACE_EXPRESSIONS_BUFFER,
ADD_FACE_LANDMARKS,
CLEAR_FACE_LANDMARKS_BUFFER,
UPDATE_FACE_COORDINATES
} from './actionTypes';
import { FaceBox } from './types';
import { FaceBox, FaceLandmarks } from './types';
const defaultState = {
faceBoxes: {},
faceExpressions: {
happy: 0,
neutral: 0,
surprised: 0,
angry: 0,
fearful: 0,
disgusted: 0,
sad: 0
},
faceExpressionsBuffer: [],
faceLandmarks: [],
faceLandmarksBuffer: [],
recognitionActive: false
};
export interface IFaceLandmarksState {
faceBoxes: { [key: string]: FaceBox; };
faceExpressions: {
angry: number;
disgusted: number;
fearful: number;
happy: number;
neutral: number;
sad: number;
surprised: number;
};
faceExpressionsBuffer: Array<{
faceLandmarks: Array<FaceLandmarks>;
faceLandmarksBuffer: Array<{
emotion: string;
timestamp: string;
timestamp: number;
}>;
recognitionActive: boolean;
}
@ -44,26 +27,23 @@ export interface IFaceLandmarksState {
ReducerRegistry.register<IFaceLandmarksState>('features/face-landmarks',
(state = defaultState, action): IFaceLandmarksState => {
switch (action.type) {
case ADD_FACE_EXPRESSION: {
case ADD_FACE_LANDMARKS: {
const { addToBuffer, faceLandmarks }: { addToBuffer: boolean; faceLandmarks: FaceLandmarks; } = action;
return {
...state,
faceExpressions: {
...state.faceExpressions,
[action.faceExpression]: state.faceExpressions[
action.faceExpression as keyof typeof state.faceExpressions] + action.duration
}
faceLandmarks: [ ...state.faceLandmarks, faceLandmarks ],
faceLandmarksBuffer: addToBuffer ? [ ...state.faceLandmarksBuffer,
{
emotion: faceLandmarks.faceExpression,
timestamp: faceLandmarks.timestamp
} ] : state.faceLandmarksBuffer
};
}
case ADD_TO_FACE_EXPRESSIONS_BUFFER: {
case CLEAR_FACE_LANDMARKS_BUFFER: {
return {
...state,
faceExpressionsBuffer: [ ...state.faceExpressionsBuffer, action.faceExpression ]
};
}
case CLEAR_FACE_EXPRESSIONS_BUFFER: {
return {
...state,
faceExpressionsBuffer: []
faceLandmarksBuffer: []
};
}
case UPDATE_FACE_COORDINATES: {

View File

@ -19,5 +19,21 @@ export type InitInput = {
export type DetectOutput = {
faceBox?: FaceBox;
faceCount: number;
faceExpression?: string;
faceExpression?: FaceExpression;
};
export type FaceExpression = {
expression: string;
score: number;
};
export type FaceLandmarks = {
// duration in milliseconds of the face landmarks
duration: number;
faceExpression: string;
score?: number;
// the start timestamp of the expression
timestamp: number;
};

View File

@ -14,7 +14,8 @@ import MiddlewareRegistry from '../base/redux/MiddlewareRegistry';
import { TRACK_ADDED, TRACK_UPDATED } from '../base/tracks/actionTypes';
import { getCurrentRoomId, isInBreakoutRoom } from '../breakout-rooms/functions';
import { extractFqnFromPath } from '../dynamic-branding/functions.any';
import { ADD_FACE_EXPRESSION } from '../face-landmarks/actionTypes';
import { ADD_FACE_LANDMARKS } from '../face-landmarks/actionTypes';
import { FaceLandmarks } from '../face-landmarks/types';
import RTCStats from './RTCStats';
import {
@ -164,17 +165,19 @@ MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: AnyA
}
break;
}
case ADD_FACE_EXPRESSION:
case ADD_FACE_LANDMARKS: {
if (canSendFaceLandmarksRtcstatsData(state)) {
const { duration, faceExpression, timestamp } = action;
const { duration, faceExpression, timestamp } = action.faceLandmarks as FaceLandmarks;
const durationSeconds = Math.round(duration / 1000);
RTCStats.sendFaceLandmarksData({
duration,
duration: durationSeconds,
faceLandmarks: faceExpression,
timestamp
});
}
break;
}
case CONFERENCE_TIMESTAMP_CHANGED: {
if (canSendRtcstatsData(state)) {
const { conferenceTimestamp } = action;

View File

@ -63,3 +63,20 @@ export const RESET_SEARCH_CRITERIA = 'RESET_SEARCH_CRITERIA'
*/
export const TOGGLE_FACE_EXPRESSIONS = 'SHOW_FACE_EXPRESSIONS';
export const INCREASE_ZOOM = 'INCREASE_ZOOM';
export const DECREASE_ZOOM = 'DECREASE_ZOOM';
export const ADD_TO_OFFSET = 'ADD_TO_OFFSET';
export const SET_OFFSET = 'RESET_OFFSET';
export const ADD_TO_OFFSET_LEFT = 'ADD_TO_OFFSET_LEFT';
export const ADD_TO_OFFSET_RIGHT = 'ADD_TO_OFFSET_RIGHT';
export const SET_TIMELINE_BOUNDARY = 'SET_TIMELINE_BOUNDARY';
export const SET_PANNING = 'SET_PANNING';

View File

@ -0,0 +1,231 @@
import { IStore } from '../app/types';
import {
ADD_TO_OFFSET,
ADD_TO_OFFSET_LEFT,
ADD_TO_OFFSET_RIGHT,
INIT_REORDER_STATS,
INIT_SEARCH,
INIT_UPDATE_STATS,
RESET_SEARCH_CRITERIA,
SET_PANNING,
SET_TIMELINE_BOUNDARY,
TOGGLE_FACE_EXPRESSIONS,
UPDATE_SORTED_SPEAKER_STATS_IDS,
UPDATE_STATS
} from './actionTypes';
import { MINIMUM_INTERVAL } from './constants';
import { getCurrentDuration, getTimelineBoundaries } from './functions';
import { ISpeakerStats } from './reducer';
/**
 * Creates the action which starts a search by the given criteria.
 *
 * @param {string} criteria - The search criteria.
 * @returns {Object}
 */
export function initSearch(criteria: string) {
    const action = {
        type: INIT_SEARCH,
        criteria
    };

    return action;
}
/**
 * Creates the action which fetches new stats and triggers an update.
 *
 * @param {Function} getSpeakerStats - Function to get the speaker stats.
 * @returns {Object}
 */
export function initUpdateStats(getSpeakerStats: () => ISpeakerStats) {
    const action = {
        type: INIT_UPDATE_STATS,
        getSpeakerStats
    };

    return action;
}
/**
 * Creates the action which replaces the current stats with new ones.
 *
 * @param {Object} stats - The new stats.
 * @returns {Object}
 */
export function updateStats(stats: Object) {
    return {
        stats,
        type: UPDATE_STATS
    };
}
/**
 * Creates the action which updates the display order of the speaker stats.
 *
 * @param {Array<string>} participantIds - Participant ids in the new order.
 * @returns {Object}
 */
export function updateSortedSpeakerStatsIds(participantIds: Array<string>) {
    return {
        participantIds,
        type: UPDATE_SORTED_SPEAKER_STATS_IDS
    };
}
/**
 * Creates the action which initiates reordering of the stats.
 *
 * @returns {Object}
 */
export function initReorderStats() {
    const action = {
        type: INIT_REORDER_STATS
    };

    return action;
}
/**
 * Creates the action which resets the search criteria.
 *
 * @returns {Object}
 */
export function resetSearchCriteria() {
    const action = {
        type: RESET_SEARCH_CRITERIA
    };

    return action;
}
/**
 * Creates the action which toggles the face expressions grid.
 *
 * @returns {Object}
 */
export function toggleFaceExpressions() {
    const action = {
        type: TOGGLE_FACE_EXPRESSIONS
    };

    return action;
}
/**
* Adds a value to the boundary offset of the timeline.
*
* @param {number} value - The value to be added.
* @param {number} left - The left boundary.
* @param {number} right - The right boundary.
* @param {number} currentDuration - The currentDuration of the conference.
* @returns {Object}
*/
export function addToOffset(value: number) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
const { left, right } = getTimelineBoundaries(state);
const currentDuration = getCurrentDuration(state) ?? 0;
const newLeft = left + value;
const newRight = right + value;
if (newLeft >= 0 && newRight <= currentDuration) {
dispatch({
type: ADD_TO_OFFSET,
value
});
} else if (newLeft < 0) {
dispatch({
type: ADD_TO_OFFSET,
value: -left
});
} else if (newRight > currentDuration) {
dispatch({
type: ADD_TO_OFFSET,
value: currentDuration - right
});
}
};
}
/**
 * Adds the value to the offset of the left boundary of the timeline, keeping
 * the boundary non-negative and the visible interval above the minimum width.
 *
 * @param {number} value - The value to be added to the left offset.
 * @returns {Function} - A thunk dispatching the (possibly clamped) change.
 */
export function addToOffsetLeft(value: number) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const { left, right } = getTimelineBoundaries(getState());
        const updatedLeft = left + value;

        if (updatedLeft < 0) {
            // Clamp so the left boundary never precedes the conference start.
            dispatch({
                type: ADD_TO_OFFSET_LEFT,
                value: -left
            });
        } else if (right - updatedLeft > MINIMUM_INTERVAL) {
            dispatch({
                type: ADD_TO_OFFSET_LEFT,
                value
            });
        }
    };
}
/**
* Adds the value to the offset of the right boundary for the timeline.
*
* @param {number} value - The new value for the offset.
* @returns {Object}
*/
export function addToOffsetRight(value: number) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
const { left, right } = getTimelineBoundaries(state);
const currentDuration = getCurrentDuration(state) ?? 0;
const newRight = right + value;
if (newRight <= currentDuration && newRight - left > MINIMUM_INTERVAL) {
dispatch({
type: ADD_TO_OFFSET_RIGHT,
value
});
} else if (newRight > currentDuration) {
dispatch({
type: ADD_TO_OFFSET_RIGHT,
value: currentDuration - right
});
}
};
}
/**
 * Creates the action which sets the current time boundary of the timeline,
 * used when zoomed in.
 *
 * @param {number} boundary - The current time boundary.
 * @returns {Object}
 */
export function setTimelineBoundary(boundary: number) {
    return {
        boundary,
        type: SET_TIMELINE_BOUNDARY
    };
}
/**
 * Creates the action which clears the time boundary of the timeline, used
 * when zoomed out fully.
 *
 * @returns {Object}
 */
export function clearTimelineBoundary() {
    const action = {
        type: SET_TIMELINE_BOUNDARY,
        boundary: null
    };

    return action;
}
/**
 * Creates the action which sets the state of the timeline panning.
 *
 * @param {Object} panning - The state of the timeline panning.
 * @returns {Object}
 */
export function setTimelinePanning(panning: { active: boolean; x: number; }) {
    return {
        panning,
        type: SET_PANNING
    };
}

View File

@ -0,0 +1 @@
export * from './actions.any';

View File

@ -1,94 +0,0 @@
import {
INIT_REORDER_STATS,
INIT_SEARCH,
INIT_UPDATE_STATS,
RESET_SEARCH_CRITERIA,
TOGGLE_FACE_EXPRESSIONS,
UPDATE_SORTED_SPEAKER_STATS_IDS,
UPDATE_STATS
} from './actionTypes';
/**
* Starts a search by criteria.
*
* @param {string | null} criteria - The search criteria.
* @returns {Object}
*/
export function initSearch(criteria: string | null) {
return {
type: INIT_SEARCH,
criteria
};
}
/**
* Gets the new stats and triggers update.
*
* @param {Function} getSpeakerStats - Function to get the speaker stats.
* @returns {Object}
*/
export function initUpdateStats(getSpeakerStats: Function) {
return {
type: INIT_UPDATE_STATS,
getSpeakerStats
};
}
/**
* Updates the stats with new stats.
*
* @param {Object} stats - The new stats.
* @returns {Object}
*/
export function updateStats(stats: Object) {
return {
type: UPDATE_STATS,
stats
};
}
/**
* Updates the speaker stats order.
*
* @param {Object} participantIds - Participant ids.
* @returns {Object}
*/
export function updateSortedSpeakerStatsIds(participantIds?: Array<string>) {
return {
type: UPDATE_SORTED_SPEAKER_STATS_IDS,
participantIds
};
}
/**
* Initiates reordering of the stats.
*
* @returns {Object}
*/
export function initReorderStats() {
return {
type: INIT_REORDER_STATS
};
}
/**
* Resets the search criteria.
*
* @returns {Object}
*/
export function resetSearchCriteria() {
return {
type: RESET_SEARCH_CRITERIA
};
}
/**
* Toggles the face expressions grid.
*
* @returns {Object}
*/
export function toggleFaceExpressions() {
return {
type: TOGGLE_FACE_EXPRESSIONS
};
}

View File

@ -0,0 +1 @@
export * from './actions.any';

View File

@ -1,24 +1,22 @@
// @flow
import type { Dispatch } from 'redux';
import { IconConnection } from '../../base/icons';
import { AbstractButton } from '../../base/toolbox/components';
import type { AbstractButtonProps } from '../../base/toolbox/components';
import { IStore } from '../../app/types';
import { IconConnection } from '../../base/icons/svg';
// eslint-disable-next-line lines-around-comment
// @ts-ignore
import { AbstractButton, type AbstractButtonProps } from '../../base/toolbox/components';
type Props = AbstractButtonProps & {
/**
* True if the navigation bar should be visible.
*/
dispatch: Dispatch<any>
dispatch: IStore['dispatch'];
};
/**
* Implementation of a button for opening speaker stats dialog.
*/
class AbstractSpeakerStatsButton extends AbstractButton<Props, *> {
class AbstractSpeakerStatsButton extends AbstractButton<Props, any, any> {
accessibilityLabel = 'toolbar.accessibilityLabel.speakerStats';
icon = IconConnection;
label = 'toolbar.speakerStats';

View File

@ -1,11 +1,10 @@
// @flow
import { useCallback, useEffect, useRef } from 'react';
import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
import { getLocalParticipant } from '../../base/participants';
import { initUpdateStats } from '../actions';
import { IReduxState } from '../../app/types';
import { getLocalParticipant } from '../../base/participants/functions';
import { initUpdateStats } from '../actions.any';
import {
SPEAKER_STATS_RELOAD_INTERVAL
} from '../constants';
@ -17,21 +16,22 @@ import {
* @param {Object} itemStyles - Styles for the speaker stats item.
* @returns {Function}
*/
const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Object): Function[] => {
const abstractSpeakerStatsList = (speakerStatsItem: Function): Function[] => {
const dispatch = useDispatch();
const { t } = useTranslation();
const conference = useSelector(state => state['features/base/conference'].conference);
const { conference } = useSelector((state: IReduxState) => state['features/base/conference']);
const {
stats: speakerStats,
showFaceExpressions,
sortedSpeakerStatsIds
} = useSelector(state => state['features/speaker-stats']);
} = useSelector((state: IReduxState) => state['features/speaker-stats']);
const localParticipant = useSelector(getLocalParticipant);
const { defaultRemoteDisplayName } = useSelector(
state => state['features/base/config']) || {};
const { faceLandmarks } = useSelector(state => state['features/base/config']) || {};
const { faceExpressions } = useSelector(state => state['features/face-landmarks']) || {};
const reloadInterval = useRef(null);
(state: IReduxState) => state['features/base/config']) || {};
const { faceLandmarks: faceLandmarksConfig } = useSelector((state: IReduxState) =>
state['features/base/config']) || {};
const { faceLandmarks } = useSelector((state: IReduxState) => state['features/face-landmarks']) || {};
const reloadInterval = useRef<number>();
/**
* Update the internal state with the latest speaker stats.
@ -40,7 +40,7 @@ const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Objec
* @private
*/
const getSpeakerStats = useCallback(() => {
const stats = conference.getSpeakerStats();
const stats = conference?.getSpeakerStats();
for (const userId in stats) {
if (stats[userId]) {
@ -48,40 +48,42 @@ const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Objec
const meString = t('me');
stats[userId].setDisplayName(
localParticipant.name
localParticipant?.name
? `${localParticipant.name} (${meString})`
: meString
);
if (faceLandmarks?.enableDisplayFaceExpressions) {
stats[userId].setFaceExpressions(faceExpressions);
if (faceLandmarksConfig?.enableDisplayFaceExpressions) {
stats[userId].setFaceLandmarks(faceLandmarks);
}
}
if (!stats[userId].getDisplayName()) {
stats[userId].setDisplayName(
conference.getParticipantById(userId)?.name
conference?.getParticipantById(userId)?.name
);
}
}
}
return stats;
}, [ faceExpressions ]);
return stats ?? {};
}, [ faceLandmarks ]);
const updateStats = useCallback(
() => dispatch(initUpdateStats(getSpeakerStats)),
[ dispatch, initUpdateStats, getSpeakerStats ]);
useEffect(() => {
if (reloadInterval.current) {
clearInterval(reloadInterval.current);
}
reloadInterval.current = setInterval(() => {
reloadInterval.current = window.setInterval(() => {
updateStats();
}, SPEAKER_STATS_RELOAD_INTERVAL);
return () => clearInterval(reloadInterval.current);
}, [ faceExpressions ]);
return () => {
if (reloadInterval.current) {
clearInterval(reloadInterval.current);
}
};
}, [ faceLandmarks ]);
const localSpeakerStats = Object.keys(speakerStats).length === 0 ? getSpeakerStats() : speakerStats;
const localSortedSpeakerStatsIds
@ -91,22 +93,17 @@ const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Objec
return userIds.map(userId => {
const statsModel = localSpeakerStats[userId];
const props = {};
props.isDominantSpeaker = statsModel.isDominantSpeaker();
props.dominantSpeakerTime = statsModel.getTotalDominantSpeakerTime();
props.participantId = userId;
props.hasLeft = statsModel.hasLeft();
if (showFaceExpressions) {
props.faceExpressions = statsModel.getFaceExpressions();
}
props.hidden = statsModel.hidden;
props.showFaceExpressions = showFaceExpressions;
props.displayName = statsModel.getDisplayName() || defaultRemoteDisplayName;
if (itemStyles) {
props.styles = itemStyles;
}
props.t = t;
const props = {
isDominantSpeaker: statsModel.isDominantSpeaker(),
dominantSpeakerTime: statsModel.getTotalDominantSpeakerTime(),
participantId: userId,
hasLeft: statsModel.hasLeft(),
faceLandmarks: showFaceExpressions ? statsModel.getFaceLandmarks() : undefined,
hidden: statsModel.hidden,
showFaceExpressions,
displayName: statsModel.getDisplayName() || defaultRemoteDisplayName,
t
};
return speakerStatsItem(props);
});

View File

@ -7,7 +7,7 @@
* @private
* @returns {number}
*/
function getHoursCount(milliseconds) {
function getHoursCount(milliseconds: number) {
return Math.floor(milliseconds / (60 * 60 * 1000));
}
@ -18,7 +18,7 @@ function getHoursCount(milliseconds) {
* @private
* @returns {number}
*/
function getMinutesCount(milliseconds) {
function getMinutesCount(milliseconds: number) {
return Math.floor(milliseconds / (60 * 1000) % 60);
}
@ -29,7 +29,7 @@ function getMinutesCount(milliseconds) {
* @private
* @returns {number}
*/
function getSecondsCount(milliseconds) {
function getSecondsCount(milliseconds: number) {
return Math.floor(milliseconds / 1000 % 60);
}
@ -85,6 +85,6 @@ export function createLocalizedTime(time: number, t: Function) {
* key for react to iterate upon.
* @returns {string}
*/
function createTimeDisplay(count, countNounKey, t) {
function createTimeDisplay(count: number, countNounKey: string, t: Function) {
return t(countNounKey, { count });
}

View File

@ -1,15 +1,28 @@
import React, { useCallback, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
import { makeStyles } from 'tss-react/mui';
import { IReduxState } from '../../../app/types';
import Icon from '../../../base/icons/components/Icon';
import {
IconEmotionsAngry,
IconEmotionsDisgusted,
IconEmotionsFearful,
IconEmotionsHappy,
IconEmotionsNeutral,
IconEmotionsSad,
IconEmotionsSurprised
} from '../../../base/icons/svg';
// eslint-disable-next-line lines-around-comment
// @ts-ignore
import { Tooltip } from '../../../base/tooltip';
import Dialog from '../../../base/ui/components/web/Dialog';
import { escapeRegexp } from '../../../base/util/helpers';
import { initSearch, resetSearchCriteria, toggleFaceExpressions } from '../../actions';
import { initSearch, resetSearchCriteria, toggleFaceExpressions } from '../../actions.any';
import {
DISPLAY_SWITCH_BREAKPOINT,
MOBILE_BREAKPOINT,
RESIZE_SEARCH_SWITCH_CONTAINER_BREAKPOINT
MOBILE_BREAKPOINT
} from '../../constants';
import FaceExpressionsSwitch from './FaceExpressionsSwitch';
@ -20,69 +33,171 @@ import SpeakerStatsSearch from './SpeakerStatsSearch';
const useStyles = makeStyles()(theme => {
return {
speakerStats: {
'& .header': {
position: 'fixed',
backgroundColor: theme.palette.ui01,
paddingLeft: theme.spacing(4),
paddingRight: theme.spacing(4),
marginLeft: `-${theme.spacing(4)}`,
'&.large': {
width: '616px'
},
'&.medium': {
width: '352px'
},
'@media (max-width: 448px)': {
width: 'calc(100% - 48px) !important'
},
'& .upper-header': {
display: 'flex',
justifyContent: 'space-between',
alignItems: 'center',
width: '100%',
'& .search-switch-container': {
display: 'flex',
width: '100%',
'& .search-container': {
width: 175,
marginRight: theme.spacing(3)
},
'& .search-container-full-width': {
width: '100%'
}
},
'& .emotions-icons': {
display: 'flex',
'& svg': {
fill: '#000'
},
'&>div': {
marginRight: theme.spacing(3)
},
'&>div:last-child': {
marginRight: 0
}
}
}
},
'& .row': {
display: 'flex',
alignItems: 'center',
'& .avatar': {
width: '32px',
marginRight: theme.spacing(3)
},
'& .name-time': {
width: 'calc(100% - 48px)',
display: 'flex',
justifyContent: 'space-between',
alignItems: 'center'
alignItems: 'center',
'&.expressions-on': {
width: 'calc(47% - 48px)',
marginRight: theme.spacing(4)
}
},
'& .name-time_expressions-on': {
width: 'calc(47% - 48px)'
},
'& .expressions': {
width: 'calc(53% - 29px)',
'& .timeline-container': {
height: '100%',
width: `calc(53% - ${theme.spacing(4)})`,
display: 'flex',
justifyContent: 'space-between',
'& .expression': {
width: '30px',
textAlign: 'center'
alignItems: 'center',
borderLeftWidth: 1,
borderLeftColor: theme.palette.ui02,
borderLeftStyle: 'solid',
'& .timeline': {
height: theme.spacing(2),
display: 'flex',
width: '100%',
'&>div': {
marginRight: theme.spacing(1),
borderRadius: 5
},
'&>div:first-child': {
borderRadius: '0 5px 5px 0'
},
'&>div:last-child': {
marginRight: 0,
borderRadius: '5px 0 0 5px'
}
}
},
'& .axis-container': {
height: '100%',
width: `calc(53% - ${theme.spacing(6)})`,
display: 'flex',
alignItems: 'center',
marginLeft: theme.spacing(3),
'& div': {
borderRadius: 5
},
'& .axis': {
height: theme.spacing(1),
display: 'flex',
width: '100%',
backgroundColor: theme.palette.ui03,
position: 'relative',
'& .left-bound': {
position: 'absolute',
bottom: 10,
left: 0
},
'& .right-bound': {
position: 'absolute',
bottom: 10,
right: 0
},
'& .handler': {
position: 'absolute',
backgroundColor: theme.palette.ui09,
height: 12,
marginTop: -4,
display: 'flex',
justifyContent: 'space-between',
'& .resize': {
height: '100%',
width: 5,
cursor: 'col-resize'
}
}
}
}
},
'& .separator': {
width: 'calc(100% + 48px)',
height: 1,
marginLeft: -24,
backgroundColor: theme.palette.ui02
}
},
labelsContainer: {
position: 'relative'
},
separator: {
position: 'absolute',
width: 'calc(100% + 48px)',
height: 1,
left: -24,
backgroundColor: theme.palette.ui05
},
searchSwitchContainer: {
display: 'flex',
justifyContent: 'space-between',
alignItems: 'center',
width: '100%'
},
searchSwitchContainerExpressionsOn: {
width: '58.5%',
[theme.breakpoints.down(RESIZE_SEARCH_SWITCH_CONTAINER_BREAKPOINT)]: {
width: '100%'
}
},
searchContainer: {
width: '50%'
},
searchContainerFullWidth: {
width: '100%'
}
};
});
// The emotions rendered as tooltip-wrapped icons in the speaker stats header
// legend, in display order; each entry pairs a translation key with its icon.
const EMOTIONS_LEGEND = [
    {
        translationKey: 'speakerStats.neutral',
        icon: IconEmotionsNeutral
    },
    {
        translationKey: 'speakerStats.happy',
        icon: IconEmotionsHappy
    },
    {
        translationKey: 'speakerStats.surprised',
        icon: IconEmotionsSurprised
    },
    {
        translationKey: 'speakerStats.sad',
        icon: IconEmotionsSad
    },
    {
        translationKey: 'speakerStats.fearful',
        icon: IconEmotionsFearful
    },
    {
        translationKey: 'speakerStats.angry',
        icon: IconEmotionsAngry
    },
    {
        translationKey: 'speakerStats.disgusted',
        icon: IconEmotionsDisgusted
    }
];
const SpeakerStats = () => {
const { faceLandmarks } = useSelector((state: IReduxState) => state['features/base/config']);
const { showFaceExpressions } = useSelector((state: IReduxState) => state['features/speaker-stats']);
@ -91,6 +206,7 @@ const SpeakerStats = () => {
const displayLabels = clientWidth > MOBILE_BREAKPOINT;
const dispatch = useDispatch();
const { classes } = useStyles();
const { t } = useTranslation();
const onToggleFaceExpressions = useCallback(() =>
dispatch(toggleFaceExpressions())
@ -104,9 +220,9 @@ const SpeakerStats = () => {
useEffect(() => {
showFaceExpressions && !displaySwitch && dispatch(toggleFaceExpressions());
}, [ clientWidth ]);
useEffect(() => () => {
dispatch(resetSearchCriteria());
}, []);
// @ts-ignore
useEffect(() => () => dispatch(resetSearchCriteria()), []);
return (
<Dialog
@ -115,33 +231,49 @@ const SpeakerStats = () => {
size = { showFaceExpressions ? 'large' : 'medium' }
titleKey = 'speakerStats.speakerStats'>
<div className = { classes.speakerStats }>
<div
className = {
`${classes.searchSwitchContainer}
${showFaceExpressions ? classes.searchSwitchContainerExpressionsOn : ''}`
}>
<div
className = {
displaySwitch
? classes.searchContainer
: classes.searchContainerFullWidth }>
<SpeakerStatsSearch
onSearch = { onSearch } />
</div>
<div className = { `header ${showFaceExpressions ? 'large' : 'medium'}` }>
<div className = 'upper-header'>
<div
className = {
`search-switch-container
${showFaceExpressions ? 'expressions-on' : ''}`
}>
<div
className = {
displaySwitch
? 'search-container'
: 'search-container-full-width' }>
<SpeakerStatsSearch
onSearch = { onSearch } />
</div>
{ displaySwitch
{ displaySwitch
&& <FaceExpressionsSwitch
onChange = { onToggleFaceExpressions }
showFaceExpressions = { showFaceExpressions } />
}
</div>
{ displayLabels && (
<div className = { classes.labelsContainer }>
}
</div>
{ showFaceExpressions && <div className = 'emotions-icons'>
{
EMOTIONS_LEGEND.map(emotion => (
<Tooltip
content = { t(emotion.translationKey) }
key = { emotion.translationKey }
position = { 'top' }>
<Icon
size = { 20 }
src = { emotion.icon } />
</Tooltip>
))
}
</div>}
</div>
{ displayLabels && (
<SpeakerStatsLabels
showFaceExpressions = { showFaceExpressions ?? false } />
<div className = { classes.separator } />
</div>
)}
)}
</div>
<SpeakerStatsList />
</div>
</Dialog>

View File

@ -1,12 +1,12 @@
// @flow
import { createToolbarEvent, sendAnalytics } from '../../../analytics';
import { openDialog } from '../../../base/dialog';
import { translate } from '../../../base/i18n';
import { connect } from '../../../base/redux';
import { createToolbarEvent } from '../../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../../analytics/functions';
import { openDialog } from '../../../base/dialog/actions';
import { translate } from '../../../base/i18n/functions';
import { connect } from '../../../base/redux/functions';
import AbstractSpeakerStatsButton from '../AbstractSpeakerStatsButton';
import { SpeakerStats } from './';
import SpeakerStats from './SpeakerStats';
/**
* Implementation of a button for opening speaker stats dialog.
@ -20,6 +20,7 @@ class SpeakerStatsButton extends AbstractSpeakerStatsButton {
* @returns {void}
*/
_handleClick() {
// @ts-ignore
const { dispatch } = this.props;
sendAnalytics(createToolbarEvent('speaker.stats'));
@ -27,4 +28,5 @@ class SpeakerStatsButton extends AbstractSpeakerStatsButton {
}
}
// @ts-ignore
export default translate(connect()(SpeakerStatsButton));

View File

@ -1,136 +0,0 @@
/* @flow */
import React from 'react';
import { Avatar, StatelessAvatar } from '../../../base/avatar';
import { getInitials } from '../../../base/avatar/functions';
import BaseTheme from '../../../base/ui/components/BaseTheme';
import { FACE_EXPRESSIONS } from '../../../face-landmarks/constants';
import TimeElapsed from './TimeElapsed';
/**
* The type of the React {@code Component} props of {@link SpeakerStatsItem}.
*/
type Props = {
/**
* The name of the participant.
*/
displayName: string,
/**
* The object that has as keys the face expressions of the
* participant and as values a number that represents the count .
*/
faceExpressions: Object,
/**
* True if the face expressions detection is not disabled.
*/
showFaceExpressions: boolean,
/**
* The total milliseconds the participant has been dominant speaker.
*/
dominantSpeakerTime: number,
/**
* The id of the user.
*/
participantId: string,
/**
* True if the participant is no longer in the meeting.
*/
hasLeft: boolean,
/**
* True if the participant is not shown in speaker stats.
*/
hidden: boolean,
/**
* True if the participant is currently the dominant speaker.
*/
isDominantSpeaker: boolean,
/**
* Styles for the item.
*/
styles: Object,
/**
* Invoked to obtain translated strings.
*/
t: Function
}
const SpeakerStatsItem = (props: Props) => {
const hasLeftClass = props.hasLeft ? props.styles.hasLeft : '';
const rowDisplayClass = `row ${hasLeftClass} ${props.styles.item}`;
const expressionClass = 'expression';
const nameTimeClass = `name-time${
props.showFaceExpressions ? ' name-time_expressions-on' : ''
}`;
const timeClass = `${props.styles.time} ${props.isDominantSpeaker ? props.styles.dominant : ''}`;
const FaceExpressions = () => FACE_EXPRESSIONS.map(
expression => (
<div
aria-label = { props.t(`speakerStats.${expression}`) }
className = {
`${expressionClass} ${
props.faceExpressions[expression] === 0 ? props.styles.hasLeft : ''
}`
}
key = { expression }>
{ props.faceExpressions[expression] }
</div>
)
);
return (
<div
className = { rowDisplayClass }
key = { props.participantId } >
<div className = { `avatar ${props.styles.avatar}` }>
{
props.hasLeft ? (
<StatelessAvatar
className = 'userAvatar'
color = { BaseTheme.palette.ui04 }
id = 'avatar'
initials = { getInitials(props.displayName) } />
) : (
<Avatar
className = 'userAvatar'
participantId = { props.participantId } />
)
}
</div>
<div className = { nameTimeClass }>
<div
aria-label = { props.t('speakerStats.speakerStats') }
className = { props.styles.displayName }>
{ props.displayName }
</div>
<div
aria-label = { props.t('speakerStats.speakerTime') }
className = { timeClass }>
<TimeElapsed
time = { props.dominantSpeakerTime } />
</div>
</div>
{ props.showFaceExpressions
&& (
<div className = { `expressions ${props.styles.expressions}` }>
<FaceExpressions />
</div>
)}
</div>
);
};
export default SpeakerStatsItem;

View File

@ -0,0 +1,115 @@
// eslint-disable-next-line lines-around-comment
import React from 'react';

// @ts-ignore
import Avatar from '../../../base/avatar/components/Avatar';
import StatelessAvatar from '../../../base/avatar/components/web/StatelessAvatar';
import { getInitials } from '../../../base/avatar/functions';
import BaseTheme from '../../../base/ui/components/BaseTheme.web';
import { FaceLandmarks } from '../../../face-landmarks/types';

import TimeElapsed from './TimeElapsed';
import Timeline from './Timeline';

/**
 * The type of the React {@code Component} props of {@link SpeakerStatsItem}.
 */
type Props = {

    /**
     * The name of the participant.
     */
    displayName: string;

    /**
     * The total milliseconds the participant has been dominant speaker.
     */
    dominantSpeakerTime: number;

    /**
     * The face landmark detections recorded for the participant, rendered
     * as a color-coded expression timeline below the name/time row.
     */
    faceLandmarks?: FaceLandmarks[];

    /**
     * True if the participant is no longer in the meeting.
     */
    hasLeft: boolean;

    /**
     * True if the participant is not shown in speaker stats.
     */
    hidden: boolean;

    /**
     * True if the participant is currently the dominant speaker.
     */
    isDominantSpeaker: boolean;

    /**
     * The id of the user.
     */
    participantId: string;

    /**
     * True if the face expressions detection is not disabled.
     */
    showFaceExpressions: boolean;

    /**
     * Invoked to obtain translated strings.
     */
    t: Function;
};

/**
 * Presentational row for one participant in the speaker stats dialog:
 * avatar, display name, accumulated dominant-speaker time and, when face
 * expression detection is on, the participant's expression {@link Timeline}.
 *
 * @param {Props} props - The component props.
 * @returns {ReactElement}
 */
const SpeakerStatsItem = (props: Props) => {
    // Dim the row once the participant has left the meeting.
    const rowDisplayClass = `row item ${props.hasLeft ? 'has-left' : ''}`;
    const nameTimeClass = `name-time${
        props.showFaceExpressions ? ' expressions-on' : ''
    }`;

    // Highlight the elapsed-time badge while this participant is the
    // dominant speaker.
    const timeClass = `time ${props.isDominantSpeaker ? 'dominant' : ''}`;

    return (
        <div key = { props.participantId }>
            <div className = { rowDisplayClass } >
                <div className = 'avatar' >
                    {
                        // Participants who left no longer resolve through the
                        // store, so render a stateless avatar with initials.
                        props.hasLeft ? (
                            <StatelessAvatar
                                className = 'userAvatar'
                                color = { BaseTheme.palette.ui04 }
                                initials = { getInitials(props.displayName) } />
                        ) : (
                            <Avatar
                                // @ts-ignore
                                className = 'userAvatar'
                                participantId = { props.participantId } />
                        )
                    }
                </div>
                <div className = { nameTimeClass }>
                    <div
                        aria-label = { props.t('speakerStats.speakerStats') }
                        className = 'display-name'>
                        { props.displayName }
                    </div>
                    <div
                        aria-label = { props.t('speakerStats.speakerTime') }
                        className = { timeClass }>
                        <TimeElapsed
                            time = { props.dominantSpeakerTime } />
                    </div>
                </div>
                { props.showFaceExpressions
                    && <Timeline faceLandmarks = { props.faceLandmarks } />
                }
            </div>
            <div className = 'separator' />
        </div>
    );
};

export default SpeakerStatsItem;

View File

@ -2,21 +2,18 @@ import React from 'react';
import { useTranslation } from 'react-i18next';
import { makeStyles } from 'tss-react/mui';
import { withPixelLineHeight } from '../../../base/styles/functions.web';
// eslint-disable-next-line lines-around-comment
// @ts-ignore
import { Tooltip } from '../../../base/tooltip';
import { FACE_EXPRESSIONS_EMOJIS } from '../../../face-landmarks/constants';
import TimelineAxis from './TimelineAxis';
const useStyles = makeStyles()(theme => {
return {
labels: {
padding: '22px 0 7px 0',
height: 20
},
emojis: {
paddingLeft: 27,
...withPixelLineHeight(theme.typography.bodyShortRegularLarge)
height: 20,
'& .avatar-placeholder': {
width: '32px',
marginRight: theme.spacing(3)
}
}
};
});
@ -36,12 +33,12 @@ const SpeakerStatsLabels = (props: IProps) => {
const { t } = useTranslation();
const { classes } = useStyles();
const nameTimeClass = `name-time${
props.showFaceExpressions ? ' name-time_expressions-on' : ''
props.showFaceExpressions ? ' expressions-on' : ''
}`;
return (
<div className = { `row ${classes.labels}` }>
<div className = 'avatar' />
<div className = 'avatar-placeholder' />
<div className = { nameTimeClass }>
<div>
@ -51,27 +48,7 @@ const SpeakerStatsLabels = (props: IProps) => {
{ t('speakerStats.speakerTime') }
</div>
</div>
{
props.showFaceExpressions
&& <div className = { `expressions ${classes.emojis}` }>
{Object.keys(FACE_EXPRESSIONS_EMOJIS).map(
expression => (
<div
className = 'expression'
key = { expression }>
<Tooltip
content = { t(`speakerStats.${expression}`) }
position = { 'top' } >
<div>
{FACE_EXPRESSIONS_EMOJIS[expression as keyof typeof FACE_EXPRESSIONS_EMOJIS]}
</div>
</Tooltip>
</div>
)
)}
</div>
}
{props.showFaceExpressions && <TimelineAxis />}
</div>
);
};

View File

@ -13,40 +13,40 @@ import SpeakerStatsItem from './SpeakerStatsItem';
const useStyles = makeStyles()(theme => {
return {
list: {
marginTop: theme.spacing(3),
marginBottom: theme.spacing(3)
},
item: {
height: theme.spacing(7),
[theme.breakpoints.down(MOBILE_BREAKPOINT)]: {
height: theme.spacing(8)
paddingTop: 90,
'& .item': {
height: theme.spacing(7),
[theme.breakpoints.down(MOBILE_BREAKPOINT)]: {
height: theme.spacing(8)
},
'& .has-left': {
color: theme.palette.text03
},
'& .avatar': {
width: '32px',
marginRight: theme.spacing(3),
height: theme.spacing(5)
},
'& .time': {
padding: '2px 4px',
borderRadius: '4px',
...withPixelLineHeight(theme.typography.labelBold),
[theme.breakpoints.down(MOBILE_BREAKPOINT)]: {
...withPixelLineHeight(theme.typography.bodyShortRegularLarge)
},
backgroundColor: theme.palette.ui02
},
'& .display-name': {
...withPixelLineHeight(theme.typography.bodyShortRegular),
[theme.breakpoints.down(MOBILE_BREAKPOINT)]: {
...withPixelLineHeight(theme.typography.bodyShortRegularLarge)
}
},
'& .dominant': {
backgroundColor: theme.palette.success02
}
}
},
avatar: {
height: theme.spacing(5)
},
expressions: {
paddingLeft: 29
},
hasLeft: {
color: theme.palette.text03
},
displayName: {
...withPixelLineHeight(theme.typography.bodyShortRegular),
[theme.breakpoints.down(MOBILE_BREAKPOINT)]: {
...withPixelLineHeight(theme.typography.bodyShortRegularLarge)
}
},
time: {
padding: '2px 4px',
borderRadius: '4px',
...withPixelLineHeight(theme.typography.labelBold),
[theme.breakpoints.down(MOBILE_BREAKPOINT)]: {
...withPixelLineHeight(theme.typography.bodyShortRegularLarge)
}
},
dominant: {
backgroundColor: theme.palette.success02
}
};
});
@ -58,10 +58,11 @@ const useStyles = makeStyles()(theme => {
*/
const SpeakerStatsList = () => {
const { classes } = useStyles();
const items = abstractSpeakerStatsList(SpeakerStatsItem, classes);
const items = abstractSpeakerStatsList(SpeakerStatsItem);
return (
<div className = { classes.list }>
<div className = 'separator' />
{items}
</div>
);

View File

@ -1,50 +0,0 @@
/* @flow */

import React, { Component } from 'react';

import { translate } from '../../../base/i18n';

import { createLocalizedTime } from '../timeFunctions';

/**
 * The type of the React {@code Component} props of {@link TimeElapsed}.
 */
type Props = {

    /**
     * The function to translate human-readable text. Injected by the
     * {@code translate} HOC below.
     */
    t: Function,

    /**
     * The milliseconds to be converted into a human-readable format.
     */
    time: number
};

/**
 * React component for displaying total time elapsed. Converts a total count of
 * milliseconds into a more humanized form: "# hours, # minutes, # seconds".
 * With a time of 0, "0s" will be displayed.
 *
 * @augments Component
 */
class TimeElapsed extends Component<Props> {
    /**
     * Implements React's {@link Component#render()}.
     *
     * @inheritdoc
     * @returns {ReactElement}
     */
    render() {
        const { time, t } = this.props;

        // createLocalizedTime returns the ready-to-render, localized
        // hour/minute/second elements for the given duration.
        const timeElapsed = createLocalizedTime(time, t);

        return (
            <div>
                { timeElapsed }
            </div>
        );
    }
}

export default translate(TimeElapsed);

View File

@ -0,0 +1,36 @@
import React from 'react';
import { useTranslation } from 'react-i18next';

import { createLocalizedTime } from '../timeFunctions';

/**
 * The type of the React {@code Component} props of {@link TimeElapsed}.
 */
type Props = {

    /**
     * The milliseconds to be converted into a human-readable format.
     */
    time: number;
};

/**
 * Renders a total elapsed time as humanized, localized text, e.g.
 * "# hours, # minutes, # seconds". A duration of 0 renders as "0s".
 *
 * @param {Props} props - The props holding the duration in milliseconds.
 * @returns {ReactElement}
 */
const TimeElapsed = ({ time }: Props) => {
    const { t } = useTranslation();

    return (
        <span>
            { createLocalizedTime(time, t) }
        </span>
    );
};

export default TimeElapsed;

View File

@ -0,0 +1,207 @@
import React, { MouseEvent, useCallback, useEffect, useMemo, useRef } from 'react';
import { useDispatch, useSelector } from 'react-redux';

import { IReduxState } from '../../../app/types';
import { getConferenceTimestamp } from '../../../base/conference/functions';
import { FaceLandmarks } from '../../../face-landmarks/types';
import { addToOffset, setTimelinePanning } from '../../actions.any';
import { SCROLL_RATE, TIMELINE_COLORS } from '../../constants';
import { getFaceLandmarksEnd, getFaceLandmarksStart, getTimelineBoundaries } from '../../functions';

type Props = {

    /**
     * The face landmark detections of one participant, each carrying a
     * timestamp, duration and detected face expression.
     */
    faceLandmarks?: FaceLandmarks[];
};

/**
 * Color-coded horizontal timeline of a single participant's face
 * expressions over the visible [left, right] interval. Supports panning by
 * mouse drag and by horizontal wheel scrolling; both are dispatched as
 * offset changes on the shared speaker-stats state, so every timeline in
 * the dialog stays in sync.
 *
 * @param {Props} props - The component props.
 * @returns {ReactElement}
 */
const Timeline = ({ faceLandmarks }: Props) => {
    const startTimestamp = useSelector((state: IReduxState) => getConferenceTimestamp(state)) ?? 0;
    const { left, right } = useSelector((state: IReduxState) => getTimelineBoundaries(state));
    const { timelinePanning } = useSelector((state: IReduxState) => state['features/speaker-stats']);
    const dispatch = useDispatch();
    const containerRef = useRef<HTMLDivElement>(null);

    // Width of the visible window, in milliseconds.
    const intervalDuration = useMemo(() => right - left, [ left, right ]);

    /**
     * Splits the landmarks that intersect the visible interval into fully
     * visible segments plus optional left/right "cut" segments whose start
     * or end falls outside the interval and must be clipped.
     */
    const getSegments = useCallback(() => {
        const segments = faceLandmarks?.filter(landmarks => {
            const timeStart = getFaceLandmarksStart(landmarks, startTimestamp);
            const timeEnd = getFaceLandmarksEnd(landmarks, startTimestamp);

            if (timeEnd > left && timeStart < right) {
                return true;
            }

            return false;
        }) ?? [];

        let leftCut;
        let rightCut;

        if (segments.length) {
            const start = getFaceLandmarksStart(segments[0], startTimestamp);
            const end = getFaceLandmarksEnd(segments[segments.length - 1], startTimestamp);

            if (start <= left) {
                leftCut = segments[0];
            }
            if (end >= right) {
                rightCut = segments[segments.length - 1];
            }
        }

        // Cut segments are rendered separately by getStartStyle/getEndStyle,
        // so remove them from the fully visible list.
        if (leftCut) {
            segments.shift();
        }
        if (rightCut) {
            segments.pop();
        }

        return {
            segments,
            leftCut,
            rightCut
        };
    }, [ faceLandmarks, left, right, startTimestamp ]);

    const { segments, leftCut, rightCut } = getSegments();

    /**
     * Style of a fully visible segment: width proportional to its share of
     * the visible interval, background keyed by expression.
     */
    const getStyle = useCallback((duration: number, faceExpression: string) => {
        return {
            width: `${100 / (intervalDuration / duration)}%`,
            backgroundColor: TIMELINE_COLORS[faceExpression] ?? TIMELINE_COLORS['no-detection']
        };
    }, [ intervalDuration ]);

    /**
     * Style of the leading (left-clipped) segment, or of the transparent
     * filler before the first detection when nothing is clipped.
     */
    const getStartStyle = useCallback(() => {
        let startDuration = 0;
        let color = TIMELINE_COLORS['no-detection'];

        if (leftCut) {
            const { faceExpression } = leftCut;

            startDuration = getFaceLandmarksEnd(leftCut, startTimestamp) - left;
            color = TIMELINE_COLORS[faceExpression];
        } else if (segments.length) {
            startDuration = getFaceLandmarksStart(segments[0], startTimestamp) - left;
        } else if (rightCut) {
            startDuration = getFaceLandmarksStart(rightCut, startTimestamp) - left;
        }

        return {
            width: `${100 / (intervalDuration / startDuration)}%`,
            backgroundColor: color
        };
    }, [ leftCut, rightCut, startTimestamp, left, intervalDuration, segments ]);

    /**
     * Style of the trailing (right-clipped) segment, or of the transparent
     * filler after the last detection when nothing is clipped.
     */
    const getEndStyle = useCallback(() => {
        let endDuration = 0;
        let color = TIMELINE_COLORS['no-detection'];

        if (rightCut) {
            const { faceExpression } = rightCut;

            endDuration = right - getFaceLandmarksStart(rightCut, startTimestamp);
            color = TIMELINE_COLORS[faceExpression];
        } else if (segments.length) {
            endDuration = right - getFaceLandmarksEnd(segments[segments.length - 1], startTimestamp);
        } else if (leftCut) {
            endDuration = right - getFaceLandmarksEnd(leftCut, startTimestamp);
        }

        return {
            width: `${100 / (intervalDuration / endDuration)}%`,
            backgroundColor: color
        };
    }, [ leftCut, rightCut, startTimestamp, right, intervalDuration, segments ]);

    // Style used when one expression spans the whole visible interval.
    const getOneSegmentStyle = useCallback((faceExpression?: string) => {
        return {
            width: '100%',
            backgroundColor: faceExpression ? TIMELINE_COLORS[faceExpression] : TIMELINE_COLORS['no-detection'],
            borderRadius: 0
        };
    }, []);

    /**
     * Pans the timeline on horizontal wheel/trackpad scrolling; vertical
     * scrolling is left to the browser.
     */
    const handleOnWheel = useCallback((event: WheelEvent) => {
        // check if horizontal scroll
        if (Math.abs(event.deltaX) >= Math.abs(event.deltaY)) {
            const value = event.deltaX * SCROLL_RATE;

            dispatch(addToOffset(value));
            event.preventDefault();
        }
    }, [ dispatch, addToOffset ]);

    // True when the same clipped expression covers the entire interval and
    // the separate start/end segments would just duplicate each other.
    const hideStartAndEndSegments = useCallback(() => leftCut && rightCut
            && leftCut.faceExpression === rightCut.faceExpression
            && !segments.length,
    [ leftCut, rightCut, segments ]);

    useEffect(() => {
        // Registered natively (not via props) so that passive:false is
        // honored and preventDefault can stop the page from scrolling.
        // NOTE(review): deps are empty, so the listener/cleanup capture the
        // first render's handleOnWheel and containerRef.current — appears
        // intentional (dispatch is stable), but confirm.
        containerRef.current?.addEventListener('wheel', handleOnWheel, { passive: false });

        return () => containerRef.current?.removeEventListener('wheel', handleOnWheel);
    }, []);

    /**
     * Converts a mouse event's x position into a point on the timeline,
     * in milliseconds.
     */
    const getPointOnTimeline = useCallback((event: MouseEvent) => {
        const axisRect = event.currentTarget.getBoundingClientRect();
        const eventOffsetX = event.pageX - axisRect.left;

        return (eventOffsetX * right) / axisRect.width;
    }, [ right ]);

    /**
     * While panning is active, shifts the visible interval by the distance
     * dragged since the last mouse-move and records the new anchor point.
     */
    const handleOnMouseMove = useCallback((event: MouseEvent) => {
        const { active, x } = timelinePanning;

        if (active) {
            const point = getPointOnTimeline(event);

            dispatch(addToOffset(x - point));
            dispatch(setTimelinePanning({ ...timelinePanning,
                x: point }));
        }
    }, [ timelinePanning, dispatch, addToOffset, setTimelinePanning, getPointOnTimeline ]);

    /**
     * Starts a panning gesture anchored at the pressed point.
     */
    const handleOnMouseDown = useCallback((event: MouseEvent) => {
        const point = getPointOnTimeline(event);

        dispatch(setTimelinePanning(
            {
                active: true,
                x: point
            }
        ));

        event.preventDefault();
        event.stopPropagation();
    }, [ getPointOnTimeline, dispatch, setTimelinePanning ]);

    return (
        <div
            className = 'timeline-container'
            onMouseDown = { handleOnMouseDown }
            onMouseMove = { handleOnMouseMove }
            ref = { containerRef }>
            <div
                className = 'timeline'>
                {!hideStartAndEndSegments() && <div
                    aria-label = 'start'
                    style = { getStartStyle() } />}
                {hideStartAndEndSegments() && <div
                    style = { getOneSegmentStyle(leftCut?.faceExpression) } />}
                {segments?.map(({ duration, timestamp, faceExpression }) =>
                    (<div
                        aria-label = { faceExpression }
                        key = { timestamp }
                        style = { getStyle(duration, faceExpression) } />)) }
                {!hideStartAndEndSegments() && <div
                    aria-label = 'end'
                    style = { getEndStyle() } />}
            </div>
        </div>
    );
};

export default Timeline;

View File

@ -0,0 +1,187 @@
import React, { MouseEvent, useCallback, useEffect, useRef, useState } from 'react';
import { useDispatch, useSelector } from 'react-redux';

import { IReduxState } from '../../../app/types';
import { addToOffset, addToOffsetLeft, addToOffsetRight, setTimelinePanning } from '../../actions.any';
import { MIN_HANDLER_WIDTH } from '../../constants';
import { getCurrentDuration, getTimelineBoundaries } from '../../functions';

import TimeElapsed from './TimeElapsed';

/**
 * Axis under the speaker-stats timelines, spanning the whole conference
 * duration. A draggable "handler" marks the currently visible [left, right]
 * interval; its edges can be resized independently and the whole handler
 * can be dragged to pan, all dispatched as offset changes on the shared
 * speaker-stats state.
 *
 * @returns {ReactElement}
 */
const TimelineAxis = () => {
    const currentDuration = useSelector((state: IReduxState) => getCurrentDuration(state)) ?? 0;
    const { left, right } = useSelector((state: IReduxState) => getTimelineBoundaries(state));
    const { timelinePanning } = useSelector((state: IReduxState) => state['features/speaker-stats']);
    const dispatch = useDispatch();
    const axisRef = useRef<HTMLDivElement>(null);

    // Whether an edge-resize drag is in progress, per edge.
    const [ dragLeft, setDragLeft ] = useState(false);
    const [ dragRight, setDragRight ] = useState(false);

    /**
     * Converts a mouse event's x position into a point on the axis,
     * in milliseconds of conference time.
     */
    const getPointOnAxis = useCallback((event: MouseEvent) => {
        const axisRect = event.currentTarget.getBoundingClientRect();
        const eventOffsetX = event.pageX - axisRect.left;

        return (eventOffsetX * currentDuration) / axisRect.width;
    }, [ currentDuration ]);

    /**
     * Begins resizing the left edge, unless a pan or a right-edge resize
     * is already in progress.
     */
    const startResizeHandlerLeft = useCallback((event: MouseEvent) => {
        if (!timelinePanning.active && !dragRight) {
            setDragLeft(true);
        }
        event.preventDefault();
        event.stopPropagation();
    }, [ dragRight, timelinePanning, setDragLeft ]);

    const stopResizeLeft = () => {
        setDragLeft(false);
    };

    /**
     * While the left edge is being dragged, moves the left boundary to the
     * pointer position (clamped to [0, right)).
     * NOTE(review): deps omit left/right, so the clamp uses values captured
     * at callback creation — confirm this is intentional.
     */
    const resizeHandlerLeft = useCallback((event: MouseEvent) => {
        if (dragLeft) {
            const point = getPointOnAxis(event);

            if (point >= 0 && point < right) {
                const value = point - left;

                dispatch(addToOffsetLeft(value));
            }
        }
    }, [ dragLeft, getPointOnAxis, dispatch, addToOffsetLeft ]);

    /**
     * Begins resizing the right edge, unless a pan or another right-edge
     * resize is already in progress.
     */
    const startResizeHandlerRight = useCallback((event: MouseEvent) => {
        if (!timelinePanning.active && !dragRight) {
            setDragRight(true);
        }
        event.preventDefault();
        event.stopPropagation();
    }, [ timelinePanning, dragRight ]);

    const stopResizeRight = useCallback(() => {
        setDragRight(false);
    }, [ setDragRight ]);

    /**
     * While the right edge is being dragged, moves the right boundary to
     * the pointer position (clamped to (left, currentDuration]).
     */
    const resizeHandlerRight = (event: MouseEvent) => {
        if (dragRight) {
            const point = getPointOnAxis(event);

            if (point > left && point <= currentDuration) {
                const value = point - right;

                dispatch(addToOffsetRight(value));
            }
        }
    };

    /**
     * Starts panning the handler, anchored at the pressed point, unless an
     * edge resize is in progress.
     */
    const startMoveHandler = useCallback((event: MouseEvent) => {
        if (!dragLeft && !dragRight) {
            const point = getPointOnAxis(event);

            dispatch(setTimelinePanning(
                {
                    active: true,
                    x: point
                }
            ));
        }
        event.preventDefault();
        event.stopPropagation();
    }, [ dragLeft, dragRight, getPointOnAxis, dispatch, setTimelinePanning ]);

    const stopMoveHandler = () => {
        dispatch(setTimelinePanning({ ...timelinePanning,
            active: false }));
    };

    /**
     * While panning is active, shifts both boundaries by the distance
     * dragged since the last mouse-move and records the new anchor point.
     */
    const moveHandler = useCallback((event: MouseEvent) => {
        const { active, x } = timelinePanning;

        if (active) {
            const point = getPointOnAxis(event);

            dispatch(addToOffset(point - x));
            dispatch(setTimelinePanning({ ...timelinePanning,
                x: point }));
        }
    }, [ timelinePanning, getPointOnAxis, dispatch, addToOffset, setTimelinePanning ]);

    // A single mouse-move handler feeds all three gestures; each one
    // no-ops unless its own drag flag is set.
    const handleOnMouseMove = useCallback((event: MouseEvent) => {
        resizeHandlerLeft(event);
        resizeHandlerRight(event);
        moveHandler(event);
    }, [ resizeHandlerLeft, resizeHandlerRight ]);

    // Releasing the mouse anywhere ends every gesture.
    const handleOnMouseUp = useCallback(() => {
        stopResizeLeft();
        stopResizeRight();
        stopMoveHandler();
    }, [ stopResizeLeft, stopResizeRight, stopMoveHandler ]);

    /**
     * Computes the handler's position/size as percentages of the axis,
     * enforcing a minimum pixel width (MIN_HANDLER_WIDTH) and keeping the
     * handler inside the axis when it would overflow the right edge.
     */
    const getHandlerStyle = useCallback(() => {
        let marginLeft = 100 / (currentDuration / left);
        let width = 100 / (currentDuration / (right - left));

        if (axisRef.current) {
            const axisWidth = axisRef.current.getBoundingClientRect().width;
            let handlerWidth = (width / 100) * axisWidth;

            if (handlerWidth < MIN_HANDLER_WIDTH) {
                // Too narrow to grab: widen to the minimum and pull the left
                // edge back so the right edge stays put.
                const newLeft = right - ((currentDuration * MIN_HANDLER_WIDTH) / axisWidth);

                handlerWidth = MIN_HANDLER_WIDTH;
                marginLeft = 100 / (currentDuration / newLeft);
                width = 100 / (currentDuration / (right - newLeft));
            }

            if (marginLeft + width > 100) {
                // Would overflow the axis: pin the handler to the right edge.
                return {
                    marginLeft: `calc(100% - ${handlerWidth}px)`,
                    width: handlerWidth
                };
            }
        }

        return {
            marginLeft: `${marginLeft > 0 ? marginLeft : 0}%`,
            width: `${width}%`
        };
    }, [ currentDuration, left, right, axisRef ]);

    useEffect(() => {
        // Listen on window so gestures end even when the pointer is
        // released outside the axis.
        // NOTE(review): empty deps capture the first render's
        // handleOnMouseUp — confirm the stale closure is acceptable here.
        window.addEventListener('mouseup', handleOnMouseUp);

        return () => window.removeEventListener('mouseup', handleOnMouseUp);
    }, []);

    return (
        <div
            className = 'axis-container'
            onMouseMove = { handleOnMouseMove }
            ref = { axisRef }>
            <div
                className = 'axis'>
                <div className = 'left-bound'>
                    <TimeElapsed time = { 0 } />
                </div>
                <div className = 'right-bound'>
                    <TimeElapsed time = { currentDuration } />
                </div>
                <div
                    className = 'handler'
                    onMouseDown = { startMoveHandler }
                    style = { getHandlerStyle() } >
                    <div
                        className = 'resize'
                        id = 'left'
                        onMouseDown = { startResizeHandlerLeft } />
                    <div
                        className = 'resize'
                        id = 'right'
                        onMouseDown = { startResizeHandlerRight } />
                </div>
            </div>
        </div>
    );
};

export default TimelineAxis;

View File

@ -2,6 +2,27 @@ export const SPEAKER_STATS_RELOAD_INTERVAL = 1000;
export const DISPLAY_SWITCH_BREAKPOINT = 600;

export const RESIZE_SEARCH_SWITCH_CONTAINER_BREAKPOINT = 750;

export const MOBILE_BREAKPOINT = 480;

/**
 * Duration (ms) past which the timeline's left boundary starts following
 * the right one, keeping the visible axis window at a fixed width.
 */
export const THRESHOLD_FIXED_AXIS = 10000;

// Smallest visible timeline interval, in milliseconds.
// NOTE(review): not referenced in this chunk — confirm usage elsewhere.
export const MINIMUM_INTERVAL = 4000;

/**
 * Multiplier applied to horizontal wheel deltaX when panning the timeline.
 */
export const SCROLL_RATE = 500;

/**
 * Minimum width, in pixels, of the draggable interval handler on the axis.
 */
export const MIN_HANDLER_WIDTH = 30;

/**
 * Background colors for the timeline segments, keyed by face expression.
 * 'no-detection' (fully transparent) fills gaps between detections.
 */
export const TIMELINE_COLORS: {
    [key: string]: string;
} = {
    happy: '#F3AD26',
    neutral: '#676767',
    sad: '#539EF9',
    surprised: '#BC72E1',
    angry: '#F35826',
    fearful: '#3AC8C8',
    disgusted: '#65B16B',
    'no-detection': '#FFFFFF00'
};

/**
 * How close (ms) the right boundary must get to the live conference
 * duration before the fixed timeline boundary is cleared and the timeline
 * resumes following "now".
 */
export const CLEAR_TIME_BOUNDARY_THRESHOLD = 1000;

View File

@ -1,8 +1,13 @@
import _ from 'lodash';
import { IReduxState } from '../app/types';
import { getConferenceTimestamp } from '../base/conference/functions';
import { PARTICIPANT_ROLE } from '../base/participants/constants';
import { getParticipantById } from '../base/participants/functions';
import { FaceLandmarks } from '../face-landmarks/types';
import { THRESHOLD_FIXED_AXIS } from './constants';
import { ISpeaker, ISpeakerStats } from './reducer';
/**
* Checks if the speaker stats search is disabled.
@ -71,12 +76,12 @@ export function getPendingReorder(state: IReduxState) {
/**
* Get sorted speaker stats ids based on a configuration setting.
*
* @param {IReduxState} state - The redux state.
* @param {Object} stats - The current speaker stats.
* @returns {Object} - Ordered speaker stats ids.
* @param {IState} state - The redux state.
* @param {IState} stats - The current speaker stats.
* @returns {string[] | undefined} - Ordered speaker stats ids.
* @public
*/
export function getSortedSpeakerStatsIds(state: IReduxState, stats: Object) {
export function getSortedSpeakerStatsIds(state: IReduxState, stats: ISpeakerStats) {
const orderConfig = getSpeakerStatsOrder(state);
if (orderConfig) {
@ -91,11 +96,11 @@ export function getSortedSpeakerStatsIds(state: IReduxState, stats: Object) {
*
* Compares the order of two participants in the speaker stats list.
*
* @param {Object} currentParticipant - The first participant for comparison.
* @param {Object} nextParticipant - The second participant for comparison.
* @param {ISpeaker} currentParticipant - The first participant for comparison.
* @param {ISpeaker} nextParticipant - The second participant for comparison.
* @returns {number} - The sort order of the two participants.
*/
function compareFn(currentParticipant: any, nextParticipant: any) {
function compareFn(currentParticipant: ISpeaker, nextParticipant: ISpeaker) {
if (orderConfig.includes('hasLeft')) {
if (nextParticipant.hasLeft() && !currentParticipant.hasLeft()) {
return -1;
@ -104,7 +109,7 @@ export function getSortedSpeakerStatsIds(state: IReduxState, stats: Object) {
}
}
let result;
let result = 0;
for (const sortCriteria of orderConfig) {
switch (sortCriteria) {
@ -136,13 +141,13 @@ export function getSortedSpeakerStatsIds(state: IReduxState, stats: Object) {
/**
* Enhance speaker stats to include data needed for ordering.
*
* @param {IReduxState} state - The redux state.
* @param {Object} stats - Speaker stats.
* @param {IState} state - The redux state.
* @param {ISpeakerStats} stats - Speaker stats.
* @param {Array<string>} orderConfig - Ordering configuration.
* @returns {Object} - Enhanced speaker stats.
* @returns {ISpeakerStats} - Enhanced speaker stats.
* @public
*/
function getEnhancedStatsForOrdering(state: IReduxState, stats: any, orderConfig?: string[]) {
function getEnhancedStatsForOrdering(state: IReduxState, stats: ISpeakerStats, orderConfig: Array<string>) {
if (!orderConfig) {
return stats;
}
@ -163,14 +168,14 @@ function getEnhancedStatsForOrdering(state: IReduxState, stats: any, orderConfig
/**
* Filter stats by search criteria.
*
* @param {IReduxState} state - The redux state.
* @param {Object | undefined} stats - The unfiltered stats.
* @param {IState} state - The redux state.
* @param {ISpeakerStats | undefined} stats - The unfiltered stats.
*
* @returns {Object} - Filtered speaker stats.
* @returns {ISpeakerStats} - Filtered speaker stats.
* @public
*/
export function filterBySearchCriteria(state: IReduxState, stats?: Object) {
const filteredStats: any = _.cloneDeep(stats ?? getSpeakerStats(state));
export function filterBySearchCriteria(state: IReduxState, stats?: ISpeakerStats) {
const filteredStats = _.cloneDeep(stats ?? getSpeakerStats(state));
const criteria = getSearchCriteria(state);
if (criteria !== null) {
@ -191,14 +196,14 @@ export function filterBySearchCriteria(state: IReduxState, stats?: Object) {
/**
* Reset the hidden speaker stats.
*
* @param {IReduxState} state - The redux state.
* @param {Object | undefined} stats - The unfiltered stats.
* @param {IState} state - The redux state.
* @param {ISpeakerStats | undefined} stats - The unfiltered stats.
*
* @returns {Object} - Speaker stats.
* @public
*/
export function resetHiddenStats(state: IReduxState, stats?: Object) {
const resetStats: any = _.cloneDeep(stats ?? getSpeakerStats(state));
export function resetHiddenStats(state: IReduxState, stats?: ISpeakerStats) {
const resetStats = _.cloneDeep(stats ?? getSpeakerStats(state));
for (const id in resetStats) {
if (resetStats[id].hidden) {
@ -208,3 +213,62 @@ export function resetHiddenStats(state: IReduxState, stats?: Object) {
return resetStats;
}
/**
 * Gets the current duration of the conference, measured from the
 * conference start timestamp up to now.
 *
 * @param {IReduxState} state - The redux state.
 * @returns {number | null} - The duration in milliseconds, or null when the
 * conference has no start timestamp yet.
 */
export function getCurrentDuration(state: IReduxState) {
    const startTimestamp = getConferenceTimestamp(state);

    return startTimestamp ? Date.now() - startTimestamp : null;
}
/**
 * Gets the boundaries of the emotion timeline: the visible [left, right]
 * interval in milliseconds of conference time, after applying the user's
 * pan/resize offsets.
 *
 * @param {IReduxState} state - The redux state.
 * @returns {Object} - The left and right boundaries.
 */
export function getTimelineBoundaries(state: IReduxState) {
    const { timelineBoundary, offsetLeft, offsetRight } = state['features/speaker-stats'];
    const currentDuration = getCurrentDuration(state) ?? 0;

    // When no boundary is pinned, the right edge follows the live
    // conference duration.
    const rightBoundary = timelineBoundary ? timelineBoundary : currentDuration;
    let leftOffset = 0;

    // Once the interval would exceed THRESHOLD_FIXED_AXIS, the left edge
    // follows the right one so the visible window keeps a fixed width.
    if (rightBoundary > THRESHOLD_FIXED_AXIS) {
        leftOffset = rightBoundary - THRESHOLD_FIXED_AXIS;
    }

    const left = offsetLeft + leftOffset;
    const right = rightBoundary + offsetRight;

    return {
        left,
        right
    };
}
/**
 * Returns the conference-relative start time of the face landmarks, i.e.
 * milliseconds elapsed between the conference start and the detection.
 *
 * @param {FaceLandmarks} faceLandmarks - The face landmarks.
 * @param {number} startTimestamp - The start timestamp of the conference.
 * @returns {number} - The start time in milliseconds of conference time.
 */
export function getFaceLandmarksStart(faceLandmarks: FaceLandmarks, startTimestamp: number) {
    return faceLandmarks.timestamp - startTimestamp;
}
/**
 * Returns the conference-relative end time of the face landmarks: its
 * start time plus the detection's duration.
 *
 * @param {FaceLandmarks} faceLandmarks - The face landmarks.
 * @param {number} startTimestamp - The start timestamp of the conference.
 * @returns {number} - The end time in milliseconds of conference time.
 */
export function getFaceLandmarksEnd(faceLandmarks: FaceLandmarks, startTimestamp: number) {
    return getFaceLandmarksStart(faceLandmarks, startTimestamp) + faceLandmarks.duration;
}

View File

@ -1,3 +1,6 @@
import { AnyAction } from 'redux';
import { IStore } from '../app/types';
import {
PARTICIPANT_JOINED,
PARTICIPANT_KICKED,
@ -7,16 +10,29 @@ import {
import MiddlewareRegistry from '../base/redux/MiddlewareRegistry';
import {
ADD_TO_OFFSET,
INIT_SEARCH,
INIT_UPDATE_STATS,
RESET_SEARCH_CRITERIA
} from './actionTypes';
import { initReorderStats, updateSortedSpeakerStatsIds, updateStats } from './actions';
import { filterBySearchCriteria, getPendingReorder, getSortedSpeakerStatsIds, resetHiddenStats } from './functions';
MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
const result = next(action);
import {
clearTimelineBoundary,
initReorderStats,
setTimelineBoundary,
updateSortedSpeakerStatsIds,
updateStats
} from './actions.any';
import { CLEAR_TIME_BOUNDARY_THRESHOLD } from './constants';
import {
filterBySearchCriteria,
getCurrentDuration,
getPendingReorder,
getSortedSpeakerStatsIds,
getTimelineBoundaries,
resetHiddenStats
} from './functions';
MiddlewareRegistry.register(({ dispatch, getState }: IStore) => (next: Function) => (action: AnyAction) => {
switch (action.type) {
case INIT_SEARCH: {
const state = getState();
@ -34,7 +50,7 @@ MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
const pendingReorder = getPendingReorder(state);
if (pendingReorder) {
dispatch(updateSortedSpeakerStatsIds(getSortedSpeakerStatsIds(state, stats)));
dispatch(updateSortedSpeakerStatsIds(getSortedSpeakerStatsIds(state, stats) ?? []));
}
dispatch(updateStats(stats));
@ -57,7 +73,22 @@ MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
break;
}
case ADD_TO_OFFSET: {
const state = getState();
const { timelineBoundary } = state['features/speaker-stats'];
const { right } = getTimelineBoundaries(state);
const currentDuration = getCurrentDuration(state) ?? 0;
if (Math.abs((right + action.value) - currentDuration) < CLEAR_TIME_BOUNDARY_THRESHOLD) {
dispatch(clearTimelineBoundary());
} else if (!timelineBoundary) {
dispatch(setTimelineBoundary(currentDuration ?? 0));
}
break;
}
}
return result;
return next(action);
});

View File

@ -1,11 +1,17 @@
import _ from 'lodash';
import ReducerRegistry from '../base/redux/ReducerRegistry';
import { FaceLandmarks } from '../face-landmarks/types';
import {
ADD_TO_OFFSET,
ADD_TO_OFFSET_LEFT,
ADD_TO_OFFSET_RIGHT,
INIT_REORDER_STATS,
INIT_SEARCH,
RESET_SEARCH_CRITERIA,
SET_PANNING,
SET_TIMELINE_BOUNDARY,
TOGGLE_FACE_EXPRESSIONS,
UPDATE_SORTED_SPEAKER_STATS_IDS,
UPDATE_STATS
@ -22,16 +28,52 @@ const INITIAL_STATE = {
pendingReorder: true,
criteria: null,
showFaceExpressions: false,
sortedSpeakerStatsIds: []
sortedSpeakerStatsIds: [],
timelineBoundary: null,
offsetLeft: 0,
offsetRight: 0,
timelinePanning: {
active: false,
x: 0
}
};
export interface ISpeaker {
addFaceLandmarks: (faceLandmarks: FaceLandmarks) => void;
displayName?: string;
getDisplayName: () => string;
getFaceLandmarks: () => FaceLandmarks[];
getTotalDominantSpeakerTime: () => number;
getUserId: () => string;
hasLeft: () => boolean;
hidden?: boolean;
isDominantSpeaker: () => boolean;
isLocalStats: () => boolean;
isModerator?: boolean;
markAsHasLeft: () => boolean;
setDisplayName: (newName: string) => void;
setDominantSpeaker: (isNowDominantSpeaker: boolean, silence: boolean) => void;
setFaceLandmarks: (faceLandmarks: FaceLandmarks[]) => void;
}
export interface ISpeakerStats {
[key: string]: ISpeaker;
}
export interface ISpeakerStatsState {
criteria: string | null;
isOpen: boolean;
offsetLeft: number;
offsetRight: number;
pendingReorder: boolean;
showFaceExpressions: boolean;
sortedSpeakerStatsIds: Array<string>;
stats: Object;
stats: ISpeakerStats;
timelineBoundary: number | null;
timelinePanning: {
active: boolean;
x: number;
};
}
ReducerRegistry.register<ISpeakerStatsState>('features/speaker-stats',
@ -53,6 +95,37 @@ ReducerRegistry.register<ISpeakerStatsState>('features/speaker-stats',
showFaceExpressions: !state.showFaceExpressions
};
}
case ADD_TO_OFFSET: {
return {
...state,
offsetLeft: state.offsetLeft + action.value,
offsetRight: state.offsetRight + action.value
};
}
case ADD_TO_OFFSET_RIGHT: {
return {
...state,
offsetRight: state.offsetRight + action.value
};
}
case ADD_TO_OFFSET_LEFT: {
return {
...state,
offsetLeft: state.offsetLeft + action.value
};
}
case SET_TIMELINE_BOUNDARY: {
return {
...state,
timelineBoundary: action.boundary
};
}
case SET_PANNING: {
return {
...state,
timelinePanning: action.panning
};
}
}
return state;

View File

@ -100,10 +100,10 @@ function on_message(event)
room.speakerStats['dominantSpeakerId'] = occupant.jid;
end
local faceExpression = event.stanza:get_child('faceExpression', 'http://jitsi.org/jitmeet');
local newFaceLandmarks = event.stanza:get_child('faceLandmarks', 'http://jitsi.org/jitmeet');
if faceExpression then
local roomAddress = faceExpression.attr.room;
if newFaceLandmarks then
local roomAddress = newFaceLandmarks.attr.room;
local room = get_room_from_jid(room_jid_match_rewrite(roomAddress));
if not room then
@ -121,9 +121,13 @@ function on_message(event)
log("warn", "No occupant %s found for %s", from, roomAddress);
return false;
end
local faceExpressions = room.speakerStats[occupant.jid].faceExpressions;
faceExpressions[faceExpression.attr.expression] =
faceExpressions[faceExpression.attr.expression] + tonumber(faceExpression.attr.duration);
local faceLandmarks = room.speakerStats[occupant.jid].faceLandmarks;
table.insert(faceLandmarks,
{
faceExpression = newFaceLandmarks.attr.faceExpression,
timestamp = tonumber(newFaceLandmarks.attr.timestamp),
duration = tonumber(newFaceLandmarks.attr.duration),
})
end
return true
@ -142,15 +146,7 @@ function new_SpeakerStats(nick, context_user)
nick = nick;
context_user = context_user;
displayName = nil;
faceExpressions = {
happy = 0,
neutral = 0,
surprised = 0,
angry = 0,
fearful = 0,
disgusted = 0,
sad = 0
};
faceLandmarks = {};
}, SpeakerStats);
end
@ -243,9 +239,9 @@ function occupant_joined(event)
-- and skip focus if sneaked into the table
if values and type(values) == 'table' and values.nick ~= nil and values.nick ~= 'focus' then
local totalDominantSpeakerTime = values.totalDominantSpeakerTime;
local faceExpressions = values.faceExpressions;
local faceLandmarks = values.faceLandmarks;
if totalDominantSpeakerTime > 0 or room:get_occupant_jid(jid) == nil or values:isDominantSpeaker()
or get_participant_expressions_count(faceExpressions) > 0 then
or next(faceLandmarks) ~= nil then
-- before sending we need to calculate current dominant speaker state
if values:isDominantSpeaker() and not values:isSilent() then
local timeElapsed = math.floor(socket.gettime()*1000 - values._dominantSpeakerStart);
@ -255,7 +251,7 @@ function occupant_joined(event)
users_json[values.nick] = {
displayName = values.displayName,
totalDominantSpeakerTime = totalDominantSpeakerTime,
faceExpressions = faceExpressions
faceLandmarks = faceLandmarks
};
end
end
@ -391,12 +387,3 @@ process_host_module(breakout_room_component_host, function(host_module, host)
end);
end
end);
-- Sums every per-expression counter in the given table and returns the
-- total. An empty table yields 0.
function get_participant_expressions_count(faceExpressions)
    local total = 0;
    for _, expression_count in pairs(faceExpressions) do
        total = total + expression_count;
    end
    return total;
end