diff --git a/globals.native.d.ts b/globals.native.d.ts
index c631700ce..110e465ef 100644
--- a/globals.native.d.ts
+++ b/globals.native.d.ts
@@ -23,6 +23,8 @@ interface IWindow {
onerror: (event: string, source: any, lineno: any, colno: any, e: Error) => void;
onunhandledrejection: (event: any) => void;
+ setInterval: typeof setInterval;
+ clearInterval: typeof clearInterval;
setTimeout: typeof setTimeout;
clearTimeout: typeof clearTimeout;
setImmediate: typeof setImmediate;
diff --git a/lang/main-enGB.json b/lang/main-enGB.json
index fd0df0499..be09e4965 100644
--- a/lang/main-enGB.json
+++ b/lang/main-enGB.json
@@ -365,7 +365,7 @@
"mute": "Mute or unmute your microphone",
"pushToTalk": "Press to transmit",
"raiseHand": "Raise or lower your hand",
- "showSpeakerStats": "Show speaker stats",
+ "showSpeakerStats": "Show participants stats",
"toggleChat": "Open or close the chat",
"toggleFilmstrip": "Show or hide video thumbnails",
"toggleScreensharing": "Switch between camera and screen sharing",
@@ -579,7 +579,7 @@
"minutes": "{{count}}m",
"name": "Name",
"seconds": "{{count}}s",
- "speakerStats": "Speaker Stats",
+ "speakerStats": "Participants Stats",
"speakerTime": "Speaker Time"
},
"startupoverlay": {
@@ -626,7 +626,7 @@
"sharedvideo": "Toggle video sharing",
"shortcuts": "Toggle shortcuts",
"show": "Show on stage",
- "speakerStats": "Toggle speaker statistics",
+ "speakerStats": "Toggle participants statistics",
"tileView": "Toggle tile view",
"toggleCamera": "Toggle camera",
"videoblur": "",
@@ -662,7 +662,7 @@
"shareRoom": "Invite someone",
"sharedvideo": "Share video",
"shortcuts": "View shortcuts",
- "speakerStats": "Speaker stats",
+ "speakerStats": "Participants stats",
"startScreenSharing": "Start screen sharing",
"startSubtitles": "Start subtitles",
"startvideoblur": "",
diff --git a/lang/main.json b/lang/main.json
index 1572b5b8d..eea77a973 100644
--- a/lang/main.json
+++ b/lang/main.json
@@ -511,7 +511,7 @@
"mute": "Mute or unmute your microphone",
"pushToTalk": "Push to talk",
"raiseHand": "Raise or lower your hand",
- "showSpeakerStats": "Show speaker stats",
+ "showSpeakerStats": "Show participants stats",
"toggleChat": "Open or close the chat",
"toggleFilmstrip": "Show or hide video thumbnails",
"toggleParticipantsPane": "Show or hide the participants pane",
@@ -1038,7 +1038,7 @@
"sad": "Sad",
"search": "Search",
"seconds": "{{count}}s",
- "speakerStats": "Speaker Stats",
+ "speakerStats": "Participants Stats",
"speakerTime": "Speaker Time",
"surprised": "Surprised"
},
@@ -1119,7 +1119,7 @@
"shortcuts": "Toggle shortcuts",
"show": "Show on stage",
"silence": "Silence",
- "speakerStats": "Toggle speaker statistics",
+ "speakerStats": "Toggle participants statistics",
"surprised": "Surprised",
"tileView": "Toggle tile view",
"toggleCamera": "Toggle camera",
@@ -1206,7 +1206,7 @@
"shortcuts": "View shortcuts",
"showWhiteboard": "Show whiteboard",
"silence": "Silence",
- "speakerStats": "Speaker stats",
+ "speakerStats": "Participants stats",
"startScreenSharing": "Start screen sharing",
"startSubtitles": "Subtitles • {{language}}",
"stopAudioSharing": "Stop audio sharing",
diff --git a/react/features/base/conference/reducer.ts b/react/features/base/conference/reducer.ts
index 6167a3101..46d888add 100644
--- a/react/features/base/conference/reducer.ts
+++ b/react/features/base/conference/reducer.ts
@@ -1,4 +1,6 @@
+import { FaceLandmarks } from '../../face-landmarks/types';
import { LOCKED_LOCALLY, LOCKED_REMOTELY } from '../../room-lock/constants';
+import { ISpeakerStats } from '../../speaker-stats/reducer';
import { CONNECTION_WILL_CONNECT, SET_LOCATION_URL } from '../connection/actionTypes';
import { JitsiConferenceErrors } from '../lib-jitsi-meet';
import ReducerRegistry from '../redux/ReducerRegistry';
@@ -53,6 +55,7 @@ export interface IJitsiConference {
getMeetingUniqueId: Function;
getParticipantById: Function;
getParticipants: Function;
+ getSpeakerStats: () => ISpeakerStats;
grantOwner: Function;
isAVModerationSupported: Function;
isCallstatsEnabled: Function;
@@ -74,6 +77,7 @@ export interface IJitsiConference {
sendCommand: Function;
sendCommandOnce: Function;
sendEndpointMessage: Function;
+ sendFaceLandmarks: (faceLandmarks: FaceLandmarks) => void;
sendFeedback: Function;
sendLobbyMessage: Function;
sessionId: string;
diff --git a/react/features/base/icons/svg/emotions-angry.svg b/react/features/base/icons/svg/emotions-angry.svg
new file mode 100644
index 000000000..fa5a81306
--- /dev/null
+++ b/react/features/base/icons/svg/emotions-angry.svg
@@ -0,0 +1,10 @@
+
diff --git a/react/features/base/icons/svg/emotions-disgusted.svg b/react/features/base/icons/svg/emotions-disgusted.svg
new file mode 100644
index 000000000..c80162e8d
--- /dev/null
+++ b/react/features/base/icons/svg/emotions-disgusted.svg
@@ -0,0 +1,10 @@
+
diff --git a/react/features/base/icons/svg/emotions-fearful.svg b/react/features/base/icons/svg/emotions-fearful.svg
new file mode 100644
index 000000000..4a31450d4
--- /dev/null
+++ b/react/features/base/icons/svg/emotions-fearful.svg
@@ -0,0 +1,10 @@
+
diff --git a/react/features/base/icons/svg/emotions-happy.svg b/react/features/base/icons/svg/emotions-happy.svg
new file mode 100644
index 000000000..009295656
--- /dev/null
+++ b/react/features/base/icons/svg/emotions-happy.svg
@@ -0,0 +1,10 @@
+
diff --git a/react/features/base/icons/svg/emotions-neutral.svg b/react/features/base/icons/svg/emotions-neutral.svg
new file mode 100644
index 000000000..50d191d60
--- /dev/null
+++ b/react/features/base/icons/svg/emotions-neutral.svg
@@ -0,0 +1,10 @@
+
diff --git a/react/features/base/icons/svg/emotions-sad.svg b/react/features/base/icons/svg/emotions-sad.svg
new file mode 100644
index 000000000..bc311e86b
--- /dev/null
+++ b/react/features/base/icons/svg/emotions-sad.svg
@@ -0,0 +1,10 @@
+
diff --git a/react/features/base/icons/svg/emotions-surprised.svg b/react/features/base/icons/svg/emotions-surprised.svg
new file mode 100644
index 000000000..5365469de
--- /dev/null
+++ b/react/features/base/icons/svg/emotions-surprised.svg
@@ -0,0 +1,10 @@
+
diff --git a/react/features/base/icons/svg/index.ts b/react/features/base/icons/svg/index.ts
index dc7d4d852..a77dfd6e5 100644
--- a/react/features/base/icons/svg/index.ts
+++ b/react/features/base/icons/svg/index.ts
@@ -29,6 +29,13 @@ export { default as IconE2EE } from './e2ee.svg';
export { default as IconEnlarge } from './enlarge.svg';
export { default as IconEnterFullscreen } from './enter-fullscreen.svg';
export { default as IconEnvelope } from './envelope.svg';
+export { default as IconEmotionsAngry } from './emotions-angry.svg';
+export { default as IconEmotionsDisgusted } from './emotions-disgusted.svg';
+export { default as IconEmotionsFearful } from './emotions-fearful.svg';
+export { default as IconEmotionsHappy } from './emotions-happy.svg';
+export { default as IconEmotionsNeutral } from './emotions-neutral.svg';
+export { default as IconEmotionsSad } from './emotions-sad.svg';
+export { default as IconEmotionsSurprised } from './emotions-surprised.svg';
export { default as IconExclamationSolid } from './exclamation-solid.svg';
export { default as IconExclamationTriangle } from './exclamation-triangle.svg';
export { default as IconExitFullscreen } from './exit-fullscreen.svg';
diff --git a/react/features/face-landmarks/FaceLandmarksDetector.ts b/react/features/face-landmarks/FaceLandmarksDetector.ts
index 7fe803f2f..0af9c56a4 100644
--- a/react/features/face-landmarks/FaceLandmarksDetector.ts
+++ b/react/features/face-landmarks/FaceLandmarksDetector.ts
@@ -5,20 +5,21 @@ import { getLocalVideoTrack } from '../base/tracks/functions';
import { getBaseUrl } from '../base/util/helpers';
import {
- addFaceExpression,
+ addFaceLandmarks,
clearFaceExpressionBuffer,
newFaceBox
} from './actions';
import {
DETECTION_TYPES,
DETECT_FACE,
- FACE_LANDMARK_DETECTION_ERROR_THRESHOLD,
+ FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD,
INIT_WORKER,
+ NO_DETECTION,
+ NO_FACE_DETECTION_THRESHOLD,
WEBHOOK_SEND_TIME_INTERVAL
} from './constants';
import {
getDetectionInterval,
- getFaceExpressionDuration,
sendFaceExpressionsWebhook
} from './functions';
import logger from './logger';
@@ -33,13 +34,14 @@ class FaceLandmarksDetector {
private worker: Worker | null = null;
private lastFaceExpression: string | null = null;
private lastFaceExpressionTimestamp: number | null = null;
- private duplicateConsecutiveExpressions = 0;
private webhookSendInterval: number | null = null;
private detectionInterval: number | null = null;
private recognitionActive = false;
private canvas?: HTMLCanvasElement;
private context?: CanvasRenderingContext2D | null;
private errorCount = 0;
+ private noDetectionCount = 0;
+ private noDetectionStartTimestamp: number | null = null;
/**
* Constructor for class, checks if the environment supports OffscreenCanvas.
@@ -97,27 +99,48 @@ class FaceLandmarksDetector {
// @ts-ignore
const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });
+ const state = getState();
+ const addToBuffer = Boolean(state['features/base/config'].webhookProxyUrl);
// @ts-ignore
workerUrl = window.URL.createObjectURL(workerBlob);
- this.worker = new Worker(workerUrl, { name: 'Face Recognition Worker' });
+ this.worker = new Worker(workerUrl, { name: 'Face Landmarks Worker' });
this.worker.onmessage = ({ data }: MessageEvent) => {
- const { faceExpression, faceBox } = data;
+ const { faceExpression, faceBox, faceCount } = data;
+ const messageTimestamp = Date.now();
- if (faceExpression) {
- if (faceExpression === this.lastFaceExpression) {
- this.duplicateConsecutiveExpressions++;
- } else {
- if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
- dispatch(addFaceExpression(
- this.lastFaceExpression,
- getFaceExpressionDuration(getState(), this.duplicateConsecutiveExpressions + 1),
- this.lastFaceExpressionTimestamp
- ));
- }
- this.lastFaceExpression = faceExpression;
- this.lastFaceExpressionTimestamp = Date.now();
- this.duplicateConsecutiveExpressions = 0;
+ // if the number of faces detected is different from 1 we do not take into consideration that detection
+ if (faceCount !== 1) {
+ if (this.noDetectionCount === 0) {
+ this.noDetectionStartTimestamp = messageTimestamp;
+ }
+ this.noDetectionCount++;
+
+ if (this.noDetectionCount === NO_FACE_DETECTION_THRESHOLD && this.noDetectionStartTimestamp) {
+ this.addFaceLandmarks(
+ dispatch,
+ this.noDetectionStartTimestamp,
+ NO_DETECTION,
+ addToBuffer
+ );
+ }
+
+ return;
+ } else if (this.noDetectionCount > 0) {
+ this.noDetectionCount = 0;
+ this.noDetectionStartTimestamp = null;
+ }
+
+ if (faceExpression?.expression) {
+ const { expression } = faceExpression;
+
+ if (expression !== this.lastFaceExpression) {
+ this.addFaceLandmarks(
+ dispatch,
+ messageTimestamp,
+ expression,
+ addToBuffer
+ );
}
}
@@ -128,7 +151,7 @@ class FaceLandmarksDetector {
APP.API.notifyFaceLandmarkDetected(faceBox, faceExpression);
};
- const { faceLandmarks } = getState()['features/base/config'];
+ const { faceLandmarks } = state['features/base/config'];
const detectionTypes = [
faceLandmarks?.enableFaceCentering && DETECTION_TYPES.FACE_BOX,
faceLandmarks?.enableFaceExpressionsDetection && DETECTION_TYPES.FACE_EXPRESSIONS
@@ -162,7 +185,7 @@ class FaceLandmarksDetector {
}
if (this.recognitionActive) {
- logger.log('Face detection already active.');
+ logger.log('Face landmarks detection already active.');
return;
}
@@ -179,7 +202,7 @@ class FaceLandmarksDetector {
this.imageCapture = new ImageCapture(firstVideoTrack);
this.recognitionActive = true;
- logger.log('Start face detection');
+ logger.log('Start face landmarks detection');
const { faceLandmarks } = state['features/base/config'];
@@ -191,7 +214,7 @@ class FaceLandmarksDetector {
).then(status => {
if (status) {
this.errorCount = 0;
- } else if (++this.errorCount > FACE_LANDMARK_DETECTION_ERROR_THRESHOLD) {
+ } else if (++this.errorCount > FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD) {
/* this prevents the detection from stopping immediately after occurring an error
* sometimes due to the small detection interval when starting the detection some errors
* might occur due to the track not being ready
@@ -228,18 +251,11 @@ class FaceLandmarksDetector {
if (!this.recognitionActive || !this.isInitialized()) {
return;
}
+ const stopTimestamp = Date.now();
+ const addToBuffer = Boolean(getState()['features/base/config'].webhookProxyUrl);
if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
- dispatch(
- addFaceExpression(
- this.lastFaceExpression,
- getFaceExpressionDuration(getState(), this.duplicateConsecutiveExpressions + 1),
- this.lastFaceExpressionTimestamp
- )
- );
- this.duplicateConsecutiveExpressions = 0;
- this.lastFaceExpression = null;
- this.lastFaceExpressionTimestamp = null;
+ this.addFaceLandmarks(dispatch, stopTimestamp, null, addToBuffer);
}
this.webhookSendInterval && window.clearInterval(this.webhookSendInterval);
@@ -248,7 +264,36 @@ class FaceLandmarksDetector {
this.detectionInterval = null;
this.imageCapture = null;
this.recognitionActive = false;
- logger.log('Stop face detection');
+ logger.log('Stop face landmarks detection');
+ }
+
+ /**
+ * Dispatches the action for adding new face landmarks and changes the state of the class.
+ *
+ * @param {IStore.dispatch} dispatch - The redux dispatch function.
+ * @param {number} endTimestamp - The timestamp when the face landmarks ended.
+ * @param {string | null} newFaceExpression - The new face expression, or null when detection stops with no replacement expression.
+ * @param {boolean} addToBuffer - Flag for adding the face landmarks to the buffer.
+ * @returns {void}
+ */
+ private addFaceLandmarks(
+ dispatch: IStore['dispatch'],
+ endTimestamp: number,
+ newFaceExpression: string | null,
+ addToBuffer = false) {
+ if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
+ dispatch(addFaceLandmarks(
+ {
+ duration: endTimestamp - this.lastFaceExpressionTimestamp,
+ faceExpression: this.lastFaceExpression,
+ timestamp: this.lastFaceExpressionTimestamp
+ },
+ addToBuffer
+ ));
+ }
+
+ this.lastFaceExpression = newFaceExpression;
+ this.lastFaceExpressionTimestamp = endTimestamp;
}
/**
diff --git a/react/features/face-landmarks/FaceLandmarksHelper.ts b/react/features/face-landmarks/FaceLandmarksHelper.ts
index 2aba627e0..015f08502 100644
--- a/react/features/face-landmarks/FaceLandmarksHelper.ts
+++ b/react/features/face-landmarks/FaceLandmarksHelper.ts
@@ -2,7 +2,7 @@ import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
import { Config, FaceResult, Human } from '@vladmandic/human';
import { DETECTION_TYPES, FACE_DETECTION_SCORE_THRESHOLD, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';
-import { DetectInput, DetectOutput, FaceBox, InitInput } from './types';
+import { DetectInput, DetectOutput, FaceBox, FaceExpression, InitInput } from './types';
export interface IFaceLandmarksHelper {
detect: ({ image, threshold }: DetectInput) => Promise<DetectOutput>;
@@ -10,7 +10,7 @@ export interface IFaceLandmarksHelper {
getDetections: (image: ImageBitmap | ImageData) => Promise<Array<FaceResult>>;
getFaceBox: (detections: Array<FaceResult>, threshold: number) => FaceBox | undefined;
getFaceCount: (detections: Array<FaceResult>) => number;
- getFaceExpression: (detections: Array<FaceResult>) => string | undefined;
+ getFaceExpression: (detections: Array<FaceResult>) => FaceExpression | undefined;
init: () => Promise<void>;
}
@@ -144,13 +144,18 @@ export class HumanHelper implements IFaceLandmarksHelper {
* @param {Array} detections - The array with the detections.
- * @returns {string | undefined}
+ * @returns {FaceExpression | undefined}
*/
- getFaceExpression(detections: Array<FaceResult>): string | undefined {
+ getFaceExpression(detections: Array<FaceResult>): FaceExpression | undefined {
if (this.getFaceCount(detections) !== 1) {
return;
}
- if (detections[0].emotion) {
- return FACE_EXPRESSIONS_NAMING_MAPPING[detections[0].emotion[0].emotion];
+ const detection = detections[0];
+
+ if (detection.emotion) {
+ return {
+ expression: FACE_EXPRESSIONS_NAMING_MAPPING[detection.emotion[0].emotion],
+ score: detection.emotion[0].score
+ };
}
}
diff --git a/react/features/face-landmarks/actionTypes.ts b/react/features/face-landmarks/actionTypes.ts
index e4231f934..ba88a4668 100644
--- a/react/features/face-landmarks/actionTypes.ts
+++ b/react/features/face-landmarks/actionTypes.ts
@@ -1,32 +1,21 @@
/**
- * Redux action type dispatched in order to add a face expression.
+ * Redux action type dispatched in order to add real-time faceLandmarks to timeline.
*
* {
- * type: ADD_FACE_EXPRESSION,
- * faceExpression: string,
- * duration: number
+ * type: ADD_FACE_LANDMARKS,
+ * faceLandmarks: FaceLandmarks
* }
*/
-export const ADD_FACE_EXPRESSION = 'ADD_FACE_EXPRESSION';
+export const ADD_FACE_LANDMARKS = 'ADD_FACE_LANDMARKS';
/**
- * Redux action type dispatched in order to add a expression to the face expressions buffer.
+ * Redux action type dispatched in order to clear the faceLandmarks buffer for webhook in the state.
*
* {
- * type: ADD_TO_FACE_EXPRESSIONS_BUFFER,
- * faceExpression: string
+ * type: CLEAR_FACE_LANDMARKS_BUFFER
* }
*/
-export const ADD_TO_FACE_EXPRESSIONS_BUFFER = 'ADD_TO_FACE_EXPRESSIONS_BUFFER';
-
-/**
- * Redux action type dispatched in order to clear the face expressions buffer in the state.
- *
- * {
- * type: CLEAR_FACE_EXPRESSIONS_BUFFER
- * }
-*/
-export const CLEAR_FACE_EXPRESSIONS_BUFFER = 'CLEAR_FACE_EXPRESSIONS_BUFFER';
+export const CLEAR_FACE_LANDMARKS_BUFFER = 'CLEAR_FACE_LANDMARKS_BUFFER';
/**
* Redux action type dispatched in order to update coordinates of a detected face.
diff --git a/react/features/face-landmarks/actions.ts b/react/features/face-landmarks/actions.ts
index a603a0313..d016d9dcc 100644
--- a/react/features/face-landmarks/actions.ts
+++ b/react/features/face-landmarks/actions.ts
@@ -3,56 +3,35 @@ import './createImageBitmap';
import { AnyAction } from 'redux';
import {
- ADD_FACE_EXPRESSION,
- ADD_TO_FACE_EXPRESSIONS_BUFFER,
- CLEAR_FACE_EXPRESSIONS_BUFFER,
+ ADD_FACE_LANDMARKS,
+ CLEAR_FACE_LANDMARKS_BUFFER,
NEW_FACE_COORDINATES
} from './actionTypes';
-import { FaceBox } from './types';
+import { FaceBox, FaceLandmarks } from './types';
/**
- * Adds a new face expression and its duration.
+ * Adds new face landmarks to the timeline.
*
- * @param {string} faceExpression - Face expression to be added.
- * @param {number} duration - Duration in seconds of the face expression.
- * @param {number} timestamp - Duration in seconds of the face expression.
+ * @param {FaceLandmarks} faceLandmarks - The new face landmarks to be added to the timeline.
+ * @param {boolean} addToBuffer - If true adds the face landmarks to a buffer in the reducer for webhook.
* @returns {AnyAction}
*/
-export function addFaceExpression(faceExpression: string, duration: number, timestamp: number): AnyAction {
+export function addFaceLandmarks(faceLandmarks: FaceLandmarks, addToBuffer: boolean): AnyAction {
return {
- type: ADD_FACE_EXPRESSION,
- faceExpression,
- duration,
- timestamp
+ type: ADD_FACE_LANDMARKS,
+ faceLandmarks,
+ addToBuffer
};
}
/**
- * Adds a face expression with its timestamp to the face expression buffer.
+ * Clears the face landmarks buffer for the webhook in the state.
*
- * @param {Object} faceExpression - Object containing face expression string and its timestamp.
* @returns {AnyAction}
*/
-export function addToFaceExpressionsBuffer(
- faceExpression: {
- emotion: string;
- timestamp: number;
- }
-): AnyAction {
+export function clearFaceExpressionBuffer(): AnyAction {
return {
- type: ADD_TO_FACE_EXPRESSIONS_BUFFER,
- faceExpression
- };
-}
-
-/**
- * Clears the face expressions array in the state.
- *
- * @returns {Object}
- */
-export function clearFaceExpressionBuffer() {
- return {
- type: CLEAR_FACE_EXPRESSIONS_BUFFER
+ type: CLEAR_FACE_LANDMARKS_BUFFER
};
}
diff --git a/react/features/face-landmarks/constants.ts b/react/features/face-landmarks/constants.ts
index 14954bd27..81d4cff7f 100644
--- a/react/features/face-landmarks/constants.ts
+++ b/react/features/face-landmarks/constants.ts
@@ -37,6 +37,11 @@ export const INIT_WORKER = 'INIT_WORKER';
*/
export const FACE_BOX_EVENT_TYPE = 'face-box';
+/**
+ * Type of event sent on the data channel.
+ */
+export const FACE_LANDMARKS_EVENT_TYPE = 'face-landmarks';
+
/**
* Milliseconds interval value for sending new image data to the worker.
*/
@@ -64,4 +69,15 @@ export const FACE_DETECTION_SCORE_THRESHOLD = 0.75;
/**
* Threshold for stopping detection after a certain number of consecutive errors have occurred.
*/
-export const FACE_LANDMARK_DETECTION_ERROR_THRESHOLD = 4;
+export const FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD = 4;
+
+/**
+ * Threshold for number of consecutive detections with no face,
+ * so that when achieved there will be dispatched an action.
+ */
+export const NO_FACE_DETECTION_THRESHOLD = 5;
+
+/**
+ * Constant type used for signaling that no valid face detection is found.
+ */
+export const NO_DETECTION = 'no-detection';
diff --git a/react/features/face-landmarks/faceLandmarksWorker.ts b/react/features/face-landmarks/faceLandmarksWorker.ts
index b94fbe17e..a71c7f005 100644
--- a/react/features/face-landmarks/faceLandmarksWorker.ts
+++ b/react/features/face-landmarks/faceLandmarksWorker.ts
@@ -12,10 +12,9 @@ onmessage = async function({ data }: MessageEvent) {
const detections = await helper.detect(data);
- if (detections && (detections.faceBox || detections.faceExpression || detections.faceCount)) {
+ if (detections) {
self.postMessage(detections);
}
-
break;
}
diff --git a/react/features/face-landmarks/functions.ts b/react/features/face-landmarks/functions.ts
index 5602eb57c..cf2f5d6ae 100644
--- a/react/features/face-landmarks/functions.ts
+++ b/react/features/face-landmarks/functions.ts
@@ -1,40 +1,27 @@
import { IReduxState } from '../app/types';
+import { IJitsiConference } from '../base/conference/reducer';
import { getLocalParticipant } from '../base/participants/functions';
import { extractFqnFromPath } from '../dynamic-branding/functions.any';
-import { DETECT_FACE, FACE_BOX_EVENT_TYPE, SEND_IMAGE_INTERVAL_MS } from './constants';
+import { FACE_BOX_EVENT_TYPE, FACE_LANDMARKS_EVENT_TYPE, SEND_IMAGE_INTERVAL_MS } from './constants';
import logger from './logger';
-import { FaceBox } from './types';
-
-let canvas: HTMLCanvasElement;
-let context: CanvasRenderingContext2D | null;
-
-if (typeof OffscreenCanvas === 'undefined') {
- canvas = document.createElement('canvas');
- context = canvas.getContext('2d');
-}
+import { FaceBox, FaceLandmarks } from './types';
/**
- * Sends the face expression with its duration to all the other participants.
+ * Sends the face landmarks to other participants via the data channel.
*
* @param {any} conference - The current conference.
- * @param {string} faceExpression - Face expression to be sent.
- * @param {number} duration - The duration of the face expression in seconds.
+ * @param {FaceLandmarks} faceLandmarks - Face landmarks to be sent.
* @returns {void}
*/
-export function sendFaceExpressionToParticipants(
- conference: any,
- faceExpression: string,
- duration: number
-): void {
+export function sendFaceExpressionToParticipants(conference: any, faceLandmarks: FaceLandmarks): void {
try {
conference.sendEndpointMessage('', {
- type: 'face_landmark',
- faceExpression,
- duration
+ type: FACE_LANDMARKS_EVENT_TYPE,
+ faceLandmarks
});
} catch (err) {
- logger.warn('Could not broadcast the face expression to the other participants', err);
+ logger.warn('Could not broadcast the face landmarks to the other participants', err);
}
}
@@ -61,30 +48,22 @@ export function sendFaceBoxToParticipants(
}
/**
- * Sends the face expression with its duration to xmpp server.
+ * Sends the face landmarks to prosody.
*
- * @param {any} conference - The current conference.
+ * @param {IJitsiConference} conference - The current conference.
- * @param {string} faceExpression - Face expression to be sent.
- * @param {number} duration - The duration of the face expression in seconds.
+ * @param {FaceLandmarks} faceLandmarks - Face landmarks to be sent.
* @returns {void}
*/
-export function sendFaceExpressionToServer(
- conference: any,
- faceExpression: string,
- duration: number
-): void {
+export function sendFaceExpressionToServer(conference: IJitsiConference, faceLandmarks: FaceLandmarks): void {
try {
- conference.sendFaceLandmarks({
- faceExpression,
- duration
- });
+ conference.sendFaceLandmarks(faceLandmarks);
} catch (err) {
- logger.warn('Could not send the face expression to xmpp server', err);
+ logger.warn('Could not send the face landmarks to prosody', err);
}
}
/**
- * Sends face expression to backend.
+ * Sends face landmarks to backend.
*
* @param {Object} state - Redux state.
* @returns {boolean} - True if sent, false otherwise.
@@ -96,9 +75,9 @@ export async function sendFaceExpressionsWebhook(state: IReduxState) {
const { connection } = state['features/base/connection'];
const jid = connection?.getJid();
const localParticipant = getLocalParticipant(state);
- const { faceExpressionsBuffer } = state['features/face-landmarks'];
+ const { faceLandmarksBuffer } = state['features/face-landmarks'];
- if (faceExpressionsBuffer.length === 0) {
+ if (faceLandmarksBuffer.length === 0) {
return false;
}
@@ -111,7 +90,7 @@ export async function sendFaceExpressionsWebhook(state: IReduxState) {
meetingFqn: extractFqnFromPath(),
sessionId: conference?.sessionId,
submitted: Date.now(),
- emotions: faceExpressionsBuffer,
+ emotions: faceLandmarksBuffer,
participantId: localParticipant?.jwtId,
participantName: localParticipant?.name,
participantJid: jid
@@ -138,55 +117,6 @@ export async function sendFaceExpressionsWebhook(state: IReduxState) {
}
-/**
- * Sends the image data a canvas from the track in the image capture to the face recognition worker.
- *
- * @param {Worker} worker - Face recognition worker.
- * @param {Object} imageCapture - Image capture that contains the current track.
- * @param {number} threshold - Movement threshold as percentage for sharing face coordinates.
- * @returns {Promise} - True if sent, false otherwise.
- */
-export async function sendDataToWorker(
- worker: Worker,
- imageCapture: ImageCapture,
- threshold = 10
-): Promise {
- if (imageCapture === null || imageCapture === undefined) {
- return false;
- }
-
- let imageBitmap;
- let image;
-
- try {
- imageBitmap = await imageCapture.grabFrame();
- } catch (err) {
- logger.warn(err);
-
- return false;
- }
-
- if (typeof OffscreenCanvas === 'undefined') {
- canvas.width = imageBitmap.width;
- canvas.height = imageBitmap.height;
- context?.drawImage(imageBitmap, 0, 0);
-
- image = context?.getImageData(0, 0, imageBitmap.width, imageBitmap.height);
- } else {
- image = imageBitmap;
- }
-
- worker.postMessage({
- type: DETECT_FACE,
- image,
- threshold
- });
-
- imageBitmap.close();
-
- return true;
-}
-
/**
* Gets face box for a participant id.
*
@@ -230,14 +160,3 @@ export function getDetectionInterval(state: IReduxState) {
return Math.max(faceLandmarks?.captureInterval || SEND_IMAGE_INTERVAL_MS);
}
-
-/**
- * Returns the duration in seconds of a face expression.
- *
- * @param {IReduxState} state - The redux state.
- * @param {number} faceExpressionCount - The number of consecutive face expressions.
- * @returns {number} - Duration of face expression in seconds.
- */
-export function getFaceExpressionDuration(state: IReduxState, faceExpressionCount: number) {
- return faceExpressionCount * (getDetectionInterval(state) / 1000);
-}
diff --git a/react/features/face-landmarks/middleware.ts b/react/features/face-landmarks/middleware.ts
index 2ba9aa61f..72e8f8b8d 100644
--- a/react/features/face-landmarks/middleware.ts
+++ b/react/features/face-landmarks/middleware.ts
@@ -11,18 +11,15 @@ import MiddlewareRegistry from '../base/redux/MiddlewareRegistry';
import { TRACK_ADDED, TRACK_REMOVED, TRACK_UPDATED } from '../base/tracks/actionTypes';
import FaceLandmarksDetector from './FaceLandmarksDetector';
-import { ADD_FACE_EXPRESSION, NEW_FACE_COORDINATES, UPDATE_FACE_COORDINATES } from './actionTypes';
-import {
- addToFaceExpressionsBuffer
-} from './actions';
+import { ADD_FACE_LANDMARKS, NEW_FACE_COORDINATES, UPDATE_FACE_COORDINATES } from './actionTypes';
import { FACE_BOX_EVENT_TYPE } from './constants';
import { sendFaceBoxToParticipants, sendFaceExpressionToParticipants, sendFaceExpressionToServer } from './functions';
MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: any) => {
const { dispatch, getState } = store;
- const { faceLandmarks } = getState()['features/base/config'];
- const isEnabled = faceLandmarks?.enableFaceCentering || faceLandmarks?.enableFaceExpressionsDetection;
+ const { faceLandmarks: faceLandmarksConfig } = getState()['features/base/config'];
+ const isEnabled = faceLandmarksConfig?.enableFaceCentering || faceLandmarksConfig?.enableFaceExpressionsDetection;
if (action.type === CONFERENCE_JOINED) {
if (isEnabled) {
@@ -99,19 +96,16 @@ MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: any)
return next(action);
}
- case ADD_FACE_EXPRESSION: {
+ case ADD_FACE_LANDMARKS: {
const state = getState();
- const { faceExpression, duration, timestamp } = action;
+ const { faceLandmarks } = action;
const conference = getCurrentConference(state);
if (getParticipantCount(state) > 1) {
- sendFaceExpressionToParticipants(conference, faceExpression, duration);
+ sendFaceExpressionToParticipants(conference, faceLandmarks);
}
- sendFaceExpressionToServer(conference, faceExpression, duration);
- dispatch(addToFaceExpressionsBuffer({
- emotion: faceExpression,
- timestamp
- }));
+
+ sendFaceExpressionToServer(conference, faceLandmarks);
return next(action);
}
diff --git a/react/features/face-landmarks/reducer.ts b/react/features/face-landmarks/reducer.ts
index 9514c7cfe..b9625a015 100644
--- a/react/features/face-landmarks/reducer.ts
+++ b/react/features/face-landmarks/reducer.ts
@@ -1,42 +1,25 @@
import ReducerRegistry from '../base/redux/ReducerRegistry';
import {
- ADD_FACE_EXPRESSION,
- ADD_TO_FACE_EXPRESSIONS_BUFFER,
- CLEAR_FACE_EXPRESSIONS_BUFFER,
+ ADD_FACE_LANDMARKS,
+ CLEAR_FACE_LANDMARKS_BUFFER,
UPDATE_FACE_COORDINATES
} from './actionTypes';
-import { FaceBox } from './types';
+import { FaceBox, FaceLandmarks } from './types';
const defaultState = {
faceBoxes: {},
- faceExpressions: {
- happy: 0,
- neutral: 0,
- surprised: 0,
- angry: 0,
- fearful: 0,
- disgusted: 0,
- sad: 0
- },
- faceExpressionsBuffer: [],
+ faceLandmarks: [],
+ faceLandmarksBuffer: [],
recognitionActive: false
};
export interface IFaceLandmarksState {
faceBoxes: { [key: string]: FaceBox; };
- faceExpressions: {
- angry: number;
- disgusted: number;
- fearful: number;
- happy: number;
- neutral: number;
- sad: number;
- surprised: number;
- };
- faceExpressionsBuffer: Array<{
+ faceLandmarks: Array<FaceLandmarks>;
+ faceLandmarksBuffer: Array<{
emotion: string;
- timestamp: string;
+ timestamp: number;
}>;
recognitionActive: boolean;
}
@@ -44,26 +27,23 @@ export interface IFaceLandmarksState {
ReducerRegistry.register('features/face-landmarks',
(state = defaultState, action): IFaceLandmarksState => {
switch (action.type) {
- case ADD_FACE_EXPRESSION: {
+ case ADD_FACE_LANDMARKS: {
+ const { addToBuffer, faceLandmarks }: { addToBuffer: boolean; faceLandmarks: FaceLandmarks; } = action;
+
return {
...state,
- faceExpressions: {
- ...state.faceExpressions,
- [action.faceExpression]: state.faceExpressions[
- action.faceExpression as keyof typeof state.faceExpressions] + action.duration
- }
+ faceLandmarks: [ ...state.faceLandmarks, faceLandmarks ],
+ faceLandmarksBuffer: addToBuffer ? [ ...state.faceLandmarksBuffer,
+ {
+ emotion: faceLandmarks.faceExpression,
+ timestamp: faceLandmarks.timestamp
+ } ] : state.faceLandmarksBuffer
};
}
- case ADD_TO_FACE_EXPRESSIONS_BUFFER: {
+ case CLEAR_FACE_LANDMARKS_BUFFER: {
return {
...state,
- faceExpressionsBuffer: [ ...state.faceExpressionsBuffer, action.faceExpression ]
- };
- }
- case CLEAR_FACE_EXPRESSIONS_BUFFER: {
- return {
- ...state,
- faceExpressionsBuffer: []
+ faceLandmarksBuffer: []
};
}
case UPDATE_FACE_COORDINATES: {
diff --git a/react/features/face-landmarks/types.ts b/react/features/face-landmarks/types.ts
index 406815836..876754eb2 100644
--- a/react/features/face-landmarks/types.ts
+++ b/react/features/face-landmarks/types.ts
@@ -19,5 +19,21 @@ export type InitInput = {
export type DetectOutput = {
faceBox?: FaceBox;
faceCount: number;
- faceExpression?: string;
+ faceExpression?: FaceExpression;
+};
+
+export type FaceExpression = {
+ expression: string;
+ score: number;
+};
+
+export type FaceLandmarks = {
+
+ // duration in milliseconds of the face landmarks
+ duration: number;
+ faceExpression: string;
+ score?: number;
+
+ // the start timestamp of the expression
+ timestamp: number;
};
diff --git a/react/features/rtcstats/middleware.ts b/react/features/rtcstats/middleware.ts
index fbe0cf838..d7ca51dac 100644
--- a/react/features/rtcstats/middleware.ts
+++ b/react/features/rtcstats/middleware.ts
@@ -14,7 +14,8 @@ import MiddlewareRegistry from '../base/redux/MiddlewareRegistry';
import { TRACK_ADDED, TRACK_UPDATED } from '../base/tracks/actionTypes';
import { getCurrentRoomId, isInBreakoutRoom } from '../breakout-rooms/functions';
import { extractFqnFromPath } from '../dynamic-branding/functions.any';
-import { ADD_FACE_EXPRESSION } from '../face-landmarks/actionTypes';
+import { ADD_FACE_LANDMARKS } from '../face-landmarks/actionTypes';
+import { FaceLandmarks } from '../face-landmarks/types';
import RTCStats from './RTCStats';
import {
@@ -164,17 +165,19 @@ MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: AnyA
}
break;
}
- case ADD_FACE_EXPRESSION:
+ case ADD_FACE_LANDMARKS: {
if (canSendFaceLandmarksRtcstatsData(state)) {
- const { duration, faceExpression, timestamp } = action;
+ const { duration, faceExpression, timestamp } = action.faceLandmarks as FaceLandmarks;
+ const durationSeconds = Math.round(duration / 1000);
RTCStats.sendFaceLandmarksData({
- duration,
+ duration: durationSeconds,
faceLandmarks: faceExpression,
timestamp
});
}
break;
+ }
case CONFERENCE_TIMESTAMP_CHANGED: {
if (canSendRtcstatsData(state)) {
const { conferenceTimestamp } = action;
diff --git a/react/features/speaker-stats/actionTypes.ts b/react/features/speaker-stats/actionTypes.ts
index 369bb83e9..ae6e48ff5 100644
--- a/react/features/speaker-stats/actionTypes.ts
+++ b/react/features/speaker-stats/actionTypes.ts
@@ -63,3 +63,20 @@ export const RESET_SEARCH_CRITERIA = 'RESET_SEARCH_CRITERIA'
*/
export const TOGGLE_FACE_EXPRESSIONS = 'SHOW_FACE_EXPRESSIONS';
+
+export const INCREASE_ZOOM = 'INCREASE_ZOOM';
+
+export const DECREASE_ZOOM = 'DECREASE_ZOOM';
+
+export const ADD_TO_OFFSET = 'ADD_TO_OFFSET';
+
+export const SET_OFFSET = 'SET_OFFSET';
+
+export const ADD_TO_OFFSET_LEFT = 'ADD_TO_OFFSET_LEFT';
+
+export const ADD_TO_OFFSET_RIGHT = 'ADD_TO_OFFSET_RIGHT';
+
+export const SET_TIMELINE_BOUNDARY = 'SET_TIMELINE_BOUNDARY';
+
+export const SET_PANNING = 'SET_PANNING';
+
diff --git a/react/features/speaker-stats/actions.any.ts b/react/features/speaker-stats/actions.any.ts
new file mode 100644
index 000000000..3400f6273
--- /dev/null
+++ b/react/features/speaker-stats/actions.any.ts
@@ -0,0 +1,231 @@
+import { IStore } from '../app/types';
+
+import {
+ ADD_TO_OFFSET,
+ ADD_TO_OFFSET_LEFT,
+ ADD_TO_OFFSET_RIGHT,
+ INIT_REORDER_STATS,
+ INIT_SEARCH,
+ INIT_UPDATE_STATS,
+ RESET_SEARCH_CRITERIA,
+ SET_PANNING,
+ SET_TIMELINE_BOUNDARY,
+ TOGGLE_FACE_EXPRESSIONS,
+ UPDATE_SORTED_SPEAKER_STATS_IDS,
+ UPDATE_STATS
+} from './actionTypes';
+import { MINIMUM_INTERVAL } from './constants';
+import { getCurrentDuration, getTimelineBoundaries } from './functions';
+import { ISpeakerStats } from './reducer';
+
+/**
+ * Starts a search by criteria.
+ *
+ * @param {string} criteria - The search criteria.
+ * @returns {Object}
+ */
+export function initSearch(criteria: string) {
+ return {
+ type: INIT_SEARCH,
+ criteria
+ };
+}
+
+/**
+ * Gets the new stats and triggers update.
+ *
+ * @param {Function} getSpeakerStats - Function to get the speaker stats.
+ * @returns {Object}
+ */
+export function initUpdateStats(getSpeakerStats: () => ISpeakerStats) {
+ return {
+ type: INIT_UPDATE_STATS,
+ getSpeakerStats
+ };
+}
+
+/**
+ * Updates the stats with new stats.
+ *
+ * @param {Object} stats - The new stats.
+ * @returns {Object}
+ */
+export function updateStats(stats: Object) {
+ return {
+ type: UPDATE_STATS,
+ stats
+ };
+}
+
+/**
+ * Updates the speaker stats order.
+ *
+ * @param {Array} participantIds - Participant ids.
+ * @returns {Object}
+ */
+export function updateSortedSpeakerStatsIds(participantIds: Array<string>) {
+ return {
+ type: UPDATE_SORTED_SPEAKER_STATS_IDS,
+ participantIds
+ };
+}
+
+/**
+ * Initiates reordering of the stats.
+ *
+ * @returns {Object}
+ */
+export function initReorderStats() {
+ return {
+ type: INIT_REORDER_STATS
+ };
+}
+
+/**
+ * Resets the search criteria.
+ *
+ * @returns {Object}
+ */
+export function resetSearchCriteria() {
+ return {
+ type: RESET_SEARCH_CRITERIA
+ };
+}
+
+/**
+ * Toggles the face expressions grid.
+ *
+ * @returns {Object}
+ */
+export function toggleFaceExpressions() {
+ return {
+ type: TOGGLE_FACE_EXPRESSIONS
+ };
+}
+
+/**
+ * Adds a value to the boundary offset of the timeline.
+ *
+ * @param {number} value - The value to be added to the offset; it is
+ * clamped so that the resulting left boundary stays at or above zero
+ * and the resulting right boundary does not exceed the current
+ * duration of the conference.
+ * @returns {Function}
+ */
+export function addToOffset(value: number) {
+ return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
+ const state = getState();
+ const { left, right } = getTimelineBoundaries(state);
+ const currentDuration = getCurrentDuration(state) ?? 0;
+ const newLeft = left + value;
+ const newRight = right + value;
+
+ if (newLeft >= 0 && newRight <= currentDuration) {
+ dispatch({
+ type: ADD_TO_OFFSET,
+ value
+ });
+ } else if (newLeft < 0) {
+ dispatch({
+ type: ADD_TO_OFFSET,
+ value: -left
+ });
+ } else if (newRight > currentDuration) {
+ dispatch({
+ type: ADD_TO_OFFSET,
+ value: currentDuration - right
+ });
+ }
+ };
+}
+
+/**
+ * Adds the value to the offset of the left boundary for the timeline.
+ *
+ * @param {number} value - The new value for the offset.
+ * @returns {Object}
+ */
+export function addToOffsetLeft(value: number) {
+ return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
+ const state = getState();
+ const { left, right } = getTimelineBoundaries(state);
+ const newLeft = left + value;
+
+ if (newLeft >= 0 && right - newLeft > MINIMUM_INTERVAL) {
+ dispatch({
+ type: ADD_TO_OFFSET_LEFT,
+ value
+ });
+ } else if (newLeft < 0) {
+ dispatch({
+ type: ADD_TO_OFFSET_LEFT,
+ value: -left
+ });
+ }
+ };
+}
+
+/**
+ * Adds the value to the offset of the right boundary for the timeline.
+ *
+ * @param {number} value - The new value for the offset.
+ * @returns {Object}
+ */
+export function addToOffsetRight(value: number) {
+ return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
+ const state = getState();
+ const { left, right } = getTimelineBoundaries(state);
+ const currentDuration = getCurrentDuration(state) ?? 0;
+ const newRight = right + value;
+
+ if (newRight <= currentDuration && newRight - left > MINIMUM_INTERVAL) {
+ dispatch({
+ type: ADD_TO_OFFSET_RIGHT,
+ value
+ });
+ } else if (newRight > currentDuration) {
+ dispatch({
+ type: ADD_TO_OFFSET_RIGHT,
+ value: currentDuration - right
+ });
+ }
+ };
+}
+
+/**
+ * Sets the current time boundary of the timeline, when zoomed in.
+ *
+ * @param {number} boundary - The current time boundary.
+ * @returns {Object}
+ */
+export function setTimelineBoundary(boundary: number) {
+ return {
+ type: SET_TIMELINE_BOUNDARY,
+ boundary
+ };
+}
+
+/**
+ * Clears the current time boundary of the timeline, when zoomed out full.
+ *
+ * @returns {Object}
+ */
+export function clearTimelineBoundary() {
+ return {
+ type: SET_TIMELINE_BOUNDARY,
+ boundary: null
+ };
+}
+
+/**
+ * Sets the state of the timeline panning.
+ *
+ * @param {Object} panning - The state of the timeline panning.
+ * @returns {Object}
+ */
+export function setTimelinePanning(panning: { active: boolean; x: number; }) {
+ return {
+ type: SET_PANNING,
+ panning
+ };
+}
diff --git a/react/features/speaker-stats/actions.native.ts b/react/features/speaker-stats/actions.native.ts
new file mode 100644
index 000000000..02b37d475
--- /dev/null
+++ b/react/features/speaker-stats/actions.native.ts
@@ -0,0 +1 @@
+export * from './actions.any';
diff --git a/react/features/speaker-stats/actions.ts b/react/features/speaker-stats/actions.ts
deleted file mode 100644
index af30f8e71..000000000
--- a/react/features/speaker-stats/actions.ts
+++ /dev/null
@@ -1,94 +0,0 @@
-import {
- INIT_REORDER_STATS,
- INIT_SEARCH,
- INIT_UPDATE_STATS,
- RESET_SEARCH_CRITERIA,
- TOGGLE_FACE_EXPRESSIONS,
- UPDATE_SORTED_SPEAKER_STATS_IDS,
- UPDATE_STATS
-} from './actionTypes';
-
-/**
- * Starts a search by criteria.
- *
- * @param {string | null} criteria - The search criteria.
- * @returns {Object}
- */
-export function initSearch(criteria: string | null) {
- return {
- type: INIT_SEARCH,
- criteria
- };
-}
-
-/**
- * Gets the new stats and triggers update.
- *
- * @param {Function} getSpeakerStats - Function to get the speaker stats.
- * @returns {Object}
- */
-export function initUpdateStats(getSpeakerStats: Function) {
- return {
- type: INIT_UPDATE_STATS,
- getSpeakerStats
- };
-}
-
-/**
- * Updates the stats with new stats.
- *
- * @param {Object} stats - The new stats.
- * @returns {Object}
- */
-export function updateStats(stats: Object) {
- return {
- type: UPDATE_STATS,
- stats
- };
-}
-
-/**
- * Updates the speaker stats order.
- *
- * @param {Object} participantIds - Participant ids.
- * @returns {Object}
- */
-export function updateSortedSpeakerStatsIds(participantIds?: Array) {
- return {
- type: UPDATE_SORTED_SPEAKER_STATS_IDS,
- participantIds
- };
-}
-
-/**
- * Initiates reordering of the stats.
- *
- * @returns {Object}
- */
-export function initReorderStats() {
- return {
- type: INIT_REORDER_STATS
- };
-}
-
-/**
- * Resets the search criteria.
- *
- * @returns {Object}
- */
-export function resetSearchCriteria() {
- return {
- type: RESET_SEARCH_CRITERIA
- };
-}
-
-/**
- * Toggles the face expressions grid.
- *
- * @returns {Object}
- */
-export function toggleFaceExpressions() {
- return {
- type: TOGGLE_FACE_EXPRESSIONS
- };
-}
diff --git a/react/features/speaker-stats/actions.web.ts b/react/features/speaker-stats/actions.web.ts
new file mode 100644
index 000000000..02b37d475
--- /dev/null
+++ b/react/features/speaker-stats/actions.web.ts
@@ -0,0 +1 @@
+export * from './actions.any';
diff --git a/react/features/speaker-stats/components/AbstractSpeakerStatsButton.js b/react/features/speaker-stats/components/AbstractSpeakerStatsButton.tsx
similarity index 52%
rename from react/features/speaker-stats/components/AbstractSpeakerStatsButton.js
rename to react/features/speaker-stats/components/AbstractSpeakerStatsButton.tsx
index d73cfff00..cd1ee5731 100644
--- a/react/features/speaker-stats/components/AbstractSpeakerStatsButton.js
+++ b/react/features/speaker-stats/components/AbstractSpeakerStatsButton.tsx
@@ -1,24 +1,22 @@
-// @flow
-
-import type { Dispatch } from 'redux';
-
-import { IconConnection } from '../../base/icons';
-import { AbstractButton } from '../../base/toolbox/components';
-import type { AbstractButtonProps } from '../../base/toolbox/components';
+import { IStore } from '../../app/types';
+import { IconConnection } from '../../base/icons/svg';
+// eslint-disable-next-line lines-around-comment
+// @ts-ignore
+import { AbstractButton, type AbstractButtonProps } from '../../base/toolbox/components';
type Props = AbstractButtonProps & {
/**
* True if the navigation bar should be visible.
*/
- dispatch: Dispatch
+ dispatch: IStore['dispatch'];
};
/**
* Implementation of a button for opening speaker stats dialog.
*/
-class AbstractSpeakerStatsButton extends AbstractButton {
+class AbstractSpeakerStatsButton extends AbstractButton {
accessibilityLabel = 'toolbar.accessibilityLabel.speakerStats';
icon = IconConnection;
label = 'toolbar.speakerStats';
diff --git a/react/features/speaker-stats/components/AbstractSpeakerStatsList.js b/react/features/speaker-stats/components/AbstractSpeakerStatsList.ts
similarity index 54%
rename from react/features/speaker-stats/components/AbstractSpeakerStatsList.js
rename to react/features/speaker-stats/components/AbstractSpeakerStatsList.ts
index 31e11e005..eb1282fae 100644
--- a/react/features/speaker-stats/components/AbstractSpeakerStatsList.js
+++ b/react/features/speaker-stats/components/AbstractSpeakerStatsList.ts
@@ -1,11 +1,10 @@
-// @flow
-
import { useCallback, useEffect, useRef } from 'react';
import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
-import { getLocalParticipant } from '../../base/participants';
-import { initUpdateStats } from '../actions';
+import { IReduxState } from '../../app/types';
+import { getLocalParticipant } from '../../base/participants/functions';
+import { initUpdateStats } from '../actions.any';
import {
SPEAKER_STATS_RELOAD_INTERVAL
} from '../constants';
@@ -17,21 +16,22 @@ import {
* @param {Object} itemStyles - Styles for the speaker stats item.
* @returns {Function}
*/
-const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Object): Function[] => {
+const abstractSpeakerStatsList = (speakerStatsItem: Function): Function[] => {
const dispatch = useDispatch();
const { t } = useTranslation();
- const conference = useSelector(state => state['features/base/conference'].conference);
+ const { conference } = useSelector((state: IReduxState) => state['features/base/conference']);
const {
stats: speakerStats,
showFaceExpressions,
sortedSpeakerStatsIds
- } = useSelector(state => state['features/speaker-stats']);
+ } = useSelector((state: IReduxState) => state['features/speaker-stats']);
const localParticipant = useSelector(getLocalParticipant);
const { defaultRemoteDisplayName } = useSelector(
- state => state['features/base/config']) || {};
- const { faceLandmarks } = useSelector(state => state['features/base/config']) || {};
- const { faceExpressions } = useSelector(state => state['features/face-landmarks']) || {};
- const reloadInterval = useRef(null);
+ (state: IReduxState) => state['features/base/config']) || {};
+ const { faceLandmarks: faceLandmarksConfig } = useSelector((state: IReduxState) =>
+ state['features/base/config']) || {};
+ const { faceLandmarks } = useSelector((state: IReduxState) => state['features/face-landmarks']) || {};
+ const reloadInterval = useRef<number>();
/**
* Update the internal state with the latest speaker stats.
@@ -40,7 +40,7 @@ const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Objec
* @private
*/
const getSpeakerStats = useCallback(() => {
- const stats = conference.getSpeakerStats();
+ const stats = conference?.getSpeakerStats();
for (const userId in stats) {
if (stats[userId]) {
@@ -48,40 +48,42 @@ const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Objec
const meString = t('me');
stats[userId].setDisplayName(
- localParticipant.name
+ localParticipant?.name
? `${localParticipant.name} (${meString})`
: meString
);
- if (faceLandmarks?.enableDisplayFaceExpressions) {
- stats[userId].setFaceExpressions(faceExpressions);
+
+ if (faceLandmarksConfig?.enableDisplayFaceExpressions) {
+ stats[userId].setFaceLandmarks(faceLandmarks);
}
}
if (!stats[userId].getDisplayName()) {
stats[userId].setDisplayName(
- conference.getParticipantById(userId)?.name
+ conference?.getParticipantById(userId)?.name
);
}
}
}
- return stats;
- }, [ faceExpressions ]);
+ return stats ?? {};
+ }, [ faceLandmarks ]);
const updateStats = useCallback(
() => dispatch(initUpdateStats(getSpeakerStats)),
[ dispatch, initUpdateStats, getSpeakerStats ]);
useEffect(() => {
- if (reloadInterval.current) {
- clearInterval(reloadInterval.current);
- }
- reloadInterval.current = setInterval(() => {
+ reloadInterval.current = window.setInterval(() => {
updateStats();
}, SPEAKER_STATS_RELOAD_INTERVAL);
- return () => clearInterval(reloadInterval.current);
- }, [ faceExpressions ]);
+ return () => {
+ if (reloadInterval.current) {
+ clearInterval(reloadInterval.current);
+ }
+ };
+ }, [ faceLandmarks ]);
const localSpeakerStats = Object.keys(speakerStats).length === 0 ? getSpeakerStats() : speakerStats;
const localSortedSpeakerStatsIds
@@ -91,22 +93,17 @@ const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Objec
return userIds.map(userId => {
const statsModel = localSpeakerStats[userId];
- const props = {};
-
- props.isDominantSpeaker = statsModel.isDominantSpeaker();
- props.dominantSpeakerTime = statsModel.getTotalDominantSpeakerTime();
- props.participantId = userId;
- props.hasLeft = statsModel.hasLeft();
- if (showFaceExpressions) {
- props.faceExpressions = statsModel.getFaceExpressions();
- }
- props.hidden = statsModel.hidden;
- props.showFaceExpressions = showFaceExpressions;
- props.displayName = statsModel.getDisplayName() || defaultRemoteDisplayName;
- if (itemStyles) {
- props.styles = itemStyles;
- }
- props.t = t;
+ const props = {
+ isDominantSpeaker: statsModel.isDominantSpeaker(),
+ dominantSpeakerTime: statsModel.getTotalDominantSpeakerTime(),
+ participantId: userId,
+ hasLeft: statsModel.hasLeft(),
+ faceLandmarks: showFaceExpressions ? statsModel.getFaceLandmarks() : undefined,
+ hidden: statsModel.hidden,
+ showFaceExpressions,
+ displayName: statsModel.getDisplayName() || defaultRemoteDisplayName,
+ t
+ };
return speakerStatsItem(props);
});
diff --git a/react/features/speaker-stats/components/_.native.js b/react/features/speaker-stats/components/_.native.ts
similarity index 65%
rename from react/features/speaker-stats/components/_.native.js
rename to react/features/speaker-stats/components/_.native.ts
index 738c4d2b8..88a747325 100644
--- a/react/features/speaker-stats/components/_.native.js
+++ b/react/features/speaker-stats/components/_.native.ts
@@ -1 +1,2 @@
+// @ts-ignore
export * from './native';
diff --git a/react/features/speaker-stats/components/_.web.js b/react/features/speaker-stats/components/_.web.ts
similarity index 100%
rename from react/features/speaker-stats/components/_.web.js
rename to react/features/speaker-stats/components/_.web.ts
diff --git a/react/features/speaker-stats/components/index.js b/react/features/speaker-stats/components/index.ts
similarity index 60%
rename from react/features/speaker-stats/components/index.js
rename to react/features/speaker-stats/components/index.ts
index cda61441e..796480f8e 100644
--- a/react/features/speaker-stats/components/index.js
+++ b/react/features/speaker-stats/components/index.ts
@@ -1 +1,2 @@
+// @ts-ignore
export * from './_';
diff --git a/react/features/speaker-stats/components/timeFunctions.js b/react/features/speaker-stats/components/timeFunctions.ts
similarity index 90%
rename from react/features/speaker-stats/components/timeFunctions.js
rename to react/features/speaker-stats/components/timeFunctions.ts
index 2b2ce7b1f..a6f933833 100644
--- a/react/features/speaker-stats/components/timeFunctions.js
+++ b/react/features/speaker-stats/components/timeFunctions.ts
@@ -7,7 +7,7 @@
* @private
* @returns {number}
*/
-function getHoursCount(milliseconds) {
+function getHoursCount(milliseconds: number) {
return Math.floor(milliseconds / (60 * 60 * 1000));
}
@@ -18,7 +18,7 @@ function getHoursCount(milliseconds) {
* @private
* @returns {number}
*/
-function getMinutesCount(milliseconds) {
+function getMinutesCount(milliseconds: number) {
return Math.floor(milliseconds / (60 * 1000) % 60);
}
@@ -29,7 +29,7 @@ function getMinutesCount(milliseconds) {
* @private
* @returns {number}
*/
-function getSecondsCount(milliseconds) {
+function getSecondsCount(milliseconds: number) {
return Math.floor(milliseconds / 1000 % 60);
}
@@ -85,6 +85,6 @@ export function createLocalizedTime(time: number, t: Function) {
* key for react to iterate upon.
* @returns {string}
*/
-function createTimeDisplay(count, countNounKey, t) {
+function createTimeDisplay(count: number, countNounKey: string, t: Function) {
return t(countNounKey, { count });
}
diff --git a/react/features/speaker-stats/components/web/SpeakerStats.tsx b/react/features/speaker-stats/components/web/SpeakerStats.tsx
index d865a40fb..3cc4fef57 100644
--- a/react/features/speaker-stats/components/web/SpeakerStats.tsx
+++ b/react/features/speaker-stats/components/web/SpeakerStats.tsx
@@ -1,15 +1,28 @@
import React, { useCallback, useEffect } from 'react';
+import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
import { makeStyles } from 'tss-react/mui';
import { IReduxState } from '../../../app/types';
+import Icon from '../../../base/icons/components/Icon';
+import {
+ IconEmotionsAngry,
+ IconEmotionsDisgusted,
+ IconEmotionsFearful,
+ IconEmotionsHappy,
+ IconEmotionsNeutral,
+ IconEmotionsSad,
+ IconEmotionsSurprised
+} from '../../../base/icons/svg';
+// eslint-disable-next-line lines-around-comment
+// @ts-ignore
+import { Tooltip } from '../../../base/tooltip';
import Dialog from '../../../base/ui/components/web/Dialog';
import { escapeRegexp } from '../../../base/util/helpers';
-import { initSearch, resetSearchCriteria, toggleFaceExpressions } from '../../actions';
+import { initSearch, resetSearchCriteria, toggleFaceExpressions } from '../../actions.any';
import {
DISPLAY_SWITCH_BREAKPOINT,
- MOBILE_BREAKPOINT,
- RESIZE_SEARCH_SWITCH_CONTAINER_BREAKPOINT
+ MOBILE_BREAKPOINT
} from '../../constants';
import FaceExpressionsSwitch from './FaceExpressionsSwitch';
@@ -20,69 +33,171 @@ import SpeakerStatsSearch from './SpeakerStatsSearch';
const useStyles = makeStyles()(theme => {
return {
speakerStats: {
+ '& .header': {
+ position: 'fixed',
+ backgroundColor: theme.palette.ui01,
+ paddingLeft: theme.spacing(4),
+ paddingRight: theme.spacing(4),
+ marginLeft: `-${theme.spacing(4)}`,
+ '&.large': {
+ width: '616px'
+ },
+ '&.medium': {
+ width: '352px'
+ },
+ '@media (max-width: 448px)': {
+ width: 'calc(100% - 48px) !important'
+ },
+ '& .upper-header': {
+ display: 'flex',
+ justifyContent: 'space-between',
+ alignItems: 'center',
+ width: '100%',
+ '& .search-switch-container': {
+ display: 'flex',
+ width: '100%',
+ '& .search-container': {
+ width: 175,
+ marginRight: theme.spacing(3)
+ },
+ '& .search-container-full-width': {
+ width: '100%'
+ }
+ },
+ '& .emotions-icons': {
+ display: 'flex',
+ '& svg': {
+ fill: '#000'
+ },
+ '&>div': {
+ marginRight: theme.spacing(3)
+ },
+ '&>div:last-child': {
+ marginRight: 0
+ }
+ }
+ }
+ },
'& .row': {
display: 'flex',
alignItems: 'center',
-
- '& .avatar': {
- width: '32px',
- marginRight: theme.spacing(3)
- },
-
'& .name-time': {
width: 'calc(100% - 48px)',
display: 'flex',
justifyContent: 'space-between',
- alignItems: 'center'
+ alignItems: 'center',
+ '&.expressions-on': {
+ width: 'calc(47% - 48px)',
+ marginRight: theme.spacing(4)
+ }
},
-
- '& .name-time_expressions-on': {
- width: 'calc(47% - 48px)'
- },
-
- '& .expressions': {
- width: 'calc(53% - 29px)',
+ '& .timeline-container': {
+ height: '100%',
+ width: `calc(53% - ${theme.spacing(4)})`,
display: 'flex',
- justifyContent: 'space-between',
-
- '& .expression': {
- width: '30px',
- textAlign: 'center'
+ alignItems: 'center',
+ borderLeftWidth: 1,
+ borderLeftColor: theme.palette.ui02,
+ borderLeftStyle: 'solid',
+ '& .timeline': {
+ height: theme.spacing(2),
+ display: 'flex',
+ width: '100%',
+ '&>div': {
+ marginRight: theme.spacing(1),
+ borderRadius: 5
+ },
+ '&>div:first-child': {
+ borderRadius: '0 5px 5px 0'
+ },
+ '&>div:last-child': {
+ marginRight: 0,
+ borderRadius: '5px 0 0 5px'
+ }
+ }
+ },
+ '& .axis-container': {
+ height: '100%',
+ width: `calc(53% - ${theme.spacing(6)})`,
+ display: 'flex',
+ alignItems: 'center',
+ marginLeft: theme.spacing(3),
+ '& div': {
+ borderRadius: 5
+ },
+ '& .axis': {
+ height: theme.spacing(1),
+ display: 'flex',
+ width: '100%',
+ backgroundColor: theme.palette.ui03,
+ position: 'relative',
+ '& .left-bound': {
+ position: 'absolute',
+ bottom: 10,
+ left: 0
+ },
+ '& .right-bound': {
+ position: 'absolute',
+ bottom: 10,
+ right: 0
+ },
+ '& .handler': {
+ position: 'absolute',
+ backgroundColor: theme.palette.ui09,
+ height: 12,
+ marginTop: -4,
+ display: 'flex',
+ justifyContent: 'space-between',
+ '& .resize': {
+ height: '100%',
+ width: 5,
+ cursor: 'col-resize'
+ }
+ }
}
}
+ },
+ '& .separator': {
+ width: 'calc(100% + 48px)',
+ height: 1,
+ marginLeft: -24,
+ backgroundColor: theme.palette.ui02
}
- },
- labelsContainer: {
- position: 'relative'
- },
- separator: {
- position: 'absolute',
- width: 'calc(100% + 48px)',
- height: 1,
- left: -24,
- backgroundColor: theme.palette.ui05
- },
- searchSwitchContainer: {
- display: 'flex',
- justifyContent: 'space-between',
- alignItems: 'center',
- width: '100%'
- },
- searchSwitchContainerExpressionsOn: {
- width: '58.5%',
- [theme.breakpoints.down(RESIZE_SEARCH_SWITCH_CONTAINER_BREAKPOINT)]: {
- width: '100%'
- }
- },
- searchContainer: {
- width: '50%'
- },
- searchContainerFullWidth: {
- width: '100%'
}
};
});
+const EMOTIONS_LEGEND = [
+ {
+ translationKey: 'speakerStats.neutral',
+ icon: IconEmotionsNeutral
+ },
+ {
+ translationKey: 'speakerStats.happy',
+ icon: IconEmotionsHappy
+ },
+ {
+ translationKey: 'speakerStats.surprised',
+ icon: IconEmotionsSurprised
+ },
+ {
+ translationKey: 'speakerStats.sad',
+ icon: IconEmotionsSad
+ },
+ {
+ translationKey: 'speakerStats.fearful',
+ icon: IconEmotionsFearful
+ },
+ {
+ translationKey: 'speakerStats.angry',
+ icon: IconEmotionsAngry
+ },
+ {
+ translationKey: 'speakerStats.disgusted',
+ icon: IconEmotionsDisgusted
+ }
+];
+
const SpeakerStats = () => {
const { faceLandmarks } = useSelector((state: IReduxState) => state['features/base/config']);
const { showFaceExpressions } = useSelector((state: IReduxState) => state['features/speaker-stats']);
@@ -91,6 +206,7 @@ const SpeakerStats = () => {
const displayLabels = clientWidth > MOBILE_BREAKPOINT;
const dispatch = useDispatch();
const { classes } = useStyles();
+ const { t } = useTranslation();
const onToggleFaceExpressions = useCallback(() =>
dispatch(toggleFaceExpressions())
@@ -104,9 +220,9 @@ const SpeakerStats = () => {
useEffect(() => {
showFaceExpressions && !displaySwitch && dispatch(toggleFaceExpressions());
}, [ clientWidth ]);
- useEffect(() => () => {
- dispatch(resetSearchCriteria());
- }, []);
+
+ // @ts-ignore
+ useEffect(() => () => dispatch(resetSearchCriteria()), []);
return (
diff --git a/react/features/speaker-stats/components/web/SpeakerStatsButton.js b/react/features/speaker-stats/components/web/SpeakerStatsButton.tsx
similarity index 60%
rename from react/features/speaker-stats/components/web/SpeakerStatsButton.js
rename to react/features/speaker-stats/components/web/SpeakerStatsButton.tsx
index ae4eb3532..df266727d 100644
--- a/react/features/speaker-stats/components/web/SpeakerStatsButton.js
+++ b/react/features/speaker-stats/components/web/SpeakerStatsButton.tsx
@@ -1,12 +1,12 @@
-// @flow
-
-import { createToolbarEvent, sendAnalytics } from '../../../analytics';
-import { openDialog } from '../../../base/dialog';
-import { translate } from '../../../base/i18n';
-import { connect } from '../../../base/redux';
+import { createToolbarEvent } from '../../../analytics/AnalyticsEvents';
+import { sendAnalytics } from '../../../analytics/functions';
+import { openDialog } from '../../../base/dialog/actions';
+import { translate } from '../../../base/i18n/functions';
+import { connect } from '../../../base/redux/functions';
import AbstractSpeakerStatsButton from '../AbstractSpeakerStatsButton';
-import { SpeakerStats } from './';
+import SpeakerStats from './SpeakerStats';
+
/**
* Implementation of a button for opening speaker stats dialog.
@@ -20,6 +20,7 @@ class SpeakerStatsButton extends AbstractSpeakerStatsButton {
* @returns {void}
*/
_handleClick() {
+ // @ts-ignore
const { dispatch } = this.props;
sendAnalytics(createToolbarEvent('speaker.stats'));
@@ -27,4 +28,5 @@ class SpeakerStatsButton extends AbstractSpeakerStatsButton {
}
}
+// @ts-ignore
export default translate(connect()(SpeakerStatsButton));
diff --git a/react/features/speaker-stats/components/web/SpeakerStatsItem.js b/react/features/speaker-stats/components/web/SpeakerStatsItem.js
deleted file mode 100644
index 3640bfac3..000000000
--- a/react/features/speaker-stats/components/web/SpeakerStatsItem.js
+++ /dev/null
@@ -1,136 +0,0 @@
-/* @flow */
-
-import React from 'react';
-
-import { Avatar, StatelessAvatar } from '../../../base/avatar';
-import { getInitials } from '../../../base/avatar/functions';
-import BaseTheme from '../../../base/ui/components/BaseTheme';
-import { FACE_EXPRESSIONS } from '../../../face-landmarks/constants';
-
-import TimeElapsed from './TimeElapsed';
-
-/**
- * The type of the React {@code Component} props of {@link SpeakerStatsItem}.
- */
-type Props = {
-
- /**
- * The name of the participant.
- */
- displayName: string,
-
- /**
- * The object that has as keys the face expressions of the
- * participant and as values a number that represents the count .
- */
- faceExpressions: Object,
-
- /**
- * True if the face expressions detection is not disabled.
- */
- showFaceExpressions: boolean,
-
- /**
- * The total milliseconds the participant has been dominant speaker.
- */
- dominantSpeakerTime: number,
-
- /**
- * The id of the user.
- */
- participantId: string,
-
- /**
- * True if the participant is no longer in the meeting.
- */
- hasLeft: boolean,
-
- /**
- * True if the participant is not shown in speaker stats.
- */
- hidden: boolean,
-
- /**
- * True if the participant is currently the dominant speaker.
- */
- isDominantSpeaker: boolean,
-
- /**
- * Styles for the item.
- */
- styles: Object,
-
- /**
- * Invoked to obtain translated strings.
- */
- t: Function
-}
-
-const SpeakerStatsItem = (props: Props) => {
- const hasLeftClass = props.hasLeft ? props.styles.hasLeft : '';
- const rowDisplayClass = `row ${hasLeftClass} ${props.styles.item}`;
- const expressionClass = 'expression';
- const nameTimeClass = `name-time${
- props.showFaceExpressions ? ' name-time_expressions-on' : ''
- }`;
- const timeClass = `${props.styles.time} ${props.isDominantSpeaker ? props.styles.dominant : ''}`;
-
-
- const FaceExpressions = () => FACE_EXPRESSIONS.map(
- expression => (
-
- { props.faceExpressions[expression] }
-
- )
- );
-
- return (
-
-
- {
- props.hasLeft ? (
-
- ) : (
-
- )
- }
-
-
-
- { props.displayName }
-
-
-
-
-
- { props.showFaceExpressions
- && (
-
-
-
- )}
-
- );
-};
-
-export default SpeakerStatsItem;
diff --git a/react/features/speaker-stats/components/web/SpeakerStatsItem.tsx b/react/features/speaker-stats/components/web/SpeakerStatsItem.tsx
new file mode 100644
index 000000000..2357d5be0
--- /dev/null
+++ b/react/features/speaker-stats/components/web/SpeakerStatsItem.tsx
@@ -0,0 +1,115 @@
+// eslint-disable-next-line lines-around-comment
+import React from 'react';
+
+// @ts-ignore
+import Avatar from '../../../base/avatar/components/Avatar';
+import StatelessAvatar from '../../../base/avatar/components/web/StatelessAvatar';
+import { getInitials } from '../../../base/avatar/functions';
+import BaseTheme from '../../../base/ui/components/BaseTheme.web';
+import { FaceLandmarks } from '../../../face-landmarks/types';
+
+import TimeElapsed from './TimeElapsed';
+import Timeline from './Timeline';
+
+/**
+ * The type of the React {@code Component} props of {@link SpeakerStatsItem}.
+ */
+type Props = {
+
+ /**
+ * The name of the participant.
+ */
+ displayName: string;
+
+ /**
+ * The total milliseconds the participant has been dominant speaker.
+ */
+ dominantSpeakerTime: number;
+
+ /**
+ * The face landmarks detected for the participant over time,
+ * used to render the expressions timeline.
+ */
+ faceLandmarks?: FaceLandmarks[];
+
+ /**
+ * True if the participant is no longer in the meeting.
+ */
+ hasLeft: boolean;
+
+ /**
+ * True if the participant is not shown in speaker stats.
+ */
+ hidden: boolean;
+
+ /**
+ * True if the participant is currently the dominant speaker.
+ */
+ isDominantSpeaker: boolean;
+
+ /**
+ * The id of the user.
+ */
+ participantId: string;
+
+ /**
+ * True if the face expressions detection is not disabled.
+ */
+ showFaceExpressions: boolean;
+
+ /**
+ * Invoked to obtain translated strings.
+ */
+ t: Function;
+};
+
+const SpeakerStatsItem = (props: Props) => {
+ const rowDisplayClass = `row item ${props.hasLeft ? 'has-left' : ''}`;
+ const nameTimeClass = `name-time${
+ props.showFaceExpressions ? ' expressions-on' : ''
+ }`;
+ const timeClass = `time ${props.isDominantSpeaker ? 'dominant' : ''}`;
+
+ return (
+