diff --git a/globals.native.d.ts b/globals.native.d.ts index c631700ce..110e465ef 100644 --- a/globals.native.d.ts +++ b/globals.native.d.ts @@ -23,6 +23,8 @@ interface IWindow { onerror: (event: string, source: any, lineno: any, colno: any, e: Error) => void; onunhandledrejection: (event: any) => void; + setInterval: typeof setInterval; + clearInterval: typeof clearInterval; setTimeout: typeof setTimeout; clearTimeout: typeof clearTimeout; setImmediate: typeof setImmediate; diff --git a/lang/main-enGB.json b/lang/main-enGB.json index fd0df0499..be09e4965 100644 --- a/lang/main-enGB.json +++ b/lang/main-enGB.json @@ -365,7 +365,7 @@ "mute": "Mute or unmute your microphone", "pushToTalk": "Press to transmit", "raiseHand": "Raise or lower your hand", - "showSpeakerStats": "Show speaker stats", + "showSpeakerStats": "Show participants stats", "toggleChat": "Open or close the chat", "toggleFilmstrip": "Show or hide video thumbnails", "toggleScreensharing": "Switch between camera and screen sharing", @@ -579,7 +579,7 @@ "minutes": "{{count}}m", "name": "Name", "seconds": "{{count}}s", - "speakerStats": "Speaker Stats", + "speakerStats": "Participants Stats", "speakerTime": "Speaker Time" }, "startupoverlay": { @@ -626,7 +626,7 @@ "sharedvideo": "Toggle video sharing", "shortcuts": "Toggle shortcuts", "show": "Show on stage", - "speakerStats": "Toggle speaker statistics", + "speakerStats": "Toggle participants statistics", "tileView": "Toggle tile view", "toggleCamera": "Toggle camera", "videoblur": "", @@ -662,7 +662,7 @@ "shareRoom": "Invite someone", "sharedvideo": "Share video", "shortcuts": "View shortcuts", - "speakerStats": "Speaker stats", + "speakerStats": "Participants stats", "startScreenSharing": "Start screen sharing", "startSubtitles": "Start subtitles", "startvideoblur": "", diff --git a/lang/main.json b/lang/main.json index 1572b5b8d..eea77a973 100644 --- a/lang/main.json +++ b/lang/main.json @@ -511,7 +511,7 @@ "mute": "Mute or unmute your microphone", 
"pushToTalk": "Push to talk", "raiseHand": "Raise or lower your hand", - "showSpeakerStats": "Show speaker stats", + "showSpeakerStats": "Show participants stats", "toggleChat": "Open or close the chat", "toggleFilmstrip": "Show or hide video thumbnails", "toggleParticipantsPane": "Show or hide the participants pane", @@ -1038,7 +1038,7 @@ "sad": "Sad", "search": "Search", "seconds": "{{count}}s", - "speakerStats": "Speaker Stats", + "speakerStats": "Participants Stats", "speakerTime": "Speaker Time", "surprised": "Surprised" }, @@ -1119,7 +1119,7 @@ "shortcuts": "Toggle shortcuts", "show": "Show on stage", "silence": "Silence", - "speakerStats": "Toggle speaker statistics", + "speakerStats": "Toggle participants statistics", "surprised": "Surprised", "tileView": "Toggle tile view", "toggleCamera": "Toggle camera", @@ -1206,7 +1206,7 @@ "shortcuts": "View shortcuts", "showWhiteboard": "Show whiteboard", "silence": "Silence", - "speakerStats": "Speaker stats", + "speakerStats": "Participants stats", "startScreenSharing": "Start screen sharing", "startSubtitles": "Subtitles • {{language}}", "stopAudioSharing": "Stop audio sharing", diff --git a/react/features/base/conference/reducer.ts b/react/features/base/conference/reducer.ts index 6167a3101..46d888add 100644 --- a/react/features/base/conference/reducer.ts +++ b/react/features/base/conference/reducer.ts @@ -1,4 +1,6 @@ +import { FaceLandmarks } from '../../face-landmarks/types'; import { LOCKED_LOCALLY, LOCKED_REMOTELY } from '../../room-lock/constants'; +import { ISpeakerStats } from '../../speaker-stats/reducer'; import { CONNECTION_WILL_CONNECT, SET_LOCATION_URL } from '../connection/actionTypes'; import { JitsiConferenceErrors } from '../lib-jitsi-meet'; import ReducerRegistry from '../redux/ReducerRegistry'; @@ -53,6 +55,7 @@ export interface IJitsiConference { getMeetingUniqueId: Function; getParticipantById: Function; getParticipants: Function; + getSpeakerStats: () => ISpeakerStats; grantOwner: Function; 
isAVModerationSupported: Function; isCallstatsEnabled: Function; @@ -74,6 +77,7 @@ export interface IJitsiConference { sendCommand: Function; sendCommandOnce: Function; sendEndpointMessage: Function; + sendFaceLandmarks: (faceLandmarks: FaceLandmarks) => void; sendFeedback: Function; sendLobbyMessage: Function; sessionId: string; diff --git a/react/features/base/icons/svg/emotions-angry.svg b/react/features/base/icons/svg/emotions-angry.svg new file mode 100644 index 000000000..fa5a81306 --- /dev/null +++ b/react/features/base/icons/svg/emotions-angry.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/react/features/base/icons/svg/emotions-disgusted.svg b/react/features/base/icons/svg/emotions-disgusted.svg new file mode 100644 index 000000000..c80162e8d --- /dev/null +++ b/react/features/base/icons/svg/emotions-disgusted.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/react/features/base/icons/svg/emotions-fearful.svg b/react/features/base/icons/svg/emotions-fearful.svg new file mode 100644 index 000000000..4a31450d4 --- /dev/null +++ b/react/features/base/icons/svg/emotions-fearful.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/react/features/base/icons/svg/emotions-happy.svg b/react/features/base/icons/svg/emotions-happy.svg new file mode 100644 index 000000000..009295656 --- /dev/null +++ b/react/features/base/icons/svg/emotions-happy.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/react/features/base/icons/svg/emotions-neutral.svg b/react/features/base/icons/svg/emotions-neutral.svg new file mode 100644 index 000000000..50d191d60 --- /dev/null +++ b/react/features/base/icons/svg/emotions-neutral.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/react/features/base/icons/svg/emotions-sad.svg b/react/features/base/icons/svg/emotions-sad.svg new file mode 100644 index 000000000..bc311e86b --- /dev/null +++ b/react/features/base/icons/svg/emotions-sad.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git 
a/react/features/base/icons/svg/emotions-surprised.svg b/react/features/base/icons/svg/emotions-surprised.svg new file mode 100644 index 000000000..5365469de --- /dev/null +++ b/react/features/base/icons/svg/emotions-surprised.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/react/features/base/icons/svg/index.ts b/react/features/base/icons/svg/index.ts index dc7d4d852..a77dfd6e5 100644 --- a/react/features/base/icons/svg/index.ts +++ b/react/features/base/icons/svg/index.ts @@ -29,6 +29,13 @@ export { default as IconE2EE } from './e2ee.svg'; export { default as IconEnlarge } from './enlarge.svg'; export { default as IconEnterFullscreen } from './enter-fullscreen.svg'; export { default as IconEnvelope } from './envelope.svg'; +export { default as IconEmotionsAngry } from './emotions-angry.svg'; +export { default as IconEmotionsDisgusted } from './emotions-disgusted.svg'; +export { default as IconEmotionsFearful } from './emotions-fearful.svg'; +export { default as IconEmotionsHappy } from './emotions-happy.svg'; +export { default as IconEmotionsNeutral } from './emotions-neutral.svg'; +export { default as IconEmotionsSad } from './emotions-sad.svg'; +export { default as IconEmotionsSurprised } from './emotions-surprised.svg'; export { default as IconExclamationSolid } from './exclamation-solid.svg'; export { default as IconExclamationTriangle } from './exclamation-triangle.svg'; export { default as IconExitFullscreen } from './exit-fullscreen.svg'; diff --git a/react/features/face-landmarks/FaceLandmarksDetector.ts b/react/features/face-landmarks/FaceLandmarksDetector.ts index 7fe803f2f..0af9c56a4 100644 --- a/react/features/face-landmarks/FaceLandmarksDetector.ts +++ b/react/features/face-landmarks/FaceLandmarksDetector.ts @@ -5,20 +5,21 @@ import { getLocalVideoTrack } from '../base/tracks/functions'; import { getBaseUrl } from '../base/util/helpers'; import { - addFaceExpression, + addFaceLandmarks, clearFaceExpressionBuffer, newFaceBox } from './actions'; 
import { DETECTION_TYPES, DETECT_FACE, - FACE_LANDMARK_DETECTION_ERROR_THRESHOLD, + FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD, INIT_WORKER, + NO_DETECTION, + NO_FACE_DETECTION_THRESHOLD, WEBHOOK_SEND_TIME_INTERVAL } from './constants'; import { getDetectionInterval, - getFaceExpressionDuration, sendFaceExpressionsWebhook } from './functions'; import logger from './logger'; @@ -33,13 +34,14 @@ class FaceLandmarksDetector { private worker: Worker | null = null; private lastFaceExpression: string | null = null; private lastFaceExpressionTimestamp: number | null = null; - private duplicateConsecutiveExpressions = 0; private webhookSendInterval: number | null = null; private detectionInterval: number | null = null; private recognitionActive = false; private canvas?: HTMLCanvasElement; private context?: CanvasRenderingContext2D | null; private errorCount = 0; + private noDetectionCount = 0; + private noDetectionStartTimestamp: number | null = null; /** * Constructor for class, checks if the environment supports OffscreenCanvas. 
@@ -97,27 +99,48 @@ class FaceLandmarksDetector { // @ts-ignore const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' }); + const state = getState(); + const addToBuffer = Boolean(state['features/base/config'].webhookProxyUrl); // @ts-ignore workerUrl = window.URL.createObjectURL(workerBlob); - this.worker = new Worker(workerUrl, { name: 'Face Recognition Worker' }); + this.worker = new Worker(workerUrl, { name: 'Face Landmarks Worker' }); this.worker.onmessage = ({ data }: MessageEvent) => { - const { faceExpression, faceBox } = data; + const { faceExpression, faceBox, faceCount } = data; + const messageTimestamp = Date.now(); - if (faceExpression) { - if (faceExpression === this.lastFaceExpression) { - this.duplicateConsecutiveExpressions++; - } else { - if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) { - dispatch(addFaceExpression( - this.lastFaceExpression, - getFaceExpressionDuration(getState(), this.duplicateConsecutiveExpressions + 1), - this.lastFaceExpressionTimestamp - )); - } - this.lastFaceExpression = faceExpression; - this.lastFaceExpressionTimestamp = Date.now(); - this.duplicateConsecutiveExpressions = 0; + // if the number of faces detected is different from 1 we do not take into consideration that detection + if (faceCount !== 1) { + if (this.noDetectionCount === 0) { + this.noDetectionStartTimestamp = messageTimestamp; + } + this.noDetectionCount++; + + if (this.noDetectionCount === NO_FACE_DETECTION_THRESHOLD && this.noDetectionStartTimestamp) { + this.addFaceLandmarks( + dispatch, + this.noDetectionStartTimestamp, + NO_DETECTION, + addToBuffer + ); + } + + return; + } else if (this.noDetectionCount > 0) { + this.noDetectionCount = 0; + this.noDetectionStartTimestamp = null; + } + + if (faceExpression?.expression) { + const { expression } = faceExpression; + + if (expression !== this.lastFaceExpression) { + this.addFaceLandmarks( + dispatch, + messageTimestamp, + expression, + 
addToBuffer + ); } } @@ -128,7 +151,7 @@ class FaceLandmarksDetector { APP.API.notifyFaceLandmarkDetected(faceBox, faceExpression); }; - const { faceLandmarks } = getState()['features/base/config']; + const { faceLandmarks } = state['features/base/config']; const detectionTypes = [ faceLandmarks?.enableFaceCentering && DETECTION_TYPES.FACE_BOX, faceLandmarks?.enableFaceExpressionsDetection && DETECTION_TYPES.FACE_EXPRESSIONS @@ -162,7 +185,7 @@ class FaceLandmarksDetector { } if (this.recognitionActive) { - logger.log('Face detection already active.'); + logger.log('Face landmarks detection already active.'); return; } @@ -179,7 +202,7 @@ class FaceLandmarksDetector { this.imageCapture = new ImageCapture(firstVideoTrack); this.recognitionActive = true; - logger.log('Start face detection'); + logger.log('Start face landmarks detection'); const { faceLandmarks } = state['features/base/config']; @@ -191,7 +214,7 @@ class FaceLandmarksDetector { ).then(status => { if (status) { this.errorCount = 0; - } else if (++this.errorCount > FACE_LANDMARK_DETECTION_ERROR_THRESHOLD) { + } else if (++this.errorCount > FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD) { /* this prevents the detection from stopping immediately after occurring an error * sometimes due to the small detection interval when starting the detection some errors * might occur due to the track not being ready @@ -228,18 +251,11 @@ class FaceLandmarksDetector { if (!this.recognitionActive || !this.isInitialized()) { return; } + const stopTimestamp = Date.now(); + const addToBuffer = Boolean(getState()['features/base/config'].webhookProxyUrl); if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) { - dispatch( - addFaceExpression( - this.lastFaceExpression, - getFaceExpressionDuration(getState(), this.duplicateConsecutiveExpressions + 1), - this.lastFaceExpressionTimestamp - ) - ); - this.duplicateConsecutiveExpressions = 0; - this.lastFaceExpression = null; - this.lastFaceExpressionTimestamp = null; + 
this.addFaceLandmarks(dispatch, stopTimestamp, null, addToBuffer); } this.webhookSendInterval && window.clearInterval(this.webhookSendInterval); @@ -248,7 +264,36 @@ class FaceLandmarksDetector { this.detectionInterval = null; this.imageCapture = null; this.recognitionActive = false; - logger.log('Stop face detection'); + logger.log('Stop face landmarks detection'); + } + + /** + * Dispatches the action for adding new face landmarks and changes the state of the class. + * + * @param {IStore.dispatch} dispatch - The redux dispatch function. + * @param {number} endTimestamp - The timestamp when the face landmarks ended. + * @param {string} newFaceExpression - The new face expression. + * @param {boolean} addToBuffer - Flag for adding the face landmarks to the buffer. + * @returns {void} + */ + private addFaceLandmarks( + dispatch: IStore['dispatch'], + endTimestamp: number, + newFaceExpression: string | null, + addToBuffer = false) { + if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) { + dispatch(addFaceLandmarks( + { + duration: endTimestamp - this.lastFaceExpressionTimestamp, + faceExpression: this.lastFaceExpression, + timestamp: this.lastFaceExpressionTimestamp + }, + addToBuffer + )); + } + + this.lastFaceExpression = newFaceExpression; + this.lastFaceExpressionTimestamp = endTimestamp; } /** diff --git a/react/features/face-landmarks/FaceLandmarksHelper.ts b/react/features/face-landmarks/FaceLandmarksHelper.ts index 2aba627e0..015f08502 100644 --- a/react/features/face-landmarks/FaceLandmarksHelper.ts +++ b/react/features/face-landmarks/FaceLandmarksHelper.ts @@ -2,7 +2,7 @@ import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm'; import { Config, FaceResult, Human } from '@vladmandic/human'; import { DETECTION_TYPES, FACE_DETECTION_SCORE_THRESHOLD, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants'; -import { DetectInput, DetectOutput, FaceBox, InitInput } from './types'; +import { DetectInput, DetectOutput, FaceBox, FaceExpression, 
InitInput } from './types'; export interface IFaceLandmarksHelper { detect: ({ image, threshold }: DetectInput) => Promise; @@ -10,7 +10,7 @@ export interface IFaceLandmarksHelper { getDetections: (image: ImageBitmap | ImageData) => Promise>; getFaceBox: (detections: Array, threshold: number) => FaceBox | undefined; getFaceCount: (detections: Array) => number; - getFaceExpression: (detections: Array) => string | undefined; + getFaceExpression: (detections: Array) => FaceExpression | undefined; init: () => Promise; } @@ -144,13 +144,18 @@ export class HumanHelper implements IFaceLandmarksHelper { * @param {Array} detections - The array with the detections. * @returns {string | undefined} */ - getFaceExpression(detections: Array): string | undefined { + getFaceExpression(detections: Array): FaceExpression | undefined { if (this.getFaceCount(detections) !== 1) { return; } - if (detections[0].emotion) { - return FACE_EXPRESSIONS_NAMING_MAPPING[detections[0].emotion[0].emotion]; + const detection = detections[0]; + + if (detection.emotion) { + return { + expression: FACE_EXPRESSIONS_NAMING_MAPPING[detection.emotion[0].emotion], + score: detection.emotion[0].score + }; } } diff --git a/react/features/face-landmarks/actionTypes.ts b/react/features/face-landmarks/actionTypes.ts index e4231f934..ba88a4668 100644 --- a/react/features/face-landmarks/actionTypes.ts +++ b/react/features/face-landmarks/actionTypes.ts @@ -1,32 +1,21 @@ /** - * Redux action type dispatched in order to add a face expression. + * Redux action type dispatched in order to add real-time faceLandmarks to timeline. * * { - * type: ADD_FACE_EXPRESSION, - * faceExpression: string, - * duration: number + * type: ADD_FACE_LANDMARKS, + * faceLandmarks: FaceLandmarks * } */ -export const ADD_FACE_EXPRESSION = 'ADD_FACE_EXPRESSION'; +export const ADD_FACE_LANDMARKS = 'ADD_FACE_LANDMARKS'; /** - * Redux action type dispatched in order to add a expression to the face expressions buffer. 
+ * Redux action type dispatched in order to clear the faceLandmarks buffer for webhook in the state. * * { - * type: ADD_TO_FACE_EXPRESSIONS_BUFFER, - * faceExpression: string + * type: CLEAR_FACE_LANDMARKS_BUFFER * } */ -export const ADD_TO_FACE_EXPRESSIONS_BUFFER = 'ADD_TO_FACE_EXPRESSIONS_BUFFER'; - -/** - * Redux action type dispatched in order to clear the face expressions buffer in the state. - * - * { - * type: CLEAR_FACE_EXPRESSIONS_BUFFER - * } -*/ -export const CLEAR_FACE_EXPRESSIONS_BUFFER = 'CLEAR_FACE_EXPRESSIONS_BUFFER'; +export const CLEAR_FACE_LANDMARKS_BUFFER = 'CLEAR_FACE_LANDMARKS_BUFFER'; /** * Redux action type dispatched in order to update coordinates of a detected face. diff --git a/react/features/face-landmarks/actions.ts b/react/features/face-landmarks/actions.ts index a603a0313..d016d9dcc 100644 --- a/react/features/face-landmarks/actions.ts +++ b/react/features/face-landmarks/actions.ts @@ -3,56 +3,35 @@ import './createImageBitmap'; import { AnyAction } from 'redux'; import { - ADD_FACE_EXPRESSION, - ADD_TO_FACE_EXPRESSIONS_BUFFER, - CLEAR_FACE_EXPRESSIONS_BUFFER, + ADD_FACE_LANDMARKS, + CLEAR_FACE_LANDMARKS_BUFFER, NEW_FACE_COORDINATES } from './actionTypes'; -import { FaceBox } from './types'; +import { FaceBox, FaceLandmarks } from './types'; /** - * Adds a new face expression and its duration. + * Adds new face landmarks to the timeline. * - * @param {string} faceExpression - Face expression to be added. - * @param {number} duration - Duration in seconds of the face expression. - * @param {number} timestamp - Duration in seconds of the face expression. + * @param {FaceLandmarks} faceLandmarks - The new face landmarks to timeline. + * @param {boolean} addToBuffer - If true adds the face landmarks to a buffer in the reducer for webhook. 
* @returns {AnyAction} */ -export function addFaceExpression(faceExpression: string, duration: number, timestamp: number): AnyAction { +export function addFaceLandmarks(faceLandmarks: FaceLandmarks, addToBuffer: boolean): AnyAction { return { - type: ADD_FACE_EXPRESSION, - faceExpression, - duration, - timestamp + type: ADD_FACE_LANDMARKS, + faceLandmarks, + addToBuffer }; } /** - * Adds a face expression with its timestamp to the face expression buffer. + * Clears the face landmarks array in the state. * - * @param {Object} faceExpression - Object containing face expression string and its timestamp. * @returns {AnyAction} */ -export function addToFaceExpressionsBuffer( - faceExpression: { - emotion: string; - timestamp: number; - } -): AnyAction { +export function clearFaceExpressionBuffer(): AnyAction { return { - type: ADD_TO_FACE_EXPRESSIONS_BUFFER, - faceExpression - }; -} - -/** - * Clears the face expressions array in the state. - * - * @returns {Object} - */ -export function clearFaceExpressionBuffer() { - return { - type: CLEAR_FACE_EXPRESSIONS_BUFFER + type: CLEAR_FACE_LANDMARKS_BUFFER }; } diff --git a/react/features/face-landmarks/constants.ts b/react/features/face-landmarks/constants.ts index 14954bd27..81d4cff7f 100644 --- a/react/features/face-landmarks/constants.ts +++ b/react/features/face-landmarks/constants.ts @@ -37,6 +37,11 @@ export const INIT_WORKER = 'INIT_WORKER'; */ export const FACE_BOX_EVENT_TYPE = 'face-box'; +/** + * Type of event sent on the data channel. + */ +export const FACE_LANDMARKS_EVENT_TYPE = 'face-landmarks'; + /** * Milliseconds interval value for sending new image data to the worker. */ @@ -64,4 +69,15 @@ export const FACE_DETECTION_SCORE_THRESHOLD = 0.75; /** * Threshold for stopping detection after a certain number of consecutive errors have occurred. 
*/ -export const FACE_LANDMARK_DETECTION_ERROR_THRESHOLD = 4; +export const FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD = 4; + +/** + * Threshold for number of consecutive detections with no face, + * so that when achieved there will be dispatched an action. + */ +export const NO_FACE_DETECTION_THRESHOLD = 5; + +/** + * Constant type used for signaling that no valid face detection is found. + */ +export const NO_DETECTION = 'no-detection'; diff --git a/react/features/face-landmarks/faceLandmarksWorker.ts b/react/features/face-landmarks/faceLandmarksWorker.ts index b94fbe17e..a71c7f005 100644 --- a/react/features/face-landmarks/faceLandmarksWorker.ts +++ b/react/features/face-landmarks/faceLandmarksWorker.ts @@ -12,10 +12,9 @@ onmessage = async function({ data }: MessageEvent) { const detections = await helper.detect(data); - if (detections && (detections.faceBox || detections.faceExpression || detections.faceCount)) { + if (detections) { self.postMessage(detections); } - break; } diff --git a/react/features/face-landmarks/functions.ts b/react/features/face-landmarks/functions.ts index 5602eb57c..cf2f5d6ae 100644 --- a/react/features/face-landmarks/functions.ts +++ b/react/features/face-landmarks/functions.ts @@ -1,40 +1,27 @@ import { IReduxState } from '../app/types'; +import { IJitsiConference } from '../base/conference/reducer'; import { getLocalParticipant } from '../base/participants/functions'; import { extractFqnFromPath } from '../dynamic-branding/functions.any'; -import { DETECT_FACE, FACE_BOX_EVENT_TYPE, SEND_IMAGE_INTERVAL_MS } from './constants'; +import { FACE_BOX_EVENT_TYPE, FACE_LANDMARKS_EVENT_TYPE, SEND_IMAGE_INTERVAL_MS } from './constants'; import logger from './logger'; -import { FaceBox } from './types'; - -let canvas: HTMLCanvasElement; -let context: CanvasRenderingContext2D | null; - -if (typeof OffscreenCanvas === 'undefined') { - canvas = document.createElement('canvas'); - context = canvas.getContext('2d'); -} +import { FaceBox, FaceLandmarks 
} from './types'; /** - * Sends the face expression with its duration to all the other participants. + * Sends the face landmarks to other participants via the data channel. * * @param {any} conference - The current conference. - * @param {string} faceExpression - Face expression to be sent. - * @param {number} duration - The duration of the face expression in seconds. + * @param {FaceLandmarks} faceLandmarks - Face landmarks to be sent. * @returns {void} */ -export function sendFaceExpressionToParticipants( - conference: any, - faceExpression: string, - duration: number -): void { +export function sendFaceExpressionToParticipants(conference: any, faceLandmarks: FaceLandmarks): void { try { conference.sendEndpointMessage('', { - type: 'face_landmark', - faceExpression, - duration + type: FACE_LANDMARKS_EVENT_TYPE, + faceLandmarks }); } catch (err) { - logger.warn('Could not broadcast the face expression to the other participants', err); + logger.warn('Could not broadcast the face landmarks to the other participants', err); } } @@ -61,30 +48,22 @@ export function sendFaceBoxToParticipants( } /** - * Sends the face expression with its duration to xmpp server. + * Sends the face landmarks to prosody. * * @param {any} conference - The current conference. - * @param {string} faceExpression - Face expression to be sent. - * @param {number} duration - The duration of the face expression in seconds. + * @param {FaceLandmarks} faceLandmarks - Face landmarks to be sent. 
* @returns {void} */ -export function sendFaceExpressionToServer( - conference: any, - faceExpression: string, - duration: number -): void { +export function sendFaceExpressionToServer(conference: IJitsiConference, faceLandmarks: FaceLandmarks): void { try { - conference.sendFaceLandmarks({ - faceExpression, - duration - }); + conference.sendFaceLandmarks(faceLandmarks); } catch (err) { - logger.warn('Could not send the face expression to xmpp server', err); + logger.warn('Could not send the face landmarks to prosody', err); } } /** - * Sends face expression to backend. + * Sends face landmarks to backend. * * @param {Object} state - Redux state. * @returns {boolean} - True if sent, false otherwise. @@ -96,9 +75,9 @@ export async function sendFaceExpressionsWebhook(state: IReduxState) { const { connection } = state['features/base/connection']; const jid = connection?.getJid(); const localParticipant = getLocalParticipant(state); - const { faceExpressionsBuffer } = state['features/face-landmarks']; + const { faceLandmarksBuffer } = state['features/face-landmarks']; - if (faceExpressionsBuffer.length === 0) { + if (faceLandmarksBuffer.length === 0) { return false; } @@ -111,7 +90,7 @@ export async function sendFaceExpressionsWebhook(state: IReduxState) { meetingFqn: extractFqnFromPath(), sessionId: conference?.sessionId, submitted: Date.now(), - emotions: faceExpressionsBuffer, + emotions: faceLandmarksBuffer, participantId: localParticipant?.jwtId, participantName: localParticipant?.name, participantJid: jid @@ -138,55 +117,6 @@ export async function sendFaceExpressionsWebhook(state: IReduxState) { } -/** - * Sends the image data a canvas from the track in the image capture to the face recognition worker. - * - * @param {Worker} worker - Face recognition worker. - * @param {Object} imageCapture - Image capture that contains the current track. - * @param {number} threshold - Movement threshold as percentage for sharing face coordinates. 
- * @returns {Promise} - True if sent, false otherwise. - */ -export async function sendDataToWorker( - worker: Worker, - imageCapture: ImageCapture, - threshold = 10 -): Promise { - if (imageCapture === null || imageCapture === undefined) { - return false; - } - - let imageBitmap; - let image; - - try { - imageBitmap = await imageCapture.grabFrame(); - } catch (err) { - logger.warn(err); - - return false; - } - - if (typeof OffscreenCanvas === 'undefined') { - canvas.width = imageBitmap.width; - canvas.height = imageBitmap.height; - context?.drawImage(imageBitmap, 0, 0); - - image = context?.getImageData(0, 0, imageBitmap.width, imageBitmap.height); - } else { - image = imageBitmap; - } - - worker.postMessage({ - type: DETECT_FACE, - image, - threshold - }); - - imageBitmap.close(); - - return true; -} - /** * Gets face box for a participant id. * @@ -230,14 +160,3 @@ export function getDetectionInterval(state: IReduxState) { return Math.max(faceLandmarks?.captureInterval || SEND_IMAGE_INTERVAL_MS); } - -/** - * Returns the duration in seconds of a face expression. - * - * @param {IReduxState} state - The redux state. - * @param {number} faceExpressionCount - The number of consecutive face expressions. - * @returns {number} - Duration of face expression in seconds. 
- */ -export function getFaceExpressionDuration(state: IReduxState, faceExpressionCount: number) { - return faceExpressionCount * (getDetectionInterval(state) / 1000); -} diff --git a/react/features/face-landmarks/middleware.ts b/react/features/face-landmarks/middleware.ts index 2ba9aa61f..72e8f8b8d 100644 --- a/react/features/face-landmarks/middleware.ts +++ b/react/features/face-landmarks/middleware.ts @@ -11,18 +11,15 @@ import MiddlewareRegistry from '../base/redux/MiddlewareRegistry'; import { TRACK_ADDED, TRACK_REMOVED, TRACK_UPDATED } from '../base/tracks/actionTypes'; import FaceLandmarksDetector from './FaceLandmarksDetector'; -import { ADD_FACE_EXPRESSION, NEW_FACE_COORDINATES, UPDATE_FACE_COORDINATES } from './actionTypes'; -import { - addToFaceExpressionsBuffer -} from './actions'; +import { ADD_FACE_LANDMARKS, NEW_FACE_COORDINATES, UPDATE_FACE_COORDINATES } from './actionTypes'; import { FACE_BOX_EVENT_TYPE } from './constants'; import { sendFaceBoxToParticipants, sendFaceExpressionToParticipants, sendFaceExpressionToServer } from './functions'; MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: any) => { const { dispatch, getState } = store; - const { faceLandmarks } = getState()['features/base/config']; - const isEnabled = faceLandmarks?.enableFaceCentering || faceLandmarks?.enableFaceExpressionsDetection; + const { faceLandmarks: faceLandmarksConfig } = getState()['features/base/config']; + const isEnabled = faceLandmarksConfig?.enableFaceCentering || faceLandmarksConfig?.enableFaceExpressionsDetection; if (action.type === CONFERENCE_JOINED) { if (isEnabled) { @@ -99,19 +96,16 @@ MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: any) return next(action); } - case ADD_FACE_EXPRESSION: { + case ADD_FACE_LANDMARKS: { const state = getState(); - const { faceExpression, duration, timestamp } = action; + const { faceLandmarks } = action; const conference = getCurrentConference(state); if 
(getParticipantCount(state) > 1) { - sendFaceExpressionToParticipants(conference, faceExpression, duration); + sendFaceExpressionToParticipants(conference, faceLandmarks); } - sendFaceExpressionToServer(conference, faceExpression, duration); - dispatch(addToFaceExpressionsBuffer({ - emotion: faceExpression, - timestamp - })); + + sendFaceExpressionToServer(conference, faceLandmarks); return next(action); } diff --git a/react/features/face-landmarks/reducer.ts b/react/features/face-landmarks/reducer.ts index 9514c7cfe..b9625a015 100644 --- a/react/features/face-landmarks/reducer.ts +++ b/react/features/face-landmarks/reducer.ts @@ -1,42 +1,25 @@ import ReducerRegistry from '../base/redux/ReducerRegistry'; import { - ADD_FACE_EXPRESSION, - ADD_TO_FACE_EXPRESSIONS_BUFFER, - CLEAR_FACE_EXPRESSIONS_BUFFER, + ADD_FACE_LANDMARKS, + CLEAR_FACE_LANDMARKS_BUFFER, UPDATE_FACE_COORDINATES } from './actionTypes'; -import { FaceBox } from './types'; +import { FaceBox, FaceLandmarks } from './types'; const defaultState = { faceBoxes: {}, - faceExpressions: { - happy: 0, - neutral: 0, - surprised: 0, - angry: 0, - fearful: 0, - disgusted: 0, - sad: 0 - }, - faceExpressionsBuffer: [], + faceLandmarks: [], + faceLandmarksBuffer: [], recognitionActive: false }; export interface IFaceLandmarksState { faceBoxes: { [key: string]: FaceBox; }; - faceExpressions: { - angry: number; - disgusted: number; - fearful: number; - happy: number; - neutral: number; - sad: number; - surprised: number; - }; - faceExpressionsBuffer: Array<{ + faceLandmarks: Array; + faceLandmarksBuffer: Array<{ emotion: string; - timestamp: string; + timestamp: number; }>; recognitionActive: boolean; } @@ -44,26 +27,23 @@ export interface IFaceLandmarksState { ReducerRegistry.register('features/face-landmarks', (state = defaultState, action): IFaceLandmarksState => { switch (action.type) { - case ADD_FACE_EXPRESSION: { + case ADD_FACE_LANDMARKS: { + const { addToBuffer, faceLandmarks }: { addToBuffer: boolean; 
faceLandmarks: FaceLandmarks; } = action; + return { ...state, - faceExpressions: { - ...state.faceExpressions, - [action.faceExpression]: state.faceExpressions[ - action.faceExpression as keyof typeof state.faceExpressions] + action.duration - } + faceLandmarks: [ ...state.faceLandmarks, faceLandmarks ], + faceLandmarksBuffer: addToBuffer ? [ ...state.faceLandmarksBuffer, + { + emotion: faceLandmarks.faceExpression, + timestamp: faceLandmarks.timestamp + } ] : state.faceLandmarksBuffer }; } - case ADD_TO_FACE_EXPRESSIONS_BUFFER: { + case CLEAR_FACE_LANDMARKS_BUFFER: { return { ...state, - faceExpressionsBuffer: [ ...state.faceExpressionsBuffer, action.faceExpression ] - }; - } - case CLEAR_FACE_EXPRESSIONS_BUFFER: { - return { - ...state, - faceExpressionsBuffer: [] + faceLandmarksBuffer: [] }; } case UPDATE_FACE_COORDINATES: { diff --git a/react/features/face-landmarks/types.ts b/react/features/face-landmarks/types.ts index 406815836..876754eb2 100644 --- a/react/features/face-landmarks/types.ts +++ b/react/features/face-landmarks/types.ts @@ -19,5 +19,21 @@ export type InitInput = { export type DetectOutput = { faceBox?: FaceBox; faceCount: number; - faceExpression?: string; + faceExpression?: FaceExpression; +}; + +export type FaceExpression = { + expression: string; + score: number; +}; + +export type FaceLandmarks = { + + // duration in milliseconds of the face landmarks + duration: number; + faceExpression: string; + score?: number; + + // the start timestamp of the expression + timestamp: number; }; diff --git a/react/features/rtcstats/middleware.ts b/react/features/rtcstats/middleware.ts index fbe0cf838..d7ca51dac 100644 --- a/react/features/rtcstats/middleware.ts +++ b/react/features/rtcstats/middleware.ts @@ -14,7 +14,8 @@ import MiddlewareRegistry from '../base/redux/MiddlewareRegistry'; import { TRACK_ADDED, TRACK_UPDATED } from '../base/tracks/actionTypes'; import { getCurrentRoomId, isInBreakoutRoom } from '../breakout-rooms/functions'; import { 
extractFqnFromPath } from '../dynamic-branding/functions.any'; -import { ADD_FACE_EXPRESSION } from '../face-landmarks/actionTypes'; +import { ADD_FACE_LANDMARKS } from '../face-landmarks/actionTypes'; +import { FaceLandmarks } from '../face-landmarks/types'; import RTCStats from './RTCStats'; import { @@ -164,17 +165,19 @@ MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: AnyA } break; } - case ADD_FACE_EXPRESSION: + case ADD_FACE_LANDMARKS: { if (canSendFaceLandmarksRtcstatsData(state)) { - const { duration, faceExpression, timestamp } = action; + const { duration, faceExpression, timestamp } = action.faceLandmarks as FaceLandmarks; + const durationSeconds = Math.round(duration / 1000); RTCStats.sendFaceLandmarksData({ - duration, + duration: durationSeconds, faceLandmarks: faceExpression, timestamp }); } break; + } case CONFERENCE_TIMESTAMP_CHANGED: { if (canSendRtcstatsData(state)) { const { conferenceTimestamp } = action; diff --git a/react/features/speaker-stats/actionTypes.ts b/react/features/speaker-stats/actionTypes.ts index 369bb83e9..ae6e48ff5 100644 --- a/react/features/speaker-stats/actionTypes.ts +++ b/react/features/speaker-stats/actionTypes.ts @@ -63,3 +63,20 @@ export const RESET_SEARCH_CRITERIA = 'RESET_SEARCH_CRITERIA' */ export const TOGGLE_FACE_EXPRESSIONS = 'SHOW_FACE_EXPRESSIONS'; + +export const INCREASE_ZOOM = 'INCREASE_ZOOM'; + +export const DECREASE_ZOOM = 'DECREASE_ZOOM'; + +export const ADD_TO_OFFSET = 'ADD_TO_OFFSET'; + +export const SET_OFFSET = 'RESET_OFFSET'; + +export const ADD_TO_OFFSET_LEFT = 'ADD_TO_OFFSET_LEFT'; + +export const ADD_TO_OFFSET_RIGHT = 'ADD_TO_OFFSET_RIGHT'; + +export const SET_TIMELINE_BOUNDARY = 'SET_TIMELINE_BOUNDARY'; + +export const SET_PANNING = 'SET_PANNING'; + diff --git a/react/features/speaker-stats/actions.any.ts b/react/features/speaker-stats/actions.any.ts new file mode 100644 index 000000000..3400f6273 --- /dev/null +++ b/react/features/speaker-stats/actions.any.ts @@ -0,0 
+1,231 @@ +import { IStore } from '../app/types'; + +import { + ADD_TO_OFFSET, + ADD_TO_OFFSET_LEFT, + ADD_TO_OFFSET_RIGHT, + INIT_REORDER_STATS, + INIT_SEARCH, + INIT_UPDATE_STATS, + RESET_SEARCH_CRITERIA, + SET_PANNING, + SET_TIMELINE_BOUNDARY, + TOGGLE_FACE_EXPRESSIONS, + UPDATE_SORTED_SPEAKER_STATS_IDS, + UPDATE_STATS +} from './actionTypes'; +import { MINIMUM_INTERVAL } from './constants'; +import { getCurrentDuration, getTimelineBoundaries } from './functions'; +import { ISpeakerStats } from './reducer'; + +/** + * Starts a search by criteria. + * + * @param {string} criteria - The search criteria. + * @returns {Object} + */ +export function initSearch(criteria: string) { + return { + type: INIT_SEARCH, + criteria + }; +} + +/** + * Gets the new stats and triggers update. + * + * @param {Function} getSpeakerStats - Function to get the speaker stats. + * @returns {Object} + */ +export function initUpdateStats(getSpeakerStats: () => ISpeakerStats) { + return { + type: INIT_UPDATE_STATS, + getSpeakerStats + }; +} + +/** + * Updates the stats with new stats. + * + * @param {Object} stats - The new stats. + * @returns {Object} + */ +export function updateStats(stats: Object) { + return { + type: UPDATE_STATS, + stats + }; +} + +/** + * Updates the speaker stats order. + * + * @param {Array} participantIds - Participant ids. + * @returns {Object} + */ +export function updateSortedSpeakerStatsIds(participantIds: Array) { + return { + type: UPDATE_SORTED_SPEAKER_STATS_IDS, + participantIds + }; +} + +/** + * Initiates reordering of the stats. + * + * @returns {Object} + */ +export function initReorderStats() { + return { + type: INIT_REORDER_STATS + }; +} + +/** + * Resets the search criteria. + * + * @returns {Object} + */ +export function resetSearchCriteria() { + return { + type: RESET_SEARCH_CRITERIA + }; +} + +/** + * Toggles the face expressions grid. 
+ * + * @returns {Object} + */ +export function toggleFaceExpressions() { + return { + type: TOGGLE_FACE_EXPRESSIONS + }; +} + +/** + * Adds a value to the boundary offset of the timeline. + * + * @param {number} value - The value to be added. + * @param {number} left - The left boundary. + * @param {number} right - The right boundary. + * @param {number} currentDuration - The currentDuration of the conference. + * @returns {Object} + */ +export function addToOffset(value: number) { + return (dispatch: IStore['dispatch'], getState: IStore['getState']) => { + const state = getState(); + const { left, right } = getTimelineBoundaries(state); + const currentDuration = getCurrentDuration(state) ?? 0; + const newLeft = left + value; + const newRight = right + value; + + if (newLeft >= 0 && newRight <= currentDuration) { + dispatch({ + type: ADD_TO_OFFSET, + value + }); + } else if (newLeft < 0) { + dispatch({ + type: ADD_TO_OFFSET, + value: -left + }); + } else if (newRight > currentDuration) { + dispatch({ + type: ADD_TO_OFFSET, + value: currentDuration - right + }); + } + }; +} + +/** + * Adds the value to the offset of the left boundary for the timeline. + * + * @param {number} value - The new value for the offset. + * @returns {Object} + */ +export function addToOffsetLeft(value: number) { + return (dispatch: IStore['dispatch'], getState: IStore['getState']) => { + const state = getState(); + const { left, right } = getTimelineBoundaries(state); + const newLeft = left + value; + + if (newLeft >= 0 && right - newLeft > MINIMUM_INTERVAL) { + dispatch({ + type: ADD_TO_OFFSET_LEFT, + value + }); + } else if (newLeft < 0) { + dispatch({ + type: ADD_TO_OFFSET_LEFT, + value: -left + }); + } + }; +} + +/** + * Adds the value to the offset of the right boundary for the timeline. + * + * @param {number} value - The new value for the offset. 
+ * @returns {Object} + */ +export function addToOffsetRight(value: number) { + return (dispatch: IStore['dispatch'], getState: IStore['getState']) => { + const state = getState(); + const { left, right } = getTimelineBoundaries(state); + const currentDuration = getCurrentDuration(state) ?? 0; + const newRight = right + value; + + if (newRight <= currentDuration && newRight - left > MINIMUM_INTERVAL) { + dispatch({ + type: ADD_TO_OFFSET_RIGHT, + value + }); + } else if (newRight > currentDuration) { + dispatch({ + type: ADD_TO_OFFSET_RIGHT, + value: currentDuration - right + }); + } + }; +} + +/** + * Sets the current time boundary of the timeline, when zoomed in. + * + * @param {number} boundary - The current time boundary. + * @returns {Object} + */ +export function setTimelineBoundary(boundary: number) { + return { + type: SET_TIMELINE_BOUNDARY, + boundary + }; +} + +/** + * Clears the current time boundary of the timeline, when zoomed out full. + * + * @returns {Object} + */ +export function clearTimelineBoundary() { + return { + type: SET_TIMELINE_BOUNDARY, + boundary: null + }; +} + +/** + * Sets the state of the timeline panning. + * + * @param {Object} panning - The state of the timeline panning. 
+ * @returns {Object} + */ +export function setTimelinePanning(panning: { active: boolean; x: number; }) { + return { + type: SET_PANNING, + panning + }; +} diff --git a/react/features/speaker-stats/actions.native.ts b/react/features/speaker-stats/actions.native.ts new file mode 100644 index 000000000..02b37d475 --- /dev/null +++ b/react/features/speaker-stats/actions.native.ts @@ -0,0 +1 @@ +export * from './actions.any'; diff --git a/react/features/speaker-stats/actions.ts b/react/features/speaker-stats/actions.ts deleted file mode 100644 index af30f8e71..000000000 --- a/react/features/speaker-stats/actions.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { - INIT_REORDER_STATS, - INIT_SEARCH, - INIT_UPDATE_STATS, - RESET_SEARCH_CRITERIA, - TOGGLE_FACE_EXPRESSIONS, - UPDATE_SORTED_SPEAKER_STATS_IDS, - UPDATE_STATS -} from './actionTypes'; - -/** - * Starts a search by criteria. - * - * @param {string | null} criteria - The search criteria. - * @returns {Object} - */ -export function initSearch(criteria: string | null) { - return { - type: INIT_SEARCH, - criteria - }; -} - -/** - * Gets the new stats and triggers update. - * - * @param {Function} getSpeakerStats - Function to get the speaker stats. - * @returns {Object} - */ -export function initUpdateStats(getSpeakerStats: Function) { - return { - type: INIT_UPDATE_STATS, - getSpeakerStats - }; -} - -/** - * Updates the stats with new stats. - * - * @param {Object} stats - The new stats. - * @returns {Object} - */ -export function updateStats(stats: Object) { - return { - type: UPDATE_STATS, - stats - }; -} - -/** - * Updates the speaker stats order. - * - * @param {Object} participantIds - Participant ids. - * @returns {Object} - */ -export function updateSortedSpeakerStatsIds(participantIds?: Array) { - return { - type: UPDATE_SORTED_SPEAKER_STATS_IDS, - participantIds - }; -} - -/** - * Initiates reordering of the stats. 
- * - * @returns {Object} - */ -export function initReorderStats() { - return { - type: INIT_REORDER_STATS - }; -} - -/** - * Resets the search criteria. - * - * @returns {Object} - */ -export function resetSearchCriteria() { - return { - type: RESET_SEARCH_CRITERIA - }; -} - -/** - * Toggles the face expressions grid. - * - * @returns {Object} - */ -export function toggleFaceExpressions() { - return { - type: TOGGLE_FACE_EXPRESSIONS - }; -} diff --git a/react/features/speaker-stats/actions.web.ts b/react/features/speaker-stats/actions.web.ts new file mode 100644 index 000000000..02b37d475 --- /dev/null +++ b/react/features/speaker-stats/actions.web.ts @@ -0,0 +1 @@ +export * from './actions.any'; diff --git a/react/features/speaker-stats/components/AbstractSpeakerStatsButton.js b/react/features/speaker-stats/components/AbstractSpeakerStatsButton.tsx similarity index 52% rename from react/features/speaker-stats/components/AbstractSpeakerStatsButton.js rename to react/features/speaker-stats/components/AbstractSpeakerStatsButton.tsx index d73cfff00..cd1ee5731 100644 --- a/react/features/speaker-stats/components/AbstractSpeakerStatsButton.js +++ b/react/features/speaker-stats/components/AbstractSpeakerStatsButton.tsx @@ -1,24 +1,22 @@ -// @flow - -import type { Dispatch } from 'redux'; - -import { IconConnection } from '../../base/icons'; -import { AbstractButton } from '../../base/toolbox/components'; -import type { AbstractButtonProps } from '../../base/toolbox/components'; +import { IStore } from '../../app/types'; +import { IconConnection } from '../../base/icons/svg'; +// eslint-disable-next-line lines-around-comment +// @ts-ignore +import { AbstractButton, type AbstractButtonProps } from '../../base/toolbox/components'; type Props = AbstractButtonProps & { /** * True if the navigation bar should be visible. */ - dispatch: Dispatch + dispatch: IStore['dispatch']; }; /** * Implementation of a button for opening speaker stats dialog. 
*/ -class AbstractSpeakerStatsButton extends AbstractButton { +class AbstractSpeakerStatsButton extends AbstractButton { accessibilityLabel = 'toolbar.accessibilityLabel.speakerStats'; icon = IconConnection; label = 'toolbar.speakerStats'; diff --git a/react/features/speaker-stats/components/AbstractSpeakerStatsList.js b/react/features/speaker-stats/components/AbstractSpeakerStatsList.ts similarity index 54% rename from react/features/speaker-stats/components/AbstractSpeakerStatsList.js rename to react/features/speaker-stats/components/AbstractSpeakerStatsList.ts index 31e11e005..eb1282fae 100644 --- a/react/features/speaker-stats/components/AbstractSpeakerStatsList.js +++ b/react/features/speaker-stats/components/AbstractSpeakerStatsList.ts @@ -1,11 +1,10 @@ -// @flow - import { useCallback, useEffect, useRef } from 'react'; import { useTranslation } from 'react-i18next'; import { useDispatch, useSelector } from 'react-redux'; -import { getLocalParticipant } from '../../base/participants'; -import { initUpdateStats } from '../actions'; +import { IReduxState } from '../../app/types'; +import { getLocalParticipant } from '../../base/participants/functions'; +import { initUpdateStats } from '../actions.any'; import { SPEAKER_STATS_RELOAD_INTERVAL } from '../constants'; @@ -17,21 +16,22 @@ import { * @param {Object} itemStyles - Styles for the speaker stats item. 
* @returns {Function} */ -const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Object): Function[] => { +const abstractSpeakerStatsList = (speakerStatsItem: Function): Function[] => { const dispatch = useDispatch(); const { t } = useTranslation(); - const conference = useSelector(state => state['features/base/conference'].conference); + const { conference } = useSelector((state: IReduxState) => state['features/base/conference']); const { stats: speakerStats, showFaceExpressions, sortedSpeakerStatsIds - } = useSelector(state => state['features/speaker-stats']); + } = useSelector((state: IReduxState) => state['features/speaker-stats']); const localParticipant = useSelector(getLocalParticipant); const { defaultRemoteDisplayName } = useSelector( - state => state['features/base/config']) || {}; - const { faceLandmarks } = useSelector(state => state['features/base/config']) || {}; - const { faceExpressions } = useSelector(state => state['features/face-landmarks']) || {}; - const reloadInterval = useRef(null); + (state: IReduxState) => state['features/base/config']) || {}; + const { faceLandmarks: faceLandmarksConfig } = useSelector((state: IReduxState) => + state['features/base/config']) || {}; + const { faceLandmarks } = useSelector((state: IReduxState) => state['features/face-landmarks']) || {}; + const reloadInterval = useRef(); /** * Update the internal state with the latest speaker stats. @@ -40,7 +40,7 @@ const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Objec * @private */ const getSpeakerStats = useCallback(() => { - const stats = conference.getSpeakerStats(); + const stats = conference?.getSpeakerStats(); for (const userId in stats) { if (stats[userId]) { @@ -48,40 +48,42 @@ const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Objec const meString = t('me'); stats[userId].setDisplayName( - localParticipant.name + localParticipant?.name ? 
`${localParticipant.name} (${meString})` : meString ); - if (faceLandmarks?.enableDisplayFaceExpressions) { - stats[userId].setFaceExpressions(faceExpressions); + + if (faceLandmarksConfig?.enableDisplayFaceExpressions) { + stats[userId].setFaceLandmarks(faceLandmarks); } } if (!stats[userId].getDisplayName()) { stats[userId].setDisplayName( - conference.getParticipantById(userId)?.name + conference?.getParticipantById(userId)?.name ); } } } - return stats; - }, [ faceExpressions ]); + return stats ?? {}; + }, [ faceLandmarks ]); const updateStats = useCallback( () => dispatch(initUpdateStats(getSpeakerStats)), [ dispatch, initUpdateStats, getSpeakerStats ]); useEffect(() => { - if (reloadInterval.current) { - clearInterval(reloadInterval.current); - } - reloadInterval.current = setInterval(() => { + reloadInterval.current = window.setInterval(() => { updateStats(); }, SPEAKER_STATS_RELOAD_INTERVAL); - return () => clearInterval(reloadInterval.current); - }, [ faceExpressions ]); + return () => { + if (reloadInterval.current) { + clearInterval(reloadInterval.current); + } + }; + }, [ faceLandmarks ]); const localSpeakerStats = Object.keys(speakerStats).length === 0 ? 
getSpeakerStats() : speakerStats; const localSortedSpeakerStatsIds @@ -91,22 +93,17 @@ const abstractSpeakerStatsList = (speakerStatsItem: Function, itemStyles?: Objec return userIds.map(userId => { const statsModel = localSpeakerStats[userId]; - const props = {}; - - props.isDominantSpeaker = statsModel.isDominantSpeaker(); - props.dominantSpeakerTime = statsModel.getTotalDominantSpeakerTime(); - props.participantId = userId; - props.hasLeft = statsModel.hasLeft(); - if (showFaceExpressions) { - props.faceExpressions = statsModel.getFaceExpressions(); - } - props.hidden = statsModel.hidden; - props.showFaceExpressions = showFaceExpressions; - props.displayName = statsModel.getDisplayName() || defaultRemoteDisplayName; - if (itemStyles) { - props.styles = itemStyles; - } - props.t = t; + const props = { + isDominantSpeaker: statsModel.isDominantSpeaker(), + dominantSpeakerTime: statsModel.getTotalDominantSpeakerTime(), + participantId: userId, + hasLeft: statsModel.hasLeft(), + faceLandmarks: showFaceExpressions ? 
statsModel.getFaceLandmarks() : undefined, + hidden: statsModel.hidden, + showFaceExpressions, + displayName: statsModel.getDisplayName() || defaultRemoteDisplayName, + t + }; return speakerStatsItem(props); }); diff --git a/react/features/speaker-stats/components/_.native.js b/react/features/speaker-stats/components/_.native.ts similarity index 65% rename from react/features/speaker-stats/components/_.native.js rename to react/features/speaker-stats/components/_.native.ts index 738c4d2b8..88a747325 100644 --- a/react/features/speaker-stats/components/_.native.js +++ b/react/features/speaker-stats/components/_.native.ts @@ -1 +1,2 @@ +// @ts-ignore export * from './native'; diff --git a/react/features/speaker-stats/components/_.web.js b/react/features/speaker-stats/components/_.web.ts similarity index 100% rename from react/features/speaker-stats/components/_.web.js rename to react/features/speaker-stats/components/_.web.ts diff --git a/react/features/speaker-stats/components/index.js b/react/features/speaker-stats/components/index.ts similarity index 60% rename from react/features/speaker-stats/components/index.js rename to react/features/speaker-stats/components/index.ts index cda61441e..796480f8e 100644 --- a/react/features/speaker-stats/components/index.js +++ b/react/features/speaker-stats/components/index.ts @@ -1 +1,2 @@ +// @ts-ignore export * from './_'; diff --git a/react/features/speaker-stats/components/timeFunctions.js b/react/features/speaker-stats/components/timeFunctions.ts similarity index 90% rename from react/features/speaker-stats/components/timeFunctions.js rename to react/features/speaker-stats/components/timeFunctions.ts index 2b2ce7b1f..a6f933833 100644 --- a/react/features/speaker-stats/components/timeFunctions.js +++ b/react/features/speaker-stats/components/timeFunctions.ts @@ -7,7 +7,7 @@ * @private * @returns {number} */ -function getHoursCount(milliseconds) { +function getHoursCount(milliseconds: number) { return 
Math.floor(milliseconds / (60 * 60 * 1000)); } @@ -18,7 +18,7 @@ function getHoursCount(milliseconds) { * @private * @returns {number} */ -function getMinutesCount(milliseconds) { +function getMinutesCount(milliseconds: number) { return Math.floor(milliseconds / (60 * 1000) % 60); } @@ -29,7 +29,7 @@ function getMinutesCount(milliseconds) { * @private * @returns {number} */ -function getSecondsCount(milliseconds) { +function getSecondsCount(milliseconds: number) { return Math.floor(milliseconds / 1000 % 60); } @@ -85,6 +85,6 @@ export function createLocalizedTime(time: number, t: Function) { * key for react to iterate upon. * @returns {string} */ -function createTimeDisplay(count, countNounKey, t) { +function createTimeDisplay(count: number, countNounKey: string, t: Function) { return t(countNounKey, { count }); } diff --git a/react/features/speaker-stats/components/web/SpeakerStats.tsx b/react/features/speaker-stats/components/web/SpeakerStats.tsx index d865a40fb..3cc4fef57 100644 --- a/react/features/speaker-stats/components/web/SpeakerStats.tsx +++ b/react/features/speaker-stats/components/web/SpeakerStats.tsx @@ -1,15 +1,28 @@ import React, { useCallback, useEffect } from 'react'; +import { useTranslation } from 'react-i18next'; import { useDispatch, useSelector } from 'react-redux'; import { makeStyles } from 'tss-react/mui'; import { IReduxState } from '../../../app/types'; +import Icon from '../../../base/icons/components/Icon'; +import { + IconEmotionsAngry, + IconEmotionsDisgusted, + IconEmotionsFearful, + IconEmotionsHappy, + IconEmotionsNeutral, + IconEmotionsSad, + IconEmotionsSurprised +} from '../../../base/icons/svg'; +// eslint-disable-next-line lines-around-comment +// @ts-ignore +import { Tooltip } from '../../../base/tooltip'; import Dialog from '../../../base/ui/components/web/Dialog'; import { escapeRegexp } from '../../../base/util/helpers'; -import { initSearch, resetSearchCriteria, toggleFaceExpressions } from '../../actions'; +import { 
initSearch, resetSearchCriteria, toggleFaceExpressions } from '../../actions.any'; import { DISPLAY_SWITCH_BREAKPOINT, - MOBILE_BREAKPOINT, - RESIZE_SEARCH_SWITCH_CONTAINER_BREAKPOINT + MOBILE_BREAKPOINT } from '../../constants'; import FaceExpressionsSwitch from './FaceExpressionsSwitch'; @@ -20,69 +33,171 @@ import SpeakerStatsSearch from './SpeakerStatsSearch'; const useStyles = makeStyles()(theme => { return { speakerStats: { + '& .header': { + position: 'fixed', + backgroundColor: theme.palette.ui01, + paddingLeft: theme.spacing(4), + paddingRight: theme.spacing(4), + marginLeft: `-${theme.spacing(4)}`, + '&.large': { + width: '616px' + }, + '&.medium': { + width: '352px' + }, + '@media (max-width: 448px)': { + width: 'calc(100% - 48px) !important' + }, + '& .upper-header': { + display: 'flex', + justifyContent: 'space-between', + alignItems: 'center', + width: '100%', + '& .search-switch-container': { + display: 'flex', + width: '100%', + '& .search-container': { + width: 175, + marginRight: theme.spacing(3) + }, + '& .search-container-full-width': { + width: '100%' + } + }, + '& .emotions-icons': { + display: 'flex', + '& svg': { + fill: '#000' + }, + '&>div': { + marginRight: theme.spacing(3) + }, + '&>div:last-child': { + marginRight: 0 + } + } + } + }, '& .row': { display: 'flex', alignItems: 'center', - - '& .avatar': { - width: '32px', - marginRight: theme.spacing(3) - }, - '& .name-time': { width: 'calc(100% - 48px)', display: 'flex', justifyContent: 'space-between', - alignItems: 'center' + alignItems: 'center', + '&.expressions-on': { + width: 'calc(47% - 48px)', + marginRight: theme.spacing(4) + } }, - - '& .name-time_expressions-on': { - width: 'calc(47% - 48px)' - }, - - '& .expressions': { - width: 'calc(53% - 29px)', + '& .timeline-container': { + height: '100%', + width: `calc(53% - ${theme.spacing(4)})`, display: 'flex', - justifyContent: 'space-between', - - '& .expression': { - width: '30px', - textAlign: 'center' + alignItems: 'center', + 
borderLeftWidth: 1, + borderLeftColor: theme.palette.ui02, + borderLeftStyle: 'solid', + '& .timeline': { + height: theme.spacing(2), + display: 'flex', + width: '100%', + '&>div': { + marginRight: theme.spacing(1), + borderRadius: 5 + }, + '&>div:first-child': { + borderRadius: '0 5px 5px 0' + }, + '&>div:last-child': { + marginRight: 0, + borderRadius: '5px 0 0 5px' + } + } + }, + '& .axis-container': { + height: '100%', + width: `calc(53% - ${theme.spacing(6)})`, + display: 'flex', + alignItems: 'center', + marginLeft: theme.spacing(3), + '& div': { + borderRadius: 5 + }, + '& .axis': { + height: theme.spacing(1), + display: 'flex', + width: '100%', + backgroundColor: theme.palette.ui03, + position: 'relative', + '& .left-bound': { + position: 'absolute', + bottom: 10, + left: 0 + }, + '& .right-bound': { + position: 'absolute', + bottom: 10, + right: 0 + }, + '& .handler': { + position: 'absolute', + backgroundColor: theme.palette.ui09, + height: 12, + marginTop: -4, + display: 'flex', + justifyContent: 'space-between', + '& .resize': { + height: '100%', + width: 5, + cursor: 'col-resize' + } + } } } + }, + '& .separator': { + width: 'calc(100% + 48px)', + height: 1, + marginLeft: -24, + backgroundColor: theme.palette.ui02 } - }, - labelsContainer: { - position: 'relative' - }, - separator: { - position: 'absolute', - width: 'calc(100% + 48px)', - height: 1, - left: -24, - backgroundColor: theme.palette.ui05 - }, - searchSwitchContainer: { - display: 'flex', - justifyContent: 'space-between', - alignItems: 'center', - width: '100%' - }, - searchSwitchContainerExpressionsOn: { - width: '58.5%', - [theme.breakpoints.down(RESIZE_SEARCH_SWITCH_CONTAINER_BREAKPOINT)]: { - width: '100%' - } - }, - searchContainer: { - width: '50%' - }, - searchContainerFullWidth: { - width: '100%' } }; }); +const EMOTIONS_LEGEND = [ + { + translationKey: 'speakerStats.neutral', + icon: IconEmotionsNeutral + }, + { + translationKey: 'speakerStats.happy', + icon: IconEmotionsHappy + }, 
+ { + translationKey: 'speakerStats.surprised', + icon: IconEmotionsSurprised + }, + { + translationKey: 'speakerStats.sad', + icon: IconEmotionsSad + }, + { + translationKey: 'speakerStats.fearful', + icon: IconEmotionsFearful + }, + { + translationKey: 'speakerStats.angry', + icon: IconEmotionsAngry + }, + { + translationKey: 'speakerStats.disgusted', + icon: IconEmotionsDisgusted + } +]; + const SpeakerStats = () => { const { faceLandmarks } = useSelector((state: IReduxState) => state['features/base/config']); const { showFaceExpressions } = useSelector((state: IReduxState) => state['features/speaker-stats']); @@ -91,6 +206,7 @@ const SpeakerStats = () => { const displayLabels = clientWidth > MOBILE_BREAKPOINT; const dispatch = useDispatch(); const { classes } = useStyles(); + const { t } = useTranslation(); const onToggleFaceExpressions = useCallback(() => dispatch(toggleFaceExpressions()) @@ -104,9 +220,9 @@ const SpeakerStats = () => { useEffect(() => { showFaceExpressions && !displaySwitch && dispatch(toggleFaceExpressions()); }, [ clientWidth ]); - useEffect(() => () => { - dispatch(resetSearchCriteria()); - }, []); + + // @ts-ignore + useEffect(() => () => dispatch(resetSearchCriteria()), []); return ( { size = { showFaceExpressions ? 'large' : 'medium' } titleKey = 'speakerStats.speakerStats'>
-
-
- -
+
+
+
+
+ +
- { displaySwitch + { displaySwitch && - } -
- { displayLabels && ( -
+ + } +
+ { showFaceExpressions &&
+ { + EMOTIONS_LEGEND.map(emotion => ( + + + + )) + } +
} +
+ { displayLabels && ( -
-
- )} + )} +
diff --git a/react/features/speaker-stats/components/web/SpeakerStatsButton.js b/react/features/speaker-stats/components/web/SpeakerStatsButton.tsx similarity index 60% rename from react/features/speaker-stats/components/web/SpeakerStatsButton.js rename to react/features/speaker-stats/components/web/SpeakerStatsButton.tsx index ae4eb3532..df266727d 100644 --- a/react/features/speaker-stats/components/web/SpeakerStatsButton.js +++ b/react/features/speaker-stats/components/web/SpeakerStatsButton.tsx @@ -1,12 +1,12 @@ -// @flow - -import { createToolbarEvent, sendAnalytics } from '../../../analytics'; -import { openDialog } from '../../../base/dialog'; -import { translate } from '../../../base/i18n'; -import { connect } from '../../../base/redux'; +import { createToolbarEvent } from '../../../analytics/AnalyticsEvents'; +import { sendAnalytics } from '../../../analytics/functions'; +import { openDialog } from '../../../base/dialog/actions'; +import { translate } from '../../../base/i18n/functions'; +import { connect } from '../../../base/redux/functions'; import AbstractSpeakerStatsButton from '../AbstractSpeakerStatsButton'; -import { SpeakerStats } from './'; +import SpeakerStats from './SpeakerStats'; + /** * Implementation of a button for opening speaker stats dialog. 
@@ -20,6 +20,7 @@ class SpeakerStatsButton extends AbstractSpeakerStatsButton { * @returns {void} */ _handleClick() { + // @ts-ignore const { dispatch } = this.props; sendAnalytics(createToolbarEvent('speaker.stats')); @@ -27,4 +28,5 @@ class SpeakerStatsButton extends AbstractSpeakerStatsButton { } } +// @ts-ignore export default translate(connect()(SpeakerStatsButton)); diff --git a/react/features/speaker-stats/components/web/SpeakerStatsItem.js b/react/features/speaker-stats/components/web/SpeakerStatsItem.js deleted file mode 100644 index 3640bfac3..000000000 --- a/react/features/speaker-stats/components/web/SpeakerStatsItem.js +++ /dev/null @@ -1,136 +0,0 @@ -/* @flow */ - -import React from 'react'; - -import { Avatar, StatelessAvatar } from '../../../base/avatar'; -import { getInitials } from '../../../base/avatar/functions'; -import BaseTheme from '../../../base/ui/components/BaseTheme'; -import { FACE_EXPRESSIONS } from '../../../face-landmarks/constants'; - -import TimeElapsed from './TimeElapsed'; - -/** - * The type of the React {@code Component} props of {@link SpeakerStatsItem}. - */ -type Props = { - - /** - * The name of the participant. - */ - displayName: string, - - /** - * The object that has as keys the face expressions of the - * participant and as values a number that represents the count . - */ - faceExpressions: Object, - - /** - * True if the face expressions detection is not disabled. - */ - showFaceExpressions: boolean, - - /** - * The total milliseconds the participant has been dominant speaker. - */ - dominantSpeakerTime: number, - - /** - * The id of the user. - */ - participantId: string, - - /** - * True if the participant is no longer in the meeting. - */ - hasLeft: boolean, - - /** - * True if the participant is not shown in speaker stats. - */ - hidden: boolean, - - /** - * True if the participant is currently the dominant speaker. - */ - isDominantSpeaker: boolean, - - /** - * Styles for the item. 
- */ - styles: Object, - - /** - * Invoked to obtain translated strings. - */ - t: Function -} - -const SpeakerStatsItem = (props: Props) => { - const hasLeftClass = props.hasLeft ? props.styles.hasLeft : ''; - const rowDisplayClass = `row ${hasLeftClass} ${props.styles.item}`; - const expressionClass = 'expression'; - const nameTimeClass = `name-time${ - props.showFaceExpressions ? ' name-time_expressions-on' : '' - }`; - const timeClass = `${props.styles.time} ${props.isDominantSpeaker ? props.styles.dominant : ''}`; - - - const FaceExpressions = () => FACE_EXPRESSIONS.map( - expression => ( -
- { props.faceExpressions[expression] } -
- ) - ); - - return ( -
-
- { - props.hasLeft ? ( - - ) : ( - - ) - } -
-
-
- { props.displayName } -
-
- -
-
- { props.showFaceExpressions - && ( -
- -
- )} -
- ); -}; - -export default SpeakerStatsItem; diff --git a/react/features/speaker-stats/components/web/SpeakerStatsItem.tsx b/react/features/speaker-stats/components/web/SpeakerStatsItem.tsx new file mode 100644 index 000000000..2357d5be0 --- /dev/null +++ b/react/features/speaker-stats/components/web/SpeakerStatsItem.tsx @@ -0,0 +1,115 @@ +// eslint-disable-next-line lines-around-comment +import React from 'react'; + +// @ts-ignore +import Avatar from '../../../base/avatar/components/Avatar'; +import StatelessAvatar from '../../../base/avatar/components/web/StatelessAvatar'; +import { getInitials } from '../../../base/avatar/functions'; +import BaseTheme from '../../../base/ui/components/BaseTheme.web'; +import { FaceLandmarks } from '../../../face-landmarks/types'; + +import TimeElapsed from './TimeElapsed'; +import Timeline from './Timeline'; + +/** + * The type of the React {@code Component} props of {@link SpeakerStatsItem}. + */ +type Props = { + + /** + * The name of the participant. + */ + displayName: string; + + /** + * The total milliseconds the participant has been dominant speaker. + */ + dominantSpeakerTime: number; + + /** + * The object that has as keys the face expressions of the + * participant and as values a number that represents the count . + */ + faceLandmarks?: FaceLandmarks[]; + + /** + * True if the participant is no longer in the meeting. + */ + hasLeft: boolean; + + /** + * True if the participant is not shown in speaker stats. + */ + hidden: boolean; + + /** + * True if the participant is currently the dominant speaker. + */ + isDominantSpeaker: boolean; + + /** + * The id of the user. + */ + participantId: string; + + /** + * True if the face expressions detection is not disabled. + */ + showFaceExpressions: boolean; + + /** + * Invoked to obtain translated strings. + */ + t: Function; +}; + +const SpeakerStatsItem = (props: Props) => { + const rowDisplayClass = `row item ${props.hasLeft ? 
'has-left' : ''}`; + const nameTimeClass = `name-time${ + props.showFaceExpressions ? ' expressions-on' : '' + }`; + const timeClass = `time ${props.isDominantSpeaker ? 'dominant' : ''}`; + + return ( +
+
+
+ { + props.hasLeft ? ( + + ) : ( + + ) + } +
+
+
+ { props.displayName } +
+
+ +
+
+ { props.showFaceExpressions + && + } + +
+
+
+ ); +}; + +export default SpeakerStatsItem; diff --git a/react/features/speaker-stats/components/web/SpeakerStatsLabels.tsx b/react/features/speaker-stats/components/web/SpeakerStatsLabels.tsx index b19c9db95..2732b0003 100644 --- a/react/features/speaker-stats/components/web/SpeakerStatsLabels.tsx +++ b/react/features/speaker-stats/components/web/SpeakerStatsLabels.tsx @@ -2,21 +2,18 @@ import React from 'react'; import { useTranslation } from 'react-i18next'; import { makeStyles } from 'tss-react/mui'; -import { withPixelLineHeight } from '../../../base/styles/functions.web'; -// eslint-disable-next-line lines-around-comment -// @ts-ignore -import { Tooltip } from '../../../base/tooltip'; -import { FACE_EXPRESSIONS_EMOJIS } from '../../../face-landmarks/constants'; +import TimelineAxis from './TimelineAxis'; const useStyles = makeStyles()(theme => { return { labels: { padding: '22px 0 7px 0', - height: 20 - }, - emojis: { - paddingLeft: 27, - ...withPixelLineHeight(theme.typography.bodyShortRegularLarge) + height: 20, + '& .avatar-placeholder': { + width: '32px', + marginRight: theme.spacing(3) + + } } }; }); @@ -36,12 +33,12 @@ const SpeakerStatsLabels = (props: IProps) => { const { t } = useTranslation(); const { classes } = useStyles(); const nameTimeClass = `name-time${ - props.showFaceExpressions ? ' name-time_expressions-on' : '' + props.showFaceExpressions ? ' expressions-on' : '' }`; return (
-
+
@@ -51,27 +48,7 @@ const SpeakerStatsLabels = (props: IProps) => { { t('speakerStats.speakerTime') }
- { - props.showFaceExpressions - &&
- {Object.keys(FACE_EXPRESSIONS_EMOJIS).map( - expression => ( -
- -
- {FACE_EXPRESSIONS_EMOJIS[expression as keyof typeof FACE_EXPRESSIONS_EMOJIS]} -
- -
-
- ) - )} -
- } + {props.showFaceExpressions && }
); }; diff --git a/react/features/speaker-stats/components/web/SpeakerStatsList.tsx b/react/features/speaker-stats/components/web/SpeakerStatsList.tsx index 5b89f5872..bd7d5b2db 100644 --- a/react/features/speaker-stats/components/web/SpeakerStatsList.tsx +++ b/react/features/speaker-stats/components/web/SpeakerStatsList.tsx @@ -13,40 +13,40 @@ import SpeakerStatsItem from './SpeakerStatsItem'; const useStyles = makeStyles()(theme => { return { list: { - marginTop: theme.spacing(3), - marginBottom: theme.spacing(3) - }, - item: { - height: theme.spacing(7), - [theme.breakpoints.down(MOBILE_BREAKPOINT)]: { - height: theme.spacing(8) + paddingTop: 90, + '& .item': { + height: theme.spacing(7), + [theme.breakpoints.down(MOBILE_BREAKPOINT)]: { + height: theme.spacing(8) + }, + '& .has-left': { + color: theme.palette.text03 + }, + '& .avatar': { + width: '32px', + marginRight: theme.spacing(3), + height: theme.spacing(5) + }, + '& .time': { + padding: '2px 4px', + borderRadius: '4px', + ...withPixelLineHeight(theme.typography.labelBold), + [theme.breakpoints.down(MOBILE_BREAKPOINT)]: { + ...withPixelLineHeight(theme.typography.bodyShortRegularLarge) + }, + backgroundColor: theme.palette.ui02 + }, + '& .display-name': { + ...withPixelLineHeight(theme.typography.bodyShortRegular), + [theme.breakpoints.down(MOBILE_BREAKPOINT)]: { + ...withPixelLineHeight(theme.typography.bodyShortRegularLarge) + } + }, + '& .dominant': { + backgroundColor: theme.palette.success02 + } } - }, - avatar: { - height: theme.spacing(5) - }, - expressions: { - paddingLeft: 29 - }, - hasLeft: { - color: theme.palette.text03 - }, - displayName: { - ...withPixelLineHeight(theme.typography.bodyShortRegular), - [theme.breakpoints.down(MOBILE_BREAKPOINT)]: { - ...withPixelLineHeight(theme.typography.bodyShortRegularLarge) - } - }, - time: { - padding: '2px 4px', - borderRadius: '4px', - ...withPixelLineHeight(theme.typography.labelBold), - [theme.breakpoints.down(MOBILE_BREAKPOINT)]: { - 
...withPixelLineHeight(theme.typography.bodyShortRegularLarge) - } - }, - dominant: { - backgroundColor: theme.palette.success02 + } }; }); @@ -58,10 +58,11 @@ const useStyles = makeStyles()(theme => { */ const SpeakerStatsList = () => { const { classes } = useStyles(); - const items = abstractSpeakerStatsList(SpeakerStatsItem, classes); + const items = abstractSpeakerStatsList(SpeakerStatsItem); return (
+
{items}
); diff --git a/react/features/speaker-stats/components/web/TimeElapsed.js b/react/features/speaker-stats/components/web/TimeElapsed.js deleted file mode 100644 index ce17c5354..000000000 --- a/react/features/speaker-stats/components/web/TimeElapsed.js +++ /dev/null @@ -1,50 +0,0 @@ -/* @flow */ - -import React, { Component } from 'react'; - -import { translate } from '../../../base/i18n'; -import { createLocalizedTime } from '../timeFunctions'; - -/** - * The type of the React {@code Component} props of {@link TimeElapsed}. - */ -type Props = { - - /** - * The function to translate human-readable text. - */ - t: Function, - - /** - * The milliseconds to be converted into a human-readable format. - */ - time: number -}; - -/** - * React component for displaying total time elapsed. Converts a total count of - * milliseconds into a more humanized form: "# hours, # minutes, # seconds". - * With a time of 0, "0s" will be displayed. - * - * @augments Component - */ -class TimeElapsed extends Component { - /** - * Implements React's {@link Component#render()}. - * - * @inheritdoc - * @returns {ReactElement} - */ - render() { - const { time, t } = this.props; - const timeElapsed = createLocalizedTime(time, t); - - return ( -
- { timeElapsed } -
- ); - } -} - -export default translate(TimeElapsed); diff --git a/react/features/speaker-stats/components/web/TimeElapsed.tsx b/react/features/speaker-stats/components/web/TimeElapsed.tsx new file mode 100644 index 000000000..94b364690 --- /dev/null +++ b/react/features/speaker-stats/components/web/TimeElapsed.tsx @@ -0,0 +1,36 @@ +import React from 'react'; +import { useTranslation } from 'react-i18next'; + +import { createLocalizedTime } from '../timeFunctions'; + +/** + * The type of the React {@code Component} props of {@link TimeElapsed}. + */ +type Props = { + + /** + * The milliseconds to be converted into a human-readable format. + */ + time: number; +}; + +/** + * React component for displaying total time elapsed. Converts a total count of + * milliseconds into a more humanized form: "# hours, # minutes, # seconds". + * With a time of 0, "0s" will be displayed. + * + * @augments Component + */ + +const TimeElapsed = ({ time }: Props) => { + const { t } = useTranslation(); + const timeElapsed = createLocalizedTime(time, t); + + return ( + + { timeElapsed } + + ); +}; + +export default TimeElapsed; diff --git a/react/features/speaker-stats/components/web/Timeline.tsx b/react/features/speaker-stats/components/web/Timeline.tsx new file mode 100644 index 000000000..5dd67a256 --- /dev/null +++ b/react/features/speaker-stats/components/web/Timeline.tsx @@ -0,0 +1,207 @@ +import React, { MouseEvent, useCallback, useEffect, useMemo, useRef } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; + +import { IReduxState } from '../../../app/types'; +import { getConferenceTimestamp } from '../../../base/conference/functions'; +import { FaceLandmarks } from '../../../face-landmarks/types'; +import { addToOffset, setTimelinePanning } from '../../actions.any'; +import { SCROLL_RATE, TIMELINE_COLORS } from '../../constants'; +import { getFaceLandmarksEnd, getFaceLandmarksStart, getTimelineBoundaries } from '../../functions'; + +type Props = { + 
faceLandmarks?: FaceLandmarks[]; +}; + +const Timeline = ({ faceLandmarks }: Props) => { + const startTimestamp = useSelector((state: IReduxState) => getConferenceTimestamp(state)) ?? 0; + const { left, right } = useSelector((state: IReduxState) => getTimelineBoundaries(state)); + const { timelinePanning } = useSelector((state: IReduxState) => state['features/speaker-stats']); + const dispatch = useDispatch(); + const containerRef = useRef(null); + const intervalDuration = useMemo(() => right - left, [ left, right ]); + + const getSegments = useCallback(() => { + const segments = faceLandmarks?.filter(landmarks => { + const timeStart = getFaceLandmarksStart(landmarks, startTimestamp); + const timeEnd = getFaceLandmarksEnd(landmarks, startTimestamp); + + if (timeEnd > left && timeStart < right) { + + return true; + } + + return false; + }) ?? []; + + let leftCut; + let rightCut; + + if (segments.length) { + const start = getFaceLandmarksStart(segments[0], startTimestamp); + const end = getFaceLandmarksEnd(segments[segments.length - 1], startTimestamp); + + if (start <= left) { + leftCut = segments[0]; + } + if (end >= right) { + rightCut = segments[segments.length - 1]; + } + } + + if (leftCut) { + segments.shift(); + } + if (rightCut) { + segments.pop(); + } + + return { + segments, + leftCut, + rightCut + }; + }, [ faceLandmarks, left, right, startTimestamp ]); + + const { segments, leftCut, rightCut } = getSegments(); + + const getStyle = useCallback((duration: number, faceExpression: string) => { + return { + width: `${100 / (intervalDuration / duration)}%`, + backgroundColor: TIMELINE_COLORS[faceExpression] ?? 
TIMELINE_COLORS['no-detection'] + }; + }, [ intervalDuration ]); + + + const getStartStyle = useCallback(() => { + let startDuration = 0; + let color = TIMELINE_COLORS['no-detection']; + + if (leftCut) { + const { faceExpression } = leftCut; + + startDuration = getFaceLandmarksEnd(leftCut, startTimestamp) - left; + color = TIMELINE_COLORS[faceExpression]; + } else if (segments.length) { + startDuration = getFaceLandmarksStart(segments[0], startTimestamp) - left; + } else if (rightCut) { + startDuration = getFaceLandmarksStart(rightCut, startTimestamp) - left; + } + + return { + width: `${100 / (intervalDuration / startDuration)}%`, + backgroundColor: color + }; + }, [ leftCut, rightCut, startTimestamp, left, intervalDuration, segments ]); + + const getEndStyle = useCallback(() => { + let endDuration = 0; + let color = TIMELINE_COLORS['no-detection']; + + if (rightCut) { + const { faceExpression } = rightCut; + + endDuration = right - getFaceLandmarksStart(rightCut, startTimestamp); + color = TIMELINE_COLORS[faceExpression]; + } else if (segments.length) { + endDuration = right - getFaceLandmarksEnd(segments[segments.length - 1], startTimestamp); + } else if (leftCut) { + endDuration = right - getFaceLandmarksEnd(leftCut, startTimestamp); + } + + return { + width: `${100 / (intervalDuration / endDuration)}%`, + backgroundColor: color + }; + }, [ leftCut, rightCut, startTimestamp, right, intervalDuration, segments ]); + + const getOneSegmentStyle = useCallback((faceExpression?: string) => { + return { + width: '100%', + backgroundColor: faceExpression ? 
TIMELINE_COLORS[faceExpression] : TIMELINE_COLORS['no-detection'], + borderRadius: 0 + }; + }, []); + + const handleOnWheel = useCallback((event: WheelEvent) => { + // check if horizontal scroll + if (Math.abs(event.deltaX) >= Math.abs(event.deltaY)) { + const value = event.deltaX * SCROLL_RATE; + + dispatch(addToOffset(value)); + event.preventDefault(); + } + }, [ dispatch, addToOffset ]); + + const hideStartAndEndSegments = useCallback(() => leftCut && rightCut + && leftCut.faceExpression === rightCut.faceExpression + && !segments.length, + [ leftCut, rightCut, segments ]); + + useEffect(() => { + containerRef.current?.addEventListener('wheel', handleOnWheel, { passive: false }); + + return () => containerRef.current?.removeEventListener('wheel', handleOnWheel); + }, []); + + const getPointOnTimeline = useCallback((event: MouseEvent) => { + const axisRect = event.currentTarget.getBoundingClientRect(); + const eventOffsetX = event.pageX - axisRect.left; + + return (eventOffsetX * right) / axisRect.width; + }, [ right ]); + + + const handleOnMouseMove = useCallback((event: MouseEvent) => { + const { active, x } = timelinePanning; + + if (active) { + const point = getPointOnTimeline(event); + + dispatch(addToOffset(x - point)); + dispatch(setTimelinePanning({ ...timelinePanning, + x: point })); + } + }, [ timelinePanning, dispatch, addToOffset, setTimelinePanning, getPointOnTimeline ]); + + const handleOnMouseDown = useCallback((event: MouseEvent) => { + const point = getPointOnTimeline(event); + + dispatch(setTimelinePanning( + { + active: true, + x: point + } + )); + + event.preventDefault(); + event.stopPropagation(); + }, [ getPointOnTimeline, dispatch, setTimelinePanning ]); + + return ( +
+
+ {!hideStartAndEndSegments() &&
} + {hideStartAndEndSegments() &&
} + {segments?.map(({ duration, timestamp, faceExpression }) => + (
)) } + + {!hideStartAndEndSegments() &&
} +
+
+ ); +}; + +export default Timeline; diff --git a/react/features/speaker-stats/components/web/TimelineAxis.tsx b/react/features/speaker-stats/components/web/TimelineAxis.tsx new file mode 100644 index 000000000..764a5b52a --- /dev/null +++ b/react/features/speaker-stats/components/web/TimelineAxis.tsx @@ -0,0 +1,187 @@ +import React, { MouseEvent, useCallback, useEffect, useRef, useState } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; + +import { IReduxState } from '../../../app/types'; +import { addToOffset, addToOffsetLeft, addToOffsetRight, setTimelinePanning } from '../../actions.any'; +import { MIN_HANDLER_WIDTH } from '../../constants'; +import { getCurrentDuration, getTimelineBoundaries } from '../../functions'; + +import TimeElapsed from './TimeElapsed'; + +const TimelineAxis = () => { + const currentDuration = useSelector((state: IReduxState) => getCurrentDuration(state)) ?? 0; + const { left, right } = useSelector((state: IReduxState) => getTimelineBoundaries(state)); + const { timelinePanning } = useSelector((state: IReduxState) => state['features/speaker-stats']); + const dispatch = useDispatch(); + const axisRef = useRef(null); + + const [ dragLeft, setDragLeft ] = useState(false); + const [ dragRight, setDragRight ] = useState(false); + + const getPointOnAxis = useCallback((event: MouseEvent) => { + const axisRect = event.currentTarget.getBoundingClientRect(); + const eventOffsetX = event.pageX - axisRect.left; + + return (eventOffsetX * currentDuration) / axisRect.width; + }, [ currentDuration ]); + + const startResizeHandlerLeft = useCallback((event: MouseEvent) => { + if (!timelinePanning.active && !dragRight) { + setDragLeft(true); + } + event.preventDefault(); + event.stopPropagation(); + }, [ dragRight, timelinePanning, setDragLeft ]); + + const stopResizeLeft = () => { + setDragLeft(false); + }; + + const resizeHandlerLeft = useCallback((event: MouseEvent) => { + if (dragLeft) { + const point = getPointOnAxis(event); + 
+ if (point >= 0 && point < right) { + const value = point - left; + + dispatch(addToOffsetLeft(value)); + } + } + }, [ dragLeft, getPointOnAxis, dispatch, addToOffsetLeft ]); + + const startResizeHandlerRight = useCallback((event: MouseEvent) => { + if (!timelinePanning.active && !dragRight) { + setDragRight(true); + } + event.preventDefault(); + event.stopPropagation(); + }, [ timelinePanning, dragRight ]); + + const stopResizeRight = useCallback(() => { + setDragRight(false); + }, [ setDragRight ]); + + const resizeHandlerRight = (event: MouseEvent) => { + if (dragRight) { + const point = getPointOnAxis(event); + + if (point > left && point <= currentDuration) { + const value = point - right; + + dispatch(addToOffsetRight(value)); + } + } + }; + + const startMoveHandler = useCallback((event: MouseEvent) => { + if (!dragLeft && !dragRight) { + const point = getPointOnAxis(event); + + dispatch(setTimelinePanning( + { + active: true, + x: point + } + )); + } + event.preventDefault(); + event.stopPropagation(); + }, [ dragLeft, dragRight, getPointOnAxis, dispatch, setTimelinePanning ]); + + const stopMoveHandler = () => { + dispatch(setTimelinePanning({ ...timelinePanning, + active: false })); + }; + + const moveHandler = useCallback((event: MouseEvent) => { + const { active, x } = timelinePanning; + + if (active) { + const point = getPointOnAxis(event); + + dispatch(addToOffset(point - x)); + dispatch(setTimelinePanning({ ...timelinePanning, + x: point })); + } + }, [ timelinePanning, getPointOnAxis, dispatch, addToOffset, setTimelinePanning ]); + + const handleOnMouseMove = useCallback((event: MouseEvent) => { + resizeHandlerLeft(event); + resizeHandlerRight(event); + moveHandler(event); + }, [ resizeHandlerLeft, resizeHandlerRight ]); + + const handleOnMouseUp = useCallback(() => { + stopResizeLeft(); + stopResizeRight(); + stopMoveHandler(); + }, [ stopResizeLeft, stopResizeRight, stopMoveHandler ]); + + const getHandlerStyle = useCallback(() => { + let 
marginLeft = 100 / (currentDuration / left); + let width = 100 / (currentDuration / (right - left)); + + if (axisRef.current) { + const axisWidth = axisRef.current.getBoundingClientRect().width; + let handlerWidth = (width / 100) * axisWidth; + + if (handlerWidth < MIN_HANDLER_WIDTH) { + const newLeft = right - ((currentDuration * MIN_HANDLER_WIDTH) / axisWidth); + + handlerWidth = MIN_HANDLER_WIDTH; + marginLeft = 100 / (currentDuration / newLeft); + width = 100 / (currentDuration / (right - newLeft)); + } + + if (marginLeft + width > 100) { + return { + marginLeft: `calc(100% - ${handlerWidth}px)`, + width: handlerWidth + }; + } + } + + return { + marginLeft: `${marginLeft > 0 ? marginLeft : 0}%`, + width: `${width}%` + }; + }, [ currentDuration, left, right, axisRef ]); + + useEffect(() => { + window.addEventListener('mouseup', handleOnMouseUp); + + return () => window.removeEventListener('mouseup', handleOnMouseUp); + }, []); + + return ( +
+
+
+ +
+
+ +
+
+
+ +
+
+ ); +}; + +export default TimelineAxis; diff --git a/react/features/speaker-stats/components/web/index.js b/react/features/speaker-stats/components/web/index.ts similarity index 100% rename from react/features/speaker-stats/components/web/index.js rename to react/features/speaker-stats/components/web/index.ts diff --git a/react/features/speaker-stats/constants.ts b/react/features/speaker-stats/constants.ts index 1476fd04a..e3c41ccf9 100644 --- a/react/features/speaker-stats/constants.ts +++ b/react/features/speaker-stats/constants.ts @@ -2,6 +2,27 @@ export const SPEAKER_STATS_RELOAD_INTERVAL = 1000; export const DISPLAY_SWITCH_BREAKPOINT = 600; -export const RESIZE_SEARCH_SWITCH_CONTAINER_BREAKPOINT = 750; - export const MOBILE_BREAKPOINT = 480; + +export const THRESHOLD_FIXED_AXIS = 10000; + +export const MINIMUM_INTERVAL = 4000; + +export const SCROLL_RATE = 500; + +export const MIN_HANDLER_WIDTH = 30; + +export const TIMELINE_COLORS: { + [key: string]: string; +} = { + happy: '#F3AD26', + neutral: '#676767', + sad: '#539EF9', + surprised: '#BC72E1', + angry: '#F35826', + fearful: '#3AC8C8', + disgusted: '#65B16B', + 'no-detection': '#FFFFFF00' +}; + +export const CLEAR_TIME_BOUNDARY_THRESHOLD = 1000; diff --git a/react/features/speaker-stats/functions.ts b/react/features/speaker-stats/functions.ts index 0a935b47c..bb0fc249d 100644 --- a/react/features/speaker-stats/functions.ts +++ b/react/features/speaker-stats/functions.ts @@ -1,8 +1,13 @@ import _ from 'lodash'; import { IReduxState } from '../app/types'; +import { getConferenceTimestamp } from '../base/conference/functions'; import { PARTICIPANT_ROLE } from '../base/participants/constants'; import { getParticipantById } from '../base/participants/functions'; +import { FaceLandmarks } from '../face-landmarks/types'; + +import { THRESHOLD_FIXED_AXIS } from './constants'; +import { ISpeaker, ISpeakerStats } from './reducer'; /** * Checks if the speaker stats search is disabled. 
@@ -71,12 +76,12 @@ export function getPendingReorder(state: IReduxState) { /** * Get sorted speaker stats ids based on a configuration setting. * - * @param {IReduxState} state - The redux state. - * @param {Object} stats - The current speaker stats. - * @returns {Object} - Ordered speaker stats ids. + * @param {IState} state - The redux state. + * @param {IState} stats - The current speaker stats. + * @returns {string[] | undefined} - Ordered speaker stats ids. * @public */ -export function getSortedSpeakerStatsIds(state: IReduxState, stats: Object) { +export function getSortedSpeakerStatsIds(state: IReduxState, stats: ISpeakerStats) { const orderConfig = getSpeakerStatsOrder(state); if (orderConfig) { @@ -91,11 +96,11 @@ export function getSortedSpeakerStatsIds(state: IReduxState, stats: Object) { * * Compares the order of two participants in the speaker stats list. * - * @param {Object} currentParticipant - The first participant for comparison. - * @param {Object} nextParticipant - The second participant for comparison. + * @param {ISpeaker} currentParticipant - The first participant for comparison. + * @param {ISpeaker} nextParticipant - The second participant for comparison. * @returns {number} - The sort order of the two participants. */ - function compareFn(currentParticipant: any, nextParticipant: any) { + function compareFn(currentParticipant: ISpeaker, nextParticipant: ISpeaker) { if (orderConfig.includes('hasLeft')) { if (nextParticipant.hasLeft() && !currentParticipant.hasLeft()) { return -1; @@ -104,7 +109,7 @@ export function getSortedSpeakerStatsIds(state: IReduxState, stats: Object) { } } - let result; + let result = 0; for (const sortCriteria of orderConfig) { switch (sortCriteria) { @@ -136,13 +141,13 @@ export function getSortedSpeakerStatsIds(state: IReduxState, stats: Object) { /** * Enhance speaker stats to include data needed for ordering. * - * @param {IReduxState} state - The redux state. - * @param {Object} stats - Speaker stats. 
+ * @param {IState} state - The redux state. + * @param {ISpeakerStats} stats - Speaker stats. * @param {Array} orderConfig - Ordering configuration. - * @returns {Object} - Enhanced speaker stats. + * @returns {ISpeakerStats} - Enhanced speaker stats. * @public */ -function getEnhancedStatsForOrdering(state: IReduxState, stats: any, orderConfig?: string[]) { +function getEnhancedStatsForOrdering(state: IReduxState, stats: ISpeakerStats, orderConfig: Array<string>) { if (!orderConfig) { return stats; } @@ -163,14 +168,14 @@ /** * Filter stats by search criteria. * - * @param {IReduxState} state - The redux state. - * @param {Object | undefined} stats - The unfiltered stats. + * @param {IState} state - The redux state. + * @param {ISpeakerStats | undefined} stats - The unfiltered stats. * - * @returns {Object} - Filtered speaker stats. + * @returns {ISpeakerStats} - Filtered speaker stats. * @public */ -export function filterBySearchCriteria(state: IReduxState, stats?: Object) { - const filteredStats: any = _.cloneDeep(stats ?? getSpeakerStats(state)); +export function filterBySearchCriteria(state: IReduxState, stats?: ISpeakerStats) { + const filteredStats = _.cloneDeep(stats ?? getSpeakerStats(state)); const criteria = getSearchCriteria(state); if (criteria !== null) { @@ -191,14 +196,14 @@ /** * Reset the hidden speaker stats. * - * @param {IReduxState} state - The redux state. - * @param {Object | undefined} stats - The unfiltered stats. + * @param {IState} state - The redux state. + * @param {ISpeakerStats | undefined} stats - The unfiltered stats. * * @returns {Object} - Speaker stats. * @public */ -export function resetHiddenStats(state: IReduxState, stats?: Object) { - const resetStats: any = _.cloneDeep(stats ?? 
getSpeakerStats(state)); +export function resetHiddenStats(state: IReduxState, stats?: ISpeakerStats) { + const resetStats = _.cloneDeep(stats ?? getSpeakerStats(state)); for (const id in resetStats) { if (resetStats[id].hidden) { @@ -208,3 +213,62 @@ export function resetHiddenStats(state: IReduxState, stats?: Object) { return resetStats; } + +/** + * Gets the current duration of the conference. + * + * @param {IState} state - The redux state. + * @returns {number | null} - The duration in milliseconds or null. + */ +export function getCurrentDuration(state: IReduxState) { + const startTimestamp = getConferenceTimestamp(state); + + return startTimestamp ? Date.now() - startTimestamp : null; +} + +/** + * Gets the boundaries of the emotion timeline. + * + * @param {IState} state - The redux state. + * @returns {Object} - The left and right boundaries. + */ +export function getTimelineBoundaries(state: IReduxState) { + const { timelineBoundary, offsetLeft, offsetRight } = state['features/speaker-stats']; + const currentDuration = getCurrentDuration(state) ?? 0; + const rightBoundary = timelineBoundary ? timelineBoundary : currentDuration; + let leftOffset = 0; + + if (rightBoundary > THRESHOLD_FIXED_AXIS) { + leftOffset = rightBoundary - THRESHOLD_FIXED_AXIS; + } + + const left = offsetLeft + leftOffset; + const right = rightBoundary + offsetRight; + + return { + left, + right + }; +} + +/** + * Returns the conference start time of the face landmarks. + * + * @param {FaceLandmarks} faceLandmarks - The face landmarks. + * @param {number} startTimestamp - The start timestamp of the conference. + * @returns {number} + */ +export function getFaceLandmarksStart(faceLandmarks: FaceLandmarks, startTimestamp: number) { + return faceLandmarks.timestamp - startTimestamp; +} + +/** + * Returns the conference end time of the face landmarks. + * + * @param {FaceLandmarks} faceLandmarks - The face landmarks. 
+ * @param {number} startTimestamp - The start timestamp of the conference. + * @returns {number} + */ +export function getFaceLandmarksEnd(faceLandmarks: FaceLandmarks, startTimestamp: number) { + return getFaceLandmarksStart(faceLandmarks, startTimestamp) + faceLandmarks.duration; +} diff --git a/react/features/speaker-stats/index.js b/react/features/speaker-stats/index.ts similarity index 100% rename from react/features/speaker-stats/index.js rename to react/features/speaker-stats/index.ts diff --git a/react/features/speaker-stats/middleware.ts b/react/features/speaker-stats/middleware.ts index 88799de84..6b8bbf884 100644 --- a/react/features/speaker-stats/middleware.ts +++ b/react/features/speaker-stats/middleware.ts @@ -1,3 +1,6 @@ +import { AnyAction } from 'redux'; + +import { IStore } from '../app/types'; import { PARTICIPANT_JOINED, PARTICIPANT_KICKED, @@ -7,16 +10,29 @@ import { import MiddlewareRegistry from '../base/redux/MiddlewareRegistry'; import { + ADD_TO_OFFSET, INIT_SEARCH, INIT_UPDATE_STATS, RESET_SEARCH_CRITERIA } from './actionTypes'; -import { initReorderStats, updateSortedSpeakerStatsIds, updateStats } from './actions'; -import { filterBySearchCriteria, getPendingReorder, getSortedSpeakerStatsIds, resetHiddenStats } from './functions'; - -MiddlewareRegistry.register(({ dispatch, getState }) => next => action => { - const result = next(action); +import { + clearTimelineBoundary, + initReorderStats, + setTimelineBoundary, + updateSortedSpeakerStatsIds, + updateStats +} from './actions.any'; +import { CLEAR_TIME_BOUNDARY_THRESHOLD } from './constants'; +import { + filterBySearchCriteria, + getCurrentDuration, + getPendingReorder, + getSortedSpeakerStatsIds, + getTimelineBoundaries, + resetHiddenStats +} from './functions'; +MiddlewareRegistry.register(({ dispatch, getState }: IStore) => (next: Function) => (action: AnyAction) => { switch (action.type) { case INIT_SEARCH: { const state = getState(); @@ -34,7 +50,7 @@ 
MiddlewareRegistry.register(({ dispatch, getState }) => next => action => { const pendingReorder = getPendingReorder(state); if (pendingReorder) { - dispatch(updateSortedSpeakerStatsIds(getSortedSpeakerStatsIds(state, stats))); + dispatch(updateSortedSpeakerStatsIds(getSortedSpeakerStatsIds(state, stats) ?? [])); } dispatch(updateStats(stats)); @@ -57,7 +73,22 @@ MiddlewareRegistry.register(({ dispatch, getState }) => next => action => { break; } + + case ADD_TO_OFFSET: { + const state = getState(); + const { timelineBoundary } = state['features/speaker-stats']; + const { right } = getTimelineBoundaries(state); + const currentDuration = getCurrentDuration(state) ?? 0; + + if (Math.abs((right + action.value) - currentDuration) < CLEAR_TIME_BOUNDARY_THRESHOLD) { + dispatch(clearTimelineBoundary()); + } else if (!timelineBoundary) { + dispatch(setTimelineBoundary(currentDuration ?? 0)); + } + + break; + } } - return result; + return next(action); }); diff --git a/react/features/speaker-stats/reducer.ts b/react/features/speaker-stats/reducer.ts index ca95196d4..870a7045b 100644 --- a/react/features/speaker-stats/reducer.ts +++ b/react/features/speaker-stats/reducer.ts @@ -1,11 +1,17 @@ import _ from 'lodash'; import ReducerRegistry from '../base/redux/ReducerRegistry'; +import { FaceLandmarks } from '../face-landmarks/types'; import { + ADD_TO_OFFSET, + ADD_TO_OFFSET_LEFT, + ADD_TO_OFFSET_RIGHT, INIT_REORDER_STATS, INIT_SEARCH, RESET_SEARCH_CRITERIA, + SET_PANNING, + SET_TIMELINE_BOUNDARY, TOGGLE_FACE_EXPRESSIONS, UPDATE_SORTED_SPEAKER_STATS_IDS, UPDATE_STATS @@ -22,16 +28,52 @@ const INITIAL_STATE = { pendingReorder: true, criteria: null, showFaceExpressions: false, - sortedSpeakerStatsIds: [] + sortedSpeakerStatsIds: [], + timelineBoundary: null, + offsetLeft: 0, + offsetRight: 0, + timelinePanning: { + active: false, + x: 0 + } }; +export interface ISpeaker { + addFaceLandmarks: (faceLandmarks: FaceLandmarks) => void; + displayName?: string; + getDisplayName: () => 
string; + getFaceLandmarks: () => FaceLandmarks[]; + getTotalDominantSpeakerTime: () => number; + getUserId: () => string; + hasLeft: () => boolean; + hidden?: boolean; + isDominantSpeaker: () => boolean; + isLocalStats: () => boolean; + isModerator?: boolean; + markAsHasLeft: () => boolean; + setDisplayName: (newName: string) => void; + setDominantSpeaker: (isNowDominantSpeaker: boolean, silence: boolean) => void; + setFaceLandmarks: (faceLandmarks: FaceLandmarks[]) => void; +} + +export interface ISpeakerStats { + [key: string]: ISpeaker; +} + export interface ISpeakerStatsState { criteria: string | null; isOpen: boolean; + offsetLeft: number; + offsetRight: number; pendingReorder: boolean; showFaceExpressions: boolean; sortedSpeakerStatsIds: Array<string>; - stats: Object; + stats: ISpeakerStats; + timelineBoundary: number | null; + timelinePanning: { + active: boolean; + x: number; + }; } ReducerRegistry.register('features/speaker-stats', @@ -53,6 +95,37 @@ ReducerRegistry.register('features/speaker-stats', showFaceExpressions: !state.showFaceExpressions }; } + case ADD_TO_OFFSET: { + return { + ...state, + offsetLeft: state.offsetLeft + action.value, + offsetRight: state.offsetRight + action.value + }; + } + case ADD_TO_OFFSET_RIGHT: { + return { + ...state, + offsetRight: state.offsetRight + action.value + }; + } + case ADD_TO_OFFSET_LEFT: { + return { + ...state, + offsetLeft: state.offsetLeft + action.value + }; + } + case SET_TIMELINE_BOUNDARY: { + return { + ...state, + timelineBoundary: action.boundary + }; + } + case SET_PANNING: { + return { + ...state, + timelinePanning: action.panning + }; + } } return state; diff --git a/resources/prosody-plugins/mod_speakerstats_component.lua b/resources/prosody-plugins/mod_speakerstats_component.lua index 06c644ba2..94dc8dd72 100644 --- a/resources/prosody-plugins/mod_speakerstats_component.lua +++ b/resources/prosody-plugins/mod_speakerstats_component.lua @@ -100,10 +100,10 @@ function on_message(event)
room.speakerStats['dominantSpeakerId'] = occupant.jid; end - local faceExpression = event.stanza:get_child('faceExpression', 'http://jitsi.org/jitmeet'); + local newFaceLandmarks = event.stanza:get_child('faceLandmarks', 'http://jitsi.org/jitmeet'); - if faceExpression then - local roomAddress = faceExpression.attr.room; + if newFaceLandmarks then + local roomAddress = newFaceLandmarks.attr.room; local room = get_room_from_jid(room_jid_match_rewrite(roomAddress)); if not room then @@ -121,9 +121,13 @@ function on_message(event) log("warn", "No occupant %s found for %s", from, roomAddress); return false; end - local faceExpressions = room.speakerStats[occupant.jid].faceExpressions; - faceExpressions[faceExpression.attr.expression] = - faceExpressions[faceExpression.attr.expression] + tonumber(faceExpression.attr.duration); + local faceLandmarks = room.speakerStats[occupant.jid].faceLandmarks; + table.insert(faceLandmarks, + { + faceExpression = newFaceLandmarks.attr.faceExpression, + timestamp = tonumber(newFaceLandmarks.attr.timestamp), + duration = tonumber(newFaceLandmarks.attr.duration), + }) end return true @@ -142,15 +146,7 @@ function new_SpeakerStats(nick, context_user) nick = nick; context_user = context_user; displayName = nil; - faceExpressions = { - happy = 0, - neutral = 0, - surprised = 0, - angry = 0, - fearful = 0, - disgusted = 0, - sad = 0 - }; + faceLandmarks = {}; }, SpeakerStats); end @@ -243,9 +239,9 @@ function occupant_joined(event) -- and skip focus if sneaked into the table if values and type(values) == 'table' and values.nick ~= nil and values.nick ~= 'focus' then local totalDominantSpeakerTime = values.totalDominantSpeakerTime; - local faceExpressions = values.faceExpressions; + local faceLandmarks = values.faceLandmarks; if totalDominantSpeakerTime > 0 or room:get_occupant_jid(jid) == nil or values:isDominantSpeaker() - or get_participant_expressions_count(faceExpressions) > 0 then + or next(faceLandmarks) ~= nil then -- before sending 
we need to calculate current dominant speaker state if values:isDominantSpeaker() and not values:isSilent() then local timeElapsed = math.floor(socket.gettime()*1000 - values._dominantSpeakerStart); @@ -255,7 +251,7 @@ function occupant_joined(event) users_json[values.nick] = { displayName = values.displayName, totalDominantSpeakerTime = totalDominantSpeakerTime, - faceExpressions = faceExpressions + faceLandmarks = faceLandmarks }; end end @@ -391,12 +387,3 @@ process_host_module(breakout_room_component_host, function(host_module, host) end); end end); - -function get_participant_expressions_count(faceExpressions) - local count = 0; - for _, value in pairs(faceExpressions) do - count = count + value; - end - - return count; -end