feat(RN): display transcription subtitles

This commit is contained in:
paweldomas 2018-08-23 14:57:12 -05:00 committed by Любомир Маринов
parent eac069c930
commit 26d906fa46
11 changed files with 400 additions and 208 deletions

View File

@ -17,6 +17,7 @@ import {
participantRoleChanged,
participantUpdated
} from '../participants';
import { endpointMessageReceived } from '../../subtitles';
import { getLocalTracks, trackAdded, trackRemoved } from '../tracks';
import { getJitsiMeetGlobalNS } from '../util';
@ -137,6 +138,10 @@ function _addConferenceListeners(conference, dispatch) {
JitsiConferenceEvents.DOMINANT_SPEAKER_CHANGED,
id => dispatch(dominantSpeakerChanged(id, conference)));
conference.on(
JitsiConferenceEvents.ENDPOINT_MESSAGE_RECEIVED,
(...args) => dispatch(endpointMessageReceived(...args)));
conference.on(
JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED,
(...args) => dispatch(participantConnectionStatusChanged(...args)));

View File

@ -22,6 +22,7 @@ import { FILMSTRIP_SIZE, Filmstrip, isFilmstripVisible } from '../../filmstrip';
import { LargeVideo } from '../../large-video';
import { CalleeInfoContainer } from '../../invite';
import { NotificationsContainer } from '../../notifications';
import { Captions } from '../../subtitles';
import { setToolboxVisible, Toolbox } from '../../toolbox';
import styles from './styles';
@ -283,18 +284,20 @@ class Conference extends Component<Props> {
<View
pointerEvents = 'box-none'
style = { styles.toolboxAndFilmstripContainer }>
{
/**
* Notifications are rendered on the very top of other
* components like subtitles, toolbox and filmstrip.
*/
{/*
* Notifications are rendered on the very top of other
* components like subtitles, toolbox and filmstrip.
*/
this._renderNotificationsContainer()
}
<Captions onPress = { this._onClick } />
{/*
* The Toolbox is in a stacking layer below the Filmstrip.
*/}
<Toolbox />
{/*
* The Filmstrip is in a stacking layer above the
* LargeVideo. The LargeVideo and the Filmstrip form what
@ -369,28 +372,26 @@ class Conference extends Component<Props> {
}
/**
* Renders a container for notifications to be displayed by
* the base/notifications feature.
* Renders a container for notifications to be displayed by the
* base/notifications feature.
*
* @returns {React$Element}
* @private
* @returns {React$Element}
*/
_renderNotificationsContainer() {
const notificationsStyle = { };
const notificationsStyle = {};
/**
* In the landscape mode (wide) there's problem with notifications being
* shadowed by the filmstrip rendered on the right. This makes the "x"
* button not clickable. In order to avoid that a margin of
* the filmstrip's size is added to the right.
*
* Pawel: after many attempts I failed to make notifications adjust to
* their contents width because of column and rows being used in
* the flex layout. The only option that seemed to limit
* the notification's size was explicit 'width' value which is not
* better than the margin added here.
*/
if (!isNarrowAspectRatio(this) && this.props._filmstripVisible) {
// In the landscape mode (wide) there's a problem with notifications being
// shadowed by the filmstrip rendered on the right. This makes the "x"
// button not clickable. In order to avoid that a margin of the
// filmstrip's size is added to the right.
//
// Pawel: after many attempts I failed to make notifications adjust to
// their contents width because of column and rows being used in the
// flex layout. The only option that seemed to limit the notification's
// size was explicit 'width' value which is not better than the margin
// added here.
if (this.props._filmstripVisible && !isNarrowAspectRatio(this)) {
notificationsStyle.marginRight = FILMSTRIP_SIZE;
}

View File

@ -1,34 +1,37 @@
/* @flow */
// @flow
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import { Watermarks } from '../../base/react';
import { TranscriptionSubtitles } from '../../subtitles/';
import { Captions } from '../../subtitles/';
import Labels from './Labels';
declare var interfaceConfig: Object;
/**
* The type of the React {@code Component} props of {@link LargeVideo}.
*/
type Props = {
/**
* True if the {@code VideoQualityLabel} should not be displayed.
*/
hideVideoQualityLabel: boolean
};
/**
* Implements a React {@link Component} which represents the large video (a.k.a.
* the conference participant who is on the local stage) on Web/React.
*
* @extends Component
*/
export default class LargeVideo extends Component<*> {
static propTypes = {
/**
* True if the {@code VideoQualityLabel} should not be displayed.
*/
hideVideoQualityLabel: PropTypes.bool
};
export default class LargeVideo extends Component<Props> {
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
* @returns {React$Element}
*/
render() {
return (
@ -52,18 +55,15 @@ export default class LargeVideo extends Component<*> {
<span id = 'remoteConnectionMessage' />
<div id = 'largeVideoElementsContainer'>
<div id = 'largeVideoBackgroundContainer' />
{
/**
* FIXME: the architecture of elements related to the
* large video and the naming. The background is not
* part of largeVideoWrapper because we are controlling
* the size of the video through largeVideoWrapper.
* That's why we need another container for the
* background and the largeVideoWrapper in order to
* hide/show them.
*/
}
{/*
* FIXME: the architecture of elements related to the large
* video and the naming. The background is not part of
* largeVideoWrapper because we are controlling the size of
* the video through largeVideoWrapper. That's why we need
* another container for the background and the
* largeVideoWrapper in order to hide/show them.
*/}
<div id = 'largeVideoWrapper'>
<video
autoPlay = { true }
@ -72,10 +72,10 @@ export default class LargeVideo extends Component<*> {
</div>
</div>
{ interfaceConfig.DISABLE_TRANSCRIPTION_SUBTITLES
? null : <TranscriptionSubtitles /> }
|| <Captions /> }
<span id = 'localConnectionMessage' />
{ this.props.hideVideoQualityLabel
? null : <Labels /> }
|| <Labels /> }
</div>
);
}

View File

@ -0,0 +1,128 @@
// @flow
import { Component } from 'react';
/**
* {@code AbstractCaptions} properties.
*/
export type AbstractCaptionsProps = {
/**
* Whether local participant is requesting to see subtitles.
*/
_requestingSubtitles: boolean,
/**
* Transcript texts formatted with participant's name and final content.
* Mapped by id just to have the keys for convenience during the rendering
* process.
*/
_transcripts: Map<string, string>
};
/**
* Abstract React {@code Component} which can display speech-to-text results
* from Jigasi as subtitles.
*/
export class AbstractCaptions<P: AbstractCaptionsProps>
extends Component<P> {
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {React$Element}
*/
render() {
const { _requestingSubtitles, _transcripts } = this.props;
if (!_requestingSubtitles || !_transcripts.size) {
return null;
}
const paragraphs = [];
for (const [ id, text ] of _transcripts) {
paragraphs.push(this._renderParagraph(id, text));
}
return this._renderSubtitlesContainer(paragraphs);
}
/**
* Renders the transcription text.
*
* @abstract
* @param {string} id - The ID of the transcript message from which the
* {@code text} has been created.
* @param {string} text - Subtitles text formatted with the participant's
* name.
* @protected
* @returns {React$Element} - The React element which displays the text.
*/
_renderParagraph: (id: string, text: string) => React$Element<*>;
/**
* Renders the subtitles container.
*
* @abstract
* @param {Array<React$Element>} paragraphs - An array of elements created
* for each subtitle using the {@link _renderParagraph} method.
* @protected
* @returns {React$Element} - The subtitles container.
*/
_renderSubtitlesContainer: (Array<React$Element<*>>) => React$Element<*>;
}
/**
* Formats the transcript messages into text by prefixing participant's name to
* avoid duplicating the effort on platform specific component.
*
* @param {Object} state - The redux state.
* @private
* @returns {Map<string, string>} - Formatted transcript subtitles mapped by
* transcript message IDs.
*/
function _constructTranscripts(state: Object): Map<string, string> {
const { _transcriptMessages } = state['features/subtitles'];
const transcripts = new Map();
for (const [ id, transcriptMessage ] of _transcriptMessages) {
if (transcriptMessage) {
let text = `${transcriptMessage.participantName}: `;
if (transcriptMessage.final) {
text += transcriptMessage.final;
} else {
const stable = transcriptMessage.stable || '';
const unstable = transcriptMessage.unstable || '';
text += stable + unstable;
}
transcripts.set(id, text);
}
}
return transcripts;
}
/**
* Maps the transcriptionSubtitles in the redux state to the associated props of
* {@code AbstractCaptions}.
*
* @param {Object} state - The redux state.
* @private
* @returns {{
* _requestingSubtitles: boolean,
* _transcripts: Map<string, string>
* }}
*/
export function _abstractMapStateToProps(state: Object) {
const { _requestingSubtitles } = state['features/subtitles'];
return {
_requestingSubtitles,
_transcripts: _constructTranscripts(state)
};
}

View File

@ -0,0 +1,68 @@
// @flow
import React from 'react';
import { connect } from 'react-redux';
import { Container, Text } from '../../base/react';
import {
_abstractMapStateToProps,
AbstractCaptions,
type AbstractCaptionsProps
} from './AbstractCaptions';
import styles from './styles';
/**
* The type of the React {@code Component} props of {@link Captions}.
*/
type Props = AbstractCaptionsProps & {
onPress: Function
};
/**
* React {@code Component} which can display speech-to-text results from
* Jigasi as subtitles.
*/
class Captions
extends AbstractCaptions<Props> {
/**
* Renders the transcription text.
*
* @param {string} id - The ID of the transcript message from which the
* {@code text} has been created.
* @param {string} text - Subtitles text formatted with the participant's
* name.
* @protected
* @returns {React$Element} - The React element which displays the text.
*/
_renderParagraph(id: string, text: string): React$Element<*> {
return (
<Text
key = { id }
onPress = { this.props.onPress }
style = { styles.subtitle } >
{ text }
</Text>
);
}
/**
* Renders the subtitles container.
*
* @param {Array<React$Element>} paragraphs - An array of elements created
* for each subtitle using the {@link _renderParagraph} method.
* @protected
* @returns {React$Element} - The subtitles container.
*/
_renderSubtitlesContainer(
paragraphs: Array<React$Element<*>>): React$Element<*> {
return (
<Container style = { styles.subtitlesContainer } >
{ paragraphs }
</Container>
);
}
}
export default connect(_abstractMapStateToProps)(Captions);

View File

@ -0,0 +1,55 @@
// @flow
import React from 'react';
import { connect } from 'react-redux';
import {
_abstractMapStateToProps,
AbstractCaptions,
type AbstractCaptionsProps as Props
} from './AbstractCaptions';
/**
* React {@code Component} which can display speech-to-text results from
* Jigasi as subtitles.
*/
class Captions
extends AbstractCaptions<Props> {
/**
* Renders the transcription text.
*
* @param {string} id - The ID of the transcript message from which the
* {@code text} has been created.
* @param {string} text - Subtitles text formatted with the participant's
* name.
* @protected
* @returns {React$Element} - The React element which displays the text.
*/
_renderParagraph(id: string, text: string): React$Element<*> {
return (
<p key = { id }>
<span>{ text }</span>
</p>
);
}
/**
* Renders the subtitles container.
*
* @param {Array<React$Element>} paragraphs - An array of elements created
* for each subtitle using the {@link _renderParagraph} method.
* @protected
* @returns {React$Element} - The subtitles container.
*/
_renderSubtitlesContainer(
paragraphs: Array<React$Element<*>>): React$Element<*> {
return (
<div className = 'transcription-subtitles' >
{ paragraphs }
</div>
);
}
}
export default connect(_abstractMapStateToProps)(Captions);

View File

@ -1,96 +0,0 @@
// @flow
import React, { Component } from 'react';
import { connect } from 'react-redux';
/**
* The type of the React {@code Component} props of
* {@link TranscriptionSubtitles}.
*/
type Props = {
/**
* Map of transcriptMessageID's with corresponding transcriptMessage.
*/
_transcriptMessages: Map<string, Object>,
/**
* Whether local participant is requesting to see subtitles
*/
_requestingSubtitles: Boolean
};
/**
* React {@code Component} which can display speech-to-text results from
* Jigasi as subtitles.
*/
class TranscriptionSubtitles extends Component<Props> {
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
render() {
if (!this.props._requestingSubtitles
|| !this.props._transcriptMessages) {
return null;
}
const paragraphs = [];
for (const [ transcriptMessageID, transcriptMessage ]
of this.props._transcriptMessages) {
let text;
if (transcriptMessage) {
text = `${transcriptMessage.participantName}: `;
if (transcriptMessage.final) {
text += transcriptMessage.final;
} else {
const stable = transcriptMessage.stable || '';
const unstable = transcriptMessage.unstable || '';
text += stable + unstable;
}
paragraphs.push(
<p key = { transcriptMessageID }>
<span>{ text }</span>
</p>
);
}
}
return (
<div className = 'transcription-subtitles' >
{ paragraphs }
</div>
);
}
}
/**
 * Maps (parts of) the redux state to the associated props of
 * {@code TranscriptionSubtitles}.
 *
 * @param {Object} state - The redux state.
 * @private
 * @returns {{
 *     _transcriptMessages: Map,
 *     _requestingSubtitles: boolean
 * }}
 */
function _mapStateToProps(state) {
    const { _requestingSubtitles, _transcriptMessages }
        = state['features/subtitles'];

    return {
        _transcriptMessages,
        _requestingSubtitles
    };
}
export default connect(_mapStateToProps)(TranscriptionSubtitles);

View File

@ -1,2 +1,2 @@
export { default as TranscriptionSubtitles } from './TranscriptionSubtitles';
export { default as Captions } from './Captions';
export { default as ClosedCaptionButton } from './ClosedCaptionButton';

View File

@ -0,0 +1,31 @@
// @flow
import { BoxModel, ColorPalette, createStyleSheet } from '../../base/styles';
/**
 * Style for a single subtitle paragraph: white text on a slightly rounded
 * black background.
 */
const subtitle = {
    backgroundColor: ColorPalette.black,
    borderRadius: BoxModel.margin / 4,
    color: ColorPalette.white,
    marginBottom: BoxModel.margin,
    padding: BoxModel.padding / 2
};

/**
 * Style for the container holding all subtitle paragraphs: a non-growing
 * column anchored to the bottom with centered items.
 */
const subtitlesContainer = {
    alignItems: 'center',
    flexDirection: 'column',
    flexGrow: 0,
    justifyContent: 'flex-end',
    margin: BoxModel.margin
};

/**
 * The styles of the React {@code Component}s of the feature subtitles.
 */
export default createStyleSheet({
    subtitle,
    subtitlesContainer
});

View File

@ -2,14 +2,14 @@
import { MiddlewareRegistry } from '../base/redux';
import {
ENDPOINT_MESSAGE_RECEIVED,
TOGGLE_REQUESTING_SUBTITLES
} from './actionTypes';
import {
removeTranscriptMessage,
updateTranscriptMessage
} from './actions';
import {
ENDPOINT_MESSAGE_RECEIVED,
TOGGLE_REQUESTING_SUBTITLES
} from './actionTypes';
const logger = require('jitsi-meet-logger').getLogger(__filename);
@ -25,34 +25,35 @@ const JSON_TYPE_TRANSCRIPTION_RESULT = 'transcription-result';
*/
const JSON_TYPE_TRANSLATION_RESULT = 'translation-result';
/**
* The local participant property which is used to store the language
* preference for translation for a participant.
*/
const P_NAME_TRANSLATION_LANGUAGE = 'translation_language';
/**
* The local participant property which is used to set whether the local
* participant wants to have a transcriber in the room.
*/
const P_NAME_REQUESTING_TRANSCRIPTION = 'requestingTranscription';
/**
* The local participant property which is used to store the language
* preference for translation for a participant.
*/
const P_NAME_TRANSLATION_LANGUAGE = 'translation_language';
/**
* Time after which the rendered subtitles will be removed.
*/
const REMOVE_AFTER_MS = 3000;
/**
* Middleware that catches actions related to transcript messages
* to be rendered in {@link TranscriptionSubtitles }
* Middleware that catches actions related to transcript messages to be rendered
* in {@link Captions}.
*
* @param {Store} store - Redux store.
* @param {Store} store - The redux store.
* @returns {Function}
*/
MiddlewareRegistry.register(store => next => action => {
switch (action.type) {
case ENDPOINT_MESSAGE_RECEIVED:
return _endpointMessageReceived(store, next, action);
case TOGGLE_REQUESTING_SUBTITLES:
_requestingSubtitlesToggled(store);
break;
@ -61,22 +62,6 @@ MiddlewareRegistry.register(store => next => action => {
return next(action);
});
/**
* Toggle the local property 'requestingTranscription'. This will cause Jicofo
* and Jigasi to decide whether the transcriber needs to be in the room.
*
* @param {Store} store - The redux store.
* @private
* @returns {void}
*/
function _requestingSubtitlesToggled({ getState }) {
const { _requestingSubtitles } = getState()['features/subtitles'];
const { conference } = getState()['features/base/conference'];
conference.setLocalParticipantProperty(P_NAME_REQUESTING_TRANSCRIPTION,
!_requestingSubtitles);
}
/**
* Notifies the feature transcription that the action
* {@code ENDPOINT_MESSAGE_RECEIVED} is being dispatched within a specific redux
@ -92,84 +77,81 @@ function _requestingSubtitlesToggled({ getState }) {
* @returns {Object} The value returned by {@code next(action)}.
*/
function _endpointMessageReceived({ dispatch, getState }, next, action) {
if (!(action.json
&& (action.json.type === JSON_TYPE_TRANSCRIPTION_RESULT
|| action.json.type === JSON_TYPE_TRANSLATION_RESULT))) {
const { json } = action;
if (!(json
&& (json.type === JSON_TYPE_TRANSCRIPTION_RESULT
|| json.type === JSON_TYPE_TRANSLATION_RESULT))) {
return next(action);
}
const json = action.json;
const state = getState();
const translationLanguage
= getState()['features/base/conference'].conference
= state['features/base/conference'].conference
.getLocalParticipantProperty(P_NAME_TRANSLATION_LANGUAGE);
try {
const transcriptMessageID = json.message_id;
const participantName = json.participant.name;
const isInterim = json.is_interim;
const stability = json.stability;
if (json.type === JSON_TYPE_TRANSLATION_RESULT
&& json.language === translationLanguage) {
&& json.language === translationLanguage) {
// Displays final results in the target language if translation is
// enabled.
const newTranscriptMessage = {
participantName,
clearTimeOut: undefined,
final: json.text,
clearTimeOut: undefined
participantName
};
setClearerOnTranscriptMessage(dispatch,
_setClearerOnTranscriptMessage(dispatch,
transcriptMessageID, newTranscriptMessage);
dispatch(updateTranscriptMessage(transcriptMessageID,
newTranscriptMessage));
} else if (json.type === JSON_TYPE_TRANSCRIPTION_RESULT
&& !translationLanguage) {
&& !translationLanguage) {
// Displays interim and final results without any translation if
// translations are disabled.
const text = json.transcript[0].text;
const { text } = json.transcript[0];
// We update the previous transcript message with the same
// message ID or adds a new transcript message if it does not
// exist in the map.
const newTranscriptMessage
= { ...getState()['features/subtitles']._transcriptMessages
.get(transcriptMessageID) || { participantName } };
const newTranscriptMessage = {
...state['features/subtitles']._transcriptMessages
.get(transcriptMessageID)
|| { participantName }
};
setClearerOnTranscriptMessage(dispatch,
_setClearerOnTranscriptMessage(dispatch,
transcriptMessageID, newTranscriptMessage);
// If this is final result, update the state as a final result
// and start a count down to remove the subtitle from the state
if (!isInterim) {
if (!json.is_interim) {
newTranscriptMessage.final = text;
dispatch(updateTranscriptMessage(transcriptMessageID,
newTranscriptMessage));
} else if (stability > 0.85) {
} else if (json.stability > 0.85) {
// If the message has a high stability, we can update the
// stable field of the state and remove the previously
// unstable results
newTranscriptMessage.stable = text;
newTranscriptMessage.unstable = undefined;
dispatch(updateTranscriptMessage(transcriptMessageID,
newTranscriptMessage));
} else {
// Otherwise, this result has an unstable result, which we
// add to the state. The unstable result will be appended
// after the stable part.
newTranscriptMessage.unstable = text;
dispatch(updateTranscriptMessage(transcriptMessageID,
newTranscriptMessage));
}
dispatch(
updateTranscriptMessage(
transcriptMessageID,
newTranscriptMessage));
}
} catch (error) {
logger.error('Error occurred while updating transcriptions\n', error);
@ -178,6 +160,24 @@ function _endpointMessageReceived({ dispatch, getState }, next, action) {
return next(action);
}
/**
 * Toggles the local participant property 'requestingTranscription'. Jicofo
 * and Jigasi observe this property in order to decide whether a transcriber
 * needs to be in the room.
 *
 * @param {Store} store - The redux store.
 * @private
 * @returns {void}
 */
function _requestingSubtitlesToggled({ getState }) {
    const state = getState();
    const { conference } = state['features/base/conference'];
    const { _requestingSubtitles } = state['features/subtitles'];

    // Flip the current value so that repeated toggles alternate the request.
    conference.setLocalParticipantProperty(
        P_NAME_REQUESTING_TRANSCRIPTION,
        !_requestingSubtitles);
}
/**
* Set a timeout on a TranscriptMessage object so it clears itself when it's not
* updated.
@ -185,10 +185,9 @@ function _endpointMessageReceived({ dispatch, getState }, next, action) {
* @param {Function} dispatch - Dispatch remove action to store.
* @param {string} transcriptMessageID - The id of the message to remove.
* @param {Object} transcriptMessage - The message to remove.
*
* @returns {void}
*/
function setClearerOnTranscriptMessage(
function _setClearerOnTranscriptMessage(
dispatch,
transcriptMessageID,
transcriptMessage) {
@ -196,7 +195,8 @@ function setClearerOnTranscriptMessage(
clearTimeout(transcriptMessage.clearTimeOut);
}
transcriptMessage.clearTimeOut = setTimeout(() => {
dispatch(removeTranscriptMessage(transcriptMessageID));
}, REMOVE_AFTER_MS);
transcriptMessage.clearTimeOut
= setTimeout(
() => dispatch(removeTranscriptMessage(transcriptMessageID)),
REMOVE_AFTER_MS);
}