Merge pull request #3223 from ztl8702/local-recording

Feature: Local recording (Ready for review)
bgrozev 2018-08-08 19:35:11 -05:00 committed by GitHub
commit 25aaa74edc
36 changed files with 3520 additions and 5 deletions

View File

@@ -2,6 +2,7 @@ BUILD_DIR = build
CLEANCSS = ./node_modules/.bin/cleancss
DEPLOY_DIR = libs
LIBJITSIMEET_DIR = node_modules/lib-jitsi-meet/
LIBFLAC_DIR = node_modules/libflacjs/dist/min/
NODE_SASS = ./node_modules/.bin/node-sass
NPM = npm
OUTPUT_DIR = .
@@ -19,7 +20,7 @@ compile:
clean:
rm -fr $(BUILD_DIR)
deploy: deploy-init deploy-appbundle deploy-lib-jitsi-meet deploy-css deploy-local
deploy: deploy-init deploy-appbundle deploy-lib-jitsi-meet deploy-libflac deploy-css deploy-local
deploy-init:
rm -fr $(DEPLOY_DIR)
@@ -33,6 +34,8 @@ deploy-appbundle:
$(BUILD_DIR)/do_external_connect.min.map \
$(BUILD_DIR)/external_api.min.js \
$(BUILD_DIR)/external_api.min.map \
$(BUILD_DIR)/flacEncodeWorker.min.js \
$(BUILD_DIR)/flacEncodeWorker.min.map \
$(BUILD_DIR)/device_selection_popup_bundle.min.js \
$(BUILD_DIR)/device_selection_popup_bundle.min.map \
$(BUILD_DIR)/dial_in_info_bundle.min.js \
@@ -50,6 +53,12 @@ deploy-lib-jitsi-meet:
$(LIBJITSIMEET_DIR)/modules/browser/capabilities.json \
$(DEPLOY_DIR)
deploy-libflac:
cp \
$(LIBFLAC_DIR)/libflac4-1.3.2.min.js \
$(LIBFLAC_DIR)/libflac4-1.3.2.min.js.mem \
$(DEPLOY_DIR)
deploy-css:
$(NODE_SASS) $(STYLES_MAIN) $(STYLES_BUNDLE) && \
$(CLEANCSS) $(STYLES_BUNDLE) > $(STYLES_DESTINATION) ; \
@@ -58,7 +67,7 @@ deploy-css:
deploy-local:
([ ! -x deploy-local.sh ] || ./deploy-local.sh)
dev: deploy-init deploy-css deploy-lib-jitsi-meet
dev: deploy-init deploy-css deploy-lib-jitsi-meet deploy-libflac
$(WEBPACK_DEV_SERVER)
source-package:

View File

@@ -347,6 +347,24 @@ var config = {
// userRegion: "asia"
}
// Local Recording
//
// localRecording: {
// Enables local recording.
// Additionally, 'localrecording' (all lowercase) needs to be added to
// TOOLBAR_BUTTONS in interface_config.js for the Local Recording
// button to show up on the toolbar.
//
// enabled: true,
//
// The recording format, which can be one of 'ogg', 'flac' or 'wav'.
// format: 'flac'
//
// }
// Options related to end-to-end (participant to participant) ping.
// e2eping: {
// // The interval in milliseconds at which pings will be sent.
@@ -408,6 +426,7 @@ var config = {
nick
startBitrate
*/
};
/* eslint-enable no-unused-vars, no-var */
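For reference, a minimal sketch of what an enabled configuration could look like, based on the commented-out options above (the exact contents of TOOLBAR_BUTTONS in interface_config.js vary per deployment):

// config.js
var config = {
    // ...
    localRecording: {
        // Enable the local recording feature.
        enabled: true,
        // Record to FLAC; 'ogg' and 'wav' are also supported.
        format: 'flac'
    }
};

// interface_config.js: add 'localrecording' (all lowercase) to
// TOOLBAR_BUTTONS so the toolbar button shows up.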

View File

@@ -45,6 +45,7 @@
@import 'modals/settings/settings';
@import 'modals/speaker_stats/speaker_stats';
@import 'modals/video-quality/video-quality';
@import 'modals/local-recording/local-recording';
@import 'videolayout_default';
@import 'notice';
@import 'popup_menu';

View File

@@ -0,0 +1,92 @@
.localrec-participant-stats {
list-style: none;
padding: 0;
width: 100%;
font-weight: 500;
.localrec-participant-stats-item__status-dot {
position: relative;
display: block;
width: 9px;
height: 9px;
border-radius: 50%;
margin: 0 auto;
&.status-on {
background: green;
}
&.status-off {
background: gray;
}
&.status-unknown {
background: darkgoldenrod;
}
&.status-error {
background: darkred;
}
}
.localrec-participant-stats-item__status,
.localrec-participant-stats-item__name,
.localrec-participant-stats-item__sessionid {
display: inline-block;
margin: 5px 0;
vertical-align: middle;
}
.localrec-participant-stats-item__status {
width: 5%;
}
.localrec-participant-stats-item__name {
width: 40%;
}
.localrec-participant-stats-item__sessionid {
width: 55%;
}
.localrec-participant-stats-item__name,
.localrec-participant-stats-item__sessionid {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
}
.localrec-control-info-label {
font-weight: bold;
}
.localrec-control-info-label:after {
content: ' ';
}
.localrec-control-action-link {
display: inline-block;
line-height: 1.5em;
a {
cursor: pointer;
vertical-align: middle;
}
}
.localrec-control-action-link:before {
color: $linkFontColor;
content: '\2022';
font-size: 1.5em;
padding: 0 10px;
vertical-align: middle;
}
.localrec-control-action-link:first-child:before {
content: '';
padding: 0;
}
.localrec-control-action-links {
font-weight: bold;
margin-top: 10px;
white-space: nowrap;
}

View File

@@ -168,6 +168,10 @@
background: #FF5630;
}
.circular-label.local-rec {
background: #FF5630;
}
.circular-label.stream {
background: #0065FF;
}

View File

@@ -43,7 +43,8 @@
"mute": "Mute or unmute your microphone",
"fullScreen": "View or exit full screen",
"videoMute": "Start or stop your camera",
"showSpeakerStats": "Show speaker stats"
"showSpeakerStats": "Show speaker stats",
"localRecording": "Show or hide local recording controls"
},
"welcomepage":{
"accessibilityLabel": {
@@ -87,6 +88,7 @@
"fullScreen": "Toggle full screen",
"hangup": "Leave the call",
"invite": "Invite people",
"localRecording": "Toggle local recording controls",
"lockRoom": "Toggle room lock",
"moreActions": "Toggle more actions menu",
"moreActionsMenu": "More actions menu",
@@ -668,5 +670,34 @@
"decline": "Dismiss",
"productLabel": "from Jitsi Meet",
"videoCallTitle": "Incoming video call"
},
"localRecording": {
"localRecording": "Local Recording",
"dialogTitle": "Local Recording Controls",
"start": "Start Recording",
"stop": "Stop Recording",
"moderator": "Moderator",
"me": "Me",
"duration": "Duration",
"durationNA": "N/A",
"encoding": "Encoding",
"participantStats": "Participant Stats",
"participant": "Participant",
"sessionToken": "Session Token",
"clientState": {
"on": "On",
"off": "Off",
"unknown": "Unknown"
},
"messages": {
"engaged": "Local recording engaged.",
"finished": "Recording session __token__ finished. Please send the recorded file to the moderator.",
"finishedModerator": "Recording session __token__ finished. The recording of the local track has been saved. Please ask the other participants to submit their recordings.",
"notModerator": "You are not the moderator. You cannot start or stop local recording."
},
"yes": "Yes",
"no": "No",
"label": "LOR",
"labelToolTip": "Local recording is engaged"
}
}

package-lock.json generated
View File

@@ -9736,6 +9736,10 @@
"yaeti": "1.0.1"
}
},
"libflacjs": {
"version": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
"from": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d"
},
"load-json-file": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz",

View File

@@ -48,6 +48,7 @@
"jsc-android": "224109.1.0",
"jwt-decode": "2.2.0",
"lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#4a28a196160411d657518022de8bded7c02ad679",
"libflacjs": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
"lodash": "4.17.4",
"moment": "2.19.4",
"moment-duration-format": "2.2.2",

View File

@@ -3,6 +3,7 @@
import React, { Component } from 'react';
import { isFilmstripVisible } from '../../filmstrip';
import { LocalRecordingLabel } from '../../local-recording';
import { RecordingLabel } from '../../recording';
import { shouldDisplayTileView } from '../../video-layout';
import { VideoQualityLabel } from '../../video-quality';
@@ -69,6 +70,18 @@ export default class AbstractLabels<P: Props, S> extends Component<P, S> {
<TranscribingLabel />
);
}
/**
* Renders the {@code LocalRecordingLabel}.
*
* @returns {React$Element}
* @protected
*/
_renderLocalRecordingLabel() {
return (
<LocalRecordingLabel />
);
}
}
/**

View File

@@ -85,6 +85,9 @@ class Labels extends AbstractLabels<Props, State> {
this._renderRecordingLabel(
JitsiRecordingConstants.mode.STREAM)
}
{
this._renderLocalRecordingLabel()
}
{
this._renderTranscribingLabel()
}
@@ -101,6 +104,8 @@
_renderVideoQualityLabel: () => React$Element<*>
_renderTranscribingLabel: () => React$Element<*>
_renderLocalRecordingLabel: () => React$Element<*>
}
export default connect(_mapStateToProps)(Labels);

View File

@@ -0,0 +1,32 @@
/**
* Action to signal that the local client has started to perform recording
* (as in: {@code RecordingAdapter} is actively collecting audio data).
*
* {
* type: LOCAL_RECORDING_ENGAGED,
* recordingEngagedAt: Date
* }
*/
export const LOCAL_RECORDING_ENGAGED = Symbol('LOCAL_RECORDING_ENGAGED');
/**
* Action to signal that the local client has stopped recording
* (as in: {@code RecordingAdapter} is no longer collecting audio data).
*
* {
* type: LOCAL_RECORDING_UNENGAGED
* }
*/
export const LOCAL_RECORDING_UNENGAGED = Symbol('LOCAL_RECORDING_UNENGAGED');
/**
* Action to update {@code LocalRecordingInfoDialog} with stats from all
* clients.
*
* {
* type: LOCAL_RECORDING_STATS_UPDATE,
* stats: Object
* }
*/
export const LOCAL_RECORDING_STATS_UPDATE
= Symbol('LOCAL_RECORDING_STATS_UPDATE');

View File

@@ -0,0 +1,59 @@
/* @flow */
import {
LOCAL_RECORDING_ENGAGED,
LOCAL_RECORDING_UNENGAGED,
LOCAL_RECORDING_STATS_UPDATE
} from './actionTypes';
// The following two actions signal state changes in local recording engagement.
// In other words, the events of the local WebWorker / MediaRecorder starting to
// record and finishing recording.
// Note that this is not the event fired when the user tries to start the
// recording in the UI.
/**
* Signals that local recording has been engaged.
*
* @param {Date} startTime - Time when the recording is engaged.
* @returns {{
* type: LOCAL_RECORDING_ENGAGED,
* recordingEngagedAt: Date
* }}
*/
export function localRecordingEngaged(startTime: Date) {
return {
type: LOCAL_RECORDING_ENGAGED,
recordingEngagedAt: startTime
};
}
/**
* Signals that local recording has finished.
*
* @returns {{
* type: LOCAL_RECORDING_UNENGAGED
* }}
*/
export function localRecordingUnengaged() {
return {
type: LOCAL_RECORDING_UNENGAGED
};
}
/**
* Updates the local recording stats from each client,
* to be displayed on {@code LocalRecordingInfoDialog}.
*
* @param {Object} stats - The stats object.
* @returns {{
* type: LOCAL_RECORDING_STATS_UPDATE,
* stats: Object
* }}
*/
export function statsUpdate(stats: Object) {
return {
type: LOCAL_RECORDING_STATS_UPDATE,
stats
};
}
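The reducer for these actions is registered via import './reducer' in the feature's index (shown later) but its source is not part of this excerpt. A minimal sketch of how it might map the actions onto the 'features/local-recording' state read by _mapStateToProps in LocalRecordingInfoDialog, assuming the codebase's usual ReducerRegistry pattern (the actual implementation may differ):

import { ReducerRegistry } from '../base/redux';

import {
    LOCAL_RECORDING_ENGAGED,
    LOCAL_RECORDING_STATS_UPDATE,
    LOCAL_RECORDING_UNENGAGED
} from './actionTypes';

const DEFAULT_STATE = {
    encodingFormat: 'flac',
    isEngaged: false,
    recordingEngagedAt: null,
    stats: undefined
};

ReducerRegistry.register('features/local-recording',
    (state = DEFAULT_STATE, action) => {
        switch (action.type) {
        case LOCAL_RECORDING_ENGAGED:
            // Remember when recording started, for the duration display.
            return {
                ...state,
                isEngaged: true,
                recordingEngagedAt: action.recordingEngagedAt
            };
        case LOCAL_RECORDING_UNENGAGED:
            return {
                ...state,
                isEngaged: false,
                recordingEngagedAt: null
            };
        case LOCAL_RECORDING_STATS_UPDATE:
            return {
                ...state,
                stats: action.stats
            };
        }

        return state;
    });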

View File

@@ -0,0 +1,86 @@
/* @flow */
import React, { Component } from 'react';
import { translate } from '../../base/i18n';
import { ToolbarButton } from '../../toolbox';
/**
* The type of the React {@code Component} props of
* {@link LocalRecordingButton}.
*/
type Props = {
/**
* Whether or not {@link LocalRecordingInfoDialog} should be displayed.
*/
isDialogShown: boolean,
/**
* Callback function called when {@link LocalRecordingButton} is clicked.
*/
onClick: Function,
/**
* Invoked to obtain translated strings.
*/
t: Function
}
/**
* A React {@code Component} for opening or closing the
* {@code LocalRecordingInfoDialog}.
*
* @extends Component
*/
class LocalRecordingButton extends Component<Props> {
/**
* Initializes a new {@code LocalRecordingButton} instance.
*
* @param {Object} props - The read-only properties with which the new
* instance is to be initialized.
*/
constructor(props: Props) {
super(props);
// Bind event handlers so they are only bound once per instance.
this._onClick = this._onClick.bind(this);
}
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
render() {
const { isDialogShown, t } = this.props;
const iconClasses
= `icon-thumb-menu ${isDialogShown
? 'icon-rec toggled' : 'icon-rec'}`;
return (
<ToolbarButton
accessibilityLabel
= { t('toolbar.accessibilityLabel.localRecording') }
iconName = { iconClasses }
onClick = { this._onClick }
tooltip = { t('localRecording.dialogTitle') } />
);
}
_onClick: () => void;
/**
* Callback invoked when the Toolbar button is clicked.
*
* @private
* @returns {void}
*/
_onClick() {
this.props.onClick();
}
}
export default translate(LocalRecordingButton);

View File

@@ -0,0 +1,403 @@
/* @flow */
import moment from 'moment';
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { Dialog } from '../../base/dialog';
import { translate } from '../../base/i18n';
import {
PARTICIPANT_ROLE,
getLocalParticipant
} from '../../base/participants';
import { statsUpdate } from '../actions';
import { recordingController } from '../controller';
/**
* The type of the React {@code Component} props of
* {@link LocalRecordingInfoDialog}.
*/
type Props = {
/**
* Redux store dispatch function.
*/
dispatch: Dispatch<*>,
/**
* Current encoding format.
*/
encodingFormat: string,
/**
* Whether the local user is the moderator.
*/
isModerator: boolean,
/**
* Whether local recording is engaged.
*/
isEngaged: boolean,
/**
* The start time of the current local recording session.
* Used to calculate the duration of recording.
*/
recordingEngagedAt: Date,
/**
* Stats of all the participants.
*/
stats: Object,
/**
* Invoked to obtain translated strings.
*/
t: Function
}
/**
* The type of the React {@code Component} state of
* {@link LocalRecordingInfoDialog}.
*/
type State = {
/**
* The recording duration string to be displayed on the UI.
*/
durationString: string
}
/**
* A React Component with the contents for a dialog that shows information about
* local recording. For users with moderator rights, this is also the "control
* panel" for starting/stopping local recording on all clients.
*
* @extends Component
*/
class LocalRecordingInfoDialog extends Component<Props, State> {
/**
* Saves a handle to the timer for UI updates,
* so that it can be cancelled when the component unmounts.
*/
_timer: ?IntervalID;
/**
* Initializes a new {@code LocalRecordingInfoDialog} instance.
*
* @param {Props} props - The React {@code Component} props to initialize
* the new {@code LocalRecordingInfoDialog} instance with.
*/
constructor(props: Props) {
super(props);
this.state = {
durationString: ''
};
}
/**
* Implements React's {@link Component#componentDidMount()}.
*
* @returns {void}
*/
componentDidMount() {
this._timer = setInterval(
() => {
this.setState((_prevState, props) => {
const nowTime = new Date();
return {
durationString: this._getDuration(nowTime,
props.recordingEngagedAt)
};
});
try {
this.props.dispatch(
statsUpdate(recordingController
.getParticipantsStats()));
} catch (e) {
// The recordingController may not be attached to a conference
// yet, so stats are unavailable; ignore and retry on the next tick.
}
},
1000
);
}
/**
* Implements React's {@link Component#componentWillUnmount()}.
*
* @returns {void}
*/
componentWillUnmount() {
if (this._timer) {
clearInterval(this._timer);
this._timer = null;
}
}
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
render() {
const { isModerator, t } = this.props;
return (
<Dialog
cancelTitleKey = { 'dialog.close' }
submitDisabled = { true }
titleKey = 'localRecording.dialogTitle'>
<div className = 'localrec-control'>
<span className = 'localrec-control-info-label'>
{`${t('localRecording.moderator')}:`}
</span>
<span className = 'info-value'>
{ isModerator
? t('localRecording.yes')
: t('localRecording.no') }
</span>
</div>
{ this._renderModeratorControls() }
{ this._renderDurationAndFormat() }
</Dialog>
);
}
/**
* Renders the recording duration and encoding format. Only shown if local
* recording is engaged.
*
* @private
* @returns {ReactElement|null}
*/
_renderDurationAndFormat() {
const { encodingFormat, isEngaged, t } = this.props;
const { durationString } = this.state;
if (!isEngaged) {
return null;
}
return (
<div>
<div>
<span className = 'localrec-control-info-label'>
{`${t('localRecording.duration')}:`}
</span>
<span className = 'info-value'>
{ durationString === ''
? t('localRecording.durationNA')
: durationString }
</span>
</div>
<div>
<span className = 'localrec-control-info-label'>
{`${t('localRecording.encoding')}:`}
</span>
<span className = 'info-value'>
{ encodingFormat }
</span>
</div>
</div>
);
}
/**
* Returns React elements for displaying the local recording stats of
* each participant.
*
* @private
* @returns {ReactElement|null}
*/
_renderStats() {
const { stats } = this.props;
if (stats === undefined) {
return null;
}
const ids = Object.keys(stats);
return (
<div className = 'localrec-participant-stats' >
{ this._renderStatsHeader() }
{ ids.map((id, i) => this._renderStatsLine(i, id)) }
</div>
);
}
/**
* Renders the stats for one participant.
*
* @private
* @param {*} lineKey - The key required by React for elements in lists.
* @param {*} id - The ID of the participant.
* @returns {ReactElement}
*/
_renderStatsLine(lineKey, id) {
const { stats } = this.props;
let statusClass = 'localrec-participant-stats-item__status-dot ';
statusClass += stats[id].recordingStats
? stats[id].recordingStats.isRecording
? 'status-on'
: 'status-off'
: 'status-unknown';
return (
<div
className = 'localrec-participant-stats-item'
key = { lineKey } >
<div className = 'localrec-participant-stats-item__status'>
<span className = { statusClass } />
</div>
<div className = 'localrec-participant-stats-item__name'>
{ stats[id].displayName || id }
</div>
<div className = 'localrec-participant-stats-item__sessionid'>
{ stats[id].recordingStats.currentSessionToken }
</div>
</div>
);
}
/**
* Renders the participant stats header line.
*
* @private
* @returns {ReactElement}
*/
_renderStatsHeader() {
const { t } = this.props;
return (
<div className = 'localrec-participant-stats-item'>
<div className = 'localrec-participant-stats-item__status' />
<div className = 'localrec-participant-stats-item__name'>
{ t('localRecording.participant') }
</div>
<div className = 'localrec-participant-stats-item__sessionid'>
{ t('localRecording.sessionToken') }
</div>
</div>
);
}
/**
* Renders the moderator-only controls, i.e. stats of all users and the
* action links.
*
* @private
* @returns {ReactElement|null}
*/
_renderModeratorControls() {
const { isModerator, isEngaged, t } = this.props;
if (!isModerator) {
return null;
}
return (
<div>
<div className = 'localrec-control-action-links'>
<div className = 'localrec-control-action-link'>
{ isEngaged ? <a
onClick = { this._onStop }>
{ t('localRecording.stop') }
</a>
: <a
onClick = { this._onStart }>
{ t('localRecording.start') }
</a>
}
</div>
</div>
<div>
<span className = 'localrec-control-info-label'>
{`${t('localRecording.participantStats')}:`}
</span>
</div>
{ this._renderStats() }
</div>
);
}
/**
* Creates a duration string "HH:MM:SS" from two Date objects.
*
* @param {Date} now - Current time.
* @param {Date} prev - Previous time, the time to be subtracted.
* @returns {string}
*/
_getDuration(now, prev) {
if (prev === null || prev === undefined) {
return '';
}
// Still a hack, as moment.js does not support formatting of duration
// (i.e. TimeDelta). Only works if total duration < 24 hours.
// But who is going to have a 24-hour long conference?
return moment(now - prev).utc()
.format('HH:mm:ss');
}
/**
* Callback function for the Start UI action.
*
* @private
* @returns {void}
*/
_onStart() {
recordingController.startRecording();
}
/**
* Callback function for the Stop UI action.
*
* @private
* @returns {void}
*/
_onStop() {
recordingController.stopRecording();
}
}
/**
* Maps (parts of) the Redux state to the associated props for the
* {@code LocalRecordingInfoDialog} component.
*
* @param {Object} state - The Redux state.
* @private
* @returns {{
* encodingFormat: string,
* isModerator: boolean,
* isEngaged: boolean,
* recordingEngagedAt: Date,
* stats: Object
* }}
*/
function _mapStateToProps(state) {
const {
encodingFormat,
isEngaged,
recordingEngagedAt,
stats
} = state['features/local-recording'];
const isModerator
= getLocalParticipant(state).role === PARTICIPANT_ROLE.MODERATOR;
return {
encodingFormat,
isModerator,
isEngaged,
recordingEngagedAt,
stats
};
}
export default translate(connect(_mapStateToProps)(LocalRecordingInfoDialog));

View File

@@ -0,0 +1,75 @@
// @flow
import Tooltip from '@atlaskit/tooltip';
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { translate } from '../../base/i18n/index';
import { CircularLabel } from '../../base/label/index';
/**
* The type of the React {@code Component} props of {@link LocalRecordingLabel}.
*/
type Props = {
/**
* Invoked to obtain translated strings.
*/
t: Function,
/**
* Whether local recording is engaged or not.
*/
isEngaged: boolean
};
/**
* React Component for displaying a label when local recording is engaged.
*
* @extends Component
*/
class LocalRecordingLabel extends Component<Props> {
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
render() {
if (!this.props.isEngaged) {
return null;
}
return (
<Tooltip
content = { this.props.t('localRecording.labelToolTip') }
position = { 'left' }>
<CircularLabel
className = 'local-rec'
label = { this.props.t('localRecording.label') } />
</Tooltip>
);
}
}
/**
* Maps (parts of) the Redux state to the associated props for the
* {@code LocalRecordingLabel} component.
*
* @param {Object} state - The Redux state.
* @private
* @returns {{
*     isEngaged: boolean
* }}
*/
function _mapStateToProps(state) {
const { isEngaged } = state['features/local-recording'];
return {
isEngaged
};
}
export default translate(connect(_mapStateToProps)(LocalRecordingLabel));

View File

@@ -0,0 +1,5 @@
export { default as LocalRecordingButton } from './LocalRecordingButton';
export { default as LocalRecordingLabel } from './LocalRecordingLabel';
export {
default as LocalRecordingInfoDialog
} from './LocalRecordingInfoDialog';

View File

@@ -0,0 +1,687 @@
/* @flow */
import { i18next } from '../../base/i18n';
import {
FlacAdapter,
OggAdapter,
WavAdapter,
downloadBlob
} from '../recording';
import { sessionManager } from '../session';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* XMPP command for signaling the start of local recording to all clients.
* Should be sent by the moderator only.
*/
const COMMAND_START = 'localRecStart';
/**
* XMPP command for signaling the stop of local recording to all clients.
* Should be sent by the moderator only.
*/
const COMMAND_STOP = 'localRecStop';
/**
* One-time command used to trigger the moderator to resend the commands.
* This is a workaround for newly-joined clients to receive remote presence.
*/
const COMMAND_PING = 'localRecPing';
/**
* One-time command sent upon receiving a {@code COMMAND_PING}.
* Only the moderator sends this command.
* This command does not carry any information itself, but rather forces the
* XMPP server to resend the remote presence.
*/
const COMMAND_PONG = 'localRecPong';
/**
* Participant property key for local recording stats.
*/
const PROPERTY_STATS = 'localRecStats';
/**
* Supported recording formats.
*/
const RECORDING_FORMATS = new Set([ 'flac', 'wav', 'ogg' ]);
/**
* Default recording format.
*/
const DEFAULT_RECORDING_FORMAT = 'flac';
/**
* States of the {@code RecordingController}.
*/
const ControllerState = Object.freeze({
/**
* Idle (not recording).
*/
IDLE: Symbol('IDLE'),
/**
* Starting.
*/
STARTING: Symbol('STARTING'),
/**
* Engaged (recording).
*/
RECORDING: Symbol('RECORDING'),
/**
* Stopping.
*/
STOPPING: Symbol('STOPPING'),
/**
* Failed, due to error during starting / stopping process.
*/
FAILED: Symbol('FAILED')
});
/**
* Type of the stats reported by each participant (client).
*/
type RecordingStats = {
/**
* Current local recording session token used by the participant.
*/
currentSessionToken: number,
/**
* Whether local recording is engaged on the participant's device.
*/
isRecording: boolean,
/**
* Total recorded bytes. (Reserved for future use.)
*/
recordedBytes: number,
/**
* Total recording duration. (Reserved for future use.)
*/
recordedLength: number
}
/**
* The component responsible for the coordination of local recording, across
* multiple participants.
* Current implementation requires that there is only one moderator in a room.
*/
class RecordingController {
/**
* For each recording session, there is a separate {@code RecordingAdapter}
* instance so that encoded bits from the previous sessions can still be
* retrieved after they ended.
*
* @private
*/
_adapters = {};
/**
* The {@code JitsiConference} instance.
*
* @private
*/
_conference: * = null;
/**
* Current recording session token.
* Session token is a number generated by the moderator, to ensure every
* client is in the same recording state.
*
* @private
*/
_currentSessionToken: number = -1;
/**
* Current state of {@code RecordingController}.
*
* @private
*/
_state = ControllerState.IDLE;
/**
* Whether or not the audio is muted in the UI. This is stored as internal
* state of {@code RecordingController} because we might have recording
* sessions that start muted.
*/
_isMuted = false;
/**
* The ID of the active microphone.
*
* @private
*/
_micDeviceId = 'default';
/**
* Current recording format. This will be in effect from the next
* recording session, i.e., if this value is changed during an on-going
* recording session, that on-going session will not use the new format.
*
* @private
*/
_format = DEFAULT_RECORDING_FORMAT;
/**
* Whether or not the {@code RecordingController} has registered for
* XMPP events. Prevents initialization from happening multiple times.
*
* @private
*/
_registered = false;
/**
* FIXME: callback function for the {@code RecordingController} to notify
* UI it wants to display a notice. Keeps {@code RecordingController}
* decoupled from UI.
*/
_onNotify: ?(messageKey: string, messageParams?: Object) => void;
/**
* FIXME: callback function for the {@code RecordingController} to notify
* UI it wants to display a warning. Keeps {@code RecordingController}
* decoupled from UI.
*/
_onWarning: ?(messageKey: string, messageParams?: Object) => void;
/**
* FIXME: callback function for the {@code RecordingController} to notify
* UI that the local recording state has changed.
*/
_onStateChanged: ?(boolean) => void;
/**
* Constructor.
*
* @returns {void}
*/
constructor() {
this.registerEvents = this.registerEvents.bind(this);
this.getParticipantsStats = this.getParticipantsStats.bind(this);
this._onStartCommand = this._onStartCommand.bind(this);
this._onStopCommand = this._onStopCommand.bind(this);
this._onPingCommand = this._onPingCommand.bind(this);
this._doStartRecording = this._doStartRecording.bind(this);
this._doStopRecording = this._doStopRecording.bind(this);
this._updateStats = this._updateStats.bind(this);
this._switchToNewSession = this._switchToNewSession.bind(this);
}
registerEvents: () => void;
/**
* Registers listeners for XMPP events.
*
* @param {JitsiConference} conference - {@code JitsiConference} instance.
* @returns {void}
*/
registerEvents(conference: Object) {
if (!this._registered) {
this._conference = conference;
if (this._conference) {
this._conference
.addCommandListener(COMMAND_STOP, this._onStopCommand);
this._conference
.addCommandListener(COMMAND_START, this._onStartCommand);
this._conference
.addCommandListener(COMMAND_PING, this._onPingCommand);
this._registered = true;
}
if (!this._conference.isModerator()) {
this._conference.sendCommandOnce(COMMAND_PING, {});
}
}
}
/**
* Sets the event handler for {@code onStateChanged}.
*
* @param {Function} delegate - The event handler.
* @returns {void}
*/
set onStateChanged(delegate: Function) {
this._onStateChanged = delegate;
}
/**
* Sets the event handler for {@code onNotify}.
*
* @param {Function} delegate - The event handler.
* @returns {void}
*/
set onNotify(delegate: Function) {
this._onNotify = delegate;
}
/**
* Sets the event handler for {@code onWarning}.
*
* @param {Function} delegate - The event handler.
* @returns {void}
*/
set onWarning(delegate: Function) {
this._onWarning = delegate;
}
/**
* Signals the participants to start local recording.
*
* @returns {void}
*/
startRecording() {
this.registerEvents();
if (this._conference && this._conference.isModerator()) {
this._conference.removeCommand(COMMAND_STOP);
this._conference.sendCommand(COMMAND_START, {
attributes: {
sessionToken: this._getRandomToken(),
format: this._format
}
});
} else if (this._onWarning) {
this._onWarning('localRecording.messages.notModerator');
}
}
/**
* Signals the participants to stop local recording.
*
* @returns {void}
*/
stopRecording() {
if (this._conference) {
if (this._conference.isModerator()) {
this._conference.removeCommand(COMMAND_START);
this._conference.sendCommand(COMMAND_STOP, {
attributes: {
sessionToken: this._currentSessionToken
}
});
} else if (this._onWarning) {
this._onWarning('localRecording.messages.notModerator');
}
}
}
/**
* Triggers the download of recorded data.
* Browser only.
*
* @param {number} sessionToken - The token of the session to download.
* @returns {void}
*/
downloadRecordedData(sessionToken: number) {
if (this._adapters[sessionToken]) {
this._adapters[sessionToken].exportRecordedData()
.then(args => {
const { data, format } = args;
const filename = `session_${sessionToken}`
+ `_${this._conference.myUserId()}.${format}`;
downloadBlob(data, filename);
})
.catch(error => {
logger.error('Failed to download audio for'
+ ` session ${sessionToken}. Error: ${error}`);
});
} else {
logger.error(`Invalid session token for download ${sessionToken}`);
}
}
/**
* Changes the current microphone.
*
* @param {string} micDeviceId - The new microphone device ID.
* @returns {void}
*/
setMicDevice(micDeviceId: string) {
if (micDeviceId !== this._micDeviceId) {
this._micDeviceId = String(micDeviceId);
if (this._state === ControllerState.RECORDING) {
// sessionManager.endSegment(this._currentSessionToken);
logger.log('Before switching microphone...');
this._adapters[this._currentSessionToken]
.setMicDevice(this._micDeviceId)
.then(() => {
logger.log('Finished switching microphone.');
// sessionManager.beginSegment(this._currentSessionToken);
})
.catch(() => {
logger.error('Failed to switch microphone');
});
}
logger.log(`Switch microphone to ${this._micDeviceId}`);
}
}
/**
* Mute or unmute audio. When muted, the ongoing local recording should
* produce silence.
*
* @param {boolean} muted - If the audio should be muted.
* @returns {void}
*/
setMuted(muted: boolean) {
this._isMuted = Boolean(muted);
if (this._state === ControllerState.RECORDING) {
this._adapters[this._currentSessionToken].setMuted(this._isMuted);
}
}
/**
* Switches the recording format.
*
* @param {string} newFormat - The new format.
* @returns {void}
*/
switchFormat(newFormat: string) {
if (!RECORDING_FORMATS.has(newFormat)) {
logger.log(`Unknown format ${newFormat}. Ignoring...`);
return;
}
this._format = newFormat;
logger.log(`Recording format switched to ${newFormat}`);
// the new format will be used in the next recording session
}
/**
* Returns the local recording stats.
*
* @returns {RecordingStats}
*/
getLocalStats(): RecordingStats {
return {
currentSessionToken: this._currentSessionToken,
isRecording: this._state === ControllerState.RECORDING,
recordedBytes: 0,
recordedLength: 0
};
}
getParticipantsStats: () => *;
/**
* Returns the remote participants' local recording stats.
*
* @returns {*}
*/
getParticipantsStats() {
const members
= this._conference.getParticipants()
.map(member => {
return {
id: member.getId(),
displayName: member.getDisplayName(),
recordingStats:
JSON.parse(member.getProperty(PROPERTY_STATS) || '{}'),
isSelf: false
};
});
// transform into a dictionary for consistent ordering
const result = {};
for (let i = 0; i < members.length; ++i) {
result[members[i].id] = members[i];
}
const localId = this._conference.myUserId();
result[localId] = {
id: localId,
displayName: i18next.t('localRecording.me'),
recordingStats: this.getLocalStats(),
isSelf: true
};
return result;
}
_changeState: (Symbol) => void;
/**
* Changes the current state of {@code RecordingController}.
*
* @private
* @param {Symbol} newState - The new state.
* @returns {void}
*/
_changeState(newState: Symbol) {
if (this._state !== newState) {
logger.log(`state change: ${this._state.toString()} -> `
+ `${newState.toString()}`);
this._state = newState;
}
}
_updateStats: () => void;
/**
* Sends out updates about the local recording stats via XMPP.
*
* @private
* @returns {void}
*/
_updateStats() {
if (this._conference) {
this._conference.setLocalParticipantProperty(PROPERTY_STATS,
JSON.stringify(this.getLocalStats()));
}
}
_onStartCommand: (*) => void;
/**
* Callback function for XMPP event.
*
* @private
* @param {*} value - The event args.
* @returns {void}
*/
_onStartCommand(value) {
const { sessionToken, format } = value.attributes;
if (this._state === ControllerState.IDLE) {
this._changeState(ControllerState.STARTING);
this._switchToNewSession(sessionToken, format);
this._doStartRecording();
} else if (this._state === ControllerState.RECORDING
&& this._currentSessionToken !== sessionToken) {
// There is local recording going on, but not for the same session.
// This means the current state might be out-of-sync with the
// moderator's, so we need to restart the recording.
this._changeState(ControllerState.STOPPING);
this._doStopRecording().then(() => {
this._changeState(ControllerState.STARTING);
this._switchToNewSession(sessionToken, format);
this._doStartRecording();
});
}
}
_onStopCommand: (*) => void;
/**
* Callback function for XMPP event.
*
* @private
* @param {*} value - The event args.
* @returns {void}
*/
_onStopCommand(value) {
if (this._state === ControllerState.RECORDING
&& this._currentSessionToken === value.attributes.sessionToken) {
this._changeState(ControllerState.STOPPING);
this._doStopRecording();
}
}
_onPingCommand: () => void;
/**
* Callback function for XMPP event.
*
* @private
* @returns {void}
*/
_onPingCommand() {
if (this._conference.isModerator()) {
logger.log('Received ping, sending pong.');
this._conference.sendCommandOnce(COMMAND_PONG, {});
}
}
/**
* Generates a token that can be used to distinguish each local recording
* session.
*
* @returns {number}
*/
_getRandomToken() {
return Math.floor(Math.random() * 100000000) + 1;
}
_doStartRecording: () => void;
/**
* Starts the recording locally.
*
* @private
* @returns {void}
*/
_doStartRecording() {
if (this._state === ControllerState.STARTING) {
const delegate = this._adapters[this._currentSessionToken];
delegate.start(this._micDeviceId)
.then(() => {
this._changeState(ControllerState.RECORDING);
sessionManager.beginSegment(this._currentSessionToken);
logger.log('Local recording engaged.');
if (this._onNotify) {
this._onNotify('localRecording.messages.engaged');
}
if (this._onStateChanged) {
this._onStateChanged(true);
}
delegate.setMuted(this._isMuted);
this._updateStats();
})
.catch(err => {
logger.error('Failed to start local recording.', err);
});
}
}
_doStopRecording: () => Promise<void>;
/**
* Stops the recording locally.
*
* @private
* @returns {Promise<void>}
*/
_doStopRecording() {
if (this._state === ControllerState.STOPPING) {
const token = this._currentSessionToken;
return this._adapters[this._currentSessionToken]
.stop()
.then(() => {
this._changeState(ControllerState.IDLE);
sessionManager.endSegment(this._currentSessionToken);
logger.log('Local recording unengaged.');
this.downloadRecordedData(token);
const messageKey
= this._conference.isModerator()
? 'localRecording.messages.finishedModerator'
: 'localRecording.messages.finished';
const messageParams = {
token
};
if (this._onNotify) {
this._onNotify(messageKey, messageParams);
}
if (this._onStateChanged) {
this._onStateChanged(false);
}
this._updateStats();
})
.catch(err => {
logger.error('Failed to stop local recording.', err);
});
}
/* eslint-disable */
return (Promise.resolve(): Promise<void>);
// FIXME: better ways to satisfy flow and ESLint at the same time?
/* eslint-enable */
}
_switchToNewSession: (string, string) => void;
/**
* Switches to a new local recording session.
*
* @param {string} sessionToken - The session token.
* @param {string} format - The recording format for the session.
* @returns {void}
*/
_switchToNewSession(sessionToken, format) {
this._format = format;
this._currentSessionToken = sessionToken;
logger.log(`New session: ${this._currentSessionToken}, `
+ `format: ${this._format}`);
this._adapters[sessionToken]
= this._createRecordingAdapter();
sessionManager.createSession(sessionToken, this._format);
}
/**
* Creates a recording adapter according to the current recording format.
*
* @private
* @returns {RecordingAdapter}
*/
_createRecordingAdapter() {
logger.debug('[RecordingController] creating recording'
+ ` adapter for ${this._format} format.`);
switch (this._format) {
case 'ogg':
return new OggAdapter();
case 'flac':
return new FlacAdapter();
case 'wav':
return new WavAdapter();
default:
throw new Error(`Unknown format: ${this._format}`);
}
}
}
/**
* Global singleton of {@code RecordingController}.
*/
export const recordingController = new RecordingController();
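Pulling the pieces together, the singleton is driven from both directions. A condensed sketch of the wiring, mirroring what the middleware and the info dialog below actually do (here `conference` stands for the joined JitsiConference instance):

import { recordingController } from './controller';

// Conference -> controller: hook up XMPP once the conference is joined.
recordingController.registerEvents(conference);

// Controller -> UI: get notified when recording engages/unengages.
recordingController.onStateChanged = isEngaged => {
    // e.g. dispatch localRecordingEngaged(new Date())
    // or localRecordingUnengaged().
};

// UI -> controller: the moderator's Start/Stop actions.
recordingController.startRecording();
// ...later...
recordingController.stopRecording();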

View File

@@ -0,0 +1 @@
export * from './RecordingController';

View File

@@ -0,0 +1,7 @@
export * from './actions';
export * from './actionTypes';
export * from './components';
export * from './controller';
import './middleware';
import './reducer';

View File

@@ -0,0 +1,92 @@
/* @flow */
import { createShortcutEvent, sendAnalytics } from '../analytics';
import { APP_WILL_MOUNT, APP_WILL_UNMOUNT } from '../base/app';
import { CONFERENCE_JOINED } from '../base/conference';
import { toggleDialog } from '../base/dialog';
import { i18next } from '../base/i18n';
import { SET_AUDIO_MUTED } from '../base/media';
import { MiddlewareRegistry } from '../base/redux';
import { SETTINGS_UPDATED } from '../base/settings/actionTypes';
import { showNotification } from '../notifications';
import { localRecordingEngaged, localRecordingUnengaged } from './actions';
import { LocalRecordingInfoDialog } from './components';
import { recordingController } from './controller';
declare var APP: Object;
declare var config: Object;
const isFeatureEnabled = typeof config === 'object' && config.localRecording
&& config.localRecording.enabled === true;
isFeatureEnabled
&& MiddlewareRegistry.register(({ getState, dispatch }) => next => action => {
const result = next(action);
switch (action.type) {
case CONFERENCE_JOINED: {
const { conference } = getState()['features/base/conference'];
const { localRecording } = getState()['features/base/config'];
if (localRecording && localRecording.format) {
recordingController.switchFormat(localRecording.format);
}
recordingController.registerEvents(conference);
break;
}
case APP_WILL_MOUNT:
// Set up the delegates on recordingController, allowing the UI to
// react to state changes in recordingController.
recordingController.onStateChanged = isEngaged => {
if (isEngaged) {
const nowTime = new Date();
dispatch(localRecordingEngaged(nowTime));
} else {
dispatch(localRecordingUnengaged());
}
};
recordingController.onWarning = (messageKey, messageParams) => {
dispatch(showNotification({
title: i18next.t('localRecording.localRecording'),
description: i18next.t(messageKey, messageParams)
}, 10000));
};
recordingController.onNotify = (messageKey, messageParams) => {
dispatch(showNotification({
title: i18next.t('localRecording.localRecording'),
description: i18next.t(messageKey, messageParams)
}, 10000));
};
typeof APP === 'object' && typeof APP.keyboardshortcut === 'object'
&& APP.keyboardshortcut.registerShortcut('L', null, () => {
sendAnalytics(createShortcutEvent('local.recording'));
dispatch(toggleDialog(LocalRecordingInfoDialog));
}, 'keyboardShortcuts.localRecording');
break;
case APP_WILL_UNMOUNT:
recordingController.onStateChanged = null;
recordingController.onNotify = null;
recordingController.onWarning = null;
break;
case SET_AUDIO_MUTED:
recordingController.setMuted(action.muted);
break;
case SETTINGS_UPDATED: {
const { micDeviceId } = getState()['features/base/settings'];
if (micDeviceId) {
recordingController.setMicDevice(micDeviceId);
}
break;
}
}
return result;
});

View File

@@ -0,0 +1,129 @@
import { RecordingAdapter } from './RecordingAdapter';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* Base class for {@code AudioContext}-based recording adapters.
*/
export class AbstractAudioContextAdapter extends RecordingAdapter {
/**
* The {@code AudioContext} instance.
*/
_audioContext = null;
/**
* The {@code ScriptProcessorNode} instance.
*/
_audioProcessingNode = null;
/**
* The {@code MediaStreamAudioSourceNode} instance.
*/
_audioSource = null;
/**
* The {@code MediaStream} instance, representing the current audio device.
*/
_stream = null;
/**
* Sample rate.
*/
_sampleRate = 44100;
/**
* Constructor.
*/
constructor() {
super();
// sampleRate is browser and OS dependent.
// Setting sampleRate explicitly is in the specs but not implemented
// by browsers.
// See: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/
// AudioContext#Browser_compatibility
// And https://bugs.chromium.org/p/chromium/issues/detail?id=432248
this._audioContext = new AudioContext();
this._sampleRate = this._audioContext.sampleRate;
logger.log(`Current sampleRate ${this._sampleRate}.`);
}
/**
* Sets up the audio graph in the AudioContext.
*
* @protected
* @param {string} micDeviceId - The current microphone device ID.
* @param {Function} callback - Callback function to
* handle AudioProcessingEvents.
* @returns {Promise}
*/
_initializeAudioContext(micDeviceId, callback) {
if (typeof callback !== 'function') {
return Promise.reject('a callback function is required.');
}
return this._getAudioStream(micDeviceId)
.then(stream => {
this._stream = stream;
this._audioSource
= this._audioContext.createMediaStreamSource(stream);
this._audioProcessingNode
= this._audioContext.createScriptProcessor(4096, 1, 1);
this._audioProcessingNode.onaudioprocess = callback;
logger.debug('AudioContext is set up.');
})
.catch(err => {
logger.error(`Error calling getUserMedia(): ${err}`);
return Promise.reject(err);
});
}
/**
* Connects the nodes in the {@code AudioContext} to start the flow of
* audio data.
*
* @protected
* @returns {void}
*/
_connectAudioGraph() {
this._audioSource.connect(this._audioProcessingNode);
this._audioProcessingNode.connect(this._audioContext.destination);
}
/**
* Disconnects the nodes in the {@code AudioContext}.
*
* @protected
* @returns {void}
*/
_disconnectAudioGraph() {
this._audioProcessingNode.onaudioprocess = undefined;
this._audioProcessingNode.disconnect();
this._audioSource.disconnect();
}
/**
* Replaces the current microphone MediaStream.
*
* @protected
* @param {string} micDeviceId - New microphone ID.
* @returns {Promise}
*/
_replaceMic(micDeviceId) {
if (this._audioContext && this._audioProcessingNode) {
return this._getAudioStream(micDeviceId).then(newStream => {
const newSource = this._audioContext
.createMediaStreamSource(newStream);
this._audioSource.disconnect();
newSource.connect(this._audioProcessingNode);
this._stream = newStream;
this._audioSource = newSource;
});
}
return Promise.resolve();
}
}

View File

@@ -0,0 +1,143 @@
import { RecordingAdapter } from './RecordingAdapter';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* Recording adapter that uses {@code MediaRecorder} (default browser encoding
* with Opus codec).
*/
export class OggAdapter extends RecordingAdapter {
/**
* Instance of MediaRecorder.
* @private
*/
_mediaRecorder = null;
/**
* Initialization promise.
* @private
*/
_initPromise = null;
/**
* The recorded audio file.
* @private
*/
_recordedData = null;
/**
* Implements {@link RecordingAdapter#start()}.
*
* @inheritdoc
*/
start(micDeviceId) {
if (!this._initPromise) {
this._initPromise = this._initialize(micDeviceId);
}
return this._initPromise.then(() =>
new Promise(resolve => {
this._mediaRecorder.start();
resolve();
})
);
}
/**
* Implements {@link RecordingAdapter#stop()}.
*
* @inheritdoc
*/
stop() {
return new Promise(
resolve => {
this._mediaRecorder.onstop = () => resolve();
this._mediaRecorder.stop();
}
);
}
/**
* Implements {@link RecordingAdapter#exportRecordedData()}.
*
* @inheritdoc
*/
exportRecordedData() {
if (this._recordedData !== null) {
return Promise.resolve({
data: this._recordedData,
format: 'ogg'
});
}
return Promise.reject('No audio data recorded.');
}
/**
* Implements {@link RecordingAdapter#setMuted()}.
*
* @inheritdoc
*/
setMuted(muted) {
const shouldEnable = !muted;
if (!this._stream) {
return Promise.resolve();
}
const track = this._stream.getAudioTracks()[0];
if (!track) {
logger.error('Cannot mute/unmute. Track not found!');
return Promise.resolve();
}
if (track.enabled !== shouldEnable) {
track.enabled = shouldEnable;
logger.log(muted ? 'Mute' : 'Unmute');
}
return Promise.resolve();
}
/**
* Initialize the adapter.
*
* @private
* @param {string} micDeviceId - The current microphone device ID.
* @returns {Promise}
*/
_initialize(micDeviceId) {
if (this._mediaRecorder) {
return Promise.resolve();
}
return new Promise((resolve, reject) => {
this._getAudioStream(micDeviceId)
.then(stream => {
this._stream = stream;
this._mediaRecorder = new MediaRecorder(stream);
this._mediaRecorder.ondataavailable
= e => this._saveMediaData(e.data);
resolve();
})
.catch(err => {
logger.error(`Error calling getUserMedia(): ${err}`);
reject(err);
});
});
}
/**
* Callback for storing the encoded data.
*
* @private
* @param {Blob} data - Encoded data.
* @returns {void}
*/
_saveMediaData(data) {
this._recordedData = data;
}
}

View File

@@ -0,0 +1,85 @@
import JitsiMeetJS from '../../base/lib-jitsi-meet';
/**
* Base class for recording backends.
*/
export class RecordingAdapter {
/**
* Starts recording.
*
* @param {string} micDeviceId - The microphone to record on.
* @returns {Promise}
*/
start(/* eslint-disable no-unused-vars */
micDeviceId/* eslint-enable no-unused-vars */) {
throw new Error('Not implemented');
}
/**
* Stops recording.
*
* @returns {Promise}
*/
stop() {
throw new Error('Not implemented');
}
/**
* Export the recorded and encoded audio file.
*
* @returns {Promise<Object>}
*/
exportRecordedData() {
throw new Error('Not implemented');
}
/**
* Mutes or unmutes the current recording.
*
* @param {boolean} muted - Whether to mute or to unmute.
* @returns {Promise}
*/
setMuted(/* eslint-disable no-unused-vars */
muted/* eslint-enable no-unused-vars */) {
throw new Error('Not implemented');
}
/**
* Changes the current microphone.
*
* @param {string} micDeviceId - The new microphone device ID.
* @returns {Promise}
*/
setMicDevice(/* eslint-disable no-unused-vars */
micDeviceId/* eslint-enable no-unused-vars */) {
throw new Error('Not implemented');
}
/**
* Helper method for getting an audio {@code MediaStream}. Use this instead
* of calling browser APIs directly.
*
* @protected
* @param {string} micDeviceId - The ID of the current audio device.
* @returns {Promise}
*/
_getAudioStream(micDeviceId) {
return JitsiMeetJS.createLocalTracks({
devices: [ 'audio' ],
micDeviceId
}).then(result => {
if (result.length !== 1) {
throw new Error('Unexpected number of streams '
+ 'from createLocalTracks.');
}
const mediaStream = result[0].stream;
if (mediaStream === undefined) {
throw new Error('Failed to create local track.');
}
return mediaStream;
});
}
}
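To make the contract concrete, here is a hypothetical no-op adapter that satisfies the interface (not part of this PR; the real adapters do the actual capturing and encoding):

import { RecordingAdapter } from './RecordingAdapter';

/**
 * A do-nothing adapter, illustrating the expected lifecycle:
 * start() -> setMuted()/setMicDevice() -> stop() -> exportRecordedData().
 */
export class NullAdapter extends RecordingAdapter {
    start(micDeviceId) {
        // A real adapter would obtain a stream via
        // this._getAudioStream(micDeviceId) and begin capturing here.
        return Promise.resolve();
    }

    stop() {
        return Promise.resolve();
    }

    exportRecordedData() {
        // Real adapters resolve with { data: <Blob>, format: <string> }.
        return Promise.reject('No audio data recorded.');
    }

    setMuted(muted) {
        return Promise.resolve();
    }

    setMicDevice(micDeviceId) {
        return Promise.resolve();
    }
}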

View File

@@ -0,0 +1,20 @@
/**
* Force download of Blob in browser by faking an <a> tag.
*
* @param {Blob} blob - The data blob to download.
* @param {string} fileName - The filename to appear in the download dialog.
* @returns {void}
*/
export function downloadBlob(blob, fileName = 'recording.ogg') {
const base64Url = window.URL.createObjectURL(blob);
// fake an anchor tag
const a = document.createElement('a');
a.style = 'display: none';
a.href = base64Url;
a.download = fileName;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
}

View File

@@ -0,0 +1,290 @@
import { AbstractAudioContextAdapter } from './AbstractAudioContextAdapter';
const logger = require('jitsi-meet-logger').getLogger(__filename);
const WAV_BITS_PER_SAMPLE = 16;
/**
* Recording adapter for raw WAVE format.
*/
export class WavAdapter extends AbstractAudioContextAdapter {
/**
* Length of the WAVE file, in number of samples.
*/
_wavLength = 0;
/**
* The {@code ArrayBuffer}s that store the PCM bits.
*/
_wavBuffers = [];
/**
* Whether or not the {@code WavAdapter} is in a ready state.
*/
_isInitialized = false;
/**
* Initialization promise.
*/
_initPromise = null;
/**
* The encoded WAV file (Blob), set when recording stops.
*/
_data = null;
/**
* Constructor.
*/
constructor() {
super();
this._onAudioProcess = this._onAudioProcess.bind(this);
}
/**
* Implements {@link RecordingAdapter#start()}.
*
* @inheritdoc
*/
start(micDeviceId) {
if (!this._initPromise) {
this._initPromise = this._initialize(micDeviceId);
}
return this._initPromise.then(() => {
this._wavBuffers = [];
this._wavLength = 0;
this._connectAudioGraph();
});
}
/**
* Implements {@link RecordingAdapter#stop()}.
*
* @inheritdoc
*/
stop() {
this._disconnectAudioGraph();
this._data = this._exportMonoWAV(this._wavBuffers, this._wavLength);
this._audioProcessingNode = null;
this._audioSource = null;
this._isInitialized = false;
return Promise.resolve();
}
/**
* Implements {@link RecordingAdapter#exportRecordedData()}.
*
* @inheritdoc
*/
exportRecordedData() {
if (this._data !== null) {
return Promise.resolve({
data: this._data,
format: 'wav'
});
}
return Promise.reject('No audio data recorded.');
}
/**
* Implements {@link RecordingAdapter#setMuted()}.
*
* @inheritdoc
*/
setMuted(muted) {
const shouldEnable = !muted;
if (!this._stream) {
return Promise.resolve();
}
const track = this._stream.getAudioTracks()[0];
if (!track) {
logger.error('Cannot mute/unmute. Track not found!');
return Promise.resolve();
}
if (track.enabled !== shouldEnable) {
track.enabled = shouldEnable;
logger.log(muted ? 'Mute' : 'Unmute');
}
return Promise.resolve();
}
/**
* Implements {@link RecordingAdapter#setMicDevice()}.
*
* @inheritdoc
*/
setMicDevice(micDeviceId) {
return this._replaceMic(micDeviceId);
}
/**
* Creates a WAVE file header.
*
* @private
* @param {number} dataLength - Length of the payload (PCM data), in bytes.
* @returns {Uint8Array}
*/
_createWavHeader(dataLength) {
// adapted from
// https://github.com/mmig/speech-to-flac/blob/master/encoder.js
// ref: http://soundfile.sapp.org/doc/WaveFormat/
// create our WAVE file header
const buffer = new ArrayBuffer(44);
const view = new DataView(buffer);
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
// set file size at the end
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// Subchunk1Size (16 for PCM)
view.setUint32(16, 16, true);
// AudioFormat (1 = linear PCM)
view.setUint16(20, 1, true);
// NumChannels
view.setUint16(22, 1, true);
// SampleRate
view.setUint32(24, this._sampleRate, true);
// ByteRate
view.setUint32(28,
Number(this._sampleRate) * 1 * WAV_BITS_PER_SAMPLE / 8, true);
// BlockAlign
view.setUint16(32, 1 * Number(WAV_BITS_PER_SAMPLE) / 8, true);
view.setUint16(34, WAV_BITS_PER_SAMPLE, true);
// data sub-chunk
writeUTFBytes(view, 36, 'data');
// RIFF chunk size: total file length minus the 8-byte RIFF header,
// i.e. 36 + dataLength for a 44-byte header
view.setUint32(4, 36 + dataLength, true);
// data chunk length
view.setUint32(40, dataLength, true);
return new Uint8Array(buffer);
}
/**
* Initialize the adapter.
*
* @private
* @param {string} micDeviceId - The current microphone device ID.
* @returns {Promise}
*/
_initialize(micDeviceId) {
if (this._isInitialized) {
return Promise.resolve();
}
return this._initializeAudioContext(micDeviceId, this._onAudioProcess)
.then(() => {
this._isInitialized = true;
});
}
/**
* Callback function for handling AudioProcessingEvents.
*
* @private
* @param {AudioProcessingEvent} e - The event containing the raw PCM.
* @returns {void}
*/
_onAudioProcess(e) {
// See: https://developer.mozilla.org/en-US/docs/Web/API/
// AudioBuffer/getChannelData
// The returned value is a Float32Array.
const channelLeft = e.inputBuffer.getChannelData(0);
// Need to copy the Float32Array:
// unlike passing to WebWorker, this data is passed by reference,
// so we need to copy it, otherwise the resulting audio file will be
// just repeating the last segment.
this._wavBuffers.push(new Float32Array(channelLeft));
this._wavLength += channelLeft.length;
}
/**
* Combines buffers and export to a wav file.
*
* @private
* @param {Float32Array[]} buffers - The stored buffers.
* @param {number} length - Total length (number of samples).
* @returns {Blob}
*/
_exportMonoWAV(buffers, length) {
const dataLength = length * 2; // each sample = 16 bit = 2 bytes
const buffer = new ArrayBuffer(44 + dataLength);
const view = new DataView(buffer);
// copy WAV header data into the array buffer
const header = this._createWavHeader(dataLength);
const len = header.length;
for (let i = 0; i < len; ++i) {
view.setUint8(i, header[i]);
}
// write audio data
floatTo16BitPCM(view, 44, buffers);
return new Blob([ view ], { type: 'audio/wav' });
}
}
/**
* Helper function. Writes a string to memory, one byte per
* character. Required by WAVE headers.
*
* @param {ArrayBuffer} view - The view to memory.
* @param {number} offset - Offset.
* @param {string} string - The string to be written.
* @returns {void}
*/
function writeUTFBytes(view, offset, string) {
const lng = string.length;
// write one byte per character
for (let i = 0; i < lng; ++i) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
/**
* Helper function for converting Float32Array to Int16Array.
*
* @param {DataView} output - View to the output buffer.
* @param {number} offset - The offset in output buffer to write from.
* @param {Float32Array[]} inputBuffers - The input buffers.
* @returns {void}
*/
function floatTo16BitPCM(output, offset, inputBuffers) {
let i, j;
let input, s, sampleCount;
const bufferCount = inputBuffers.length;
let o = offset;
for (i = 0; i < bufferCount; ++i) {
input = inputBuffers[i];
sampleCount = input.length;
for (j = 0; j < sampleCount; ++j, o += 2) {
s = Math.max(-1, Math.min(1, input[j]));
output.setInt16(o, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
}
}
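As a quick sanity check of the conversion above (an illustrative snippet in this module's scope; the input values are assumed):

// floatTo16BitPCM maps [-1, 1] floats to little-endian signed 16-bit ints:
//    0.0 ->  0
//    0.5 ->  0.5 * 0x7FFF = 16383 (fraction truncated by setInt16)
//   -1.0 -> -1.0 * 0x8000 = -32768
const view = new DataView(new ArrayBuffer(6));
floatTo16BitPCM(view, 0, [ new Float32Array([ 0, 0.5, -1 ]) ]);
// view now holds the samples 0, 16383, -32768 at byte offsets 0, 2, 4.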

View File

@@ -0,0 +1,262 @@
import {
DEBUG,
MAIN_THREAD_FINISH,
MAIN_THREAD_INIT,
MAIN_THREAD_NEW_DATA_ARRIVED,
WORKER_BLOB_READY,
WORKER_LIBFLAC_READY
} from './messageTypes';
import { AbstractAudioContextAdapter } from '../AbstractAudioContextAdapter';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* Recording adapter that uses libflac.js in the background.
*/
export class FlacAdapter extends AbstractAudioContextAdapter {
/**
* Instance of WebWorker (flacEncodeWorker).
*/
_encoder = null;
/**
* Resolve function of the Promise returned by {@code stop()}.
* This is called after the WebWorker sends back {@code WORKER_BLOB_READY}.
*/
_stopPromiseResolver = null;
/**
* Resolve function of the Promise that initializes the flacEncodeWorker.
*/
_initWorkerPromiseResolver = null;
/**
* Initialization promise.
*/
_initPromise = null;
/**
* The encoded FLAC file (Blob), set via the WebWorker when recording
* stops.
*/
_data = null;
/**
* Constructor.
*/
constructor() {
super();
this._onAudioProcess = this._onAudioProcess.bind(this);
this._onWorkerMessage = this._onWorkerMessage.bind(this);
}
/**
* Implements {@link RecordingAdapter#start()}.
*
* @inheritdoc
*/
start(micDeviceId) {
if (!this._initPromise) {
this._initPromise = this._initialize(micDeviceId);
}
return this._initPromise.then(() => {
this._connectAudioGraph();
});
}
/**
* Implements {@link RecordingAdapter#stop()}.
*
* @inheritdoc
*/
stop() {
if (!this._encoder) {
logger.error('Attempting to stop but has nothing to stop.');
return Promise.reject();
}
return new Promise(resolve => {
this._initPromise = null;
this._disconnectAudioGraph();
this._stopPromiseResolver = resolve;
this._encoder.postMessage({
command: MAIN_THREAD_FINISH
});
});
}
/**
* Implements {@link RecordingAdapter#exportRecordedData()}.
*
* @inheritdoc
*/
exportRecordedData() {
if (this._data !== null) {
return Promise.resolve({
data: this._data,
format: 'flac'
});
}
return Promise.reject('No audio data recorded.');
}
/**
* Implements {@link RecordingAdapter#setMuted()}.
*
* @inheritdoc
*/
setMuted(muted) {
const shouldEnable = !muted;
if (!this._stream) {
return Promise.resolve();
}
const track = this._stream.getAudioTracks()[0];
if (!track) {
logger.error('Cannot mute/unmute. Track not found!');
return Promise.resolve();
}
if (track.enabled !== shouldEnable) {
track.enabled = shouldEnable;
logger.log(muted ? 'Mute' : 'Unmute');
}
return Promise.resolve();
}
/**
* Implements {@link RecordingAdapter#setMicDevice()}.
*
* @inheritdoc
*/
setMicDevice(micDeviceId) {
return this._replaceMic(micDeviceId);
}
/**
* Initialize the adapter.
*
* @private
* @param {string} micDeviceId - The current microphone device ID.
* @returns {Promise}
*/
_initialize(micDeviceId) {
if (this._encoder !== null) {
return Promise.resolve();
}
const promiseInitWorker = new Promise((resolve, reject) => {
try {
this._loadWebWorker();
} catch (e) {
reject();
}
// Save the Promise's resolver to resolve it later.
// This Promise is only resolved in _onWorkerMessage when we
// receive WORKER_LIBFLAC_READY from the WebWorker.
this._initWorkerPromiseResolver = resolve;
// set up listener for messages from the WebWorker
this._encoder.onmessage = this._onWorkerMessage;
this._encoder.postMessage({
command: MAIN_THREAD_INIT,
config: {
sampleRate: this._sampleRate,
bps: 16
}
});
});
// Arrow function is used here because we want AudioContext to be
// initialized only **after** promiseInitWorker is resolved.
return promiseInitWorker
.then(() =>
this._initializeAudioContext(
micDeviceId,
this._onAudioProcess
));
}
/**
* Callback function for handling AudioProcessingEvents.
*
* @private
* @param {AudioProcessingEvent} e - The event containing the raw PCM.
* @returns {void}
*/
_onAudioProcess(e) {
// Delegates to the WebWorker to do the encoding.
// The return of getChannelData() is a Float32Array,
// each element representing one sample.
const channelLeft = e.inputBuffer.getChannelData(0);
this._encoder.postMessage({
command: MAIN_THREAD_NEW_DATA_ARRIVED,
buf: channelLeft
});
}
/**
* Handler for messages from flacEncodeWorker.
*
* @private
* @param {MessageEvent} e - The event sent by the WebWorker.
* @returns {void}
*/
_onWorkerMessage(e) {
switch (e.data.command) {
case WORKER_BLOB_READY:
// Received a Blob representing an encoded FLAC file.
this._data = e.data.buf;
if (this._stopPromiseResolver !== null) {
this._stopPromiseResolver();
this._stopPromiseResolver = null;
this._encoder.terminate();
this._encoder = null;
}
break;
case DEBUG:
logger.log(e.data);
break;
case WORKER_LIBFLAC_READY:
logger.log('libflac is ready.');
this._initWorkerPromiseResolver();
break;
default:
logger.error(
`Unknown event from encoder (WebWorker): "${e.data.command}"!`);
break;
}
}
/**
* Loads the WebWorker.
*
* @private
* @returns {void}
*/
_loadWebWorker() {
// FIXME: Workaround for different file names in development/
// production environments.
// We cannot import flacEncodeWorker as a webpack module,
// because it is in a different bundle and should be lazy-loaded
// only when flac recording is in use.
try {
// Try to load the minified version first.
this._encoder = new Worker('/libs/flacEncodeWorker.min.js');
} catch (exception1) {
// If that fails, try the unminified version.
try {
this._encoder = new Worker('/libs/flacEncodeWorker.js');
} catch (exception2) {
throw new Error('Failed to load flacEncodeWorker.');
}
}
}
}
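// A minimal usage sketch of the adapter's lifecycle, kept as comments so
// it stays inert. The 'default' device id and the surrounding wiring are
// illustrative assumptions; in this feature the recording controller owns
// these calls.
//
// const adapter = new FlacAdapter();
//
// adapter.start('default') // loads the worker, then wires the audio graph
//     .then(() => {
//         // Recording is live; raw PCM now streams to the worker.
//     });
//
// // Later, when the user stops recording:
// adapter.stop() // resolves once the worker sends WORKER_BLOB_READY
//     .then(() => adapter.exportRecordedData())
//     .then(({ data, format }) => {
//         // data is a Blob of type 'audio/flac'; format is 'flac'.
//         console.log(`Recorded ${data.size} bytes of ${format}.`);
//     });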

View File

@ -0,0 +1,397 @@
import {
MAIN_THREAD_FINISH,
MAIN_THREAD_INIT,
MAIN_THREAD_NEW_DATA_ARRIVED,
WORKER_BLOB_READY,
WORKER_LIBFLAC_READY
} from './messageTypes';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* WebWorker that does FLAC encoding using libflac.js.
*/
self.FLAC_SCRIPT_LOCATION = '/libs/';
/* eslint-disable */
importScripts('/libs/libflac4-1.3.2.min.js');
/* eslint-enable */
// There are a number of API calls to libflac.js that do not conform
// to the camelCase naming convention, but we cannot change them.
// So we disable the ESLint rule `new-cap` in this file.
/* eslint-disable new-cap */
// Flow will complain about the number keys in `FLAC_ERRORS`,
// ESLint will complain about the `declare` statement.
// As a workaround, add an exception for ESLint.
/* eslint-disable flowtype/no-types-missing-file-annotation */
declare var Flac: Object;
const FLAC_ERRORS = {
// The encoder is in the normal OK state and samples can be processed.
0: 'FLAC__STREAM_ENCODER_OK',
// The encoder is in the uninitialized state; one of the
// FLAC__stream_encoder_init_*() functions must be called before samples
// can be processed.
1: 'FLAC__STREAM_ENCODER_UNINITIALIZED',
// An error occurred in the underlying Ogg layer.
2: 'FLAC__STREAM_ENCODER_OGG_ERROR',
// An error occurred in the underlying verify stream decoder; check
// FLAC__stream_encoder_get_verify_decoder_state().
3: 'FLAC__STREAM_ENCODER_VERIFY_DECODER_ERROR',
// The verify decoder detected a mismatch between the original audio signal
// and the decoded audio signal.
4: 'FLAC__STREAM_ENCODER_VERIFY_MISMATCH_IN_AUDIO_DATA',
// One of the callbacks returned a fatal error.
5: 'FLAC__STREAM_ENCODER_CLIENT_ERROR',
// An I/O error occurred while opening/reading/writing a file. Check errno.
6: 'FLAC__STREAM_ENCODER_IO_ERROR',
// An error occurred while writing the stream; usually, the write_callback
// returned an error.
7: 'FLAC__STREAM_ENCODER_FRAMING_ERROR',
// Memory allocation failed.
8: 'FLAC__STREAM_ENCODER_MEMORY_ALLOCATION_ERROR'
};
/**
* States of the {@code Encoder}.
*/
const EncoderState = Object.freeze({
/**
* Initial state, when libflac.js is not initialized.
*/
UNINITIALIZED: Symbol('uninitialized'),
/**
* Actively encoding new audio bits.
*/
WORKING: Symbol('working'),
/**
* Encoding has finished and encoded bits are available.
*/
FINISHED: Symbol('finished')
});
/**
* Default FLAC compression level.
*/
const FLAC_COMPRESSION_LEVEL = 5;
/**
* Concatenates multiple Uint8Arrays into one.
*
* @param {Uint8Array[]} arrays - Array of Uint8 arrays.
* @param {number} totalLength - Total length of all Uint8Arrays.
* @returns {Uint8Array}
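* @example
* // mergeUint8Arrays([ Uint8Array.of(1, 2), Uint8Array.of(3) ], 3)
* // => Uint8Array [ 1, 2, 3 ]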
*/
function mergeUint8Arrays(arrays, totalLength) {
const result = new Uint8Array(totalLength);
let offset = 0;
const len = arrays.length;
for (let i = 0; i < len; i++) {
const buffer = arrays[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
/**
* Wrapper class around libflac API.
*/
class Encoder {
/**
* Flac encoder instance ID. (As per libflac.js API).
* @private
*/
_encoderId = 0;
/**
* Sample rate.
* @private
*/
_sampleRate;
/**
* Bit depth (bits per sample).
* @private
*/
_bitDepth;
/**
* Buffer size.
* @private
*/
_bufferSize;
/**
* Buffers to store encoded bits temporarily.
*/
_flacBuffers = [];
/**
* Length of the encoded FLAC data, in bytes.
*/
_flacLength = 0;
/**
* The current state of the {@code Encoder}.
*/
_state = EncoderState.UNINITIALIZED;
/**
* The ready-for-grab downloadable Blob.
*/
_data = null;
/**
* Constructor.
* Note: only create an instance when Flac.isReady() returns true.
*
* @param {number} sampleRate - Sample rate of the raw audio data.
* @param {number} bitDepth - Bit depth (bit per sample).
* @param {number} bufferSize - The size of each batch.
*/
constructor(sampleRate, bitDepth = 16, bufferSize = 4096) {
if (!Flac.isReady()) {
throw new Error('libflac is not ready yet!');
}
this._sampleRate = sampleRate;
this._bitDepth = bitDepth;
this._bufferSize = bufferSize;
// create the encoder
this._encoderId = Flac.init_libflac_encoder(
this._sampleRate,
// Mono channel
1,
this._bitDepth,
FLAC_COMPRESSION_LEVEL,
// Pass 0 because the total number of samples is unknown.
0,
// checksum, FIXME: double-check whether this is necessary
true,
// Auto-determine block size (samples per frame)
0
);
if (this._encoderId === 0) {
throw new Error('Failed to create libflac encoder.');
}
// initialize the encoder
const initResult = Flac.init_encoder_stream(
this._encoderId,
this._onEncodedData.bind(this),
this._onMetadataAvailable.bind(this)
);
if (initResult !== 0) {
throw new Error('Failed to initialize libflac encoder.');
}
this._state = EncoderState.WORKING;
}
/**
* Receive and encode new data.
*
* @param {Float32Array} audioData - Raw audio data.
* @returns {void}
*/
encode(audioData) {
if (this._state !== EncoderState.WORKING) {
throw new Error('Encoder is not ready or has finished.');
}
if (!Flac.isReady()) {
throw new Error('Flac not ready');
}
const bufferLength = audioData.length;
// Convert samples to signed 32-bit integers.
// According to the libflac documentation:
// each sample in the buffers should be a signed integer,
// right-justified to the resolution set by
// FLAC__stream_encoder_set_bits_per_sample().
// Here we are using 16 bits per sample, so the samples should all be
// in the range [-32768, 32767]. This is achieved by multiplying
// Float32 numbers by 0x7FFF.
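// For example, a Float32 sample of 0.5 becomes 0.5 * 0x7FFF = 16383.5,
// which DataView#setInt32 truncates to the integer 16383.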
const bufferI32 = new Int32Array(bufferLength);
const view = new DataView(bufferI32.buffer);
const volume = 1;
let index = 0;
for (let i = 0; i < bufferLength; i++) {
view.setInt32(index, audioData[i] * (0x7FFF * volume), true);
index += 4; // 4 bytes (32-bit)
}
// pass it to libflac
const status = Flac.FLAC__stream_encoder_process_interleaved(
this._encoderId,
bufferI32,
bufferI32.length
);
if (status !== 1) {
// gets error number
const errorNo
= Flac.FLAC__stream_encoder_get_state(this._encoderId);
logger.error('Error during encoding', FLAC_ERRORS[errorNo]);
}
}
/**
* Signals the termination of encoding.
*
* @returns {void}
*/
finish() {
if (this._state === EncoderState.WORKING) {
this._state = EncoderState.FINISHED;
const status = Flac.FLAC__stream_encoder_finish(this._encoderId);
logger.log('Flac encoding finished: ', status);
// free up resources
Flac.FLAC__stream_encoder_delete(this._encoderId);
this._data = this._exportFlacBlob();
}
}
/**
* Gets the encoded flac file.
*
* @returns {Blob} - The encoded flac file.
*/
getBlob() {
if (this._state === EncoderState.FINISHED) {
return this._data;
}
return null;
}
/**
* Converts the stored FLAC buffers into a single Blob.
*
* @private
* @returns {Blob}
*/
_exportFlacBlob() {
const samples = mergeUint8Arrays(this._flacBuffers, this._flacLength);
const blob = new Blob([ samples ], { type: 'audio/flac' });
return blob;
}
/* eslint-disable no-unused-vars */
/**
* Callback function for saving encoded Flac data.
* This is invoked by libflac.
*
* @private
* @param {Uint8Array} buffer - The encoded Flac data.
* @param {number} bytes - Number of bytes in the data.
* @returns {void}
*/
_onEncodedData(buffer, bytes) {
this._flacBuffers.push(buffer);
this._flacLength += buffer.byteLength;
}
/* eslint-enable no-unused-vars */
/**
* Callback function for receiving metadata.
*
* @private
* @returns {void}
*/
_onMetadataAvailable = () => {
// reserved for future use
}
}
let encoder = null;
self.onmessage = function(e) {
switch (e.data.command) {
case MAIN_THREAD_INIT:
{
const bps = e.data.config.bps;
const sampleRate = e.data.config.sampleRate;
if (Flac.isReady()) {
encoder = new Encoder(sampleRate, bps);
self.postMessage({
command: WORKER_LIBFLAC_READY
});
} else {
Flac.onready = function() {
setTimeout(() => {
encoder = new Encoder(sampleRate, bps);
self.postMessage({
command: WORKER_LIBFLAC_READY
});
}, 0);
};
}
break;
}
case MAIN_THREAD_NEW_DATA_ARRIVED:
if (encoder === null) {
logger.error('flacEncodeWorker received data when the encoder is '
+ 'not ready.');
} else {
encoder.encode(e.data.buf);
}
break;
case MAIN_THREAD_FINISH:
if (encoder !== null) {
encoder.finish();
const data = encoder.getBlob();
self.postMessage(
{
command: WORKER_BLOB_READY,
buf: data
}
);
encoder = null;
}
break;
}
};

View File

@ -0,0 +1 @@
export * from './FlacAdapter';

View File

@ -0,0 +1,44 @@
/**
* Types of messages that are passed between the main thread and the WebWorker
* ({@code flacEncodeWorker})
*/
// Messages sent by the main thread
/**
* Message type that signals the termination of encoding,
* after which no new audio bits should be sent to the
* WebWorker.
*/
export const MAIN_THREAD_FINISH = 'MAIN_THREAD_FINISH';
/**
* Message type that carries initial parameters for
* the WebWorker.
*/
export const MAIN_THREAD_INIT = 'MAIN_THREAD_INIT';
/**
* Message type that carries the newly received raw audio bits
* for the WebWorker to encode.
*/
export const MAIN_THREAD_NEW_DATA_ARRIVED = 'MAIN_THREAD_NEW_DATA_ARRIVED';
// Messages sent by the WebWorker
/**
* Message type that signals libflac is ready to receive audio bits.
*/
export const WORKER_LIBFLAC_READY = 'WORKER_LIBFLAC_READY';
/**
* Message type that carries the encoded FLAC file as a Blob.
*/
export const WORKER_BLOB_READY = 'WORKER_BLOB_READY';
// Messages sent by either the main thread or the WebWorker
/**
* Debug messages.
*/
export const DEBUG = 'DEBUG';
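// Taken together, these constants form a simple request/response protocol
// between the main thread and the worker. A minimal sketch of the expected
// round-trip (the authoritative flow is in FlacAdapter and
// flacEncodeWorker):
//
//  main thread                                        flacEncodeWorker
//  { command: MAIN_THREAD_INIT, config }          -->  initializes libflac
//                                                 <--  { command: WORKER_LIBFLAC_READY }
//  { command: MAIN_THREAD_NEW_DATA_ARRIVED, buf } -->  encodes each batch
//  { command: MAIN_THREAD_FINISH }                -->  finalizes the stream
//                                                 <--  { command: WORKER_BLOB_READY, buf }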

View File

@ -0,0 +1,5 @@
export * from './OggAdapter';
export * from './RecordingAdapter';
export * from './Utils';
export * from './WavAdapter';
export * from './flac';

View File

@ -0,0 +1,35 @@
/* @flow */
import { ReducerRegistry } from '../base/redux';
import {
LOCAL_RECORDING_ENGAGED,
LOCAL_RECORDING_STATS_UPDATE,
LOCAL_RECORDING_UNENGAGED
} from './actionTypes';
import { recordingController } from './controller';
ReducerRegistry.register('features/local-recording', (state = {}, action) => {
switch (action.type) {
case LOCAL_RECORDING_ENGAGED: {
return {
...state,
isEngaged: true,
recordingEngagedAt: action.recordingEngagedAt,
encodingFormat: recordingController._format
};
}
case LOCAL_RECORDING_UNENGAGED:
return {
...state,
isEngaged: false,
recordingEngagedAt: null
};
case LOCAL_RECORDING_STATS_UPDATE:
return {
...state,
stats: action.stats
};
default:
return state;
}
});
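// For reference, a sketch of the state shape this reducer produces once
// recording is engaged (field values are illustrative):
//
// state['features/local-recording'] = {
//     isEngaged: true,
//     recordingEngagedAt: ..., // timestamp carried by the engage action
//     encodingFormat: 'flac',  // mirrors recordingController._format
//     stats: { ... }           // last LOCAL_RECORDING_STATS_UPDATE payload
// };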

View File

@ -0,0 +1,439 @@
/* @flow */
import jitsiLocalStorage from '../../../../modules/util/JitsiLocalStorage';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* Gets high precision system time.
*
* @returns {number}
*/
function highPrecisionTime(): number {
return window.performance
&& window.performance.now
&& window.performance.timing
&& window.performance.timing.navigationStart
? window.performance.now() + window.performance.timing.navigationStart
: Date.now();
}
// We have to use string literals here, instead of Symbols, because
// these values need to be JSON-serializable.
/**
* Types of SessionEvents.
*/
const SessionEventType = Object.freeze({
/**
* Start of local recording session. This is recorded when the
* {@code RecordingController} receives the signal to start local recording,
* before the actual adapter is engaged.
*/
SESSION_STARTED: 'SESSION_STARTED',
/**
* Start of a continuous segment. This is recorded when the adapter is
* engaged. Can happen multiple times in a local recording session,
* due to browser reloads or switching of recording device.
*/
SEGMENT_STARTED: 'SEGMENT_STARTED',
/**
* End of a continuous segment. This is recorded when the adapter unengages.
*/
SEGMENT_ENDED: 'SEGMENT_ENDED'
});
/**
* Represents an event during a local recording session.
* The event can be either that the adapter started recording, or stopped
* recording.
*/
type SessionEvent = {
/**
* The type of the event.
* Should be one of the values in {@code SessionEventType}.
*/
type: string,
/**
* The timestamp of the event.
*/
timestamp: number
};
/**
* Representation of the metadata of a segment.
*/
type SegmentInfo = {
/**
* The length of gap before this segment, in milliseconds.
* null if unknown.
*/
gapBefore?: ?number,
/**
* The duration of this segment, in milliseconds.
* null if unknown or the segment is not finished.
*/
duration?: ?number,
/**
* The start time, in milliseconds.
*/
start?: ?number,
/**
* The end time, in milliseconds.
* null if unknown, the segment is not finished, or the recording is
* interrupted (e.g. browser reload).
*/
end?: ?number
};
/**
* Representation of metadata of a local recording session.
*/
type SessionInfo = {
/**
* The session token.
*/
sessionToken: string,
/**
* The start time of the session.
*/
start: ?number,
/**
* The recording format.
*/
format: string,
/**
* Array of segments in the session.
*/
segments: SegmentInfo[]
}
/**
* {@code localStorage} key.
*/
const LOCAL_STORAGE_KEY = 'localRecordingMetadataVersion1';
/**
* SessionManager manages the metadata of each segment during each local
* recording session.
*
* A segment is a continuous portion of recording done using the same adapter
* on the same microphone device.
*
* Browser refreshes and switching of the microphone will cause new segments to be
* created.
*
* A recording session can consist of one or more segments.
*/
class SessionManager {
/**
* The metadata.
*/
_sessionsMetadata = {};
/**
* Constructor.
*/
constructor() {
this._loadMetadata();
}
/**
* Loads metadata from localStorage.
*
* @private
* @returns {void}
*/
_loadMetadata() {
const dataStr = jitsiLocalStorage.getItem(LOCAL_STORAGE_KEY);
if (dataStr !== null) {
try {
const dataObject = JSON.parse(dataStr);
this._sessionsMetadata = dataObject;
} catch (e) {
logger.warn('Failed to parse localStorage item.');
return;
}
}
}
/**
* Persists metadata to localStorage.
*
* @private
* @returns {void}
*/
_saveMetadata() {
jitsiLocalStorage.setItem(LOCAL_STORAGE_KEY,
JSON.stringify(this._sessionsMetadata));
}
/**
* Creates a session if one does not exist.
*
* @param {string} sessionToken - The local recording session token.
* @param {string} format - The local recording format.
* @returns {void}
*/
createSession(sessionToken: string, format: string) {
if (this._sessionsMetadata[sessionToken] === undefined) {
this._sessionsMetadata[sessionToken] = {
format,
events: []
};
this._sessionsMetadata[sessionToken].events.push({
type: SessionEventType.SESSION_STARTED,
timestamp: highPrecisionTime()
});
this._saveMetadata();
} else {
logger.warn(`Session ${sessionToken} already exists`);
}
}
/**
* Gets all the Sessions.
*
* @returns {SessionInfo[]}
*/
getSessions(): SessionInfo[] {
const sessionTokens = Object.keys(this._sessionsMetadata);
const output = [];
for (let i = 0; i < sessionTokens.length; ++i) {
const thisSession = this._sessionsMetadata[sessionTokens[i]];
const newSessionInfo : SessionInfo = {
start: thisSession.events[0].timestamp,
format: thisSession.format,
sessionToken: sessionTokens[i],
segments: this.getSegments(sessionTokens[i])
};
output.push(newSessionInfo);
}
output.sort((a, b) => (a.start || 0) - (b.start || 0));
return output;
}
/**
* Removes session metadata.
*
* @param {string} sessionToken - The session token.
* @returns {void}
*/
removeSession(sessionToken: string) {
delete this._sessionsMetadata[sessionToken];
this._saveMetadata();
}
/**
* Gets the segments of a given session.
*
* @param {string} sessionToken - The session token.
* @returns {SegmentInfo[]}
*/
getSegments(sessionToken: string): SegmentInfo[] {
const thisSession = this._sessionsMetadata[sessionToken];
if (thisSession) {
return this._constructSegments(thisSession.events);
}
return [];
}
/**
* Marks the start of a new segment.
* This should be invoked by {@code RecordingAdapter}s when they need to
* start asynchronous operations (such as switching tracks) that interrupt
* recording.
*
* @param {string} sessionToken - The token of the session to start a new
* segment in.
* @returns {number} - Current segment index.
*/
beginSegment(sessionToken: string): number {
if (this._sessionsMetadata[sessionToken] === undefined) {
logger.warn('Attempting to add segments to nonexistent'
+ ` session ${sessionToken}`);
return -1;
}
this._sessionsMetadata[sessionToken].events.push({
type: SessionEventType.SEGMENT_STARTED,
timestamp: highPrecisionTime()
});
this._saveMetadata();
return this.getSegments(sessionToken).length - 1;
}
/**
* Gets the current segment index, starting from 0 for the first
* segment.
*
* @param {string} sessionToken - The session token.
* @returns {number}
*/
getCurrentSegmentIndex(sessionToken: string): number {
if (this._sessionsMetadata[sessionToken] === undefined) {
return -1;
}
const segments = this.getSegments(sessionToken);
if (segments.length === 0) {
return -1;
}
const lastSegment = segments[segments.length - 1];
if (lastSegment.end) {
// last segment is already ended
return -1;
}
return segments.length - 1;
}
/**
* Marks the end of the last segment in a session.
*
* @param {string} sessionToken - The session token.
* @returns {void}
*/
endSegment(sessionToken: string) {
if (this._sessionsMetadata[sessionToken] === undefined) {
logger.warn('Attempting to end a segment in nonexistent'
+ ` session ${sessionToken}`);
} else {
this._sessionsMetadata[sessionToken].events.push({
type: SessionEventType.SEGMENT_ENDED,
timestamp: highPrecisionTime()
});
this._saveMetadata();
}
}
/**
* Constructs an array of {@code SegmentInfo} from an array of
* {@code SessionEvent}s.
*
* @private
* @param {SessionEvent[]} events - The array of {@code SessionEvent}s.
* @returns {SegmentInfo[]}
*/
_constructSegments(events: SessionEvent[]): SegmentInfo[] {
if (events.length === 0) {
return [];
}
const output = [];
let sessionStartTime = null;
let currentSegment : SegmentInfo = {};
/**
* Helper function for adding a new {@code SegmentInfo} object to the
* output.
*
* @returns {void}
*/
function commit() {
if (currentSegment.gapBefore === undefined
|| currentSegment.gapBefore === null) {
if (output.length > 0 && output[output.length - 1].end) {
const lastSegment = output[output.length - 1];
if (currentSegment.start && lastSegment.end) {
currentSegment.gapBefore = currentSegment.start
- lastSegment.end;
} else {
currentSegment.gapBefore = null;
}
} else if (sessionStartTime !== null && output.length === 0) {
currentSegment.gapBefore = currentSegment.start
? currentSegment.start - sessionStartTime
: null;
} else {
currentSegment.gapBefore = null;
}
}
currentSegment.duration = currentSegment.end && currentSegment.start
? currentSegment.end - currentSegment.start
: null;
output.push(currentSegment);
currentSegment = {};
}
for (let i = 0; i < events.length; ++i) {
const currentEvent = events[i];
switch (currentEvent.type) {
case SessionEventType.SESSION_STARTED:
if (sessionStartTime === null) {
sessionStartTime = currentEvent.timestamp;
} else {
logger.warn('Unexpected SESSION_STARTED event.', currentEvent);
}
break;
case SessionEventType.SEGMENT_STARTED:
if (currentSegment.start === undefined
|| currentSegment.start === null) {
currentSegment.start = currentEvent.timestamp;
} else {
commit();
currentSegment.start = currentEvent.timestamp;
}
break;
case SessionEventType.SEGMENT_ENDED:
if (currentSegment.start === undefined
|| currentSegment.start === null) {
logger.warn('Unexpected SEGMENT_ENDED event', currentEvent);
} else {
currentSegment.end = currentEvent.timestamp;
commit();
}
break;
default:
logger.warn('Unexpected error during _constructSegments');
break;
}
}
if (currentSegment.start) {
commit();
}
return output;
}
}
/**
* Global singleton of {@code SessionManager}.
*/
export const sessionManager = new SessionManager();
// For debug only. To remove later.
window.sessionManager = sessionManager;
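// To make _constructSegments easier to follow, a worked example with
// illustrative millisecond timestamps. Given one session's event log:
//
//   { type: 'SESSION_STARTED', timestamp: 0 }
//   { type: 'SEGMENT_STARTED', timestamp: 100 }
//   { type: 'SEGMENT_ENDED',   timestamp: 500 }
//   { type: 'SEGMENT_STARTED', timestamp: 700 }
//   { type: 'SEGMENT_ENDED',   timestamp: 900 }
//
// getSegments() returns:
//
//   [ { gapBefore: 100, start: 100, end: 500, duration: 400 },
//     { gapBefore: 200, start: 700, end: 900, duration: 200 } ]
//
// The first gapBefore is measured from SESSION_STARTED; subsequent gaps
// are measured from the previous segment's end.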

View File

@ -0,0 +1 @@
export * from './SessionManager';

View File

@ -28,6 +28,10 @@ import {
isDialOutEnabled
} from '../../../invite';
import { openKeyboardShortcutsDialog } from '../../../keyboard-shortcuts';
import {
LocalRecordingButton,
LocalRecordingInfoDialog
} from '../../../local-recording';
import {
LiveStreamButton,
RecordButton
@ -129,6 +133,11 @@ type Props = {
*/
_localParticipantID: String,
/**
* The subsection of Redux state for local recording.
*/
_localRecState: Object,
/**
* Whether or not the overflow menu is visible.
*/
@ -159,6 +168,7 @@ type Props = {
*/
_visible: boolean,
/**
* Set with the buttons which this Toolbox should display.
*/
@ -228,6 +238,8 @@ class Toolbox extends Component<Props> {
= this._onToolbarToggleScreenshare.bind(this);
this._onToolbarToggleSharedVideo
= this._onToolbarToggleSharedVideo.bind(this);
this._onToolbarOpenLocalRecordingInfoDialog
= this._onToolbarOpenLocalRecordingInfoDialog.bind(this);
}
/**
@ -370,6 +382,12 @@ class Toolbox extends Component<Props> {
visible = { this._shouldShowButton('camera') } />
</div>
<div className = 'button-group-right'>
{ this._shouldShowButton('localrecording')
&& <LocalRecordingButton
onClick = {
this._onToolbarOpenLocalRecordingInfoDialog
} />
}
{ this._shouldShowButton('tileview')
&& <TileViewButton /> }
{ this._shouldShowButton('invite')
@ -842,6 +860,20 @@ class Toolbox extends Component<Props> {
this._doToggleSharedVideo();
}
_onToolbarOpenLocalRecordingInfoDialog: () => void;
/**
* Opens the {@code LocalRecordingInfoDialog}.
*
* @private
* @returns {void}
*/
_onToolbarOpenLocalRecordingInfoDialog() {
sendAnalytics(createToolbarEvent('local.recording'));
this.props.dispatch(openDialog(LocalRecordingInfoDialog));
}
/**
* Renders a button for toggling screen sharing.
*
@ -984,7 +1016,7 @@ class Toolbox extends Component<Props> {
* Returns if a button name has been explicitly configured to be displayed.
*
* @param {string} buttonName - The name of the button, as expected in
* {@link intefaceConfig}.
* {@link interfaceConfig}.
* @private
* @returns {boolean} True if the button should be displayed.
*/
@ -1021,6 +1053,7 @@ function _mapStateToProps(state) {
visible
} = state['features/toolbox'];
const localParticipant = getLocalParticipant(state);
const localRecordingStates = state['features/local-recording'];
const localVideo = getLocalVideoTrack(state['features/base/tracks']);
const addPeopleEnabled = isAddPeopleEnabled(state);
const dialOutEnabled = isDialOutEnabled(state);
@ -1061,6 +1094,7 @@ function _mapStateToProps(state) {
_isGuest: state['features/base/jwt'].isGuest,
_fullScreen: fullScreen,
_localParticipantID: localParticipant.id,
_localRecState: localRecordingStates,
_overflowMenuVisible: overflowMenuVisible,
_raisedHand: localParticipant.raisedHand,
_screensharing: localVideo && localVideo.videoType === 'desktop',

View File

@ -149,7 +149,11 @@ module.exports = [
],
'do_external_connect':
'./connection_optimization/do_external_connect.js'
'./connection_optimization/do_external_connect.js',
'flacEncodeWorker':
'./react/features/local-recording/'
+ 'recording/flac/flacEncodeWorker.js'
}
}),