Implement local recording
index.js of local recording
local-recording(ui): recording button
local-recording(encoding): flac support with libflac.js
Fixes in RecordingController; integration with UI
local-recording(controller): coordinate recording on different clients
local-recording(controller): allow recording on remote participants
local-recording(controller): global singleton
local-recording(controller): use middleware to init LocalRecording
cleanup and documentation in RecordingController
local-recording(refactor): "Delegate" -> "Adapter"
code style
stop eslint and flow from complaining
temp save: client status
fix linter issues
fix some docs; remove global LocalRecording instance
use node.js packaging for libflac.js; remove vendor/ folder
code style: flacEncodeWorker.js
use moment.js to do time diff
remove the use of console.log
code style: flac related files
remove excessive empty lines; and more docs
remove the use of clockTick for UI updates
initialize flacEncodeWorker properly, to avoid premature audio data transmission
move the realization of recordingController events from LocalRecordingButton to middleware
i18n strings
minor markup changes in LocalRecordingInfoDialog
fix documentation
This commit is contained in:
parent 2ee1bf9351
commit 07bc70c2f5

Makefile | 13
@@ -2,6 +2,7 @@ BUILD_DIR = build
 CLEANCSS = ./node_modules/.bin/cleancss
 DEPLOY_DIR = libs
 LIBJITSIMEET_DIR = node_modules/lib-jitsi-meet/
+LIBFLAC_DIR = node_modules/libflac/dist/
 NODE_SASS = ./node_modules/.bin/node-sass
 NPM = npm
 OUTPUT_DIR = .
@@ -19,7 +20,7 @@ compile:
 clean:
     rm -fr $(BUILD_DIR)

-deploy: deploy-init deploy-appbundle deploy-lib-jitsi-meet deploy-css deploy-local
+deploy: deploy-init deploy-appbundle deploy-lib-jitsi-meet deploy-libflac deploy-css deploy-local

 deploy-init:
     rm -fr $(DEPLOY_DIR)
@@ -33,6 +34,8 @@ deploy-appbundle:
     $(BUILD_DIR)/do_external_connect.min.map \
     $(BUILD_DIR)/external_api.min.js \
     $(BUILD_DIR)/external_api.min.map \
+    $(BUILD_DIR)/flacEncodeWorker.min.js \
+    $(BUILD_DIR)/flacEncodeWorker.min.map \
     $(BUILD_DIR)/device_selection_popup_bundle.min.js \
     $(BUILD_DIR)/device_selection_popup_bundle.min.map \
     $(BUILD_DIR)/dial_in_info_bundle.min.js \
@@ -50,6 +53,12 @@ deploy-lib-jitsi-meet:
     $(LIBJITSIMEET_DIR)/modules/browser/capabilities.json \
     $(DEPLOY_DIR)

+deploy-libflac:
+    cp \
+        $(LIBFLAC_DIR)/libflac3-1.3.2.min.js \
+        $(LIBFLAC_DIR)/libflac3-1.3.2.min.js.mem \
+        $(DEPLOY_DIR)
+
 deploy-css:
     $(NODE_SASS) $(STYLES_MAIN) $(STYLES_BUNDLE) && \
     $(CLEANCSS) $(STYLES_BUNDLE) > $(STYLES_DESTINATION) ; \
@@ -58,7 +67,7 @@ deploy-css:
 deploy-local:
     ([ ! -x deploy-local.sh ] || ./deploy-local.sh)

-dev: deploy-init deploy-css deploy-lib-jitsi-meet
+dev: deploy-init deploy-css deploy-lib-jitsi-meet deploy-libflac
     $(WEBPACK_DEV_SERVER)

 source-package:
@@ -666,5 +666,28 @@
        "decline": "Dismiss",
        "productLabel": "from Jitsi Meet",
        "videoCallTitle": "Incoming video call"
    },
    "localRecording": {
        "localRecording": "Local Recording",
        "dialogTitle": "Local Recording Controls",
        "start": "Start",
        "stop": "Stop",
        "moderator": "Moderator",
        "localUser": "Local user",
        "duration": "Duration",
        "encoding": "Encoding",
        "participantStats": "Participant Stats",
        "clientState": {
            "on": "On",
            "off": "Off",
            "unknown": "Unknown"
        },
        "messages": {
            "engaged": "Local recording engaged.",
            "finished": "Recording session __token__ finished. Please send the recorded file to the moderator.",
            "notModerator": "You are not the moderator. You cannot start or stop local recording."
        },
        "yes": "Yes",
        "no": "No"
    }
}
@@ -9736,6 +9736,10 @@
                "yaeti": "1.0.1"
            }
        },
        "libflac": {
            "version": "git+https://github.com/ztl8702/libflac.git#31368097eaf9dcb5ef59365ef60b259cb7b97f07",
            "from": "git+https://github.com/ztl8702/libflac.git#31368097eaf9dcb5ef59365ef60b259cb7b97f07"
        },
        "load-json-file": {
            "version": "2.0.0",
            "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz",
@@ -48,6 +48,7 @@
        "jsc-android": "224109.1.0",
        "jwt-decode": "2.2.0",
        "lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#e097a1189ed99838605d90b959e129155bc0e50a",
        "libflac": "git+https://github.com/ztl8702/libflac.git#31368097eaf9dcb5ef59365ef60b259cb7b97f07",
        "lodash": "4.17.4",
        "moment": "2.19.4",
        "moment-duration-format": "2.2.2",
@@ -0,0 +1,40 @@
/**
 * Action to signal that the local client has started to perform recording,
 * (as in: {@code RecordingAdapter} is actively collecting audio data).
 *
 * {
 *     type: LOCAL_RECORDING_ENGAGED
 * }
 */
export const LOCAL_RECORDING_ENGAGED = Symbol('LOCAL_RECORDING_ENGAGED');

/**
 * Action to signal that the local client has stopped recording,
 * (as in: {@code RecordingAdapter} is no longer collecting audio data).
 *
 * {
 *     type: LOCAL_RECORDING_UNENGAGED
 * }
 */
export const LOCAL_RECORDING_UNENGAGED = Symbol('LOCAL_RECORDING_UNENGAGED');

/**
 * Action to show/hide {@code LocalRecordingInfoDialog}.
 *
 * {
 *     type: LOCAL_RECORDING_TOGGLE_DIALOG
 * }
 */
export const LOCAL_RECORDING_TOGGLE_DIALOG
    = Symbol('LOCAL_RECORDING_TOGGLE_DIALOG');

/**
 * Action to update {@code LocalRecordingInfoDialog} with stats
 * from all clients.
 *
 * {
 *     type: LOCAL_RECORDING_STATS_UPDATE
 * }
 */
export const LOCAL_RECORDING_STATS_UPDATE
    = Symbol('LOCAL_RECORDING_STATS_UPDATE');
@@ -0,0 +1,59 @@
/* @flow */

import {
    LOCAL_RECORDING_ENGAGED,
    LOCAL_RECORDING_UNENGAGED,
    LOCAL_RECORDING_TOGGLE_DIALOG,
    LOCAL_RECORDING_STATS_UPDATE
} from './actionTypes';

/**
 * Signals a state change in local recording engagement.
 * In other words, the events of the local WebWorker / MediaRecorder
 * starting to record and finishing recording.
 *
 * Note that this is not the event fired when the user tries to start
 * the recording in the UI.
 *
 * @param {boolean} isEngaged - Whether local recording is engaged or not.
 * @returns {{
 *     type: LOCAL_RECORDING_ENGAGED
 * }|{
 *     type: LOCAL_RECORDING_UNENGAGED
 * }}
 */
export function signalLocalRecordingEngagement(isEngaged: boolean) {
    return {
        type: isEngaged ? LOCAL_RECORDING_ENGAGED : LOCAL_RECORDING_UNENGAGED
    };
}

/**
 * Toggles the open/close state of {@code LocalRecordingInfoDialog}.
 *
 * @returns {{
 *     type: LOCAL_RECORDING_TOGGLE_DIALOG
 * }}
 */
export function toggleLocalRecordingInfoDialog() {
    return {
        type: LOCAL_RECORDING_TOGGLE_DIALOG
    };
}

/**
 * Updates the local recording stats from each client,
 * to be displayed on {@code LocalRecordingInfoDialog}.
 *
 * @param {*} stats - The stats object.
 * @returns {{
 *     type: LOCAL_RECORDING_STATS_UPDATE,
 *     stats: Object
 * }}
 */
export function statsUpdate(stats: Object) {
    return {
        type: LOCAL_RECORDING_STATS_UPDATE,
        stats
    };
}
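The reducer that consumes these actions is only pulled in via `import './reducer'` further down and its contents are not part of this excerpt. As a rough, hypothetical sketch (assuming jitsi-meet's `ReducerRegistry` and the `features/local-recording` state keys read by `LocalRecordingInfoDialog` below), it might look something like:

import { ReducerRegistry } from '../base/redux';

import {
    LOCAL_RECORDING_ENGAGED,
    LOCAL_RECORDING_UNENGAGED,
    LOCAL_RECORDING_STATS_UPDATE
} from './actionTypes';

// Hypothetical default shape; the real reducer.js is not shown in this diff.
const DEFAULT_STATE = {
    encodingFormat: 'flac',
    isEngaged: false,
    recordingStartedAt: null,
    stats: undefined
};

ReducerRegistry.register('features/local-recording',
    (state = DEFAULT_STATE, action) => {
        switch (action.type) {
        case LOCAL_RECORDING_ENGAGED:
            // Remember when recording started, for the duration display.
            return {
                ...state,
                isEngaged: true,
                recordingStartedAt: new Date(Date.now())
            };
        case LOCAL_RECORDING_UNENGAGED:
            return {
                ...state,
                isEngaged: false,
                recordingStartedAt: null
            };
        case LOCAL_RECORDING_STATS_UPDATE:
            // Stats dictionary produced by RecordingController.getParticipantsStats().
            return {
                ...state,
                stats: action.stats
            };
        }

        return state;
    });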
@@ -0,0 +1,111 @@
/* @flow */

import InlineDialog from '@atlaskit/inline-dialog';
import React, { Component } from 'react';

import { translate } from '../../base/i18n';
import { ToolbarButton } from '../../toolbox';

import LocalRecordingInfoDialog from './LocalRecordingInfoDialog';

/**
 * The type of the React {@code Component} props of
 * {@link LocalRecordingButton}.
 */
type Props = {

    /**
     * Whether or not {@link LocalRecordingInfoDialog} should be displayed.
     */
    isDialogShown: boolean,

    /**
     * Callback function called when {@link LocalRecordingButton} is clicked.
     */
    onClick: Function,

    /**
     * Invoked to obtain translated strings.
     */
    t: Function
}

/**
 * A React {@code Component} for opening or closing the
 * {@code LocalRecordingInfoDialog}.
 *
 * @extends Component
 */
class LocalRecordingButton extends Component<Props> {

    /**
     * Initializes a new {@code LocalRecordingButton} instance.
     *
     * @param {Object} props - The read-only properties with which the new
     * instance is to be initialized.
     */
    constructor(props: Props) {
        super(props);

        // Bind event handlers so they are only bound once per instance.
        this._onClick = this._onClick.bind(this);
    }

    /**
     * Implements React's {@link Component#render()}.
     *
     * @inheritdoc
     * @returns {ReactElement}
     */
    render() {
        const { isDialogShown, t } = this.props;
        const iconClasses
            = `icon-thumb-menu ${isDialogShown
                ? 'icon-rec toggled' : 'icon-rec'}`;

        return (
            <div className = 'toolbox-button-wth-dialog'>
                <InlineDialog
                    content = {
                        <LocalRecordingInfoDialog />
                    }
                    isOpen = { isDialogShown }
                    onClose = { this._onCloseDialog }
                    position = { 'top right' }>
                    <ToolbarButton
                        iconName = { iconClasses }
                        onClick = { this._onClick }
                        tooltip = { t('localRecording.dialogTitle') } />
                </InlineDialog>
            </div>
        );
    }

    _onClick: () => void;

    /**
     * Callback invoked when the Toolbar button is clicked.
     *
     * @private
     * @returns {void}
     */
    _onClick() {
        this.props.onClick();
    }

    _onCloseDialog: () => void;

    /**
     * Callback invoked when {@code InlineDialog} signals that it should be
     * closed.
     *
     * @returns {void}
     */
    _onCloseDialog() {
        // Do nothing for now, because we want the dialog to stay open;
        // otherwise the moderator might need to repeatedly open the dialog
        // to see the stats.
    }
}

export default translate(LocalRecordingButton);
@@ -0,0 +1,332 @@
/* @flow */

import moment from 'moment';
import React, { Component } from 'react';
import { connect } from 'react-redux';

import { translate } from '../../base/i18n';
import {
    PARTICIPANT_ROLE,
    getLocalParticipant
} from '../../base/participants';

import { statsUpdate } from '../actions';
import { recordingController } from '../controller';


/**
 * The type of the React {@code Component} props of
 * {@link LocalRecordingInfoDialog}.
 */
type Props = {

    /**
     * Redux store dispatch function.
     */
    dispatch: Dispatch<*>,

    /**
     * Current encoding format.
     */
    encodingFormat: string,

    /**
     * Whether the local user is the moderator.
     */
    isModerator: boolean,

    /**
     * Whether local recording is engaged.
     */
    isOn: boolean,

    /**
     * The start time of the current local recording session.
     * Used to calculate the duration of recording.
     */
    recordingStartedAt: Date,

    /**
     * Stats of all the participants.
     */
    stats: Object,

    /**
     * Invoked to obtain translated strings.
     */
    t: Function
}

/**
 * The type of the React {@code Component} state of
 * {@link LocalRecordingInfoDialog}.
 */
type State = {

    /**
     * The recording duration string to be displayed on the UI.
     */
    durationString: string
}

/**
 * A React Component with the contents for a dialog that shows information
 * about local recording. For users with moderator rights, this is also the
 * "control panel" for starting/stopping local recording on all clients.
 *
 * @extends Component
 */
class LocalRecordingInfoDialog extends Component<Props, State> {

    /**
     * Saves a handle to the timer for UI updates,
     * so that it can be cancelled when the component unmounts.
     */
    _timer: ?IntervalID;

    /**
     * Constructor.
     */
    constructor() {
        super();
        this.state = {
            durationString: 'N/A'
        };
    }

    /**
     * Implements React's {@link Component#componentWillMount()}.
     *
     * @returns {void}
     */
    componentWillMount() {
        this._timer = setInterval(
            () => {
                this.setState((_prevState, props) => {
                    const nowTime = new Date(Date.now());

                    return {
                        durationString: this._getDuration(nowTime,
                            props.recordingStartedAt)
                    };
                });
                try {
                    this.props.dispatch(
                        statsUpdate(recordingController
                            .getParticipantsStats()));
                } catch (e) {
                    // do nothing
                }
            },
            1000
        );
    }

    /**
     * Implements React's {@link Component#componentWillUnmount()}.
     *
     * @returns {void}
     */
    componentWillUnmount() {
        if (this._timer) {
            clearInterval(this._timer);
            this._timer = null;
        }
    }


    /**
     * Returns React elements for displaying the local recording stats of
     * each participant.
     *
     * @returns {ReactElement}
     */
    renderStats() {
        const { stats, t } = this.props;

        if (stats === undefined) {
            return <ul />;
        }
        const ids = Object.keys(stats);

        return (
            <ul>
                {ids.map((id, i) =>

                    // FIXME: a workaround, as arrow functions without `return`
                    // keyword need to be wrapped in parenthesis.
                    /* eslint-disable no-extra-parens */
                    (<li key = { i }>
                        <span>{stats[id].displayName || id}: </span>
                        <span>{stats[id].recordingStats
                            ? `${stats[id].recordingStats.isRecording
                                ? t('localRecording.clientState.on')
                                : t('localRecording.clientState.off')} `
                                + `(${stats[id]
                                    .recordingStats.currentSessionToken})`
                            : t('localRecording.clientState.unknown')}</span>
                    </li>)
                    /* eslint-enable no-extra-parens */
                )}
            </ul>
        );
    }

    /**
     * Implements React's {@link Component#render()}.
     *
     * @inheritdoc
     * @returns {ReactElement}
     */
    render() {
        const { isModerator, encodingFormat, isOn, t } = this.props;
        const { durationString } = this.state;

        return (
            <div
                className = 'info-dialog' >
                <div className = 'info-dialog-column'>
                    <h4 className = 'info-dialog-icon'>
                        <i className = 'icon-info' />
                    </h4>
                </div>
                <div className = 'info-dialog-column'>
                    <div className = 'info-dialog-title'>
                        { t('localRecording.localRecording') }
                    </div>
                    <div>
                        <span className = 'info-label'>
                            {`${t('localRecording.moderator')}:`}
                        </span>
                        <span className = 'spacer'> </span>
                        <span className = 'info-value'>
                            { isModerator
                                ? t('localRecording.yes')
                                : t('localRecording.no') }
                        </span>
                    </div>
                    { isOn && <div>
                        <span className = 'info-label'>
                            {`${t('localRecording.duration')}:`}
                        </span>
                        <span className = 'spacer'> </span>
                        <span className = 'info-value'>
                            { durationString }
                        </span>
                    </div>
                    }
                    {isOn
                        && <div>
                            <span className = 'info-label'>
                                {`${t('localRecording.encoding')}:`}
                            </span>
                            <span className = 'spacer'> </span>
                            <span className = 'info-value'>
                                { encodingFormat }
                            </span>
                        </div>
                    }
                    {
                        isModerator
                            && <div>
                                <div>
                                    <span className = 'info-label'>
                                        {`${t('localRecording.participantStats')}:`}
                                    </span>
                                </div>
                                { this.renderStats() }
                            </div>
                    }
                    {
                        isModerator
                            && <div className = 'info-dialog-action-links'>
                                <div className = 'info-dialog-action-link'>
                                    {isOn ? <a
                                        onClick = { this._onStop }>
                                        { t('localRecording.stop') }
                                    </a>
                                        : <a
                                            onClick = { this._onStart }>
                                            { t('localRecording.start') }
                                        </a>
                                    }
                                </div>
                            </div>
                    }
                </div>
            </div>
        );
    }

    /**
     * Creates a duration string "HH:MM:SS" from two Date objects.
     *
     * @param {Date} now - Current time.
     * @param {Date} prev - Previous time, the time to be subtracted.
     * @returns {string}
     */
    _getDuration(now, prev) {
        // Still a hack, as moment.js does not support formatting of duration
        // (i.e. TimeDelta). Only works if total duration < 24 hours.
        // But who is going to have a 24-hour long conference?
        return moment(now - prev).utc()
            .format('HH:mm:ss');
    }

    /**
     * Callback function for the Start UI action.
     *
     * @private
     * @returns {void}
     */
    _onStart() {
        recordingController.startRecording();
    }

    /**
     * Callback function for the Stop UI action.
     *
     * @private
     * @returns {void}
     */
    _onStop() {
        recordingController.stopRecording();
    }

}

/**
 * Maps (parts of) the Redux state to the associated props for the
 * {@code LocalRecordingInfoDialog} component.
 *
 * @param {Object} state - The Redux state.
 * @private
 * @returns {{
 *     encodingFormat: string,
 *     isModerator: boolean,
 *     isOn: boolean,
 *     recordingStartedAt: Date,
 *     stats: Object
 * }}
 */
function _mapStateToProps(state) {
    const {
        encodingFormat,
        isEngaged: isOn,
        recordingStartedAt,
        stats
    } = state['features/local-recording'];
    const isModerator
        = getLocalParticipant(state).role === PARTICIPANT_ROLE.MODERATOR;

    return {
        encodingFormat,
        isModerator,
        isOn,
        recordingStartedAt,
        stats
    };
}

export default translate(connect(_mapStateToProps)(LocalRecordingInfoDialog));
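To illustrate the `_getDuration` hack above: `moment(now - prev)` treats the millisecond difference as a timestamp since the Unix epoch, so formatting it in UTC yields the elapsed time, as long as it is under 24 hours. A quick sanity check with illustrative numbers:

import moment from 'moment';

// 1 hour, 2 minutes and 3 seconds expressed in milliseconds.
const diffMs = ((1 * 60 + 2) * 60 + 3) * 1000;

// Interpreted as "diffMs milliseconds after the epoch", formatted in UTC.
console.log(moment(diffMs).utc().format('HH:mm:ss')); // "01:02:03"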
@@ -0,0 +1 @@
export { default as LocalRecordingButton } from './LocalRecordingButton';
@@ -0,0 +1,493 @@
/* @flow */

import { i18next } from '../../base/i18n';
import {
    FlacAdapter,
    OggAdapter,
    WavAdapter
} from '../recording';

const logger = require('jitsi-meet-logger').getLogger(__filename);

/**
 * XMPP command for signaling the start of local recording to all clients.
 * Should be sent by the moderator only.
 */
const COMMAND_START = 'localRecStart';

/**
 * XMPP command for signaling the stop of local recording to all clients.
 * Should be sent by the moderator only.
 */
const COMMAND_STOP = 'localRecStop';

/**
 * Participant property key for local recording stats.
 */
const PROPERTY_STATS = 'localRecStats';

/**
 * Default recording format.
 */
const DEFAULT_RECORDING_FORMAT = 'flac';

/**
 * States of the {@code RecordingController}.
 */
const ControllerState = Object.freeze({
    /**
     * Idle (not recording).
     */
    IDLE: Symbol('IDLE'),

    /**
     * Engaged (recording).
     */
    RECORDING: Symbol('RECORDING')
});

/**
 * Type of the stats reported by each participant (client).
 */
type RecordingStats = {

    /**
     * Current local recording session token used by the participant.
     */
    currentSessionToken: number,

    /**
     * Whether local recording is engaged on the participant's device.
     */
    isRecording: boolean,

    /**
     * Total recorded bytes. (Reserved for future use.)
     */
    recordedBytes: number,

    /**
     * Total recording duration. (Reserved for future use.)
     */
    recordedLength: number
}

/**
 * The component responsible for the coordination of local recording, across
 * multiple participants.
 * The current implementation requires that there is only one moderator in
 * a room.
 */
class RecordingController {

    /**
     * For each recording session, there is a separate {@code RecordingAdapter}
     * instance, so that encoded bits from the previous sessions can still be
     * retrieved after they have ended.
     *
     * @private
     */
    _adapters = {};

    /**
     * The {@code JitsiConference} instance.
     *
     * @private
     */
    _conference: * = null;

    /**
     * Current recording session token.
     * The session token is a number generated by the moderator, to ensure
     * every client is in the same recording state.
     *
     * @private
     */
    _currentSessionToken: number = -1;

    /**
     * Current state of {@code RecordingController}.
     *
     * @private
     */
    _state = ControllerState.IDLE;

    /**
     * Current recording format. This will be in effect from the next
     * recording session, i.e., if this value is changed during an on-going
     * recording session, that on-going session will not use the new format.
     *
     * @private
     */
    _format = DEFAULT_RECORDING_FORMAT;

    /**
     * Whether or not the {@code RecordingController} has registered for
     * XMPP events. Prevents initialization from happening multiple times.
     *
     * @private
     */
    _registered = false;

    /**
     * FIXME: callback function for the {@code RecordingController} to notify
     * UI it wants to display a notice. Keeps {@code RecordingController}
     * decoupled from UI.
     */
    onNotify: ?(string) => void;

    /**
     * FIXME: callback function for the {@code RecordingController} to notify
     * UI it wants to display a warning. Keeps {@code RecordingController}
     * decoupled from UI.
     */
    onWarning: ?(string) => void;

    /**
     * FIXME: callback function for the {@code RecordingController} to notify
     * UI that the local recording state has changed.
     */
    onStateChanged: ?(boolean) => void;

    /**
     * Constructor.
     *
     * @returns {void}
     */
    constructor() {
        this._updateStats = this._updateStats.bind(this);
        this._onStartCommand = this._onStartCommand.bind(this);
        this._onStopCommand = this._onStopCommand.bind(this);
        this._doStartRecording = this._doStartRecording.bind(this);
        this._doStopRecording = this._doStopRecording.bind(this);
        this.registerEvents = this.registerEvents.bind(this);
        this.getParticipantsStats = this.getParticipantsStats.bind(this);
    }

    registerEvents: () => void;

    /**
     * Registers listeners for XMPP events.
     *
     * @param {JitsiConference} conference - {@code JitsiConference} instance.
     * @returns {void}
     */
    registerEvents(conference: Object) {
        if (!this._registered) {
            this._conference = conference;
            if (this._conference) {
                this._conference
                    .addCommandListener(COMMAND_STOP, this._onStopCommand);
                this._conference
                    .addCommandListener(COMMAND_START, this._onStartCommand);
                this._registered = true;
            }
        }
    }

    /**
     * Signals the participants to start local recording.
     *
     * @returns {void}
     */
    startRecording() {
        this.registerEvents();
        if (this._conference && this._conference.isModerator()) {
            this._conference.removeCommand(COMMAND_STOP);
            this._conference.sendCommand(COMMAND_START, {
                attributes: {
                    sessionToken: this._getRandomToken(),
                    format: this._format
                }
            });
        } else {
            const message = i18next.t('localRecording.messages.notModerator');

            if (this.onWarning) {
                this.onWarning(message);
            }
        }
    }

    /**
     * Signals the participants to stop local recording.
     *
     * @returns {void}
     */
    stopRecording() {
        if (this._conference) {
            if (this._conference.isModerator()) {
                this._conference.removeCommand(COMMAND_START);
                this._conference.sendCommand(COMMAND_STOP, {
                    attributes: {
                        sessionToken: this._currentSessionToken
                    }
                });
            } else {
                const message
                    = i18next.t('localRecording.messages.notModerator');

                if (this.onWarning) {
                    this.onWarning(message);
                }
            }
        }
    }

    /**
     * Triggers the download of recorded data.
     * Browser only.
     *
     * @param {number} sessionToken - The token of the session to download.
     * @returns {void}
     */
    downloadRecordedData(sessionToken: number) {
        if (this._adapters[sessionToken]) {
            this._adapters[sessionToken].download();
        } else {
            logger.error(`Invalid session token for download ${sessionToken}`);
        }
    }

    /**
     * Switches the recording format.
     *
     * @param {string} newFormat - The new format.
     * @returns {void}
     */
    switchFormat(newFormat: string) {
        this._format = newFormat;
        logger.log(`Recording format switched to ${newFormat}`);

        // will be used next time
    }

    /**
     * Returns the local recording stats.
     *
     * @returns {RecordingStats}
     */
    getLocalStats(): RecordingStats {
        return {
            currentSessionToken: this._currentSessionToken,
            isRecording: this._state === ControllerState.RECORDING,
            recordedBytes: 0,
            recordedLength: 0
        };
    }

    getParticipantsStats: () => *;

    /**
     * Returns the remote participants' local recording stats.
     *
     * @returns {*}
     */
    getParticipantsStats() {
        const members
            = this._conference.getParticipants()
                .map(member => {
                    return {
                        id: member.getId(),
                        displayName: member.getDisplayName(),
                        recordingStats:
                            JSON.parse(member.getProperty(PROPERTY_STATS) || '{}'),
                        isSelf: false
                    };
                });

        // transform into a dictionary,
        // for consistent ordering
        const result = {};

        for (let i = 0; i < members.length; ++i) {
            result[members[i].id] = members[i];
        }
        const localId = this._conference.myUserId();

        result[localId] = {
            id: localId,
            displayName: i18next.t('localRecording.localUser'),
            recordingStats: this.getLocalStats(),
            isSelf: true
        };

        return result;
    }

    _updateStats: () => void;

    /**
     * Sends out updates about the local recording stats via XMPP.
     *
     * @private
     * @returns {void}
     */
    _updateStats() {
        if (this._conference) {
            this._conference.setLocalParticipantProperty(PROPERTY_STATS,
                JSON.stringify(this.getLocalStats()));
        }
    }

    _onStartCommand: (*) => void;

    /**
     * Callback function for XMPP event.
     *
     * @private
     * @param {*} value - The event args.
     * @returns {void}
     */
    _onStartCommand(value) {
        const { sessionToken, format } = value.attributes;

        if (this._state === ControllerState.IDLE) {
            this._format = format;
            this._currentSessionToken = sessionToken;
            this._adapters[sessionToken]
                = this._createRecordingAdapter();
            this._doStartRecording();
        } else if (this._currentSessionToken !== sessionToken) {
            // we need to restart the recording
            this._doStopRecording().then(() => {
                this._format = format;
                this._currentSessionToken = sessionToken;
                this._adapters[sessionToken]
                    = this._createRecordingAdapter();
                this._doStartRecording();
            });
        }
    }

    _onStopCommand: (*) => void;

    /**
     * Callback function for XMPP event.
     *
     * @private
     * @param {*} value - The event args.
     * @returns {void}
     */
    _onStopCommand(value) {
        if (this._state === ControllerState.RECORDING
            && this._currentSessionToken === value.attributes.sessionToken) {
            this._doStopRecording();
        }
    }

    /**
     * Generates a token that can be used to distinguish each
     * recording session.
     *
     * @returns {number}
     */
    _getRandomToken() {
        return Math.floor(Math.random() * 10000) + 1;
    }

    _doStartRecording: () => void;

    /**
     * Starts the recording locally.
     *
     * @private
     * @returns {void}
     */
    _doStartRecording() {
        if (this._state === ControllerState.IDLE) {
            this._state = ControllerState.RECORDING;
            const delegate = this._adapters[this._currentSessionToken];

            delegate.ensureInitialized()
                .then(() => delegate.start())
                .then(() => {
                    logger.log('Local recording engaged.');
                    const message = i18next.t('localRecording.messages.engaged');

                    if (this.onNotify) {
                        this.onNotify(message);
                    }
                    if (this.onStateChanged) {
                        this.onStateChanged(true);
                    }
                    this._updateStats();
                })
                .catch(err => {
                    logger.error('Failed to start local recording.', err);
                });
        }

    }

    _doStopRecording: () => Promise<void>;

    /**
     * Stops the recording locally.
     *
     * @private
     * @returns {Promise<void>}
     */
    _doStopRecording() {
        if (this._state === ControllerState.RECORDING) {
            const token = this._currentSessionToken;

            return this._adapters[this._currentSessionToken]
                .stop()
                .then(() => {
                    this._state = ControllerState.IDLE;
                    logger.log('Local recording unengaged.');
                    this.downloadRecordedData(token);

                    const message
                        = i18next.t('localRecording.messages.finished',
                            {
                                token
                            });

                    if (this.onNotify) {
                        this.onNotify(message);
                    }
                    if (this.onStateChanged) {
                        this.onStateChanged(false);
                    }
                    this._updateStats();
                })
                .catch(err => {
                    logger.error('Failed to stop local recording.', err);
                });
        }

        /* eslint-disable */
        return (Promise.resolve(): Promise<void>);
        // FIXME: better ways to satisfy flow and ESLint at the same time?
        /* eslint-enable */

    }

    /**
     * Creates a recording adapter according to the current recording format.
     *
     * @private
     * @returns {RecordingAdapter}
     */
    _createRecordingAdapter() {
        logger.debug('[RecordingController] creating recording'
            + ` adapter for ${this._format} format.`);

        switch (this._format) {
        case 'ogg':
            return new OggAdapter();
        case 'flac':
            return new FlacAdapter();
        case 'wav':
            return new WavAdapter();
        default:
            throw new Error(`Unknown format: ${this._format}`);
        }
    }
}

/**
 * Global singleton of {@code RecordingController}.
 */
export const recordingController = new RecordingController();
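For reference, this is roughly how a client is expected to drive the singleton (the same wiring the Redux middleware below performs); the callback bodies are illustrative placeholders and `conference` is assumed to be the joined {@code JitsiConference}:

import { recordingController } from './controller';

// Once the JitsiConference object is available, hook up the XMPP listeners.
recordingController.registerEvents(conference);

// Optional UI hooks; the middleware below maps these to notifications.
recordingController.onNotify = message => console.info(message);
recordingController.onWarning = message => console.warn(message);
recordingController.onStateChanged = isEngaged => console.log('engaged:', isEngaged);

// Moderator only: broadcasts localRecStart / localRecStop to every client.
recordingController.startRecording();
// ... later ...
recordingController.stopRecording();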
@@ -0,0 +1 @@
export * from './RecordingController';
@@ -0,0 +1,7 @@
export * from './actions';
export * from './actionTypes';
export * from './components';
export * from './controller';

import './middleware';
import './reducer';
@@ -0,0 +1,52 @@
/* @flow */

import { APP_WILL_MOUNT, APP_WILL_UNMOUNT } from '../app';
import { CONFERENCE_JOINED } from '../base/conference';
import { i18next } from '../base/i18n';
import { MiddlewareRegistry } from '../base/redux';
import { showNotification } from '../notifications';

import { recordingController } from './controller';
import { signalLocalRecordingEngagement } from './actions';

MiddlewareRegistry.register(({ getState, dispatch }) => next => action => {
    const result = next(action);

    switch (action.type) {
    case CONFERENCE_JOINED: {
        // the Conference object is ready
        const { conference } = getState()['features/base/conference'];

        recordingController.registerEvents(conference);
        break;
    }
    case APP_WILL_MOUNT:
        // realize the delegates on recordingController,
        // providing UI reactions.
        recordingController.onStateChanged = function(state) {
            dispatch(signalLocalRecordingEngagement(state));
        };

        recordingController.onWarning = function(message) {
            dispatch(showNotification({
                title: i18next.t('localRecording.localRecording'),
                description: message
            }, 10000));
        };

        recordingController.onNotify = function(message) {
            dispatch(showNotification({
                title: i18next.t('localRecording.localRecording'),
                description: message
            }, 10000));
        };
        break;
    case APP_WILL_UNMOUNT:
        recordingController.onStateChanged = null;
        recordingController.onNotify = null;
        recordingController.onWarning = null;
        break;
    }

    return result;
});
@@ -0,0 +1,107 @@
import { RecordingAdapter } from './RecordingAdapter';
import { downloadBlob, timestampString } from './Utils';

const logger = require('jitsi-meet-logger').getLogger(__filename);

/**
 * RecordingAdapter implementation that uses MediaRecorder
 * (default browser encoding with Opus codec).
 */
export class OggAdapter extends RecordingAdapter {

    _mediaRecorder = null;

    /**
     * Implements {@link RecordingAdapter#ensureInitialized()}.
     *
     * @inheritdoc
     */
    ensureInitialized() {
        let p = null;

        if (this._mediaRecorder === null) {
            p = new Promise((resolve, error) => {
                navigator.getUserMedia(

                    // constraints, only audio needed
                    {
                        audioBitsPerSecond: 44100, // 44 kbps
                        audio: true,
                        mimeType: 'application/ogg'
                    },

                    // success callback
                    stream => {
                        this._mediaRecorder = new MediaRecorder(stream);
                        this._mediaRecorder.ondataavailable
                            = e => this._saveMediaData(e.data);
                        resolve();
                    },

                    // Error callback
                    err => {
                        logger.error(`Error calling getUserMedia(): ${err}`);
                        error();
                    }
                );
            });
        } else {
            p = new Promise(resolve => {
                resolve();
            });
        }

        return p;
    }

    /**
     * Implements {@link RecordingAdapter#start()}.
     *
     * @inheritdoc
     */
    start() {
        return new Promise(resolve => {
            this._mediaRecorder.start();
            resolve();
        });
    }

    /**
     * Implements {@link RecordingAdapter#stop()}.
     *
     * @inheritdoc
     */
    stop() {
        return new Promise(
            resolve => {
                this._mediaRecorder.onstop = () => resolve();
                this._mediaRecorder.stop();
            }
        );
    }

    /**
     * Implements {@link RecordingAdapter#download()}.
     *
     * @inheritdoc
     */
    download() {
        if (this._recordedData !== null) {
            const audioURL = window.URL.createObjectURL(this._recordedData);

            downloadBlob(audioURL, `recording${timestampString()}.ogg`);
        }

    }

    /**
     * Callback for encoded data.
     *
     * @private
     * @param {*} data - Encoded data.
     * @returns {void}
     */
    _saveMediaData(data) {
        this._recordedData = data;
    }
}
@@ -0,0 +1,41 @@
/**
 * Common interface for recording mechanisms.
 */
export class RecordingAdapter {

    /**
     * Initializes the recording backend.
     *
     * @returns {Promise}
     */
    ensureInitialized() {
        throw new Error('Not implemented');
    }

    /**
     * Starts recording.
     *
     * @returns {Promise}
     */
    start() {
        throw new Error('Not implemented');
    }

    /**
     * Stops recording.
     *
     * @returns {Promise}
     */
    stop() {
        throw new Error('Not implemented');
    }

    /**
     * Initiates download of the recorded and encoded audio file.
     *
     * @returns {void}
     */
    download() {
        throw new Error('Not implemented');
    }
}
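Any additional format would plug in by subclassing this interface and returning Promises from the lifecycle methods. A minimal, purely hypothetical adapter (not part of this commit) illustrating the expected contract:

import { RecordingAdapter } from './RecordingAdapter';

/**
 * Hypothetical no-op adapter, only to illustrate the contract that
 * RecordingController relies on.
 */
class NullAdapter extends RecordingAdapter {

    // Resolve once the backend (worker, AudioContext, etc.) is ready.
    ensureInitialized() {
        return Promise.resolve();
    }

    // Begin collecting audio data.
    start() {
        return Promise.resolve();
    }

    // Stop collecting; encoded data should be ready once this resolves.
    stop() {
        return Promise.resolve();
    }

    // Offer the encoded file to the user; nothing to download here.
    download() {
        // no-op
    }
}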
@@ -0,0 +1,34 @@
/**
 * Force download of Blob in browser by faking an <a> tag.
 *
 * @param {string} blob - Base64 URL.
 * @param {string} fileName - The filename to appear in the download dialog.
 * @returns {void}
 */
export function downloadBlob(blob, fileName = 'recording.ogg') {
    // fake an anchor tag
    const a = document.createElement('a');

    document.body.appendChild(a);
    a.style = 'display: none';
    a.href = blob;
    a.download = fileName;
    a.click();
}

/**
 * Obtains a timestamp of now.
 * Used in filenames.
 *
 * @returns {string}
 */
export function timestampString() {
    const timeStampInMs = window.performance
        && window.performance.now
        && window.performance.timing
        && window.performance.timing.navigationStart
        ? window.performance.now() + window.performance.timing.navigationStart
        : Date.now();

    return timeStampInMs.toString();
}
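A short usage example of these helpers, matching how the adapters below call them (the Blob contents here are placeholder data):

// Wrap some encoded bytes in a Blob and trigger a browser download.
const data = new Blob([ 'placeholder audio bytes' ], { type: 'audio/wav' });
const url = window.URL.createObjectURL(data);

// Produces a filename like `recording<timestamp-in-ms>.wav`.
downloadBlob(url, `recording${timestampString()}.wav`);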
@@ -0,0 +1,284 @@
import { RecordingAdapter } from './RecordingAdapter';
import { downloadBlob, timestampString } from './Utils';

const logger = require('jitsi-meet-logger').getLogger(__filename);

const WAV_BITS_PER_SAMPLE = 16;
const WAV_SAMPLE_RATE = 44100;

/**
 * Recording adapter for raw WAVE format.
 */
export class WavAdapter extends RecordingAdapter {

    _audioContext = null;
    _audioProcessingNode = null;
    _audioSource = null;

    _wavLength = 0;
    _wavBuffers = [];
    _isInitialized = false;

    /**
     * Constructor.
     */
    constructor() {
        super();

        this._saveWavPCM = this._saveWavPCM.bind(this);
    }

    /**
     * Implements {@link RecordingAdapter#ensureInitialized()}.
     *
     * @inheritdoc
     */
    ensureInitialized() {
        if (this._isInitialized) {
            return Promise.resolve();
        }

        const p = new Promise((resolve, reject) => {
            navigator.getUserMedia(

                // constraints - only audio needed for this app
                {
                    audioBitsPerSecond: WAV_SAMPLE_RATE * WAV_BITS_PER_SAMPLE,
                    audio: true,
                    mimeType: 'application/ogg' // useless?
                },

                // Success callback
                stream => {
                    this._audioContext = new AudioContext();
                    this._audioSource
                        = this._audioContext.createMediaStreamSource(stream);
                    this._audioProcessingNode
                        = this._audioContext.createScriptProcessor(4096, 1, 1);
                    this._audioProcessingNode.onaudioprocess = e => {
                        const channelLeft = e.inputBuffer.getChannelData(0);

                        // https://developer.mozilla.org/en-US/docs/
                        // Web/API/AudioBuffer/getChannelData
                        // the returned value is a Float32Array
                        this._saveWavPCM(channelLeft);
                    };
                    this._isInitialized = true;
                    resolve();
                },

                // Error callback
                err => {
                    logger.error(`Error calling getUserMedia(): ${err}`);
                    reject();
                }
            );
        });

        return p;
    }

    /**
     * Implements {@link RecordingAdapter#start()}.
     *
     * @inheritdoc
     */
    start() {
        return new Promise(
            (resolve, /* eslint-disable */_reject/* eslint-enable */) => {
                this._wavBuffers = [];
                this._wavLength = 0;
                this._wavBuffers.push(this._createWavHeader());

                this._audioSource.connect(this._audioProcessingNode);
                this._audioProcessingNode
                    .connect(this._audioContext.destination);
                resolve();
            });
    }

    /**
     * Implements {@link RecordingAdapter#stop()}.
     *
     * @inheritdoc
     */
    stop() {
        this._audioProcessingNode.disconnect();
        this._audioSource.disconnect();
        this._data = this._exportMonoWAV(this._wavBuffers, this._wavLength);

        return Promise.resolve();
    }

    /**
     * Implements {@link RecordingAdapter#download()}.
     *
     * @inheritdoc
     */
    download() {
        if (this._data !== null) {
            const audioURL = window.URL.createObjectURL(this._data);

            downloadBlob(audioURL, `recording${timestampString()}.wav`);
        }

    }

    /**
     * Creates a WAVE file header.
     *
     * @private
     * @returns {Uint8Array}
     */
    _createWavHeader() {
        // adapted from
        // https://github.com/mmig/speech-to-flac/blob/master/encoder.js

        // ref: http://soundfile.sapp.org/doc/WaveFormat/

        // create our WAVE file header
        const buffer = new ArrayBuffer(44);
        const view = new DataView(buffer);

        // RIFF chunk descriptor
        writeUTFBytes(view, 0, 'RIFF');

        // set file size at the end
        writeUTFBytes(view, 8, 'WAVE');

        // FMT sub-chunk
        writeUTFBytes(view, 12, 'fmt ');
        view.setUint32(16, 16, true);
        view.setUint16(20, 1, true);

        // NumChannels
        view.setUint16(22, 1, true);

        // SampleRate
        view.setUint32(24, WAV_SAMPLE_RATE, true);

        // ByteRate
        view.setUint32(28,
            Number(WAV_SAMPLE_RATE) * 1 * WAV_BITS_PER_SAMPLE / 8, true);

        // BlockAlign
        view.setUint16(32, 1 * Number(WAV_BITS_PER_SAMPLE) / 8, true);

        view.setUint16(34, WAV_BITS_PER_SAMPLE, true);

        // data sub-chunk
        writeUTFBytes(view, 36, 'data');

        // DUMMY file length (set real value on export)
        view.setUint32(4, 10, true);

        // DUMMY data chunk length (set real value on export)
        view.setUint32(40, 10, true);

        return new Uint8Array(buffer);
    }


    /**
     * Callback function that saves the PCM bits.
     *
     * @private
     * @param {Float32Array} data - The audio PCM data.
     * @returns {void}
     */
    _saveWavPCM(data) {
        // Need to copy the Float32Array:
        // unlike when passing to a WebWorker,
        // this data is passed by reference,
        // so we need to copy it, otherwise the
        // audio file will be just repeating the last
        // segment.
        this._wavBuffers.push(new Float32Array(data));
        this._wavLength += data.length;
    }

    /**
     * Combines buffers and exports to a wav file.
     *
     * @private
     * @param {*} buffers - The stored buffers.
     * @param {*} length - The total number of audio samples.
     * @returns {Blob}
     */
    _exportMonoWAV(buffers, length) {
        // buffers: array with
        // buffers[0] = header information (with missing length information)
        // buffers[1] = Float32Array object (audio data)
        // ...
        // buffers[n] = Float32Array object (audio data)

        const dataLength = length * 2; // each sample becomes 2 bytes (16 bit)
        const buffer = new ArrayBuffer(44 + dataLength);
        const view = new DataView(buffer);

        // copy WAV header data into the array buffer
        const header = buffers[0];
        const len = header.length;

        for (let i = 0; i < len; ++i) {
            view.setUint8(i, header[i]);
        }

        // add file length in header
        view.setUint32(4, 32 + dataLength, true);

        // add data chunk length in header
        view.setUint32(40, dataLength, true);

        // write audio data
        floatTo16BitPCM(view, 44, buffers);

        return new Blob([ view ], { type: 'audio/wav' });
    }
}


/**
 * Helper function. Writes a UTF string to memory
 * using big endianness. Required by WAVE headers.
 *
 * @param {ArrayBuffer} view - The view to memory.
 * @param {*} offset - Offset.
 * @param {*} string - The string to be written.
 * @returns {void}
 */
function writeUTFBytes(view, offset, string) {
    const lng = string.length;

    // convert to big endianness
    for (let i = 0; i < lng; ++i) {
        view.setUint8(offset + i, string.charCodeAt(i));
    }
}

/**
 * Helper function for converting Float32Array to Int16Array.
 *
 * @param {*} output - The output buffer.
 * @param {*} offset - The offset in output buffer to write from.
 * @param {*} inputBuffers - The input buffers.
 * @returns {void}
 */
function floatTo16BitPCM(output, offset, inputBuffers) {

    let i, input, isize, s;
    const jsize = inputBuffers.length;
    let o = offset;

    // first entry is header information (already used in exportMonoWAV),
    // rest is Float32Array-entries -> ignore header entry
    for (let j = 1; j < jsize; ++j) {
        input = inputBuffers[j];
        isize = input.length;
        for (i = 0; i < isize; ++i, o += 2) {
            s = Math.max(-1, Math.min(1, input[i]));
            output.setInt16(o, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
        }
    }
}
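To make the header fields above concrete with the constants used here (mono, 16-bit, 44.1 kHz):

ByteRate = SampleRate x NumChannels x BitsPerSample / 8 = 44100 x 1 x 16 / 8 = 88200 bytes/s
BlockAlign = NumChannels x BitsPerSample / 8 = 1 x 16 / 8 = 2 bytes per sample frame

The 2-byte block align is also why _exportMonoWAV allocates `length * 2` bytes for the data chunk: each Float32 sample is written out as one 16-bit PCM value.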
@@ -0,0 +1,170 @@
import { RecordingAdapter } from '../RecordingAdapter';
import { downloadBlob, timestampString } from '../Utils';
import {
    DEBUG,
    MAIN_THREAD_FINISH,
    MAIN_THREAD_INIT,
    MAIN_THREAD_NEW_DATA_ARRIVED,
    WORKER_BLOB_READY,
    WORKER_LIBFLAC_READY
} from './messageTypes';

const logger = require('jitsi-meet-logger').getLogger(__filename);

/**
 * Recording adapter that uses libflac in the background.
 */
export class FlacAdapter extends RecordingAdapter {

    _encoder = null;
    _audioContext = null;
    _audioProcessingNode = null;
    _audioSource = null;

    _stopPromiseResolver = null;

    /**
     * Implements {@link RecordingAdapter#ensureInitialized}.
     *
     * @inheritdoc
     */
    ensureInitialized() {
        if (this._encoder !== null) {
            return Promise.resolve();
        }

        const promiseInitWorker = new Promise((resolve, reject) => {
            // FIXME: workaround for different file names in development/
            // production environments.
            // We cannot import flacEncodeWorker as a webpack module,
            // because it is in a different bundle and should be lazy-loaded
            // only when flac recording is in use.
            try {
                // try to load the minified version first
                this._encoder = new Worker('/libs/flacEncodeWorker.min.js');
            } catch (exception1) {
                // if that failed, try the un-minified version
                try {
                    this._encoder = new Worker('/libs/flacEncodeWorker.js');
                } catch (exception2) {
                    logger.error('Failed to load flacEncodeWorker.');
                    reject();
                }
            }

            // set up the listener for messages from the WebWorker
            this._encoder.onmessage = e => {
                if (e.data.command === WORKER_BLOB_READY) {
                    // receiving blob
                    this._data = e.data.buf;
                    if (this._stopPromiseResolver !== null) {
                        this._stopPromiseResolver();
                        this._stopPromiseResolver = null;
                    }
                } else if (e.data.command === DEBUG) {
                    logger.log(e.data);
                } else if (e.data.command === WORKER_LIBFLAC_READY) {
                    logger.debug('libflac is ready.');
                    resolve();
                } else {
                    logger.error(
                        `Unknown event
                        from encoder (WebWorker): "${e.data.command}"!`);
                }
            };

            this._encoder.postMessage({
                command: MAIN_THREAD_INIT,
                config: {
                    sampleRate: 44100,
                    bps: 16
                }
            });
        });

        const callbackInitAudioContext = (resolve, reject) => {
            navigator.getUserMedia(

                // constraints - only audio needed for this app
                {
                    audioBitsPerSecond: 44100, // 44 kbps
                    audio: true,
                    mimeType: 'application/ogg' // useless?
                },

                // Success callback
                stream => {
                    this._audioContext = new AudioContext();
                    this._audioSource
                        = this._audioContext.createMediaStreamSource(stream);
                    this._audioProcessingNode
                        = this._audioContext.createScriptProcessor(4096, 1, 1);
                    this._audioProcessingNode.onaudioprocess = e => {
                        // delegate to the WebWorker to do the encoding
                        const channelLeft = e.inputBuffer.getChannelData(0);

                        this._encoder.postMessage({
                            command: MAIN_THREAD_NEW_DATA_ARRIVED,
                            buf: channelLeft
                        });
                    };
                    logger.debug('AudioContext is set up.');
                    resolve();
                },

                // Error callback
                err => {
                    logger.error(`Error calling getUserMedia(): ${err}`);
                    reject();
                }
            );
        };

        // FIXME: the Promise constructor immediately executes the executor
        // function, which is undesirable here: we want callbackInitAudioContext
        // to be executed only **after** promiseInitWorker is resolved.
        return promiseInitWorker
            .then(() => new Promise(callbackInitAudioContext));
    }

    /**
     * Implements {@link RecordingAdapter#start()}.
     *
     * @inheritdoc
     */
    start() {
        this._audioSource.connect(this._audioProcessingNode);
        this._audioProcessingNode.connect(this._audioContext.destination);
    }

    /**
     * Implements {@link RecordingAdapter#stop()}.
     *
     * @inheritdoc
     */
    stop() {
        return new Promise(resolve => {
            this._audioProcessingNode.onaudioprocess = undefined;
            this._audioProcessingNode.disconnect();
            this._audioSource.disconnect();
            this._stopPromiseResolver = resolve;
            this._encoder.postMessage({
                command: MAIN_THREAD_FINISH
            });
        });
    }

    /**
     * Implements {@link RecordingAdapter#download()}.
     *
     * @inheritdoc
     */
    download() {
        if (this._data !== null) {
            const audioURL = window.URL.createObjectURL(this._data);

            downloadBlob(audioURL, `recording${timestampString()}.flac`);
        }

    }
}
@ -0,0 +1,416 @@
|
|||
import {
|
||||
MAIN_THREAD_FINISH,
|
||||
MAIN_THREAD_INIT,
|
||||
MAIN_THREAD_NEW_DATA_ARRIVED,
|
||||
WORKER_BLOB_READY,
|
||||
WORKER_LIBFLAC_READY
|
||||
} from './messageTypes';
|
||||
|
||||
/**
|
||||
* WebWorker that does FLAC encoding using libflac.js
|
||||
*/
|
||||
|
||||
/* eslint-disable */
|
||||
importScripts('/libs/libflac3-1.3.2.min.js');
|
||||
/* eslint-enable */
|
||||
|
||||
// There is a number of API calls to libflac.js, which does not conform
|
||||
// to the camalCase naming convention, but we cannot change it.
|
||||
// So we disable the ESLint rule `new-cap` in this file.
|
||||
/* eslint-disable new-cap */
|
||||
|
||||
// Flow will complain about the number keys in `FLAC_ERRORS,
|
||||
// ESLint will complain about the `declare` statement.
|
||||
// As the current workaround, add an exception for eslint.
|
||||
/* eslint-disable flowtype/no-types-missing-file-annotation*/
|
||||
declare var Flac: Object;
|
||||
|
||||
const FLAC_ERRORS = {
|
||||
// The encoder is in the normal OK state and
|
||||
// samples can be processed.
|
||||
0: 'FLAC__STREAM_ENCODER_OK',
|
||||
|
||||
// The encoder is in the
|
||||
// uninitialized state one of the FLAC__stream_encoder_init_*() functions
|
||||
// must be called before samples can be processed.
|
||||
1: 'FLAC__STREAM_ENCODER_UNINITIALIZED',
|
||||
|
||||
// An error occurred in the underlying Ogg layer.
|
||||
2: 'FLAC__STREAM_ENCODER_OGG_ERROR',
|
||||
|
||||
// An error occurred in the
|
||||
// underlying verify stream decoder; check
|
||||
// FLAC__stream_encoder_get_verify_decoder_state().
|
||||
3: 'FLAC__STREAM_ENCODER_VERIFY_DECODER_ERROR',
|
||||
|
||||
// The verify decoder detected a mismatch between the
|
||||
// original audio signal and the decoded audio signal.
|
||||
|
||||
4: 'FLAC__STREAM_ENCODER_VERIFY_MISMATCH_IN_AUDIO_DATA',
|
||||
|
||||
// One of the callbacks returned
|
||||
// a fatal error.
|
||||
5: 'FLAC__STREAM_ENCODER_CLIENT_ERROR',
|
||||
|
||||
// An I/O error occurred while
|
||||
// opening/reading/writing a file. Check errno.
|
||||
|
||||
6: 'FLAC__STREAM_ENCODER_IO_ERROR',
|
||||
|
||||
// An error occurred while writing
|
||||
// the stream; usually, the write_callback returned an error.
|
||||
7: 'FLAC__STREAM_ENCODER_FRAMING_ERROR',
|
||||
|
||||
// Memory allocation failed.
|
||||
8: 'FLAC__STREAM_ENCODER_MEMORY_ALLOCATION_ERROR'
|
||||
};
|
||||
|
||||
/**
|
||||
* States of the {@code Encoder}.
|
||||
*/
|
||||
const EncoderState = Object.freeze({
|
||||
/**
|
||||
* Initial state, when libflac.js is not initialized.
|
||||
*/
|
||||
UNINTIALIZED: Symbol('uninitialized'),
|
||||
|
||||
/**
|
||||
* Actively encoding new audio bits.
|
||||
*/
|
||||
WORKING: Symbol('working'),
|
||||
|
||||
/**
|
||||
* Encoding has finished and encoded bits are available.
|
||||
*/
|
||||
FINISHED: Symbol('finished')
|
||||
});
|
||||
|
||||
/**
|
||||
* Default compression level.
|
||||
*/
|
||||
const FLAC_COMPRESSION_LEVEL = 5;
|
||||
|
||||
/**
|
||||
* Concat multiple Uint8Arrays into one.
|
||||
*
|
||||
* @param {Array} arrays - Array of Uint8 arrays.
|
||||
* @param {*} totalLength - Total length of all Uint8Arrays.
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
function mergeUint8Arrays(arrays, totalLength) {
|
||||
const result = new Uint8Array(totalLength);
|
||||
let offset = 0;
|
||||
const len = arrays.length;
|
||||
|
||||
for (let i = 0; i < len; i++) {
|
||||
const buffer = arrays[i];
|
||||
|
||||
result.set(buffer, offset);
|
||||
offset += buffer.length;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|

/**
 * Wrapper class around the libflac.js API.
 */
class Encoder {

    /**
     * FLAC encoder instance ID (as per the libflac.js API).
     * @private
     */
    _encoderId = 0;

    /**
     * Sample rate.
     * @private
     */
    _sampleRate;

    /**
     * Bit depth (bits per sample).
     * @private
     */
    _bitDepth;

    /**
     * Buffer size.
     * @private
     */
    _bufferSize;

    /**
     * Buffers to store encoded bits temporarily.
     */
    _flacBuffers = [];

    /**
     * Length of the encoded FLAC bits.
     */
    _flacLength = 0;

    /**
     * The current state of the {@code Encoder}.
     */
    _state = EncoderState.UNINITIALIZED;

    /**
     * The ready-for-grab downloadable Blob.
     */
    _data = null;

    /**
     * Constructor.
     * Note: only create an instance when Flac.isReady() returns true.
     *
     * @param {number} sampleRate - Sample rate of the raw audio data.
     * @param {number} bitDepth - Bit depth (bits per sample).
     * @param {number} bufferSize - The size of each batch.
     */
    constructor(sampleRate, bitDepth = 16, bufferSize = 4096) {
        if (!Flac.isReady()) {
            throw new Error('libflac is not ready yet!');
        }

        this._sampleRate = sampleRate;
        this._bitDepth = bitDepth;
        this._bufferSize = bufferSize;

        // Create the encoder.
        this._encoderId = Flac.init_libflac_encoder(
            this._sampleRate,

            // Mono channel
            1,
            this._bitDepth,
            FLAC_COMPRESSION_LEVEL,

            // Pass 0 because the total number of samples is unknown.
            0,

            // checksum, FIXME: double-check whether this is necessary
            true,

            // Auto-determine block size (samples per frame)
            0
        );

        if (this._encoderId === 0) {
            throw new Error('Failed to create libflac encoder.');
        }

        // Initialize the encoder.
        const initResult = Flac.init_encoder_stream(
            this._encoderId,
            this._onEncodedData.bind(this),
            this._onMetadataAvailable.bind(this)
        );

        if (initResult !== 0) {
            throw new Error('Failed to initialize libflac encoder.');
        }

        this._state = EncoderState.WORKING;
    }

    /**
     * Receives and encodes new audio data.
     *
     * @param {*} audioData - Raw audio data.
     * @returns {void}
     */
    encode(audioData) {
        if (this._state !== EncoderState.WORKING) {
            throw new Error('Encoder is not ready or has finished.');
        }

        if (!Flac.isReady()) {
            throw new Error('Flac not ready');
        }
        const bufferLength = audioData.length;

        // Convert the samples to 32-bit signed integers, since libflac.js
        // apparently requires 32-bit signed integer input.
        const bufferI32 = new Int32Array(bufferLength);
        const view = new DataView(bufferI32.buffer);
        const volume = 1;
        let index = 0;

        for (let i = 0; i < bufferLength; i++) {
            view.setInt32(index, audioData[i] * (0x7FFF * volume), true);
            index += 4; // 4 bytes (32 bits) per sample
        }

        // Pass the samples to libflac.
        const status = Flac.FLAC__stream_encoder_process_interleaved(
            this._encoderId,
            bufferI32,
            bufferI32.length
        );

        if (status !== 1) {
            // Get the error code from the encoder.
            const errorNo
                = Flac.FLAC__stream_encoder_get_state(this._encoderId);

            console.error('Error during encoding', FLAC_ERRORS[errorNo]);
        }
    }

    /**
     * Signals the termination of encoding.
     *
     * @returns {void}
     */
    finish() {
        if (this._state === EncoderState.WORKING) {
            this._state = EncoderState.FINISHED;

            const status = Flac.FLAC__stream_encoder_finish(this._encoderId);

            console.log('flac encoding finish: ', status);

            // Free up resources.
            Flac.FLAC__stream_encoder_delete(this._encoderId);

            this._data = this._exportFlacBlob();
        }
    }

    /**
     * Gets the stats.
     *
     * @returns {Object}
     */
    getStats() {
        return {
            'samplesEncoded': this._bufferSize
        };
    }

    /**
     * Gets the encoded FLAC file.
     *
     * @returns {Blob} - The encoded FLAC file.
     */
    getBlob() {
        if (this._state === EncoderState.FINISHED) {
            return this._data;
        }

        return null;
    }

    /**
     * Converts the FLAC buffers into a Blob.
     *
     * @private
     * @returns {Blob}
     */
    _exportFlacBlob() {
        const samples = mergeUint8Arrays(this._flacBuffers, this._flacLength);

        const blob = new Blob([ samples ], { type: 'audio/flac' });

        return blob;
    }

    /* eslint-disable no-unused-vars */
    /**
     * Callback function for saving encoded FLAC data.
     * This is invoked by libflac.
     *
     * @private
     * @param {Uint8Array} buffer - The encoded FLAC data.
     * @param {number} bytes - Number of bytes in the data.
     * @returns {void}
     */
    _onEncodedData(buffer, bytes) {
        this._flacBuffers.push(buffer);
        this._flacLength += buffer.byteLength;
    }
    /* eslint-enable no-unused-vars */

    /**
     * Callback function for receiving metadata.
     *
     * @private
     * @returns {void}
     */
    _onMetadataAvailable = () => {
        // Reserved for future use.
    }
}


let encoder = null;

self.onmessage = function(e) {

    switch (e.data.command) {
    case MAIN_THREAD_INIT:
    {
        const bps = e.data.config.bps;
        const sampleRate = e.data.config.sampleRate;

        if (Flac.isReady()) {
            encoder = new Encoder(sampleRate, bps);
            self.postMessage({
                command: WORKER_LIBFLAC_READY
            });
        } else {
            Flac.onready = function() {
                setTimeout(() => {
                    encoder = new Encoder(sampleRate, bps);
                    self.postMessage({
                        command: WORKER_LIBFLAC_READY
                    });
                }, 0);
            };
        }
        break;
    }

    case MAIN_THREAD_NEW_DATA_ARRIVED:
        if (encoder === null) {
            console.error('flacEncodeWorker: '
                + 'received data when the encoder is not ready.');
        } else {
            encoder.encode(e.data.buf);
        }
        break;

    case MAIN_THREAD_FINISH:
        if (encoder !== null) {
            encoder.finish();
            const data = encoder.getBlob();

            self.postMessage(
                {
                    command: WORKER_BLOB_READY,
                    buf: data
                }
            );
            encoder = null;
        }
        break;
    }
};

/**
 * if(wavBuffers.length > 0){
 *     // if there is buffered audio: encode buffered first (and clear buffer)
 *     var len = wavBuffers.length;
 *     var buffered = wavBuffers.splice(0, len);
 *     for(var i = 0; i < len; ++i){
 *         doEncodeFlac(buffered[i]);
 *     }
 * }
 */
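
For reference, a minimal sketch of the main-thread side of the protocol implemented by the worker above. The message shapes mirror the onmessage handler, and the literal command strings equal the exported message-type constants; the worker bundle path and the surrounding variable names are illustrative assumptions, not the actual FlacAdapter code.

// Sketch only: driving flacEncodeWorker from the main thread.
// The bundle path assumes the file deployed to libs/ by the Makefile.
const worker = new Worker('libs/flacEncodeWorker.min.js');

worker.onmessage = e => {
    switch (e.data.command) {
    case 'WORKER_LIBFLAC_READY':
        // The encoder is initialized; audio data may be sent from now on.
        break;
    case 'WORKER_BLOB_READY':
        // e.data.buf is the finished FLAC recording as a Blob.
        break;
    }
};

// 1. Initialize the encoder.
worker.postMessage({
    command: 'MAIN_THREAD_INIT',
    config: {
        bps: 16,
        sampleRate: 44100
    }
});

// 2. Stream raw audio samples (e.g. a Float32Array) as they arrive:
// worker.postMessage({ command: 'MAIN_THREAD_NEW_DATA_ARRIVED', buf: samples });

// 3. Finish and request the encoded Blob:
// worker.postMessage({ command: 'MAIN_THREAD_FINISH' });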

@@ -0,0 +1 @@
export * from './FlacAdapter';

@@ -0,0 +1,44 @@
/**
 * Types of messages that are passed between the main thread and the
 * WebWorker ({@code flacEncodeWorker}).
 */

// Messages sent by the main thread

/**
 * Message type that signals the termination of encoding,
 * after which no new audio bits should be sent to the
 * WebWorker.
 */
export const MAIN_THREAD_FINISH = 'MAIN_THREAD_FINISH';

/**
 * Message type that carries initial parameters for
 * the WebWorker.
 */
export const MAIN_THREAD_INIT = 'MAIN_THREAD_INIT';

/**
 * Message type that carries the newly received raw audio bits
 * for the WebWorker to encode.
 */
export const MAIN_THREAD_NEW_DATA_ARRIVED = 'MAIN_THREAD_NEW_DATA_ARRIVED';

// Messages sent by the WebWorker

/**
 * Message type that signals that libflac is ready to receive audio bits.
 */
export const WORKER_LIBFLAC_READY = 'WORKER_LIBFLAC_READY';

/**
 * Message type that carries the encoded FLAC file as a Blob.
 */
export const WORKER_BLOB_READY = 'WORKER_BLOB_READY';

// Messages sent by either the main thread or the WebWorker

/**
 * Debug messages.
 */
export const DEBUG = 'DEBUG';
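
Once the main thread receives WORKER_BLOB_READY, the buf field holds the encoded audio/flac Blob. A hypothetical way to hand it to the user as a download, using standard DOM APIs only (this helper is not part of the commit):

// Sketch only: save the encoded FLAC Blob as a file download.
function downloadFlac(blob, fileName = 'recording.flac') {
    const url = URL.createObjectURL(blob);
    const a = document.createElement('a');

    a.href = url;
    a.download = fileName;
    document.body.appendChild(a);
    a.click();

    // Release the temporary object URL and remove the helper element.
    document.body.removeChild(a);
    URL.revokeObjectURL(url);
}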

@@ -0,0 +1,4 @@
export * from './RecordingAdapter';
export * from './flac';
export * from './OggAdapter';
export * from './WavAdapter';

@@ -0,0 +1,46 @@
/* @flow */

import { ReducerRegistry } from '../base/redux';
import {
    LOCAL_RECORDING_ENGAGED,
    LOCAL_RECORDING_STATS_UPDATE,
    LOCAL_RECORDING_TOGGLE_DIALOG,
    LOCAL_RECORDING_UNENGAGED
} from './actionTypes';
import { recordingController } from './controller';

const logger = require('jitsi-meet-logger').getLogger(__filename);

ReducerRegistry.register('features/local-recording', (state = {}, action) => {
    logger.debug(`Redux state (features/local-recording):\n ${
        JSON.stringify(state)}`);
    switch (action.type) {
    case LOCAL_RECORDING_ENGAGED: {
        return {
            ...state,
            isEngaged: true,
            recordingStartedAt: new Date(Date.now()),
            encodingFormat: recordingController._format
        };
    }
    case LOCAL_RECORDING_UNENGAGED:
        return {
            ...state,
            isEngaged: false,
            recordingStartedAt: null
        };
    case LOCAL_RECORDING_TOGGLE_DIALOG:
        return {
            ...state,
            showDialog: state.showDialog === undefined
                || state.showDialog === false
        };
    case LOCAL_RECORDING_STATS_UPDATE:
        return {
            ...state,
            stats: action.stats
        };
    default:
        return state;
    }
});
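
For reference, these are the action shapes the reducer above handles. The creator names are illustrative; apart from toggleLocalRecordingInfoDialog, which this commit imports elsewhere, the real creators live in the feature's actions file and are not shown in this excerpt.

// Sketch only: plain action creators matching the cases in the reducer.
import {
    LOCAL_RECORDING_ENGAGED,
    LOCAL_RECORDING_STATS_UPDATE,
    LOCAL_RECORDING_TOGGLE_DIALOG,
    LOCAL_RECORDING_UNENGAGED
} from './actionTypes';

export function localRecordingEngaged() {
    return { type: LOCAL_RECORDING_ENGAGED };
}

export function localRecordingUnengaged() {
    return { type: LOCAL_RECORDING_UNENGAGED };
}

export function toggleLocalRecordingInfoDialog() {
    return { type: LOCAL_RECORDING_TOGGLE_DIALOG };
}

export function statsUpdate(stats) {
    return {
        type: LOCAL_RECORDING_STATS_UPDATE,
        stats
    };
}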

@@ -28,6 +28,10 @@ import {
    isDialOutEnabled
} from '../../../invite';
import { openKeyboardShortcutsDialog } from '../../../keyboard-shortcuts';
import {
    toggleLocalRecordingInfoDialog,
    LocalRecordingButton
} from '../../../local-recording';
import {
    LiveStreamButton,
    RecordButton

@@ -148,6 +152,8 @@ type Props = {
     */
    _sharingVideo: boolean,

    _localRecState: Object,

    /**
     * Whether or not transcribing is enabled.
     */

@@ -158,6 +164,8 @@ type Props = {
     */
    _visible: boolean,

    _localRecState: any,

    /**
     * Set with the buttons which this Toolbox should display.
     */

@@ -227,6 +235,8 @@ class Toolbox extends Component<Props> {
            = this._onToolbarToggleScreenshare.bind(this);
        this._onToolbarToggleSharedVideo
            = this._onToolbarToggleSharedVideo.bind(this);
        this._onToolbarToggleLocalRecordingInfoDialog
            = this._onToolbarToggleLocalRecordingInfoDialog.bind(this);
    }

    /**

@@ -369,6 +379,11 @@ class Toolbox extends Component<Props> {
                    visible = { this._shouldShowButton('camera') } />
            </div>
            <div className = 'button-group-right'>
                <LocalRecordingButton
                    isDialogShown = { this.props._localRecState.showDialog }
                    onClick = {
                        this._onToolbarToggleLocalRecordingInfoDialog
                    } />
                { this._shouldShowButton('invite')
                    && !_hideInviteButton
                    && <ToolbarButton

@@ -839,6 +854,18 @@ class Toolbox extends Component<Props> {
        this._doToggleSharedVideo();
    }

    _onToolbarToggleLocalRecordingInfoDialog: () => void;

    /**
     * Toggles the display of the local recording info dialog.
     *
     * @private
     * @returns {void}
     */
    _onToolbarToggleLocalRecordingInfoDialog() {
        this.props.dispatch(toggleLocalRecordingInfoDialog());
    }

    /**
     * Renders a button for toggling screen sharing.
     *

@@ -1021,6 +1048,7 @@ function _mapStateToProps(state) {
    const localVideo = getLocalVideoTrack(state['features/base/tracks']);
    const addPeopleEnabled = isAddPeopleEnabled(state);
    const dialOutEnabled = isDialOutEnabled(state);
    const localRecordingStates = state['features/local-recording'];

    let desktopSharingDisabledTooltipKey;

@@ -1059,6 +1087,7 @@ function _mapStateToProps(state) {
        _fullScreen: fullScreen,
        _localParticipantID: localParticipant.id,
        _overflowMenuVisible: overflowMenuVisible,
        _localRecState: localRecordingStates,
        _raisedHand: localParticipant.raisedHand,
        _screensharing: localVideo && localVideo.videoType === 'desktop',
        _transcribingEnabled: transcribingEnabled,

@@ -149,7 +149,11 @@ module.exports = [
        ],

        'do_external_connect':
            './connection_optimization/do_external_connect.js'
            './connection_optimization/do_external_connect.js',

        'flacEncodeWorker':
            './react/features/local-recording/'
                + 'recording/flac/flacEncodeWorker.js'
    }
}),