feat(local-recordings) drop old "local recordings" implementation

The name is about to become very confusing, since we are going to add
actual local recordings with video.

This feature was never fully finalized, since it required manual
processing of the files, as they were not uploaded anywhere.

In addition, unless one opens the local audio device without any audio
processing first, any tracks opened later will have audio processing
turned on, which is not desirable for the scenario this feature was
designed for in the first place: podcasts.
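
By way of illustration, a "processing-free" capture of that kind boils down to requesting the microphone with the browser's processing constraints disabled before any processed track exists. A minimal sketch using the standard getUserMedia constraints (illustrative only, not code from this repository):

// Minimal sketch: open the microphone with browser-side audio processing
// disabled. These are standard MediaTrackConstraints; browsers may ignore
// constraints they do not support.
async function openRawMicrophone() {
    const stream = await navigator.mediaDevices.getUserMedia({
        audio: {
            autoGainControl: false,
            echoCancellation: false,
            noiseSuppression: false
        }
    });

    return stream.getAudioTracks()[0];
}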

This feature will likely come back as a JaaS demo / MVP where the local
recording is made outside of the Jitsi Meet iframe.
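
As an illustration of that direction (hypothetical: the domain, room name and element id below are placeholders, none of this is part of this commit), the embedding page could pair the iframe API with MediaRecorder on its own raw capture:

// Hypothetical sketch: the page embeds the conference via the iframe API
// and records its own raw microphone capture, entirely outside the iframe.
// Assumes external_api.js is already loaded on the page.
const api = new JitsiMeetExternalAPI('meet.example.com', {
    roomName: 'podcast-demo',
    parentNode: document.querySelector('#meet')
});

navigator.mediaDevices.getUserMedia({
    audio: {
        autoGainControl: false,
        echoCancellation: false,
        noiseSuppression: false
    }
}).then(stream => {
    const chunks = [];
    const recorder = new MediaRecorder(stream);

    recorder.ondataavailable = e => chunks.push(e.data);
    recorder.onstop = () => {
        // Offer the result as a file download.
        const a = document.createElement('a');

        a.href = URL.createObjectURL(new Blob(chunks, { type: 'audio/webm' }));
        a.download = 'local-recording.webm';
        a.click();
    };
    recorder.start();
    api.addEventListener('readyToClose', () => recorder.stop());
});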
Saúl Ibarra Corretgé, 2022-04-29 12:44:26 +02:00 (committed by Saúl Ibarra Corretgé)
parent de7c9bd001
commit 9dd44fc48e
39 changed files with 5 additions and 3444 deletions


@@ -2,7 +2,6 @@ BUILD_DIR = build
 CLEANCSS = ./node_modules/.bin/cleancss
 DEPLOY_DIR = libs
 LIBJITSIMEET_DIR = node_modules/lib-jitsi-meet
-LIBFLAC_DIR = node_modules/libflacjs/dist/min
 OLM_DIR = node_modules/@matrix-org/olm
 TF_WASM_DIR = node_modules/@tensorflow/tfjs-backend-wasm/dist/
 RNNOISE_WASM_DIR = node_modules/rnnoise-wasm/dist
@@ -30,7 +29,7 @@ clean:
 rm -fr $(BUILD_DIR)
 .NOTPARALLEL:
-deploy: deploy-init deploy-appbundle deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-libflac deploy-olm deploy-tf-wasm deploy-css deploy-local deploy-face-landmarks
+deploy: deploy-init deploy-appbundle deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-olm deploy-tf-wasm deploy-css deploy-local deploy-face-landmarks
 deploy-init:
 rm -fr $(DEPLOY_DIR)
@@ -44,8 +43,6 @@ deploy-appbundle:
 $(BUILD_DIR)/do_external_connect.min.js.map \
 $(BUILD_DIR)/external_api.min.js \
 $(BUILD_DIR)/external_api.min.js.map \
-$(BUILD_DIR)/flacEncodeWorker.min.js \
-$(BUILD_DIR)/flacEncodeWorker.min.js.map \
 $(BUILD_DIR)/dial_in_info_bundle.min.js \
 $(BUILD_DIR)/dial_in_info_bundle.min.js.map \
 $(BUILD_DIR)/alwaysontop.min.js \
@@ -70,12 +67,6 @@ deploy-lib-jitsi-meet:
 $(LIBJITSIMEET_DIR)/modules/browser/capabilities.json \
 $(DEPLOY_DIR)
-deploy-libflac:
-cp \
-$(LIBFLAC_DIR)/libflac4-1.3.2.min.js \
-$(LIBFLAC_DIR)/libflac4-1.3.2.min.js.mem \
-$(DEPLOY_DIR)
 deploy-olm:
 cp \
 $(OLM_DIR)/olm.wasm \
@@ -118,7 +109,7 @@ deploy-local:
 ([ ! -x deploy-local.sh ] || ./deploy-local.sh)
 .NOTPARALLEL:
-dev: deploy-init deploy-css deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-libflac deploy-olm deploy-tf-wasm deploy-face-landmarks
+dev: deploy-init deploy-css deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-olm deploy-tf-wasm deploy-face-landmarks
 $(WEBPACK_DEV_SERVER)
 source-package:


@@ -1143,7 +1143,7 @@ var config = {
 // If a label's id is not in any of the 2 arrays, it will not be visible at all on the header.
 // conferenceInfo: {
 // // those labels will not be hidden in tandem with the toolbox.
-// alwaysVisible: ['recording', 'local-recording', 'raised-hands-count'],
+// alwaysVisible: ['recording', 'raised-hands-count'],
 // // those labels will be auto-hidden in tandem with the toolbox buttons.
 // autoHide: [
 // 'subject',


@@ -40,7 +40,6 @@ $flagsImagePath: "../images/";
 @import 'modals/invite/info';
 @import 'modals/screen-share/share-audio';
 @import 'modals/screen-share/share-screen-warning';
-@import 'modals/local-recording/local-recording';
 @import 'videolayout_default';
 @import 'notice';
 @import 'subject';


@@ -1,92 +0,0 @@
.localrec-participant-stats {
list-style: none;
padding: 0;
width: 100%;
font-weight: 500;
.localrec-participant-stats-item__status-dot {
position: relative;
display: block;
width: 9px;
height: 9px;
border-radius: 50%;
margin: 0 auto;
&.status-on {
background: green;
}
&.status-off {
background: gray;
}
&.status-unknown {
background: darkgoldenrod;
}
&.status-error {
background: darkred;
}
}
.localrec-participant-stats-item__status,
.localrec-participant-stats-item__name,
.localrec-participant-stats-item__sessionid {
display: inline-block;
margin: 5px 0;
vertical-align: middle;
}
.localrec-participant-stats-item__status {
width: 5%;
}
.localrec-participant-stats-item__name {
width: 40%;
}
.localrec-participant-stats-item__sessionid {
width: 55%;
}
.localrec-participant-stats-item__name,
.localrec-participant-stats-item__sessionid {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
}
.localrec-control-info-label {
font-weight: bold;
}
.localrec-control-info-label:after {
content: ' ';
}
.localrec-control-action-link {
display: inline-block;
line-height: 1.5em;
a {
cursor: pointer;
vertical-align: middle;
}
}
.localrec-control-action-link:before {
color: $linkFontColor;
content: '\2022';
font-size: 1.5em;
padding: 0 10px;
vertical-align: middle;
}
.localrec-control-action-link:first-child:before {
content: '';
padding: 0;
}
.localrec-control-action-links {
font-weight: bold;
margin-top: 10px;
white-space: nowrap;
}

package-lock.json (generated)

@@ -73,7 +73,6 @@
 "js-md5": "0.6.1",
 "jwt-decode": "2.2.0",
 "lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1430.0.0+ccf9ebed/lib-jitsi-meet.tgz",
-"libflacjs": "https://git@github.com/mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
 "lodash": "4.17.21",
 "moment": "2.29.2",
 "moment-duration-format": "2.2.2",
@@ -12155,29 +12154,6 @@
 "uuid": "dist/bin/uuid"
 }
 },
-"node_modules/libflacjs": {
-"version": "4.0.0",
-"resolved": "git+https://git@github.com/mmig/libflac.js.git#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
-"integrity": "sha512-7cscxyqMkeUa5PpHOqhIkXgyrmPqxCzYobtXnrnwXFkY5+tvRjwsZqQQ52Z9K4AebDzpNaApK+NVn+gK4CwWUw==",
-"license": "MIT",
-"bin": {
-"libflac4-1.3.2.dev.js": "dist/dev/libflac4-1.3.2.dev.js",
-"libflac4-1.3.2.dev.js.map": "dist/dev/libflac4-1.3.2.dev.js.map",
-"libflac4-1.3.2.dev.wasm.js": "dist/dev/libflac4-1.3.2.dev.wasm.js",
-"libflac4-1.3.2.dev.wasm.wasm": "dist/dev/libflac4-1.3.2.dev.wasm.wasm",
-"libflac4-1.3.2.dev.wasm.wasm.map": "dist/dev/libflac4-1.3.2.dev.wasm.wasm.map",
-"libflac4-1.3.2.dev.wasm.wast": "dist/dev/libflac4-1.3.2.dev.wasm.wast",
-"libflac4-1.3.2.js": "dist/libflac4-1.3.2.js",
-"libflac4-1.3.2.min.js": "dist/min/libflac4-1.3.2.min.js",
-"libflac4-1.3.2.min.js.mem": "dist/min/libflac4-1.3.2.min.js.mem",
-"libflac4-1.3.2.min.js.symbols": "dist/min/libflac4-1.3.2.min.js.symbols",
-"libflac4-1.3.2.min.wasm.js": "dist/min/libflac4-1.3.2.min.wasm.js",
-"libflac4-1.3.2.min.wasm.js.symbols": "dist/min/libflac4-1.3.2.min.wasm.js.symbols",
-"libflac4-1.3.2.min.wasm.wasm": "dist/min/libflac4-1.3.2.min.wasm.wasm",
-"libflac4-1.3.2.wasm.js": "dist/libflac4-1.3.2.wasm.js",
-"libflac4-1.3.2.wasm.wasm": "dist/libflac4-1.3.2.wasm.wasm"
-}
-},
 "node_modules/lines-and-columns": {
 "version": "1.2.4",
 "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
@@ -29312,11 +29288,6 @@
 }
 }
 },
-"libflacjs": {
-"version": "git+https://git@github.com/mmig/libflac.js.git#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
-"integrity": "sha512-7cscxyqMkeUa5PpHOqhIkXgyrmPqxCzYobtXnrnwXFkY5+tvRjwsZqQQ52Z9K4AebDzpNaApK+NVn+gK4CwWUw==",
-"from": "libflacjs@https://git@github.com/mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d"
-},
 "lines-and-columns": {
 "version": "1.2.4",
 "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",


@@ -78,7 +78,6 @@
 "js-md5": "0.6.1",
 "jwt-decode": "2.2.0",
 "lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v1430.0.0+ccf9ebed/lib-jitsi-meet.tgz",
-"libflacjs": "https://git@github.com/mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
 "lodash": "4.17.21",
 "moment": "2.29.2",
 "moment-duration-format": "2.2.2",


@@ -7,7 +7,6 @@ import '../dynamic-branding/middleware';
 import '../e2ee/middleware';
 import '../external-api/middleware';
 import '../keyboard-shortcuts/middleware';
-import '../local-recording/middleware';
 import '../no-audio-signal/middleware';
 import '../notifications/middleware';
 import '../noise-detection/middleware';


@@ -4,7 +4,6 @@ import '../base/devices/reducer';
 import '../e2ee/reducer';
 import '../face-landmarks/reducer';
 import '../feedback/reducer';
-import '../local-recording/reducer';
 import '../no-audio-signal/reducer';
 import '../noise-detection/reducer';
 import '../participants-pane/reducer';


@@ -65,7 +65,7 @@ const CONFERENCE_HEADER_MAPPING = {
 hideConferenceTimer: [ 'conference-timer' ],
 hideConferenceSubject: [ 'subject' ],
 hideParticipantsStats: [ 'participants-count' ],
-hideRecordingLabel: [ 'recording', 'local-recording' ]
+hideRecordingLabel: [ 'recording' ]
 };
 ReducerRegistry.register('features/base/config', (state = _getInitialState(), action) => {


@@ -1,5 +1,5 @@
 export const CONFERENCE_INFO = {
-alwaysVisible: [ 'recording', 'local-recording', 'raised-hands-count' ],
+alwaysVisible: [ 'recording', 'raised-hands-count' ],
 autoHide: [
 'highlight-moment',
 'subject',


@@ -7,7 +7,6 @@ import React, { Component } from 'react';
 import { JitsiRecordingConstants } from '../../../base/lib-jitsi-meet';
 import { connect } from '../../../base/redux';
 import { E2EELabel } from '../../../e2ee';
-import { LocalRecordingLabel } from '../../../local-recording';
 import { RecordingLabel } from '../../../recording';
 import HighlightButton from '../../../recording/components/Recording/web/HighlightButton';
 import { isToolboxVisible } from '../../../toolbox/functions.web';
@@ -68,10 +67,6 @@ const COMPONENTS = [
 ),
 id: 'recording'
 },
-{
-Component: LocalRecordingLabel,
-id: 'local-recording'
-},
 {
 Component: RaisedHandsCountLabel,
 id: 'raised-hands-count'


@@ -1,32 +0,0 @@
/**
* Action to signal that the local client has started to perform recording,
* (as in: {@code RecordingAdapter} is actively collecting audio data).
*
* {
* type: LOCAL_RECORDING_ENGAGED,
* recordingEngagedAt: Date
* }
*/
export const LOCAL_RECORDING_ENGAGED = 'LOCAL_RECORDING_ENGAGED';
/**
* Action to signal that the local client has stopped recording,
* (as in: {@code RecordingAdapter} is no longer collecting audio data).
*
* {
* type: LOCAL_RECORDING_UNENGAGED
* }
*/
export const LOCAL_RECORDING_UNENGAGED = 'LOCAL_RECORDING_UNENGAGED';
/**
* Action to update {@code LocalRecordingInfoDialog} with stats from all
* clients.
*
* {
* type: LOCAL_RECORDING_STATS_UPDATE,
* stats: Object
* }
*/
export const LOCAL_RECORDING_STATS_UPDATE
= 'LOCAL_RECORDING_STATS_UPDATE';


@@ -1,59 +0,0 @@
/* @flow */
import {
LOCAL_RECORDING_ENGAGED,
LOCAL_RECORDING_UNENGAGED,
LOCAL_RECORDING_STATS_UPDATE
} from './actionTypes';
// The following two actions signal state changes in local recording engagement.
// In other words, the events of the local WebWorker / MediaRecorder starting to
// record and finishing recording.
// Note that this is not the event fired when the users tries to start the
// recording in the UI.
/**
* Signals that local recording has been engaged.
*
* @param {Date} startTime - Time when the recording is engaged.
* @returns {{
* type: LOCAL_RECORDING_ENGAGED,
* recordingEngagedAt: Date
* }}
*/
export function localRecordingEngaged(startTime: Date) {
return {
type: LOCAL_RECORDING_ENGAGED,
recordingEngagedAt: startTime
};
}
/**
* Signals that local recording has finished.
*
* @returns {{
* type: LOCAL_RECORDING_UNENGAGED
* }}
*/
export function localRecordingUnengaged() {
return {
type: LOCAL_RECORDING_UNENGAGED
};
}
/**
* Updates the the local recording stats from each client,
* to be displayed on {@code LocalRecordingInfoDialog}.
*
* @param {*} stats - The stats object.
* @returns {{
* type: LOCAL_RECORDING_STATS_UPDATE,
* stats: Object
* }}
*/
export function statsUpdate(stats: Object) {
return {
type: LOCAL_RECORDING_STATS_UPDATE,
stats
};
}


@@ -1,46 +0,0 @@
// @flow
import { createToolbarEvent, sendAnalytics } from '../../analytics';
import { openDialog } from '../../base/dialog';
import { translate } from '../../base/i18n';
import { IconRec } from '../../base/icons';
import { connect } from '../../base/redux';
import { AbstractButton, type AbstractButtonProps } from '../../base/toolbox/components';
import LocalRecordingInfoDialog from './LocalRecordingInfoDialog';
/**
* The type of the React {@code Component} props of {@link LocalRecording}.
*/
type Props = AbstractButtonProps & {
/**
* The redux {@code dispatch} function.
*/
dispatch: Function
};
/**
* Implementation of a button for opening local recording dialog.
*/
class LocalRecording extends AbstractButton<Props, *> {
accessibilityLabel = 'toolbar.accessibilityLabel.localRecording';
icon = IconRec;
label = 'localRecording.dialogTitle';
tooltip = 'localRecording.dialogTitle';
/**
* Handles clicking / pressing the button, and opens the appropriate dialog.
*
* @protected
* @returns {void}
*/
_handleClick() {
const { dispatch } = this.props;
sendAnalytics(createToolbarEvent('local.recording'));
dispatch(openDialog(LocalRecordingInfoDialog));
}
}
export default translate(connect()(LocalRecording));


@@ -1,407 +0,0 @@
// @flow
import moment from 'moment';
import React, { Component } from 'react';
import type { Dispatch } from 'redux';
import { Dialog } from '../../base/dialog';
import { translate } from '../../base/i18n';
import {
PARTICIPANT_ROLE,
getLocalParticipant
} from '../../base/participants';
import { connect } from '../../base/redux';
import { statsUpdate } from '../actions';
import { recordingController } from '../controller';
/**
* The type of the React {@code Component} props of
* {@link LocalRecordingInfoDialog}.
*/
type Props = {
/**
* Redux store dispatch function.
*/
dispatch: Dispatch<any>,
/**
* Current encoding format.
*/
encodingFormat: string,
/**
* Whether the local user is the moderator.
*/
isModerator: boolean,
/**
* Whether local recording is engaged.
*/
isEngaged: boolean,
/**
* The start time of the current local recording session.
* Used to calculate the duration of recording.
*/
recordingEngagedAt: Date,
/**
* Stats of all the participant.
*/
stats: Object,
/**
* Invoked to obtain translated strings.
*/
t: Function
}
/**
* The type of the React {@code Component} state of
* {@link LocalRecordingInfoDialog}.
*/
type State = {
/**
* The recording duration string to be displayed on the UI.
*/
durationString: string
}
/**
* A React Component with the contents for a dialog that shows information about
* local recording. For users with moderator rights, this is also the "control
* panel" for starting/stopping local recording on all clients.
*
* @augments Component
*/
class LocalRecordingInfoDialog extends Component<Props, State> {
/**
* Saves a handle to the timer for UI updates,
* so that it can be cancelled when the component unmounts.
*/
_timer: ?IntervalID;
/**
* Initializes a new {@code LocalRecordingInfoDialog} instance.
*
* @param {Props} props - The React {@code Component} props to initialize
* the new {@code LocalRecordingInfoDialog} instance with.
*/
constructor(props: Props) {
super(props);
this.state = {
durationString: ''
};
}
/**
* Implements React's {@link Component#componentDidMount()}.
*
* @returns {void}
*/
componentDidMount() {
this._timer = setInterval(
() => {
this.setState((_prevState, props) => {
const nowTime = new Date();
return {
durationString: this._getDuration(nowTime,
props.recordingEngagedAt)
};
});
try {
this.props.dispatch(
statsUpdate(recordingController
.getParticipantsStats()));
} catch (e) {
// do nothing
}
},
1000
);
}
/**
* Implements React's {@link Component#componentWillUnmount()}.
*
* @returns {void}
*/
componentWillUnmount() {
if (this._timer) {
clearInterval(this._timer);
this._timer = null;
}
}
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
render() {
const { isModerator, t } = this.props;
return (
<Dialog
cancelKey = { 'dialog.close' }
submitDisabled = { true }
titleKey = 'localRecording.dialogTitle'>
<div className = 'localrec-control'>
<span className = 'localrec-control-info-label'>
{`${t('localRecording.moderator')}:`}
</span>
<span className = 'info-value'>
{ isModerator
? t('localRecording.yes')
: t('localRecording.no') }
</span>
</div>
{ this._renderModeratorControls() }
{ this._renderDurationAndFormat() }
</Dialog>
);
}
/**
* Renders the recording duration and encoding format. Only shown if local
* recording is engaged.
*
* @private
* @returns {ReactElement|null}
*/
_renderDurationAndFormat() {
const { encodingFormat, isEngaged, t } = this.props;
const { durationString } = this.state;
if (!isEngaged) {
return null;
}
return (
<div>
<div>
<span className = 'localrec-control-info-label'>
{`${t('localRecording.duration')}:`}
</span>
<span className = 'info-value'>
{ durationString === ''
? t('localRecording.durationNA')
: durationString }
</span>
</div>
<div>
<span className = 'localrec-control-info-label'>
{`${t('localRecording.encoding')}:`}
</span>
<span className = 'info-value'>
{ encodingFormat }
</span>
</div>
</div>
);
}
/**
* Returns React elements for displaying the local recording stats of
* each participant.
*
* @private
* @returns {ReactElement|null}
*/
_renderStats() {
const { stats } = this.props;
if (stats === undefined) {
return null;
}
const ids = Object.keys(stats);
return (
<div className = 'localrec-participant-stats' >
{ this._renderStatsHeader() }
{ ids.map((id, i) => this._renderStatsLine(i, id)) }
</div>
);
}
/**
* Renders the stats for one participant.
*
* @private
* @param {*} lineKey - The key required by React for elements in lists.
* @param {*} id - The ID of the participant.
* @returns {ReactElement}
*/
_renderStatsLine(lineKey, id) {
const { stats } = this.props;
let statusClass = 'localrec-participant-stats-item__status-dot ';
statusClass += stats[id].recordingStats
? stats[id].recordingStats.isRecording
? 'status-on'
: 'status-off'
: 'status-unknown';
return (
<div
className = 'localrec-participant-stats-item'
key = { lineKey } >
<div className = 'localrec-participant-stats-item__status'>
<span className = { statusClass } />
</div>
<div className = 'localrec-participant-stats-item__name'>
{ stats[id].displayName || id }
</div>
<div className = 'localrec-participant-stats-item__sessionid'>
{ stats[id].recordingStats.currentSessionToken }
</div>
</div>
);
}
/**
* Renders the participant stats header line.
*
* @private
* @returns {ReactElement}
*/
_renderStatsHeader() {
const { t } = this.props;
return (
<div className = 'localrec-participant-stats-item'>
<div className = 'localrec-participant-stats-item__status' />
<div className = 'localrec-participant-stats-item__name'>
{ t('localRecording.participant') }
</div>
<div className = 'localrec-participant-stats-item__sessionid'>
{ t('localRecording.sessionToken') }
</div>
</div>
);
}
/**
* Renders the moderator-only controls: The stats of all users and the
* action links.
*
* @private
* @returns {ReactElement|null}
*/
_renderModeratorControls() {
const { isModerator, isEngaged, t } = this.props;
if (!isModerator) {
return null;
}
return (
<div>
<div className = 'localrec-control-action-links'>
<div className = 'localrec-control-action-link'>
{ isEngaged ? <a
onClick = { this._onStop }
role = 'button'
tabIndex = { 0 }>
{ t('localRecording.stop') }
</a>
: <a
onClick = { this._onStart }
role = 'button'
tabIndex = { 0 }>
{ t('localRecording.start') }
</a>
}
</div>
</div>
<div>
<span className = 'localrec-control-info-label'>
{`${t('localRecording.participantStats')}:`}
</span>
</div>
{ this._renderStats() }
</div>
);
}
/**
* Creates a duration string "HH:MM:SS" from two Date objects.
*
* @param {Date} now - Current time.
* @param {Date} prev - Previous time, the time to be subtracted.
* @returns {string}
*/
_getDuration(now, prev) {
if (prev === null || prev === undefined) {
return '';
}
// Still a hack, as moment.js does not support formatting of duration
// (i.e. TimeDelta). Only works if total duration < 24 hours.
// But who is going to have a 24-hour long conference?
return moment(now - prev).utc()
.format('HH:mm:ss');
}
/**
* Callback function for the Start UI action.
*
* @private
* @returns {void}
*/
_onStart() {
recordingController.startRecording();
}
/**
* Callback function for the Stop UI action.
*
* @private
* @returns {void}
*/
_onStop() {
recordingController.stopRecording();
}
}
/**
* Maps (parts of) the Redux state to the associated props for the
* {@code LocalRecordingInfoDialog} component.
*
* @param {Object} state - The Redux state.
* @private
* @returns {{
* encodingFormat: string,
* isModerator: boolean,
* isEngaged: boolean,
* recordingEngagedAt: Date,
* stats: Object
* }}
*/
function _mapStateToProps(state) {
const {
encodingFormat,
isEngaged,
recordingEngagedAt,
stats
} = state['features/local-recording'];
const isModerator
= getLocalParticipant(state).role === PARTICIPANT_ROLE.MODERATOR;
return {
encodingFormat,
isModerator,
isEngaged,
recordingEngagedAt,
stats
};
}
export default translate(connect(_mapStateToProps)(LocalRecordingInfoDialog));


@@ -1,82 +0,0 @@
// @flow
import React, { Component } from 'react';
import { translate } from '../../base/i18n/index';
import { Label } from '../../base/label/index';
import { connect } from '../../base/redux';
import { Tooltip } from '../../base/tooltip';
/**
* The type of the React {@code Component} props of {@link LocalRecordingLabel}.
*/
type Props = {
/**
* Whether this is the Jibri recorder participant.
*/
_iAmRecorder: boolean,
/**
* Whether local recording is engaged or not.
*/
_isEngaged: boolean,
/**
* Invoked to obtain translated strings.
*/
t: Function,
};
/**
* React Component for displaying a label when local recording is engaged.
*
* @augments Component
*/
class LocalRecordingLabel extends Component<Props> {
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
render() {
if (!this.props._isEngaged || this.props._iAmRecorder) {
return null;
}
return (
<Tooltip
content = { this.props.t('localRecording.labelToolTip') }
position = { 'bottom' }>
<Label
className = 'local-rec'
text = { this.props.t('localRecording.label') } />
</Tooltip>
);
}
}
/**
* Maps (parts of) the Redux state to the associated props for the
* {@code LocalRecordingLabel} component.
*
* @param {Object} state - The Redux state.
* @private
* @returns {{
* }}
*/
function _mapStateToProps(state) {
const { isEngaged } = state['features/local-recording'];
const { iAmRecorder } = state['features/base/config'];
return {
_isEngaged: isEngaged,
_iAmRecorder: iAmRecorder
};
}
export default translate(connect(_mapStateToProps)(LocalRecordingLabel));


@@ -1,5 +0,0 @@
export { default as LocalRecordingButton } from './LocalRecordingButton';
export { default as LocalRecordingLabel } from './LocalRecordingLabel';
export {
default as LocalRecordingInfoDialog
} from './LocalRecordingInfoDialog';


@@ -1,687 +0,0 @@
/* @flow */
import Bourne from '@hapi/bourne';
import { i18next } from '../../base/i18n';
import logger from '../logger';
import {
FlacAdapter,
OggAdapter,
WavAdapter,
downloadBlob
} from '../recording';
import { sessionManager } from '../session';
/**
* XMPP command for signaling the start of local recording to all clients.
* Should be sent by the moderator only.
*/
const COMMAND_START = 'localRecStart';
/**
* XMPP command for signaling the stop of local recording to all clients.
* Should be sent by the moderator only.
*/
const COMMAND_STOP = 'localRecStop';
/**
* One-time command used to trigger the moderator to resend the commands.
* This is a workaround for newly-joined clients to receive remote presence.
*/
const COMMAND_PING = 'localRecPing';
/**
* One-time command sent upon receiving a {@code COMMAND_PING}.
* Only the moderator sends this command.
* This command does not carry any information itself, but rather forces the
* XMPP server to resend the remote presence.
*/
const COMMAND_PONG = 'localRecPong';
/**
* Participant property key for local recording stats.
*/
const PROPERTY_STATS = 'localRecStats';
/**
* Supported recording formats.
*/
const RECORDING_FORMATS = new Set([ 'flac', 'wav', 'ogg' ]);
/**
* Default recording format.
*/
const DEFAULT_RECORDING_FORMAT = 'flac';
/**
* States of the {@code RecordingController}.
*/
const ControllerState = Object.freeze({
/**
* Idle (not recording).
*/
IDLE: Symbol('IDLE'),
/**
* Starting.
*/
STARTING: Symbol('STARTING'),
/**
* Engaged (recording).
*/
RECORDING: Symbol('RECORDING'),
/**
* Stopping.
*/
STOPPING: Symbol('STOPPING'),
/**
* Failed, due to error during starting / stopping process.
*/
FAILED: Symbol('FAILED')
});
/**
* Type of the stats reported by each participant (client).
*/
type RecordingStats = {
/**
* Current local recording session token used by the participant.
*/
currentSessionToken: number,
/**
* Whether local recording is engaged on the participant's device.
*/
isRecording: boolean,
/**
* Total recorded bytes. (Reserved for future use.).
*/
recordedBytes: number,
/**
* Total recording duration. (Reserved for future use.).
*/
recordedLength: number
}
/**
* The component responsible for the coordination of local recording, across
* multiple participants.
* Current implementation requires that there is only one moderator in a room.
*/
class RecordingController {
/**
* For each recording session, there is a separate @{code RecordingAdapter}
* instance so that encoded bits from the previous sessions can still be
* retrieved after they ended.
*
* @private
*/
_adapters = {};
/**
* The {@code JitsiConference} instance.
*
* @private
*/
_conference: * = null;
/**
* Current recording session token.
* Session token is a number generated by the moderator, to ensure every
* client is in the same recording state.
*
* @private
*/
_currentSessionToken: number = -1;
/**
* Current state of {@code RecordingController}.
*
* @private
*/
_state = ControllerState.IDLE;
/**
* Whether or not the audio is muted in the UI. This is stored as internal
* state of {@code RecordingController} because we might have recording
* sessions that start muted.
*/
_isMuted = false;
/**
* The ID of the active microphone.
*
* @private
*/
_micDeviceId = 'default';
/**
* Current recording format. This will be in effect from the next
* recording session, i.e., if this value is changed during an on-going
* recording session, that on-going session will not use the new format.
*
* @private
*/
_format = DEFAULT_RECORDING_FORMAT;
/**
* Whether or not the {@code RecordingController} has registered for
* XMPP events. Prevents initialization from happening multiple times.
*
* @private
*/
_registered = false;
/**
* FIXME: callback function for the {@code RecordingController} to notify
* UI it wants to display a notice. Keeps {@code RecordingController}
* decoupled from UI.
*/
_onNotify: ?(messageKey: string, messageParams?: Object) => void;
/**
* FIXME: callback function for the {@code RecordingController} to notify
* UI it wants to display a warning. Keeps {@code RecordingController}
* decoupled from UI.
*/
_onWarning: ?(messageKey: string, messageParams?: Object) => void;
/**
* FIXME: callback function for the {@code RecordingController} to notify
* UI that the local recording state has changed.
*/
_onStateChanged: ?(boolean) => void;
/**
* Constructor.
*
* @returns {void}
*/
constructor() {
this.registerEvents = this.registerEvents.bind(this);
this.getParticipantsStats = this.getParticipantsStats.bind(this);
this._onStartCommand = this._onStartCommand.bind(this);
this._onStopCommand = this._onStopCommand.bind(this);
this._onPingCommand = this._onPingCommand.bind(this);
this._doStartRecording = this._doStartRecording.bind(this);
this._doStopRecording = this._doStopRecording.bind(this);
this._updateStats = this._updateStats.bind(this);
this._switchToNewSession = this._switchToNewSession.bind(this);
}
registerEvents: () => void;
/**
* Registers listeners for XMPP events.
*
* @param {JitsiConference} conference - A {@code JitsiConference} instance.
* @returns {void}
*/
registerEvents(conference: Object) {
if (!this._registered) {
this._conference = conference;
if (this._conference) {
this._conference
.addCommandListener(COMMAND_STOP, this._onStopCommand);
this._conference
.addCommandListener(COMMAND_START, this._onStartCommand);
this._conference
.addCommandListener(COMMAND_PING, this._onPingCommand);
this._registered = true;
}
if (!this._conference.isModerator()) {
this._conference.sendCommandOnce(COMMAND_PING, {});
}
}
}
/**
* Sets the event handler for {@code onStateChanged}.
*
* @param {Function} delegate - The event handler.
* @returns {void}
*/
set onStateChanged(delegate: Function) {
this._onStateChanged = delegate;
}
/**
* Sets the event handler for {@code onNotify}.
*
* @param {Function} delegate - The event handler.
* @returns {void}
*/
set onNotify(delegate: Function) {
this._onNotify = delegate;
}
/**
* Sets the event handler for {@code onWarning}.
*
* @param {Function} delegate - The event handler.
* @returns {void}
*/
set onWarning(delegate: Function) {
this._onWarning = delegate;
}
/**
* Signals the participants to start local recording.
*
* @returns {void}
*/
startRecording() {
this.registerEvents();
if (this._conference && this._conference.isModerator()) {
this._conference.removeCommand(COMMAND_STOP);
this._conference.sendCommand(COMMAND_START, {
attributes: {
sessionToken: this._getRandomToken(),
format: this._format
}
});
} else if (this._onWarning) {
this._onWarning('localRecording.messages.notModerator');
}
}
/**
* Signals the participants to stop local recording.
*
* @returns {void}
*/
stopRecording() {
if (this._conference) {
if (this._conference.isModerator()) {
this._conference.removeCommand(COMMAND_START);
this._conference.sendCommand(COMMAND_STOP, {
attributes: {
sessionToken: this._currentSessionToken
}
});
} else if (this._onWarning) {
this._onWarning('localRecording.messages.notModerator');
}
}
}
/**
* Triggers the download of recorded data.
* Browser only.
*
* @param {number} sessionToken - The token of the session to download.
* @returns {void}
*/
downloadRecordedData(sessionToken: number) {
if (this._adapters[sessionToken]) {
this._adapters[sessionToken].exportRecordedData()
.then(args => {
const { data, format } = args;
const filename = `session_${sessionToken}`
+ `_${this._conference.myUserId()}.${format}`;
downloadBlob(data, filename);
})
.catch(error => {
logger.error('Failed to download audio for'
+ ` session ${sessionToken}. Error: ${error}`);
});
} else {
logger.error(`Invalid session token for download ${sessionToken}`);
}
}
/**
* Changes the current microphone.
*
* @param {string} micDeviceId - The new microphone device ID.
* @returns {void}
*/
setMicDevice(micDeviceId: string) {
if (micDeviceId !== this._micDeviceId) {
this._micDeviceId = String(micDeviceId);
if (this._state === ControllerState.RECORDING) {
// sessionManager.endSegment(this._currentSessionToken);
logger.log('Before switching microphone...');
this._adapters[this._currentSessionToken]
.setMicDevice(this._micDeviceId)
.then(() => {
logger.log('Finished switching microphone.');
// sessionManager.beginSegment(this._currentSesoken);
})
.catch(() => {
logger.error('Failed to switch microphone');
});
}
logger.log(`Switch microphone to ${this._micDeviceId}`);
}
}
/**
* Mute or unmute audio. When muted, the ongoing local recording should
* produce silence.
*
* @param {boolean} muted - If the audio should be muted.
* @returns {void}
*/
setMuted(muted: boolean) {
this._isMuted = Boolean(muted);
if (this._state === ControllerState.RECORDING) {
this._adapters[this._currentSessionToken].setMuted(this._isMuted);
}
}
/**
* Switches the recording format.
*
* @param {string} newFormat - The new format.
* @returns {void}
*/
switchFormat(newFormat: string) {
if (!RECORDING_FORMATS.has(newFormat)) {
logger.log(`Unknown format ${newFormat}. Ignoring...`);
return;
}
this._format = newFormat;
logger.log(`Recording format switched to ${newFormat}`);
// the new format will be used in the next recording session
}
/**
* Returns the local recording stats.
*
* @returns {RecordingStats}
*/
getLocalStats(): RecordingStats {
return {
currentSessionToken: this._currentSessionToken,
isRecording: this._state === ControllerState.RECORDING,
recordedBytes: 0,
recordedLength: 0
};
}
getParticipantsStats: () => *;
/**
* Returns the remote participants' local recording stats.
*
* @returns {*}
*/
getParticipantsStats() {
const members
= this._conference.getParticipants()
.map(member => {
return {
id: member.getId(),
displayName: member.getDisplayName(),
recordingStats:
Bourne.parse(member.getProperty(PROPERTY_STATS) || '{}'),
isSelf: false
};
});
// transform into a dictionary for consistent ordering
const result = {};
for (let i = 0; i < members.length; ++i) {
result[members[i].id] = members[i];
}
const localId = this._conference.myUserId();
result[localId] = {
id: localId,
displayName: i18next.t('localRecording.me'),
recordingStats: this.getLocalStats(),
isSelf: true
};
return result;
}
_changeState: (symbol) => void;
/**
* Changes the current state of {@code RecordingController}.
*
* @private
* @param {symbol} newState - The new state.
* @returns {void}
*/
_changeState(newState: symbol) {
if (this._state !== newState) {
logger.log(`state change: ${this._state.toString()} -> `
+ `${newState.toString()}`);
this._state = newState;
}
}
_updateStats: () => void;
/**
* Sends out updates about the local recording stats via XMPP.
*
* @private
* @returns {void}
*/
_updateStats() {
if (this._conference) {
this._conference.setLocalParticipantProperty(PROPERTY_STATS,
JSON.stringify(this.getLocalStats()));
}
}
_onStartCommand: (*) => void;
/**
* Callback function for XMPP event.
*
* @private
* @param {*} value - The event args.
* @returns {void}
*/
_onStartCommand(value) {
const { sessionToken, format } = value.attributes;
if (this._state === ControllerState.IDLE) {
this._changeState(ControllerState.STARTING);
this._switchToNewSession(sessionToken, format);
this._doStartRecording();
} else if (this._state === ControllerState.RECORDING
&& this._currentSessionToken !== sessionToken) {
// There is local recording going on, but not for the same session.
// This means the current state might be out-of-sync with the
// moderator's, so we need to restart the recording.
this._changeState(ControllerState.STOPPING);
this._doStopRecording().then(() => {
this._changeState(ControllerState.STARTING);
this._switchToNewSession(sessionToken, format);
this._doStartRecording();
});
}
}
_onStopCommand: (*) => void;
/**
* Callback function for XMPP event.
*
* @private
* @param {*} value - The event args.
* @returns {void}
*/
_onStopCommand(value) {
if (this._state === ControllerState.RECORDING
&& this._currentSessionToken === value.attributes.sessionToken) {
this._changeState(ControllerState.STOPPING);
this._doStopRecording();
}
}
_onPingCommand: () => void;
/**
* Callback function for XMPP event.
*
* @private
* @returns {void}
*/
_onPingCommand() {
if (this._conference.isModerator()) {
logger.log('Received ping, sending pong.');
this._conference.sendCommandOnce(COMMAND_PONG, {});
}
}
/**
* Generates a token that can be used to distinguish each local recording
* session.
*
* @returns {number}
*/
_getRandomToken() {
return Math.floor(Math.random() * 100000000) + 1;
}
_doStartRecording: () => void;
/**
* Starts the recording locally.
*
* @private
* @returns {void}
*/
_doStartRecording() {
if (this._state === ControllerState.STARTING) {
const delegate = this._adapters[this._currentSessionToken];
delegate.start(this._micDeviceId)
.then(() => {
this._changeState(ControllerState.RECORDING);
sessionManager.beginSegment(this._currentSessionToken);
logger.log('Local recording engaged.');
if (this._onNotify) {
this._onNotify('localRecording.messages.engaged');
}
if (this._onStateChanged) {
this._onStateChanged(true);
}
delegate.setMuted(this._isMuted);
this._updateStats();
})
.catch(err => {
logger.error('Failed to start local recording.', err);
});
}
}
_doStopRecording: () => Promise<void>;
/**
* Stops the recording locally.
*
* @private
* @returns {Promise<void>}
*/
_doStopRecording() {
if (this._state === ControllerState.STOPPING) {
const token = this._currentSessionToken;
return this._adapters[this._currentSessionToken]
.stop()
.then(() => {
this._changeState(ControllerState.IDLE);
sessionManager.endSegment(this._currentSessionToken);
logger.log('Local recording unengaged.');
this.downloadRecordedData(token);
const messageKey
= this._conference.isModerator()
? 'localRecording.messages.finishedModerator'
: 'localRecording.messages.finished';
const messageParams = {
token
};
if (this._onNotify) {
this._onNotify(messageKey, messageParams);
}
if (this._onStateChanged) {
this._onStateChanged(false);
}
this._updateStats();
})
.catch(err => {
logger.error('Failed to stop local recording.', err);
});
}
/* eslint-disable */
return (Promise.resolve(): Promise<void>);
// FIXME: better ways to satisfy flow and ESLint at the same time?
/* eslint-enable */
}
_switchToNewSession: (string, string) => void;
/**
* Switches to a new local recording session.
*
* @param {string} sessionToken - The session Token.
* @param {string} format - The recording format for the session.
* @returns {void}
*/
_switchToNewSession(sessionToken, format) {
this._format = format;
this._currentSessionToken = sessionToken;
logger.log(`New session: ${this._currentSessionToken}, `
+ `format: ${this._format}`);
this._adapters[sessionToken]
= this._createRecordingAdapter();
sessionManager.createSession(sessionToken, this._format);
}
/**
* Creates a recording adapter according to the current recording format.
*
* @private
* @returns {RecordingAdapter}
*/
_createRecordingAdapter() {
logger.debug('[RecordingController] creating recording'
+ ` adapter for ${this._format} format.`);
switch (this._format) {
case 'ogg':
return new OggAdapter();
case 'flac':
return new FlacAdapter();
case 'wav':
return new WavAdapter();
default:
throw new Error(`Unknown format: ${this._format}`);
}
}
}
/**
* Global singleton of {@code RecordingController}.
*/
export const recordingController = new RecordingController();


@@ -1 +0,0 @@
export * from './RecordingController';


@@ -1,4 +0,0 @@
export * from './actions';
export * from './actionTypes';
export * from './components';
export * from './controller';


@@ -1,5 +0,0 @@
// @flow
import { getLogger } from '../base/logging/functions';
export default getLogger('features/local-recording');


@@ -1,97 +0,0 @@
/* @flow */
import { createShortcutEvent, sendAnalytics } from '../analytics';
import { APP_WILL_UNMOUNT } from '../base/app/actionTypes';
import { CONFERENCE_JOINED } from '../base/conference/actionTypes';
import { toggleDialog } from '../base/dialog/actions';
import { i18next } from '../base/i18n';
import { SET_AUDIO_MUTED } from '../base/media/actionTypes';
import { MiddlewareRegistry } from '../base/redux';
import { SETTINGS_UPDATED } from '../base/settings/actionTypes';
import { NOTIFICATION_TIMEOUT_TYPE } from '../notifications';
import { showNotification } from '../notifications/actions';
import { localRecordingEngaged, localRecordingUnengaged } from './actions';
import { LocalRecordingInfoDialog } from './components';
import { recordingController } from './controller';
declare var APP: Object;
MiddlewareRegistry.register(({ getState, dispatch }) => next => action => {
const result = next(action);
switch (action.type) {
case CONFERENCE_JOINED: {
const { localRecording } = getState()['features/base/config'];
const isLocalRecordingEnabled = Boolean(
localRecording
&& localRecording.enabled
&& typeof APP === 'object'
);
if (!isLocalRecordingEnabled) {
break;
}
// realize the delegates on recordingController, allowing the UI to
// react to state changes in recordingController.
recordingController.onStateChanged = isEngaged => {
if (isEngaged) {
const nowTime = new Date();
dispatch(localRecordingEngaged(nowTime));
} else {
dispatch(localRecordingUnengaged());
}
};
recordingController.onWarning = (messageKey, messageParams) => {
dispatch(showNotification({
titleKey: 'localRecording.localRecording',
description: i18next.t(messageKey, messageParams)
}, NOTIFICATION_TIMEOUT_TYPE.LONG));
};
recordingController.onNotify = (messageKey, messageParams) => {
dispatch(showNotification({
titleKey: 'localRecording.localRecording',
description: i18next.t(messageKey, messageParams)
}, NOTIFICATION_TIMEOUT_TYPE.LONG));
};
typeof APP === 'object' && typeof APP.keyboardshortcut === 'object'
&& APP.keyboardshortcut.registerShortcut('L', null, () => {
sendAnalytics(createShortcutEvent('local.recording'));
dispatch(toggleDialog(LocalRecordingInfoDialog));
}, 'keyboardShortcuts.localRecording');
if (localRecording.format) {
recordingController.switchFormat(localRecording.format);
}
const { conference } = getState()['features/base/conference'];
recordingController.registerEvents(conference);
break;
}
case APP_WILL_UNMOUNT:
recordingController.onStateChanged = null;
recordingController.onNotify = null;
recordingController.onWarning = null;
break;
case SET_AUDIO_MUTED:
recordingController.setMuted(action.muted);
break;
case SETTINGS_UPDATED: {
const { micDeviceId } = getState()['features/base/settings'];
if (micDeviceId) {
recordingController.setMicDevice(micDeviceId);
}
break;
}
}
return result;
});


@@ -1,129 +0,0 @@
import logger from '../logger';
import { RecordingAdapter } from './RecordingAdapter';
/**
* Base class for {@code AudioContext}-based recording adapters.
*/
export class AbstractAudioContextAdapter extends RecordingAdapter {
/**
* The {@code AudioContext} instance.
*/
_audioContext = null;
/**
* The {@code ScriptProcessorNode} instance.
*/
_audioProcessingNode = null;
/**
* The {@code MediaStreamAudioSourceNode} instance.
*/
_audioSource = null;
/**
* The {@code MediaStream} instance, representing the current audio device.
*/
_stream = null;
/**
* Sample rate.
*/
_sampleRate = 44100;
/**
* Constructor.
*/
constructor() {
super();
// sampleRate is browser and OS dependent.
// Setting sampleRate explicitly is in the specs but not implemented
// by browsers.
// See: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/
// AudioContext#Browser_compatibility
// And https://bugs.chromium.org/p/chromium/issues/detail?id=432248
this._audioContext = new AudioContext();
this._sampleRate = this._audioContext.sampleRate;
logger.log(`Current sampleRate ${this._sampleRate}.`);
}
/**
* Sets up the audio graph in the AudioContext.
*
* @protected
* @param {string} micDeviceId - The current microphone device ID.
* @param {Function} callback - Callback function to
* handle AudioProcessingEvents.
* @returns {Promise}
*/
_initializeAudioContext(micDeviceId, callback) {
if (typeof callback !== 'function') {
return Promise.reject('a callback function is required.');
}
return this._getAudioStream(micDeviceId)
.then(stream => {
this._stream = stream;
this._audioSource
= this._audioContext.createMediaStreamSource(stream);
this._audioProcessingNode
= this._audioContext.createScriptProcessor(4096, 1, 1);
this._audioProcessingNode.onaudioprocess = callback;
logger.debug('AudioContext is set up.');
})
.catch(err => {
logger.error(`Error calling getUserMedia(): ${err}`);
return Promise.reject(err);
});
}
/**
* Connects the nodes in the {@code AudioContext} to start the flow of
* audio data.
*
* @protected
* @returns {void}
*/
_connectAudioGraph() {
this._audioSource.connect(this._audioProcessingNode);
this._audioProcessingNode.connect(this._audioContext.destination);
}
/**
* Disconnects the nodes in the {@code AudioContext}.
*
* @protected
* @returns {void}
*/
_disconnectAudioGraph() {
this._audioProcessingNode.onaudioprocess = undefined;
this._audioProcessingNode.disconnect();
this._audioSource.disconnect();
}
/**
* Replaces the current microphone MediaStream.
*
* @protected
* @param {string} micDeviceId - New microphone ID.
* @returns {Promise}
*/
_replaceMic(micDeviceId) {
if (this._audioContext && this._audioProcessingNode) {
return this._getAudioStream(micDeviceId).then(newStream => {
const newSource = this._audioContext
.createMediaStreamSource(newStream);
this._audioSource.disconnect();
newSource.connect(this._audioProcessingNode);
this._stream = newStream;
this._audioSource = newSource;
});
}
return Promise.resolve();
}
}


@@ -1,146 +0,0 @@
import logger from '../logger';
import { RecordingAdapter } from './RecordingAdapter';
/**
* Recording adapter that uses {@code MediaRecorder} (default browser encoding
* with Opus codec).
*/
export class OggAdapter extends RecordingAdapter {
/**
* Instance of MediaRecorder.
*
* @private
*/
_mediaRecorder = null;
/**
* Initialization promise.
*
* @private
*/
_initPromise = null;
/**
* The recorded audio file.
*
* @private
*/
_recordedData = null;
/**
* Implements {@link RecordingAdapter#start()}.
*
* @inheritdoc
*/
start(micDeviceId) {
if (!this._initPromise) {
this._initPromise = this._initialize(micDeviceId);
}
return this._initPromise.then(() =>
new Promise(resolve => {
this._mediaRecorder.start();
resolve();
})
);
}
/**
* Implements {@link RecordingAdapter#stop()}.
*
* @inheritdoc
*/
stop() {
return new Promise(
resolve => {
this._mediaRecorder.onstop = () => resolve();
this._mediaRecorder.stop();
}
);
}
/**
* Implements {@link RecordingAdapter#exportRecordedData()}.
*
* @inheritdoc
*/
exportRecordedData() {
if (this._recordedData !== null) {
return Promise.resolve({
data: this._recordedData,
format: 'ogg'
});
}
return Promise.reject('No audio data recorded.');
}
/**
* Implements {@link RecordingAdapter#setMuted()}.
*
* @inheritdoc
*/
setMuted(muted) {
const shouldEnable = !muted;
if (!this._stream) {
return Promise.resolve();
}
const track = this._stream.getAudioTracks()[0];
if (!track) {
logger.error('Cannot mute/unmute. Track not found!');
return Promise.resolve();
}
if (track.enabled !== shouldEnable) {
track.enabled = shouldEnable;
logger.log(muted ? 'Mute' : 'Unmute');
}
return Promise.resolve();
}
/**
* Initialize the adapter.
*
* @private
* @param {string} micDeviceId - The current microphone device ID.
* @returns {Promise}
*/
_initialize(micDeviceId) {
if (this._mediaRecorder) {
return Promise.resolve();
}
return new Promise((resolve, error) => {
this._getAudioStream(micDeviceId)
.then(stream => {
this._stream = stream;
this._mediaRecorder = new MediaRecorder(stream);
this._mediaRecorder.ondataavailable
= e => this._saveMediaData(e.data);
resolve();
})
.catch(err => {
logger.error(`Error calling getUserMedia(): ${err}`);
error();
});
});
}
/**
* Callback for storing the encoded data.
*
* @private
* @param {Blob} data - Encoded data.
* @returns {void}
*/
_saveMediaData(data) {
this._recordedData = data;
}
}


@@ -1,85 +0,0 @@
import JitsiMeetJS from '../../base/lib-jitsi-meet';
/**
* Base class for recording backends.
*/
export class RecordingAdapter {
/**
* Starts recording.
*
* @param {string} micDeviceId - The microphone to record on.
* @returns {Promise}
*/
start(/* eslint-disable no-unused-vars */
micDeviceId/* eslint-enable no-unused-vars */) {
throw new Error('Not implemented');
}
/**
* Stops recording.
*
* @returns {Promise}
*/
stop() {
throw new Error('Not implemented');
}
/**
* Export the recorded and encoded audio file.
*
* @returns {Promise<Object>}
*/
exportRecordedData() {
throw new Error('Not implemented');
}
/**
* Mutes or unmutes the current recording.
*
* @param {boolean} muted - Whether to mute or to unmute.
* @returns {Promise}
*/
setMuted(/* eslint-disable no-unused-vars */
muted/* eslint-enable no-unused-vars */) {
throw new Error('Not implemented');
}
/**
* Changes the current microphone.
*
* @param {string} micDeviceId - The new microphone device ID.
* @returns {Promise}
*/
setMicDevice(/* eslint-disable no-unused-vars */
micDeviceId/* eslint-enable no-unused-vars */) {
throw new Error('Not implemented');
}
/**
* Helper method for getting an audio {@code MediaStream}. Use this instead
* of calling browser APIs directly.
*
* @protected
* @param {number} micDeviceId - The ID of the current audio device.
* @returns {Promise}
*/
_getAudioStream(micDeviceId) {
return JitsiMeetJS.createLocalTracks({
devices: [ 'audio' ],
micDeviceId
}).then(result => {
if (result.length !== 1) {
throw new Error('Unexpected number of streams '
+ 'from createLocalTracks.');
}
const mediaStream = result[0].stream;
if (mediaStream === undefined) {
throw new Error('Failed to create local track.');
}
return mediaStream;
});
}
}


@@ -1,20 +0,0 @@
/**
* Force download of Blob in browser by faking an <a> tag.
*
* @param {Blob} blob - Base64 URL.
* @param {string} fileName - The filename to appear in the download dialog.
* @returns {void}
*/
export function downloadBlob(blob, fileName = 'recording.ogg') {
const base64Url = window.URL.createObjectURL(blob);
// fake a anchor tag
const a = document.createElement('a');
a.style = 'display: none';
a.href = base64Url;
a.download = fileName;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
}


@@ -1,290 +0,0 @@
import logger from '../logger';
import { AbstractAudioContextAdapter } from './AbstractAudioContextAdapter';
const WAV_BITS_PER_SAMPLE = 16;
/**
* Recording adapter for raw WAVE format.
*/
export class WavAdapter extends AbstractAudioContextAdapter {
/**
* Length of the WAVE file, in number of samples.
*/
_wavLength = 0;
/**
* The {@code ArrayBuffer}s that stores the PCM bits.
*/
_wavBuffers = [];
/**
* Whether or not the {@code WavAdapter} is in a ready state.
*/
_isInitialized = false;
/**
* Initialization promise.
*/
_initPromise = null;
/**
* Constructor.
*/
constructor() {
super();
this._onAudioProcess = this._onAudioProcess.bind(this);
}
/**
* Implements {@link RecordingAdapter#start()}.
*
* @inheritdoc
*/
start(micDeviceId) {
if (!this._initPromise) {
this._initPromise = this._initialize(micDeviceId);
}
return this._initPromise.then(() => {
this._wavBuffers = [];
this._wavLength = 0;
this._connectAudioGraph();
});
}
/**
* Implements {@link RecordingAdapter#stop()}.
*
* @inheritdoc
*/
stop() {
this._disconnectAudioGraph();
this._data = this._exportMonoWAV(this._wavBuffers, this._wavLength);
this._audioProcessingNode = null;
this._audioSource = null;
this._isInitialized = false;
return Promise.resolve();
}
/**
* Implements {@link RecordingAdapter#exportRecordedData()}.
*
* @inheritdoc
*/
exportRecordedData() {
if (this._data !== null) {
return Promise.resolve({
data: this._data,
format: 'wav'
});
}
return Promise.reject('No audio data recorded.');
}
/**
* Implements {@link RecordingAdapter#setMuted()}.
*
* @inheritdoc
*/
setMuted(muted) {
const shouldEnable = !muted;
if (!this._stream) {
return Promise.resolve();
}
const track = this._stream.getAudioTracks()[0];
if (!track) {
logger.error('Cannot mute/unmute. Track not found!');
return Promise.resolve();
}
if (track.enabled !== shouldEnable) {
track.enabled = shouldEnable;
logger.log(muted ? 'Mute' : 'Unmute');
}
return Promise.resolve();
}
/**
* Implements {@link RecordingAdapter#setMicDevice()}.
*
* @inheritdoc
*/
setMicDevice(micDeviceId) {
return this._replaceMic(micDeviceId);
}
/**
* Creates a WAVE file header.
*
* @private
* @param {number} dataLength - Length of the payload (PCM data), in bytes.
* @returns {Uint8Array}
*/
_createWavHeader(dataLength) {
// adapted from
// https://github.com/mmig/speech-to-flac/blob/master/encoder.js
// ref: http://soundfile.sapp.org/doc/WaveFormat/
// create our WAVE file header
const buffer = new ArrayBuffer(44);
const view = new DataView(buffer);
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
// set file size at the end
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true);
view.setUint16(20, 1, true);
// NumChannels
view.setUint16(22, 1, true);
// SampleRate
view.setUint32(24, this._sampleRate, true);
// ByteRate
view.setUint32(28,
Number(this._sampleRate) * 1 * WAV_BITS_PER_SAMPLE / 8, true);
// BlockAlign
view.setUint16(32, 1 * Number(WAV_BITS_PER_SAMPLE) / 8, true);
view.setUint16(34, WAV_BITS_PER_SAMPLE, true);
// data sub-chunk
writeUTFBytes(view, 36, 'data');
// file length
view.setUint32(4, 32 + dataLength, true);
// data chunk length
view.setUint32(40, dataLength, true);
return new Uint8Array(buffer);
}
/**
* Initialize the adapter.
*
* @private
* @param {string} micDeviceId - The current microphone device ID.
* @returns {Promise}
*/
_initialize(micDeviceId) {
if (this._isInitialized) {
return Promise.resolve();
}
return this._initializeAudioContext(micDeviceId, this._onAudioProcess)
.then(() => {
this._isInitialized = true;
});
}
/**
* Callback function for handling AudioProcessingEvents.
*
* @private
* @param {AudioProcessingEvent} e - The event containing the raw PCM.
* @returns {void}
*/
_onAudioProcess(e) {
// See: https://developer.mozilla.org/en-US/docs/Web/API/
// AudioBuffer/getChannelData
// The returned value is an Float32Array.
const channelLeft = e.inputBuffer.getChannelData(0);
// Need to copy the Float32Array:
// unlike passing to WebWorker, this data is passed by reference,
// so we need to copy it, otherwise the resulting audio file will be
// just repeating the last segment.
this._wavBuffers.push(new Float32Array(channelLeft));
this._wavLength += channelLeft.length;
}
/**
* Combines buffers and export to a wav file.
*
* @private
* @param {Float32Array[]} buffers - The stored buffers.
* @param {number} length - Total length (number of samples).
* @returns {Blob}
*/
_exportMonoWAV(buffers, length) {
const dataLength = length * 2; // each sample = 16 bit = 2 bytes
const buffer = new ArrayBuffer(44 + dataLength);
const view = new DataView(buffer);
// copy WAV header data into the array buffer
const header = this._createWavHeader(dataLength);
const len = header.length;
for (let i = 0; i < len; ++i) {
view.setUint8(i, header[i]);
}
// write audio data
floatTo16BitPCM(view, 44, buffers);
return new Blob([ view ], { type: 'audio/wav' });
}
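// A consumer of the exported Blob (hypothetical sketch, not part of this
// feature) could save it to disk via an object URL:
//   const url = URL.createObjectURL(blob);
//   const a = document.createElement('a');
//   a.href = url;
//   a.download = 'recording.wav';
//   a.click();
//   URL.revokeObjectURL(url);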
}
/**
 * Helper function. Writes an ASCII string into a DataView, one byte per
 * character. Used for the literal chunk tags in WAVE headers.
 *
 * @param {DataView} view - The view into the output buffer.
 * @param {number} offset - Offset.
 * @param {string} string - The string to be written.
 * @returns {void}
 */
function writeUTFBytes(view, offset, string) {
const lng = string.length;
// write one byte per character
for (let i = 0; i < lng; ++i) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
/**
* Helper function for converting Float32Array to Int16Array.
*
* @param {DataView} output - View to the output buffer.
* @param {number} offset - The offset in output buffer to write from.
* @param {Float32Array[]} inputBuffers - The input buffers.
* @returns {void}
*/
function floatTo16BitPCM(output, offset, inputBuffers) {
let i, j;
let input, s, sampleCount;
const bufferCount = inputBuffers.length;
let o = offset;
for (i = 0; i < bufferCount; ++i) {
input = inputBuffers[i];
sampleCount = input.length;
for (j = 0; j < sampleCount; ++j, o += 2) {
s = Math.max(-1, Math.min(1, input[j]));
output.setInt16(o, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
}
}
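// Illustrative check (assumed values, not part of the original file):
//   const view = new DataView(new ArrayBuffer(6));
//   floatTo16BitPCM(view, 0, [ new Float32Array([ -1, 0.5, 1.5 ]) ]);
//   view.getInt16(0, true); // -32768 (-1 * 0x8000)
//   view.getInt16(2, true); // 16383 (0.5 * 0x7FFF, truncated)
//   view.getInt16(4, true); // 32767 (1.5 clamped to 1)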


@@ -1,262 +0,0 @@
import logger from '../../logger';
import { AbstractAudioContextAdapter } from '../AbstractAudioContextAdapter';
import {
DEBUG,
MAIN_THREAD_FINISH,
MAIN_THREAD_INIT,
MAIN_THREAD_NEW_DATA_ARRIVED,
WORKER_BLOB_READY,
WORKER_LIBFLAC_READY
} from './messageTypes';
/**
* Recording adapter that uses libflac.js in the background.
*/
export class FlacAdapter extends AbstractAudioContextAdapter {
/**
* Instance of WebWorker (flacEncodeWorker).
*/
_encoder = null;
/**
* Resolve function of the Promise returned by {@code stop()}.
* This is called after the WebWorker sends back {@code WORKER_BLOB_READY}.
*/
_stopPromiseResolver = null;
/**
* Resolve function of the Promise that initializes the flacEncodeWorker.
*/
_initWorkerPromiseResolver = null;
/**
* Initialization promise.
*/
_initPromise = null;
/**
* Constructor.
*/
constructor() {
super();
this._onAudioProcess = this._onAudioProcess.bind(this);
this._onWorkerMessage = this._onWorkerMessage.bind(this);
}
/**
* Implements {@link RecordingAdapter#start()}.
*
* @inheritdoc
*/
start(micDeviceId) {
if (!this._initPromise) {
this._initPromise = this._initialize(micDeviceId);
}
return this._initPromise.then(() => {
this._connectAudioGraph();
});
}
/**
* Implements {@link RecordingAdapter#stop()}.
*
* @inheritdoc
*/
stop() {
if (!this._encoder) {
logger.error('Attempting to stop, but there is nothing to stop.');
return Promise.reject();
}
return new Promise(resolve => {
this._initPromise = null;
this._disconnectAudioGraph();
this._stopPromiseResolver = resolve;
this._encoder.postMessage({
command: MAIN_THREAD_FINISH
});
});
}
/**
* Implements {@link RecordingAdapter#exportRecordedData()}.
*
* @inheritdoc
*/
exportRecordedData() {
if (this._data !== null) {
return Promise.resolve({
data: this._data,
format: 'flac'
});
}
return Promise.reject('No audio data recorded.');
}
/**
* Implements {@link RecordingAdapter#setMuted()}.
*
* @inheritdoc
*/
setMuted(muted) {
const shouldEnable = !muted;
if (!this._stream) {
return Promise.resolve();
}
const track = this._stream.getAudioTracks()[0];
if (!track) {
logger.error('Cannot mute/unmute. Track not found!');
return Promise.resolve();
}
if (track.enabled !== shouldEnable) {
track.enabled = shouldEnable;
logger.log(muted ? 'Mute' : 'Unmute');
}
return Promise.resolve();
}
/**
* Implements {@link RecordingAdapter#setMicDevice()}.
*
* @inheritdoc
*/
setMicDevice(micDeviceId) {
return this._replaceMic(micDeviceId);
}
/**
* Initialize the adapter.
*
* @private
* @param {string} micDeviceId - The current microphone device ID.
* @returns {Promise}
*/
_initialize(micDeviceId) {
if (this._encoder !== null) {
return Promise.resolve();
}
const promiseInitWorker = new Promise((resolve, reject) => {
try {
this._loadWebWorker();
} catch (e) {
// Bail out: without a worker there is no encoder to set up below.
reject(e);
return;
}
// Save the Promise's resolver to resolve it later.
// This Promise is only resolved in _onWorkerMessage when we
// receive WORKER_LIBFLAC_READY from the WebWorker.
this._initWorkerPromiseResolver = resolve;
// set up listener for messages from the WebWorker
this._encoder.onmessage = this._onWorkerMessage;
this._encoder.postMessage({
command: MAIN_THREAD_INIT,
config: {
sampleRate: this._sampleRate,
bps: 16
}
});
});
// The AudioContext must be initialized only **after** promiseInitWorker
// resolves, hence the arrow-function callback.
return promiseInitWorker
.then(() =>
this._initializeAudioContext(
micDeviceId,
this._onAudioProcess
));
}
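// Sketch of the message handshake that this initialization relies on
// (derived from messageTypes.js and the flacEncodeWorker below):
//   main thread                        flacEncodeWorker
//   MAIN_THREAD_INIT  --------------->  creates Encoder once Flac.isReady()
//                     <---------------  WORKER_LIBFLAC_READY
//   MAIN_THREAD_NEW_DATA_ARRIVED ---->  encoder.encode(buf)
//   MAIN_THREAD_FINISH -------------->  encoder.finish()
//                     <---------------  WORKER_BLOB_READY (the FLAC Blob)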
/**
* Callback function for handling AudioProcessingEvents.
*
* @private
* @param {AudioProcessingEvent} e - The event containing the raw PCM.
* @returns {void}
*/
_onAudioProcess(e) {
// Delegates to the WebWorker to do the encoding.
// The return of getChannelData() is a Float32Array,
// each element representing one sample.
const channelLeft = e.inputBuffer.getChannelData(0);
this._encoder.postMessage({
command: MAIN_THREAD_NEW_DATA_ARRIVED,
buf: channelLeft
});
}
/**
* Handler for messages from flacEncodeWorker.
*
* @private
* @param {MessageEvent} e - The event sent by the WebWorker.
* @returns {void}
*/
_onWorkerMessage(e) {
switch (e.data.command) {
case WORKER_BLOB_READY:
// Received a Blob representing an encoded FLAC file.
this._data = e.data.buf;
if (this._stopPromiseResolver !== null) {
this._stopPromiseResolver();
this._stopPromiseResolver = null;
this._encoder.terminate();
this._encoder = null;
}
break;
case DEBUG:
logger.log(e.data);
break;
case WORKER_LIBFLAC_READY:
logger.log('libflac is ready.');
this._initWorkerPromiseResolver();
break;
default:
logger.error(
`Unknown event from encoder (WebWorker): "${e.data.command}"!`);
break;
}
}
/**
* Loads the WebWorker.
*
* @private
* @returns {void}
*/
_loadWebWorker() {
// FIXME: Workaround for different file names in development/
// production environments.
// We cannot import flacEncodeWorker as a webpack module,
// because it is in a different bundle and should be lazy-loaded
// only when flac recording is in use.
try {
// try to load the minified version first
this._encoder = new Worker('/libs/flacEncodeWorker.min.js', { name: 'FLAC encoder worker' });
} catch (exception1) {
// if that failed, try the unminified version
try {
this._encoder = new Worker('/libs/flacEncodeWorker.js', { name: 'FLAC encoder worker' });
} catch (exception2) {
throw new Error('Failed to load flacEncodeWorker.');
}
}
}
}


@@ -1,399 +0,0 @@
import {
MAIN_THREAD_FINISH,
MAIN_THREAD_INIT,
MAIN_THREAD_NEW_DATA_ARRIVED,
WORKER_BLOB_READY,
WORKER_LIBFLAC_READY
} from './messageTypes';
/**
* WebWorker that does FLAC encoding using libflac.js.
*/
self.FLAC_SCRIPT_LOCATION = '/libs/';
/* eslint-disable */
importScripts('/libs/libflac4-1.3.2.min.js');
/* eslint-enable */
// A number of API calls to libflac.js do not conform to the camelCase
// naming convention, but we cannot change them.
// So we disable the ESLint rule `new-cap` in this file.
/* eslint-disable new-cap */
// Flow will complain about the number keys in `FLAC_ERRORS`,
// and ESLint will complain about the `declare` statement.
// As a workaround, add an exception for ESLint.
/* eslint-disable flowtype/no-types-missing-file-annotation */
declare var Flac: Object;
const FLAC_ERRORS = {
// The encoder is in the normal OK state and samples can be processed.
0: 'FLAC__STREAM_ENCODER_OK',
// The encoder is in the uninitialized state; one of the
// FLAC__stream_encoder_init_*() functions must be called before samples
// can be processed.
1: 'FLAC__STREAM_ENCODER_UNINITIALIZED',
// An error occurred in the underlying Ogg layer.
2: 'FLAC__STREAM_ENCODER_OGG_ERROR',
// An error occurred in the underlying verify stream decoder; check
// FLAC__stream_encoder_get_verify_decoder_state().
3: 'FLAC__STREAM_ENCODER_VERIFY_DECODER_ERROR',
// The verify decoder detected a mismatch between the original audio signal
// and the decoded audio signal.
4: 'FLAC__STREAM_ENCODER_VERIFY_MISMATCH_IN_AUDIO_DATA',
// One of the callbacks returned a fatal error.
5: 'FLAC__STREAM_ENCODER_CLIENT_ERROR',
// An I/O error occurred while opening/reading/writing a file. Check errno.
6: 'FLAC__STREAM_ENCODER_IO_ERROR',
// An error occurred while writing the stream; usually, the write_callback
// returned an error.
7: 'FLAC__STREAM_ENCODER_FRAMING_ERROR',
// Memory allocation failed.
8: 'FLAC__STREAM_ENCODER_MEMORY_ALLOCATION_ERROR'
};
/**
* States of the {@code Encoder}.
*/
const EncoderState = Object.freeze({
/**
* Initial state, when libflac.js is not initialized.
*/
UNINITIALIZED: 'uninitialized',
/**
* Actively encoding new audio bits.
*/
WORKING: 'working',
/**
* Encoding has finished and encoded bits are available.
*/
FINISHED: 'finished'
});
/**
* Default FLAC compression level.
*/
const FLAC_COMPRESSION_LEVEL = 5;
/**
* Concat multiple Uint8Arrays into one.
*
* @param {Uint8Array[]} arrays - Array of Uint8 arrays.
* @param {number} totalLength - Total length of all Uint8Arrays.
* @returns {Uint8Array}
*/
function mergeUint8Arrays(arrays, totalLength) {
const result = new Uint8Array(totalLength);
let offset = 0;
const len = arrays.length;
for (let i = 0; i < len; i++) {
const buffer = arrays[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
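// Illustrative usage (not part of the original file):
//   mergeUint8Arrays([ Uint8Array.of(1, 2), Uint8Array.of(3) ], 3)
//   // -> Uint8Array [ 1, 2, 3 ]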
/**
* Wrapper class around libflac API.
*/
class Encoder {
/**
* Flac encoder instance ID. (As per libflac.js API).
*
* @private
*/
_encoderId = 0;
/**
* Sample rate.
*
* @private
*/
_sampleRate;
/**
* Bit depth (bits per sample).
*
* @private
*/
_bitDepth;
/**
* Buffer size.
*
* @private
*/
_bufferSize;
/**
* Buffers to store encoded bits temporarily.
*/
_flacBuffers = [];
/**
* Length of encoded FLAC bits.
*/
_flacLength = 0;
/**
* The current state of the {@code Encoder}.
*/
_state = EncoderState.UNINITIALIZED;
/**
* The ready-for-grab downloadable Blob.
*/
_data = null;
/**
* Constructor.
* Note: Only create an instance once Flac.isReady() returns true.
*
* @param {number} sampleRate - Sample rate of the raw audio data.
* @param {number} bitDepth - Bit depth (bit per sample).
* @param {number} bufferSize - The size of each batch.
*/
constructor(sampleRate, bitDepth = 16, bufferSize = 4096) {
if (!Flac.isReady()) {
throw new Error('libflac is not ready yet!');
}
this._sampleRate = sampleRate;
this._bitDepth = bitDepth;
this._bufferSize = bufferSize;
// create the encoder
this._encoderId = Flac.init_libflac_encoder(
this._sampleRate,
// Mono channel
1,
this._bitDepth,
FLAC_COMPRESSION_LEVEL,
// Pass 0 because the total number of samples is unknown.
0,
// checksum, FIXME: double-check whether this is necessary
true,
// Auto-determine block size (samples per frame)
0
);
if (this._encoderId === 0) {
throw new Error('Failed to create libflac encoder.');
}
// initialize the encoder
const initResult = Flac.init_encoder_stream(
this._encoderId,
this._onEncodedData.bind(this),
this._onMetadataAvailable.bind(this)
);
if (initResult !== 0) {
throw new Error('Failed to initialize libflac encoder.');
}
this._state = EncoderState.WORKING;
}
/**
* Receive and encode new data.
*
* @param {Float32Array} audioData - Raw audio data.
* @returns {void}
*/
encode(audioData) {
if (this._state !== EncoderState.WORKING) {
throw new Error('Encoder is not ready or has finished.');
}
if (!Flac.isReady()) {
throw new Error('Flac not ready');
}
const bufferLength = audioData.length;
// Convert the samples to signed 32-bit integers.
// According to the libflac documentation:
// each sample in the buffers should be a signed integer,
// right-justified to the resolution set by
// FLAC__stream_encoder_set_bits_per_sample().
// Here we are using 16 bits per sample, so the samples should all be in
// the range [-32768, 32767]. This is achieved by multiplying the Float32
// numbers by 0x7FFF.
const bufferI32 = new Int32Array(bufferLength);
const view = new DataView(bufferI32.buffer);
const volume = 1;
let index = 0;
for (let i = 0; i < bufferLength; i++) {
view.setInt32(index, audioData[i] * (0x7FFF * volume), true);
index += 4; // 4 bytes (32-bit)
}
// pass it to libflac
const status = Flac.FLAC__stream_encoder_process_interleaved(
this._encoderId,
bufferI32,
bufferI32.length
);
if (status !== 1) {
// gets error number
const errorNo
= Flac.FLAC__stream_encoder_get_state(this._encoderId);
console.error('Error during encoding', FLAC_ERRORS[errorNo]);
}
}
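// Worked example (illustrative): a Float32 sample of 0.25 becomes
// 0.25 * 0x7FFF = 8191 (truncated), stored little-endian in the
// Int32Array but right-justified to the 16-bit resolution configured
// above.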
/**
* Signals the termination of encoding.
*
* @returns {void}
*/
finish() {
if (this._state === EncoderState.WORKING) {
this._state = EncoderState.FINISHED;
const status = Flac.FLAC__stream_encoder_finish(this._encoderId);
console.log('Flac encoding finished: ', status);
// free up resources
Flac.FLAC__stream_encoder_delete(this._encoderId);
this._data = this._exportFlacBlob();
}
}
/**
* Gets the encoded flac file.
*
* @returns {Blob} - The encoded flac file.
*/
getBlob() {
if (this._state === EncoderState.FINISHED) {
return this._data;
}
return null;
}
/**
* Converts flac buffer to a Blob.
*
* @private
* @returns {void}
*/
_exportFlacBlob() {
const samples = mergeUint8Arrays(this._flacBuffers, this._flacLength);
const blob = new Blob([ samples ], { type: 'audio/flac' });
return blob;
}
/* eslint-disable no-unused-vars */
/**
* Callback function for saving encoded Flac data.
* This is invoked by libflac.
*
* @private
* @param {Uint8Array} buffer - The encoded Flac data.
* @param {number} bytes - Number of bytes in the data.
* @returns {void}
*/
_onEncodedData(buffer, bytes) {
this._flacBuffers.push(buffer);
this._flacLength += buffer.byteLength;
}
/* eslint-enable no-unused-vars */
/**
* Callback function for receiving metadata.
*
* @private
* @returns {void}
*/
_onMetadataAvailable = () => {
// reserved for future use
};
}
let encoder = null;
self.onmessage = function(e) {
switch (e.data.command) {
case MAIN_THREAD_INIT:
{
const bps = e.data.config.bps;
const sampleRate = e.data.config.sampleRate;
if (Flac.isReady()) {
encoder = new Encoder(sampleRate, bps);
self.postMessage({
command: WORKER_LIBFLAC_READY
});
} else {
Flac.onready = function() {
setTimeout(() => {
encoder = new Encoder(sampleRate, bps);
self.postMessage({
command: WORKER_LIBFLAC_READY
});
}, 0);
};
}
break;
}
case MAIN_THREAD_NEW_DATA_ARRIVED:
if (encoder === null) {
console.error('flacEncodeWorker received data when the encoder is not ready.');
} else {
encoder.encode(e.data.buf);
}
break;
case MAIN_THREAD_FINISH:
if (encoder !== null) {
encoder.finish();
const data = encoder.getBlob();
self.postMessage(
{
command: WORKER_BLOB_READY,
buf: data
}
);
encoder = null;
}
break;
}
};


@@ -1 +0,0 @@
export * from './FlacAdapter';


@@ -1,44 +0,0 @@
/**
* Types of messages that are passed between the main thread and the WebWorker
* ({@code flacEncodeWorker}).
*/
// Messages sent by the main thread
/**
* Message type that signals the termination of encoding,
* after which no new audio bits should be sent to the
* WebWorker.
*/
export const MAIN_THREAD_FINISH = 'MAIN_THREAD_FINISH';
/**
* Message type that carries initial parameters for
* the WebWorker.
*/
export const MAIN_THREAD_INIT = 'MAIN_THREAD_INIT';
/**
* Message type that carries the newly received raw audio bits
* for the WebWorker to encode.
*/
export const MAIN_THREAD_NEW_DATA_ARRIVED = 'MAIN_THREAD_NEW_DATA_ARRIVED';
// Messages sent by the WebWorker
/**
* Message type that signals libflac is ready to receive audio bits.
*/
export const WORKER_LIBFLAC_READY = 'WORKER_LIBFLAC_READY';
/**
* Message type that carries the encoded FLAC file as a Blob.
*/
export const WORKER_BLOB_READY = 'WORKER_BLOB_READY';
// Messages sent by either the main thread or the WebWorker
/**
* Debug messages.
*/
export const DEBUG = 'DEBUG';


@@ -1,5 +0,0 @@
export * from './OggAdapter';
export * from './RecordingAdapter';
export * from './Utils';
export * from './WavAdapter';
export * from './flac';


@@ -1,36 +0,0 @@
/* @flow */
import { ReducerRegistry } from '../base/redux';
import {
LOCAL_RECORDING_ENGAGED,
LOCAL_RECORDING_STATS_UPDATE,
LOCAL_RECORDING_UNENGAGED
} from './actionTypes';
import { recordingController } from './controller';
ReducerRegistry.register('features/local-recording', (state = {}, action) => {
switch (action.type) {
case LOCAL_RECORDING_ENGAGED: {
return {
...state,
isEngaged: true,
recordingEngagedAt: action.recordingEngagedAt,
encodingFormat: recordingController._format
};
}
case LOCAL_RECORDING_UNENGAGED:
return {
...state,
isEngaged: false,
recordingEngagedAt: null
};
case LOCAL_RECORDING_STATS_UPDATE:
return {
...state,
stats: action.stats
};
default:
return state;
}
});
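// Resulting shape of the 'features/local-recording' state (sketch; field
// types inferred from the actions handled above):
//   {
//       isEngaged: boolean,
//       recordingEngagedAt: timestamp carried by the engage action,
//       encodingFormat: string, e.g. 'flac' or 'wav',
//       stats: statistics object from LOCAL_RECORDING_STATS_UPDATE
//   }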


@@ -1,439 +0,0 @@
/* @flow */
import Bourne from '@hapi/bourne';
import { jitsiLocalStorage } from '@jitsi/js-utils';
import logger from '../logger';
/**
* Gets high precision system time.
*
* @returns {number}
*/
function highPrecisionTime(): number {
return window.performance
&& window.performance.now
&& window.performance.timing
&& window.performance.timing.navigationStart
? window.performance.now() + window.performance.timing.navigationStart
: Date.now();
}
// Have to use string literals here, instead of Symbols,
// because these values need to be JSON-serializable.
/**
* Types of SessionEvents.
*/
const SessionEventType = Object.freeze({
/**
* Start of local recording session. This is recorded when the
* {@code RecordingController} receives the signal to start local recording,
* before the actual adapter is engaged.
*/
SESSION_STARTED: 'SESSION_STARTED',
/**
* Start of a continuous segment. This is recorded when the adapter is
* engaged. Can happen multiple times in a local recording session,
* due to browser reloads or switching of recording device.
*/
SEGMENT_STARTED: 'SEGMENT_STARTED',
/**
* End of a continuous segment. This is recorded when the adapter unengages.
*/
SEGMENT_ENDED: 'SEGMENT_ENDED'
});
/**
* Represents an event during a local recording session.
* The event can be either that the adapter started recording, or stopped
* recording.
*/
type SessionEvent = {
/**
* The type of the event.
* Should be one of the values in {@code SessionEventType}.
*/
type: string,
/**
* The timestamp of the event.
*/
timestamp: number
};
/**
* Representation of the metadata of a segment.
*/
type SegmentInfo = {
/**
* The length of the gap before this segment, in milliseconds.
* Null if unknown.
*/
gapBefore?: ?number,
/**
* The duration of this segment, in milliseconds.
* Null if unknown or the segment is not finished.
*/
duration?: ?number,
/**
* The start time, in milliseconds.
*/
start?: ?number,
/**
* The end time, in milliseconds.
* Null if unknown, the segment is not finished, or the recording is
* interrupted (e.g. Browser reload).
*/
end?: ?number
};
/**
* Representation of metadata of a local recording session.
*/
type SessionInfo = {
/**
* The session token.
*/
sessionToken: string,
/**
* The start time of the session.
*/
start: ?number,
/**
* The recording format.
*/
format: string,
/**
* Array of segments in the session.
*/
segments: SegmentInfo[]
}
/**
* {@code localStorage} Key.
*/
const LOCAL_STORAGE_KEY = 'localRecordingMetadataVersion1';
/**
* SessionManager manages the metadata of each segment during each local
* recording session.
*
* A segment is a continuous portion of recording done using the same adapter
* on the same microphone device.
*
* Browser refreshes and switching of the microphone device will cause new
* segments to be created.
*
* A recording session can consist of one or more segments.
*/
class SessionManager {
/**
* The metadata.
*/
_sessionsMetadata = {};
/**
* Constructor.
*/
constructor() {
this._loadMetadata();
}
/**
* Loads metadata from localStorage.
*
* @private
* @returns {void}
*/
_loadMetadata() {
const dataStr = jitsiLocalStorage.getItem(LOCAL_STORAGE_KEY);
if (dataStr !== null) {
try {
const dataObject = Bourne.parse(dataStr);
this._sessionsMetadata = dataObject;
} catch (e) {
logger.warn('Failed to parse localStorage item.');
return;
}
}
}
/**
* Persists metadata to localStorage.
*
* @private
* @returns {void}
*/
_saveMetadata() {
jitsiLocalStorage.setItem(LOCAL_STORAGE_KEY,
JSON.stringify(this._sessionsMetadata));
}
/**
* Creates a session if one does not exist.
*
* @param {string} sessionToken - The local recording session token.
* @param {string} format - The local recording format.
* @returns {void}
*/
createSession(sessionToken: string, format: string) {
if (this._sessionsMetadata[sessionToken] === undefined) {
this._sessionsMetadata[sessionToken] = {
format,
events: []
};
this._sessionsMetadata[sessionToken].events.push({
type: SessionEventType.SESSION_STARTED,
timestamp: highPrecisionTime()
});
this._saveMetadata();
} else {
logger.warn(`Session ${sessionToken} already exists`);
}
}
/**
* Gets all the Sessions.
*
* @returns {SessionInfo[]}
*/
getSessions(): SessionInfo[] {
const sessionTokens = Object.keys(this._sessionsMetadata);
const output = [];
for (let i = 0; i < sessionTokens.length; ++i) {
const thisSession = this._sessionsMetadata[sessionTokens[i]];
const newSessionInfo: SessionInfo = {
start: thisSession.events[0].timestamp,
format: thisSession.format,
sessionToken: sessionTokens[i],
segments: this.getSegments(sessionTokens[i])
};
output.push(newSessionInfo);
}
output.sort((a, b) => (a.start || 0) - (b.start || 0));
return output;
}
/**
* Removes session metadata.
*
* @param {string} sessionToken - The session token.
* @returns {void}
*/
removeSession(sessionToken: string) {
delete this._sessionsMetadata[sessionToken];
this._saveMetadata();
}
/**
* Gets the segments of a given session.
*
* @param {string} sessionToken - The session token.
* @returns {SegmentInfo[]}
*/
getSegments(sessionToken: string): SegmentInfo[] {
const thisSession = this._sessionsMetadata[sessionToken];
if (thisSession) {
return this._constructSegments(thisSession.events);
}
return [];
}
/**
* Marks the start of a new segment.
* This should be invoked by {@code RecordingAdapter}s when they need to
* start asynchronous operations (such as switching tracks) that interrupt
* recording.
*
* @param {string} sessionToken - The token of the session to start a new
* segment in.
* @returns {number} - Current segment index.
*/
beginSegment(sessionToken: string): number {
if (this._sessionsMetadata[sessionToken] === undefined) {
logger.warn('Attempting to add segments to nonexistent'
+ ` session ${sessionToken}`);
return -1;
}
this._sessionsMetadata[sessionToken].events.push({
type: SessionEventType.SEGMENT_STARTED,
timestamp: highPrecisionTime()
});
this._saveMetadata();
return this.getSegments(sessionToken).length - 1;
}
/**
* Gets the current segment index, starting from 0 for the first
* segment.
*
* @param {string} sessionToken - The session token.
* @returns {number}
*/
getCurrentSegmentIndex(sessionToken: string): number {
if (this._sessionsMetadata[sessionToken] === undefined) {
return -1;
}
const segments = this.getSegments(sessionToken);
if (segments.length === 0) {
return -1;
}
const lastSegment = segments[segments.length - 1];
if (lastSegment.end) {
// last segment is already ended
return -1;
}
return segments.length - 1;
}
/**
* Marks the end of the last segment in a session.
*
* @param {string} sessionToken - The session token.
* @returns {void}
*/
endSegment(sessionToken: string) {
if (this._sessionsMetadata[sessionToken] === undefined) {
logger.warn('Attempting to end a segment in nonexistent'
+ ` session ${sessionToken}`);
} else {
this._sessionsMetadata[sessionToken].events.push({
type: SessionEventType.SEGMENT_ENDED,
timestamp: highPrecisionTime()
});
this._saveMetadata();
}
}
/**
* Constructs an array of {@code SegmentInfo} from an array of
* {@code SessionEvent}s.
*
* @private
* @param {SessionEvent[]} events - The array of {@code SessionEvent}s.
* @returns {SegmentInfo[]}
*/
_constructSegments(events: SessionEvent[]): SegmentInfo[] {
if (events.length === 0) {
return [];
}
const output = [];
let sessionStartTime = null;
let currentSegment: SegmentInfo = {};
/**
* Helper function for adding a new {@code SegmentInfo} object to the
* output.
*
* @returns {void}
*/
function commit() {
if (currentSegment.gapBefore === undefined
|| currentSegment.gapBefore === null) {
if (output.length > 0 && output[output.length - 1].end) {
const lastSegment = output[output.length - 1];
if (currentSegment.start && lastSegment.end) {
currentSegment.gapBefore = currentSegment.start
- lastSegment.end;
} else {
currentSegment.gapBefore = null;
}
} else if (sessionStartTime !== null && output.length === 0) {
currentSegment.gapBefore = currentSegment.start
? currentSegment.start - sessionStartTime
: null;
} else {
currentSegment.gapBefore = null;
}
}
currentSegment.duration = currentSegment.end && currentSegment.start
? currentSegment.end - currentSegment.start
: null;
output.push(currentSegment);
currentSegment = {};
}
for (let i = 0; i < events.length; ++i) {
const currentEvent = events[i];
switch (currentEvent.type) {
case SessionEventType.SESSION_STARTED:
if (sessionStartTime === null) {
sessionStartTime = currentEvent.timestamp;
} else {
logger.warn('Unexpected SESSION_STARTED event.'
, currentEvent);
}
break;
case SessionEventType.SEGMENT_STARTED:
if (currentSegment.start === undefined
|| currentSegment.start === null) {
currentSegment.start = currentEvent.timestamp;
} else {
commit();
currentSegment.start = currentEvent.timestamp;
}
break;
case SessionEventType.SEGMENT_ENDED:
if (currentSegment.start === undefined
|| currentSegment.start === null) {
logger.warn('Unexpected SEGMENT_ENDED event', currentEvent);
} else {
currentSegment.end = currentEvent.timestamp;
commit();
}
break;
default:
logger.warn('Unexpected event type during _constructSegments.');
break;
}
}
if (currentSegment.start) {
commit();
}
return output;
}
}
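// Worked example (illustrative): the event sequence
//   SESSION_STARTED(t=0), SEGMENT_STARTED(t=10), SEGMENT_ENDED(t=500),
//   SEGMENT_STARTED(t=620)
// is reconstructed by _constructSegments into two segments:
//   { start: 10, end: 500, gapBefore: 10, duration: 490 }
//   { start: 620, gapBefore: 120, duration: null } // still unfinished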
/**
* Global singleton of {@code SessionManager}.
*/
export const sessionManager = new SessionManager();
// For debug only. To remove later.
window.sessionManager = sessionManager;


@@ -1 +0,0 @@
export * from './SessionManager';


@@ -36,7 +36,6 @@ import { isGifEnabled } from '../../../gifs/functions';
import { InviteButton } from '../../../invite/components/add-people-dialog';
import { isVpaasMeeting } from '../../../jaas/functions';
import { KeyboardShortcutsButton } from '../../../keyboard-shortcuts';
-import { LocalRecordingButton } from '../../../local-recording';
import {
close as closeParticipantsPane,
open as openParticipantsPane
@@ -732,12 +731,6 @@ class Toolbox extends Component<Props> {
group: 2
};
-const localRecording = {
-key: 'localrecording',
-Content: LocalRecordingButton,
-group: 2
-};
const livestreaming = {
key: 'livestreaming',
Content: LiveStreamButton,
@@ -844,7 +837,6 @@ class Toolbox extends Component<Props> {
security,
cc,
recording,
-localRecording,
livestreaming,
linkToSalesforce,
muteEveryone,


@@ -347,16 +347,6 @@ module.exports = (_env, argv) => {
],
performance: getPerformanceHints(perfHintOptions, 5 * 1024)
}),
-Object.assign({}, config, {
-entry: {
-'flacEncodeWorker': './react/features/local-recording/recording/flac/flacEncodeWorker.js'
-},
-plugins: [
-...config.plugins,
-...getBundleAnalyzerPlugin(analyzeBundle, 'flacEncodeWorker')
-],
-performance: getPerformanceHints(perfHintOptions, 5 * 1024)
-}),
Object.assign({}, config, {
entry: {
'analytics-ga': './react/features/analytics/handlers/GoogleAnalyticsHandler.js'