parent 5daa91ec1b
commit 65c76dcde5

@@ -40,6 +40,11 @@ const COMMAND_PONG = 'localRecPong';
  */
 const PROPERTY_STATS = 'localRecStats';
 
+/**
+ * Supported recording formats.
+ */
+const RECORDING_FORMATS = new Set([ 'flac', 'wav', 'ogg' ]);
+
 /**
  * Default recording format.
  */
@@ -135,6 +140,13 @@ class RecordingController {
      */
     _state = ControllerState.IDLE;
 
+    /**
+     * Whether or not the audio is muted in the UI. This is stored as internal
+     * state of {@code RecordingController} because we might have recording
+     * sessions that start muted.
+     */
+    _isMuted = false;
+
     /**
      * Current recording format. This will be in effect from the next
      * recording session, i.e., if this value is changed during an on-going
@@ -299,6 +311,21 @@ class RecordingController {
         }
     }
 
+    /**
+     * Mute or unmute audio. When muted, the ongoing local recording should
+     * produce silence.
+     *
+     * @param {boolean} muted - If the audio should be muted.
+     * @returns {void}
+     */
+    setMuted(muted: boolean) {
+        this._isMuted = Boolean(muted);
+
+        if (this._state === ControllerState.RECORDING) {
+            this._adapters[this._currentSessionToken].setMuted(muted);
+        }
+    }
+
     /**
      * Switches the recording format.
      *
@@ -306,6 +333,11 @@ class RecordingController {
      * @returns {void}
      */
     switchFormat(newFormat: string) {
+        if (!RECORDING_FORMATS.has(newFormat)) {
+            logger.log(`Unknown format ${newFormat}. Ignoring...`);
+
+            return;
+        }
         this._format = newFormat;
         logger.log(`Recording format switched to ${newFormat}`);
 
@@ -465,13 +497,13 @@ class RecordingController {
     }
 
     /**
-     * Generates a token that can be used to distinguish each
-     * recording session.
+     * Generates a token that can be used to distinguish each local recording
+     * session.
      *
      * @returns {number}
      */
     _getRandomToken() {
-        return Math.floor(Math.random() * 10000) + 1;
+        return Math.floor(Math.random() * 100000000) + 1;
     }
 
     _doStartRecording: () => void;
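The wider token range matters because the random token is the only thing distinguishing concurrent local recording sessions. A rough birthday-style estimate (illustrative only; nothing in the controller computes this) shows why 10^4 values is uncomfortably small while 10^8 makes collisions negligible:

// Approximate probability that any two of n concurrently generated tokens
// collide when drawn uniformly from a pool of N values.
const collisionProbability = (n, N) => 1 - Math.exp(-(n * (n - 1)) / (2 * N));

console.log(collisionProbability(15, 10000));     // ~0.01  (about 1%)
console.log(collisionProbability(15, 100000000)); // ~1e-6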
@@ -497,6 +529,8 @@ class RecordingController {
             if (this._onStateChanged) {
                 this._onStateChanged(true);
             }
+
+            delegate.setMuted(this._isMuted);
             this._updateStats();
         })
         .catch(err => {
@@ -5,6 +5,7 @@ import { APP_WILL_MOUNT, APP_WILL_UNMOUNT } from '../base/app';
 import { CONFERENCE_JOINED } from '../base/conference';
 import { toggleDialog } from '../base/dialog';
 import { i18next } from '../base/i18n';
+import { SET_AUDIO_MUTED } from '../base/media';
 import { MiddlewareRegistry } from '../base/redux';
 import { showNotification } from '../notifications';
 
@@ -25,11 +26,17 @@ isFeatureEnabled
     switch (action.type) {
     case CONFERENCE_JOINED: {
         const { conference } = getState()['features/base/conference'];
+        const { localRecording } = getState()['features/base/config'];
+
+        if (localRecording && localRecording.format) {
+            recordingController.switchFormat(localRecording.format);
+        }
+
         recordingController.registerEvents(conference);
         break;
     }
     case APP_WILL_MOUNT:
 
         // realize the delegates on recordingController, allowing the UI to
         // react to state changes in recordingController.
         recordingController.onStateChanged = isEngaged => {
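The branch above lets a deployment preselect the recording format from its config. A minimal sketch of what such an entry might look like, inferred only from the localRecording.format lookup above (the surrounding config shape is assumed):

// config.js (sketch) -- only the key consumed by the middleware above is shown.
var config = {
    // ...other deployment options...
    localRecording: {
        format: 'flac' // one of RECORDING_FORMATS: 'flac', 'wav', 'ogg'
    }
};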
@@ -66,6 +73,9 @@ isFeatureEnabled
         recordingController.onNotify = null;
         recordingController.onWarning = null;
         break;
+    case SET_AUDIO_MUTED:
+        recordingController.setMuted(action.muted);
+        break;
     }
 
     // @todo: detect change in features/base/settings micDeviceID
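With this case in place, any dispatch of the existing base/media mute action also reaches the local recording controller. The action shape below is taken from the action.muted read above; the dispatch site itself is only illustrative, since in the app this action normally comes from the regular audio mute toggle:

// Illustrative dispatch of the base/media mute action (store is assumed).
store.dispatch({
    type: SET_AUDIO_MUTED,
    muted: true
});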
@@ -64,6 +64,34 @@ export class OggAdapter extends RecordingAdapter {
         }
     }
 
+    /**
+     * Implements {@link RecordingAdapter#setMuted()}.
+     *
+     * @inheritdoc
+     */
+    setMuted(muted) {
+        const shouldEnable = !muted;
+
+        if (!this._stream) {
+            return Promise.resolve();
+        }
+
+        const track = this._stream.getAudioTracks()[0];
+
+        if (!track) {
+            logger.error('Cannot mute/unmute. Track not found!');
+
+            return Promise.resolve();
+        }
+
+        if (track.enabled !== shouldEnable) {
+            track.enabled = shouldEnable;
+            logger.log(muted ? 'Mute' : 'Unmute');
+        }
+
+        return Promise.resolve();
+    }
+
     /**
      * Initialize the adapter.
      *
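All three adapters mute the same way: the captured MediaStream keeps flowing into the recorder, and muting only flips the enabled flag on its audio track, which makes the track emit silence rather than stopping it. A standalone sketch of that mechanism outside any adapter (names are illustrative):

// Minimal browser-only illustration of muting via track.enabled: the
// MediaRecorder keeps running and simply records silence while disabled.
async function muteDemo() {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const recorder = new MediaRecorder(stream);

    recorder.start();

    const [ track ] = stream.getAudioTracks();

    track.enabled = false; // recording continues, captures silence

    setTimeout(() => {
        track.enabled = true; // audio resumes within the same recording
        recorder.stop();
    }, 3000);
}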
@@ -78,6 +106,7 @@ export class OggAdapter extends RecordingAdapter {
         return new Promise((resolve, error) => {
             this._getAudioStream(0)
             .then(stream => {
+                this._stream = stream;
                 this._mediaRecorder = new MediaRecorder(stream);
                 this._mediaRecorder.ondataavailable
                     = e => this._saveMediaData(e.data);
@@ -33,8 +33,19 @@ export class RecordingAdapter {
     }
 
     /**
-     * Helper method for getting an audio MediaStream. Use this instead of
-     * calling browser APIs directly.
+     * Mutes or unmutes the current recording.
+     *
+     * @param {boolean} muted - Whether to mute or to unmute.
+     * @returns {Promise}
+     */
+    setMuted(/* eslint-disable no-unused-vars */
+        muted/* eslint-enable no-unused-vars */) {
+        throw new Error('Not implemented');
+    }
+
+    /**
+     * Helper method for getting an audio {@code MediaStream}. Use this instead
+     * of calling browser APIs directly.
      *
      * @protected
      * @param {number} micDeviceId - The ID of the current audio device.
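setMuted() is now part of the adapter contract, with a throwing default, so any further adapter is expected to override it. A hypothetical adapter, not part of this commit, just to show the expected shape:

// Hypothetical adapter; the real adapters in this commit toggle the enabled
// flag on their captured audio track instead of keeping a plain flag.
class NullRecordingAdapter extends RecordingAdapter {
    /**
     * Implements {@link RecordingAdapter#setMuted()}.
     *
     * @inheritdoc
     */
    setMuted(muted) {
        this._muted = Boolean(muted);

        return Promise.resolve();
    }
}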
@@ -52,7 +63,7 @@ export class RecordingAdapter {
         const mediaStream = result[0].stream;
 
         if (mediaStream === undefined) {
-            throw new Error('Failed to get MediaStream.');
+            throw new Error('Failed to create local track.');
         }
 
         return mediaStream;
@@ -17,8 +17,7 @@ export function downloadBlob(blob, fileName = 'recording.ogg') {
 }
 
 /**
- * Obtains a timestamp of now.
- * Used in filenames.
+ * Obtains a timestamp of now. Used in filenames.
  *
  * @returns {string}
  */
@@ -11,6 +11,11 @@ const WAV_SAMPLE_RATE = 44100;
  */
 export class WavAdapter extends RecordingAdapter {
 
+    /**
+     * The current {@code MediaStream} instance.
+     */
+    _stream = null;
+
     /**
      * {@code AudioContext} instance.
      */
@@ -65,17 +70,15 @@ export class WavAdapter extends RecordingAdapter {
             this._initPromise = this._initialize();
         }
 
-        return new Promise(
-            (resolve, /* eslint-disable */_reject/* eslint-enable */) => {
-                this._wavBuffers = [];
-                this._wavLength = 0;
-                this._wavBuffers.push(this._createWavHeader());
+        return this._initPromise.then(() => {
+            this._wavBuffers = [];
+            this._wavLength = 0;
+            this._wavBuffers.push(this._createWavHeader());
 
-                this._audioSource.connect(this._audioProcessingNode);
-                this._audioProcessingNode
-                    .connect(this._audioContext.destination);
-                resolve();
-            });
+            this._audioSource.connect(this._audioProcessingNode);
+            this._audioProcessingNode
+                .connect(this._audioContext.destination);
+        });
     }
 
     /**
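start() now chains onto _initPromise instead of wrapping its body in a hand-rolled new Promise that resolved immediately, so the audio graph is presumably only wired up once initialization has actually finished, and an initialization failure surfaces to the caller. A reduced sketch of the two shapes, with placeholder names:

// Reduced sketch; initPromise and setUpBuffers stand in for the adapter's
// _initPromise and its buffer/graph setup.
function startOld(initPromise, setUpBuffers) {
    // Old shape: runs and resolves immediately, whether or not
    // initialization has completed (or failed).
    return new Promise(resolve => {
        setUpBuffers();
        resolve();
    });
}

function startNew(initPromise, setUpBuffers) {
    // New shape: waits for initialization; a rejection propagates to the
    // caller of start().
    return initPromise.then(() => setUpBuffers());
}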
@@ -108,6 +111,34 @@ export class WavAdapter extends RecordingAdapter {
         }
     }
 
+    /**
+     * Implements {@link RecordingAdapter#setMuted()}.
+     *
+     * @inheritdoc
+     */
+    setMuted(muted) {
+        const shouldEnable = !muted;
+
+        if (!this._stream) {
+            return Promise.resolve();
+        }
+
+        const track = this._stream.getAudioTracks()[0];
+
+        if (!track) {
+            logger.error('Cannot mute/unmute. Track not found!');
+
+            return Promise.resolve();
+        }
+
+        if (track.enabled !== shouldEnable) {
+            track.enabled = shouldEnable;
+            logger.log(muted ? 'Mute' : 'Unmute');
+        }
+
+        return Promise.resolve();
+    }
+
     /**
      * Creates a WAVE file header.
      *
@@ -176,6 +207,7 @@ export class WavAdapter extends RecordingAdapter {
         const p = new Promise((resolve, reject) => {
             this._getAudioStream(0)
             .then(stream => {
+                this._stream = stream;
                 this._audioContext = new AudioContext();
                 this._audioSource
                     = this._audioContext.createMediaStreamSource(stream);
@@ -209,12 +241,10 @@ export class WavAdapter extends RecordingAdapter {
      * @returns {void}
      */
     _saveWavPCM(data) {
-        // need to copy the Float32Array,
-        // unlike passing to WebWorker,
-        // this data is passed by reference,
-        // so we need to copy it, otherwise the
-        // audio file will be just repeating the last
-        // segment.
+        // Need to copy the Float32Array:
+        // unlike passing to WebWorker, this data is passed by reference,
+        // so we need to copy it, otherwise the resulting audio file will be
+        // just repeating the last segment.
         this._wavBuffers.push(new Float32Array(data));
         this._wavLength += data.length;
     }
@@ -20,6 +20,7 @@ export class FlacAdapter extends RecordingAdapter {
     _audioContext = null;
     _audioProcessingNode = null;
     _audioSource = null;
+    _stream = null;
 
     /**
      * Resolve function of the promise returned by {@code stop()}.
@@ -85,6 +86,34 @@ export class FlacAdapter extends RecordingAdapter {
         }
     }
 
+    /**
+     * Implements {@link RecordingAdapter#setMuted()}.
+     *
+     * @inheritdoc
+     */
+    setMuted(muted) {
+        const shouldEnable = !muted;
+
+        if (!this._stream) {
+            return Promise.resolve();
+        }
+
+        const track = this._stream.getAudioTracks()[0];
+
+        if (!track) {
+            logger.error('Cannot mute/unmute. Track not found!');
+
+            return Promise.resolve();
+        }
+
+        if (track.enabled !== shouldEnable) {
+            track.enabled = shouldEnable;
+            logger.log(muted ? 'Mute' : 'Unmute');
+        }
+
+        return Promise.resolve();
+    }
+
     /**
      * Initialize the adapter.
      *
@@ -138,6 +167,7 @@ export class FlacAdapter extends RecordingAdapter {
         const callbackInitAudioContext = (resolve, reject) => {
             this._getAudioStream(0)
             .then(stream => {
+                this._stream = stream;
                 this._audioContext = new AudioContext();
                 this._audioSource
                     = this._audioContext.createMediaStreamSource(stream);
@@ -161,7 +191,7 @@ export class FlacAdapter extends RecordingAdapter {
             });
         };
 
-        // FIXME: because Promise constructor immediately executes the executor
+        // Because Promise constructor immediately executes the executor
         // function. This is undesirable, we want callbackInitAudioContext to be
         // executed only **after** promiseInitWorker is resolved.
         return promiseInitWorker
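The reworded comment still describes the same constraint: a Promise executor runs synchronously at construction, so callbackInitAudioContext stays a plain callback and is presumably wrapped in a Promise only after promiseInitWorker resolves (the continuation of return promiseInitWorker is cut off in this hunk). A minimal sketch of that ordering pattern, with assumed names:

// Minimal sketch: defer an executor until an earlier promise settles.
// Constructing the Promise eagerly would run the executor immediately.
function initialize(promiseInitWorker, callbackInitAudioContext) {
    return promiseInitWorker
        .then(() => new Promise(callbackInitAudioContext));
}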