diff --git a/react/features/local-recording/recording/AbstractAudioContextAdapter.js b/react/features/local-recording/recording/AbstractAudioContextAdapter.js new file mode 100644 index 000000000..26cf40c11 --- /dev/null +++ b/react/features/local-recording/recording/AbstractAudioContextAdapter.js @@ -0,0 +1,129 @@ +import { RecordingAdapter } from './RecordingAdapter'; + +const logger = require('jitsi-meet-logger').getLogger(__filename); + +/** + * Base class for {@code AudioContext}-based recording adapters. + */ +export class AbstractAudioContextAdapter extends RecordingAdapter { + /** + * The {@code AudioContext} instance. + */ + _audioContext = null; + + /** + * The {@code ScriptProcessorNode} instance. + */ + _audioProcessingNode = null; + + /** + * The {@code MediaStreamAudioSourceNode} instance. + */ + _audioSource = null; + + /** + * The {@code MediaStream} instance, representing the current audio device. + */ + _stream = null; + + /** + * Sample rate. + */ + _sampleRate = 44100; + + /** + * Constructor. + */ + constructor() { + super(); + + // sampleRate is browser and OS dependent. + // Setting sampleRate explicitly is in the specs but not implemented + // by browsers. + // See: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/ + // AudioContext#Browser_compatibility + // And https://bugs.chromium.org/p/chromium/issues/detail?id=432248 + + this._audioContext = new AudioContext(); + this._sampleRate = this._audioContext.sampleRate; + logger.log(`Current sampleRate ${this._sampleRate}.`); + } + + /** + * Sets up the audio graph in the AudioContext. + * + * @protected + * @param {string} micDeviceId - The current microphone device ID. + * @param {Function} callback - Callback function to + * handle AudioProcessingEvents. 
+ * @returns {Promise} + */ + _initializeAudioContext(micDeviceId, callback) { + if (typeof callback !== 'function') { + return Promise.reject('a callback function is required.'); + } + + return this._getAudioStream(micDeviceId) + .then(stream => { + this._stream = stream; + this._audioSource + = this._audioContext.createMediaStreamSource(stream); + this._audioProcessingNode + = this._audioContext.createScriptProcessor(4096, 1, 1); + this._audioProcessingNode.onaudioprocess = callback; + logger.debug('AudioContext is set up.'); + }) + .catch(err => { + logger.error(`Error calling getUserMedia(): ${err}`); + + return Promise.reject(err); + }); + } + + /** + * Connects the nodes in the {@code AudioContext} to start the flow of + * audio data. + * + * @protected + * @returns {void} + */ + _connectAudioGraph() { + this._audioSource.connect(this._audioProcessingNode); + this._audioProcessingNode.connect(this._audioContext.destination); + } + + /** + * Disconnects the nodes in the {@code AudioContext}. + * + * @protected + * @returns {void} + */ + _disconnectAudioGraph() { + this._audioProcessingNode.onaudioprocess = undefined; + this._audioProcessingNode.disconnect(); + this._audioSource.disconnect(); + } + + /** + * Replaces the current microphone MediaStream. + * + * @protected + * @param {string} micDeviceId - New microphone ID. 
+ * @returns {Promise} + */ + _replaceMic(micDeviceId) { + if (this._audioContext && this._audioProcessingNode) { + return this._getAudioStream(micDeviceId).then(newStream => { + const newSource = this._audioContext + .createMediaStreamSource(newStream); + + this._audioSource.disconnect(); + newSource.connect(this._audioProcessingNode); + this._stream = newStream; + this._audioSource = newSource; + }); + } + + return Promise.resolve(); + } +} diff --git a/react/features/local-recording/recording/WavAdapter.js b/react/features/local-recording/recording/WavAdapter.js index dc6b16162..d30930a3a 100644 --- a/react/features/local-recording/recording/WavAdapter.js +++ b/react/features/local-recording/recording/WavAdapter.js @@ -1,34 +1,13 @@ -import { RecordingAdapter } from './RecordingAdapter'; +import { AbstractAudioContextAdapter } from './AbstractAudioContextAdapter'; const logger = require('jitsi-meet-logger').getLogger(__filename); const WAV_BITS_PER_SAMPLE = 16; -const WAV_SAMPLE_RATE = 44100; /** * Recording adapter for raw WAVE format. */ -export class WavAdapter extends RecordingAdapter { - - /** - * The current {@code MediaStream} instance. - */ - _stream = null; - - /** - * {@code AudioContext} instance. - */ - _audioContext = null; - - /** - * {@code ScriptProcessorNode} instance, which receives the raw PCM bits. - */ - _audioProcessingNode = null; - - /** - * {@code MediaStreamAudioSourceNode} instance, which represents the mic. - */ - _audioSource = null; +export class WavAdapter extends AbstractAudioContextAdapter { /** * Length of the WAVE file, in number of samples. 
@@ -55,8 +34,7 @@ export class WavAdapter extends RecordingAdapter { */ constructor() { super(); - - this._onReceivePCM = this._onReceivePCM.bind(this); + this._onAudioProcess = this._onAudioProcess.bind(this); } /** @@ -73,9 +51,7 @@ export class WavAdapter extends RecordingAdapter { this._wavBuffers = []; this._wavLength = 0; - this._audioSource.connect(this._audioProcessingNode); - this._audioProcessingNode - .connect(this._audioContext.destination); + this._connectAudioGraph(); }); } @@ -85,10 +61,8 @@ export class WavAdapter extends RecordingAdapter { * @inheritdoc */ stop() { - this._audioProcessingNode.disconnect(); - this._audioSource.disconnect(); + this._disconnectAudioGraph(); this._data = this._exportMonoWAV(this._wavBuffers, this._wavLength); - this._audioContext = null; this._audioProcessingNode = null; this._audioSource = null; this._isInitialized = false; @@ -149,34 +123,6 @@ export class WavAdapter extends RecordingAdapter { return this._replaceMic(micDeviceId); } - /** - * Replaces the current microphone MediaStream. - * - * @param {*} micDeviceId - New microphone ID. - * @returns {Promise} - */ - _replaceMic(micDeviceId) { - if (this._audioContext && this._audioProcessingNode) { - return new Promise((resolve, reject) => { - this._getAudioStream(micDeviceId).then(newStream => { - const newSource = this._audioContext - .createMediaStreamSource(newStream); - - this._audioSource.disconnect(); - newSource.connect(this._audioProcessingNode); - this._stream = newStream; - this._audioSource = newSource; - resolve(); - }) - .catch(() => { - reject(); - }); - }); - } - - return Promise.resolve(); - } - /** * Creates a WAVE file header. 
* @@ -209,11 +155,11 @@ export class WavAdapter extends RecordingAdapter { view.setUint16(22, 1, true); // SampleRate - view.setUint32(24, WAV_SAMPLE_RATE, true); + view.setUint32(24, this._sampleRate, true); // ByteRate view.setUint32(28, - Number(WAV_SAMPLE_RATE) * 1 * WAV_BITS_PER_SAMPLE / 8, true); + Number(this._sampleRate) * 1 * WAV_BITS_PER_SAMPLE / 8, true); // BlockAlign view.setUint16(32, 1 * Number(WAV_BITS_PER_SAMPLE) / 8, true); @@ -244,51 +190,31 @@ export class WavAdapter extends RecordingAdapter { return Promise.resolve(); } - const p = new Promise((resolve, reject) => { - this._getAudioStream(micDeviceId) - .then(stream => { - this._stream = stream; - this._audioContext = new AudioContext({ - sampleRate: WAV_SAMPLE_RATE - }); - this._audioSource - = this._audioContext.createMediaStreamSource(stream); - this._audioProcessingNode - = this._audioContext.createScriptProcessor(4096, 1, 1); - this._audioProcessingNode.onaudioprocess = e => { - const channelLeft = e.inputBuffer.getChannelData(0); - - // See: https://developer.mozilla.org/en-US/docs/Web/API/ - // AudioBuffer/getChannelData - // The returned value is an Float32Array. - this._onReceivePCM(channelLeft); - }; + return this._initializeAudioContext(micDeviceId, this._onAudioProcess) + .then(() => { this._isInitialized = true; - resolve(); - }) - .catch(err => { - logger.error(`Error calling getUserMedia(): ${err}`); - reject(); }); - }); - - return p; } /** - * Callback function that saves the PCM bits. + * Callback function for handling AudioProcessingEvents. * * @private - * @param {Float32Array} data - The audio PCM data. + * @param {AudioProcessingEvent} e - The event containing the raw PCM. * @returns {void} */ - _onReceivePCM(data) { + _onAudioProcess(e) { + // See: https://developer.mozilla.org/en-US/docs/Web/API/ + // AudioBuffer/getChannelData + // The returned value is an Float32Array. 
+ const channelLeft = e.inputBuffer.getChannelData(0); + // Need to copy the Float32Array: // unlike passing to WebWorker, this data is passed by reference, // so we need to copy it, otherwise the resulting audio file will be // just repeating the last segment. - this._wavBuffers.push(new Float32Array(data)); - this._wavLength += data.length; + this._wavBuffers.push(new Float32Array(channelLeft)); + this._wavLength += channelLeft.length; } /** diff --git a/react/features/local-recording/recording/flac/FlacAdapter.js b/react/features/local-recording/recording/flac/FlacAdapter.js index a46f493c4..930ab72a9 100644 --- a/react/features/local-recording/recording/flac/FlacAdapter.js +++ b/react/features/local-recording/recording/flac/FlacAdapter.js @@ -1,4 +1,3 @@ -import { RecordingAdapter } from '../RecordingAdapter'; import { DEBUG, MAIN_THREAD_FINISH, @@ -8,50 +7,41 @@ import { WORKER_LIBFLAC_READY } from './messageTypes'; +import { AbstractAudioContextAdapter } from '../AbstractAudioContextAdapter'; + const logger = require('jitsi-meet-logger').getLogger(__filename); /** * Recording adapter that uses libflac.js in the background. */ -export class FlacAdapter extends RecordingAdapter { +export class FlacAdapter extends AbstractAudioContextAdapter { /** * Instance of flacEncodeWorker. */ _encoder = null; - /** - * The {@code AudioContext} instance. - */ - _audioContext = null; - - /** - * The {@code ScriptProcessorNode} instance. - */ - _audioProcessingNode = null; - - /** - * The {@code MediaStreamAudioSourceNode} instance. - */ - _audioSource = null; - - /** - * The {@code MediaStream} instance, representing the current audio device. - */ - _stream = null; - /** * Resolve function of the promise returned by {@code stop()}. * This is called after the WebWorker sends back {@code WORKER_BLOB_READY}. */ _stopPromiseResolver = null; + _initPromiseResolver = null; + /** * Initialization promise. */ _initPromise = null; - _sampleRate = 44100; + /** + * Constructor. 
+ */ + constructor() { + super(); + this._onAudioProcess = this._onAudioProcess.bind(this); + this._onWorkerMessage = this._onWorkerMessage.bind(this); + } /** * Implements {@link RecordingAdapter#start()}. @@ -64,8 +54,7 @@ export class FlacAdapter extends RecordingAdapter { } return this._initPromise.then(() => { - this._audioSource.connect(this._audioProcessingNode); - this._audioProcessingNode.connect(this._audioContext.destination); + this._connectAudioGraph(); }); } @@ -83,9 +72,7 @@ export class FlacAdapter extends RecordingAdapter { return new Promise(resolve => { this._initPromise = null; - this._audioProcessingNode.onaudioprocess = undefined; - this._audioProcessingNode.disconnect(); - this._audioSource.disconnect(); + this._disconnectAudioGraph(); this._stopPromiseResolver = resolve; this._encoder.postMessage({ command: MAIN_THREAD_FINISH @@ -146,29 +133,6 @@ export class FlacAdapter extends RecordingAdapter { return this._replaceMic(micDeviceId); } - /** - * Replaces the current microphone MediaStream. - * - * @param {string} micDeviceId - New microphone ID. - * @returns {Promise} - */ - _replaceMic(micDeviceId) { - if (this._audioContext && this._audioProcessingNode) { - return this._getAudioStream(micDeviceId).then(newStream => { - const newSource = this._audioContext - .createMediaStreamSource(newStream); - - this._audioSource.disconnect(); - newSource.connect(this._audioProcessingNode); - this._stream = newStream; - this._audioSource = newSource; - - }); - } - - return Promise.resolve(); - } - /** * Initialize the adapter. * @@ -181,17 +145,6 @@ export class FlacAdapter extends RecordingAdapter { return Promise.resolve(); } - // sampleRate is browser and OS dependent. - // Setting sampleRate explicitly is in the specs but not implemented - // by browsers. 
- // See: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/ - // AudioContext#Browser_compatibility - // And https://bugs.chromium.org/p/chromium/issues/detail?id=432248 - - this._audioContext = new AudioContext(); - this._sampleRate = this._audioContext.sampleRate; - logger.log(`Current sampleRate ${this._sampleRate}.`); - const promiseInitWorker = new Promise((resolve, reject) => { try { this._loadWebWorker(); @@ -199,28 +152,11 @@ export class FlacAdapter extends RecordingAdapter { reject(); } - // set up listen for messages from the WebWorker - this._encoder.onmessage = e => { - if (e.data.command === WORKER_BLOB_READY) { - // Received a Blob representing an encoded FLAC file. - this._data = e.data.buf; - if (this._stopPromiseResolver !== null) { - this._stopPromiseResolver(); - this._stopPromiseResolver = null; - this._encoder.terminate(); - this._encoder = null; - } - } else if (e.data.command === DEBUG) { - logger.log(e.data); - } else if (e.data.command === WORKER_LIBFLAC_READY) { - logger.log('libflac is ready.'); - resolve(); - } else { - logger.error( - `Unknown event - from encoder (WebWorker): "${e.data.command}"!`); - } - }; + // save the Promise's resolver to resolve it later. + this._initPromiseResolver = resolve; + + // set up listener for messages from the WebWorker + this._encoder.onmessage = this._onWorkerMessage; this._encoder.postMessage({ command: MAIN_THREAD_INIT, @@ -231,38 +167,67 @@ export class FlacAdapter extends RecordingAdapter { }); }); - const callbackInitAudioContext = () => - this._getAudioStream(micDeviceId) - .then(stream => { - this._stream = stream; - this._audioSource - = this._audioContext.createMediaStreamSource(stream); - this._audioProcessingNode - = this._audioContext.createScriptProcessor(4096, 1, 1); - this._audioProcessingNode.onaudioprocess = e => { - // Delegates to the WebWorker to do the encoding. - // The return of getChannelData() is a Float32Array, - // each element representing one sample. 
- const channelLeft = e.inputBuffer.getChannelData(0); - - this._encoder.postMessage({ - command: MAIN_THREAD_NEW_DATA_ARRIVED, - buf: channelLeft - }); - }; - logger.debug('AudioContext is set up.'); - }) - .catch(err => { - logger.error(`Error calling getUserMedia(): ${err}`); - - return Promise.reject(err); - }); - - // Because Promise constructor immediately executes the executor - // function. This is undesirable, we want callbackInitAudioContext to be - // executed only **after** promiseInitWorker is resolved. + // Arrow function is used here because we want AudioContext to be + // initialized only **after** promiseInitWorker is resolved. return promiseInitWorker - .then(callbackInitAudioContext); + .then(() => + this._initializeAudioContext( + micDeviceId, + this._onAudioProcess + )); + } + + /** + * Callback function for handling AudioProcessingEvents. + * + * @private + * @param {AudioProcessingEvent} e - The event containing the raw PCM. + * @returns {void} + */ + _onAudioProcess(e) { + // Delegates to the WebWorker to do the encoding. + // The return of getChannelData() is a Float32Array, + // each element representing one sample. + const channelLeft = e.inputBuffer.getChannelData(0); + + this._encoder.postMessage({ + command: MAIN_THREAD_NEW_DATA_ARRIVED, + buf: channelLeft + }); + } + + /** + * Handler for messages from flacEncodeWorker. + * + * @private + * @param {MessageEvent} e - The event sent by the WebWorker. + * @returns {void} + */ + _onWorkerMessage(e) { + switch (e.data.command) { + case WORKER_BLOB_READY: + // Received a Blob representing an encoded FLAC file. 
+            this._data = e.data.buf;
+            if (this._stopPromiseResolver !== null) {
+                this._stopPromiseResolver();
+                this._stopPromiseResolver = null;
+                this._encoder.terminate();
+                this._encoder = null;
+            }
+            break;
+        case DEBUG:
+            logger.log(e.data);
+            break;
+        case WORKER_LIBFLAC_READY:
+            logger.log('libflac is ready.');
+            this._initPromiseResolver();
+            break;
+        default:
+            logger.error(
+                'Unknown event from encoder (WebWorker): '
+                    + `"${e.data.command}"!`);
+            break;
+        }
     }
 
     /**
diff --git a/react/features/local-recording/recording/flac/flacEncodeWorker.js b/react/features/local-recording/recording/flac/flacEncodeWorker.js
index c7390d738..f3f861fed 100644
--- a/react/features/local-recording/recording/flac/flacEncodeWorker.js
+++ b/react/features/local-recording/recording/flac/flacEncodeWorker.js
@@ -6,6 +6,8 @@ import {
     WORKER_LIBFLAC_READY
 } from './messageTypes';
 
+const logger = require('jitsi-meet-logger').getLogger(__filename);
+
 /**
  * WebWorker that does FLAC encoding using libflac.js
  */
@@ -261,7 +263,7 @@ class Encoder {
             const errorNo
                 = Flac.FLAC__stream_encoder_get_state(this._encoderId);
 
-            console.error('Error during encoding', FLAC_ERRORS[errorNo]);
+            logger.error('Error during encoding', FLAC_ERRORS[errorNo]);
         }
     }
 
@@ -276,7 +278,7 @@ class Encoder {
         const status
             = Flac.FLAC__stream_encoder_finish(this._encoderId);
 
-        console.log('flac encoding finish: ', status);
+        logger.log('Flac encoding finished: ', status);
 
         // free up resources
         Flac.FLAC__stream_encoder_delete(this._encoderId);
@@ -370,9 +372,8 @@ self.onmessage = function(e) {
 
     case MAIN_THREAD_NEW_DATA_ARRIVED:
         if (encoder === null) {
-            console
-                .error('flacEncoderWorker:'
-                    + 'received data when the encoder is not ready.');
+            logger.error('flacEncoderWorker received data when the encoder is '
+                + 'not ready.');
         } else {
             encoder.encode(e.data.buf);
         }