jitsi-meet/react/features/local-recording/recording/WavAdapter.js


import { RecordingAdapter } from './RecordingAdapter';
import { downloadBlob, timestampString } from './Utils';
const logger = require('jitsi-meet-logger').getLogger(__filename);
const WAV_BITS_PER_SAMPLE = 16;
const WAV_SAMPLE_RATE = 44100;
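// At these settings, one second of mono audio occupies
// WAV_SAMPLE_RATE * WAV_BITS_PER_SAMPLE / 8 = 88200 bytes in the
// exported recording, i.e. roughly 5 MiB per minute.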
/**
* Recording adapter for raw WAVE format.
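*
* A minimal usage sketch (hypothetical driver code; in jitsi-meet the
* adapter is normally driven by the local-recording controller):
*
* @example
* const adapter = new WavAdapter();
*
* // micDeviceId: ID of the microphone to use (assumed to be available)
* adapter.start(micDeviceId)
*     .then(() => {
*         // ... record for a while, then:
*         return adapter.stop();
*     })
*     .then(() => adapter.download());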
*/
export class WavAdapter extends RecordingAdapter {
/**
* The current {@code MediaStream} instance.
*/
_stream = null;
/**
* {@code AudioContext} instance.
*/
_audioContext = null;
/**
* {@code ScriptProcessorNode} instance, which receives the raw PCM bits.
*/
_audioProcessingNode = null;
/**
* {@code MediaStreamAudioSourceNode} instance, which represents the mic.
*/
_audioSource = null;
/**
* Length of the recorded audio, in number of samples (each sample is
* stored as a Float32).
*/
_wavLength = 0;
/**
* The buffers that store the PCM bits: a {@code Uint8Array} WAV header
* followed by {@code Float32Array} audio chunks.
*/
_wavBuffers = [];
/**
* Whether or not the {@code WavAdapter} is in a ready state.
*/
_isInitialized = false;
/**
* Initialization promise.
*/
_initPromise = null;

/**
* The recorded audio file, as a {@code Blob}. Remains {@code null}
* until {@code stop()} has been invoked, which makes the null check
* in {@code download()} meaningful.
*/
_data = null;
/**
* Constructor.
*/
constructor() {
super();
this._saveWavPCM = this._saveWavPCM.bind(this);
}
/**
* Implements {@link RecordingAdapter#start()}.
*
* @inheritdoc
*/
start(micDeviceId) {
if (!this._initPromise) {
this._initPromise = this._initialize(micDeviceId);
}
return this._initPromise.then(() => {
this._wavBuffers = [];
this._wavLength = 0;
this._wavBuffers.push(this._createWavHeader());
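// Wire up the graph: mic source -> ScriptProcessorNode -> destination.
// The processor node must be connected to a destination, otherwise
// some browsers never fire its onaudioprocess callback.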
this._audioSource.connect(this._audioProcessingNode);
this._audioProcessingNode
.connect(this._audioContext.destination);
});
}
/**
* Implements {@link RecordingAdapter#stop()}.
*
* @inheritdoc
*/
stop() {
this._audioProcessingNode.disconnect();
this._audioSource.disconnect();
this._data = this._exportMonoWAV(this._wavBuffers, this._wavLength);
// Release the WebAudio resources and reset the init promise, so
// that a subsequent start() performs a full re-initialization.
this._initPromise = null;
this._audioContext = null;
this._audioProcessingNode = null;
this._audioSource = null;
this._isInitialized = false;
return Promise.resolve();
}
/**
* Implements {@link RecordingAdapter#download()}.
*
* @inheritdoc
*/
download() {
if (this._data !== null) {
const audioURL = window.URL.createObjectURL(this._data);
downloadBlob(audioURL, `recording${timestampString()}.wav`);
}
}
/**
* Implements {@link RecordingAdapter#setMuted()}.
*
* @inheritdoc
*/
setMuted(muted) {
const shouldEnable = !muted;
if (!this._stream) {
return Promise.resolve();
}
const track = this._stream.getAudioTracks()[0];
if (!track) {
logger.error('Cannot mute/unmute. Track not found!');
return Promise.resolve();
}
if (track.enabled !== shouldEnable) {
track.enabled = shouldEnable;
logger.log(muted ? 'Mute' : 'Unmute');
}
return Promise.resolve();
}
/**
* Implements {@link RecordingAdapter#setMicDevice()}.
*
* @inheritdoc
*/
setMicDevice(micDeviceId) {
return this._replaceMic(micDeviceId);
}
/**
* Replaces the current microphone MediaStream.
*
* @param {string} micDeviceId - The new microphone device ID.
* @returns {Promise}
*/
_replaceMic(micDeviceId) {
if (this._audioContext && this._audioProcessingNode) {
return new Promise((resolve, reject) => {
this._getAudioStream(micDeviceId).then(newStream => {
const newSource = this._audioContext
.createMediaStreamSource(newStream);
this._audioSource.disconnect();
newSource.connect(this._audioProcessingNode);
this._stream = newStream;
this._audioSource = newSource;
resolve();
})
.catch(err => {
reject(err);
});
});
}
return Promise.resolve();
}
/**
* Creates a 44-byte WAVE file header. The length fields are
* placeholders, filled in on export.
*
* @private
* @returns {Uint8Array}
*/
_createWavHeader() {
// adapted from
// https://github.com/mmig/speech-to-flac/blob/master/encoder.js
// ref: http://soundfile.sapp.org/doc/WaveFormat/
// create our WAVE file header
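// Byte layout of the canonical 44-byte header (multi-byte integer
// fields are little-endian):
//   0-3  'RIFF'        4-7  ChunkSize       8-11 'WAVE'
//  12-15 'fmt '       16-19 Subchunk1Size  20-21 AudioFormat
//  22-23 NumChannels  24-27 SampleRate     28-31 ByteRate
//  32-33 BlockAlign   34-35 BitsPerSample  36-39 'data'
//  40-43 Subchunk2Size (data length in bytes)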
const buffer = new ArrayBuffer(44);
const view = new DataView(buffer);
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
// ChunkSize at offset 4 is filled in on export
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// Subchunk1Size: 16 for PCM
view.setUint32(16, 16, true);
// AudioFormat: 1 means uncompressed linear PCM
view.setUint16(20, 1, true);
// NumChannels
view.setUint16(22, 1, true);
// SampleRate
view.setUint32(24, WAV_SAMPLE_RATE, true);
// ByteRate = SampleRate * NumChannels * BitsPerSample / 8
view.setUint32(28,
WAV_SAMPLE_RATE * 1 * WAV_BITS_PER_SAMPLE / 8, true);
// BlockAlign = NumChannels * BitsPerSample / 8
view.setUint16(32, 1 * WAV_BITS_PER_SAMPLE / 8, true);
// BitsPerSample
view.setUint16(34, WAV_BITS_PER_SAMPLE, true);
// data sub-chunk
writeUTFBytes(view, 36, 'data');
// DUMMY file length (set real value on export)
view.setUint32(4, 10, true);
// DUMMY data chunk length (set real value on export)
view.setUint32(40, 10, true);
return new Uint8Array(buffer);
}
/**
* Initializes the adapter.
*
* @private
* @param {string} micDeviceId - The current microphone device ID.
* @returns {Promise}
*/
_initialize(micDeviceId) {
if (this._isInitialized) {
return Promise.resolve();
}
const p = new Promise((resolve, reject) => {
this._getAudioStream(micDeviceId)
.then(stream => {
this._stream = stream;
this._audioContext = new AudioContext({
sampleRate: WAV_SAMPLE_RATE
});
this._audioSource
= this._audioContext.createMediaStreamSource(stream);
this._audioProcessingNode
= this._audioContext.createScriptProcessor(4096, 1, 1);
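// A 4096-sample buffer at 44.1 kHz means onaudioprocess fires
// roughly every 93 ms, each time delivering one buffer per
// configured channel (mono in / mono out here).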
this._audioProcessingNode.onaudioprocess = e => {
const channelLeft = e.inputBuffer.getChannelData(0);
// See: https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer/getChannelData
// The returned value is a Float32Array.
this._saveWavPCM(channelLeft);
};
this._isInitialized = true;
resolve();
})
.catch(err => {
logger.error(`Error calling getUserMedia(): ${err}`);
reject(err);
});
});
return p;
}
/**
* Callback function that saves the PCM bits.
*
* @private
* @param {Float32Array} data - The audio PCM data.
* @returns {void}
*/
_saveWavPCM(data) {
// Need to copy the Float32Array:
// unlike when posting to a WebWorker, this data is passed by
// reference, so it must be copied; otherwise the resulting audio
// file would just repeat the last segment.
this._wavBuffers.push(new Float32Array(data));
this._wavLength += data.length;
}
/**
* Combines the stored buffers and exports them as a WAV file.
*
* @private
* @param {Array} buffers - The stored buffers (index 0 is the header).
* @param {number} length - Total length, in number of samples.
* @returns {Blob}
*/
_exportMonoWAV(buffers, length) {
// buffers: array with
// buffers[0] = header information (with missing length information)
// buffers[1] = Float32Array object (audio data)
// ...
// buffers[n] = Float32Array object (audio data)
const dataLength = length * 2; // each sample = 16 bit = 2 bytes
const buffer = new ArrayBuffer(44 + dataLength);
const view = new DataView(buffer);
// copy WAV header data into the array buffer
const header = buffers[0];
const len = header.length;
for (let i = 0; i < len; ++i) {
view.setUint8(i, header[i]);
}
// RIFF ChunkSize: 36 + SubChunk2Size, per the canonical WAV layout
view.setUint32(4, 36 + dataLength, true);
// add data chunk length in header
view.setUint32(40, dataLength, true);
// write audio data
floatTo16BitPCM(view, 44, buffers);
return new Blob([ view ], { type: 'audio/wav' });
}
}
/**
* Helper function. Writes an ASCII string into a {@code DataView},
* one byte per character. Used for the four-character codes
* ('RIFF', 'WAVE', etc.) in WAVE headers.
*
* @param {DataView} view - The view into the output buffer.
* @param {number} offset - Byte offset to write at.
* @param {string} string - The string to be written.
* @returns {void}
*/
function writeUTFBytes(view, offset, string) {
const lng = string.length;
// write each character as a single byte, in sequence
for (let i = 0; i < lng; ++i) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
/**
* Helper function that converts Float32 audio samples to 16-bit PCM
* and writes them into a {@code DataView}.
*
* @param {DataView} output - The output buffer.
* @param {number} offset - The byte offset in the output buffer to write from.
* @param {Array} inputBuffers - The input buffers; index 0 holds the
* header and is skipped.
* @returns {void}
*/
function floatTo16BitPCM(output, offset, inputBuffers) {
let i, input, isize, s;
const jsize = inputBuffers.length;
let o = offset;
// The first entry is the header (already consumed in _exportMonoWAV);
// the remaining entries are Float32Arrays, so skip index 0.
for (let j = 1; j < jsize; ++j) {
input = inputBuffers[j];
isize = input.length;
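// Clamp each sample to [-1, 1], then scale it to the signed 16-bit
// range: negatives map to [-32768, 0), positives to [0, 32767].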
for (i = 0; i < isize; ++i, o += 2) {
s = Math.max(-1, Math.min(1, input[i]));
output.setInt16(o, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
}
}