2018-06-19 16:43:33 +00:00
|
|
|
import { RecordingAdapter } from './RecordingAdapter';
|
|
|
|
import { downloadBlob, timestampString } from './Utils';
|
|
|
|
|
|
|
|
const logger = require('jitsi-meet-logger').getLogger(__filename);
|
|
|
|
|
|
|
|
// Number of bits per PCM sample in the generated WAV file (16-bit signed).
const WAV_BITS_PER_SAMPLE = 16;

// Nominal sample rate (Hz) for the WAV output.
const WAV_SAMPLE_RATE = 44100;
|
|
|
|
|
|
|
|
/**
 * Recording adapter for raw WAVE format.
 */
export class WavAdapter extends RecordingAdapter {

    /**
     * The current {@code MediaStream} instance.
     */
    _stream = null;

    /**
     * {@code AudioContext} instance.
     */
    _audioContext = null;

    /**
     * {@code ScriptProcessorNode} instance, which receives the raw PCM bits.
     */
    _audioProcessingNode = null;

    /**
     * {@code MediaStreamAudioSourceNode} instance, which represents the mic.
     */
    _audioSource = null;

    /**
     * Length of the WAVE file, in units of {@code sizeof(Float32)}.
     */
    _wavLength = 0;

    /**
     * The {@code ArrayBuffer}s that store the PCM bits.
     */
    _wavBuffers = [];

    /**
     * Whether or not the {@code WavAdapter} is in a ready state.
     */
    _isInitialized = false;

    /**
     * Initialization promise.
     */
    _initPromise = null;

    /**
     * The recorded audio, as a WAV {@code Blob}. Populated by
     * {@code stop()}; {@code null} until a recording has been stopped.
     */
    _data = null;

    /**
     * Sample rate (Hz) written into the WAV header. Defaults to
     * {@code WAV_SAMPLE_RATE} and is replaced by the actual
     * {@code AudioContext} sample rate during initialization, since browsers
     * capture at a device-dependent rate (commonly 48000 Hz) and a mismatch
     * makes the recording play back at the wrong speed.
     */
    _sampleRate = WAV_SAMPLE_RATE;

    /**
     * Constructor.
     */
    constructor() {
        super();

        // Bound once so it can be used as the onaudioprocess callback.
        this._saveWavPCM = this._saveWavPCM.bind(this);
    }

    /**
     * Implements {@link RecordingAdapter#start()}.
     *
     * @inheritdoc
     */
    start(micDeviceId) {
        if (!this._initPromise) {
            this._initPromise = this._initialize(micDeviceId);
        }

        return this._initPromise.then(() => {
            // Reset any previous recording state and emit a fresh header
            // as the first buffer; its length fields are patched on export.
            this._wavBuffers = [];
            this._wavLength = 0;
            this._wavBuffers.push(this._createWavHeader());

            this._audioSource.connect(this._audioProcessingNode);
            this._audioProcessingNode
                .connect(this._audioContext.destination);
        });
    }

    /**
     * Implements {@link RecordingAdapter#stop()}.
     *
     * @inheritdoc
     */
    stop() {
        this._audioProcessingNode.disconnect();
        this._audioSource.disconnect();
        this._data = this._exportMonoWAV(this._wavBuffers, this._wavLength);
        this._audioContext = null;
        this._audioProcessingNode = null;
        this._audioSource = null;
        this._isInitialized = false;

        // Clear the resolved init promise so a subsequent start() performs a
        // full re-initialization instead of reusing the torn-down nodes
        // (otherwise start() would call .connect() on null).
        this._initPromise = null;

        return Promise.resolve();
    }

    /**
     * Implements {@link RecordingAdapter#download()}.
     *
     * @inheritdoc
     */
    download() {
        // _data is null until stop() has produced a recording.
        if (this._data !== null) {
            const audioURL = window.URL.createObjectURL(this._data);

            downloadBlob(audioURL, `recording${timestampString()}.wav`);
        }
    }

    /**
     * Implements {@link RecordingAdapter#setMuted()}.
     *
     * @inheritdoc
     */
    setMuted(muted) {
        const shouldEnable = !muted;

        if (!this._stream) {
            return Promise.resolve();
        }

        const track = this._stream.getAudioTracks()[0];

        if (!track) {
            logger.error('Cannot mute/unmute. Track not found!');

            return Promise.resolve();
        }

        if (track.enabled !== shouldEnable) {
            track.enabled = shouldEnable;
            logger.log(muted ? 'Mute' : 'Unmute');
        }

        return Promise.resolve();
    }

    /**
     * Creates a WAVE file header.
     *
     * @private
     * @returns {Uint8Array}
     */
    _createWavHeader() {
        // adapted from
        // https://github.com/mmig/speech-to-flac/blob/master/encoder.js

        // ref: http://soundfile.sapp.org/doc/WaveFormat/

        // create our WAVE file header
        const buffer = new ArrayBuffer(44);
        const view = new DataView(buffer);

        // RIFF chunk descriptor
        writeUTFBytes(view, 0, 'RIFF');

        // set file size at the end
        writeUTFBytes(view, 8, 'WAVE');

        // FMT sub-chunk
        writeUTFBytes(view, 12, 'fmt ');

        // Subchunk1Size: 16 for PCM.
        view.setUint32(16, 16, true);

        // AudioFormat: 1 means linear PCM.
        view.setUint16(20, 1, true);

        // NumChannels: mono.
        view.setUint16(22, 1, true);

        // SampleRate: the actual capture rate recorded in _initialize().
        view.setUint32(24, this._sampleRate, true);

        // ByteRate = SampleRate * NumChannels * BitsPerSample / 8.
        view.setUint32(28,
            this._sampleRate * 1 * WAV_BITS_PER_SAMPLE / 8, true);

        // BlockAlign = NumChannels * BitsPerSample / 8.
        view.setUint16(32, 1 * WAV_BITS_PER_SAMPLE / 8, true);

        // BitsPerSample.
        view.setUint16(34, WAV_BITS_PER_SAMPLE, true);

        // data sub-chunk
        writeUTFBytes(view, 36, 'data');

        // DUMMY file length (set real value on export)
        view.setUint32(4, 10, true);

        // DUMMY data chunk length (set real value on export)
        view.setUint32(40, 10, true);

        return new Uint8Array(buffer);
    }

    /**
     * Initialize the adapter.
     *
     * @private
     * @param {string} micDeviceId - The current microphone device ID.
     * @returns {Promise}
     */
    _initialize(micDeviceId) {
        if (this._isInitialized) {
            return Promise.resolve();
        }

        // Return the chain directly instead of wrapping it in a new Promise.
        return this._getAudioStream(micDeviceId)
            .then(stream => {
                this._stream = stream;
                this._audioContext = new AudioContext();

                // Remember the actual capture rate; it is browser/device
                // dependent and must match the rate written into the WAV
                // header, otherwise playback speed is wrong.
                this._sampleRate = this._audioContext.sampleRate;

                this._audioSource
                    = this._audioContext.createMediaStreamSource(stream);
                this._audioProcessingNode
                    = this._audioContext.createScriptProcessor(4096, 1, 1);
                this._audioProcessingNode.onaudioprocess = e => {
                    // See: https://developer.mozilla.org/en-US/docs/Web/API/
                    // AudioBuffer/getChannelData
                    // The returned value is an Float32Array.
                    const channelLeft = e.inputBuffer.getChannelData(0);

                    this._saveWavPCM(channelLeft);
                };
                this._isInitialized = true;
            })
            .catch(err => {
                logger.error(`Error calling getUserMedia(): ${err}`);

                // Propagate the original error to the caller.
                return Promise.reject(err);
            });
    }

    /**
     * Callback function that saves the PCM bits.
     *
     * @private
     * @param {Float32Array} data - The audio PCM data.
     * @returns {void}
     */
    _saveWavPCM(data) {
        // Need to copy the Float32Array:
        // unlike passing to WebWorker, this data is passed by reference,
        // so we need to copy it, otherwise the resulting audio file will be
        // just repeating the last segment.
        this._wavBuffers.push(new Float32Array(data));
        this._wavLength += data.length;
    }

    /**
     * Combines buffers and export to a wav file.
     *
     * @private
     * @param {*} buffers - The stored buffers.
     * @param {*} length - Total length (in units of {@code sizeof(Float32)}).
     * @returns {Blob}
     */
    _exportMonoWAV(buffers, length) {
        // buffers: array with
        // buffers[0] = header information (with missing length information)
        // buffers[1] = Float32Array object (audio data)
        // ...
        // buffers[n] = Float32Array object (audio data)

        // Each Float32 sample becomes one 16-bit sample, i.e. 2 bytes.
        const dataLength = length * 2;
        const buffer = new ArrayBuffer(44 + dataLength);
        const view = new DataView(buffer);

        // copy WAV header data into the array buffer
        const header = buffers[0];
        const len = header.length;

        for (let i = 0; i < len; ++i) {
            view.setUint8(i, header[i]);
        }

        // RIFF ChunkSize = 36 + SubChunk2Size, per the WAVE spec
        // (http://soundfile.sapp.org/doc/WaveFormat/).
        view.setUint32(4, 36 + dataLength, true);

        // add data chunk length in header
        view.setUint32(40, dataLength, true);

        // write audio data
        floatTo16BitPCM(view, 44, buffers);

        return new Blob([ view ], { type: 'audio/wav' });
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Helper function. Writes an ASCII string into a {@code DataView}, one byte
 * per character, in string order. Required by WAVE headers.
 *
 * @param {DataView} view - The view over the destination buffer.
 * @param {*} offset - Byte offset at which writing starts.
 * @param {*} string - The string to be written.
 * @returns {void}
 */
function writeUTFBytes(view, offset, string) {
    // One byte per character, written sequentially from offset.
    for (let i = 0; i < string.length; ++i) {
        view.setUint8(offset + i, string.charCodeAt(i));
    }
}
|
|
|
|
|
|
|
|
/**
 * Helper function for converting Float32Array samples to little-endian
 * 16-bit signed PCM written into a {@code DataView}.
 *
 * @param {*} output - The output buffer.
 * @param {*} offset - The offset in output buffer to write from.
 * @param {*} inputBuffers - The input buffers.
 * @returns {void}
 */
function floatTo16BitPCM(output, offset, inputBuffers) {
    let writePos = offset;

    // first entry is header information (already used in exportMonoWAV),
    // rest is Float32Array-entries -> ignore header entry
    for (let j = 1; j < inputBuffers.length; ++j) {
        const chunk = inputBuffers[j];

        for (let i = 0; i < chunk.length; ++i) {
            // Clamp to [-1, 1] and scale asymmetrically so that -1 maps to
            // -0x8000 and +1 maps to +0x7FFF.
            const sample = Math.max(-1, Math.min(1, chunk[i]));

            output.setInt16(
                writePos,
                sample < 0 ? sample * 0x8000 : sample * 0x7FFF,
                true);
            writePos += 2;
        }
    }
}
|