// @flow

import React, { Component } from 'react';

import { translate } from '../../../../base/i18n';
import { IconMicrophoneEmpty, IconVolumeEmpty } from '../../../../base/icons';
import JitsiMeetJS from '../../../../base/lib-jitsi-meet';
import { equals } from '../../../../base/redux';
import { createLocalAudioTracks } from '../../../functions';

import AudioSettingsHeader from './AudioSettingsHeader';
import MicrophoneEntry from './MicrophoneEntry';
import SpeakerEntry from './SpeakerEntry';

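// Browser detection helper from lib-jitsi-meet; used below to skip creating
// preview audio tracks on Safari.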
const browser = JitsiMeetJS.util.browser;

export type Props = {

    /**
     * The deviceId of the microphone in use.
     */
    currentMicDeviceId: string,

    /**
     * The deviceId of the output device in use.
     */
    currentOutputDeviceId: string,

    /**
     * Used to set a new microphone as the current one.
     */
    setAudioInputDevice: Function,

    /**
     * Used to set a new output device as the current one.
     */
    setAudioOutputDevice: Function,

    /**
     * A list of objects containing the labels and deviceIds
     * of all the output devices.
     */
    outputDevices: Object[],

    /**
     * A list of objects containing the labels and deviceIds
     * of all the input devices.
     */
    microphoneDevices: Object[],

    /**
     * Invoked to obtain translated strings.
     */
    t: Function
};

type State = {

    /**
     * A list of objects, each containing the microphone label, audio track,
     * device id and, when applicable, the track error.
     */
    audioTracks: Object[]
}

/**
 * Implements a React {@link Component} which displays a list of all
 * the audio input & output devices to choose from.
 *
 * @extends Component
 */
class AudioSettingsContent extends Component<Props, State> {

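    /**
     * Whether the component has been unmounted while audio tracks were still
     * being created. Checked in {@code _setTracks} to avoid calling
     * {@code setState} after unmount and to dispose late-created tracks.
     */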
    _componentWasUnmounted: boolean;

    /**
     * Initializes a new {@code AudioSettingsContent} instance.
     *
     * @param {Object} props - The read-only properties with which the new
     * instance is to be initialized.
     */
    constructor(props) {
        super(props);

        this._onMicrophoneEntryClick = this._onMicrophoneEntryClick.bind(this);
        this._onSpeakerEntryClick = this._onSpeakerEntryClick.bind(this);

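        // The actual preview tracks are created asynchronously in _setTracks()
        // after the component mounts; until then every entry has no jitsiTrack.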
        this.state = {
            audioTracks: props.microphoneDevices.map(({ deviceId, label }) => {
                return {
                    deviceId,
                    hasError: false,
                    jitsiTrack: null,
                    label
                };
            })
        };
    }

    _onMicrophoneEntryClick: (string) => void;

    /**
     * Click handler for the microphone entries.
     *
     * @param {string} deviceId - The deviceId for the clicked microphone.
     * @returns {void}
     */
    _onMicrophoneEntryClick(deviceId) {
        this.props.setAudioInputDevice(deviceId);
    }

    _onSpeakerEntryClick: (string) => void;

    /**
     * Click handler for the speaker entries.
     *
     * @param {string} deviceId - The deviceId for the clicked speaker.
     * @returns {void}
     */
    _onSpeakerEntryClick(deviceId) {
        this.props.setAudioOutputDevice(deviceId);
    }

    /**
     * Renders a single microphone entry.
     *
     * @param {Object} data - An object with the deviceId, jitsiTrack, hasError
     * & label of the microphone.
     * @param {number} index - The index of the element, used for creating a key.
     * @returns {React$Node}
     */
    _renderMicrophoneEntry(data, index) {
        const { deviceId, label, jitsiTrack, hasError } = data;
        const isSelected = deviceId === this.props.currentMicDeviceId;

        return (
            <MicrophoneEntry
                deviceId = { deviceId }
                hasError = { hasError }
                isSelected = { isSelected }
                jitsiTrack = { jitsiTrack }
                key = { `me-${index}` }
                onClick = { this._onMicrophoneEntryClick }>
                {label}
            </MicrophoneEntry>
        );
    }

    /**
     * Renders a single speaker entry.
     *
     * @param {Object} data - An object with the deviceId and label of the speaker.
     * @param {number} index - The index of the element, used for creating a key.
     * @returns {React$Node}
     */
    _renderSpeakerEntry(data, index) {
        const { deviceId, label } = data;
        const key = `se-${index}`;

        return (
            <SpeakerEntry
                deviceId = { deviceId }
                isSelected = { deviceId === this.props.currentOutputDeviceId }
                key = { key }
                onClick = { this._onSpeakerEntryClick }>
                {label}
            </SpeakerEntry>
        );
    }

    /**
     * Creates and updates the audio tracks.
     *
     * @returns {void}
     */
    async _setTracks() {
        if (browser.isSafari()) {

            // At the time of this writing, creating audio tracks appears to block the browser's main
            // thread for a long time on Safari. It is unclear exactly which part of track creation does
            // the blocking, but skipping track creation makes the UI much more responsive.
            return;
        }

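        // Dispose any existing preview tracks before requesting new ones so the
        // previously captured devices are released first.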
        this._disposeTracks(this.state.audioTracks);

        const audioTracks = await createLocalAudioTracks(
            this.props.microphoneDevices
        );

        if (this._componentWasUnmounted) {
            this._disposeTracks(audioTracks);
        } else {
            this.setState({
                audioTracks
            });
        }
    }

    /**
     * Disposes the audio tracks.
     *
     * @param {Object[]} audioTracks - The audio tracks to be disposed.
     * @returns {void}
     */
    _disposeTracks(audioTracks) {
        audioTracks.forEach(({ jitsiTrack }) => {
            jitsiTrack && jitsiTrack.dispose();
        });
    }

    /**
     * Implements React's {@link Component#componentDidMount}.
     *
     * @inheritdoc
     */
    componentDidMount() {
        this._setTracks();
    }

    /**
     * Implements React's {@link Component#componentWillUnmount}.
     *
     * @inheritdoc
     */
    componentWillUnmount() {
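        // Set the flag first so that a track creation still in flight disposes
        // its result instead of calling setState (see _setTracks).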
        this._componentWasUnmounted = true;
        this._disposeTracks(this.state.audioTracks);
    }

    /**
     * Implements React's {@link Component#componentDidUpdate}.
     *
     * @inheritdoc
     */
    componentDidUpdate(prevProps) {
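        // Only re-create the preview tracks when the list of available
        // microphones has actually changed (equals does a deep comparison).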
        if (!equals(this.props.microphoneDevices, prevProps.microphoneDevices)) {
            this._setTracks();
        }
    }

    /**
     * Implements React's {@link Component#render}.
     *
     * @inheritdoc
     */
    render() {
        const { outputDevices, t } = this.props;

        return (
            <div>
                <div className = 'audio-preview-content'>
                    <AudioSettingsHeader
                        IconComponent = { IconMicrophoneEmpty }
                        text = { t('settings.microphones') } />
                    {this.state.audioTracks.map((data, i) =>
                        this._renderMicrophoneEntry(data, i),
                    )}
                    { outputDevices.length > 0 && (
                        <AudioSettingsHeader
                            IconComponent = { IconVolumeEmpty }
                            text = { t('settings.speakers') } />)
                    }
                    {outputDevices.map((data, i) =>
                        this._renderSpeakerEntry(data, i),
                    )}
                </div>
            </div>
        );
    }
}

export default translate(AudioSettingsContent);
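
// Example usage (illustrative sketch; micDeviceId, outputDeviceId, onSelectMic
// and onSelectSpeaker are placeholders supplied by the parent, and `t` is
// injected by the translate() HOC):
//
//     <AudioSettingsContent
//         currentMicDeviceId = { micDeviceId }
//         currentOutputDeviceId = { outputDeviceId }
//         microphoneDevices = { [ { deviceId: 'default', label: 'Built-in Microphone' } ] }
//         outputDevices = { [ { deviceId: 'default', label: 'Built-in Output' } ] }
//         setAudioInputDevice = { onSelectMic }
//         setAudioOutputDevice = { onSelectSpeaker } />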