fix(tracks): Do not signal muted audio tracks.
Do not add the muted audio tracks to peerconnection until the user unmutes the first time. This applies to startSilent, startWithAudioMuted and startAudioMuted/startVideoMuted config.js settings.
This commit is contained in:
parent
d93a402cc2
commit
dcda89012e
|
@ -99,6 +99,7 @@ import {
|
|||
destroyLocalTracks,
|
||||
getLocalJitsiAudioTrack,
|
||||
getLocalJitsiVideoTrack,
|
||||
getLocalTracks,
|
||||
isLocalCameraTrackMuted,
|
||||
isLocalTrackMuted,
|
||||
isUserInteractionRequiredForUnmute,
|
||||
|
@ -473,18 +474,13 @@ export default {
|
|||
*/
|
||||
createInitialLocalTracks(options = {}) {
|
||||
const errors = {};
|
||||
|
||||
// Always get a handle on the audio input device so that we have statistics (such as "No audio input" or
|
||||
// "Are you trying to speak?" ) even if the user joins the conference muted.
|
||||
const initialDevices = config.disableInitialGUM ? [] : [ 'audio' ];
|
||||
const requestedAudio = !config.disableInitialGUM;
|
||||
let requestedVideo = false;
|
||||
|
||||
// Always get a handle on the audio input device so that we have statistics even if the user joins the
|
||||
// conference muted. Previous implementation would only acquire the handle when the user first unmuted,
|
||||
// which would result in statistics (such as "No audio input" or "Are you trying to speak?") being available
|
||||
// only after that point.
|
||||
if (options.startWithAudioMuted) {
|
||||
this.muteAudio(true, true);
|
||||
}
|
||||
|
||||
if (!config.disableInitialGUM
|
||||
&& !options.startWithVideoMuted
|
||||
&& !options.startAudioOnly
|
||||
|
@ -824,12 +820,16 @@ export default {
|
|||
return this._setLocalAudioVideoStreams(tracks);
|
||||
}
|
||||
|
||||
const [ tracks, con ] = await this.createInitialLocalTracksAndConnect(
|
||||
roomName, initialOptions);
|
||||
const [ tracks, con ] = await this.createInitialLocalTracksAndConnect(roomName, initialOptions);
|
||||
let localTracks = tracks;
|
||||
|
||||
this._initDeviceList(true);
|
||||
|
||||
return this.startConference(con, tracks);
|
||||
if (initialOptions.startWithAudioMuted) {
|
||||
localTracks = localTracks.filter(track => track.getType() !== MEDIA_TYPE.AUDIO);
|
||||
}
|
||||
|
||||
return this.startConference(con, localTracks);
|
||||
},
|
||||
|
||||
/**
|
||||
|
@ -1320,7 +1320,11 @@ export default {
|
|||
this._getConferenceOptions());
|
||||
|
||||
APP.store.dispatch(conferenceWillJoin(room));
|
||||
this._setLocalAudioVideoStreams(localTracks);
|
||||
|
||||
// Filter out the tracks that are muted.
|
||||
const tracks = localTracks.filter(track => !track.isMuted());
|
||||
|
||||
this._setLocalAudioVideoStreams(tracks);
|
||||
this._room = room; // FIXME do not use this
|
||||
|
||||
sendLocalParticipant(APP.store, room);
|
||||
|
@ -2163,8 +2167,26 @@ export default {
|
|||
}
|
||||
);
|
||||
room.on(JitsiConferenceEvents.STARTED_MUTED, () => {
|
||||
(room.isStartAudioMuted() || room.isStartVideoMuted())
|
||||
&& APP.UI.notifyInitiallyMuted();
|
||||
const audioMuted = room.isStartAudioMuted();
|
||||
const videoMuted = room.isStartVideoMuted();
|
||||
const localTracks = getLocalTracks(APP.store.getState()['features/base/tracks']);
|
||||
const promises = [];
|
||||
|
||||
APP.store.dispatch(setAudioMuted(audioMuted));
|
||||
APP.store.dispatch(setVideoMuted(videoMuted));
|
||||
|
||||
// Remove the tracks from the peerconnection.
|
||||
for (const track of localTracks) {
|
||||
if (audioMuted && track.jitsiTrack?.getType() === MEDIA_TYPE.AUDIO) {
|
||||
promises.push(this.useAudioStream(null));
|
||||
}
|
||||
if (videoMuted && track.jitsiTrack?.getType() === MEDIA_TYPE.VIDEO) {
|
||||
promises.push(this.useVideoStream(null));
|
||||
}
|
||||
}
|
||||
|
||||
Promise.allSettled(promises)
|
||||
.then(() => APP.UI.notifyInitiallyMuted());
|
||||
});
|
||||
|
||||
room.on(
|
||||
|
|
|
@ -10,7 +10,7 @@ import { getName } from '../../app/functions';
|
|||
import { endpointMessageReceived } from '../../subtitles';
|
||||
import { JITSI_CONNECTION_CONFERENCE_KEY } from '../connection';
|
||||
import { JitsiConferenceEvents } from '../lib-jitsi-meet';
|
||||
import { setAudioMuted, setVideoMuted } from '../media';
|
||||
import { MEDIA_TYPE, setAudioMuted, setVideoMuted } from '../media';
|
||||
import {
|
||||
dominantSpeakerChanged,
|
||||
getLocalParticipant,
|
||||
|
@ -22,7 +22,7 @@ import {
|
|||
participantRoleChanged,
|
||||
participantUpdated
|
||||
} from '../participants';
|
||||
import { getLocalTracks, trackAdded, trackRemoved } from '../tracks';
|
||||
import { getLocalTracks, replaceLocalTrack, trackAdded, trackRemoved } from '../tracks';
|
||||
import {
|
||||
getBackendSafePath,
|
||||
getBackendSafeRoomName,
|
||||
|
@ -72,10 +72,11 @@ declare var APP: Object;
|
|||
*
|
||||
* @param {JitsiConference} conference - The JitsiConference instance.
|
||||
* @param {Dispatch} dispatch - The Redux dispatch function.
|
||||
* @param {Object} state - The Redux state.
|
||||
* @private
|
||||
* @returns {void}
|
||||
*/
|
||||
function _addConferenceListeners(conference, dispatch) {
|
||||
function _addConferenceListeners(conference, dispatch, state) {
|
||||
// A simple logger for conference errors received through
|
||||
// the listener. These errors are not handled now, but logged.
|
||||
conference.on(JitsiConferenceEvents.CONFERENCE_ERROR,
|
||||
|
@ -118,13 +119,12 @@ function _addConferenceListeners(conference, dispatch) {
|
|||
conference.on(
|
||||
JitsiConferenceEvents.STARTED_MUTED,
|
||||
() => {
|
||||
const audioMuted = Boolean(conference.startAudioMuted);
|
||||
const videoMuted = Boolean(conference.startVideoMuted);
|
||||
const audioMuted = Boolean(conference.isStartAudioMuted());
|
||||
const videoMuted = Boolean(conference.isStartVideoMuted());
|
||||
const localTracks = getLocalTracks(state['features/base/tracks']);
|
||||
|
||||
sendAnalytics(createStartMutedConfigurationEvent(
|
||||
'remote', audioMuted, videoMuted));
|
||||
logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${
|
||||
videoMuted ? 'video' : ''}`);
|
||||
sendAnalytics(createStartMutedConfigurationEvent('remote', audioMuted, videoMuted));
|
||||
logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${videoMuted ? 'video' : ''}`);
|
||||
|
||||
// XXX Jicofo tells lib-jitsi-meet to start with audio and/or video
|
||||
// muted i.e. Jicofo expresses an intent. Lib-jitsi-meet has turned
|
||||
|
@ -136,6 +136,14 @@ function _addConferenceListeners(conference, dispatch) {
|
|||
// acting on Jicofo's intent without the app's knowledge.
|
||||
dispatch(setAudioMuted(audioMuted));
|
||||
dispatch(setVideoMuted(videoMuted));
|
||||
|
||||
// Remove the tracks from peerconnection as well.
|
||||
for (const track of localTracks) {
|
||||
if ((audioMuted && track.jitsiTrack.getType() === MEDIA_TYPE.AUDIO)
|
||||
|| (videoMuted && track.jitsiTrack.getType() === MEDIA_TYPE.VIDEO)) {
|
||||
replaceLocalTrack(track.jitsiTrack, null, conference);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Dispatches into features/base/tracks follow:
|
||||
|
@ -448,7 +456,7 @@ export function createConference() {
|
|||
|
||||
dispatch(_conferenceWillJoin(conference));
|
||||
|
||||
_addConferenceListeners(conference, dispatch);
|
||||
_addConferenceListeners(conference, dispatch, state);
|
||||
|
||||
sendLocalParticipant(state, conference);
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@ import { SET_AUDIO_MUTED, SET_VIDEO_MUTED } from '../base/media';
|
|||
import { MiddlewareRegistry } from '../base/redux';
|
||||
import { updateSettings } from '../base/settings';
|
||||
import {
|
||||
getLocalVideoTrack,
|
||||
getLocalTracks,
|
||||
replaceLocalTrack,
|
||||
TRACK_ADDED,
|
||||
TRACK_NO_DATA_FROM_SOURCE
|
||||
|
@ -33,7 +33,7 @@ MiddlewareRegistry.register(store => next => async action => {
|
|||
const { getState, dispatch } = store;
|
||||
const state = getState();
|
||||
const { userSelectedSkipPrejoin } = state['features/prejoin'];
|
||||
const localVideoTrack = getLocalVideoTrack(state['features/base/tracks']);
|
||||
const localTracks = getLocalTracks(state['features/base/tracks']);
|
||||
const { options } = action;
|
||||
|
||||
options && store.dispatch(updateConfig(options));
|
||||
|
@ -42,13 +42,13 @@ MiddlewareRegistry.register(store => next => async action => {
|
|||
userSelectedSkipPrejoin
|
||||
}));
|
||||
|
||||
if (localVideoTrack?.muted) {
|
||||
await dispatch(replaceLocalTrack(localVideoTrack.jitsiTrack, null));
|
||||
// Do not signal audio/video tracks if the user joins muted.
|
||||
for (const track of localTracks) {
|
||||
if (track.muted) {
|
||||
await dispatch(replaceLocalTrack(track.jitsiTrack, null));
|
||||
}
|
||||
}
|
||||
|
||||
const jitsiTracks = getState()['features/base/tracks']
|
||||
.map(t => t.jitsiTrack)
|
||||
.filter(t => Boolean(t)); // Filter out GUM in progress tracks...
|
||||
const jitsiTracks = localTracks.map(t => t.jitsiTrack);
|
||||
|
||||
dispatch(setPrejoinPageVisibility(false));
|
||||
APP.conference.prejoinStart(jitsiTracks);
|
||||
|
|
Loading…
Reference in New Issue