diff --git a/conference.js b/conference.js
index 0db7b7cdc..e4297d7f7 100644
--- a/conference.js
+++ b/conference.js
@@ -38,8 +38,12 @@ import {
isFatalJitsiConnectionError
} from './react/features/base/lib-jitsi-meet';
import {
+ isVideoMutedByUser,
+ MEDIA_TYPE,
setAudioAvailable,
- setVideoAvailable
+ setAudioMuted,
+ setVideoAvailable,
+ setVideoMuted
} from './react/features/base/media';
import {
localParticipantConnectionStatusChanged,
@@ -54,6 +58,7 @@ import {
} from './react/features/base/participants';
import {
createLocalTracks,
+ isLocalTrackMuted,
replaceLocalTrack,
trackAdded,
trackRemoved
@@ -87,7 +92,6 @@ const eventEmitter = new EventEmitter();
let room;
let connection;
-let localAudio, localVideo;
/*
* Logic to open a desktop picker put on the window global for
@@ -134,7 +138,7 @@ function connect(roomName) {
* @param {string} value new value
*/
function sendData(command, value) {
- if(!room) {
+ if (!room) {
return;
}
@@ -184,47 +188,18 @@ function getDisplayName(id) {
/**
* Mute or unmute local audio stream if it exists.
* @param {boolean} muted - if audio stream should be muted or unmuted.
- *
- * @returns {Promise} resolved in case mute/unmute operations succeeds or
- * rejected with an error if something goes wrong. It is expected that often
- * the error will be of the {@link JitsiTrackError} type, but it's not
- * guaranteed.
*/
function muteLocalAudio(muted) {
- return muteLocalMedia(localAudio, muted);
-}
-
-/**
- * Mute or unmute local media stream if it exists.
- * @param {JitsiLocalTrack} localTrack
- * @param {boolean} muted
- *
- * @returns {Promise} resolved in case mute/unmute operations succeeds or
- * rejected with an error if something goes wrong. It is expected that often
- * the error will be of the {@link JitsiTrackError} type, but it's not
- * guaranteed.
- */
-function muteLocalMedia(localTrack, muted) {
- if (!localTrack) {
- return Promise.resolve();
- }
-
- const method = muted ? 'mute' : 'unmute';
-
- return localTrack[method]();
+ APP.store.dispatch(setAudioMuted(muted));
}
/**
* Mute or unmute local video stream if it exists.
* @param {boolean} muted if video stream should be muted or unmuted.
*
- * @returns {Promise} resolved in case mute/unmute operations succeeds or
- * rejected with an error if something goes wrong. It is expected that often
- * the error will be of the {@link JitsiTrackError} type, but it's not
- * guaranteed.
*/
function muteLocalVideo(muted) {
- return muteLocalMedia(localVideo, muted);
+ APP.store.dispatch(setVideoMuted(muted));
}
/**
@@ -458,8 +433,6 @@ export default {
*/
_localTracksInitialized: false,
isModerator: false,
- audioMuted: false,
- videoMuted: false,
isSharingScreen: false,
/**
* Indicates if the desktop sharing functionality has been enabled.
@@ -491,6 +464,21 @@ export default {
*/
isDominantSpeaker: false,
+ /**
+ * The local audio track (if any).
+ * FIXME tracks from redux store should be the single source of truth
+ * @type {JitsiLocalTrack|null}
+ */
+ localAudio: null,
+
+ /**
+ * The local video track (if any).
+ * FIXME tracks from redux store should be the single source of truth, but
+ * more refactoring is required around screen sharing ('localVideo' usages).
+ * @type {JitsiLocalTrack|null}
+ */
+ localVideo: null,
+
/**
* Creates local media tracks and connects to a room. Will show error
* dialogs in case accessing the local microphone and/or camera failed. Will
@@ -655,13 +643,13 @@ export default {
init(options) {
this.roomName = options.roomName;
// attaches global error handler, if there is already one, respect it
- if(JitsiMeetJS.getGlobalOnErrorHandler){
+ if (JitsiMeetJS.getGlobalOnErrorHandler) {
var oldOnErrorHandler = window.onerror;
window.onerror = function (message, source, lineno, colno, error) {
JitsiMeetJS.getGlobalOnErrorHandler(
message, source, lineno, colno, error);
- if(oldOnErrorHandler)
+ if (oldOnErrorHandler)
oldOnErrorHandler(message, source, lineno, colno, error);
};
@@ -671,7 +659,7 @@ export default {
JitsiMeetJS.getGlobalOnErrorHandler(
null, null, null, null, event.reason);
- if(oldOnUnhandledRejection)
+ if (oldOnUnhandledRejection)
oldOnUnhandledRejection(event);
};
}
@@ -690,9 +678,10 @@ export default {
});
}).then(([tracks, con]) => {
tracks.forEach(track => {
- if (track.isAudioTrack() && this.audioMuted) {
+ if (track.isAudioTrack() && this.isLocalAudioMuted()) {
track.mute();
- } else if (track.isVideoTrack() && this.videoMuted) {
+ } else if (track.isVideoTrack()
+ && this.isLocalVideoMuted()) {
track.mute();
}
});
@@ -731,12 +720,10 @@ export default {
// to the conference
if (!tracks.find((t) => t.isAudioTrack())) {
this.setAudioMuteStatus(true);
- APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
}
if (!tracks.find((t) => t.isVideoTrack())) {
this.setVideoMuteStatus(true);
- APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
}
this._initDeviceList();
@@ -759,6 +746,20 @@ export default {
isLocalId(id) {
return this.getMyUserId() === id;
},
+
+ /**
+ * Tells whether the local video is muted or not.
+ * @returns {boolean}
+ */
+ isLocalVideoMuted() {
+ // If the tracks are not ready, read from base/media state
+ return this._localTracksInitialized
+ ? isLocalTrackMuted(
+ APP.store.getState()['features/base/tracks'],
+ MEDIA_TYPE.VIDEO)
+ : isVideoMutedByUser(APP.store);
+ },
+
/**
* Simulates toolbar button click for audio mute. Used by shortcuts and API.
* @param {boolean} mute true for mute and false for unmute.
@@ -768,38 +769,31 @@ export default {
muteAudio(mute, showUI = true) {
// Not ready to modify track's state yet
if (!this._localTracksInitialized) {
+ // This will only modify base/media.audio.muted, which is then synced
+ // up with the track at the end of local tracks initialization.
+ muteLocalAudio(mute);
this.setAudioMuteStatus(mute);
+
return;
- } else if (localAudio && localAudio.isMuted() === mute) {
+ } else if (this.isLocalAudioMuted() === mute) {
// NO-OP
return;
}
- const maybeShowErrorDialog = (error) => {
- if (showUI) {
- APP.UI.showMicErrorNotification(error);
- }
- };
-
- if (!localAudio && this.audioMuted && !mute) {
+ if (!this.localAudio && !mute) {
createLocalTracks({ devices: ['audio'] }, false)
.then(([audioTrack]) => audioTrack)
.catch(error => {
- maybeShowErrorDialog(error);
+ if (showUI) {
+ APP.UI.showMicErrorNotification(error);
+ }
// Rollback the audio muted status by using null track
return null;
})
.then(audioTrack => this.useAudioStream(audioTrack));
} else {
- const oldMutedStatus = this.audioMuted;
-
- muteLocalAudio(mute)
- .catch(error => {
- maybeShowErrorDialog(error);
- this.setAudioMuteStatus(oldMutedStatus);
- APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
- });
+ muteLocalAudio(mute);
}
},
/**
@@ -807,7 +801,13 @@ export default {
* @returns {boolean}
*/
isLocalAudioMuted() {
- return this.audioMuted;
+ // If the tracks are not ready, read from base/media state
+ return this._localTracksInitialized
+ ? isLocalTrackMuted(
+ APP.store.getState()['features/base/tracks'],
+ MEDIA_TYPE.AUDIO)
+ : Boolean(
+ APP.store.getState()['features/base/media'].audio.muted);
},
/**
* Simulates toolbar button click for audio mute. Used by shortcuts
@@ -816,7 +816,7 @@ export default {
* dialogs in case of media permissions error.
*/
toggleAudioMuted(showUI = true) {
- this.muteAudio(!this.audioMuted, showUI);
+ this.muteAudio(!this.isLocalAudioMuted(), showUI);
},
/**
* Simulates toolbar button click for video mute. Used by shortcuts and API.
@@ -825,12 +825,15 @@ export default {
* dialogs in case of media permissions error.
*/
muteVideo(mute, showUI = true) {
- // Not ready to modify track's state yet
+ // If not ready to modify the track's state yet, adjust base/media instead
if (!this._localTracksInitialized) {
+ // This will only modify base/media.video.muted, which is then synced
+ // up with the track at the end of local tracks initialization.
+ muteLocalVideo(mute);
this.setVideoMuteStatus(mute);
return;
- } else if (localVideo && localVideo.isMuted() === mute) {
+ } else if (this.isLocalVideoMuted() === mute) {
// NO-OP
return;
}
@@ -841,7 +844,10 @@ export default {
}
};
- if (!localVideo && this.videoMuted && !mute) {
+ // FIXME it is possible to queue this task twice, but it's not causing
+ // any issues. Specifically, this can happen when the previous
+ // getUserMedia call is blocked on the "ask user for permissions" dialog.
+ if (!this.localVideo && !mute) {
// Try to create local video if there wasn't any.
// This handles the case when user joined with no video
// (dismissed screen sharing screen or in audio only mode), but
@@ -861,14 +867,8 @@ export default {
})
.then(videoTrack => this.useVideoStream(videoTrack));
} else {
- const oldMutedStatus = this.videoMuted;
-
- muteLocalVideo(mute)
- .catch(error => {
- maybeShowErrorDialog(error);
- this.setVideoMuteStatus(oldMutedStatus);
- APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
- });
+ // FIXME show error dialog if it fails (should be handled by react)
+ muteLocalVideo(mute);
}
},
/**
@@ -877,7 +877,7 @@ export default {
* dialogs in case of media permissions error.
*/
toggleVideoMuted(showUI = true) {
- this.muteVideo(!this.videoMuted, showUI);
+ this.muteVideo(!this.isLocalVideoMuted(), showUI);
},
/**
* Retrieve list of conference participants (without local user).
@@ -1202,7 +1202,7 @@ export default {
_getConferenceOptions() {
let options = config;
- if(config.enableRecording && !config.recordingType) {
+ if (config.enableRecording && !config.recordingType) {
options.recordingType = (config.hosts &&
(typeof config.hosts.jirecon != "undefined"))?
"jirecon" : "colibri";
@@ -1219,20 +1219,18 @@ export default {
*/
useVideoStream(newStream) {
return APP.store.dispatch(
- replaceLocalTrack(localVideo, newStream, room))
+ replaceLocalTrack(this.localVideo, newStream, room))
.then(() => {
- localVideo = newStream;
+ this.localVideo = newStream;
+
if (newStream) {
- this.setVideoMuteStatus(newStream.isMuted());
this.isSharingScreen = newStream.videoType === 'desktop';
APP.UI.addLocalStream(newStream);
} else {
- // No video is treated the same way as being video muted
- this.setVideoMuteStatus(true);
this.isSharingScreen = false;
}
- APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
+ this.setVideoMuteStatus(this.isLocalVideoMuted());
APP.UI.updateDesktopSharingButtons();
});
},
@@ -1245,18 +1243,13 @@ export default {
*/
useAudioStream(newStream) {
return APP.store.dispatch(
- replaceLocalTrack(localAudio, newStream, room))
+ replaceLocalTrack(this.localAudio, newStream, room))
.then(() => {
- localAudio = newStream;
-
+ this.localAudio = newStream;
if (newStream) {
- this.setAudioMuteStatus(newStream.isMuted());
APP.UI.addLocalStream(newStream);
- } else {
- // No audio is treated the same way as being audio muted
- this.setAudioMuteStatus(true);
}
- APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
+ this.setAudioMuteStatus(this.isLocalAudioMuted());
});
},
@@ -1339,10 +1332,10 @@ export default {
JitsiMeetJS.analytics.sendEvent(
'conference.sharingDesktop.stop');
logger.log('switched back to local video');
- if (!localVideo && wasVideoMuted) {
+ if (!this.localVideo && wasVideoMuted) {
return Promise.reject('No local video to be muted!');
- } else if (wasVideoMuted && localVideo) {
- return localVideo.mute();
+ } else if (wasVideoMuted && this.localVideo) {
+ return this.localVideo.mute();
}
})
.catch(error => {
@@ -1416,8 +1409,8 @@ export default {
_createDesktopTrack(options = {}) {
let externalInstallation = false;
let DSExternalInstallationInProgress = false;
- const didHaveVideo = Boolean(localVideo);
- const wasVideoMuted = this.videoMuted;
+ const didHaveVideo = Boolean(this.localVideo);
+ const wasVideoMuted = this.isLocalVideoMuted();
return createLocalTracks({
desktopSharingSources: options.desktopSharingSources,
@@ -1671,28 +1664,28 @@ export default {
});
room.on(ConferenceEvents.TRACK_ADDED, (track) => {
- if(!track || track.isLocal())
+ if (!track || track.isLocal())
return;
APP.store.dispatch(trackAdded(track));
});
room.on(ConferenceEvents.TRACK_REMOVED, (track) => {
- if(!track || track.isLocal())
+ if (!track || track.isLocal())
return;
APP.store.dispatch(trackRemoved(track));
});
room.on(ConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED, (id, lvl) => {
- if(this.isLocalId(id) && localAudio && localAudio.isMuted()) {
+ if (this.isLocalId(id)
+ && this.localAudio && this.localAudio.isMuted()) {
lvl = 0;
}
- if(config.debug)
- {
+ if (config.debug) {
this.audioLevelsMap[id] = lvl;
- if(config.debugAudioLevels)
+ if (config.debugAudioLevels)
logger.log("AudioLevel:" + id + "/" + lvl);
}
@@ -1866,12 +1859,14 @@ export default {
this.deviceChangeListener);
// stop local video
- if (localVideo) {
- localVideo.dispose();
+ if (this.localVideo) {
+ this.localVideo.dispose();
+ this.localVideo = null;
}
// stop local audio
- if (localAudio) {
- localAudio.dispose();
+ if (this.localAudio) {
+ this.localAudio.dispose();
+ this.localAudio = null;
}
});
@@ -2215,14 +2210,14 @@ export default {
// storage and settings menu. This is a workaround until
// getConstraints() method will be implemented
// in browsers.
- if (localAudio) {
+ if (this.localAudio) {
APP.settings.setMicDeviceId(
- localAudio.getDeviceId(), false);
+ this.localAudio.getDeviceId(), false);
}
- if (localVideo) {
+ if (this.localVideo) {
APP.settings.setCameraDeviceId(
- localVideo.getDeviceId(), false);
+ this.localVideo.getDeviceId(), false);
}
mediaDeviceHelper.setCurrentMediaDevices(devices);
@@ -2263,10 +2258,13 @@ export default {
let newDevices =
mediaDeviceHelper.getNewMediaDevicesAfterDeviceListChanged(
- devices, this.isSharingScreen, localVideo, localAudio);
+ devices,
+ this.isSharingScreen,
+ this.localVideo,
+ this.localAudio);
let promises = [];
- let audioWasMuted = this.audioMuted;
- let videoWasMuted = this.videoMuted;
+ let audioWasMuted = this.isLocalAudioMuted();
+ let videoWasMuted = this.isLocalVideoMuted();
let availableAudioInputDevices =
mediaDeviceHelper.getDevicesFromListByKind(devices, 'audioinput');
let availableVideoInputDevices =
@@ -2323,11 +2321,11 @@ export default {
// The audio functionality is considered available if there are any
// audio devices detected or if the local audio stream already exists.
- const available = audioDeviceCount > 0 || Boolean(localAudio);
+ const available = audioDeviceCount > 0 || Boolean(this.localAudio);
logger.debug(
'Microphone button enabled: ' + available,
- 'local audio: ' + localAudio,
+ 'local audio: ' + this.localAudio,
'audio devices: ' + audioMediaDevices,
'device count: ' + audioDeviceCount);
@@ -2348,11 +2346,11 @@ export default {
// active which could be either screensharing stream or a video track
// created before the permissions were rejected (through browser
// config).
- const available = videoDeviceCount > 0 || Boolean(localVideo);
+ const available = videoDeviceCount > 0 || Boolean(this.localVideo);
logger.debug(
'Camera button enabled: ' + available,
- 'local video: ' + localVideo,
+ 'local video: ' + this.localVideo,
'video devices: ' + videoMediaDevices,
'device count: ' + videoDeviceCount);
@@ -2393,10 +2391,10 @@ export default {
* NOTE: Should be used after conference.init
*/
logEvent(name, value, label) {
- if(JitsiMeetJS.analytics) {
+ if (JitsiMeetJS.analytics) {
JitsiMeetJS.analytics.sendEvent(name, {value, label});
}
- if(room) {
+ if (room) {
room.sendApplicationLog(JSON.stringify({name, value, label}));
}
},
@@ -2553,7 +2551,7 @@ export default {
* track or the source id is not available, undefined will be returned.
*/
getDesktopSharingSourceId() {
- return localVideo.sourceId;
+ return this.localVideo.sourceId;
},
/**
@@ -2565,7 +2563,7 @@ export default {
* returned.
*/
getDesktopSharingSourceType() {
- return localVideo.sourceType;
+ return this.localVideo.sourceType;
},
/**
@@ -2574,10 +2572,8 @@ export default {
* @param {boolean} muted - New muted status.
*/
setVideoMuteStatus(muted) {
- if (this.videoMuted !== muted) {
- this.videoMuted = muted;
- APP.API.notifyVideoMutedStatusChanged(muted);
- }
+ APP.UI.setVideoMuted(this.getMyUserId(), muted);
+ APP.API.notifyVideoMutedStatusChanged(muted);
},
/**
@@ -2586,9 +2582,7 @@ export default {
* @param {boolean} muted - New muted status.
*/
setAudioMuteStatus(muted) {
- if (this.audioMuted !== muted) {
- this.audioMuted = muted;
- APP.API.notifyAudioMutedStatusChanged(muted);
- }
- },
+ APP.UI.setAudioMuted(this.getMyUserId(), muted);
+ APP.API.notifyAudioMutedStatusChanged(muted);
+ }
};
diff --git a/modules/UI/UI.js b/modules/UI/UI.js
index 59ce0d02e..a5346db67 100644
--- a/modules/UI/UI.js
+++ b/modules/UI/UI.js
@@ -24,7 +24,6 @@ import Settings from "./../settings/Settings";
import { debounce } from "../util/helpers";
import { updateDeviceList } from '../../react/features/base/devices';
-import { setAudioMuted, setVideoMuted } from '../../react/features/base/media';
import {
openDeviceSelectionDialog
} from '../../react/features/device-selection';
@@ -669,7 +668,6 @@ UI.askForNickname = function () {
UI.setAudioMuted = function (id, muted) {
VideoLayout.onAudioMute(id, muted);
if (APP.conference.isLocalId(id)) {
- APP.store.dispatch(setAudioMuted(muted));
APP.conference.updateAudioIconEnabled();
}
};
@@ -680,7 +678,6 @@ UI.setAudioMuted = function (id, muted) {
UI.setVideoMuted = function (id, muted) {
VideoLayout.onVideoMute(id, muted);
if (APP.conference.isLocalId(id)) {
- APP.store.dispatch(setVideoMuted(muted));
APP.conference.updateVideoIconEnabled();
}
};
diff --git a/react/features/base/media/functions.js b/react/features/base/media/functions.js
index 29aa10142..cc3d86fa4 100644
--- a/react/features/base/media/functions.js
+++ b/react/features/base/media/functions.js
@@ -1,3 +1,5 @@
+import { VIDEO_MUTISM_AUTHORITY } from './constants';
+
/**
* Determines whether a specific videoTrack should be rendered.
*
@@ -14,3 +16,15 @@ export function shouldRenderVideoTrack(videoTrack, waitForVideoStarted) {
&& !videoTrack.muted
&& (!waitForVideoStarted || videoTrack.videoStarted));
}
+
+/**
+ * Checks if video is currently muted by the user authority.
+ *
+ * @param {Object} store - The redux store instance.
+ * @returns {boolean}
+ */
+export function isVideoMutedByUser({ getState }) {
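+ // base/media stores video.muted as a bitfield of VIDEO_MUTISM_AUTHORITY
+ // flags, so a set USER bit means the user muted the video themselves.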
+ return Boolean(
+ getState()['features/base/media'] // eslint-disable-line no-bitwise
+ .video.muted & VIDEO_MUTISM_AUTHORITY.USER);
+}
diff --git a/react/features/base/media/middleware.js b/react/features/base/media/middleware.js
index f17971d56..c80bbebe2 100644
--- a/react/features/base/media/middleware.js
+++ b/react/features/base/media/middleware.js
@@ -93,7 +93,7 @@ function _setRoom({ dispatch, getState }, next, action) {
* @private
* @returns {void}
*/
-function _syncTrackMutedState({ dispatch, getState }, track) {
+function _syncTrackMutedState({ getState }, track) {
const state = getState()['features/base/media'];
const muted = Boolean(state[track.mediaType].muted);
@@ -104,6 +104,6 @@ function _syncTrackMutedState({ dispatch, getState }, track) {
// fired before track gets to state.
if (track.muted !== muted) {
track.muted = muted;
- dispatch(setTrackMuted(track.jitsiTrack, muted));
+ setTrackMuted(track.jitsiTrack, muted);
}
}
diff --git a/react/features/base/tracks/actions.js b/react/features/base/tracks/actions.js
index b5c59d3fd..eabaa57c7 100644
--- a/react/features/base/tracks/actions.js
+++ b/react/features/base/tracks/actions.js
@@ -348,53 +348,6 @@ function _getLocalTracksToChange(currentTracks, newTracks) {
};
}
-/**
- * Mutes or unmutes a specific JitsiLocalTrack. If the muted state of
- * the specified track is already in accord with the specified
- * muted value, then does nothing. In case the actual muting/unmuting
- * fails, a rollback action will be dispatched to undo the muting/unmuting.
- *
- * @param {JitsiLocalTrack} track - The JitsiLocalTrack to mute or
- * unmute.
- * @param {boolean} muted - If the specified track is to be muted, then
- * true; otherwise, false.
- * @returns {Function}
- */
-export function setTrackMuted(track, muted) {
- return dispatch => {
- muted = Boolean(muted); // eslint-disable-line no-param-reassign
-
- if (track.isMuted() === muted) {
- return Promise.resolve();
- }
-
- const f = muted ? 'mute' : 'unmute';
-
- return track[f]().catch(error => {
- console.error(`set track ${f} failed`, error);
-
- if (navigator.product === 'ReactNative') {
- // Synchronizing the state of base/tracks into the state of
- // base/media is not required in React (and, respectively, React
- // Native) because base/media expresses the app's and the user's
- // desires/expectations/intents and base/tracks expresses
- // practice/reality. Unfortunately, the old Web does not comply
- // and/or does the opposite.
- return;
- }
-
- const setMuted
- = track.mediaType === MEDIA_TYPE.AUDIO
- ? setAudioMuted
- : setVideoMuted;
-
- // FIXME The following disregards VIDEO_MUTISM_AUTHORITY (in the
- // case of setVideoMuted, of course).
- dispatch(setMuted(!muted));
- });
- };
-}
-
/**
* Returns true if the provided JitsiTrack should be rendered as a mirror.
*
diff --git a/react/features/base/tracks/functions.js b/react/features/base/tracks/functions.js
index 1741db4a8..3a44d84b5 100644
--- a/react/features/base/tracks/functions.js
+++ b/react/features/base/tracks/functions.js
@@ -155,3 +155,45 @@ export function getTrackByJitsiTrack(tracks, jitsiTrack) {
export function getTracksByMediaType(tracks, mediaType) {
return tracks.filter(t => t.mediaType === mediaType);
}
+
+/**
+ * Checks if the first local track in the given tracks set is muted.
+ *
+ * @param {Track[]} tracks - List of all tracks.
+ * @param {MEDIA_TYPE} mediaType - The media type of tracks to be checked.
+ * @returns {boolean} True if the local track is muted or if there is no local
+ * track of the given media type in the given set of tracks; false if the track
+ * exists and is unmuted.
+ */
+export function isLocalTrackMuted(tracks, mediaType) {
+ const track = getLocalTrack(tracks, mediaType);
+
+ return !track || track.muted;
+}
+
+/**
+ * Mutes or unmutes a specific JitsiLocalTrack. If the muted state of
+ * the specified track is already in accord with the specified
+ * muted value, then does nothing.
+ *
+ * @param {JitsiLocalTrack} track - The JitsiLocalTrack to mute or
+ * unmute.
+ * @param {boolean} muted - If the specified track is to be muted, then
+ * true; otherwise, false.
+ * @returns {Promise}
+ */
+export function setTrackMuted(track, muted) {
+ muted = Boolean(muted); // eslint-disable-line no-param-reassign
+
+ if (track.isMuted() === muted) {
+ return Promise.resolve();
+ }
+
+ const f = muted ? 'mute' : 'unmute';
+
+ return track[f]().catch(error => {
+
+ // FIXME emit mute failed, so that the app can show error dialog
+ console.error(`set track ${f} failed`, error);
+ });
+}
diff --git a/react/features/base/tracks/middleware.js b/react/features/base/tracks/middleware.js
index 7952f90ef..a8d172a57 100644
--- a/react/features/base/tracks/middleware.js
+++ b/react/features/base/tracks/middleware.js
@@ -6,16 +6,13 @@ import {
SET_AUDIO_MUTED,
SET_CAMERA_FACING_MODE,
SET_VIDEO_MUTED,
- setAudioMuted,
- setVideoMuted,
TOGGLE_CAMERA_FACING_MODE,
toggleCameraFacingMode
} from '../media';
import { MiddlewareRegistry } from '../redux';
-import { setTrackMuted } from './actions';
import { TRACK_ADDED, TRACK_REMOVED, TRACK_UPDATED } from './actionTypes';
-import { getLocalTrack } from './functions';
+import { getLocalTrack, setTrackMuted } from './functions';
declare var APP: Object;
@@ -108,30 +105,20 @@ MiddlewareRegistry.register(store => next => action => {
const participantID = jitsiTrack.getParticipantId();
const isVideoTrack = jitsiTrack.isVideoTrack();
- if (jitsiTrack.isLocal()) {
- if (isVideoTrack) {
+ if (isVideoTrack) {
+ if (jitsiTrack.isLocal()) {
APP.conference.setVideoMuteStatus(muted);
} else {
- APP.conference.setAudioMuteStatus(muted);
+ APP.UI.setVideoMuted(participantID, muted);
}
- }
-
- if (isVideoTrack) {
- APP.UI.setVideoMuted(participantID, muted);
APP.UI.onPeerVideoTypeChanged(
participantID,
jitsiTrack.videoType);
+ } else if (jitsiTrack.isLocal()) {
+ APP.conference.setAudioMuteStatus(muted);
} else {
APP.UI.setAudioMuted(participantID, muted);
}
-
- // XXX The following synchronizes the state of base/tracks into the
- // state of base/media. Which is not required in React (and,
- // respectively, React Native) because base/media expresses the
- // app's and the user's desires/expectations/intents and base/tracks
- // expresses practice/reality. Unfortunately, the old Web does not
- // comply and/or does the opposite. Hence, the following:
- return _trackUpdated(store, next, action);
}
}
@@ -169,66 +156,5 @@ function _getLocalTrack({ getState }, mediaType: MEDIA_TYPE) {
function _setMuted(store, { muted }, mediaType: MEDIA_TYPE) {
const localTrack = _getLocalTrack(store, mediaType);
- localTrack && store.dispatch(setTrackMuted(localTrack.jitsiTrack, muted));
-}
-
-/**
- * Intercepts the action TRACK_UPDATED in order to synchronize the
- * muted states of the local tracks of features/base/tracks with the muted
- * states of features/base/media.
- *
- * @param {Store} store - The redux store in which the specified action
- * is being dispatched.
- * @param {Dispatch} next - The redux dispatch function to dispatch the
- * specified action to the specified store.
- * @param {Action} action - The redux action TRACK_UPDATED which is
- * being dispatched in the specified store.
- * @private
- * @returns {Object} The new state that is the result of the reduction of the
- * specified action.
- */
-function _trackUpdated(store, next, action) {
- // Determine the muted state of the local track before the update.
- const track = action.track;
- let mediaType;
- let oldMuted;
-
- if ('muted' in track) {
- // XXX The return value of JitsiTrack.getType() is of type MEDIA_TYPE
- // that happens to be compatible with the type MEDIA_TYPE defined by
- // jitsi-meet.
- mediaType = track.jitsiTrack.getType();
-
- const localTrack = _getLocalTrack(store, mediaType);
-
- if (localTrack) {
- oldMuted = localTrack.muted;
- }
- }
-
- const result = next(action);
-
- if (typeof oldMuted !== 'undefined') {
- // Determine the muted state of the local track after the update. If the
- // muted states before and after the update differ, then the respective
- // media state should by synchronized.
- const localTrack = _getLocalTrack(store, mediaType);
-
- if (localTrack) {
- const newMuted = localTrack.muted;
-
- if (oldMuted !== newMuted) {
- switch (mediaType) {
- case MEDIA_TYPE.AUDIO:
- store.dispatch(setAudioMuted(newMuted));
- break;
- case MEDIA_TYPE.VIDEO:
- store.dispatch(setVideoMuted(newMuted));
- break;
- }
- }
- }
- }
-
- return result;
+ localTrack && setTrackMuted(localTrack.jitsiTrack, muted);
}
diff --git a/react/features/toolbox/defaultToolbarButtons.js b/react/features/toolbox/defaultToolbarButtons.js
index 702c20a20..ff107acbf 100644
--- a/react/features/toolbox/defaultToolbarButtons.js
+++ b/react/features/toolbox/defaultToolbarButtons.js
@@ -40,13 +40,14 @@ const buttons: Object = {
isDisplayed: () => true,
id: 'toolbar_button_camera',
onClick() {
- if (APP.conference.videoMuted) {
+ const newVideoMutedState = !APP.conference.isLocalVideoMuted();
+
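+ // Analytics reflect the resulting state: 'enabled' when unmuting,
+ // 'disabled' when muting.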
+ if (!newVideoMutedState) {
JitsiMeetJS.analytics.sendEvent('toolbar.video.enabled');
- APP.UI.emitEvent(UIEvents.VIDEO_MUTED, false);
} else {
JitsiMeetJS.analytics.sendEvent('toolbar.video.disabled');
- APP.UI.emitEvent(UIEvents.VIDEO_MUTED, true);
}
+ APP.UI.emitEvent(UIEvents.VIDEO_MUTED, newVideoMutedState);
},
popups: [
{
@@ -290,7 +291,7 @@ const buttons: Object = {
onClick() {
const sharedVideoManager = APP.UI.getSharedVideoManager();
- if (APP.conference.audioMuted) {
+ if (APP.conference.isLocalAudioMuted()) {
// If there's a shared video with the volume "on" and we aren't
// the video owner, we warn the user
// that currently it's not possible to unmute.
diff --git a/react/features/toolbox/functions.native.js b/react/features/toolbox/functions.native.js
index 1c443c368..dc7fd440a 100644
--- a/react/features/toolbox/functions.native.js
+++ b/react/features/toolbox/functions.native.js
@@ -3,7 +3,8 @@
import type { Dispatch } from 'redux';
import { appNavigate } from '../app';
-import { getLocalAudioTrack, getLocalVideoTrack } from '../base/tracks';
+import { MEDIA_TYPE } from '../base/media';
+import { isLocalTrackMuted } from '../base/tracks';
/**
* Maps redux actions to {@link Toolbox} (React {@code Component}) props.
@@ -58,9 +59,6 @@ export function abstractMapStateToProps(state: Object): Object {
const tracks = state['features/base/tracks'];
const { visible } = state['features/toolbox'];
- const audioTrack = getLocalAudioTrack(tracks);
- const videoTrack = getLocalVideoTrack(tracks);
-
return {
/**
* Flag showing whether audio is muted.
@@ -68,7 +66,7 @@ export function abstractMapStateToProps(state: Object): Object {
* @protected
* @type {boolean}
*/
- _audioMuted: !audioTrack || audioTrack.muted,
+ _audioMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO),
/**
* Flag showing whether video is muted.
@@ -76,7 +74,7 @@ export function abstractMapStateToProps(state: Object): Object {
* @protected
* @type {boolean}
*/
- _videoMuted: !videoTrack || videoTrack.muted,
+ _videoMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO),
/**
* Flag showing whether toolbox is visible.
diff --git a/react/features/toolbox/middleware.js b/react/features/toolbox/middleware.js
index 533d21082..8cba61cbb 100644
--- a/react/features/toolbox/middleware.js
+++ b/react/features/toolbox/middleware.js
@@ -1,11 +1,11 @@
/* @flow */
import {
+ MEDIA_TYPE,
SET_AUDIO_AVAILABLE,
- SET_AUDIO_MUTED,
- SET_VIDEO_AVAILABLE,
- SET_VIDEO_MUTED } from '../base/media';
+ SET_VIDEO_AVAILABLE } from '../base/media';
import { MiddlewareRegistry } from '../base/redux';
+import { isLocalTrackMuted, TRACK_UPDATED } from '../base/tracks';
import { setToolbarButton } from './actions';
import { CLEAR_TOOLBOX_TIMEOUT, SET_TOOLBOX_TIMEOUT } from './actionTypes';
@@ -37,66 +37,84 @@ MiddlewareRegistry.register(store => next => action => {
break;
}
- case SET_AUDIO_AVAILABLE:
- case SET_AUDIO_MUTED: {
- return _setAudioAvailableOrMuted(store, next, action);
+ case SET_AUDIO_AVAILABLE: {
+ return _setMediaAvailableOrMuted(store, next, action);
+ }
+
+ case SET_VIDEO_AVAILABLE: {
+ return _setMediaAvailableOrMuted(store, next, action);
+ }
+
+ case TRACK_UPDATED: {
+ if (action.track.jitsiTrack.isLocal()) {
+ return _setMediaAvailableOrMuted(store, next, action);
+ }
+ break;
}
- case SET_VIDEO_AVAILABLE:
- case SET_VIDEO_MUTED:
- return _setVideoAvailableOrMuted(store, next, action);
}
return next(action);
});
/**
- * Adjusts the state of toolbar's microphone button.
+ * Adjusts the state of toolbar's microphone or camera button.
*
* @param {Store} store - The Redux store instance.
* @param {Function} next - The redux function to continue dispatching the
* specified {@code action} in the specified {@code store}.
- * @param {Object} action - Either SET_AUDIO_AVAILABLE or SET_AUDIO_MUTED.
+ * @param {Object} action - SET_AUDIO_AVAILABLE, SET_VIDEO_AVAILABLE or
+ * TRACK_UPDATED.
*
* @returns {*}
*/
-function _setAudioAvailableOrMuted({ dispatch, getState }, next, action) {
+function _setMediaAvailableOrMuted({ dispatch, getState }, next, action) {
const result = next(action);
- const { available, muted } = getState()['features/base/media'].audio;
- const i18nKey = available ? 'mute' : 'micDisabled';
+ let mediaType;
- dispatch(setToolbarButton('microphone', {
- enabled: available,
- i18n: `[content]toolbar.${i18nKey}`,
- toggled: available ? muted : true
- }));
-
- return result;
-}
-
-/**
- * Adjusts the state of toolbar's camera button.
- *
- * @param {Store} store - The redux store.
- * @param {Function} next - The redux function to continue dispatching the
- * specified {@code action} in the specified {@code store}.
- * @param {Object} action - Either {@link SET_VIDEO_AVAILABLE} or
- * {@link SET_VIDEO_MUTED}.
- * @returns {Object} The new state that is the result of the reduction of the
- * specified {@code action}.
- */
-function _setVideoAvailableOrMuted({ dispatch, getState }, next, action) {
- const result = next(action);
-
- const { available, muted } = getState()['features/base/media'].video;
- const i18nKey = available ? 'videomute' : 'cameraDisabled';
-
- dispatch(setToolbarButton('camera', {
- enabled: available,
- i18n: `[content]toolbar.${i18nKey}`,
- toggled: available ? muted : true
- }));
+ switch (action.type) {
+ case SET_AUDIO_AVAILABLE: {
+ mediaType = MEDIA_TYPE.AUDIO;
+ break;
+ }
+
+ case SET_VIDEO_AVAILABLE: {
+ mediaType = MEDIA_TYPE.VIDEO;
+ break;
+ }
+
+ case TRACK_UPDATED: {
+ mediaType
+ = action.track.jitsiTrack.isAudioTrack()
+ ? MEDIA_TYPE.AUDIO : MEDIA_TYPE.VIDEO;
+ break;
+ }
+
+ default: {
+ throw new Error(`Unsupported action: ${action.type}`);
+ }
+
+ }
+
+ const mediaState = getState()['features/base/media'];
+ const { available }
+ = mediaType === MEDIA_TYPE.AUDIO
+ ? mediaState.audio : mediaState.video;
+ const i18nKey
+ = mediaType === MEDIA_TYPE.AUDIO
+ ? available ? 'mute' : 'micDisabled'
+ : available ? 'videomute' : 'cameraDisabled';
+
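+ // The toggled state now comes from base/tracks (the actual track state)
+ // rather than from base/media, so TRACK_UPDATED keeps the button in sync.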
+ const tracks = getState()['features/base/tracks'];
+ const muted = isLocalTrackMuted(tracks, mediaType);
+
+ dispatch(setToolbarButton(
+ mediaType === MEDIA_TYPE.AUDIO ? 'microphone' : 'camera', {
+ enabled: available,
+ i18n: `[content]toolbar.${i18nKey}`,
+ toggled: available ? muted : true
+ }));
return result;
}