feat(conference, toolbox, API) get rid of '{audio,video}Muted' flags

* ref: video muted state

Get rid of 'videoMuted' flag in conference.js

* ref: audio muted state

Get rid of 'audioMuted' flag in conference.js

* fix(conference.js|API): early audio/video muted updates

* ref(conference.js): rename isVideoMuted

Rename isVideoMuted to isLocalVideoMuted to be consistent with
isLocalAudioMuted.

* doc|style(conference.js): comments and space after if

* ref: move 'setTrackMuted' to functions

* fix(tracks/middleware): no-lonely-if

* ref(features/toolbox): get rid of last argument

* ref(defaultToolbarButtons): rename var
This commit is contained in:
Paweł Domas 2017-08-18 12:30:30 +01:00 committed by Saúl Ibarra Corretgé
parent e0e3e873b8
commit 99ce46cfa8
10 changed files with 259 additions and 316 deletions

View File

@ -38,8 +38,12 @@ import {
isFatalJitsiConnectionError isFatalJitsiConnectionError
} from './react/features/base/lib-jitsi-meet'; } from './react/features/base/lib-jitsi-meet';
import { import {
isVideoMutedByUser,
MEDIA_TYPE,
setAudioAvailable, setAudioAvailable,
setVideoAvailable setAudioMuted,
setVideoAvailable,
setVideoMuted
} from './react/features/base/media'; } from './react/features/base/media';
import { import {
localParticipantConnectionStatusChanged, localParticipantConnectionStatusChanged,
@ -54,6 +58,7 @@ import {
} from './react/features/base/participants'; } from './react/features/base/participants';
import { import {
createLocalTracks, createLocalTracks,
isLocalTrackMuted,
replaceLocalTrack, replaceLocalTrack,
trackAdded, trackAdded,
trackRemoved trackRemoved
@ -87,7 +92,6 @@ const eventEmitter = new EventEmitter();
let room; let room;
let connection; let connection;
let localAudio, localVideo;
/* /*
* Logic to open a desktop picker put on the window global for * Logic to open a desktop picker put on the window global for
@ -134,7 +138,7 @@ function connect(roomName) {
* @param {string} value new value * @param {string} value new value
*/ */
function sendData(command, value) { function sendData(command, value) {
if(!room) { if (!room) {
return; return;
} }
@ -184,47 +188,18 @@ function getDisplayName(id) {
/** /**
* Mute or unmute local audio stream if it exists. * Mute or unmute local audio stream if it exists.
* @param {boolean} muted - if audio stream should be muted or unmuted. * @param {boolean} muted - if audio stream should be muted or unmuted.
*
* @returns {Promise} resolved in case mute/unmute operations succeeds or
* rejected with an error if something goes wrong. It is expected that often
* the error will be of the {@link JitsiTrackError} type, but it's not
* guaranteed.
*/ */
function muteLocalAudio(muted) { function muteLocalAudio(muted) {
return muteLocalMedia(localAudio, muted); APP.store.dispatch(setAudioMuted(muted));
}
/**
* Mute or unmute local media stream if it exists.
* @param {JitsiLocalTrack} localTrack
* @param {boolean} muted
*
* @returns {Promise} resolved in case mute/unmute operations succeeds or
* rejected with an error if something goes wrong. It is expected that often
* the error will be of the {@link JitsiTrackError} type, but it's not
* guaranteed.
*/
function muteLocalMedia(localTrack, muted) {
if (!localTrack) {
return Promise.resolve();
}
const method = muted ? 'mute' : 'unmute';
return localTrack[method]();
} }
/** /**
* Mute or unmute local video stream if it exists. * Mute or unmute local video stream if it exists.
* @param {boolean} muted if video stream should be muted or unmuted. * @param {boolean} muted if video stream should be muted or unmuted.
* *
* @returns {Promise} resolved in case mute/unmute operations succeeds or
* rejected with an error if something goes wrong. It is expected that often
* the error will be of the {@link JitsiTrackError} type, but it's not
* guaranteed.
*/ */
function muteLocalVideo(muted) { function muteLocalVideo(muted) {
return muteLocalMedia(localVideo, muted); APP.store.dispatch(setVideoMuted(muted));
} }
/** /**
@ -458,8 +433,6 @@ export default {
*/ */
_localTracksInitialized: false, _localTracksInitialized: false,
isModerator: false, isModerator: false,
audioMuted: false,
videoMuted: false,
isSharingScreen: false, isSharingScreen: false,
/** /**
* Indicates if the desktop sharing functionality has been enabled. * Indicates if the desktop sharing functionality has been enabled.
@ -491,6 +464,21 @@ export default {
*/ */
isDominantSpeaker: false, isDominantSpeaker: false,
/**
* The local audio track (if any).
* FIXME tracks from redux store should be the single source of truth
* @type {JitsiLocalTrack|null}
*/
localAudio: null,
/**
* The local video track (if any).
* FIXME tracks from redux store should be the single source of truth, but
* more refactoring is required around screen sharing ('localVideo' usages).
* @type {JitsiLocalTrack|null}
*/
localVideo: null,
/** /**
* Creates local media tracks and connects to a room. Will show error * Creates local media tracks and connects to a room. Will show error
* dialogs in case accessing the local microphone and/or camera failed. Will * dialogs in case accessing the local microphone and/or camera failed. Will
@ -655,13 +643,13 @@ export default {
init(options) { init(options) {
this.roomName = options.roomName; this.roomName = options.roomName;
// attaches global error handler, if there is already one, respect it // attaches global error handler, if there is already one, respect it
if(JitsiMeetJS.getGlobalOnErrorHandler){ if (JitsiMeetJS.getGlobalOnErrorHandler){
var oldOnErrorHandler = window.onerror; var oldOnErrorHandler = window.onerror;
window.onerror = function (message, source, lineno, colno, error) { window.onerror = function (message, source, lineno, colno, error) {
JitsiMeetJS.getGlobalOnErrorHandler( JitsiMeetJS.getGlobalOnErrorHandler(
message, source, lineno, colno, error); message, source, lineno, colno, error);
if(oldOnErrorHandler) if (oldOnErrorHandler)
oldOnErrorHandler(message, source, lineno, colno, error); oldOnErrorHandler(message, source, lineno, colno, error);
}; };
@ -671,7 +659,7 @@ export default {
JitsiMeetJS.getGlobalOnErrorHandler( JitsiMeetJS.getGlobalOnErrorHandler(
null, null, null, null, event.reason); null, null, null, null, event.reason);
if(oldOnUnhandledRejection) if (oldOnUnhandledRejection)
oldOnUnhandledRejection(event); oldOnUnhandledRejection(event);
}; };
} }
@ -690,9 +678,10 @@ export default {
}); });
}).then(([tracks, con]) => { }).then(([tracks, con]) => {
tracks.forEach(track => { tracks.forEach(track => {
if (track.isAudioTrack() && this.audioMuted) { if (track.isAudioTrack() && this.isLocalAudioMuted()) {
track.mute(); track.mute();
} else if (track.isVideoTrack() && this.videoMuted) { } else if (track.isVideoTrack()
&& this.isLocalVideoMuted()) {
track.mute(); track.mute();
} }
}); });
@ -731,12 +720,10 @@ export default {
// to the conference // to the conference
if (!tracks.find((t) => t.isAudioTrack())) { if (!tracks.find((t) => t.isAudioTrack())) {
this.setAudioMuteStatus(true); this.setAudioMuteStatus(true);
APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
} }
if (!tracks.find((t) => t.isVideoTrack())) { if (!tracks.find((t) => t.isVideoTrack())) {
this.setVideoMuteStatus(true); this.setVideoMuteStatus(true);
APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
} }
this._initDeviceList(); this._initDeviceList();
@ -759,6 +746,20 @@ export default {
isLocalId(id) { isLocalId(id) {
return this.getMyUserId() === id; return this.getMyUserId() === id;
}, },
/**
* Tells whether the local video is muted or not.
* @return {boolean}
*/
isLocalVideoMuted() {
// If the tracks are not ready, read from base/media state
return this._localTracksInitialized
? isLocalTrackMuted(
APP.store.getState()['features/base/tracks'],
MEDIA_TYPE.VIDEO)
: isVideoMutedByUser(APP.store);
},
/** /**
* Simulates toolbar button click for audio mute. Used by shortcuts and API. * Simulates toolbar button click for audio mute. Used by shortcuts and API.
* @param {boolean} mute true for mute and false for unmute. * @param {boolean} mute true for mute and false for unmute.
@ -768,38 +769,31 @@ export default {
muteAudio(mute, showUI = true) { muteAudio(mute, showUI = true) {
// Not ready to modify track's state yet // Not ready to modify track's state yet
if (!this._localTracksInitialized) { if (!this._localTracksInitialized) {
// This will only modify base/media.audio.muted which is then synced
// up with the track at the end of local tracks initialization.
muteLocalAudio(mute);
this.setAudioMuteStatus(mute); this.setAudioMuteStatus(mute);
return; return;
} else if (localAudio && localAudio.isMuted() === mute) { } else if (this.isLocalAudioMuted() === mute) {
// NO-OP // NO-OP
return; return;
} }
const maybeShowErrorDialog = (error) => { if (!this.localAudio && !mute) {
if (showUI) {
APP.UI.showMicErrorNotification(error);
}
};
if (!localAudio && this.audioMuted && !mute) {
createLocalTracks({ devices: ['audio'] }, false) createLocalTracks({ devices: ['audio'] }, false)
.then(([audioTrack]) => audioTrack) .then(([audioTrack]) => audioTrack)
.catch(error => { .catch(error => {
maybeShowErrorDialog(error); if (showUI) {
APP.UI.showMicErrorNotification(error);
}
// Rollback the audio muted status by using null track // Rollback the audio muted status by using null track
return null; return null;
}) })
.then(audioTrack => this.useAudioStream(audioTrack)); .then(audioTrack => this.useAudioStream(audioTrack));
} else { } else {
const oldMutedStatus = this.audioMuted; muteLocalAudio(mute);
muteLocalAudio(mute)
.catch(error => {
maybeShowErrorDialog(error);
this.setAudioMuteStatus(oldMutedStatus);
APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
});
} }
}, },
/** /**
@ -807,7 +801,13 @@ export default {
* @returns {boolean} * @returns {boolean}
*/ */
isLocalAudioMuted() { isLocalAudioMuted() {
return this.audioMuted; // If the tracks are not ready, read from base/media state
return this._localTracksInitialized
? isLocalTrackMuted(
APP.store.getState()['features/base/tracks'],
MEDIA_TYPE.AUDIO)
: Boolean(
APP.store.getState()['features/base/media'].audio.muted);
}, },
/** /**
* Simulates toolbar button click for audio mute. Used by shortcuts * Simulates toolbar button click for audio mute. Used by shortcuts
@ -816,7 +816,7 @@ export default {
* dialogs in case of media permissions error. * dialogs in case of media permissions error.
*/ */
toggleAudioMuted(showUI = true) { toggleAudioMuted(showUI = true) {
this.muteAudio(!this.audioMuted, showUI); this.muteAudio(!this.isLocalAudioMuted(), showUI);
}, },
/** /**
* Simulates toolbar button click for video mute. Used by shortcuts and API. * Simulates toolbar button click for video mute. Used by shortcuts and API.
@ -825,12 +825,15 @@ export default {
* dialogs in case of media permissions error. * dialogs in case of media permissions error.
*/ */
muteVideo(mute, showUI = true) { muteVideo(mute, showUI = true) {
// Not ready to modify track's state yet // If not ready to modify track's state yet adjust the base/media
if (!this._localTracksInitialized) { if (!this._localTracksInitialized) {
// This will only modify base/media.video.muted which is then synced
// up with the track at the end of local tracks initialization.
muteLocalVideo(mute);
this.setVideoMuteStatus(mute); this.setVideoMuteStatus(mute);
return; return;
} else if (localVideo && localVideo.isMuted() === mute) { } else if (this.isLocalVideoMuted() === mute) {
// NO-OP // NO-OP
return; return;
} }
@ -841,7 +844,10 @@ export default {
} }
}; };
if (!localVideo && this.videoMuted && !mute) { // FIXME it is possible to queue this task twice, but it's not causing
// any issues. Specifically this can happen when the previous
// get user media call is blocked on "ask user for permissions" dialog.
if (!this.localVideo && !mute) {
// Try to create local video if there wasn't any. // Try to create local video if there wasn't any.
// This handles the case when user joined with no video // This handles the case when user joined with no video
// (dismissed screen sharing screen or in audio only mode), but // (dismissed screen sharing screen or in audio only mode), but
@ -861,14 +867,8 @@ export default {
}) })
.then(videoTrack => this.useVideoStream(videoTrack)); .then(videoTrack => this.useVideoStream(videoTrack));
} else { } else {
const oldMutedStatus = this.videoMuted; // FIXME show error dialog if it fails (should be handled by react)
muteLocalVideo(mute);
muteLocalVideo(mute)
.catch(error => {
maybeShowErrorDialog(error);
this.setVideoMuteStatus(oldMutedStatus);
APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
});
} }
}, },
/** /**
@ -877,7 +877,7 @@ export default {
* dialogs in case of media permissions error. * dialogs in case of media permissions error.
*/ */
toggleVideoMuted(showUI = true) { toggleVideoMuted(showUI = true) {
this.muteVideo(!this.videoMuted, showUI); this.muteVideo(!this.isLocalVideoMuted(), showUI);
}, },
/** /**
* Retrieve list of conference participants (without local user). * Retrieve list of conference participants (without local user).
@ -1202,7 +1202,7 @@ export default {
_getConferenceOptions() { _getConferenceOptions() {
let options = config; let options = config;
if(config.enableRecording && !config.recordingType) { if (config.enableRecording && !config.recordingType) {
options.recordingType = (config.hosts && options.recordingType = (config.hosts &&
(typeof config.hosts.jirecon != "undefined"))? (typeof config.hosts.jirecon != "undefined"))?
"jirecon" : "colibri"; "jirecon" : "colibri";
@ -1219,20 +1219,18 @@ export default {
*/ */
useVideoStream(newStream) { useVideoStream(newStream) {
return APP.store.dispatch( return APP.store.dispatch(
replaceLocalTrack(localVideo, newStream, room)) replaceLocalTrack(this.localVideo, newStream, room))
.then(() => { .then(() => {
localVideo = newStream; this.localVideo = newStream;
if (newStream) { if (newStream) {
this.setVideoMuteStatus(newStream.isMuted());
this.isSharingScreen = newStream.videoType === 'desktop'; this.isSharingScreen = newStream.videoType === 'desktop';
APP.UI.addLocalStream(newStream); APP.UI.addLocalStream(newStream);
} else { } else {
// No video is treated the same way as being video muted
this.setVideoMuteStatus(true);
this.isSharingScreen = false; this.isSharingScreen = false;
} }
APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted); this.setVideoMuteStatus(this.isLocalVideoMuted());
APP.UI.updateDesktopSharingButtons(); APP.UI.updateDesktopSharingButtons();
}); });
}, },
@ -1245,18 +1243,13 @@ export default {
*/ */
useAudioStream(newStream) { useAudioStream(newStream) {
return APP.store.dispatch( return APP.store.dispatch(
replaceLocalTrack(localAudio, newStream, room)) replaceLocalTrack(this.localAudio, newStream, room))
.then(() => { .then(() => {
localAudio = newStream; this.localAudio = newStream;
if (newStream) { if (newStream) {
this.setAudioMuteStatus(newStream.isMuted());
APP.UI.addLocalStream(newStream); APP.UI.addLocalStream(newStream);
} else {
// No audio is treated the same way as being audio muted
this.setAudioMuteStatus(true);
} }
APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted); this.setAudioMuteStatus(this.isLocalAudioMuted());
}); });
}, },
@ -1339,10 +1332,10 @@ export default {
JitsiMeetJS.analytics.sendEvent( JitsiMeetJS.analytics.sendEvent(
'conference.sharingDesktop.stop'); 'conference.sharingDesktop.stop');
logger.log('switched back to local video'); logger.log('switched back to local video');
if (!localVideo && wasVideoMuted) { if (!this.localVideo && wasVideoMuted) {
return Promise.reject('No local video to be muted!'); return Promise.reject('No local video to be muted!');
} else if (wasVideoMuted && localVideo) { } else if (wasVideoMuted && this.localVideo) {
return localVideo.mute(); return this.localVideo.mute();
} }
}) })
.catch(error => { .catch(error => {
@ -1416,8 +1409,8 @@ export default {
_createDesktopTrack(options = {}) { _createDesktopTrack(options = {}) {
let externalInstallation = false; let externalInstallation = false;
let DSExternalInstallationInProgress = false; let DSExternalInstallationInProgress = false;
const didHaveVideo = Boolean(localVideo); const didHaveVideo = Boolean(this.localVideo);
const wasVideoMuted = this.videoMuted; const wasVideoMuted = this.isLocalVideoMuted();
return createLocalTracks({ return createLocalTracks({
desktopSharingSources: options.desktopSharingSources, desktopSharingSources: options.desktopSharingSources,
@ -1671,28 +1664,28 @@ export default {
}); });
room.on(ConferenceEvents.TRACK_ADDED, (track) => { room.on(ConferenceEvents.TRACK_ADDED, (track) => {
if(!track || track.isLocal()) if (!track || track.isLocal())
return; return;
APP.store.dispatch(trackAdded(track)); APP.store.dispatch(trackAdded(track));
}); });
room.on(ConferenceEvents.TRACK_REMOVED, (track) => { room.on(ConferenceEvents.TRACK_REMOVED, (track) => {
if(!track || track.isLocal()) if (!track || track.isLocal())
return; return;
APP.store.dispatch(trackRemoved(track)); APP.store.dispatch(trackRemoved(track));
}); });
room.on(ConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED, (id, lvl) => { room.on(ConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED, (id, lvl) => {
if(this.isLocalId(id) && localAudio && localAudio.isMuted()) { if (this.isLocalId(id)
&& this.localAudio && this.localAudio.isMuted()) {
lvl = 0; lvl = 0;
} }
if(config.debug) if (config.debug) {
{
this.audioLevelsMap[id] = lvl; this.audioLevelsMap[id] = lvl;
if(config.debugAudioLevels) if (config.debugAudioLevels)
logger.log("AudioLevel:" + id + "/" + lvl); logger.log("AudioLevel:" + id + "/" + lvl);
} }
@ -1866,12 +1859,14 @@ export default {
this.deviceChangeListener); this.deviceChangeListener);
// stop local video // stop local video
if (localVideo) { if (this.localVideo) {
localVideo.dispose(); this.localVideo.dispose();
this.localVideo = null;
} }
// stop local audio // stop local audio
if (localAudio) { if (this.localAudio) {
localAudio.dispose(); this.localAudio.dispose();
this.localAudio = null;
} }
}); });
@ -2215,14 +2210,14 @@ export default {
// storage and settings menu. This is a workaround until // storage and settings menu. This is a workaround until
// getConstraints() method will be implemented // getConstraints() method will be implemented
// in browsers. // in browsers.
if (localAudio) { if (this.localAudio) {
APP.settings.setMicDeviceId( APP.settings.setMicDeviceId(
localAudio.getDeviceId(), false); this.localAudio.getDeviceId(), false);
} }
if (localVideo) { if (this.localVideo) {
APP.settings.setCameraDeviceId( APP.settings.setCameraDeviceId(
localVideo.getDeviceId(), false); this.localVideo.getDeviceId(), false);
} }
mediaDeviceHelper.setCurrentMediaDevices(devices); mediaDeviceHelper.setCurrentMediaDevices(devices);
@ -2263,10 +2258,13 @@ export default {
let newDevices = let newDevices =
mediaDeviceHelper.getNewMediaDevicesAfterDeviceListChanged( mediaDeviceHelper.getNewMediaDevicesAfterDeviceListChanged(
devices, this.isSharingScreen, localVideo, localAudio); devices,
this.isSharingScreen,
this.localVideo,
this.localAudio);
let promises = []; let promises = [];
let audioWasMuted = this.audioMuted; let audioWasMuted = this.isLocalAudioMuted();
let videoWasMuted = this.videoMuted; let videoWasMuted = this.isLocalVideoMuted();
let availableAudioInputDevices = let availableAudioInputDevices =
mediaDeviceHelper.getDevicesFromListByKind(devices, 'audioinput'); mediaDeviceHelper.getDevicesFromListByKind(devices, 'audioinput');
let availableVideoInputDevices = let availableVideoInputDevices =
@ -2323,11 +2321,11 @@ export default {
// The audio functionality is considered available if there are any // The audio functionality is considered available if there are any
// audio devices detected or if the local audio stream already exists. // audio devices detected or if the local audio stream already exists.
const available = audioDeviceCount > 0 || Boolean(localAudio); const available = audioDeviceCount > 0 || Boolean(this.localAudio);
logger.debug( logger.debug(
'Microphone button enabled: ' + available, 'Microphone button enabled: ' + available,
'local audio: ' + localAudio, 'local audio: ' + this.localAudio,
'audio devices: ' + audioMediaDevices, 'audio devices: ' + audioMediaDevices,
'device count: ' + audioDeviceCount); 'device count: ' + audioDeviceCount);
@ -2348,11 +2346,11 @@ export default {
// active which could be either screensharing stream or a video track // active which could be either screensharing stream or a video track
// created before the permissions were rejected (through browser // created before the permissions were rejected (through browser
// config). // config).
const available = videoDeviceCount > 0 || Boolean(localVideo); const available = videoDeviceCount > 0 || Boolean(this.localVideo);
logger.debug( logger.debug(
'Camera button enabled: ' + available, 'Camera button enabled: ' + available,
'local video: ' + localVideo, 'local video: ' + this.localVideo,
'video devices: ' + videoMediaDevices, 'video devices: ' + videoMediaDevices,
'device count: ' + videoDeviceCount); 'device count: ' + videoDeviceCount);
@ -2393,10 +2391,10 @@ export default {
* NOTE: Should be used after conference.init * NOTE: Should be used after conference.init
*/ */
logEvent(name, value, label) { logEvent(name, value, label) {
if(JitsiMeetJS.analytics) { if (JitsiMeetJS.analytics) {
JitsiMeetJS.analytics.sendEvent(name, {value, label}); JitsiMeetJS.analytics.sendEvent(name, {value, label});
} }
if(room) { if (room) {
room.sendApplicationLog(JSON.stringify({name, value, label})); room.sendApplicationLog(JSON.stringify({name, value, label}));
} }
}, },
@ -2553,7 +2551,7 @@ export default {
* track or the source id is not available, undefined will be returned. * track or the source id is not available, undefined will be returned.
*/ */
getDesktopSharingSourceId() { getDesktopSharingSourceId() {
return localVideo.sourceId; return this.localVideo.sourceId;
}, },
/** /**
@ -2565,7 +2563,7 @@ export default {
* returned. * returned.
*/ */
getDesktopSharingSourceType() { getDesktopSharingSourceType() {
return localVideo.sourceType; return this.localVideo.sourceType;
}, },
/** /**
@ -2574,10 +2572,8 @@ export default {
* @param {boolean} muted - New muted status. * @param {boolean} muted - New muted status.
*/ */
setVideoMuteStatus(muted) { setVideoMuteStatus(muted) {
if (this.videoMuted !== muted) { APP.UI.setVideoMuted(this.getMyUserId(), muted);
this.videoMuted = muted; APP.API.notifyVideoMutedStatusChanged(muted);
APP.API.notifyVideoMutedStatusChanged(muted);
}
}, },
/** /**
@ -2586,9 +2582,7 @@ export default {
* @param {boolean} muted - New muted status. * @param {boolean} muted - New muted status.
*/ */
setAudioMuteStatus(muted) { setAudioMuteStatus(muted) {
if (this.audioMuted !== muted) { APP.UI.setAudioMuted(this.getMyUserId(), muted);
this.audioMuted = muted; APP.API.notifyAudioMutedStatusChanged(muted);
APP.API.notifyAudioMutedStatusChanged(muted); }
}
},
}; };

View File

@ -24,7 +24,6 @@ import Settings from "./../settings/Settings";
import { debounce } from "../util/helpers"; import { debounce } from "../util/helpers";
import { updateDeviceList } from '../../react/features/base/devices'; import { updateDeviceList } from '../../react/features/base/devices';
import { setAudioMuted, setVideoMuted } from '../../react/features/base/media';
import { import {
openDeviceSelectionDialog openDeviceSelectionDialog
} from '../../react/features/device-selection'; } from '../../react/features/device-selection';
@ -669,7 +668,6 @@ UI.askForNickname = function () {
UI.setAudioMuted = function (id, muted) { UI.setAudioMuted = function (id, muted) {
VideoLayout.onAudioMute(id, muted); VideoLayout.onAudioMute(id, muted);
if (APP.conference.isLocalId(id)) { if (APP.conference.isLocalId(id)) {
APP.store.dispatch(setAudioMuted(muted));
APP.conference.updateAudioIconEnabled(); APP.conference.updateAudioIconEnabled();
} }
}; };
@ -680,7 +678,6 @@ UI.setAudioMuted = function (id, muted) {
UI.setVideoMuted = function (id, muted) { UI.setVideoMuted = function (id, muted) {
VideoLayout.onVideoMute(id, muted); VideoLayout.onVideoMute(id, muted);
if (APP.conference.isLocalId(id)) { if (APP.conference.isLocalId(id)) {
APP.store.dispatch(setVideoMuted(muted));
APP.conference.updateVideoIconEnabled(); APP.conference.updateVideoIconEnabled();
} }
}; };

View File

@ -1,3 +1,5 @@
import { VIDEO_MUTISM_AUTHORITY } from './constants';
/** /**
* Determines whether a specific videoTrack should be rendered. * Determines whether a specific videoTrack should be rendered.
* *
@ -14,3 +16,15 @@ export function shouldRenderVideoTrack(videoTrack, waitForVideoStarted) {
&& !videoTrack.muted && !videoTrack.muted
&& (!waitForVideoStarted || videoTrack.videoStarted)); && (!waitForVideoStarted || videoTrack.videoStarted));
} }
/**
* Checks if video is currently muted by the user authority.
*
* @param {Object} store - The redux store instance.
* @returns {boolean}
*/
export function isVideoMutedByUser({ getState }) {
return Boolean(
getState()['features/base/media'] // eslint-disable-line no-bitwise
.video.muted & VIDEO_MUTISM_AUTHORITY.USER);
}

View File

@ -93,7 +93,7 @@ function _setRoom({ dispatch, getState }, next, action) {
* @private * @private
* @returns {void} * @returns {void}
*/ */
function _syncTrackMutedState({ dispatch, getState }, track) { function _syncTrackMutedState({ getState }, track) {
const state = getState()['features/base/media']; const state = getState()['features/base/media'];
const muted = Boolean(state[track.mediaType].muted); const muted = Boolean(state[track.mediaType].muted);
@ -104,6 +104,6 @@ function _syncTrackMutedState({ dispatch, getState }, track) {
// fired before track gets to state. // fired before track gets to state.
if (track.muted !== muted) { if (track.muted !== muted) {
track.muted = muted; track.muted = muted;
dispatch(setTrackMuted(track.jitsiTrack, muted)); setTrackMuted(track.jitsiTrack, muted);
} }
} }

View File

@ -348,53 +348,6 @@ function _getLocalTracksToChange(currentTracks, newTracks) {
}; };
} }
/**
* Mutes or unmutes a specific <tt>JitsiLocalTrack</tt>. If the muted state of
* the specified <tt>track</tt> is already in accord with the specified
* <tt>muted</tt> value, then does nothing. In case the actual muting/unmuting
* fails, a rollback action will be dispatched to undo the muting/unmuting.
*
* @param {JitsiLocalTrack} track - The <tt>JitsiLocalTrack</tt> to mute or
* unmute.
* @param {boolean} muted - If the specified <tt>track</tt> is to be muted, then
* <tt>true</tt>; otherwise, <tt>false</tt>.
* @returns {Function}
*/
export function setTrackMuted(track, muted) {
return dispatch => {
muted = Boolean(muted); // eslint-disable-line no-param-reassign
if (track.isMuted() === muted) {
return Promise.resolve();
}
const f = muted ? 'mute' : 'unmute';
return track[f]().catch(error => {
console.error(`set track ${f} failed`, error);
if (navigator.product === 'ReactNative') {
// Synchronizing the state of base/tracks into the state of
// base/media is not required in React (and, respectively, React
// Native) because base/media expresses the app's and the user's
// desires/expectations/intents and base/tracks expresses
// practice/reality. Unfortunately, the old Web does not comply
// and/or does the opposite.
return;
}
const setMuted
= track.mediaType === MEDIA_TYPE.AUDIO
? setAudioMuted
: setVideoMuted;
// FIXME The following disregards VIDEO_MUTISM_AUTHORITY (in the
// case of setVideoMuted, of course).
dispatch(setMuted(!muted));
});
};
}
/** /**
* Returns true if the provided JitsiTrack should be rendered as a mirror. * Returns true if the provided JitsiTrack should be rendered as a mirror.
* *

View File

@ -155,3 +155,45 @@ export function getTrackByJitsiTrack(tracks, jitsiTrack) {
export function getTracksByMediaType(tracks, mediaType) { export function getTracksByMediaType(tracks, mediaType) {
return tracks.filter(t => t.mediaType === mediaType); return tracks.filter(t => t.mediaType === mediaType);
} }
/**
* Checks if the first local track in the given tracks set is muted.
*
* @param {Track[]} tracks - List of all tracks.
* @param {MEDIA_TYPE} mediaType - The media type of tracks to be checked.
* @returns {boolean} True if local track is muted or false if the track is
* unmuted or if there are no local tracks of the given media type in the given
* set of tracks.
*/
export function isLocalTrackMuted(tracks, mediaType) {
const track = getLocalTrack(tracks, mediaType);
return !track || track.muted;
}
/**
* Mutes or unmutes a specific <tt>JitsiLocalTrack</tt>. If the muted state of
* the specified <tt>track</tt> is already in accord with the specified
* <tt>muted</tt> value, then does nothing.
*
* @param {JitsiLocalTrack} track - The <tt>JitsiLocalTrack</tt> to mute or
* unmute.
* @param {boolean} muted - If the specified <tt>track</tt> is to be muted, then
* <tt>true</tt>; otherwise, <tt>false</tt>.
* @returns {Promise}
*/
export function setTrackMuted(track, muted) {
muted = Boolean(muted); // eslint-disable-line no-param-reassign
if (track.isMuted() === muted) {
return Promise.resolve();
}
const f = muted ? 'mute' : 'unmute';
return track[f]().catch(error => {
// FIXME emit mute failed, so that the app can show error dialog
console.error(`set track ${f} failed`, error);
});
}

View File

@ -6,16 +6,13 @@ import {
SET_AUDIO_MUTED, SET_AUDIO_MUTED,
SET_CAMERA_FACING_MODE, SET_CAMERA_FACING_MODE,
SET_VIDEO_MUTED, SET_VIDEO_MUTED,
setAudioMuted,
setVideoMuted,
TOGGLE_CAMERA_FACING_MODE, TOGGLE_CAMERA_FACING_MODE,
toggleCameraFacingMode toggleCameraFacingMode
} from '../media'; } from '../media';
import { MiddlewareRegistry } from '../redux'; import { MiddlewareRegistry } from '../redux';
import { setTrackMuted } from './actions';
import { TRACK_ADDED, TRACK_REMOVED, TRACK_UPDATED } from './actionTypes'; import { TRACK_ADDED, TRACK_REMOVED, TRACK_UPDATED } from './actionTypes';
import { getLocalTrack } from './functions'; import { getLocalTrack, setTrackMuted } from './functions';
declare var APP: Object; declare var APP: Object;
@ -108,30 +105,20 @@ MiddlewareRegistry.register(store => next => action => {
const participantID = jitsiTrack.getParticipantId(); const participantID = jitsiTrack.getParticipantId();
const isVideoTrack = jitsiTrack.isVideoTrack(); const isVideoTrack = jitsiTrack.isVideoTrack();
if (jitsiTrack.isLocal()) { if (isVideoTrack) {
if (isVideoTrack) { if (jitsiTrack.isLocal()) {
APP.conference.setVideoMuteStatus(muted); APP.conference.setVideoMuteStatus(muted);
} else { } else {
APP.conference.setAudioMuteStatus(muted); APP.UI.setVideoMuted(participantID, muted);
} }
}
if (isVideoTrack) {
APP.UI.setVideoMuted(participantID, muted);
APP.UI.onPeerVideoTypeChanged( APP.UI.onPeerVideoTypeChanged(
participantID, participantID,
jitsiTrack.videoType); jitsiTrack.videoType);
} else if (jitsiTrack.isLocal()) {
APP.conference.setAudioMuteStatus(muted);
} else { } else {
APP.UI.setAudioMuted(participantID, muted); APP.UI.setAudioMuted(participantID, muted);
} }
// XXX The following synchronizes the state of base/tracks into the
// state of base/media. Which is not required in React (and,
// respectively, React Native) because base/media expresses the
// app's and the user's desires/expectations/intents and base/tracks
// expresses practice/reality. Unfortunately, the old Web does not
// comply and/or does the opposite. Hence, the following:
return _trackUpdated(store, next, action);
} }
} }
@ -169,66 +156,5 @@ function _getLocalTrack({ getState }, mediaType: MEDIA_TYPE) {
function _setMuted(store, { muted }, mediaType: MEDIA_TYPE) { function _setMuted(store, { muted }, mediaType: MEDIA_TYPE) {
const localTrack = _getLocalTrack(store, mediaType); const localTrack = _getLocalTrack(store, mediaType);
localTrack && store.dispatch(setTrackMuted(localTrack.jitsiTrack, muted)); localTrack && setTrackMuted(localTrack.jitsiTrack, muted);
}
/**
* Intercepts the action <tt>TRACK_UPDATED</tt> in order to synchronize the
* muted states of the local tracks of features/base/tracks with the muted
* states of features/base/media.
*
* @param {Store} store - The redux store in which the specified <tt>action</tt>
* is being dispatched.
* @param {Dispatch} next - The redux dispatch function to dispatch the
* specified <tt>action</tt> to the specified <tt>store</tt>.
* @param {Action} action - The redux action <tt>TRACK_UPDATED</tt> which is
* being dispatched in the specified <tt>store</tt>.
* @private
* @returns {Object} The new state that is the result of the reduction of the
* specified <tt>action</tt>.
*/
function _trackUpdated(store, next, action) {
// Determine the muted state of the local track before the update.
const track = action.track;
let mediaType;
let oldMuted;
if ('muted' in track) {
// XXX The return value of JitsiTrack.getType() is of type MEDIA_TYPE
// that happens to be compatible with the type MEDIA_TYPE defined by
// jitsi-meet.
mediaType = track.jitsiTrack.getType();
const localTrack = _getLocalTrack(store, mediaType);
if (localTrack) {
oldMuted = localTrack.muted;
}
}
const result = next(action);
if (typeof oldMuted !== 'undefined') {
// Determine the muted state of the local track after the update. If the
// muted states before and after the update differ, then the respective
// media state should by synchronized.
const localTrack = _getLocalTrack(store, mediaType);
if (localTrack) {
const newMuted = localTrack.muted;
if (oldMuted !== newMuted) {
switch (mediaType) {
case MEDIA_TYPE.AUDIO:
store.dispatch(setAudioMuted(newMuted));
break;
case MEDIA_TYPE.VIDEO:
store.dispatch(setVideoMuted(newMuted));
break;
}
}
}
}
return result;
} }

View File

@ -40,13 +40,14 @@ const buttons: Object = {
isDisplayed: () => true, isDisplayed: () => true,
id: 'toolbar_button_camera', id: 'toolbar_button_camera',
onClick() { onClick() {
if (APP.conference.videoMuted) { const newVideoMutedState = !APP.conference.isLocalVideoMuted();
if (newVideoMutedState) {
JitsiMeetJS.analytics.sendEvent('toolbar.video.enabled'); JitsiMeetJS.analytics.sendEvent('toolbar.video.enabled');
APP.UI.emitEvent(UIEvents.VIDEO_MUTED, false);
} else { } else {
JitsiMeetJS.analytics.sendEvent('toolbar.video.disabled'); JitsiMeetJS.analytics.sendEvent('toolbar.video.disabled');
APP.UI.emitEvent(UIEvents.VIDEO_MUTED, true);
} }
APP.UI.emitEvent(UIEvents.VIDEO_MUTED, newVideoMutedState);
}, },
popups: [ popups: [
{ {
@ -290,7 +291,7 @@ const buttons: Object = {
onClick() { onClick() {
const sharedVideoManager = APP.UI.getSharedVideoManager(); const sharedVideoManager = APP.UI.getSharedVideoManager();
if (APP.conference.audioMuted) { if (APP.conference.isLocalAudioMuted()) {
// If there's a shared video with the volume "on" and we aren't // If there's a shared video with the volume "on" and we aren't
// the video owner, we warn the user // the video owner, we warn the user
// that currently it's not possible to unmute. // that currently it's not possible to unmute.

View File

@ -3,7 +3,8 @@
import type { Dispatch } from 'redux'; import type { Dispatch } from 'redux';
import { appNavigate } from '../app'; import { appNavigate } from '../app';
import { getLocalAudioTrack, getLocalVideoTrack } from '../base/tracks'; import { MEDIA_TYPE } from '../base/media';
import { isLocalTrackMuted } from '../base/tracks';
/** /**
* Maps redux actions to {@link Toolbox} (React {@code Component}) props. * Maps redux actions to {@link Toolbox} (React {@code Component}) props.
@ -58,9 +59,6 @@ export function abstractMapStateToProps(state: Object): Object {
const tracks = state['features/base/tracks']; const tracks = state['features/base/tracks'];
const { visible } = state['features/toolbox']; const { visible } = state['features/toolbox'];
const audioTrack = getLocalAudioTrack(tracks);
const videoTrack = getLocalVideoTrack(tracks);
return { return {
/** /**
* Flag showing whether audio is muted. * Flag showing whether audio is muted.
@ -68,7 +66,7 @@ export function abstractMapStateToProps(state: Object): Object {
* @protected * @protected
* @type {boolean} * @type {boolean}
*/ */
_audioMuted: !audioTrack || audioTrack.muted, _audioMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO),
/** /**
* Flag showing whether video is muted. * Flag showing whether video is muted.
@ -76,7 +74,7 @@ export function abstractMapStateToProps(state: Object): Object {
* @protected * @protected
* @type {boolean} * @type {boolean}
*/ */
_videoMuted: !videoTrack || videoTrack.muted, _videoMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO),
/** /**
* Flag showing whether toolbox is visible. * Flag showing whether toolbox is visible.

View File

@ -1,11 +1,11 @@
/* @flow */ /* @flow */
import { import {
MEDIA_TYPE,
SET_AUDIO_AVAILABLE, SET_AUDIO_AVAILABLE,
SET_AUDIO_MUTED, SET_VIDEO_AVAILABLE } from '../base/media';
SET_VIDEO_AVAILABLE,
SET_VIDEO_MUTED } from '../base/media';
import { MiddlewareRegistry } from '../base/redux'; import { MiddlewareRegistry } from '../base/redux';
import { isLocalTrackMuted, TRACK_UPDATED } from '../base/tracks';
import { setToolbarButton } from './actions'; import { setToolbarButton } from './actions';
import { CLEAR_TOOLBOX_TIMEOUT, SET_TOOLBOX_TIMEOUT } from './actionTypes'; import { CLEAR_TOOLBOX_TIMEOUT, SET_TOOLBOX_TIMEOUT } from './actionTypes';
@ -37,66 +37,84 @@ MiddlewareRegistry.register(store => next => action => {
break; break;
} }
case SET_AUDIO_AVAILABLE: case SET_AUDIO_AVAILABLE: {
case SET_AUDIO_MUTED: { return _setMediaAvailableOrMuted(store, next, action);
return _setAudioAvailableOrMuted(store, next, action); }
case SET_VIDEO_AVAILABLE: {
return _setMediaAvailableOrMuted(store, next, action);
}
case TRACK_UPDATED: {
if (action.track.jitsiTrack.isLocal()) {
return _setMediaAvailableOrMuted(store, next, action);
}
break;
} }
case SET_VIDEO_AVAILABLE:
case SET_VIDEO_MUTED:
return _setVideoAvailableOrMuted(store, next, action);
} }
return next(action); return next(action);
}); });
/** /**
* Adjusts the state of toolbar's microphone button. * Adjusts the state of toolbar's microphone or camera button.
* *
* @param {Store} store - The Redux store instance. * @param {Store} store - The Redux store instance.
* @param {Function} next - The redux function to continue dispatching the * @param {Function} next - The redux function to continue dispatching the
* specified {@code action} in the specified {@code store}. * specified {@code action} in the specified {@code store}.
* @param {Object} action - Either SET_AUDIO_AVAILABLE or SET_AUDIO_MUTED. * @param {Object} action - SET_AUDIO_AVAILABLE, SET_VIDEO_AVAILABLE or
* TRACK_UPDATED.
* *
* @returns {*} * @returns {*}
*/ */
function _setAudioAvailableOrMuted({ dispatch, getState }, next, action) { function _setMediaAvailableOrMuted({ dispatch, getState }, next, action) {
const result = next(action); const result = next(action);
const { available, muted } = getState()['features/base/media'].audio; let mediaType;
const i18nKey = available ? 'mute' : 'micDisabled';
dispatch(setToolbarButton('microphone', { switch (action.type) {
enabled: available, case SET_AUDIO_AVAILABLE: {
i18n: `[content]toolbar.${i18nKey}`, mediaType = MEDIA_TYPE.AUDIO;
toggled: available ? muted : true break;
})); }
return result; case SET_VIDEO_AVAILABLE: {
} mediaType = MEDIA_TYPE.VIDEO;
break;
/** }
* Adjusts the state of toolbar's camera button.
* case TRACK_UPDATED: {
* @param {Store} store - The redux store. mediaType
* @param {Function} next - The redux function to continue dispatching the = action.track.jitsiTrack.isAudioTrack()
* specified {@code action} in the specified {@code store}. ? MEDIA_TYPE.AUDIO : MEDIA_TYPE.VIDEO;
* @param {Object} action - Either {@link SET_VIDEO_AVAILABLE} or break;
* {@link SET_VIDEO_MUTED}. }
* @returns {Object} The new state that is the result of the reduction of the
* specified {@code action}. default: {
*/ throw new Error(`Unsupported action ${action}`);
function _setVideoAvailableOrMuted({ dispatch, getState }, next, action) { }
const result = next(action);
}
const { available, muted } = getState()['features/base/media'].video;
const i18nKey = available ? 'videomute' : 'cameraDisabled'; const mediaState = getState()['features/base/media'];
const { available }
dispatch(setToolbarButton('camera', { = mediaType === MEDIA_TYPE.AUDIO
enabled: available, ? mediaState.audio : mediaState.video;
i18n: `[content]toolbar.${i18nKey}`, const i18nKey
toggled: available ? muted : true = mediaType === MEDIA_TYPE.AUDIO
})); ? available ? 'mute' : 'micDisabled'
: available ? 'videomute' : 'cameraDisabled';
const tracks = getState()['features/base/tracks'];
const muted = isLocalTrackMuted(tracks, mediaType);
dispatch(setToolbarButton(
mediaType === MEDIA_TYPE.AUDIO ? 'microphone' : 'camera', {
enabled: available,
i18n: `[content]toolbar.${i18nKey}`,
toggled: available ? muted : true
}));
return result; return result;
} }