feat(mute): Add analytics and console logs for audio/video mutes
parent df1437f018
commit 0eddef4d62

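Most changes in this commit follow the same pattern: right before a code path mutes or unmutes local audio/video (or toggles audio-only), it reports an analytics event and writes a console log line, and only then performs the action. A minimal sketch of that pattern, with stand-in helpers in place of the real `sendEvent` from react/features/analytics and the jitsi-meet-logger instance, and a hypothetical `track` argument:

    // Stand-ins for the helpers used throughout the diff below; the real code
    // imports sendEvent from react/features/analytics and creates the logger
    // via require('jitsi-meet-logger').getLogger(__filename).
    const sendEvent = name => console.log('analytics:', name);
    const logger = { log: (...args) => console.log(...args) };

    // Hypothetical call site; `track` stands in for a local JitsiTrack.
    function muteInitially(track) {
        sendEvent('conference.audio.initiallyMuted'); // 1. analytics breadcrumb
        logger.log('Audio mute: initially muted');    // 2. console breadcrumb
        track.mute();                                 // 3. the actual mute
    }
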
@@ -674,9 +674,13 @@ export default {
         }).then(([tracks, con]) => {
             tracks.forEach(track => {
                 if (track.isAudioTrack() && this.isLocalAudioMuted()) {
+                    sendEvent('conference.audio.initiallyMuted');
+                    logger.log('Audio mute: initially muted');
                     track.mute();
                 } else if (track.isVideoTrack()
                         && this.isLocalVideoMuted()) {
+                    sendEvent('conference.video.initiallyMuted');
+                    logger.log('Video mute: initially muted');
                     track.mute();
                 }
             });

@@ -2260,6 +2264,8 @@ export default {
                 if (audioWasMuted ||
                         currentDevices.audioinput.length >
                             availableAudioInputDevices.length) {
+                    sendEvent('deviceListChanged.audio.muted');
+                    logger.log('Audio mute: device list changed');
                     muteLocalAudio(true);
                 }

@@ -2269,6 +2275,8 @@ export default {
                     (videoWasMuted ||
                         currentDevices.videoinput.length >
                             availableVideoInputDevices.length)) {
+                    sendEvent('deviceListChanged.video.muted');
+                    logger.log('Video mute: device list changed');
                     muteLocalVideo(true);
                 }
             }));

@@ -2,10 +2,13 @@

 import * as JitsiMeetConferenceEvents from '../../ConferenceEvents';
 import { parseJWTFromURLParams } from '../../react/features/base/jwt';
+import { sendEvent } from '../../react/features/analytics';
 import { getJitsiMeetTransport } from '../transport';

 import { API_ID } from './constants';

+const logger = require('jitsi-meet-logger').getLogger(__filename);
+
 declare var APP: Object;

 /**

@@ -52,9 +55,13 @@ function initCommands() {
         'display-name':
             APP.conference.changeLocalDisplayName.bind(APP.conference),
         'toggle-audio': () => {
+            sendEvent('api.toggle.audio');
+            logger.log('Audio toggle: API command received');
             APP.conference.toggleAudioMuted(false /* no UI */);
         },
         'toggle-video': () => {
+            sendEvent('api.toggle.video');
+            logger.log('Video toggle: API command received');
             APP.conference.toggleVideoMuted(false /* no UI */);
         },
         'toggle-film-strip': APP.UI.toggleFilmstrip,

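In the commands map above, the mute-related external API commands now record an analytics event and a log line before delegating to the conference object. A reduced, runnable sketch of that map shape, with stub objects standing in for `APP.conference` and the analytics/log helpers (the stubs are assumptions, not the real modules):

    // Stubs standing in for APP.conference, sendEvent and the logger.
    const sendEvent = name => console.log('analytics:', name);
    const logger = { log: console.log };
    const conference = {
        toggleAudioMuted: showUI => console.log('toggle audio, showUI =', showUI)
    };

    const commands = {
        'toggle-audio': () => {
            sendEvent('api.toggle.audio');
            logger.log('Audio toggle: API command received');
            conference.toggleAudioMuted(false /* no UI */);
        }
    };

    commands['toggle-audio']();
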
@@ -533,7 +533,8 @@ export default class SharedVideoManager {
         if (APP.conference.isLocalAudioMuted()
                 && !this.mutedWithUserInteraction
                 && !this.isSharedVideoVolumeOn()) {
+            sendEvent("sharedvideo.audio.unmuted");
+            logger.log('Shared video: audio unmuted');
             this.emitter.emit(UIEvents.AUDIO_MUTED, false, false);
             this.showMicMutedPopup(false);
         }

@@ -546,7 +547,8 @@ export default class SharedVideoManager {
     smartAudioMute() {
         if (!APP.conference.isLocalAudioMuted()
                 && this.isSharedVideoVolumeOn()) {
+            sendEvent("sharedvideo.audio.muted");
+            logger.log('Shared video: audio muted');
             this.emitter.emit(UIEvents.AUDIO_MUTED, true, false);
             this.showMicMutedPopup(true);
         }

@@ -4,6 +4,8 @@ import { toggleDialog } from '../../react/features/base/dialog';
 import { sendEvent } from '../../react/features/analytics';
 import { SpeakerStats } from '../../react/features/speaker-stats';

+const logger = require('jitsi-meet-logger').getLogger(__filename);
+
 /**
  * The reference to the shortcut dialogs when opened.
  */

@@ -28,6 +30,7 @@ function initGlobalShortcuts() {
     // register SPACE shortcut in two steps to insure visibility of help message
     KeyboardShortcut.registerShortcut(" ", null, function() {
         sendEvent("shortcut.talk.clicked");
+        logger.log('Talk shortcut pressed');
         APP.conference.muteAudio(true);
     });
     KeyboardShortcut._addShortcutToHelp("SPACE","keyboardShortcuts.pushToTalk");

@@ -119,10 +122,13 @@ const KeyboardShortcut = {
                     $(":focus").is("textarea"))) {
                 var key = self._getKeyboardKey(e).toUpperCase();
                 if(key === " ") {
-                    if(APP.conference.isLocalAudioMuted())
+                    if(APP.conference.isLocalAudioMuted()) {
+                        sendEvent("shortcut.talk.released");
+                        logger.log('Talk shortcut released');
                         APP.conference.muteAudio(false);
+                    }
                 }
             }
         };
     },

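The last hunk above guards the push-to-talk release path: the microphone is unmuted only when local audio is currently muted, and the new analytics event and log line sit inside that same guard. A condensed, runnable sketch of the guard, with a stub standing in for `APP.conference` and `console.log` in place of the real analytics/log helpers:

    // Stub standing in for APP.conference; not the real object.
    const conference = {
        muted: true,
        isLocalAudioMuted() { return this.muted; },
        muteAudio(mute) { this.muted = mute; }
    };

    // Release path of the SPACE (push-to-talk) shortcut:
    if (conference.isLocalAudioMuted()) {
        console.log('analytics: shortcut.talk.released');
        console.log('Talk shortcut released');
        conference.muteAudio(false);
    }
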
@@ -1,5 +1,6 @@
 // @flow

+import { sendEvent } from '../../analytics';
 import { JitsiConferenceEvents } from '../lib-jitsi-meet';
 import { setAudioMuted, setVideoMuted } from '../media';
 import {

@@ -39,6 +40,8 @@ import { _addLocalTracksToConference } from './functions';

 import type { Dispatch } from 'redux';

+const logger = require('jitsi-meet-logger').getLogger(__filename);
+
 /**
  * Adds conference (event) listeners.
  *

@@ -69,6 +72,16 @@ function _addConferenceListeners(conference, dispatch) {
     conference.on(
         JitsiConferenceEvents.STARTED_MUTED,
         () => {
+            const audioMuted = Boolean(conference.startAudioMuted);
+            const videoMuted = Boolean(conference.startVideoMuted);
+
+            sendEvent(
+                `startmuted.server.audio.${audioMuted ? 'muted' : 'unmuted'}`);
+            sendEvent(
+                `startmuted.server.video.${videoMuted ? 'muted' : 'unmuted'}`);
+            logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${
+                videoMuted ? 'video' : ''}`);
+
             // XXX Jicofo tells lib-jitsi-meet to start with audio and/or video
             // muted i.e. Jicofo expresses an intent. Lib-jitsi-meet has turned
             // Jicofo's intent into reality by actually muting the respective

@@ -77,8 +90,8 @@ function _addConferenceListeners(conference, dispatch) {
             // TODO Maybe the app needs to learn about Jicofo's intent and
             // transfer that intent to lib-jitsi-meet instead of lib-jitsi-meet
             // acting on Jicofo's intent without the app's knowledge.
-            dispatch(setAudioMuted(Boolean(conference.startAudioMuted)));
-            dispatch(setVideoMuted(Boolean(conference.startVideoMuted)));
+            dispatch(setAudioMuted(audioMuted));
+            dispatch(setVideoMuted(videoMuted));
         });

     // Dispatches into features/base/tracks follow:

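In the STARTED_MUTED listener above, Jicofo's start-muted hints are coerced to booleans once and then reused for the analytics event names, the log line, and the two dispatches, so all of them agree. For reference, the log template produces output like the following (a sketch with assumed flag values standing in for `Boolean(conference.startAudioMuted)` and `Boolean(conference.startVideoMuted)`):

    // Assumed values for the two flags.
    const audioMuted = true;
    const videoMuted = true;

    console.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${videoMuted ? 'video' : ''}`);
    // -> "Start muted: audio, video"
    // With only audio muted it prints "Start muted: audio, "; with neither, "Start muted: ".
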
|
@ -35,6 +35,8 @@ import {
|
||||||
_removeLocalTracksFromConference
|
_removeLocalTracksFromConference
|
||||||
} from './functions';
|
} from './functions';
|
||||||
|
|
||||||
|
const logger = require('jitsi-meet-logger').getLogger(__filename);
|
||||||
|
|
||||||
declare var APP: Object;
|
declare var APP: Object;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -121,8 +123,11 @@ function _connectionEstablished(store, next, action) {
|
||||||
function _conferenceFailedOrLeft({ dispatch, getState }, next, action) {
|
function _conferenceFailedOrLeft({ dispatch, getState }, next, action) {
|
||||||
const result = next(action);
|
const result = next(action);
|
||||||
|
|
||||||
getState()['features/base/conference'].audioOnly
|
if (getState()['features/base/conference'].audioOnly) {
|
||||||
&& dispatch(setAudioOnly(false));
|
sendEvent('audioonly.disabled');
|
||||||
|
logger.log('Audio only disabled');
|
||||||
|
dispatch(setAudioOnly(false));
|
||||||
|
}
|
||||||
|
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
|
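The audio-only reset above changes from a short-circuit `&&` expression into an `if` block because three statements (analytics event, log line, dispatch) now run under the same condition. A small before/after sketch, with stand-ins for the redux pieces (the action shape shown is hypothetical):

    // Stand-ins for the redux state flag, dispatch and the setAudioOnly action creator.
    const audioOnly = true;
    const dispatch = action => console.log('dispatch:', action);
    const setAudioOnly = value => ({ type: 'SET_AUDIO_ONLY', audioOnly: value });

    // Before: a single call could ride on the && short-circuit.
    audioOnly && dispatch(setAudioOnly(false));

    // After: multiple statements need a real block.
    if (audioOnly) {
        console.log('analytics: audioonly.disabled');
        console.log('Audio only disabled');
        dispatch(setAudioOnly(false));
    }
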
@@ -1,5 +1,6 @@
 /* @flow */

+import { sendEvent } from '../../analytics';
 import { SET_ROOM, setAudioOnly } from '../conference';
 import { parseURLParams } from '../config';
 import { MiddlewareRegistry } from '../redux';

@@ -8,6 +9,8 @@ import { setTrackMuted, TRACK_ADDED } from '../tracks';
 import { setAudioMuted, setCameraFacingMode, setVideoMuted } from './actions';
 import { CAMERA_FACING_MODE } from './constants';

+const logger = require('jitsi-meet-logger').getLogger(__filename);
+
 /**
  * Implements the entry point of the middleware of the feature base/media.
  *

@@ -77,14 +80,23 @@ function _setRoom({ dispatch, getState }, next, action) {
     typeof videoMuted === 'undefined'
         && (videoMuted = config.startWithVideoMuted);

+    audioMuted = Boolean(audioMuted);
+    videoMuted = Boolean(videoMuted);
+
     // Apply the config.

+    sendEvent(`startmuted.client.audio.${audioMuted ? 'muted' : 'unmuted'}`);
+    sendEvent(`startmuted.client.video.${videoMuted ? 'muted' : 'unmuted'}`);
+
+    logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${
+        videoMuted ? 'video' : ''}`);
+
     // Unconditionally express the desires/expectations/intents of the app and
     // the user i.e. the state of base/media. Eventually, practice/reality i.e.
     // the state of base/tracks will or will not agree with the desires.
-    dispatch(setAudioMuted(Boolean(audioMuted)));
+    dispatch(setAudioMuted(audioMuted));
     dispatch(setCameraFacingMode(CAMERA_FACING_MODE.USER));
-    dispatch(setVideoMuted(Boolean(videoMuted)));
+    dispatch(setVideoMuted(videoMuted));

     // config.startAudioOnly
     //

@@ -97,7 +109,10 @@ function _setRoom({ dispatch, getState }, next, action) {
         let audioOnly = urlParams && urlParams['config.startAudioOnly'];

         typeof audioOnly === 'undefined' && (audioOnly = config.startAudioOnly);
-        dispatch(setAudioOnly(Boolean(audioOnly)));
+        audioOnly = Boolean(audioOnly);
+        sendEvent(`startaudioonly.${audioOnly ? 'enabled' : 'disabled'}`);
+        logger.log(`Start audio only set to ${audioOnly.toString()}`);
+        dispatch(setAudioOnly(audioOnly));
     }

     return next(action);

@@ -121,6 +136,10 @@ function _syncTrackMutedState({ getState }, track) {
     // not yet in redux state and JitsiTrackEvents.TRACK_MUTE_CHANGED may be
     // fired before track gets to state.
     if (track.muted !== muted) {
+        sendEvent(
+            `synctrackstate.${track.mediaType}.${muted ? 'muted' : 'unmuted'}`);
+        logger.log(`Sync ${track.mediaType} track muted state to ${
+            muted ? 'muted' : 'unmuted'}`);
         track.muted = muted;
         setTrackMuted(track.jitsiTrack, muted);
     }

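_syncTrackMutedState above only acts when a track's muted flag disagrees with the base/media state, and it derives the analytics event name from the track's media type. A small sketch of the resulting names, with a plain object standing in for the redux track and a console stub in place of the real sendEvent:

    // Stand-ins: a hypothetical redux track and the desired state from base/media.
    const sendEvent = name => console.log('analytics:', name);
    const track = { mediaType: 'video', muted: false };
    const muted = true;

    if (track.muted !== muted) {
        sendEvent(`synctrackstate.${track.mediaType}.${muted ? 'muted' : 'unmuted'}`);
        // -> 'synctrackstate.video.muted'
        track.muted = muted;
    }
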
@@ -1,3 +1,4 @@
+import { sendEvent } from '../../analytics';
 import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
 import {
     CAMERA_FACING_MODE,

@@ -15,6 +16,8 @@ import {
 } from './actionTypes';
 import { createLocalTracksF } from './functions';

+const logger = require('jitsi-meet-logger').getLogger(__filename);
+
 /**
  * Requests the creating of the desired media type tracks. Desire is expressed
  * by base/media unless the function caller specifies desired media types

@@ -154,8 +157,14 @@ export function replaceLocalTrack(oldTrack, newTrack, conference) {
                         = newTrack.isVideoTrack()
                             ? setVideoMuted
                             : setAudioMuted;
+                    const isMuted = newTrack.isMuted();

-                    return dispatch(setMuted(newTrack.isMuted()));
+                    sendEvent(`replacetrack.${newTrack.getType()}.${
+                        isMuted ? 'muted' : 'unmuted'}`);
+                    logger.log(`Replace ${newTrack.getType()} track - ${
+                        isMuted ? 'muted' : 'unmuted'}`);
+
+                    return dispatch(setMuted(isMuted));
                 }
             })
             .then(() => {

@@ -1,5 +1,6 @@
 /* @flow */

+import { sendEvent } from '../../analytics';
 import { setLastN } from '../../base/conference';
 import { setVideoMuted, VIDEO_MUTISM_AUTHORITY } from '../../base/media';

@@ -41,6 +42,9 @@ export function _setBackgroundVideoMuted(muted: boolean) {
         const { audioOnly } = getState()['features/base/conference'];

         audioOnly || dispatch(setLastN(muted ? 0 : undefined));
+
+        sendEvent('callkit.background.video.muted');
+
         dispatch(setVideoMuted(muted, VIDEO_MUTISM_AUTHORITY.BACKGROUND));
     };
 }

@@ -3,6 +3,7 @@
 import { NativeModules } from 'react-native';
 import uuid from 'uuid';

+import { sendEvent } from '../../analytics';
 import { APP_WILL_MOUNT, APP_WILL_UNMOUNT, appNavigate } from '../../app';
 import {
     CONFERENCE_FAILED,

@@ -268,7 +269,10 @@ function _onPerformSetMutedCallAction({ callUUID, muted: newValue }) {
         const { muted: oldValue } = getState()['features/base/media'].audio;

         if (oldValue !== newValue) {
-            dispatch(setAudioMuted(Boolean(newValue)));
+            const value = Boolean(newValue);
+
+            sendEvent(`callkit.audio.${value ? 'muted' : 'unmuted'}`);
+            dispatch(setAudioMuted(value));
         }
     }
 }

@@ -98,7 +98,7 @@ class MuteButton extends Component {
         const { dispatch, onClick, participantID } = this.props;

         sendEvent(
-            'remotevideomenu.mute',
+            'remotevideomenu.mute.clicked',
             {
                 value: 1,
                 label: participantID

@@ -3,6 +3,7 @@ import React, { Component } from 'react';
 import { View } from 'react-native';
 import { connect } from 'react-redux';

+import { sendEvent } from '../../analytics';
 import { toggleAudioOnly } from '../../base/conference';
 import {
     MEDIA_TYPE,

@@ -174,6 +175,10 @@ class Toolbox extends Component {
      * @returns {void}
      */
     _onToggleAudio() {
+        const mute = !this.props._audioMuted;
+
+        sendEvent(`toolbar.audio.${mute ? 'muted' : 'unmuted'}`);
+
         // The user sees the reality i.e. the state of base/tracks and intends
         // to change reality by tapping on the respective button i.e. the user
         // sets the state of base/media. Whether the user's intention will turn

@@ -181,7 +186,7 @@ class Toolbox extends Component {
         // tapping.
         this.props.dispatch(
            setAudioMuted(
-                !this.props._audioMuted,
+                mute,
                VIDEO_MUTISM_AUTHORITY.USER,
                /* ensureTrack */ true));
     }

@@ -193,6 +198,10 @@ class Toolbox extends Component {
      * @returns {void}
      */
     _onToggleVideo() {
+        const mute = !this.props._videoMuted;
+
+        sendEvent(`toolbar.video.${mute ? 'muted' : 'unmuted'}`);
+
         // The user sees the reality i.e. the state of base/tracks and intends
         // to change reality by tapping on the respective button i.e. the user
         // sets the state of base/media. Whether the user's intention will turn

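In both toggle handlers above, the target state is computed once as `mute = !this.props._audioMuted` (and its video counterpart) and then reused for the analytics event name and the dispatched action, so the event can never disagree with what is actually dispatched. A reduced sketch of that pattern with plain variables standing in for props and dispatch; the action shape below is hypothetical, the real code dispatches `setAudioMuted(mute, ...)`:

    // Stand-ins for this.props._audioMuted, sendEvent and dispatch.
    const audioMuted = false;
    const sendEvent = name => console.log('analytics:', name);
    const dispatch = action => console.log('dispatch:', action);

    const mute = !audioMuted; // the state the user is asking for

    sendEvent(`toolbar.audio.${mute ? 'muted' : 'unmuted'}`);
    dispatch({ type: 'SET_AUDIO_MUTED', muted: mute });
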
@@ -3,6 +3,7 @@ import PropTypes from 'prop-types';
 import React, { Component } from 'react';
 import { connect } from 'react-redux';

+import { sendEvent } from '../../analytics';
 import {
     setAudioOnly,
     setReceiveVideoQuality,

@@ -10,6 +11,8 @@ import {
 } from '../../base/conference';
 import { translate } from '../../base/i18n';

+const logger = require('jitsi-meet-logger').getLogger(__filename);
+
 const {
     HIGH,
     STANDARD,

@@ -211,6 +214,8 @@ class VideoQualityDialog extends Component {
      * @returns {void}
      */
     _enableAudioOnly() {
+        sendEvent('toolbar.audioonly.enabled');
+        logger.log('Video quality: audio only enabled');
         this.props.dispatch(setAudioOnly(true));
     }

@@ -222,6 +227,8 @@ class VideoQualityDialog extends Component {
      * @returns {void}
      */
     _enableHighDefinition() {
+        sendEvent('toolbar.videoquality.high');
+        logger.log('Video quality: high enabled');
         this.props.dispatch(setReceiveVideoQuality(HIGH));
     }

@@ -233,6 +240,8 @@ class VideoQualityDialog extends Component {
      * @returns {void}
      */
     _enableLowDefinition() {
+        sendEvent('toolbar.videoquality.low');
+        logger.log('Video quality: low enabled');
         this.props.dispatch(setReceiveVideoQuality(LOW));
     }

@@ -244,6 +253,8 @@ class VideoQualityDialog extends Component {
      * @returns {void}
      */
     _enableStandardDefinition() {
+        sendEvent('toolbar.videoquality.standard');
+        logger.log('Video quality: standard enabled');
         this.props.dispatch(setReceiveVideoQuality(STANDARD));
     }

@@ -324,4 +335,3 @@ function _mapStateToProps(state) {
 }

 export default translate(connect(_mapStateToProps)(VideoQualityDialog));
-
