feat(mute): Add analytics and console logs for audio/video mutes

This commit is contained in:
hristoterezov 2017-10-09 16:40:38 -05:00 committed by yanas
parent df1437f018
commit 0eddef4d62
13 changed files with 111 additions and 15 deletions

View File

@ -674,9 +674,13 @@ export default {
}).then(([tracks, con]) => {
tracks.forEach(track => {
if (track.isAudioTrack() && this.isLocalAudioMuted()) {
sendEvent('conference.audio.initiallyMuted');
logger.log('Audio mute: initially muted');
track.mute();
} else if (track.isVideoTrack()
&& this.isLocalVideoMuted()) {
sendEvent('conference.video.initiallyMuted');
logger.log('Video mute: initially muted');
track.mute();
}
});
@ -2260,6 +2264,8 @@ export default {
if (audioWasMuted ||
currentDevices.audioinput.length >
availableAudioInputDevices.length) {
sendEvent('deviceListChanged.audio.muted');
logger.log('Audio mute: device list changed');
muteLocalAudio(true);
}
@ -2269,6 +2275,8 @@ export default {
(videoWasMuted ||
currentDevices.videoinput.length >
availableVideoInputDevices.length)) {
sendEvent('deviceListChanged.video.muted');
logger.log('Video mute: device list changed');
muteLocalVideo(true);
}
}));

View File

@ -2,10 +2,13 @@
import * as JitsiMeetConferenceEvents from '../../ConferenceEvents';
import { parseJWTFromURLParams } from '../../react/features/base/jwt';
import { sendEvent } from '../../react/features/analytics';
import { getJitsiMeetTransport } from '../transport';
import { API_ID } from './constants';
const logger = require('jitsi-meet-logger').getLogger(__filename);
declare var APP: Object;
/**
@ -52,9 +55,13 @@ function initCommands() {
'display-name':
APP.conference.changeLocalDisplayName.bind(APP.conference),
'toggle-audio': () => {
sendEvent('api.toggle.audio');
logger.log('Audio toggle: API command received');
APP.conference.toggleAudioMuted(false /* no UI */);
},
'toggle-video': () => {
sendEvent('api.toggle.video');
logger.log('Video toggle: API command received');
APP.conference.toggleVideoMuted(false /* no UI */);
},
'toggle-film-strip': APP.UI.toggleFilmstrip,

View File

@ -533,7 +533,8 @@ export default class SharedVideoManager {
if (APP.conference.isLocalAudioMuted()
&& !this.mutedWithUserInteraction
&& !this.isSharedVideoVolumeOn()) {
sendEvent("sharedvideo.audio.unmuted");
logger.log('Shared video: audio unmuted');
this.emitter.emit(UIEvents.AUDIO_MUTED, false, false);
this.showMicMutedPopup(false);
}
@ -546,7 +547,8 @@ export default class SharedVideoManager {
smartAudioMute() {
if (!APP.conference.isLocalAudioMuted()
&& this.isSharedVideoVolumeOn()) {
sendEvent("sharedvideo.audio.muted");
logger.log('Shared video: audio muted');
this.emitter.emit(UIEvents.AUDIO_MUTED, true, false);
this.showMicMutedPopup(true);
}

View File

@ -4,6 +4,8 @@ import { toggleDialog } from '../../react/features/base/dialog';
import { sendEvent } from '../../react/features/analytics';
import { SpeakerStats } from '../../react/features/speaker-stats';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* The reference to the shortcut dialogs when opened.
*/
@ -28,6 +30,7 @@ function initGlobalShortcuts() {
// register SPACE shortcut in two steps to ensure visibility of help message
KeyboardShortcut.registerShortcut(" ", null, function() {
sendEvent("shortcut.talk.clicked");
logger.log('Talk shortcut pressed');
APP.conference.muteAudio(true);
});
KeyboardShortcut._addShortcutToHelp("SPACE","keyboardShortcuts.pushToTalk");
@ -119,8 +122,11 @@ const KeyboardShortcut = {
$(":focus").is("textarea"))) {
var key = self._getKeyboardKey(e).toUpperCase();
if(key === " ") {
if(APP.conference.isLocalAudioMuted())
if(APP.conference.isLocalAudioMuted()) {
sendEvent("shortcut.talk.released");
logger.log('Talk shortcut released');
APP.conference.muteAudio(false);
}
}
}
};

View File

@ -1,5 +1,6 @@
// @flow
import { sendEvent } from '../../analytics';
import { JitsiConferenceEvents } from '../lib-jitsi-meet';
import { setAudioMuted, setVideoMuted } from '../media';
import {
@ -39,6 +40,8 @@ import { _addLocalTracksToConference } from './functions';
import type { Dispatch } from 'redux';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* Adds conference (event) listeners.
*
@ -69,6 +72,16 @@ function _addConferenceListeners(conference, dispatch) {
conference.on(
JitsiConferenceEvents.STARTED_MUTED,
() => {
const audioMuted = Boolean(conference.startAudioMuted);
const videoMuted = Boolean(conference.startVideoMuted);
sendEvent(
`startmuted.server.audio.${audioMuted ? 'muted' : 'unmuted'}`);
sendEvent(
`startmuted.server.video.${videoMuted ? 'muted' : 'unmuted'}`);
logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${
videoMuted ? 'video' : ''}`);
// XXX Jicofo tells lib-jitsi-meet to start with audio and/or video
// muted i.e. Jicofo expresses an intent. Lib-jitsi-meet has turned
// Jicofo's intent into reality by actually muting the respective
@ -77,8 +90,8 @@ function _addConferenceListeners(conference, dispatch) {
// TODO Maybe the app needs to learn about Jicofo's intent and
// transfer that intent to lib-jitsi-meet instead of lib-jitsi-meet
// acting on Jicofo's intent without the app's knowledge.
dispatch(setAudioMuted(Boolean(conference.startAudioMuted)));
dispatch(setVideoMuted(Boolean(conference.startVideoMuted)));
dispatch(setAudioMuted(audioMuted));
dispatch(setVideoMuted(videoMuted));
});
// Dispatches into features/base/tracks follow:

View File

@ -35,6 +35,8 @@ import {
_removeLocalTracksFromConference
} from './functions';
const logger = require('jitsi-meet-logger').getLogger(__filename);
declare var APP: Object;
/**
@ -121,8 +123,11 @@ function _connectionEstablished(store, next, action) {
/**
 * Notifies this middleware that CONFERENCE_FAILED or CONFERENCE_LEFT is being
 * dispatched within a specific redux store. If audio-only mode is enabled at
 * that point, disables it (reporting the change to analytics and the log),
 * since the mode should not persist past the conference.
 *
 * @param {Store} store - The redux store in which the action is dispatched.
 * @param {Dispatch} next - The redux dispatch function to continue the chain.
 * @param {Action} action - The CONFERENCE_FAILED or CONFERENCE_LEFT action.
 * @private
 * @returns {Object} The value returned by next(action).
 */
function _conferenceFailedOrLeft({ dispatch, getState }, next, action) {
    const result = next(action);

    // Guard so that setAudioOnly(false) is dispatched (and reported) exactly
    // once, and only when audio-only was actually enabled.
    if (getState()['features/base/conference'].audioOnly) {
        sendEvent('audioonly.disabled');
        logger.log('Audio only disabled');
        dispatch(setAudioOnly(false));
    }

    return result;
}

View File

@ -1,5 +1,6 @@
/* @flow */
import { sendEvent } from '../../analytics';
import { SET_ROOM, setAudioOnly } from '../conference';
import { parseURLParams } from '../config';
import { MiddlewareRegistry } from '../redux';
@ -8,6 +9,8 @@ import { setTrackMuted, TRACK_ADDED } from '../tracks';
import { setAudioMuted, setCameraFacingMode, setVideoMuted } from './actions';
import { CAMERA_FACING_MODE } from './constants';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* Implements the entry point of the middleware of the feature base/media.
*
@ -77,14 +80,23 @@ function _setRoom({ dispatch, getState }, next, action) {
typeof videoMuted === 'undefined'
&& (videoMuted = config.startWithVideoMuted);
audioMuted = Boolean(audioMuted);
videoMuted = Boolean(videoMuted);
// Apply the config.
sendEvent(`startmuted.client.audio.${audioMuted ? 'muted' : 'unmuted'}`);
sendEvent(`startmuted.client.video.${videoMuted ? 'muted' : 'unmuted'}`);
logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${
videoMuted ? 'video' : ''}`);
// Unconditionally express the desires/expectations/intents of the app and
// the user i.e. the state of base/media. Eventually, practice/reality i.e.
// the state of base/tracks will or will not agree with the desires.
dispatch(setAudioMuted(Boolean(audioMuted)));
dispatch(setAudioMuted(audioMuted));
dispatch(setCameraFacingMode(CAMERA_FACING_MODE.USER));
dispatch(setVideoMuted(Boolean(videoMuted)));
dispatch(setVideoMuted(videoMuted));
// config.startAudioOnly
//
@ -97,7 +109,10 @@ function _setRoom({ dispatch, getState }, next, action) {
let audioOnly = urlParams && urlParams['config.startAudioOnly'];
typeof audioOnly === 'undefined' && (audioOnly = config.startAudioOnly);
dispatch(setAudioOnly(Boolean(audioOnly)));
audioOnly = Boolean(audioOnly);
sendEvent(`startaudioonly.${audioOnly ? 'enabled' : 'disabled'}`);
logger.log(`Start audio only set to ${audioOnly.toString()}`);
dispatch(setAudioOnly(audioOnly));
}
return next(action);
@ -121,6 +136,10 @@ function _syncTrackMutedState({ getState }, track) {
// not yet in redux state and JitsiTrackEvents.TRACK_MUTE_CHANGED may be
// fired before track gets to state.
if (track.muted !== muted) {
sendEvent(
`synctrackstate.${track.mediaType}.${muted ? 'muted' : 'unmuted'}`);
logger.log(`Sync ${track.mediaType} track muted state to ${
muted ? 'muted' : 'unmuted'}`);
track.muted = muted;
setTrackMuted(track.jitsiTrack, muted);
}

View File

@ -1,3 +1,4 @@
import { sendEvent } from '../../analytics';
import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
import {
CAMERA_FACING_MODE,
@ -15,6 +16,8 @@ import {
} from './actionTypes';
import { createLocalTracksF } from './functions';
const logger = require('jitsi-meet-logger').getLogger(__filename);
/**
* Requests the creating of the desired media type tracks. Desire is expressed
* by base/media unless the function caller specifies desired media types
@ -154,8 +157,14 @@ export function replaceLocalTrack(oldTrack, newTrack, conference) {
= newTrack.isVideoTrack()
? setVideoMuted
: setAudioMuted;
const isMuted = newTrack.isMuted();
return dispatch(setMuted(newTrack.isMuted()));
sendEvent(`replacetrack.${newTrack.getType()}.${
isMuted ? 'muted' : 'unmuted'}`);
logger.log(`Replace ${newTrack.getType()} track - ${
isMuted ? 'muted' : 'unmuted'}`);
return dispatch(setMuted());
}
})
.then(() => {

View File

@ -1,5 +1,6 @@
/* @flow */
import { sendEvent } from '../../analytics';
import { setLastN } from '../../base/conference';
import { setVideoMuted, VIDEO_MUTISM_AUTHORITY } from '../../base/media';
@ -41,6 +42,9 @@ export function _setBackgroundVideoMuted(muted: boolean) {
const { audioOnly } = getState()['features/base/conference'];
audioOnly || dispatch(setLastN(muted ? 0 : undefined));
sendEvent('callkit.background.video.muted');
dispatch(setVideoMuted(muted, VIDEO_MUTISM_AUTHORITY.BACKGROUND));
};
}

View File

@ -3,6 +3,7 @@
import { NativeModules } from 'react-native';
import uuid from 'uuid';
import { sendEvent } from '../../analytics';
import { APP_WILL_MOUNT, APP_WILL_UNMOUNT, appNavigate } from '../../app';
import {
CONFERENCE_FAILED,
@ -268,7 +269,10 @@ function _onPerformSetMutedCallAction({ callUUID, muted: newValue }) {
const { muted: oldValue } = getState()['features/base/media'].audio;
if (oldValue !== newValue) {
dispatch(setAudioMuted(Boolean(newValue)));
const value = Boolean(newValue);
sendEvent(`callkit.audio.${value ? 'muted' : 'unmuted'}`);
dispatch(setAudioMuted(value));
}
}
}

View File

@ -98,7 +98,7 @@ class MuteButton extends Component {
const { dispatch, onClick, participantID } = this.props;
sendEvent(
'remotevideomenu.mute',
'remotevideomenu.mute.clicked',
{
value: 1,
label: participantID

View File

@ -3,6 +3,7 @@ import React, { Component } from 'react';
import { View } from 'react-native';
import { connect } from 'react-redux';
import { sendEvent } from '../../analytics';
import { toggleAudioOnly } from '../../base/conference';
import {
MEDIA_TYPE,
@ -174,6 +175,10 @@ class Toolbox extends Component {
* @returns {void}
*/
_onToggleAudio() {
const mute = !this.props._audioMuted;
sendEvent(`toolbar.audio.${mute ? 'muted' : 'unmuted'}`);
// The user sees the reality i.e. the state of base/tracks and intends
// to change reality by tapping on the respective button i.e. the user
// sets the state of base/media. Whether the user's intention will turn
@ -181,7 +186,7 @@ class Toolbox extends Component {
// tapping.
this.props.dispatch(
setAudioMuted(
!this.props._audioMuted,
mute,
VIDEO_MUTISM_AUTHORITY.USER,
/* ensureTrack */ true));
}
@ -193,6 +198,10 @@ class Toolbox extends Component {
* @returns {void}
*/
_onToggleVideo() {
const mute = !this.props._videoMuted;
sendEvent(`toolbar.video.${mute ? 'muted' : 'unmuted'}`);
// The user sees the reality i.e. the state of base/tracks and intends
// to change reality by tapping on the respective button i.e. the user
// sets the state of base/media. Whether the user's intention will turn

View File

@ -3,6 +3,7 @@ import PropTypes from 'prop-types';
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { sendEvent } from '../../analytics';
import {
setAudioOnly,
setReceiveVideoQuality,
@ -10,6 +11,8 @@ import {
} from '../../base/conference';
import { translate } from '../../base/i18n';
const logger = require('jitsi-meet-logger').getLogger(__filename);
const {
HIGH,
STANDARD,
@ -211,6 +214,8 @@ class VideoQualityDialog extends Component {
* @returns {void}
*/
_enableAudioOnly() {
sendEvent('toolbar.audioonly.enabled');
logger.log('Video quality: audio only enabled');
this.props.dispatch(setAudioOnly(true));
}
@ -222,6 +227,8 @@ class VideoQualityDialog extends Component {
* @returns {void}
*/
_enableHighDefinition() {
sendEvent('toolbar.videoquality.high');
logger.log('Video quality: high enabled');
this.props.dispatch(setReceiveVideoQuality(HIGH));
}
@ -233,6 +240,8 @@ class VideoQualityDialog extends Component {
* @returns {void}
*/
_enableLowDefinition() {
sendEvent('toolbar.videoquality.low');
logger.log('Video quality: low enabled');
this.props.dispatch(setReceiveVideoQuality(LOW));
}
@ -244,6 +253,8 @@ class VideoQualityDialog extends Component {
* @returns {void}
*/
_enableStandardDefinition() {
sendEvent('toolbar.videoquality.standard');
logger.log('Video quality: standard enabled');
this.props.dispatch(setReceiveVideoQuality(STANDARD));
}
@ -324,4 +335,3 @@ function _mapStateToProps(state) {
}
export default translate(connect(_mapStateToProps)(VideoQualityDialog));