feat(ts) make tsc happy

Saúl Ibarra Corretgé 2022-11-01 13:36:32 +01:00 committed by Saúl Ibarra Corretgé
parent 49bcf5c179
commit 7cd39b7983
55 changed files with 937 additions and 782 deletions

View File

@ -62,12 +62,14 @@ import {
import {
checkAndNotifyForNewDevice,
getAvailableDevices,
getDefaultDeviceId,
notifyCameraError,
notifyMicError,
setAudioOutputDeviceId,
updateDeviceList
} from './react/features/base/devices';
} from './react/features/base/devices/actions.web';
import {
getDefaultDeviceId,
setAudioOutputDeviceId
} from './react/features/base/devices/functions.web';
import {
JitsiConferenceErrors,
JitsiConferenceEvents,

globals.native.d.ts (vendored)
View File

@ -12,8 +12,21 @@ interface IWindow {
JITSI_MEET_LITE_SDK: boolean;
JitsiMeetJS: any;
config: IConfig;
document: any;
innerHeight: number;
innerWidth: number;
interfaceConfig: any;
location: ILocation;
self: any;
top: any;
onerror: (event: string, source: any, lineno: any, colno: any, e: Error) => void;
onunhandledrejection: (event: any) => void;
setTimeout: typeof setTimeout;
clearTimeout: typeof clearTimeout;
setImmediate: typeof setImmediate;
clearImmediate: typeof clearImmediate;
}
interface INavigator {
@ -22,6 +35,7 @@ interface INavigator {
declare global {
const APP: any;
const document: any;
const interfaceConfig: any;
const navigator: INavigator;
const window: IWindow;

View File

@ -1,10 +1,12 @@
/* global APP, JitsiMeetJS */
import {
getAudioOutputDeviceId,
notifyCameraError,
notifyMicError
} from '../../react/features/base/devices';
} from '../../react/features/base/devices/actions.web';
import {
getAudioOutputDeviceId
} from '../../react/features/base/devices/functions.web';
import {
getUserSelectedCameraDeviceId,
getUserSelectedMicDeviceId,

View File

@ -199,7 +199,7 @@
"tsc:web": "tsc --noEmit --project tsconfig.web.json",
"tsc:native": "tsc --noEmit --project tsconfig.native.json",
"tsc:ci": "npm run tsc:web && npm run tsc:native",
"lint:ci": "eslint --ext .js,.ts,.tsx --max-warnings 0 . && npm run tsc:web",
"lint:ci": "eslint --ext .js,.ts,.tsx --max-warnings 0 . && npm run tsc:ci",
"lang-sort": "./resources/lang-sort.sh",
"lint-fix": "eslint --ext .js,.ts,.tsx --max-warnings 0 --fix .",
"postinstall": "patch-package --error-on-fail && jetify",

View File

@ -3,8 +3,8 @@ import { API_ID } from '../../../modules/API/constants';
import { getName as getAppName } from '../app/functions';
import { IStore } from '../app/types';
import { getAnalyticsRoomName } from '../base/conference/functions';
import checkChromeExtensionsInstalled from '../base/environment/checkChromeExtensionsInstalled';
import {
checkChromeExtensionsInstalled,
isMobileBrowser
} from '../base/environment/utils';
import JitsiMeetJS, {

View File

@ -1,3 +1,5 @@
/* eslint-disable lines-around-comment */
import logger from '../logger';
import AbstractHandler, { IEvent } from './AbstractHandler';
@ -63,7 +65,7 @@ export default class AmplitudeHandler extends AbstractHandler {
* @param {Object} userProps - The user properties.
* @returns {void}
*/
setUserProperties(userProps: Object) {
setUserProperties(userProps: any) {
if (this._enabled) {
amplitude.getInstance().setUserProperties(userProps);
}
@ -82,6 +84,7 @@ export default class AmplitudeHandler extends AbstractHandler {
return;
}
// @ts-ignore
amplitude.getInstance().logEvent(this._extractName(event) ?? '', event);
}
@ -100,7 +103,9 @@ export default class AmplitudeHandler extends AbstractHandler {
return {
sessionId: amplitude.getInstance().getSessionId(),
// @ts-ignore
deviceId: amplitude.getInstance().options.deviceId,
// @ts-ignore
userId: amplitude.getInstance().options.userId
};
}

View File

@ -9,7 +9,7 @@ import { IAudioOnlyState } from '../base/audio-only/reducer';
import { IConferenceState } from '../base/conference/reducer';
import { IConfigState } from '../base/config/reducer';
import { IConnectionState } from '../base/connection/reducer';
import { IDevicesState } from '../base/devices/reducer';
import { IDevicesState } from '../base/devices/types';
import { IDialogState } from '../base/dialog/reducer';
import { IFlagsState } from '../base/flags/reducer';
import { IJwtState } from '../base/jwt/reducer';

View File

@ -1,7 +1,7 @@
import { IStore } from '../../app/types';
import JitsiMeetJS from '../lib-jitsi-meet';
import { updateSettings } from '../settings/actions';
import { getUserSelectedOutputDeviceId } from '../settings/functions.any';
import { getUserSelectedOutputDeviceId } from '../settings/functions.web';
import {
ADD_PENDING_DEVICE_REQUEST,

View File

@ -0,0 +1,19 @@
import { IReduxState } from '../../app/types';
/**
* Returns true if there are devices of a specific type or on native platform.
*
* @param {Object} state - The state of the application.
* @param {string} type - The type of device: videoInput | audioOutput | audioInput.
*
* @returns {boolean}
*/
export function hasAvailableDevices(state: IReduxState, type: string) {
if (state['features/base/devices'] === undefined) {
return true;
}
const availableDevices = state['features/base/devices'].availableDevices;
return Number(availableDevices[type as keyof typeof availableDevices]?.length) > 0;
}
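
As a point of reference, a minimal usage sketch of the relocated selector; the hook name and the relative import paths are hypothetical, and only illustrate how a component might consume it:

    import { useSelector } from 'react-redux';

    import { IReduxState } from '../../app/types';
    import { hasAvailableDevices } from './functions.any';

    // True when at least one camera is known, or when the devices feature
    // state has not been populated yet (e.g. on native).
    const useHasCamera = () =>
        useSelector((state: IReduxState) => hasAvailableDevices(state, 'videoInput'));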

View File

@ -0,0 +1 @@
export * from './functions.any';

View File

@ -5,10 +5,9 @@ import { ISettingsState } from '../settings/reducer';
import { parseURLParams } from '../util/parseURLParams';
import logger from './logger';
import { IDevicesState } from './reducer';
import { IDevicesState } from './types';
declare const APP: any;
export * from './functions.any';
const webrtcKindToJitsiKindTranslator = {
audioinput: 'audioInput',
@ -240,24 +239,6 @@ export function getVideoDeviceIds(state: IReduxState) {
return state['features/base/devices'].availableDevices.videoInput?.map(({ deviceId }) => deviceId);
}
/**
* Returns true if there are devices of a specific type or on native platform.
*
* @param {Object} state - The state of the application.
* @param {string} type - The type of device: videoInput | audioOutput | audioInput.
*
* @returns {boolean}
*/
export function hasAvailableDevices(state: IReduxState, type: string) {
if (state['features/base/devices'] === undefined) {
return true;
}
const availableDevices = state['features/base/devices'].availableDevices;
return Number(availableDevices[type as keyof typeof availableDevices]?.length) > 0;
}
/**
* Set device id of the audio output device which is currently in use.
* Empty string stands for default device.

View File

@ -1,3 +0,0 @@
export * from './actions';
export * from './actionTypes';
export * from './functions';

View File

@ -35,7 +35,7 @@ import {
setAudioOutputDeviceId
} from './functions';
import logger from './logger';
import { IDevicesState } from './reducer';
import { IDevicesState } from './types';
const JITSI_TRACK_ERROR_TO_MESSAGE_KEY_MAP = {
microphone: {

View File

@ -8,8 +8,9 @@ import {
SET_VIDEO_INPUT_DEVICE,
UPDATE_DEVICE_LIST
} from './actionTypes';
import { groupDevicesByKind } from './functions';
import { groupDevicesByKind } from './functions.web';
import logger from './logger';
import { IDevicesState } from './types';
const DEFAULT_STATE: IDevicesState = {
@ -25,19 +26,6 @@ const DEFAULT_STATE: IDevicesState = {
}
};
export interface IDevicesState {
availableDevices: {
audioInput?: MediaDeviceInfo[];
audioOutput?: MediaDeviceInfo[];
videoInput?: MediaDeviceInfo[];
};
pendingRequests: Object[];
permissions: {
audio: boolean;
video: boolean;
};
}
/**
* Listen for actions which changes the state of known and used devices.
*

View File

@ -0,0 +1,17 @@
/* eslint-disable lines-around-comment */
export interface IDevicesState {
availableDevices: {
// @ts-ignore
audioInput?: MediaDeviceInfo[];
// @ts-ignore
audioOutput?: MediaDeviceInfo[];
// @ts-ignore
videoInput?: MediaDeviceInfo[];
};
pendingRequests: any[];
permissions: {
audio: boolean;
video: boolean;
};
}
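
For illustration, a value that type-checks against the extracted interface; the empty arrays and false flags are placeholders, not the feature's actual defaults:

    import { IDevicesState } from './types';

    const emptyDevicesState: IDevicesState = {
        availableDevices: {
            audioInput: [],
            audioOutput: [],
            videoInput: []
        },
        pendingRequests: [],
        permissions: {
            audio: false,
            video: false
        }
    };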

View File

@ -0,0 +1,10 @@
/**
* Checks whether the chrome extensions defined in the config file are installed or not.
*
* @param {Object} _config - Objects containing info about the configured extensions.
*
* @returns {Promise[]}
*/
export default function checkChromeExtensionsInstalled(_config: any = {}) {
return Promise.resolve([]);
}

View File

@ -0,0 +1,26 @@
/**
* Checks whether the chrome extensions defined in the config file are installed or not.
*
* @param {Object} config - Objects containing info about the configured extensions.
*
* @returns {Promise[]}
*/
export default function checkChromeExtensionsInstalled(config: any = {}) {
const isExtensionInstalled = (info: any) => new Promise(resolve => {
const img = new Image();
img.src = `chrome-extension://${info.id}/${info.path}`;
img.setAttribute('aria-hidden', 'true');
img.onload = function() {
resolve(true);
};
img.onerror = function() {
resolve(false);
};
});
const extensionInstalledFunction = (info: any) => isExtensionInstalled(info);
return Promise.all(
(config.chromeExtensionsInfo || []).map((info: any) => extensionInstalledFunction(info))
);
}
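
A hedged usage sketch of the web implementation; the import path, extension id and resource path below are made up for the example:

    import checkChromeExtensionsInstalled from './checkChromeExtensionsInstalled.web';

    const config = {
        chromeExtensionsInfo: [
            // An extension id plus a web-accessible resource the probe image can load.
            { id: 'aapocclcgogkmnckokdopfmhonfmgoek', path: 'icon_16.png' }
        ]
    };

    checkChromeExtensionsInstalled(config).then(results => {
        // One boolean per configured extension, in the same order.
        console.log('installed:', results);
    });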

View File

@ -18,30 +18,3 @@ export function isMobileBrowser() {
export function isIosMobileBrowser() {
return Platform.OS === 'ios';
}
/**
* Checks whether the chrome extensions defined in the config file are installed or not.
*
* @param {Object} config - Objects containing info about the configured extensions.
*
* @returns {Promise[]}
*/
export function checkChromeExtensionsInstalled(config: any = {}) {
const isExtensionInstalled = (info: any) => new Promise(resolve => {
const img = new Image();
img.src = `chrome-extension://${info.id}/${info.path}`;
img.setAttribute('aria-hidden', 'true');
img.onload = function() {
resolve(true);
};
img.onerror = function() {
resolve(false);
};
});
const extensionInstalledFunction = (info: any) => isExtensionInstalled(info);
return Promise.all(
(config.chromeExtensionsInfo || []).map((info: any) => extensionInstalledFunction(info))
);
}

View File

@ -16,7 +16,7 @@ import { MEET_FEATURES } from './constants';
* @returns {string} The JSON Web Token (JWT), if any, defined by the specified
* {@code url}; otherwise, {@code undefined}.
*/
export function parseJWTFromURLParams(url: URL | Location = window.location) {
export function parseJWTFromURLParams(url: URL | typeof window.location = window.location) {
// @ts-ignore
return parseURLParams(url, true, 'search').jwt;
}
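
For context, a small usage example of the adjusted signature; the URL is illustrative and the import assumes the jwt feature's functions module:

    import { parseJWTFromURLParams } from './functions';

    // With an explicit URL; without an argument the current window.location is used.
    const jwt = parseJWTFromURLParams(new URL('https://meet.example.com/room?jwt=abc.def.ghi'));
    // -> 'abc.def.ghi' when a jwt parameter is present, otherwise undefined.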

View File

@ -103,160 +103,6 @@ export function getServerURL(stateful: IStateful) {
return state['features/base/settings'].serverURL || DEFAULT_SERVER_URL;
}
/**
* Searches known devices for a matching deviceId and fall back to matching on
* label. Returns the stored preferred cameraDeviceId if a match is not found.
*
* @param {Object|Function} stateful - The redux state object or
* {@code getState} function.
* @returns {string}
*/
export function getUserSelectedCameraDeviceId(stateful: IStateful) {
const state = toState(stateful);
const {
userSelectedCameraDeviceId,
userSelectedCameraDeviceLabel
} = state['features/base/settings'];
const { videoInput } = state['features/base/devices'].availableDevices;
return _getUserSelectedDeviceId({
availableDevices: videoInput,
// Operating systems may append " #{number}" somewhere in the label so
// find and strip that bit.
matchRegex: /\s#\d*(?!.*\s#\d*)/,
userSelectedDeviceId: userSelectedCameraDeviceId,
userSelectedDeviceLabel: userSelectedCameraDeviceLabel,
replacement: ''
});
}
/**
* Searches known devices for a matching deviceId and fall back to matching on
* label. Returns the stored preferred micDeviceId if a match is not found.
*
* @param {Object|Function} stateful - The redux state object or
* {@code getState} function.
* @returns {string}
*/
export function getUserSelectedMicDeviceId(stateful: IStateful) {
const state = toState(stateful);
const {
userSelectedMicDeviceId,
userSelectedMicDeviceLabel
} = state['features/base/settings'];
const { audioInput } = state['features/base/devices'].availableDevices;
return _getUserSelectedDeviceId({
availableDevices: audioInput,
// Operating systems may append " ({number}-" somewhere in the label so
// find and strip that bit.
matchRegex: /\s\(\d*-\s(?!.*\s\(\d*-\s)/,
userSelectedDeviceId: userSelectedMicDeviceId,
userSelectedDeviceLabel: userSelectedMicDeviceLabel,
replacement: ' ('
});
}
/**
* Searches known devices for a matching deviceId and fall back to matching on
* label. Returns the stored preferred audioOutputDeviceId if a match is not found.
*
* @param {Object|Function} stateful - The redux state object or
* {@code getState} function.
* @returns {string}
*/
export function getUserSelectedOutputDeviceId(stateful: IStateful) {
const state = toState(stateful);
const {
userSelectedAudioOutputDeviceId,
userSelectedAudioOutputDeviceLabel
} = state['features/base/settings'];
const { audioOutput } = state['features/base/devices'].availableDevices;
return _getUserSelectedDeviceId({
availableDevices: audioOutput,
matchRegex: undefined,
userSelectedDeviceId: userSelectedAudioOutputDeviceId,
userSelectedDeviceLabel: userSelectedAudioOutputDeviceLabel,
replacement: undefined
});
}
/**
* A helper function to abstract the logic for choosing which device ID to
* use. Falls back to fuzzy matching on label if a device ID match is not found.
*
* @param {Object} options - The arguments used to find the preferred
* device ID from available devices.
* @param {Array<string>} options.availableDevices - The array of currently
* available devices to match against.
* @param {Object} options.matchRegex - The regex to use to find strings
* appended to the label by the operating system. The matches will be replaced
* with options.replacement, with the intent of matching the same device that
* might have a modified label.
* @param {string} options.userSelectedDeviceId - The device ID the participant
* prefers to use.
* @param {string} options.userSelectedDeviceLabel - The label associated with the
* device ID the participant prefers to use.
* @param {string} options.replacement - The string to use with
* options.matchRegex to remove identifiers added to the label by the operating
* system.
* @private
* @returns {string} The preferred device ID to use for media.
*/
function _getUserSelectedDeviceId(options: {
availableDevices: MediaDeviceInfo[] | undefined;
matchRegex?: RegExp;
replacement?: string;
userSelectedDeviceId?: string;
userSelectedDeviceLabel?: string;
}) {
const {
availableDevices,
matchRegex = '',
userSelectedDeviceId,
userSelectedDeviceLabel,
replacement = ''
} = options;
// If there is no label at all, there is no need to fall back to checking
// the label for a fuzzy match.
if (!userSelectedDeviceLabel || !userSelectedDeviceId) {
return userSelectedDeviceId;
}
const foundMatchingBasedonDeviceId = availableDevices?.find(
candidate => candidate.deviceId === userSelectedDeviceId);
// Prioritize matching the deviceId
if (foundMatchingBasedonDeviceId) {
return userSelectedDeviceId;
}
const strippedDeviceLabel
= matchRegex ? userSelectedDeviceLabel.replace(matchRegex, replacement)
: userSelectedDeviceLabel;
const foundMatchBasedOnLabel = availableDevices?.find(candidate => {
const { label } = candidate;
if (!label) {
return false;
} else if (strippedDeviceLabel === label) {
return true;
}
const strippedCandidateLabel
= label.replace(matchRegex, replacement);
return strippedDeviceLabel === strippedCandidateLabel;
});
return foundMatchBasedOnLabel
? foundMatchBasedOnLabel.deviceId : userSelectedDeviceId;
}
/**
* Should we hide the helper dialog when a user tries to do audio only screen sharing.
*

View File

@ -1,5 +1,6 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import { IReduxState } from '../../app/types';
import { IStateful } from '../app/types';
import { toState } from '../redux/functions';
export * from './functions.any';
@ -58,3 +59,157 @@ function getDeviceIdByType(state: IReduxState, isType: string) {
export function getDisplayName(state: IReduxState): string {
return state['features/base/settings'].displayName || '';
}
/**
* Searches known devices for a matching deviceId and fall back to matching on
* label. Returns the stored preferred cameraDeviceId if a match is not found.
*
* @param {Object|Function} stateful - The redux state object or
* {@code getState} function.
* @returns {string}
*/
export function getUserSelectedCameraDeviceId(stateful: IStateful) {
const state = toState(stateful);
const {
userSelectedCameraDeviceId,
userSelectedCameraDeviceLabel
} = state['features/base/settings'];
const { videoInput } = state['features/base/devices'].availableDevices;
return _getUserSelectedDeviceId({
availableDevices: videoInput,
// Operating systems may append " #{number}" somewhere in the label so
// find and strip that bit.
matchRegex: /\s#\d*(?!.*\s#\d*)/,
userSelectedDeviceId: userSelectedCameraDeviceId,
userSelectedDeviceLabel: userSelectedCameraDeviceLabel,
replacement: ''
});
}
/**
* Searches known devices for a matching deviceId and fall back to matching on
* label. Returns the stored preferred micDeviceId if a match is not found.
*
* @param {Object|Function} stateful - The redux state object or
* {@code getState} function.
* @returns {string}
*/
export function getUserSelectedMicDeviceId(stateful: IStateful) {
const state = toState(stateful);
const {
userSelectedMicDeviceId,
userSelectedMicDeviceLabel
} = state['features/base/settings'];
const { audioInput } = state['features/base/devices'].availableDevices;
return _getUserSelectedDeviceId({
availableDevices: audioInput,
// Operating systems may append " ({number}-" somewhere in the label so
// find and strip that bit.
matchRegex: /\s\(\d*-\s(?!.*\s\(\d*-\s)/,
userSelectedDeviceId: userSelectedMicDeviceId,
userSelectedDeviceLabel: userSelectedMicDeviceLabel,
replacement: ' ('
});
}
/**
* Searches known devices for a matching deviceId and fall back to matching on
* label. Returns the stored preferred audioOutputDeviceId if a match is not found.
*
* @param {Object|Function} stateful - The redux state object or
* {@code getState} function.
* @returns {string}
*/
export function getUserSelectedOutputDeviceId(stateful: IStateful) {
const state = toState(stateful);
const {
userSelectedAudioOutputDeviceId,
userSelectedAudioOutputDeviceLabel
} = state['features/base/settings'];
const { audioOutput } = state['features/base/devices'].availableDevices;
return _getUserSelectedDeviceId({
availableDevices: audioOutput,
matchRegex: undefined,
userSelectedDeviceId: userSelectedAudioOutputDeviceId,
userSelectedDeviceLabel: userSelectedAudioOutputDeviceLabel,
replacement: undefined
});
}
/**
* A helper function to abstract the logic for choosing which device ID to
* use. Falls back to fuzzy matching on label if a device ID match is not found.
*
* @param {Object} options - The arguments used to find the preferred
* device ID from available devices.
* @param {Array<string>} options.availableDevices - The array of currently
* available devices to match against.
* @param {Object} options.matchRegex - The regex to use to find strings
* appended to the label by the operating system. The matches will be replaced
* with options.replacement, with the intent of matching the same device that
* might have a modified label.
* @param {string} options.userSelectedDeviceId - The device ID the participant
* prefers to use.
* @param {string} options.userSelectedDeviceLabel - The label associated with the
* device ID the participant prefers to use.
* @param {string} options.replacement - The string to use with
* options.matchRegex to remove identifiers added to the label by the operating
* system.
* @private
* @returns {string} The preferred device ID to use for media.
*/
function _getUserSelectedDeviceId(options: {
availableDevices: MediaDeviceInfo[] | undefined;
matchRegex?: RegExp;
replacement?: string;
userSelectedDeviceId?: string;
userSelectedDeviceLabel?: string;
}) {
const {
availableDevices,
matchRegex = '',
userSelectedDeviceId,
userSelectedDeviceLabel,
replacement = ''
} = options;
// If there is no label at all, there is no need to fall back to checking
// the label for a fuzzy match.
if (!userSelectedDeviceLabel || !userSelectedDeviceId) {
return userSelectedDeviceId;
}
const foundMatchingBasedonDeviceId = availableDevices?.find(
candidate => candidate.deviceId === userSelectedDeviceId);
// Prioritize matching the deviceId
if (foundMatchingBasedonDeviceId) {
return userSelectedDeviceId;
}
const strippedDeviceLabel
= matchRegex ? userSelectedDeviceLabel.replace(matchRegex, replacement)
: userSelectedDeviceLabel;
const foundMatchBasedOnLabel = availableDevices?.find(candidate => {
const { label } = candidate;
if (!label) {
return false;
} else if (strippedDeviceLabel === label) {
return true;
}
const strippedCandidateLabel
= label.replace(matchRegex, replacement);
return strippedDeviceLabel === strippedCandidateLabel;
});
return foundMatchBasedOnLabel
? foundMatchBasedOnLabel.deviceId : userSelectedDeviceId;
}
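
To make the label matching concrete, a small stand-alone sketch of what the camera matchRegex does; the labels are made-up examples of how operating systems number duplicate devices:

    // Strips the trailing " #<n>" suffix so a stored "FaceTime HD Camera #2"
    // can still match the same physical device when it shows up as "#3".
    const cameraMatchRegex = /\s#\d*(?!.*\s#\d*)/;
    const strip = (label: string) => label.replace(cameraMatchRegex, '');

    console.log(strip('FaceTime HD Camera #2') === strip('FaceTime HD Camera #3')); // true
    console.log(strip('FaceTime HD Camera')); // unchanged: 'FaceTime HD Camera'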

View File

@ -1,29 +1,18 @@
import { IReduxState, IStore } from '../../app/types';
import { IStateful } from '../app/types';
import { IReduxState } from '../../app/types';
import {
getMultipleVideoSendingSupportFeatureFlag,
getMultipleVideoSupportFeatureFlag
} from '../config/functions.any';
import { isMobileBrowser } from '../environment/utils';
import JitsiMeetJS, { JitsiTrackErrors, browser } from '../lib-jitsi-meet';
import { setAudioMuted } from '../media/actions';
import { JitsiTrackErrors, browser } from '../lib-jitsi-meet';
import { MEDIA_TYPE, MediaType, VIDEO_TYPE } from '../media/constants';
import {
getVirtualScreenshareParticipantOwnerId,
isScreenShareParticipant
} from '../participants/functions';
import { IParticipant } from '../participants/types';
import { toState } from '../redux/functions';
import {
getUserSelectedCameraDeviceId,
getUserSelectedMicDeviceId
} from '../settings/functions.any';
// @ts-ignore
import loadEffects from './loadEffects';
import logger from './logger';
import { ITrack } from './reducer';
import { ITrackOptions } from './types';
import { ITrack } from './types';
/**
* Returns root tracks state.
@ -79,223 +68,6 @@ export function isParticipantVideoMuted(participant: IParticipant, state: IRedux
return isParticipantMediaMuted(participant, MEDIA_TYPE.VIDEO, state);
}
/**
* Creates a local video track for presenter. The constraints are computed based
* on the height of the desktop that is being shared.
*
* @param {Object} options - The options with which the local presenter track
* is to be created.
* @param {string|null} [options.cameraDeviceId] - Camera device id or
* {@code undefined} to use app's settings.
* @param {number} desktopHeight - The height of the desktop that is being
* shared.
* @returns {Promise<JitsiLocalTrack>}
*/
export async function createLocalPresenterTrack(options: ITrackOptions, desktopHeight: number) {
const { cameraDeviceId } = options;
// compute the constraints of the camera track based on the resolution
// of the desktop screen that is being shared.
const cameraHeights = [ 180, 270, 360, 540, 720 ];
const proportion = 5;
const result = cameraHeights.find(
height => (desktopHeight / proportion) < height);
const constraints = {
video: {
aspectRatio: 4 / 3,
height: {
ideal: result
}
}
};
const [ videoTrack ] = await JitsiMeetJS.createLocalTracks(
{
cameraDeviceId,
constraints,
devices: [ 'video' ]
});
videoTrack.type = MEDIA_TYPE.PRESENTER;
return videoTrack;
}
/**
* Create local tracks of specific types.
*
* @param {Object} options - The options with which the local tracks are to be
* created.
* @param {string|null} [options.cameraDeviceId] - Camera device id or
* {@code undefined} to use app's settings.
* @param {string[]} options.devices - Required track types such as 'audio'
* and/or 'video'.
* @param {string|null} [options.micDeviceId] - Microphone device id or
* {@code undefined} to use app's settings.
* @param {number|undefined} [options.timeout] - A timeout for JitsiMeetJS.createLocalTracks used to create the tracks.
* @param {boolean} [options.firePermissionPromptIsShownEvent] - Whether lib-jitsi-meet
* should check for a {@code getUserMedia} permission prompt and fire a
* corresponding event.
* @param {IStore} store - The redux store in the context of which the function
* is to execute and from which state such as {@code config} is to be retrieved.
* @returns {Promise<JitsiLocalTrack[]>}
*/
export function createLocalTracksF(options: ITrackOptions = {}, store?: IStore) {
let { cameraDeviceId, micDeviceId } = options;
const {
desktopSharingSourceDevice,
desktopSharingSources,
firePermissionPromptIsShownEvent,
timeout
} = options;
if (typeof APP !== 'undefined') {
// TODO The app's settings should go in the redux store and then the
// reliance on the global variable APP will go away.
if (!store) {
store = APP.store; // eslint-disable-line no-param-reassign
}
const state = store.getState();
if (typeof cameraDeviceId === 'undefined' || cameraDeviceId === null) {
cameraDeviceId = getUserSelectedCameraDeviceId(state);
}
if (typeof micDeviceId === 'undefined' || micDeviceId === null) {
micDeviceId = getUserSelectedMicDeviceId(state);
}
}
// @ts-ignore
const state = store.getState();
const {
desktopSharingFrameRate,
firefox_fake_device, // eslint-disable-line camelcase
resolution
} = state['features/base/config'];
const constraints = options.constraints ?? state['features/base/config'].constraints;
return (
loadEffects(store).then((effectsArray: Object[]) => {
// Filter any undefined values returned by Promise.resolve().
const effects = effectsArray.filter(effect => Boolean(effect));
return JitsiMeetJS.createLocalTracks(
{
cameraDeviceId,
constraints,
desktopSharingFrameRate,
desktopSharingSourceDevice,
desktopSharingSources,
// Copy array to avoid mutations inside library.
devices: options.devices?.slice(0),
effects,
firefox_fake_device, // eslint-disable-line camelcase
firePermissionPromptIsShownEvent,
micDeviceId,
resolution,
timeout
})
.catch((err: Error) => {
logger.error('Failed to create local tracks', options.devices, err);
return Promise.reject(err);
});
}));
}
/**
* Returns an object containing a promise which resolves with the created tracks &
* the errors resulting from that process.
*
* @returns {Promise<JitsiLocalTrack>}
*
* @todo Refactor to not use APP.
*/
export function createPrejoinTracks() {
const errors: any = {};
const initialDevices = [ 'audio' ];
const requestedAudio = true;
let requestedVideo = false;
const { startAudioOnly, startWithAudioMuted, startWithVideoMuted } = APP.store.getState()['features/base/settings'];
// Always get a handle on the audio input device so that we have statistics even if the user joins the
// conference muted. Previous implementation would only acquire the handle when the user first unmuted,
// which would result in statistics (such as "No audio input" or "Are you trying to speak?") being available
// only after that point.
if (startWithAudioMuted) {
APP.store.dispatch(setAudioMuted(true));
}
if (!startWithVideoMuted && !startAudioOnly) {
initialDevices.push('video');
requestedVideo = true;
}
let tryCreateLocalTracks;
if (!requestedAudio && !requestedVideo) {
// Resolve with no tracks
tryCreateLocalTracks = Promise.resolve([]);
} else {
tryCreateLocalTracks = createLocalTracksF({
devices: initialDevices,
firePermissionPromptIsShownEvent: true
}, APP.store)
.catch((err: Error) => {
if (requestedAudio && requestedVideo) {
// Try audio only...
errors.audioAndVideoError = err;
return (
createLocalTracksF({
devices: [ 'audio' ],
firePermissionPromptIsShownEvent: true
}));
} else if (requestedAudio && !requestedVideo) {
errors.audioOnlyError = err;
return [];
} else if (requestedVideo && !requestedAudio) {
errors.videoOnlyError = err;
return [];
}
logger.error('Should never happen');
})
.catch((err: Error) => {
// Log this just in case...
if (!requestedAudio) {
logger.error('The impossible just happened', err);
}
errors.audioOnlyError = err;
// Try video only...
return requestedVideo
? createLocalTracksF({
devices: [ 'video' ],
firePermissionPromptIsShownEvent: true
})
: [];
})
.catch((err: Error) => {
// Log this just in case...
if (!requestedVideo) {
logger.error('The impossible just happened', err);
}
errors.videoOnlyError = err;
return [];
});
}
return {
tryCreateLocalTracks,
errors
};
}
/**
* Returns local audio track.
*
@ -667,16 +439,3 @@ export function setTrackMuted(track: any, muted: boolean, state: IReduxState) {
}
});
}
/**
* Determines whether toggle camera should be enabled or not.
*
* @param {Function|Object} stateful - The redux store or {@code getState} function.
* @returns {boolean} - Whether toggle camera should be enabled.
*/
export function isToggleCameraEnabled(stateful: IStateful) {
const state = toState(stateful);
const { videoInput } = state['features/base/devices'].availableDevices;
return isMobileBrowser() && Number(videoInput?.length) > 1;
}

View File

@ -0,0 +1,45 @@
import { IStore } from '../../app/types';
import JitsiMeetJS from '../lib-jitsi-meet';
import { ITrackOptions } from './types';
export * from './functions.any';
/**
* Create local tracks of specific types.
*
* @param {Object} options - The options with which the local tracks are to be
* created.
* @param {string|null} [options.cameraDeviceId] - Camera device id or
* {@code undefined} to use app's settings.
* @param {string[]} options.devices - Required track types such as 'audio'
* and/or 'video'.
* @param {string|null} [options.micDeviceId] - Microphone device id or
* {@code undefined} to use app's settings.
* @param {number|undefined} [options.timeout] - A timeout for JitsiMeetJS.createLocalTracks used to create the tracks.
* @param {boolean} [options.firePermissionPromptIsShownEvent] - Whether lib-jitsi-meet
* should check for a {@code getUserMedia} permission prompt and fire a
* corresponding event.
* @param {IStore} store - The redux store in the context of which the function
* is to execute and from which state such as {@code config} is to be retrieved.
* @returns {Promise<JitsiLocalTrack[]>}
*/
export function createLocalTracksF(options: ITrackOptions = {}, store: IStore) {
const { cameraDeviceId, micDeviceId } = options;
const state = store.getState();
const {
resolution
} = state['features/base/config'];
const constraints = options.constraints ?? state['features/base/config'].constraints;
return JitsiMeetJS.createLocalTracks(
{
cameraDeviceId,
constraints,
// Copy array to avoid mutations inside library.
devices: options.devices?.slice(0),
micDeviceId,
resolution
});
}
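
A minimal sketch of calling the native variant from elsewhere in the tracks feature; the helper name and the import path are illustrative only:

    import { IStore } from '../../app/types';
    import { createLocalTracksF } from './functions.native';

    // No explicit device ids are passed, so undefined ids are forwarded
    // to JitsiMeetJS.createLocalTracks and the library picks its defaults.
    function createDefaultTracks(store: IStore) {
        return createLocalTracksF({ devices: [ 'audio', 'video' ] }, store);
    }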

View File

@ -0,0 +1,242 @@
import { IStore } from '../../app/types';
import { IStateful } from '../app/types';
import { isMobileBrowser } from '../environment/utils';
import JitsiMeetJS from '../lib-jitsi-meet';
import { setAudioMuted } from '../media/actions';
import { MEDIA_TYPE } from '../media/constants';
import { toState } from '../redux/functions';
import {
getUserSelectedCameraDeviceId,
getUserSelectedMicDeviceId
} from '../settings/functions.web';
// @ts-ignore
import loadEffects from './loadEffects';
import logger from './logger';
import { ITrackOptions } from './types';
export * from './functions.any';
/**
* Create local tracks of specific types.
*
* @param {Object} options - The options with which the local tracks are to be
* created.
* @param {string|null} [options.cameraDeviceId] - Camera device id or
* {@code undefined} to use app's settings.
* @param {string[]} options.devices - Required track types such as 'audio'
* and/or 'video'.
* @param {string|null} [options.micDeviceId] - Microphone device id or
* {@code undefined} to use app's settings.
* @param {number|undefined} [options.timeout] - A timeout for JitsiMeetJS.createLocalTracks used to create the tracks.
* @param {boolean} [options.firePermissionPromptIsShownEvent] - Whether lib-jitsi-meet
* should check for a {@code getUserMedia} permission prompt and fire a
* corresponding event.
* @param {IStore} store - The redux store in the context of which the function
* is to execute and from which state such as {@code config} is to be retrieved.
* @returns {Promise<JitsiLocalTrack[]>}
*/
export function createLocalTracksF(options: ITrackOptions = {}, store?: IStore) {
let { cameraDeviceId, micDeviceId } = options;
const {
desktopSharingSourceDevice,
desktopSharingSources,
firePermissionPromptIsShownEvent,
timeout
} = options;
// TODO The app's settings should go in the redux store and then the
// reliance on the global variable APP will go away.
store = store || APP.store; // eslint-disable-line no-param-reassign
const state = store.getState();
if (typeof cameraDeviceId === 'undefined' || cameraDeviceId === null) {
cameraDeviceId = getUserSelectedCameraDeviceId(state);
}
if (typeof micDeviceId === 'undefined' || micDeviceId === null) {
micDeviceId = getUserSelectedMicDeviceId(state);
}
const {
desktopSharingFrameRate,
firefox_fake_device, // eslint-disable-line camelcase
resolution
} = state['features/base/config'];
const constraints = options.constraints ?? state['features/base/config'].constraints;
return (
loadEffects(store).then((effectsArray: Object[]) => {
// Filter any undefined values returned by Promise.resolve().
const effects = effectsArray.filter(effect => Boolean(effect));
return JitsiMeetJS.createLocalTracks(
{
cameraDeviceId,
constraints,
desktopSharingFrameRate,
desktopSharingSourceDevice,
desktopSharingSources,
// Copy array to avoid mutations inside library.
devices: options.devices?.slice(0),
effects,
firefox_fake_device, // eslint-disable-line camelcase
firePermissionPromptIsShownEvent,
micDeviceId,
resolution,
timeout
})
.catch((err: Error) => {
logger.error('Failed to create local tracks', options.devices, err);
return Promise.reject(err);
});
}));
}
/**
* Creates a local video track for presenter. The constraints are computed based
* on the height of the desktop that is being shared.
*
* @param {Object} options - The options with which the local presenter track
* is to be created.
* @param {string|null} [options.cameraDeviceId] - Camera device id or
* {@code undefined} to use app's settings.
* @param {number} desktopHeight - The height of the desktop that is being
* shared.
* @returns {Promise<JitsiLocalTrack>}
*/
export async function createLocalPresenterTrack(options: ITrackOptions, desktopHeight: number) {
const { cameraDeviceId } = options;
// compute the constraints of the camera track based on the resolution
// of the desktop screen that is being shared.
const cameraHeights = [ 180, 270, 360, 540, 720 ];
const proportion = 5;
const result = cameraHeights.find(
height => (desktopHeight / proportion) < height);
const constraints = {
video: {
aspectRatio: 4 / 3,
height: {
ideal: result
}
}
};
const [ videoTrack ] = await JitsiMeetJS.createLocalTracks(
{
cameraDeviceId,
constraints,
devices: [ 'video' ]
});
videoTrack.type = MEDIA_TYPE.PRESENTER;
return videoTrack;
}
/**
* Returns an object containing a promise which resolves with the created tracks &
* the errors resulting from that process.
*
* @returns {Promise<JitsiLocalTrack>}
*
* @todo Refactor to not use APP.
*/
export function createPrejoinTracks() {
const errors: any = {};
const initialDevices = [ 'audio' ];
const requestedAudio = true;
let requestedVideo = false;
const { startAudioOnly, startWithAudioMuted, startWithVideoMuted } = APP.store.getState()['features/base/settings'];
// Always get a handle on the audio input device so that we have statistics even if the user joins the
// conference muted. Previous implementation would only acquire the handle when the user first unmuted,
// which would result in statistics (such as "No audio input" or "Are you trying to speak?") being available
// only after that point.
if (startWithAudioMuted) {
APP.store.dispatch(setAudioMuted(true));
}
if (!startWithVideoMuted && !startAudioOnly) {
initialDevices.push('video');
requestedVideo = true;
}
let tryCreateLocalTracks;
if (!requestedAudio && !requestedVideo) {
// Resolve with no tracks
tryCreateLocalTracks = Promise.resolve([]);
} else {
tryCreateLocalTracks = createLocalTracksF({
devices: initialDevices,
firePermissionPromptIsShownEvent: true
}, APP.store)
.catch((err: Error) => {
if (requestedAudio && requestedVideo) {
// Try audio only...
errors.audioAndVideoError = err;
return (
createLocalTracksF({
devices: [ 'audio' ],
firePermissionPromptIsShownEvent: true
}));
} else if (requestedAudio && !requestedVideo) {
errors.audioOnlyError = err;
return [];
} else if (requestedVideo && !requestedAudio) {
errors.videoOnlyError = err;
return [];
}
logger.error('Should never happen');
})
.catch((err: Error) => {
// Log this just in case...
if (!requestedAudio) {
logger.error('The impossible just happened', err);
}
errors.audioOnlyError = err;
// Try video only...
return requestedVideo
? createLocalTracksF({
devices: [ 'video' ],
firePermissionPromptIsShownEvent: true
})
: [];
})
.catch((err: Error) => {
// Log this just in case...
if (!requestedVideo) {
logger.error('The impossible just happened', err);
}
errors.videoOnlyError = err;
return [];
});
}
return {
tryCreateLocalTracks,
errors
};
}
/**
* Determines whether toggle camera should be enabled or not.
*
* @param {Function|Object} stateful - The redux store or {@code getState} function.
* @returns {boolean} - Whether toggle camera should be enabled.
*/
export function isToggleCameraEnabled(stateful: IStateful) {
const state = toState(stateful);
const { videoInput } = state['features/base/devices'].availableDevices;
return isMobileBrowser() && Number(videoInput?.length) > 1;
}
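
As an illustration of consuming createPrejoinTracks on web (it relies on the global APP store, so this only makes sense inside the web app); the logging is just for the sketch:

    import { createPrejoinTracks } from './functions.web';

    const { tryCreateLocalTracks, errors } = createPrejoinTracks();

    tryCreateLocalTracks.then((tracks = []) => {
        if (errors.audioAndVideoError) {
            console.warn('audio+video failed, fell back to audio only:', errors.audioAndVideoError);
        }
        console.log(`created ${tracks.length} local track(s)`);
    });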

View File

@ -2,11 +2,9 @@ import { batch } from 'react-redux';
import { IStore } from '../../app/types';
import { _RESET_BREAKOUT_ROOMS } from '../../breakout-rooms/actionTypes';
import { hideNotification } from '../../notifications/actions';
import { isPrejoinPageVisible } from '../../prejoin/functions';
import { getCurrentConference } from '../conference/functions';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import { getAvailableDevices } from '../devices/actions';
import {
SET_AUDIO_MUTED,
SET_CAMERA_FACING_MODE,
@ -14,43 +12,31 @@ import {
SET_VIDEO_MUTED,
TOGGLE_CAMERA_FACING_MODE
} from '../media/actionTypes';
import { setScreenshareMuted, toggleCameraFacingMode } from '../media/actions';
import { toggleCameraFacingMode } from '../media/actions';
import {
CAMERA_FACING_MODE,
MEDIA_TYPE,
MediaType,
SCREENSHARE_MUTISM_AUTHORITY,
VIDEO_MUTISM_AUTHORITY,
VIDEO_TYPE
VIDEO_MUTISM_AUTHORITY
} from '../media/constants';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import StateListenerRegistry from '../redux/StateListenerRegistry';
import {
TRACK_ADDED,
TRACK_MUTE_UNMUTE_FAILED,
TRACK_NO_DATA_FROM_SOURCE,
TRACK_REMOVED,
TRACK_STOPPED,
TRACK_UPDATED
} from './actionTypes';
import {
createLocalTracksA,
destroyLocalTracks,
showNoDataFromSourceVideoError,
toggleScreensharing,
trackMuteUnmuteFailed,
trackNoDataFromSourceNotificationInfoChanged,
trackRemoved
} from './actions';
import {
getLocalTrack,
getTrackByJitsiTrack,
isUserInteractionRequiredForUnmute,
setTrackMuted
} from './functions';
import { ITrack } from './reducer';
import './subscriber';
/**
@ -63,29 +49,6 @@ import './subscriber';
*/
MiddlewareRegistry.register(store => next => action => {
switch (action.type) {
case TRACK_ADDED: {
const { local } = action.track;
// The devices list needs to be refreshed when no initial video permissions
// were granted and a local video track is added by unmuting the video.
if (local) {
store.dispatch(getAvailableDevices());
}
break;
}
case TRACK_NO_DATA_FROM_SOURCE: {
const result = next(action);
_handleNoDataFromSourceErrors(store, action);
return result;
}
case TRACK_REMOVED: {
_removeNoDataFromSourceNotification(store, action.track);
break;
}
case SET_AUDIO_MUTED:
if (!action.muted
&& isUserInteractionRequiredForUnmute(store.getState())) {
@ -153,82 +116,6 @@ MiddlewareRegistry.register(store => next => action => {
}
break;
}
case TRACK_MUTE_UNMUTE_FAILED: {
const { jitsiTrack } = action.track;
const muted = action.wasMuted;
const isVideoTrack = jitsiTrack.getType() !== MEDIA_TYPE.AUDIO;
if (typeof APP !== 'undefined') {
if (isVideoTrack && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP
&& getMultipleVideoSendingSupportFeatureFlag(store.getState())) {
store.dispatch(setScreenshareMuted(!muted));
} else if (isVideoTrack) {
APP.conference.setVideoMuteStatus();
} else {
APP.conference.setAudioMuteStatus(!muted);
}
}
break;
}
case TRACK_STOPPED: {
const { jitsiTrack } = action.track;
if (typeof APP !== 'undefined'
&& getMultipleVideoSendingSupportFeatureFlag(store.getState())
&& jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
store.dispatch(toggleScreensharing(false));
}
break;
}
case TRACK_UPDATED: {
// TODO Remove the following calls to APP.UI once components interested
// in track mute changes are moved into React and/or redux.
if (typeof APP !== 'undefined') {
const result = next(action);
const state = store.getState();
if (isPrejoinPageVisible(state)) {
return result;
}
const { jitsiTrack } = action.track;
const muted = jitsiTrack.isMuted();
const participantID = jitsiTrack.getParticipantId();
const isVideoTrack = jitsiTrack.type !== MEDIA_TYPE.AUDIO;
if (isVideoTrack) {
// Do not change the video mute state for local presenter tracks.
if (jitsiTrack.type === MEDIA_TYPE.PRESENTER) {
APP.conference.mutePresenter(muted);
} else if (jitsiTrack.isLocal() && !(jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP)) {
APP.conference.setVideoMuteStatus();
} else if (jitsiTrack.isLocal() && muted && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
!getMultipleVideoSendingSupportFeatureFlag(state)
&& store.dispatch(toggleScreensharing(false, false, true));
} else {
APP.UI.setVideoMuted(participantID);
}
} else if (jitsiTrack.isLocal()) {
APP.conference.setAudioMuteStatus(muted);
} else {
APP.UI.setAudioMuted(participantID, muted);
}
return result;
}
// Mobile.
const { jitsiTrack, local } = action.track;
if (local && jitsiTrack.isMuted()
&& jitsiTrack.type === MEDIA_TYPE.VIDEO && jitsiTrack.videoType === VIDEO_TYPE.DESKTOP) {
store.dispatch(toggleScreensharing(false));
}
break;
}
}
return next(action);
@ -259,53 +146,6 @@ StateListenerRegistry.register(
}
});
/**
* Handles no data from source errors.
*
* @param {Store} store - The redux store in which the specified action is
* dispatched.
* @param {Action} action - The redux action dispatched in the specified store.
* @private
* @returns {void}
*/
function _handleNoDataFromSourceErrors(store: IStore, action: any) {
const { getState, dispatch } = store;
const track = getTrackByJitsiTrack(getState()['features/base/tracks'], action.track.jitsiTrack);
if (!track || !track.local) {
return;
}
const { jitsiTrack } = track;
if (track.mediaType === MEDIA_TYPE.AUDIO && track.isReceivingData) {
_removeNoDataFromSourceNotification(store, action.track);
}
if (track.mediaType === MEDIA_TYPE.VIDEO) {
const { noDataFromSourceNotificationInfo = {} } = track;
if (track.isReceivingData) {
if (noDataFromSourceNotificationInfo.timeout) {
clearTimeout(noDataFromSourceNotificationInfo.timeout);
dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, undefined));
}
// try to remove the notification if there is one.
_removeNoDataFromSourceNotification(store, action.track);
} else {
if (noDataFromSourceNotificationInfo.timeout) {
return;
}
const timeout = setTimeout(() => dispatch(showNoDataFromSourceVideoError(jitsiTrack)), 5000);
dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, { timeout }));
}
}
}
/**
* Gets the local track associated with a specific {@code MEDIA_TYPE} in a
* specific redux store.
@ -334,23 +174,6 @@ function _getLocalTrack(
includePending));
}
/**
* Removes the no data from source notification associated with the JitsiTrack if displayed.
*
* @param {Store} store - The redux store.
* @param {Track} track - The track whose notification is to be removed, if displayed.
* @returns {void}
*/
function _removeNoDataFromSourceNotification({ getState, dispatch }: IStore, track: ITrack) {
const t = getTrackByJitsiTrack(getState()['features/base/tracks'], track.jitsiTrack);
const { jitsiTrack, noDataFromSourceNotificationInfo = {} } = t || {};
if (noDataFromSourceNotificationInfo?.uid) {
dispatch(hideNotification(noDataFromSourceNotificationInfo.uid));
dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, undefined));
}
}
/**
* Mutes or unmutes a local track with a specific media type.
*

View File

@ -0,0 +1,38 @@
import {
MEDIA_TYPE,
VIDEO_TYPE
} from '../media/constants';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import {
TRACK_UPDATED
} from './actionTypes';
import {
toggleScreensharing
} from './actions.native';
import './middleware.any';
/**
* Middleware that captures LIB_DID_DISPOSE and LIB_DID_INIT actions and,
* respectively, creates/destroys local media tracks. Also listens to
* media-related actions and performs corresponding operations with tracks.
*
* @param {Store} store - The redux store.
* @returns {Function}
*/
MiddlewareRegistry.register(store => next => action => {
switch (action.type) {
case TRACK_UPDATED: {
const { jitsiTrack, local } = action.track;
if (local && jitsiTrack.isMuted()
&& jitsiTrack.type === MEDIA_TYPE.VIDEO && jitsiTrack.videoType === VIDEO_TYPE.DESKTOP) {
store.dispatch(toggleScreensharing(false));
}
break;
}
}
return next(action);
});

View File

@ -0,0 +1,198 @@
import { IStore } from '../../app/types';
import { hideNotification } from '../../notifications/actions';
import { isPrejoinPageVisible } from '../../prejoin/functions';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import { getAvailableDevices } from '../devices/actions.web';
import { setScreenshareMuted } from '../media/actions';
import {
MEDIA_TYPE,
VIDEO_TYPE
} from '../media/constants';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import {
TRACK_ADDED,
TRACK_MUTE_UNMUTE_FAILED,
TRACK_NO_DATA_FROM_SOURCE,
TRACK_REMOVED,
TRACK_STOPPED,
TRACK_UPDATED
} from './actionTypes';
import {
showNoDataFromSourceVideoError,
toggleScreensharing,
trackNoDataFromSourceNotificationInfoChanged
} from './actions.web';
import {
getTrackByJitsiTrack
} from './functions.web';
import { ITrack } from './types';
import './middleware.any';
/**
* Middleware that captures LIB_DID_DISPOSE and LIB_DID_INIT actions and,
* respectively, creates/destroys local media tracks. Also listens to
* media-related actions and performs corresponding operations with tracks.
*
* @param {Store} store - The redux store.
* @returns {Function}
*/
MiddlewareRegistry.register(store => next => action => {
switch (action.type) {
case TRACK_ADDED: {
const { local } = action.track;
// The devices list needs to be refreshed when no initial video permissions
// were granted and a local video track is added by unmuting the video.
if (local) {
store.dispatch(getAvailableDevices());
}
break;
}
case TRACK_NO_DATA_FROM_SOURCE: {
const result = next(action);
_handleNoDataFromSourceErrors(store, action);
return result;
}
case TRACK_REMOVED: {
_removeNoDataFromSourceNotification(store, action.track);
break;
}
case TRACK_MUTE_UNMUTE_FAILED: {
const { jitsiTrack } = action.track;
const muted = action.wasMuted;
const isVideoTrack = jitsiTrack.getType() !== MEDIA_TYPE.AUDIO;
if (isVideoTrack && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP
&& getMultipleVideoSendingSupportFeatureFlag(store.getState())) {
store.dispatch(setScreenshareMuted(!muted));
} else if (isVideoTrack) {
APP.conference.setVideoMuteStatus();
} else {
APP.conference.setAudioMuteStatus(!muted);
}
break;
}
case TRACK_STOPPED: {
const { jitsiTrack } = action.track;
if (getMultipleVideoSendingSupportFeatureFlag(store.getState())
&& jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
store.dispatch(toggleScreensharing(false));
}
break;
}
case TRACK_UPDATED: {
// TODO Remove the following calls to APP.UI once components interested
// in track mute changes are moved into React and/or redux.
const result = next(action);
const state = store.getState();
if (isPrejoinPageVisible(state)) {
return result;
}
const { jitsiTrack } = action.track;
const muted = jitsiTrack.isMuted();
const participantID = jitsiTrack.getParticipantId();
const isVideoTrack = jitsiTrack.type !== MEDIA_TYPE.AUDIO;
if (isVideoTrack) {
// Do not change the video mute state for local presenter tracks.
if (jitsiTrack.type === MEDIA_TYPE.PRESENTER) {
APP.conference.mutePresenter(muted);
} else if (jitsiTrack.isLocal() && !(jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP)) {
APP.conference.setVideoMuteStatus();
} else if (jitsiTrack.isLocal() && muted && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
!getMultipleVideoSendingSupportFeatureFlag(state)
&& store.dispatch(toggleScreensharing(false, false, true));
} else {
APP.UI.setVideoMuted(participantID);
}
} else if (jitsiTrack.isLocal()) {
APP.conference.setAudioMuteStatus(muted);
} else {
APP.UI.setAudioMuted(participantID, muted);
}
return result;
}
}
return next(action);
});
/**
* Handles no data from source errors.
*
* @param {Store} store - The redux store in which the specified action is
* dispatched.
* @param {Action} action - The redux action dispatched in the specified store.
* @private
* @returns {void}
*/
function _handleNoDataFromSourceErrors(store: IStore, action: any) {
const { getState, dispatch } = store;
const track = getTrackByJitsiTrack(getState()['features/base/tracks'], action.track.jitsiTrack);
if (!track || !track.local) {
return;
}
const { jitsiTrack } = track;
if (track.mediaType === MEDIA_TYPE.AUDIO && track.isReceivingData) {
_removeNoDataFromSourceNotification(store, action.track);
}
if (track.mediaType === MEDIA_TYPE.VIDEO) {
const { noDataFromSourceNotificationInfo = {} } = track;
if (track.isReceivingData) {
if (noDataFromSourceNotificationInfo.timeout) {
clearTimeout(noDataFromSourceNotificationInfo.timeout);
dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, undefined));
}
// try to remove the notification if there is one.
_removeNoDataFromSourceNotification(store, action.track);
} else {
if (noDataFromSourceNotificationInfo.timeout) {
return;
}
const timeout = setTimeout(() => dispatch(showNoDataFromSourceVideoError(jitsiTrack)), 5000);
dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, { timeout }));
}
}
}
/**
* Removes the no data from source notification associated with the JitsiTrack if displayed.
*
* @param {Store} store - The redux store.
* @param {Track} track - The track whose notification is to be removed, if displayed.
* @returns {void}
*/
function _removeNoDataFromSourceNotification({ getState, dispatch }: IStore, track: ITrack) {
const t = getTrackByJitsiTrack(getState()['features/base/tracks'], track.jitsiTrack);
const { jitsiTrack, noDataFromSourceNotificationInfo = {} } = t || {};
if (noDataFromSourceNotificationInfo?.uid) {
dispatch(hideNotification(noDataFromSourceNotificationInfo.uid));
dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, undefined));
}
}

View File

@ -1,4 +1,3 @@
import { MediaType } from '../media/constants';
import { PARTICIPANT_ID_CHANGED } from '../participants/actionTypes';
import ReducerRegistry from '../redux/ReducerRegistry';
import { set } from '../redux/functions';
@ -14,48 +13,7 @@ import {
TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT,
TRACK_WILL_CREATE
} from './actionTypes';
export interface ITrack {
isReceivingData: boolean;
jitsiTrack: any;
lastMediaEvent?: string;
local: boolean;
mediaType: MediaType;
mirror: boolean;
muted: boolean;
noDataFromSourceNotificationInfo?: {
timeout?: number;
uid?: string;
};
participantId: string;
streamingStatus?: string;
videoStarted: boolean;
videoType?: string | null;
}
/**
* Track type.
*
* @typedef {object} Track
* @property {JitsiLocalTrack|JitsiRemoteTrack} jitsiTrack - The associated
* {@code JitsiTrack} instance. Optional for local tracks if those are still
* being created (ie {@code getUserMedia} is still in progress).
* @property {Promise} [gumProcess] - If a local track is still being created,
* it will have no {@code JitsiTrack}, but a {@code gumProcess} set to a
* {@code Promise} with an extra {@code cancel()}.
* @property {boolean} local=false - If the track is local.
* @property {MEDIA_TYPE} mediaType=false - The media type of the track.
* @property {boolean} mirror=false - The indicator which determines whether the
* display/rendering of the track should be mirrored. It only makes sense in the
* context of video (at least at the time of this writing).
* @property {boolean} muted=false - If the track is muted.
* @property {(string|undefined)} participantId - The ID of the participant whom
* the track belongs to.
* @property {boolean} videoStarted=false - If the video track has already
* started to play.
* @property {(VIDEO_TYPE|undefined)} videoType - The type of video track if
* any.
*/
import { ITrack } from './types';
/**
* Reducer function for a single track.

View File

@ -1,3 +1,5 @@
import { MediaType } from '../media/constants';
export interface ITrackOptions {
cameraDeviceId?: string | null;
constraints?: {
@ -18,6 +20,47 @@ export interface ITrackOptions {
timeout?: number;
}
/**
* Track type.
*
* @typedef {object} Track
* @property {JitsiLocalTrack|JitsiRemoteTrack} jitsiTrack - The associated
* {@code JitsiTrack} instance. Optional for local tracks if those are still
* being created (ie {@code getUserMedia} is still in progress).
* @property {Promise} [gumProcess] - If a local track is still being created,
* it will have no {@code JitsiTrack}, but a {@code gumProcess} set to a
* {@code Promise} with and extra {@code cancel()}.
* @property {boolean} local=false - If the track is local.
* @property {MEDIA_TYPE} mediaType=false - The media type of the track.
* @property {boolean} mirror=false - The indicator which determines whether the
* display/rendering of the track should be mirrored. It only makes sense in the
* context of video (at least at the time of this writing).
* @property {boolean} muted=false - If the track is muted.
* @property {(string|undefined)} participantId - The ID of the participant whom
* the track belongs to.
* @property {boolean} videoStarted=false - If the video track has already
* started to play.
* @property {(VIDEO_TYPE|undefined)} videoType - The type of video track if
* any.
*/
export interface ITrack {
isReceivingData: boolean;
jitsiTrack: any;
lastMediaEvent?: string;
local: boolean;
mediaType: MediaType;
mirror: boolean;
muted: boolean;
noDataFromSourceNotificationInfo?: {
timeout?: number;
uid?: string;
};
participantId: string;
streamingStatus?: string;
videoStarted: boolean;
videoType?: string | null;
}
export interface IToggleScreenSharingOptions {
audioOnly: boolean;
enabled?: boolean;

View File

@ -1 +0,0 @@
export * from './native';

View File

@ -1 +0,0 @@
export * from './web';

View File

@ -1,7 +1,9 @@
/* eslint-disable lines-around-comment */
import React from 'react';
import { TouchableRipple } from 'react-native-paper';
import Icon from '../../../icons/components/Icon';
// @ts-ignore
import styles from '../../../react/components/native/styles';
import { IIconButtonProps } from '../../../react/types';
import { BUTTON_TYPES } from '../../constants';

View File

@ -1 +0,0 @@
export { default as Button } from './Button';

View File

@ -63,7 +63,7 @@ export function escapeRegexp(s: string) {
* @param {Object} w - Window object to use instead of the built in one.
* @returns {string}
*/
export function getBaseUrl(w: Window = window) {
export function getBaseUrl(w: typeof window = window) {
const doc = w.document;
const base = doc.querySelector('base');

View File

@ -5,7 +5,7 @@ import { IStore } from '../../../app/types';
import { translate } from '../../../base/i18n/functions';
import { connect } from '../../../base/redux/functions';
import { updateSettings } from '../../../base/settings/actions';
import { Button } from '../../../base/ui/components/web';
import Button from '../../../base/ui/components/web/Button';
import Input from '../../../base/ui/components/web/Input';
// @ts-ignore

View File

@ -8,8 +8,8 @@ import {
sendAnalytics
} from '../../analytics';
import { getCurrentConference } from '../../base/conference/functions';
import checkChromeExtensionsInstalled from '../../base/environment/checkChromeExtensionsInstalled';
import {
checkChromeExtensionsInstalled,
isMobileBrowser
} from '../../base/environment/utils';
import { translate } from '../../base/i18n';

View File

@ -1,6 +1,6 @@
import { JitsiParticipantConnectionStatus, JitsiTrackStreamingStatus } from '../base/lib-jitsi-meet';
import { IParticipant } from '../base/participants/types';
import { ITrack } from '../base/tracks/reducer';
import { ITrack } from '../base/tracks/types';
/**
* Checks if the passed track's streaming status is active.

View File

@ -8,7 +8,7 @@ import { createDeepLinkingPageEvent, sendAnalytics } from '../../analytics';
import { isSupportedBrowser } from '../../base/environment';
import { translate } from '../../base/i18n';
import { connect } from '../../base/redux';
import { Button } from '../../base/ui/components/web';
import Button from '../../base/ui/components/web/Button';
import { BUTTON_TYPES } from '../../base/ui/constants';
import {
openDesktopApp,

View File

@ -6,13 +6,13 @@ import {
setAudioInputDeviceAndUpdateSettings,
setAudioOutputDevice,
setVideoInputDeviceAndUpdateSettings
} from '../base/devices/actions';
} from '../base/devices/actions.web';
import {
areDeviceLabelsInitialized,
getAudioOutputDeviceId,
getDeviceIdByLabel,
groupDevicesByKind
} from '../base/devices/functions';
} from '../base/devices/functions.web';
import { isIosMobileBrowser } from '../base/environment/utils';
import JitsiMeetJS from '../base/lib-jitsi-meet';
import { toState } from '../base/redux/functions';
@ -20,7 +20,7 @@ import {
getUserSelectedCameraDeviceId,
getUserSelectedMicDeviceId,
getUserSelectedOutputDeviceId
} from '../base/settings/functions.any';
} from '../base/settings/functions.web';
/**
* Returns the properties for the device selection dialog from Redux state.

View File

@ -8,7 +8,7 @@ import {
KICKED_OUT
} from '../base/conference';
import { SET_CONFIG } from '../base/config';
import { NOTIFY_CAMERA_ERROR, NOTIFY_MIC_ERROR } from '../base/devices';
import { NOTIFY_CAMERA_ERROR, NOTIFY_MIC_ERROR } from '../base/devices/actionTypes';
import { JitsiConferenceErrors } from '../base/lib-jitsi-meet';
import {
DOMINANT_SPEAKER_CHANGED,

View File

@ -1,4 +1,6 @@
export type DetectInput = {
// @ts-ignore
image: ImageBitmap | ImageData;
threshold: number;
};

View File

@ -1,3 +1,4 @@
/* eslint-disable lines-around-comment */
import React, { useCallback } from 'react';
import { View } from 'react-native';
import { useDispatch, useSelector } from 'react-redux';
@ -7,10 +8,13 @@ import { IconHorizontalPoints } from '../../../base/icons/svg';
import Button from '../../../base/ui/components/native/Button';
import IconButton from '../../../base/ui/components/native/IconButton';
import { BUTTON_TYPES } from '../../../base/ui/constants';
// @ts-ignore
import MuteEveryoneDialog from '../../../video-menu/components/native/MuteEveryoneDialog';
import { isMoreActionsVisible, isMuteAllVisible } from '../../functions';
// @ts-ignore
import { ContextMenuMore } from './ContextMenuMore';
// @ts-ignore
import styles from './styles';

View File

@ -8,7 +8,7 @@ import { makeStyles } from 'tss-react/mui';
import { translate } from '../../../../base/i18n/functions';
import Icon from '../../../../base/icons/components/Icon';
import { IconArrowLeft } from '../../../../base/icons/svg';
import { Button } from '../../../../base/ui/components/web';
import Button from '../../../../base/ui/components/web/Button';
// @ts-ignore
import { getCountryCodeFromPhone } from '../../../utils';
// @ts-ignore

View File

@ -8,7 +8,7 @@ import { makeStyles } from 'tss-react/mui';
import { translate } from '../../../../base/i18n/functions';
import Icon from '../../../../base/icons/components/Icon';
import { IconClose } from '../../../../base/icons/svg';
import { Button } from '../../../../base/ui/components/web';
import Button from '../../../../base/ui/components/web/Button';
// @ts-ignore
import Label from '../Label';
// @ts-ignore

View File

@ -5,7 +5,7 @@ import React, { Component } from 'react';
import { translate } from '../../../base/i18n';
import { connect } from '../../../base/redux';
import { Button } from '../../../base/ui/components/web';
import Button from '../../../base/ui/components/web/Button';
import {
CALENDAR_TYPE,
MicrosoftSignInButton,

View File

@ -11,7 +11,7 @@ import { AbstractDialogTab } from '../../../base/dialog';
// @ts-ignore
import type { Props as AbstractDialogTabProps } from '../../../base/dialog';
import { translate } from '../../../base/i18n/functions';
import { Button } from '../../../base/ui/components/web';
import Button from '../../../base/ui/components/web/Button';
import Input from '../../../base/ui/components/web/Input';
// @ts-ignore
import { openLogoutDialog } from '../../actions';

View File

@ -4,11 +4,13 @@ import React from 'react';
import { areAudioLevelsEnabled } from '../../../../base/config/functions';
import {
getAudioInputDeviceData,
getAudioOutputDeviceData,
setAudioInputDeviceAndUpdateSettings,
setAudioOutputDevice as setAudioOutputDeviceAction
} from '../../../../base/devices';
} from '../../../../base/devices/actions.web';
import {
getAudioInputDeviceData,
getAudioOutputDeviceData
} from '../../../../base/devices/functions.web';
import Popover from '../../../../base/popover/components/Popover.web';
import { connect } from '../../../../base/redux';
import { SMALL_MOBILE_WIDTH } from '../../../../base/responsive-ui/constants';

View File

@ -3,9 +3,11 @@
import React from 'react';
import {
getVideoDeviceIds,
setVideoInputDeviceAndUpdateSettings
} from '../../../../base/devices';
} from '../../../../base/devices/actions.web';
import {
getVideoDeviceIds
} from '../../../../base/devices/functions.web';
import Popover from '../../../../base/popover/components/Popover.web';
import { connect } from '../../../../base/redux';
import { SMALL_MOBILE_WIDTH } from '../../../../base/responsive-ui/constants';

View File

@ -5,7 +5,6 @@ import { isNameReadOnly } from '../base/config/functions';
import { SERVER_URL_CHANGE_ENABLED } from '../base/flags/constants';
import { getFeatureFlag } from '../base/flags/functions';
import i18next, { DEFAULT_LANGUAGE, LANGUAGES } from '../base/i18n/i18next';
import { createLocalTrack } from '../base/lib-jitsi-meet/functions';
import {
getLocalParticipant,
isLocalParticipantModerator
@ -256,67 +255,6 @@ export function getSoundsTabProps(stateful: IStateful) {
};
}
/**
* Returns a promise which resolves with a list of objects containing
* all the video jitsiTracks and appropriate errors for the given device ids.
*
* @param {string[]} ids - The list of the camera ids for which to create tracks.
* @param {number} [timeout] - A timeout for the createLocalTrack function call.
*
* @returns {Promise<Object[]>}
*/
export function createLocalVideoTracks(ids: string[], timeout?: number) {
return Promise.all(ids.map(deviceId => createLocalTrack('video', deviceId, timeout)
.then((jitsiTrack: any) => {
return {
jitsiTrack,
deviceId
};
})
.catch(() => {
return {
jitsiTrack: null,
deviceId,
error: 'deviceSelection.previewUnavailable'
};
})));
}
/**
* Returns a promise which resolves with a list of objects containing
* the audio track and the corresponding audio device information.
*
* @param {Object[]} devices - A list of microphone devices.
* @param {number} [timeout] - A timeout for the createLocalTrack function call.
* @returns {Promise<{
* deviceId: string,
* hasError: boolean,
* jitsiTrack: Object,
* label: string
* }[]>}
*/
export function createLocalAudioTracks(devices: MediaDeviceInfo[], timeout?: number) {
return Promise.all(
devices.map(async ({ deviceId, label }) => {
let jitsiTrack = null;
let hasError = false;
try {
jitsiTrack = await createLocalTrack('audio', deviceId, timeout);
} catch (err) {
hasError = true;
}
return {
deviceId,
hasError,
jitsiTrack,
label
};
}));
}
/**
* Returns the visibility state of the audio settings.
*

View File

@ -0,0 +1 @@
export * from './functions.any';

View File

@ -0,0 +1,64 @@
import { createLocalTrack } from '../base/lib-jitsi-meet/functions';
export * from './functions.any';
/**
* Returns a promise which resolves with a list of objects containing
* all the video jitsiTracks and appropriate errors for the given device ids.
*
* @param {string[]} ids - The list of the camera ids for which to create tracks.
* @param {number} [timeout] - A timeout for the createLocalTrack function call.
*
* @returns {Promise<Object[]>}
*/
export function createLocalVideoTracks(ids: string[], timeout?: number) {
return Promise.all(ids.map(deviceId => createLocalTrack('video', deviceId, timeout)
.then((jitsiTrack: any) => {
return {
jitsiTrack,
deviceId
};
})
.catch(() => {
return {
jitsiTrack: null,
deviceId,
error: 'deviceSelection.previewUnavailable'
};
})));
}
/**
* Returns a promise which resolves with a list of objects containing
* the audio track and the corresponding audio device information.
*
* @param {Object[]} devices - A list of microphone devices.
* @param {number} [timeout] - A timeout for the createLocalTrack function call.
* @returns {Promise<{
* deviceId: string,
* hasError: boolean,
* jitsiTrack: Object,
* label: string
* }[]>}
*/
export function createLocalAudioTracks(devices: MediaDeviceInfo[], timeout?: number) {
return Promise.all(
devices.map(async ({ deviceId, label }) => {
let jitsiTrack = null;
let hasError = false;
try {
jitsiTrack = await createLocalTrack('audio', deviceId, timeout);
} catch (err) {
hasError = true;
}
return {
deviceId,
hasError,
jitsiTrack,
label
};
}));
}
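// Usage sketch, for illustration only (not part of this change). It assumes the
// microphone list comes from navigator.mediaDevices.enumerateDevices() and that
// the returned jitsiTrack objects expose dispose(), as lib-jitsi-meet local
// tracks do; previewMicrophones is a hypothetical helper, not an existing export.
async function previewMicrophones() {
    const devices = (await navigator.mediaDevices.enumerateDevices())
        .filter(d => d.kind === 'audioinput');
    const previews = await createLocalAudioTracks(devices, 5000);

    for (const { deviceId, hasError, jitsiTrack, label } of previews) {
        if (hasError) {
            console.warn(`No audio preview available for ${label} (${deviceId})`);
        } else {
            // Dispose preview tracks once they are no longer needed.
            await jitsiTrack.dispose();
        }
    }
}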

View File

@ -3,7 +3,7 @@ import debounce from 'lodash/debounce';
import { getMultipleVideoSupportFeatureFlag } from '../base/config/functions';
import StateListenerRegistry from '../base/redux/StateListenerRegistry';
import { equals } from '../base/redux/functions';
import { ITrack } from '../base/tracks/reducer';
import { ITrack } from '../base/tracks/types';
import { isFollowMeActive } from '../follow-me/functions';
import { setRemoteParticipantsWithScreenShare, virtualScreenshareParticipantsUpdated } from './actions.web';

View File

@ -24,6 +24,7 @@
"react/features/embed-meeting",
"react/features/face-landmarks",
"react/features/feedback",
"react/features/no-audio-signal",
"react/features/noise-suppression",
"react/features/screen-share",
"react/features/stream-effects/noise-suppression",