Compare commits

...

10 Commits

Author SHA1 Message Date
Pawel Domas e79ae56676 fix after rebase 2022-01-06 17:23:07 -06:00
Pawel Domas aadb5ada8c wip 2022-01-06 17:23:06 -06:00
Pawel Domas 06b32d5014 update ljm 2022-01-06 17:03:35 -06:00
Pawel Domas 6c6c8010db force display video 2022-01-06 16:32:42 -06:00
Pawel Domas c7911e32c2 fix video track selectors 2022-01-06 16:32:42 -06:00
Pawel Domas 30a22254ab Wip 2022-01-06 16:32:26 -06:00
Pawel Domas e8e2b20758 fix camera button muted state 2022-01-06 16:30:00 -06:00
Pawel Domas 3327d4cf92 do not hide remote videos when someone is sharing the screen 2022-01-06 16:30:00 -06:00
Pawel Domas 2c8fe1b3ba select the last video track to be displayed in the speaker view 2022-01-06 16:30:00 -06:00
Pawel Domas bfd37bb087 multiple video stream WiP 2022-01-06 16:29:56 -06:00
10 changed files with 121 additions and 81 deletions

View File

@@ -105,6 +105,7 @@ import {
updateSettings
} from './react/features/base/settings';
import {
addLocalTrack,
createLocalPresenterTrack,
createLocalTracksF,
destroyLocalTracks,
@@ -1911,7 +1912,24 @@ export default {
if (desktopVideoStream) {
logger.debug(`_switchToScreenSharing is using ${desktopVideoStream} for useVideoStream`);
await this.useVideoStream(desktopVideoStream);
await new Promise((resolve, reject) => {
_replaceLocalVideoTrackQueue.enqueue(onFinish => {
APP.store.dispatch(
addLocalTrack(desktopVideoStream))
.then(() => {
this._setSharingScreen(desktopVideoStream);
// TODO FIXME
// this.setVideoMuteStatus();
})
.then(resolve)
.catch(error => {
logger.error(`_switchToScreenSharing failed: ${error}`);
reject(error);
})
.then(onFinish);
});
});
}
if (this._desktopAudioStream) {

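Note: the conference.js hunk above wraps the screen-share switch in an outer Promise and pushes the work through _replaceLocalVideoTrackQueue, dispatching the new addLocalTrack action and calling onFinish once the dispatch settles. A minimal sketch of that enqueue/onFinish pattern in isolation (queue and doWork are illustrative stand-ins, not part of this change):

function runThroughQueue(queue, doWork) {
    return new Promise((resolve, reject) => {
        queue.enqueue(onFinish => {
            doWork()
                .then(resolve)
                .catch(reject)
                // Release the queue whether the work succeeded or failed.
                .then(onFinish);
        });
    });
}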
View File

@@ -7,7 +7,10 @@ import {
getPinnedParticipant,
getParticipantById
} from '../../../react/features/base/participants';
import { getTrackByMediaTypeAndParticipant } from '../../../react/features/base/tracks';
import {
getLastTrackByMediaTypeAndParticipant,
getTrackByMediaTypeAndParticipant
} from '../../../react/features/base/tracks';
import LargeVideoManager from './LargeVideoManager';
import { VIDEO_CONTAINER_TYPE } from './VideoContainer';
@@ -177,7 +180,7 @@ const VideoLayout = {
const currentContainerType = largeVideo.getCurrentContainerType();
const isOnLarge = this.isCurrentlyOnLarge(id);
const state = APP.store.getState();
const videoTrack = getTrackByMediaTypeAndParticipant(state['features/base/tracks'], MEDIA_TYPE.VIDEO, id);
const videoTrack = getLastTrackByMediaTypeAndParticipant(state['features/base/tracks'], MEDIA_TYPE.VIDEO, id);
const videoStream = videoTrack?.jitsiTrack;
if (isOnLarge && !forceUpdate

66 package-lock.json (generated)
View File

@@ -66,7 +66,7 @@
"jquery-i18next": "1.2.1",
"js-md5": "0.6.1",
"jwt-decode": "2.2.0",
"lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#55a03ac1b52f85dcbd9bfe339690ad88436ac029",
"lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#720985f944233df1fe189183d1b7d2789fb50b67",
"libflacjs": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
"lodash": "4.17.21",
"moment": "2.29.1",
@@ -3410,32 +3410,6 @@
"uuid": "bin/uuid"
}
},
"node_modules/@jitsi/sdp-interop": {
"version": "1.0.5",
"resolved": "git+ssh://git@github.com/jitsi/sdp-interop.git#4669790bb9020cc8f10c1d1f3823c26b08497547",
"integrity": "sha512-4nqEqJWyRFjHM/riI0DQRNx+mgx277iK0r5LhwVAHDZDBYbLN54vYcfZ6JepcmygQiixa8jet/gLJnikdH9wzQ==",
"license": "Apache-2.0",
"dependencies": {
"lodash.clonedeep": "4.5.0",
"sdp-transform": "2.14.1"
}
},
"node_modules/@jitsi/sdp-interop/node_modules/sdp-transform": {
"version": "2.14.1",
"resolved": "https://registry.npmjs.org/sdp-transform/-/sdp-transform-2.14.1.tgz",
"integrity": "sha512-RjZyX3nVwJyCuTo5tGPx+PZWkDMCg7oOLpSlhjDdZfwUoNqG1mM8nyj31IGHyaPWXhjbP7cdK3qZ2bmkJ1GzRw==",
"bin": {
"sdp-verify": "checker.js"
}
},
"node_modules/@jitsi/sdp-simulcast": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/@jitsi/sdp-simulcast/-/sdp-simulcast-0.4.0.tgz",
"integrity": "sha512-jXJM3/XM853UwPlxhSJx5Fc13G4158RIeTgjis3lA286Y7lp18GOR0xFlRvx/5UyhtE3qT2NzSmAwR+CpJohFw==",
"dependencies": {
"sdp-transform": "2.3.0"
}
},
"node_modules/@mapbox/node-pre-gyp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.5.tgz",
@@ -12530,15 +12504,13 @@
},
"node_modules/lib-jitsi-meet": {
"version": "0.0.0",
"resolved": "git+ssh://git@github.com/jitsi/lib-jitsi-meet.git#55a03ac1b52f85dcbd9bfe339690ad88436ac029",
"integrity": "sha512-0ZNhG4ZPzcH+2R7K5xa5tSNVK8CKrKVCGP/bjr07XtiV3pcY65OWI2mH+QzlMIMDOXqgqQtry9RHv4vmzy5pIg==",
"resolved": "git+ssh://git@github.com/jitsi/lib-jitsi-meet.git#720985f944233df1fe189183d1b7d2789fb50b67",
"integrity": "sha512-1+uctwC2glBkOnzQNlg8IYxhM5wrgG2zm7wtN5v3vCJk+sA5grMxBU1CNzY9IpZEvJuLu6Eq0fckrTDaynr0QA==",
"hasInstallScript": true,
"license": "Apache-2.0",
"dependencies": {
"@jitsi/js-utils": "2.0.0",
"@jitsi/logger": "2.0.0",
"@jitsi/sdp-interop": "github:jitsi/sdp-interop#4669790bb9020cc8f10c1d1f3823c26b08497547",
"@jitsi/sdp-simulcast": "0.4.0",
"async": "0.9.0",
"base64-js": "1.3.1",
"current-executing-script": "0.1.3",
@@ -23253,30 +23225,6 @@
}
}
},
"@jitsi/sdp-interop": {
"version": "git+ssh://git@github.com/jitsi/sdp-interop.git#4669790bb9020cc8f10c1d1f3823c26b08497547",
"integrity": "sha512-4nqEqJWyRFjHM/riI0DQRNx+mgx277iK0r5LhwVAHDZDBYbLN54vYcfZ6JepcmygQiixa8jet/gLJnikdH9wzQ==",
"from": "@jitsi/sdp-interop@github:jitsi/sdp-interop#4669790bb9020cc8f10c1d1f3823c26b08497547",
"requires": {
"lodash.clonedeep": "4.5.0",
"sdp-transform": "2.14.1"
},
"dependencies": {
"sdp-transform": {
"version": "2.14.1",
"resolved": "https://registry.npmjs.org/sdp-transform/-/sdp-transform-2.14.1.tgz",
"integrity": "sha512-RjZyX3nVwJyCuTo5tGPx+PZWkDMCg7oOLpSlhjDdZfwUoNqG1mM8nyj31IGHyaPWXhjbP7cdK3qZ2bmkJ1GzRw=="
}
}
},
"@jitsi/sdp-simulcast": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/@jitsi/sdp-simulcast/-/sdp-simulcast-0.4.0.tgz",
"integrity": "sha512-jXJM3/XM853UwPlxhSJx5Fc13G4158RIeTgjis3lA286Y7lp18GOR0xFlRvx/5UyhtE3qT2NzSmAwR+CpJohFw==",
"requires": {
"sdp-transform": "2.3.0"
}
},
"@mapbox/node-pre-gyp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.5.tgz",
@@ -30374,14 +30322,12 @@
}
},
"lib-jitsi-meet": {
"version": "git+ssh://git@github.com/jitsi/lib-jitsi-meet.git#55a03ac1b52f85dcbd9bfe339690ad88436ac029",
"integrity": "sha512-0ZNhG4ZPzcH+2R7K5xa5tSNVK8CKrKVCGP/bjr07XtiV3pcY65OWI2mH+QzlMIMDOXqgqQtry9RHv4vmzy5pIg==",
"from": "lib-jitsi-meet@github:jitsi/lib-jitsi-meet#55a03ac1b52f85dcbd9bfe339690ad88436ac029",
"version": "git+ssh://git@github.com/jitsi/lib-jitsi-meet.git#720985f944233df1fe189183d1b7d2789fb50b67",
"integrity": "sha512-1+uctwC2glBkOnzQNlg8IYxhM5wrgG2zm7wtN5v3vCJk+sA5grMxBU1CNzY9IpZEvJuLu6Eq0fckrTDaynr0QA==",
"from": "lib-jitsi-meet@github:jitsi/lib-jitsi-meet#720985f944233df1fe189183d1b7d2789fb50b67",
"requires": {
"@jitsi/js-utils": "2.0.0",
"@jitsi/logger": "2.0.0",
"@jitsi/sdp-interop": "github:jitsi/sdp-interop#4669790bb9020cc8f10c1d1f3823c26b08497547",
"@jitsi/sdp-simulcast": "0.4.0",
"async": "0.9.0",
"base64-js": "1.3.1",
"current-executing-script": "0.1.3",

View File

@@ -71,7 +71,7 @@
"jquery-i18next": "1.2.1",
"js-md5": "0.6.1",
"jwt-decode": "2.2.0",
"lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#55a03ac1b52f85dcbd9bfe339690ad88436ac029",
"lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#720985f944233df1fe189183d1b7d2789fb50b67",
"libflacjs": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
"lodash": "4.17.21",
"moment": "2.29.1",

View File

@@ -5,6 +5,7 @@ import {
sendAnalytics
} from '../../analytics';
import { NOTIFICATION_TIMEOUT_TYPE, showErrorNotification, showNotification } from '../../notifications';
import { getCurrentConference } from '../conference';
import { JitsiTrackErrors, JitsiTrackEvents, createLocalTrack } from '../lib-jitsi-meet';
import {
CAMERA_FACING_MODE,
@@ -312,6 +313,19 @@ export function replaceLocalTrack(oldTrack, newTrack, conference) {
};
}
// eslint-disable-next-line require-jsdoc
export function addLocalTrack(newTrack) {
return async (dispatch, getState) => {
const conference = getCurrentConference(getState());
if (conference) {
await conference.addTrack(newTrack);
}
return dispatch(_addTracks([ newTrack ]));
};
}
/**
* Replaces a stored track with another.
*

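Note: unlike replaceLocalTrack, the new addLocalTrack thunk above leaves the existing local tracks in place: it adds the JitsiTrack to the current conference (if one is joined) and then stores it via _addTracks. A hedged usage sketch mirroring the conference.js hunk earlier (attachDesktopTrack and desktopStream are illustrative names; addLocalTrack is imported from the base/tracks feature):

// Attach an extra local video track next to the existing camera track.
async function attachDesktopTrack(store, desktopStream) {
    await store.dispatch(addLocalTrack(desktopStream));
}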
View File

@@ -346,6 +346,18 @@ export function getLocalVideoTrack(tracks) {
return getLocalTrack(tracks, MEDIA_TYPE.VIDEO);
}
// eslint-disable-next-line require-jsdoc
export function getLocalCameraTrack(tracks) {
return getLocalTracks(tracks, false)
.find(t => t.videoType === VIDEO_TYPE.CAMERA);
}
// eslint-disable-next-line require-jsdoc
export function getLocalScreenTrack(tracks) {
return getLocalTracks(tracks, false)
.find(t => t.videoType === VIDEO_TYPE.DESKTOP);
}
/**
* Returns the media type of the local video, presenter or video.
*
@@ -399,6 +411,39 @@ export function getTrackByMediaTypeAndParticipant(
);
}
// eslint-disable-next-line require-jsdoc
export function getParticipantsCameraTrack(tracks, participantId) {
return tracks.find(
t => Boolean(t.jitsiTrack)
&& t.participantId === participantId
&& t.mediaType === MEDIA_TYPE.VIDEO
&& (t.videoType === undefined || t.videoType === VIDEO_TYPE.CAMERA)
);
}
// eslint-disable-next-line require-jsdoc
export function getParticipantsScreenTrack(tracks, participantId) {
return tracks.find(
t => Boolean(t.jitsiTrack)
&& t.participantId === participantId
&& t.mediaType === MEDIA_TYPE.VIDEO
&& t.videoType === VIDEO_TYPE.DESKTOP
);
}
// eslint-disable-next-line require-jsdoc
export function getLastTrackByMediaTypeAndParticipant(
tracks,
mediaType,
participantId) {
const userTracks = tracks.filter(
t => Boolean(t.jitsiTrack) && t.participantId === participantId && t.mediaType === mediaType
);
const selected = userTracks[userTracks.length - 1];
return selected;
}
/**
* Returns track source name of specified media type for specified participant id.
*
@@ -450,16 +495,15 @@ export function getTracksByMediaType(tracks, mediaType) {
*/
export function isLocalCameraTrackMuted(tracks) {
const presenterTrack = getLocalTrack(tracks, MEDIA_TYPE.PRESENTER);
const videoTrack = getLocalTrack(tracks, MEDIA_TYPE.VIDEO);
const localCameraTrack = getLocalCameraTrack(tracks);
// Make sure we check the mute status of only camera tracks, i.e.,
// presenter track when it exists, camera track when the presenter
// track doesn't exist.
if (presenterTrack) {
return isLocalTrackMuted(tracks, MEDIA_TYPE.PRESENTER);
} else if (videoTrack) {
return videoTrack.videoType === 'camera'
? isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO) : true;
} else if (localCameraTrack) {
return localCameraTrack.muted;
}
return true;

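Note: the hunks above add selectors that split a participant's video tracks into camera and desktop, plus getLastTrackByMediaTypeAndParticipant, which returns the most recently added matching track. A short usage sketch (pickVideoTracks and participantId are illustrative, not part of this change):

function pickVideoTracks(state, participantId) {
    const tracks = state['features/base/tracks'];

    return {
        camera: getParticipantsCameraTrack(tracks, participantId),
        screen: getParticipantsScreenTrack(tracks, participantId),

        // Whichever VIDEO track was added last for this participant.
        last: getLastTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, participantId)
    };
}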
View File

@@ -130,9 +130,13 @@ ReducerRegistry.register('features/base/tracks', (state = [], action) => {
let withoutTrackStub = state;
if (action.track.local) {
withoutTrackStub
= state.filter(
t => !t.local || t.mediaType !== action.track.mediaType);
// FIXME verify if it's working as expected
// The track stubs were introduced to track get user media in progress state.
// Now this check will be over complicated with multiple tracks per type.
withoutTrackStub = state.filter(
t => !t.local
|| t.mediaType !== action.track.mediaType
|| (t.jitsiTrack && t.jitsiTrack !== action.track.jitsiTrack));
}
return [ ...withoutTrackStub, action.track ];

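Note: the reworked TRACK_ADDED filter above keeps existing local tracks of the same media type as long as they wrap a different JitsiTrack, so a camera track and a desktop track can now coexist; only a stub with no jitsiTrack, or an entry wrapping the same JitsiTrack, is dropped. A simplified sketch on made-up data (not real app state):

const cameraJitsiTrack = {}; // placeholder JitsiTrack objects
const desktopJitsiTrack = {};

const existing = [
    { local: true, mediaType: 'video', jitsiTrack: cameraJitsiTrack },
    { local: true, mediaType: 'video' } // pending stub, no jitsiTrack yet
];
const incoming = { local: true, mediaType: 'video', jitsiTrack: desktopJitsiTrack };

const withoutTrackStub = existing.filter(
    t => !t.local
        || t.mediaType !== incoming.mediaType
        || (t.jitsiTrack && t.jitsiTrack !== incoming.jitsiTrack));

// The camera track survives (different jitsiTrack), the stub is dropped, and the
// reducer returns [ ...withoutTrackStub, incoming ], i.e. camera + desktop side by side.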
View File

@@ -571,7 +571,9 @@ function _mapStateToProps(state) {
const enableThumbnailReordering = testing.enableThumbnailReordering ?? true;
const { visible, remoteParticipants } = state['features/filmstrip'];
const reduceHeight = state['features/toolbox'].visible && toolbarButtons.length;
const remoteVideosVisible = shouldRemoteVideosBeVisible(state);
// TODO remove
// Forced true to stop hiding presenter's camera
const remoteVideosVisible = true;
const { isOpen: shiftRight } = state['features/chat'];
const {
gridDimensions = {},

View File

@@ -17,8 +17,8 @@ import { connect } from '../../../base/redux';
import { ASPECT_RATIO_NARROW } from '../../../base/responsive-ui/constants';
import { isTestModeEnabled } from '../../../base/testing';
import {
getLocalAudioTrack,
getLocalVideoTrack,
getLocalAudioTrack, getLocalCameraTrack, getLocalScreenTrack,
getLocalVideoTrack, getParticipantsCameraTrack, getParticipantsScreenTrack,
getTrackByMediaTypeAndParticipant,
updateLastTrackVideoMediaEvent
} from '../../../base/tracks';
@@ -192,7 +192,9 @@ export type Props = {|
/**
* Styles that will be set to the Thumbnail's main span element.
*/
style?: ?Object
style?: ?Object,
_hasOnlyScreenStream: boolean,
|};
const defaultStyles = theme => {
@@ -369,14 +371,14 @@ class Thumbnail extends Component<Props, State> {
const {
_currentLayout,
_isAudioOnly,
_isScreenSharing
_videoTrack
} = this.props;
const { displayMode } = this.state;
const tileViewActive = _currentLayout === LAYOUTS.TILE_VIEW;
if (!(DISPLAY_VIDEO === displayMode)
&& tileViewActive
&& _isScreenSharing
&& _videoTrack && _videoTrack.videoType === VIDEO_TYPE.DESKTOP
&& !_isAudioOnly) {
sendAnalytics(createScreenSharingIssueEvent({
source: 'thumbnail',
@@ -901,8 +903,9 @@ function _mapStateToProps(state, ownProps): Object {
const id = participant?.id;
const isLocal = participant?.local ?? true;
const tracks = state['features/base/tracks'];
const _videoTrack = isLocal
? getLocalVideoTrack(tracks) : getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, participantID);
const _cameraTrack = isLocal ? getLocalCameraTrack(tracks) : getParticipantsCameraTrack(tracks, participantID);
const _screenTrack = isLocal ? getLocalScreenTrack(tracks) : getParticipantsScreenTrack(tracks, participantID);
const _videoTrack = _cameraTrack || _screenTrack;
const _audioTrack = isLocal
? getLocalAudioTrack(tracks) : getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.AUDIO, participantID);
const _currentLayout = getCurrentLayout(state);
@@ -969,6 +972,7 @@ function _mapStateToProps(state, ownProps): Object {
_isMobile,
_isMobilePortrait,
_isScreenSharing: _videoTrack?.videoType === 'desktop',
_hasOnlyScreenStream: Boolean(_screenTrack) && !_cameraTrack,
_isTestModeEnabled: isTestModeEnabled(state),
_isVideoPlayable: id && isVideoPlayable(state, id),
_localFlipX: Boolean(localFlipX),

View File

@@ -295,14 +295,17 @@ export function computeDisplayModeFromInput(input: Object) {
const {
isAudioOnly,
isCurrentlyOnLargeVideo,
isScreenSharing,
hasOnlyScreenStream,
canPlayEventReceived,
isRemoteParticipant,
tileViewActive
} = input;
const adjustedIsVideoPlayable = input.isVideoPlayable && (!isRemoteParticipant || canPlayEventReceived);
if (!tileViewActive && isScreenSharing && isRemoteParticipant) {
// TODO fix the logic to work with multiple streams
return DISPLAY_VIDEO;
if (!tileViewActive && hasOnlyScreenStream && isRemoteParticipant) {
return DISPLAY_AVATAR;
} else if (isCurrentlyOnLargeVideo && !tileViewActive) {
// Display name is always and only displayed when user is on the stage
@@ -326,6 +329,7 @@ export function computeDisplayModeFromInput(input: Object) {
export function getDisplayModeInput(props: Object, state: Object) {
const {
_currentLayout,
_hasOnlyScreenStream,
_isAudioOnly,
_isCurrentlyOnLargeVideo,
_isScreenSharing,
@@ -346,6 +350,7 @@ export function getDisplayModeInput(props: Object, state: Object) {
videoStream: Boolean(_videoTrack),
isRemoteParticipant: !_participant?.isFakeParticipant && !_participant?.local,
isScreenSharing: _isScreenSharing,
hasOnlyScreenStream: _hasOnlyScreenStream,
videoStreamMuted: _videoTrack ? _videoTrack.muted : 'no stream'
};
}
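
Note: with the change above, a remote participant whose only video is a desktop stream now resolves to DISPLAY_AVATAR outside tile view, whereas the removed isScreenSharing branch returned DISPLAY_VIDEO in that situation. A small illustrative call (the input values are made up):

const mode = computeDisplayModeFromInput({
    isAudioOnly: false,
    isCurrentlyOnLargeVideo: false,
    hasOnlyScreenStream: true,
    canPlayEventReceived: true,
    isRemoteParticipant: true,
    isVideoPlayable: true,
    tileViewActive: false
});

// mode === DISPLAY_AVATAR with this patch; before it, a remote screen sharer
// in the same situation fell into the branch that returned DISPLAY_VIDEO.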