Adds support for muting audio on the bridge.

parent 3f3046893e
commit 871c661ba9

app.js | 43
@@ -37,6 +37,16 @@ var videoSrcToSsrc = {};
  */
 var focusedVideoSrc = null;
 var mutedAudios = {};
+/**
+ * Remembers if we were muted by the focus.
+ * @type {boolean}
+ */
+var forceMuted = false;
+/**
+ * Indicates if we have muted our audio before the conference has started.
+ * @type {boolean}
+ */
+var preMuted = false;
 
 var localVideoSrc = null;
 var flipXLocalVideo = true;
@@ -970,28 +980,45 @@ function toggleVideo() {
  * Mutes / unmutes audio for the local participant.
  */
 function toggleAudio() {
+    setAudioMuted(!isAudioMuted());
+}
+
+/**
+ * Sets muted audio state for the local participant.
+ */
+function setAudioMuted(mute) {
     if (!(connection && connection.jingle.localAudio)) {
+        preMuted = mute;
+        // We still click the button.
+        buttonClick("#mute", "icon-microphone icon-mic-disabled");
         return;
     }
 
+    if (forceMuted && !mute) {
+        console.info("Asking focus for unmute");
+        connection.moderate.setMute(connection.emuc.myroomjid, mute);
+        // FIXME: wait for result before resetting muted status
+        forceMuted = false;
+    }
+
+    if (mute == isAudioMuted()) {
+        // Nothing to do
+        return;
+    }
+
     // It is not clear what is the right way to handle multiple tracks.
     // So at least make sure that they are all muted or all unmuted and
     // that we send presence just once.
     var localAudioTracks = connection.jingle.localAudio.getAudioTracks();
     if (localAudioTracks.length > 0) {
-        var audioEnabled = localAudioTracks[0].enabled;
-
         for (var idx = 0; idx < localAudioTracks.length; idx++) {
-            localAudioTracks[idx].enabled = !audioEnabled;
+            localAudioTracks[idx].enabled = !mute;
         }
-
-        // isMuted is the opposite of audioEnabled
-        connection.emuc.addAudioInfoToPresence(audioEnabled);
-        connection.emuc.sendPresence();
-        VideoLayout.showLocalAudioIndicator(audioEnabled);
     }
+    // isMuted is the opposite of audioEnabled
+    connection.emuc.addAudioInfoToPresence(mute);
+    connection.emuc.sendPresence();
+    VideoLayout.showLocalAudioIndicator(audioEnabled);
 
     buttonClick("#mute", "icon-microphone icon-mic-disabled");
 }
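Taken together, the two app.js hunks introduce a small state machine around muting. A standalone sketch of that flow, using simplified stand-ins rather than the app's real connection/RTC objects (the function and flag names mirror the diff; everything else is illustrative):

    var forceMuted = false;      // muted by the focus; unmute needs its approval
    var preMuted = false;        // mute requested before local audio existed
    var localAudioTracks = null; // stand-in for connection.jingle.localAudio's tracks

    function isAudioMuted() {
        return localAudioTracks ? !localAudioTracks[0].enabled : preMuted;
    }

    function setAudioMuted(mute) {
        if (!localAudioTracks) {
            preMuted = mute;             // remember it, apply once audio arrives
            return;
        }
        if (forceMuted && !mute) {
            console.info("would ask the focus for unmute here");
            forceMuted = false;
        }
        if (mute === isAudioMuted()) {
            return;                      // nothing to do
        }
        localAudioTracks.forEach(function (track) { track.enabled = !mute; });
    }

    // Mute is requested before getUserMedia has delivered a stream...
    setAudioMuted(true);                 // only records preMuted
    // ...and replayed once local audio is attached (see the VideoLayout hunk below).
    localAudioTracks = [{ enabled: true }];
    if (preMuted) { setAudioMuted(true); preMuted = false; }
    console.log(isAudioMuted());         // true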
@@ -1,4 +1,5 @@
-/* global $, $iq, config, connection, messageHandler, Strophe, toggleAudio */
+/* global $, $iq, config, connection, focusJid, forceMuted, messageHandler,
+   setAudioMuted, Strophe, toggleAudio */
 /**
  * Moderate connection plugin.
  */
@@ -15,27 +16,39 @@ Strophe.addConnectionPlugin('moderate', {
             null);
     },
     setMute: function (jid, mute) {
-        var iq = $iq({to: jid, type: 'set'})
-            .c('mute', {xmlns: 'http://jitsi.org/jitmeet/audio'})
-            .t(mute.toString())
-            .up();
+        console.info("set mute", mute);
+        var iqToFocus = $iq({to: focusJid, type: 'set'})
+            .c('mute', {
+                xmlns: 'http://jitsi.org/jitmeet/audio',
+                jid: jid
+            })
+            .t(mute.toString())
+            .up();
 
         this.connection.sendIQ(
-            iq,
-            function (result) {
-                console.log('set mute', result);
-            },
-            function (error) {
-                console.log('set mute error', error);
-                messageHandler.openReportDialog(null, 'Failed to mute ' +
-                    $("#participant_" + jid).find(".displayname").text() ||
-                    "participant" + '.', error);
-            });
+            iqToFocus,
+            function (result) {
+                console.log('set mute', result);
+            },
+            function (error) {
+                console.log('set mute error', error);
+                // FIXME: this causes an exception
+                //messageHandler.openReportDialog(null, 'Failed to mute ' +
+                //    $("#participant_" + jid).find(".displayname").text() ||
+                //    "participant" + '.', error);
+            });
     },
     onMute: function (iq) {
+        var from = iq.getAttribute('from');
+        if (from !== focusJid) {
+            console.warn("Ignored mute from non focus peer");
+            return false;
+        }
         var mute = $(iq).find('mute');
         if (mute.length) {
-            toggleAudio();
+            var doMuteAudio = mute.text() === "true";
+            setAudioMuted(doMuteAudio);
+            forceMuted = doMuteAudio;
         }
         return true;
     },
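The setMute builder above no longer addresses the participant directly; it asks the focus to do the muting. A sketch of how the plugin is meant to be driven, with the rough shape of the stanza in comments (the JIDs are examples, and this assumes an established Strophe connection with the plugin attached):

    // A moderator clicks "mute" in a remote participant's menu
    // (see the remote-video menu hunk further down):
    connection.moderate.setMute('room@conference.example.com/peer1', true);

    // The IQ produced by the $iq builder chain looks roughly like
    // (Strophe fills in the id attribute):
    //   <iq to="<focusJid>" type="set">
    //     <mute xmlns="http://jitsi.org/jitmeet/audio"
    //           jid="room@conference.example.com/peer1">true</mute>
    //   </iq>
    //
    // The focus is then expected to send its own <mute> IQ to that participant.
    // On the receiving side, onMute accepts it only if it really came from
    // focusJid, calls setAudioMuted(true) and records forceMuted, so a later
    // local unmute goes back through connection.moderate.setMute(myroomjid, false).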
@@ -17,6 +17,10 @@ var VideoLayout = (function (my) {
         RTC.attachMediaStream($('#localAudio'), stream);
         document.getElementById('localAudio').autoplay = true;
         document.getElementById('localAudio').volume = 0;
+        if (preMuted) {
+            setAudioMuted(true);
+            preMuted = false;
+        }
     };
 
     my.changeLocalVideo = function(stream, flipX) {
@@ -1253,8 +1257,8 @@ var VideoLayout = (function (my) {
         if ($(this).attr('disabled') != undefined) {
             event.preventDefault();
         }
-        var isMute = !mutedAudios[jid];
-        connection.moderate.setMute(jid, isMute);
+        var isMute = mutedAudios[jid] == true;
+        connection.moderate.setMute(jid, !isMute);
         popupmenuElement.setAttribute('style', 'display:none;');
 
         if (isMute) {
@@ -1349,10 +1353,9 @@ var VideoLayout = (function (my) {
         videoSpanId = 'participant_' + Strophe.getResourceFromJid(jid);
     }
 
     VideoLayout.ensurePeerContainerExists(jid);
+    mutedAudios[jid] = isMuted;
 
     if (Moderator.isModerator()) {
-        mutedAudios[jid] = isMuted;
         VideoLayout.updateRemoteVideoMenu(jid, isMuted);
     }
 
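The last two hunks change who tracks remote mute state and how the menu derives its request. A self-contained sketch of that bookkeeping (simplified handlers and example JIDs; the real code lives in VideoLayout's presence handler and popup-menu click handler):

    var mutedAudios = {};

    // Presence handler: the muted state is now recorded for every participant,
    // not only while we happen to be the moderator.
    function onAudioMutedPresence(jid, isMuted) {
        mutedAudios[jid] = isMuted;
    }

    // Popup-menu click: derive the request from the recorded *current* state.
    function onMuteMenuClick(jid, setMute) {
        var isMute = mutedAudios[jid] == true;  // undefined counts as "not muted"
        setMute(jid, !isMute);                  // ask the focus for the opposite
    }

    // Usage:
    onAudioMutedPresence('room@conference.example.com/peer1', true);
    onMuteMenuClick('room@conference.example.com/peer1', function (jid, mute) {
        console.log('moderate.setMute', jid, mute);  // -> ... false (unmute request)
    });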