Merge remote-tracking branch 'upstream/master'

turint 2014-07-29 16:44:10 +03:00
commit 04b790e1fe
14 changed files with 671 additions and 139 deletions

app.js | 119

@@ -293,7 +293,8 @@ $(document).bind('remotestreamadded.jingle', function (event, data, sid) {
     data.stream.onended = function () {
         console.log('stream ended', this.id);
-        // Mark video as removed to cancel waiting loop(if video is removed before has started)
+        // Mark video as removed to cancel waiting loop(if video is removed
+        // before has started)
         sel.removed = true;
         sel.remove();
@@ -312,8 +313,15 @@ $(document).bind('remotestreamadded.jingle', function (event, data, sid) {
     // Add click handler.
     container.onclick = function (event) {
-        VideoLayout.handleVideoThumbClicked(
-            $('#' + container.id + '>video').get(0).src);
+        /*
+         * FIXME It turns out that videoThumb may not exist (if there is no
+         * actual video).
+         */
+        var videoThumb = $('#' + container.id + '>video').get(0);
+        if (videoThumb)
+            VideoLayout.handleVideoThumbClicked(videoThumb.src);
         event.preventDefault();
         return false;
     };
@@ -437,31 +445,25 @@ function muteVideo(pc, unmute) {
 }

 /**
- * Callback called by {@link StatsCollector} in intervals supplied to it's
- * constructor.
- * @param statsCollector {@link StatsCollector} source of the event.
+ * Callback for audio levels changed.
+ * @param jid JID of the user
+ * @param audioLevel the audio level value
  */
-function statsUpdated(statsCollector)
+function audioLevelUpdated(jid, audioLevel)
 {
-    Object.keys(statsCollector.jid2stats).forEach(function (jid)
+    var resourceJid;
+    if(jid === LocalStatsCollector.LOCAL_JID)
     {
-        var peerStats = statsCollector.jid2stats[jid];
-        Object.keys(peerStats.ssrc2AudioLevel).forEach(function (ssrc)
-        {
-            // console.info(jid + " audio level: " +
-            //    peerStats.ssrc2AudioLevel[ssrc] + " of ssrc: " + ssrc);
-        });
-    });
-}
-
-/**
- * Callback called by {@link LocalStatsCollector} in intervals supplied to it's
- * constructor.
- * @param statsCollector {@link LocalStatsCollector} source of the event.
- */
-function localStatsUpdated(statsCollector)
-{
-    // console.info("Local audio level: " + statsCollector.audioLevel);
+        resourceJid = AudioLevels.LOCAL_LEVEL;
+        if(isAudioMuted())
+            return;
+    }
+    else
+    {
+        resourceJid = Strophe.getResourceFromJid(jid);
+    }
+
+    AudioLevels.updateAudioLevel(resourceJid, audioLevel);
 }

 /**
@@ -473,10 +475,7 @@ function startRtpStatsCollector()
     if (config.enableRtpStats)
     {
         statsCollector = new StatsCollector(
-            getConferenceHandler().peerconnection, 200, statsUpdated);
-
-        stopLocalRtpStatsCollector();
-
+            getConferenceHandler().peerconnection, 200, audioLevelUpdated);
         statsCollector.start();
     }
 }
@@ -501,7 +500,7 @@ function startLocalRtpStatsCollector(stream)
 {
     if(config.enableRtpStats)
     {
-        localStatsCollector = new LocalStatsCollector(stream, 200, localStatsUpdated);
+        localStatsCollector = new LocalStatsCollector(stream, 100, audioLevelUpdated);
         localStatsCollector.start();
     }
 }
@@ -628,7 +627,11 @@ $(document).bind('joined.muc', function (event, jid, info) {
     if (Object.keys(connection.emuc.members).length < 1) {
         focus = new ColibriFocus(connection, config.hosts.bridge);
-        showRecordingButton(false);
+        if (nickname !== null) {
+            focus.setEndpointDisplayName(connection.emuc.myroomjid,
+                                         nickname);
+        }
+        Toolbar.showRecordingButton(false);
     }

     if (focus && config.etherpad_base) {
@@ -660,7 +663,7 @@ $(document).bind('entered.muc', function (event, jid, info, pres) {
         if (focus.confid === null) {
             console.log('make new conference with', jid);
             focus.makeConference(Object.keys(connection.emuc.members));
-            showRecordingButton(true);
+            Toolbar.showRecordingButton(true);
         } else {
             console.log('invite', jid, 'into conference');
             focus.addNewParticipant(jid);
@@ -706,10 +709,14 @@ $(document).bind('left.muc', function (event, jid) {
             && !sessionTerminated) {
         console.log('welcome to our new focus... myself');
         focus = new ColibriFocus(connection, config.hosts.bridge);
+        if (nickname !== null) {
+            focus.setEndpointDisplayName(connection.emuc.myroomjid,
+                                         nickname);
+        }
         if (Object.keys(connection.emuc.members).length > 0) {
             focus.makeConference(Object.keys(connection.emuc.members));
-            showRecordingButton(true);
+            Toolbar.showRecordingButton(true);
         }
         $(document).trigger('focusechanged.muc', [focus]);
     }
@@ -719,7 +726,11 @@ $(document).bind('left.muc', function (event, jid) {
         // problems with reinit
         disposeConference();
         focus = new ColibriFocus(connection, config.hosts.bridge);
-        showRecordingButton(false);
+        if (nickname !== null) {
+            focus.setEndpointDisplayName(connection.emuc.myroomjid,
+                                         nickname);
+        }
+        Toolbar.showRecordingButton(false);
     }
     if (connection.emuc.getPrezi(jid)) {
         $(document).trigger('presentationremoved.muc',
@@ -734,7 +745,7 @@ $(document).bind('presence.muc', function (event, jid, info, pres) {
         if (ssrc2jid[ssrc] == jid) {
             delete ssrc2jid[ssrc];
         }
-        if (ssrc2videoType == jid) {
+        if (ssrc2videoType[ssrc] == jid) {
             delete ssrc2videoType[ssrc];
         }
     });
@@ -772,6 +783,10 @@ $(document).bind('presence.muc', function (event, jid, info, pres) {
             'participant_' + Strophe.getResourceFromJid(jid),
             info.displayName);
     }
+
+    if (focus !== null && info.displayName !== null) {
+        focus.setEndpointDisplayName(jid, info.displayName);
+    }
 });

 $(document).bind('passwordrequired.muc', function (event, jid) {
@@ -888,6 +903,20 @@ function toggleAudio() {
     buttonClick("#mute", "icon-microphone icon-mic-disabled");
 }

+/**
+ * Checks whether the audio is muted or not.
+ * @returns {boolean} true if audio is muted and false if not.
+ */
+function isAudioMuted()
+{
+    var localAudio = connection.jingle.localAudio;
+    for (var idx = 0; idx < localAudio.getAudioTracks().length; idx++) {
+        if(localAudio.getAudioTracks()[idx].enabled === true)
+            return false;
+    }
+    return true;
+}
+
 // Starts or stops the recording for the conference.
 function toggleRecording() {
     if (focus === null || focus.confid === null) {
@@ -923,14 +952,14 @@ function toggleRecording() {
     }

     var oldState = focus.recordingEnabled;
-    buttonClick("#recordButton", "icon-recEnable icon-recDisable");
+    Toolbar.toggleRecordingButtonState();
     focus.setRecording(!oldState,
                        recordingToken,
                        function (state) {
                            console.log("New recording state: ", state);
                            if (state == oldState) //failed to change, reset the token because it might have been wrong
                            {
-                               buttonClick("#recordButton", "icon-recEnable icon-recDisable");
+                               Toolbar.toggleRecordingButtonState();
                                setRecordingToken(null);
                            }
                        }
@@ -1097,11 +1126,7 @@ function disposeConference(onUnload) {
         handler.peerconnection.close();
     }
     stopRTPStatsCollector();
-    if(!onUnload) {
-        startLocalRtpStatsCollector(connection.jingle.localAudio);
-    }
-    else
-    {
+    if(onUnload) {
         stopLocalRtpStatsCollector();
     }
     focus = null;
@@ -1243,19 +1268,7 @@ function setView(viewName) {
 //    }
 }

-function showRecordingButton(show) {
-    if (!config.enableRecording) {
-        return;
-    }
-
-    if (show) {
-        $('#recording').css({display: "inline"});
-    }
-    else {
-        $('#recording').css({display: "none"});
-    }
-}
-
 $(document).bind('fatalError.jingle',
     function (event, session, error)

audio_levels.js (new file) | 217

@@ -0,0 +1,217 @@
/**
* The audio Levels plugin.
*/
var AudioLevels = (function(my) {
var CANVAS_EXTRA = 104;
var CANVAS_RADIUS = 7;
var SHADOW_COLOR = '#00ccff';
var audioLevelCanvasCache = {};
my.LOCAL_LEVEL = 'local';
/**
* Updates the audio level canvas for the given peerJid. If the canvas
* didn't exist we create it.
*/
my.updateAudioLevelCanvas = function (peerJid) {
var resourceJid = null;
var videoSpanId = null;
if (!peerJid)
videoSpanId = 'localVideoContainer';
else {
resourceJid = Strophe.getResourceFromJid(peerJid);
videoSpanId = 'participant_' + resourceJid;
}
videoSpan = document.getElementById(videoSpanId);
if (!videoSpan) {
if (resourceJid)
console.error("No video element for jid", resourceJid);
else
console.error("No video element for local video.");
return;
}
var audioLevelCanvas = $('#' + videoSpanId + '>canvas');
var videoSpaceWidth = $('#remoteVideos').width();
var thumbnailSize
= VideoLayout.calculateThumbnailSize(videoSpaceWidth);
var thumbnailWidth = thumbnailSize[0];
var thumbnailHeight = thumbnailSize[1];
if (!audioLevelCanvas || audioLevelCanvas.length === 0) {
audioLevelCanvas = document.createElement('canvas');
audioLevelCanvas.className = "audiolevel";
audioLevelCanvas.style.bottom = "-" + CANVAS_EXTRA/2 + "px";
audioLevelCanvas.style.left = "-" + CANVAS_EXTRA/2 + "px";
resizeAudioLevelCanvas( audioLevelCanvas,
thumbnailWidth,
thumbnailHeight);
videoSpan.appendChild(audioLevelCanvas);
} else {
audioLevelCanvas = audioLevelCanvas.get(0);
resizeAudioLevelCanvas( audioLevelCanvas,
thumbnailWidth,
thumbnailHeight);
}
};
/**
* Updates the audio level UI for the given resourceJid.
*
* @param resourceJid the resource jid indicating the video element for
* which we draw the audio level
* @param audioLevel the newAudio level to render
*/
my.updateAudioLevel = function (resourceJid, audioLevel) {
drawAudioLevelCanvas(resourceJid, audioLevel);
var videoSpanId = getVideoSpanId(resourceJid);
var audioLevelCanvas = $('#' + videoSpanId + '>canvas').get(0);
if (!audioLevelCanvas)
return;
var drawContext = audioLevelCanvas.getContext('2d');
var canvasCache = audioLevelCanvasCache[resourceJid];
drawContext.clearRect (0, 0,
audioLevelCanvas.width, audioLevelCanvas.height);
drawContext.drawImage(canvasCache, 0, 0);
};
/**
* Resizes the given audio level canvas to match the given thumbnail size.
*/
function resizeAudioLevelCanvas(audioLevelCanvas,
thumbnailWidth,
thumbnailHeight) {
audioLevelCanvas.width = thumbnailWidth + CANVAS_EXTRA;
audioLevelCanvas.height = thumbnailHeight + CANVAS_EXTRA;
};
/**
* Draws the audio level canvas into the cached canvas object.
*
* @param resourceJid the resource jid indicating the video element for
* which we draw the audio level
* @param audioLevel the newAudio level to render
*/
function drawAudioLevelCanvas(resourceJid, audioLevel) {
if (!audioLevelCanvasCache[resourceJid]) {
var videoSpanId = getVideoSpanId(resourceJid);
var audioLevelCanvasOrig = $('#' + videoSpanId + '>canvas').get(0);
/*
* FIXME Testing has shown that audioLevelCanvasOrig may not exist.
* In such a case, the method CanvasUtil.cloneCanvas may throw an
* error. Since audio levels are frequently updated, the errors have
* been observed to pile into the console, strain the CPU.
*/
if (audioLevelCanvasOrig)
{
audioLevelCanvasCache[resourceJid]
= CanvasUtil.cloneCanvas(audioLevelCanvasOrig);
}
}
var canvas = audioLevelCanvasCache[resourceJid];
if (!canvas)
return;
var drawContext = canvas.getContext('2d');
drawContext.clearRect(0, 0, canvas.width, canvas.height);
var shadowLevel = getShadowLevel(audioLevel);
if (shadowLevel > 0)
// drawContext, x, y, w, h, r, shadowColor, shadowLevel
CanvasUtil.drawRoundRectGlow( drawContext,
CANVAS_EXTRA/2, CANVAS_EXTRA/2,
canvas.width - CANVAS_EXTRA,
canvas.height - CANVAS_EXTRA,
CANVAS_RADIUS,
SHADOW_COLOR,
shadowLevel);
};
/**
* Returns the shadow/glow level for the given audio level.
*
* @param audioLevel the audio level from which we determine the shadow
* level
*/
function getShadowLevel (audioLevel) {
var shadowLevel = 0;
if (audioLevel <= 0.3) {
shadowLevel = Math.round(CANVAS_EXTRA/2*(audioLevel/0.3));
}
else if (audioLevel <= 0.6) {
shadowLevel = Math.round(CANVAS_EXTRA/2*((audioLevel - 0.3) / 0.3));
}
else {
shadowLevel = Math.round(CANVAS_EXTRA/2*((audioLevel - 0.6) / 0.4));
}
return shadowLevel;
};
/**
* Returns the video span id corresponding to the given resourceJid or local
* user.
*/
function getVideoSpanId(resourceJid) {
var videoSpanId = null;
if (resourceJid === AudioLevels.LOCAL_LEVEL
|| (connection.emuc.myroomjid && resourceJid
=== Strophe.getResourceFromJid(connection.emuc.myroomjid)))
videoSpanId = 'localVideoContainer';
else
videoSpanId = 'participant_' + resourceJid;
return videoSpanId;
};
/**
* Indicates that the remote video has been resized.
*/
$(document).bind('remotevideo.resized', function (event, width, height) {
var resized = false;
$('#remoteVideos>span>canvas').each(function() {
var canvas = $(this).get(0);
if (canvas.width !== width + CANVAS_EXTRA) {
canvas.width = width + CANVAS_EXTRA;
resized = true;
}
if (canvas.height !== height + CANVAS_EXTRA) {
canvas.height = height + CANVAS_EXTRA;
resized = true;
}
});
if (resized)
Object.keys(audioLevelCanvasCache).forEach(function (resourceJid) {
audioLevelCanvasCache[resourceJid].width
= width + CANVAS_EXTRA;
audioLevelCanvasCache[resourceJid].height
= height + CANVAS_EXTRA;
});
});
return my;
})(AudioLevels || {});

canvas_util.js (new file) | 109

@@ -0,0 +1,109 @@
/**
* Utility class for drawing canvas shapes.
*/
var CanvasUtil = (function(my) {
/**
* Draws a round rectangle with a glow. The glowWidth indicates the depth
* of the glow.
*
* @param drawContext the context of the canvas to draw to
* @param x the x coordinate of the round rectangle
* @param y the y coordinate of the round rectangle
* @param w the width of the round rectangle
* @param h the height of the round rectangle
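* @param r the radius of the rounded corners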
* @param glowColor the color of the glow
* @param glowWidth the width of the glow
*/
my.drawRoundRectGlow
= function(drawContext, x, y, w, h, r, glowColor, glowWidth) {
// Save the previous state of the context.
drawContext.save();
if (w < 2 * r) r = w / 2;
if (h < 2 * r) r = h / 2;
// Draw a round rectangle.
drawContext.beginPath();
drawContext.moveTo(x+r, y);
drawContext.arcTo(x+w, y, x+w, y+h, r);
drawContext.arcTo(x+w, y+h, x, y+h, r);
drawContext.arcTo(x, y+h, x, y, r);
drawContext.arcTo(x, y, x+w, y, r);
drawContext.closePath();
// Add a shadow around the rectangle
drawContext.shadowColor = glowColor;
drawContext.shadowBlur = glowWidth;
drawContext.shadowOffsetX = 0;
drawContext.shadowOffsetY = 0;
// Fill the shape.
drawContext.fill();
drawContext.save();
drawContext.restore();
// 1) Uncomment this line to use Composite Operation, which is doing the
// same as the clip function below and is also antialiasing the round
// border, but is said to be less fast performance wise.
// drawContext.globalCompositeOperation='destination-out';
drawContext.beginPath();
drawContext.moveTo(x+r, y);
drawContext.arcTo(x+w, y, x+w, y+h, r);
drawContext.arcTo(x+w, y+h, x, y+h, r);
drawContext.arcTo(x, y+h, x, y, r);
drawContext.arcTo(x, y, x+w, y, r);
drawContext.closePath();
// 2) Uncomment this line to use Composite Operation, which is doing the
// same as the clip function below and is also antialiasing the round
// border, but is said to be less fast performance wise.
// drawContext.fill();
// Comment these two lines if choosing to do the same with composite
// operation above 1 and 2.
drawContext.clip();
drawContext.clearRect(0, 0, 277, 200);
// Restore the previous context state.
drawContext.restore();
};
/**
* Clones the given canvas.
*
* @return the new cloned canvas.
*/
my.cloneCanvas = function (oldCanvas) {
/*
* FIXME Testing has shown that oldCanvas may not exist. In such a case,
* the method CanvasUtil.cloneCanvas may throw an error. Since audio
* levels are frequently updated, the errors have been observed to pile
* into the console, strain the CPU.
*/
if (!oldCanvas)
return oldCanvas;
//create a new canvas
var newCanvas = document.createElement('canvas');
var context = newCanvas.getContext('2d');
//set dimensions
newCanvas.width = oldCanvas.width;
newCanvas.height = oldCanvas.height;
//apply the old canvas to the new one
context.drawImage(oldCanvas, 0, 0);
//return the new canvas
return newCanvas;
};
return my;
})(CanvasUtil || {});

chat.js | 21

@@ -175,7 +175,13 @@ var Chat = (function (my) {
             $('#remoteVideos>span').animate({height: thumbnailsHeight,
                                              width: thumbnailsWidth},
                                             {queue: false,
-                                             duration: 500});
+                                             duration: 500,
+                                             complete: function() {
+                                                 $(document).trigger(
+                                                     "remotevideo.resized",
+                                                     [thumbnailsWidth,
+                                                      thumbnailsHeight]);
+                                             }});

             $('#largeVideoContainer').animate({ width: videospaceWidth,
                                                 height: videospaceHeight},
@@ -198,8 +204,10 @@ var Chat = (function (my) {
                                                 duration: 500});
         }
         else {
-            // Undock the toolbar when the chat is shown.
-            Toolbar.dockToolbar(false);
+            // Undock the toolbar when the chat is shown and if we're in a
+            // video mode.
+            if (VideoLayout.isLargeVideoVisible())
+                Toolbar.dockToolbar(false);

             videospace.animate({right: chatSize[0],
                                 width: videospaceWidth,
@@ -219,7 +227,12 @@ var Chat = (function (my) {
             $('#remoteVideos>span').animate({height: thumbnailsHeight,
                                              width: thumbnailsWidth},
                                             {queue: false,
-                                             duration: 500});
+                                             duration: 500,
+                                             complete: function() {
+                                                 $(document).trigger(
+                                                     "remotevideo.resized",
+                                                     [thumbnailsWidth, thumbnailsHeight]);
+                                             }});

             $('#largeVideoContainer').animate({ width: videospaceWidth,
                                                 height: videospaceHeight},

config.js

@@ -12,7 +12,7 @@ var config = {
     desktopSharing: 'ext', // Desktop sharing method. Can be set to 'ext', 'webrtc' or false to disable.
     chromeExtensionId: 'diibjkoicjeejcmhdnailmkgecihlobk', // Id of desktop streamer Chrome extension
     minChromeExtVersion: '0.1', // Required version of Chrome extension
-    enableRtpStats: false, // Enables RTP stats processing
+    enableRtpStats: true, // Enables RTP stats processing
     openSctp: true, // Toggle to enable/disable SCTP channels
     // channelLastN: -1, // The default value of the channel attribute last-n.
     enableRecording: false


@@ -151,6 +151,27 @@ html, body{
                     0 -1px 10px #00ccff;
 }

+#recordButton {
+    -webkit-transition: all .5s ease-in-out;
+    -moz-transition: all .5s ease-in-out;
+    transition: all .5s ease-in-out;
+}
+
+/*#ffde00*/
+#recordButton.active {
+    -webkit-text-shadow: -1px 0 10px #00ccff,
+                         0 1px 10px #00ccff,
+                         1px 0 10px #00ccff,
+                         0 -1px 10px #00ccff;
+    -moz-text-shadow: 1px 0 10px #00ccff,
+                      0 1px 10px #00ccff,
+                      1px 0 10px #00ccff,
+                      0 -1px 10px #00ccff;
+    text-shadow: -1px 0 10px #00ccff,
+                 0 1px 10px #00ccff,
+                 1px 0 10px #00ccff,
+                 0 -1px 10px #00ccff;
+}
+
 a.button:hover {
     top: 0;
     cursor: pointer;

css/videolayout_default.css

@@ -4,6 +4,7 @@
     top: 0px;
     left: 0px;
     right: 0px;
+    overflow: hidden;
 }

 #remoteVideos {
@@ -49,8 +50,21 @@
     -webkit-animation-name: greyPulse;
     -webkit-animation-duration: 2s;
     -webkit-animation-iteration-count: 1;
-    -webkit-box-shadow: 0 0 18px #388396;
-    border: 2px solid #388396;
+}
+
+#remoteVideos .videocontainer:hover {
+    -webkit-box-shadow: inset 0 0 10px #FFFFFF, 0 0 10px #FFFFFF;
+    border: 2px solid #FFFFFF;
+}
+
+#remoteVideos .videocontainer.videoContainerFocused {
+    -webkit-box-shadow: inset 0 0 28px #006d91;
+    border: 2px solid #006d91;
+}
+
+#remoteVideos .videocontainer.videoContainerFocused:hover {
+    -webkit-box-shadow: inset 0 0 5px #FFFFFF, 0 0 10px #FFFFFF, inset 0 0 60px #006d91;
+    border: 2px solid #FFFFFF;
 }

 #localVideoWrapper {
@@ -291,3 +305,10 @@
     background-image:url(../images/rightwatermark.png);
     background-position: center right;
 }
+
+.audiolevel {
+    display: inline-block;
+    position: absolute;
+    z-index: 0;
+    border-radius:10px;
+}

index.html

@@ -22,14 +22,14 @@
     <script src="//code.jquery.com/ui/1.10.4/jquery-ui.js"></script>
     <script src="libs/tooltip.js?v=1"></script><!-- bootstrap tooltip lib -->
     <script src="libs/popover.js?v=1"></script><!-- bootstrap tooltip lib -->
-    <script src="config.js?v=2"></script><!-- adapt to your needs, i.e. set hosts and bosh path -->
+    <script src="config.js?v=3"></script><!-- adapt to your needs, i.e. set hosts and bosh path -->
     <script src="muc.js?v=12"></script><!-- simple MUC library -->
     <script src="estos_log.js?v=2"></script><!-- simple stanza logger -->
     <script src="desktopsharing.js?v=2"></script><!-- desktop sharing -->
     <script src="data_channels.js?v=2"></script><!-- data channels -->
-    <script src="app.js?v=3"></script><!-- application logic -->
+    <script src="app.js?v=4"></script><!-- application logic -->
     <script src="commands.js?v=1"></script><!-- application logic -->
-    <script src="chat.js?v=7"></script><!-- chat logic -->
+    <script src="chat.js?v=8"></script><!-- chat logic -->
     <script src="util.js?v=5"></script><!-- utility functions -->
     <script src="etherpad.js?v=8"></script><!-- etherpad plugin -->
     <script src="prezi.js?v=4"></script><!-- prezi plugin -->
@@ -39,12 +39,14 @@
     <script src="analytics.js?v=1"></script><!-- google analytics plugin -->
     <script src="rtp_stats.js?v=1"></script><!-- RTP stats processing -->
     <script src="local_stats.js?v=1"></script><!-- Local stats processing -->
-    <script src="videolayout.js?v=6"></script><!-- video ui -->
+    <script src="videolayout.js?v=7"></script><!-- video ui -->
     <script src="toolbar.js?v=3"></script><!-- toolbar ui -->
+    <script src="canvas_util.js?v=1"></script><!-- canvas drawing utils -->
+    <script src="audio_levels.js?v=1"></script><!-- audio levels plugin -->
     <link href="//netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.css" rel="stylesheet">
     <link rel="stylesheet" href="css/font.css"/>
     <link rel="stylesheet" type="text/css" media="screen" href="css/main.css?v=22"/>
-    <link rel="stylesheet" type="text/css" media="screen" href="css/videolayout_default.css?v=7" id="videolayout_default"/>
+    <link rel="stylesheet" type="text/css" media="screen" href="css/videolayout_default.css?v=8" id="videolayout_default"/>
     <link rel="stylesheet" href="css/jquery-impromptu.css?v=4">
     <link rel="stylesheet" href="css/modaldialog.css?v=3">
     <link rel="stylesheet" href="css/popup_menu.css?v=2">


@@ -84,6 +84,10 @@ function ColibriFocus(connection, bridgejid) {
     this.wait = true;

     this.recordingEnabled = false;
+
+    // stores information about the endpoints (i.e. display names) to
+    // be sent to the videobridge.
+    this.endpointsInfo = null;
 }

 // creates a conferences with an initial set of peers
@@ -172,10 +176,11 @@ ColibriFocus.prototype.makeConference = function (peers) {
 };

 // Sends a COLIBRI message which enables or disables (according to 'state') the
-// recording on the bridge.
+// recording on the bridge. Waits for the result IQ and calls 'callback' with
+// the new recording state, according to the IQ.
 ColibriFocus.prototype.setRecording = function(state, token, callback) {
     var self = this;
-    var elem = $iq({to: this.bridgejid, type: 'get'});
+    var elem = $iq({to: this.bridgejid, type: 'set'});
     elem.c('conference', {
         xmlns: 'http://jitsi.org/protocol/colibri',
         id: this.confid
@@ -187,10 +192,7 @@ ColibriFocus.prototype.setRecording = function(state, token, callback) {
         function (result) {
             console.log('Set recording "', state, '". Result:', result);
             var recordingElem = $(result).find('>conference>recording');
-            var newState = recordingElem.attr('state');
-            if (newState == null){
-                newState = false;
-            }
+            var newState = ('true' === recordingElem.attr('state'));

             self.recordingEnabled = newState;
             callback(newState);
@@ -201,10 +203,78 @@ ColibriFocus.prototype.setRecording = function(state, token, callback) {
     );
 };

+/*
+ * Updates the display name for an endpoint with a specific jid.
+ * jid: the jid associated with the endpoint.
+ * displayName: the new display name for the endpoint.
+ */
+ColibriFocus.prototype.setEndpointDisplayName = function(jid, displayName) {
+    var endpointId = jid.substr(1 + jid.lastIndexOf('/'));
+    var update = false;
+
+    if (this.endpointsInfo === null) {
+        this.endpointsInfo = {};
+    }
+
+    var endpointInfo = this.endpointsInfo[endpointId];
+    if ('undefined' === typeof endpointInfo) {
+        endpointInfo = this.endpointsInfo[endpointId] = {};
+    }
+
+    if (endpointInfo['displayname'] !== displayName) {
+        endpointInfo['displayname'] = displayName;
+        update = true;
+    }
+
+    if (update) {
+        this.updateEndpoints();
+    }
+};
+
+/*
+ * Sends a colibri message to the bridge that contains the
+ * current endpoints and their display names.
+ */
+ColibriFocus.prototype.updateEndpoints = function() {
+    if (this.confid === null
+        || this.endpointsInfo === null) {
+        return;
+    }
+
+    if (this.confid === 0) {
+        // the colibri conference is currently initiating
+        var self = this;
+        window.setTimeout(function() { self.updateEndpoints()}, 1000);
+        return;
+    }
+
+    var elem = $iq({to: this.bridgejid, type: 'set'});
+    elem.c('conference', {
+        xmlns: 'http://jitsi.org/protocol/colibri',
+        id: this.confid
+    });
+
+    for (var id in this.endpointsInfo) {
+        elem.c('endpoint');
+        elem.attrs({ id: id,
+                     displayname: this.endpointsInfo[id]['displayname']
+        });
+        elem.up();
+    }
+    //elem.up(); //conference
+
+    this.connection.sendIQ(
+        elem,
+        function (result) {},
+        function (error) { console.warn(error); }
+    );
+};
+
 ColibriFocus.prototype._makeConference = function () {
     var self = this;
-    var elem = $iq({to: this.bridgejid, type: 'get'});
-    elem.c('conference', {xmlns: 'http://jitsi.org/protocol/colibri'});
+    var elem = $iq({ to: this.bridgejid, type: 'get' });
+    elem.c('conference', { xmlns: 'http://jitsi.org/protocol/colibri' });

     this.media.forEach(function (name) {
         var elemName;
@@ -218,11 +288,11 @@ ColibriFocus.prototype._makeConference = function () {
         else
         {
             elemName = 'channel';
-            if (('video' === name) && (this.channelLastN >= 0))
-                elemAttrs['last-n'] = this.channelLastN;
+            if (('video' === name) && (self.channelLastN >= 0))
+                elemAttrs['last-n'] = self.channelLastN;
         }

-        elem.c('content', {name: name});
+        elem.c('content', { name: name });
         elem.c(elemName, elemAttrs);
         elem.attrs({ endpoint: self.myMucResource });
@@ -237,6 +307,17 @@ ColibriFocus.prototype._makeConference = function () {
         }
         elem.up(); // end of content
     });

+    if (this.endpointsInfo !== null) {
+        for (var id in this.endpointsInfo) {
+            elem.c('endpoint');
+            elem.attrs({ id: id,
+                         displayname: this.endpointsInfo[id]['displayname']
+            });
+            elem.up();
+        }
+    }
+
     /*
     var localSDP = new SDP(this.peerconnection.localDescription.sdp);
     localSDP.media.forEach(function (media, channel) {
@@ -657,9 +738,7 @@ ColibriFocus.prototype.addNewParticipant = function (peer) {
         {
             console.error('local description not ready yet, postponing', peer);
         }
-        window.setTimeout(function () {
-            self.addNewParticipant(peer);
-        }, 250);
+        window.setTimeout(function () { self.addNewParticipant(peer); }, 250);
         return;
     }
     var index = this.channels.length;
@@ -667,7 +746,9 @@ ColibriFocus.prototype.addNewParticipant = function (peer) {
     this.peers.push(peer);

     var elem = $iq({to: this.bridgejid, type: 'get'});
-    elem.c('conference', {xmlns: 'http://jitsi.org/protocol/colibri', id: this.confid});
+    elem.c(
+        'conference',
+        { xmlns: 'http://jitsi.org/protocol/colibri', id: this.confid });
     var localSDP = new SDP(this.peerconnection.localDescription.sdp);
     localSDP.media.forEach(function (media, channel) {
         var name = SDPUtil.parse_mid(SDPUtil.find_line(media, 'a=mid:'));
@@ -687,11 +768,11 @@ ColibriFocus.prototype.addNewParticipant = function (peer) {
         else
         {
             elemName = 'channel';
-            if (('video' === name) && (this.channelLastN >= 0))
-                elemAttrs['last-n'] = this.channelLastN;
+            if (('video' === name) && (self.channelLastN >= 0))
+                elemAttrs['last-n'] = self.channelLastN;
         }

-        elem.c('content', {name: name});
+        elem.c('content', { name: name });
         elem.c(elemName, elemAttrs);
         elem.up(); // end of channel/sctpconnection
         elem.up(); // end of content
@@ -819,12 +900,7 @@ ColibriFocus.prototype.addSource = function (elem, fromJid) {
     if (!this.peerconnection.localDescription)
     {
         console.warn("addSource - localDescription not ready yet")
-        setTimeout(function()
-            {
-                self.addSource(elem, fromJid);
-            },
-            200
-        );
+        setTimeout(function() { self.addSource(elem, fromJid); }, 200);
         return;
     }
@@ -865,12 +941,7 @@ ColibriFocus.prototype.removeSource = function (elem, fromJid) {
     if (!self.peerconnection.localDescription)
     {
         console.warn("removeSource - localDescription not ready yet");
-        setTimeout(function()
-            {
-                self.removeSource(elem, fromJid);
-            },
-            200
-        );
+        setTimeout(function() { self.removeSource(elem, fromJid); }, 200);
         return;
     }
@@ -1011,11 +1082,13 @@ ColibriFocus.prototype.sendIceCandidate = function (candidate) {
     }
     if (this.drip_container.length === 0) {
         // start 20ms callout
-        window.setTimeout(function () {
-            if (self.drip_container.length === 0) return;
-            self.sendIceCandidates(self.drip_container);
-            self.drip_container = [];
-        }, 20);
+        window.setTimeout(
+            function () {
+                if (self.drip_container.length === 0) return;
+                self.sendIceCandidates(self.drip_container);
+                self.drip_container = [];
+            },
+            20);
     }
     this.drip_container.push(candidate);
 };
@@ -1212,17 +1285,17 @@ ColibriFocus.prototype.setChannelLastN = function (channelLastN) {
         this.channelLastN = channelLastN;

         // Update/patch the existing channels.
-        var patch = $iq({ to:this.bridgejid, type:'set' });
+        var patch = $iq({ to: this.bridgejid, type: 'set' });

         patch.c(
             'conference',
-            { xmlns:'http://jitsi.org/protocol/colibri', id:this.confid });
+            { xmlns: 'http://jitsi.org/protocol/colibri', id: this.confid });

-        patch.c('content', { name:'video' });
+        patch.c('content', { name: 'video' });
         patch.c(
             'channel',
             {
-                id:$(this.mychannel[1 /* video */]).attr('id'),
-                'last-n':this.channelLastN
+                id: $(this.mychannel[1 /* video */]).attr('id'),
+                'last-n': this.channelLastN
             });
         patch.up(); // end of channel
         for (var p = 0; p < this.channels.length; p++)
@@ -1230,18 +1303,18 @@ ColibriFocus.prototype.setChannelLastN = function (channelLastN) {
             patch.c(
                 'channel',
                 {
-                    id:$(this.channels[p][1 /* video */]).attr('id'),
-                    'last-n':this.channelLastN
+                    id: $(this.channels[p][1 /* video */]).attr('id'),
+                    'last-n': this.channelLastN
                 });
             patch.up(); // end of channel
         }
         this.connection.sendIQ(
             patch,
             function (res) {
-                console.info('Set channel last-n succeeded: ', res);
+                console.info('Set channel last-n succeeded:', res);
             },
             function (err) {
-                console.error('Set channel last-n failed: ', err);
+                console.error('Set channel last-n failed:', err);
             });
     }
 };


@@ -510,7 +510,7 @@ function getUserMediaWithConstraints(um, success_callback, failure_callback, res
     var constraints = {audio: false, video: false};

     if (um.indexOf('video') >= 0) {
-        constraints.video = {mandatory: {}};// same behaviour as true
+        constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
     }
     if (um.indexOf('audio') >= 0) {
         constraints.audio = {};// same behaviour as true
@@ -523,7 +523,8 @@ function getUserMediaWithConstraints(um, success_callback, failure_callback, res
                 maxWidth: window.screen.width,
                 maxHeight: window.screen.height,
                 maxFrameRate: 3
-            }
+            },
+            optional: []
         };
     }
     if (um.indexOf('desktop') >= 0) {
@@ -535,7 +536,8 @@ function getUserMediaWithConstraints(um, success_callback, failure_callback, res
                 maxWidth: window.screen.width,
                 maxHeight: window.screen.height,
                 maxFrameRate: 3
-            }
+            },
+            optional: []
         }
     }

@@ -543,7 +545,7 @@ function getUserMediaWithConstraints(um, success_callback, failure_callback, res
     var isAndroid = navigator.userAgent.indexOf('Android') != -1;

     if (resolution && !constraints.video || isAndroid) {
-        constraints.video = {mandatory: {}};// same behaviour as true
+        constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
     }
     // see https://code.google.com/p/chromium/issues/detail?id=143631#c9 for list of supported resolutions
     switch (resolution) {
@@ -552,23 +554,23 @@
         case 'fullhd':
             constraints.video.mandatory.minWidth = 1920;
             constraints.video.mandatory.minHeight = 1080;
-            constraints.video.mandatory.minAspectRatio = 1.77;
+            constraints.video.optional.push({ minAspectRatio: 1.77 });
             break;
         case '720':
         case 'hd':
             constraints.video.mandatory.minWidth = 1280;
             constraints.video.mandatory.minHeight = 720;
-            constraints.video.mandatory.minAspectRatio = 1.77;
+            constraints.video.optional.push({ minAspectRatio: 1.77 });
             break;
         case '360':
             constraints.video.mandatory.minWidth = 640;
             constraints.video.mandatory.minHeight = 360;
-            constraints.video.mandatory.minAspectRatio = 1.77;
+            constraints.video.optional.push({ minAspectRatio: 1.77 });
             break;
         case '180':
             constraints.video.mandatory.minWidth = 320;
             constraints.video.mandatory.minHeight = 180;
-            constraints.video.mandatory.minAspectRatio = 1.77;
+            constraints.video.optional.push({ minAspectRatio: 1.77 });
             break;
         // 4:3
         case '960':

local_stats.js

@@ -6,13 +6,13 @@ var LocalStatsCollector = (function() {
      * Size of the webaudio analizer buffer.
      * @type {number}
      */
-    var WEBAUDIO_ANALIZER_FFT_SIZE = 512;
+    var WEBAUDIO_ANALIZER_FFT_SIZE = 2048;

     /**
      * Value of the webaudio analizer smoothing time parameter.
      * @type {number}
      */
-    var WEBAUDIO_ANALIZER_SMOOTING_TIME = 0.1;
+    var WEBAUDIO_ANALIZER_SMOOTING_TIME = 0.8;

     /**
      * <tt>LocalStatsCollector</tt> calculates statistics for the local stream.
@@ -32,7 +32,6 @@ var LocalStatsCollector = (function() {
         this.audioLevel = 0;
     }
-
     /**
      * Starts the collecting the statistics.
      */
@@ -55,14 +54,17 @@ var LocalStatsCollector = (function() {
         this.intervalId = setInterval(
             function () {
                 var array = new Uint8Array(analyser.frequencyBinCount);
-                analyser.getByteFrequencyData(array);
-                self.audioLevel = FrequencyDataToAudioLevel(array);
-                self.updateCallback(self);
+                analyser.getByteTimeDomainData(array);
+                var audioLevel = TimeDomainDataToAudioLevel(array);
+                if(audioLevel != self.audioLevel) {
+                    self.audioLevel = animateLevel(audioLevel, self.audioLevel);
+                    self.updateCallback(LocalStatsCollectorProto.LOCAL_JID, self.audioLevel);
+                }
             },
             this.intervalMilis
         );
-    }
+    };

     /**
      * Stops collecting the statistics.
@@ -72,26 +74,58 @@ var LocalStatsCollector = (function() {
             clearInterval(this.intervalId);
             this.intervalId = null;
         }
-    }
+    };

     /**
-     * Converts frequency data array to audio level.
-     * @param array the frequency data array.
+     * Converts time domain data array to audio level.
+     * @param array the time domain data array.
      * @returns {number} the audio level
      */
-    var FrequencyDataToAudioLevel = function (array) {
+    var TimeDomainDataToAudioLevel = function (samples) {
         var maxVolume = 0;
-        var length = array.length;
+        var length = samples.length;
         for (var i = 0; i < length; i++) {
-            if (maxVolume < array[i])
-                maxVolume = array[i];
+            if (maxVolume < samples[i])
+                maxVolume = samples[i];
         }
-        return maxVolume / 255;
+        return parseFloat(((maxVolume - 127) / 128).toFixed(3));
     };

+    /**
+     * Animates audio level change
+     * @param newLevel the new audio level
+     * @param lastLevel the last audio level
+     * @returns {Number} the audio level to be set
+     */
+    function animateLevel(newLevel, lastLevel)
+    {
+        var value = 0;
+        var diff = lastLevel - newLevel;
+        if(diff > 0.2)
+        {
+            value = lastLevel - 0.2;
+        }
+        else if(diff < -0.4)
+        {
+            value = lastLevel + 0.4;
+        }
+        else
+        {
+            value = newLevel;
+        }
+        return parseFloat(value.toFixed(3));
+    }
+
+    /**
+     * Indicates that this audio level is for local jid.
+     * @type {string}
+     */
+    LocalStatsCollectorProto.LOCAL_JID = 'local';
+
     return LocalStatsCollectorProto;
 })();

rtp_stats.js

@@ -213,6 +213,8 @@ StatsCollector.prototype.processReport = function ()
             // but it seems to vary between 0 and around 32k.
             audioLevel = audioLevel / 32767;
             jidStats.setSsrcAudioLevel(ssrc, audioLevel);
+            if(jid != connection.emuc.myroomjid)
+                this.updateCallback(jid, audioLevel);
         }

         var key = 'packetsReceived';
@@ -281,7 +283,5 @@ StatsCollector.prototype.processReport = function ()
         // bar indicator
         //console.info("Loss SMA3: " + outputAvg + " Q: " + quality);
     }
-
-    self.updateCallback(self);
 };

toolbar.js

@@ -29,7 +29,7 @@ var Toolbar = (function (my) {
         if (sharedKey) {
             $.prompt("Are you sure you would like to remove your secret key?",
                 {
-                    title: "Remove secrect key",
+                    title: "Remove secret key",
                     persistent: false,
                     buttons: { "Remove": true, "Cancel": false},
                     defaultButton: 1,
@@ -42,7 +42,7 @@ var Toolbar = (function (my) {
                 }
             );
         } else {
-            $.prompt('<h2>Set a secrect key to lock your room</h2>' +
+            $.prompt('<h2>Set a secret key to lock your room</h2>' +
                      '<input id="lockKey" type="text" placeholder="your shared key" autofocus>',
                 {
                     persistent: false,
@@ -142,7 +142,7 @@ var Toolbar = (function (my) {
         $.prompt('<h2>Configure your conference</h2>' +
             '<input type="checkbox" id="initMuted"> Participants join muted<br/>' +
             '<input type="checkbox" id="requireNicknames"> Require nicknames<br/><br/>' +
-            'Set a secrect key to lock your room: <input id="lockKey" type="text" placeholder="your shared key" autofocus>',
+            'Set a secret key to lock your room: <input id="lockKey" type="text" placeholder="your shared key" autofocus>',
             {
                 persistent: false,
                 buttons: { "Save": true, "Cancel": false},
@@ -284,5 +284,24 @@ var Toolbar = (function (my) {
         }
     };

+    // Shows or hides the 'recording' button.
+    my.showRecordingButton = function (show) {
+        if (!config.enableRecording) {
+            return;
+        }
+
+        if (show) {
+            $('#recording').css({display: "inline"});
+        }
+        else {
+            $('#recording').css({display: "none"});
+        }
+    };
+
+    // Toggle the state of the recording button
+    my.toggleRecordingButtonState = function() {
+        $('#recordButton').toggleClass('active');
+    };
+
     return my;
 }(Toolbar || {}));

videolayout.js

@@ -26,6 +26,8 @@ var VideoLayout = (function (my) {
         var localVideoContainer = document.getElementById('localVideoWrapper');
         localVideoContainer.appendChild(localVideo);

+        AudioLevels.updateAudioLevelCanvas();
+
         var localVideoSelector = $('#' + localVideo.id);
         // Add click handler to both video and video wrapper elements in case
         // there's no video.
@@ -313,6 +315,8 @@ var VideoLayout = (function (my) {
             addRemoteVideoMenu(peerJid, container);
         remotes.appendChild(container);
+
+        AudioLevels.updateAudioLevelCanvas(peerJid);
         return container;
     };
@@ -579,6 +583,8 @@ var VideoLayout = (function (my) {
         $('#remoteVideos').height(height);
         $('#remoteVideos>span').width(width);
         $('#remoteVideos>span').height(height);
+
+        $(document).trigger("remotevideo.resized", [width, height]);
     };

     /**
@@ -958,3 +964,5 @@ var VideoLayout = (function (my) {

     return my;
 }(VideoLayout || {}));