Implements statistics module.

Author: hristoterezov
Date: 2014-12-17 18:21:25 +02:00
Commit: c6d8e34779 (parent: 7bacd957bd)
9 changed files with 1599 additions and 227 deletions

Makefile (new file, 22 lines)

@ -0,0 +1,22 @@
BROWSERIFY = browserify
GLOBAL_FLAGS = -e
MODULE_DIR = modules
MODULE_SUBDIRS = $(wildcard $(MODULE_DIR)/*/)
MODULES = $(MODULE_SUBDIRS:$(MODULE_DIR)/%/=%)
OUTPUT_DIR = .
DEPLOY_DIR = libs/modules
all:FLAGS = $(GLOBAL_FLAGS)
all:$(MODULES)
debug:FLAGS = -d $(GLOBAL_FLAGS)
debug:$(MODULES)
$(MODULES): *.js
$(BROWSERIFY) $(FLAGS) $(MODULE_DIR)/$@/$@.js -s $@ -o $(OUTPUT_DIR)/$@.bundle.js
clean:
@rm $(OUTPUT_DIR)/*.bundle.js
deploy:
@mkdir -p $(DEPLOY_DIR) && cp $(OUTPUT_DIR)/*.bundle.js $(DEPLOY_DIR)
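The -s $@ (standalone) flag is what lets these bundles coexist with the existing non-module scripts: Browserify wraps each module's exports in a UMD header, so when a bundle is loaded from a plain script tag and no module loader is present, the exports become a global named after the module. For the statistics module introduced in this commit that means a window.statistics global; the sketch below only illustrates that, using one of the listener methods defined later in this diff.

// After <script src="libs/modules/statistics.bundle.js"></script> has loaded,
// the module's exports are reachable as a plain global named "statistics":
statistics.addAudioLevelListener(function (jid, audioLevel) {
    // jid is statistics.LOCAL_JID ('local') for the local participant
    console.log("audio level for " + jid + ": " + audioLevel);
});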

README.md

@ -12,6 +12,10 @@ Installing Jitsi Meet is quite a simple experience. For Debian-based systems, we
For other systems, or if you wish to install all components manually, see the [detailed installation instructions](https://github.com/jitsi/jitsi-meet/blob/master/doc/manual-install.md).
## Development tools
Jitsi Meet uses [Browserify](http://browserify.org). If you want to make changes in the code you need to [install Browserify](http://browserify.org/#install). Browserify requires [nodejs](http://nodejs.org).
## Discuss
Please use the [Jitsi dev mailing list](http://lists.jitsi.org/pipermail/dev/) to discuss feature requests before opening an issue on github.

app.js (84 lines changed)

@ -18,18 +18,6 @@ var notReceivedSSRCs = [];
var jid2Ssrc = {};
/**
* The stats collector that process stats data and triggers updates to app.js.
* @type {StatsCollector}
*/
var statsCollector = null;
/**
* The stats collector for the local stream.
* @type {LocalStatsCollector}
*/
var localStatsCollector = null;
/**
* Indicates whether ssrc is camera video or desktop stream.
* FIXME: remove those maps
@ -100,7 +88,7 @@ function init() {
videoStream.addTrack(videoTracks[i]);
}
VideoLayout.changeLocalAudio(audioStream);
startLocalRtpStatsCollector(audioStream);
statistics.onStreamCreated(audioStream);
VideoLayout.changeLocalVideo(videoStream, true);
@ -108,7 +96,7 @@ function init() {
else
{
VideoLayout.changeLocalStream(stream);
startLocalRtpStatsCollector(stream);
statistics.onStreamCreated(stream);
}
@ -559,7 +547,7 @@ function muteVideo(pc, unmute) {
function audioLevelUpdated(jid, audioLevel)
{
var resourceJid;
if(jid === LocalStatsCollector.LOCAL_JID)
if(jid === statistics.LOCAL_JID)
{
resourceJid = AudioLevels.LOCAL_LEVEL;
if(isAudioMuted())
@ -575,66 +563,13 @@ function audioLevelUpdated(jid, audioLevel)
AudioLevels.updateAudioLevel(resourceJid, audioLevel);
}
/**
* Starts the {@link StatsCollector} if the feature is enabled in config.js.
*/
function startRtpStatsCollector()
{
stopRTPStatsCollector();
if (config.enableRtpStats)
{
statsCollector = new StatsCollector(
getConferenceHandler().peerconnection, 200, audioLevelUpdated, 2000,
ConnectionQuality.updateLocalStats);
statsCollector.start();
}
}
/**
* Stops the {@link StatsCollector}.
*/
function stopRTPStatsCollector()
{
if (statsCollector)
{
statsCollector.stop();
statsCollector = null;
ConnectionQuality.stopSendingStats();
}
}
/**
* Starts the {@link LocalStatsCollector} if the feature is enabled in config.js
* @param stream the stream that will be used for collecting statistics.
*/
function startLocalRtpStatsCollector(stream)
{
if(config.enableRtpStats)
{
localStatsCollector = new LocalStatsCollector(stream, 100, audioLevelUpdated);
localStatsCollector.start();
}
}
/**
* Stops the {@link LocalStatsCollector}.
*/
function stopLocalRtpStatsCollector()
{
if(localStatsCollector)
{
localStatsCollector.stop();
localStatsCollector = null;
}
}
$(document).bind('callincoming.jingle', function (event, sid) {
var sess = connection.jingle.sessions[sid];
// TODO: do we check activecall == null?
activecall = sess;
startRtpStatsCollector();
statistics.onConfereceCreated(sess);
// Bind data channel listener in case we're a regular participant
if (config.openSctp)
@ -652,7 +587,7 @@ $(document).bind('callincoming.jingle', function (event, sid) {
$(document).bind('conferenceCreated.jingle', function (event, focus)
{
startRtpStatsCollector();
statistics.onConfereceCreated(getConferenceHandler());
});
$(document).bind('conferenceCreated.jingle', function (event, focus)
@ -1461,6 +1396,10 @@ $(document).ready(function () {
}
});
statistics.addAudioLevelListener(audioLevelUpdated);
statistics.addConnectionStatsListener(ConnectionQuality.updateLocalStats);
statistics.addRemoteStatsStopListener(ConnectionQuality.stopSendingStats);
Moderator.init();
// Set the defaults for prompt dialogs.
@ -1577,10 +1516,7 @@ function disposeConference(onUnload) {
}
handler.peerconnection.close();
}
stopRTPStatsCollector();
if(onUnload) {
stopLocalRtpStatsCollector();
}
statistics.onDisposeConference(onUnload);
activecall = null;
}
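Taken together, the app.js hunks replace the four start/stop helper functions with one-time listener registration plus lifecycle calls into the statistics module. A condensed sketch of the new control flow, collected from the hunks above rather than copied from any single place in the file:

// One-time wiring during document.ready:
statistics.addAudioLevelListener(audioLevelUpdated);
statistics.addConnectionStatsListener(ConnectionQuality.updateLocalStats);
statistics.addRemoteStatsStopListener(ConnectionQuality.stopSendingStats);

// Lifecycle hooks that replace the removed helpers:
statistics.onStreamCreated(stream);        // was startLocalRtpStatsCollector(stream)
statistics.onConfereceCreated(sess);       // was startRtpStatsCollector()
statistics.onDisposeConference(onUnload);  // was stopRTPStatsCollector(), plus
                                           // stopLocalRtpStatsCollector() on unload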

index.html

@ -47,8 +47,6 @@
<script src="replacement.js?v=7"></script><!-- link and smiley replacement -->
<script src="moderatemuc.js?v=4"></script><!-- moderator plugin -->
<script src="analytics.js?v=1"></script><!-- google analytics plugin -->
<script src="rtp_sts.js?v=5"></script><!-- RTP stats processing -->
<script src="local_sts.js?v=2"></script><!-- Local stats processing -->
<script src="videolayout.js?v=31"></script><!-- video ui -->
<script src="connectionquality.js?v=1"></script>
<script src="toolbar.js?v=7"></script><!-- toolbar ui -->
@ -66,6 +64,7 @@
<script src="message_handler.js?v=2"></script>
<script src="api_connector.js?v=2"></script>
<script src="settings_menu.js?v=1"></script>
<script src="libs/modules/statistics.bundle.js"></script>
<script src="avatar.js?v=4"></script><!-- avatars -->
<link rel="stylesheet" href="css/font.css?v=6"/>
<link rel="stylesheet" href="css/toastr.css?v=1">

libs/modules/statistics.bundle.js: file diff suppressed because it is too large.

local_sts.js (deleted)

@ -1,131 +0,0 @@
/**
* Provides statistics for the local stream.
*/
var LocalStatsCollector = (function() {
/**
* Size of the webaudio analizer buffer.
* @type {number}
*/
var WEBAUDIO_ANALIZER_FFT_SIZE = 2048;
/**
* Value of the webaudio analizer smoothing time parameter.
* @type {number}
*/
var WEBAUDIO_ANALIZER_SMOOTING_TIME = 0.8;
/**
* <tt>LocalStatsCollector</tt> calculates statistics for the local stream.
*
* @param stream the local stream
* @param interval stats refresh interval given in ms.
* @param {function(LocalStatsCollector)} updateCallback the callback called on stats
* update.
* @constructor
*/
function LocalStatsCollectorProto(stream, interval, updateCallback) {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
this.stream = stream;
this.intervalId = null;
this.intervalMilis = interval;
this.audioLevelsUpdateCallback = updateCallback;
this.audioLevel = 0;
}
/**
* Starts the collecting the statistics.
*/
LocalStatsCollectorProto.prototype.start = function () {
if (!window.AudioContext)
return;
var context = new AudioContext();
var analyser = context.createAnalyser();
analyser.smoothingTimeConstant = WEBAUDIO_ANALIZER_SMOOTING_TIME;
analyser.fftSize = WEBAUDIO_ANALIZER_FFT_SIZE;
var source = context.createMediaStreamSource(this.stream);
source.connect(analyser);
var self = this;
this.intervalId = setInterval(
function () {
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteTimeDomainData(array);
var audioLevel = TimeDomainDataToAudioLevel(array);
if(audioLevel != self.audioLevel) {
self.audioLevel = animateLevel(audioLevel, self.audioLevel);
self.audioLevelsUpdateCallback(LocalStatsCollectorProto.LOCAL_JID, self.audioLevel);
}
},
this.intervalMilis
);
};
/**
* Stops collecting the statistics.
*/
LocalStatsCollectorProto.prototype.stop = function () {
if (this.intervalId) {
clearInterval(this.intervalId);
this.intervalId = null;
}
};
/**
* Converts time domain data array to audio level.
* @param array the time domain data array.
* @returns {number} the audio level
*/
var TimeDomainDataToAudioLevel = function (samples) {
var maxVolume = 0;
var length = samples.length;
for (var i = 0; i < length; i++) {
if (maxVolume < samples[i])
maxVolume = samples[i];
}
return parseFloat(((maxVolume - 127) / 128).toFixed(3));
};
/**
* Animates audio level change
* @param newLevel the new audio level
* @param lastLevel the last audio level
* @returns {Number} the audio level to be set
*/
function animateLevel(newLevel, lastLevel)
{
var value = 0;
var diff = lastLevel - newLevel;
if(diff > 0.2)
{
value = lastLevel - 0.2;
}
else if(diff < -0.4)
{
value = lastLevel + 0.4;
}
else
{
value = newLevel;
}
return parseFloat(value.toFixed(3));
}
/**
* Indicates that this audio level is for local jid.
* @type {string}
*/
LocalStatsCollectorProto.LOCAL_JID = 'local';
return LocalStatsCollectorProto;
})();

LocalStatsCollector.js (new file)

@ -0,0 +1,130 @@
/**
* Provides statistics for the local stream.
*/
/**
* Size of the webaudio analizer buffer.
* @type {number}
*/
var WEBAUDIO_ANALIZER_FFT_SIZE = 2048;
/**
* Value of the webaudio analizer smoothing time parameter.
* @type {number}
*/
var WEBAUDIO_ANALIZER_SMOOTING_TIME = 0.8;
/**
* Converts time domain data array to audio level.
* @param array the time domain data array.
* @returns {number} the audio level
*/
function timeDomainDataToAudioLevel(samples) {
var maxVolume = 0;
var length = samples.length;
for (var i = 0; i < length; i++) {
if (maxVolume < samples[i])
maxVolume = samples[i];
}
return parseFloat(((maxVolume - 127) / 128).toFixed(3));
};
/**
* Animates audio level change
* @param newLevel the new audio level
* @param lastLevel the last audio level
* @returns {Number} the audio level to be set
*/
function animateLevel(newLevel, lastLevel)
{
var value = 0;
var diff = lastLevel - newLevel;
if(diff > 0.2)
{
value = lastLevel - 0.2;
}
else if(diff < -0.4)
{
value = lastLevel + 0.4;
}
else
{
value = newLevel;
}
return parseFloat(value.toFixed(3));
}
/**
* <tt>LocalStatsCollector</tt> calculates statistics for the local stream.
*
* @param stream the local stream
* @param interval stats refresh interval given in ms.
* @param {function(LocalStatsCollector)} updateCallback the callback called on stats
* update.
* @constructor
*/
function LocalStatsCollector(stream, interval, statisticsService, eventEmitter) {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
this.stream = stream;
this.intervalId = null;
this.intervalMilis = interval;
this.eventEmitter = eventEmitter;
this.audioLevel = 0;
this.statisticsService = statisticsService;
}
/**
* Starts collecting the statistics.
*/
LocalStatsCollector.prototype.start = function () {
if (!window.AudioContext)
return;
var context = new AudioContext();
var analyser = context.createAnalyser();
analyser.smoothingTimeConstant = WEBAUDIO_ANALIZER_SMOOTING_TIME;
analyser.fftSize = WEBAUDIO_ANALIZER_FFT_SIZE;
var source = context.createMediaStreamSource(this.stream);
source.connect(analyser);
var self = this;
this.intervalId = setInterval(
function () {
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteTimeDomainData(array);
var audioLevel = timeDomainDataToAudioLevel(array);
if(audioLevel != self.audioLevel) {
self.audioLevel = animateLevel(audioLevel, self.audioLevel);
self.eventEmitter.emit(
"statistics.audioLevel",
self.statisticsService.LOCAL_JID,
self.audioLevel);
}
},
this.intervalMilis
);
};
/**
* Stops collecting the statistics.
*/
LocalStatsCollector.prototype.stop = function () {
if (this.intervalId) {
clearInterval(this.intervalId);
this.intervalId = null;
}
};
module.exports = LocalStatsCollector;
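The two helpers at the top of the file are easy to check by hand: getByteTimeDomainData yields unsigned bytes with silence around 127-128, timeDomainDataToAudioLevel normalises the peak into roughly the 0..1 range, and animateLevel limits how fast the published level may fall (0.2 per tick) or rise (0.4 per tick). A small worked sketch with made-up sample values; the helpers are module-private, so these calls are purely illustrative:

// Hypothetical analyser output with a peak byte of 191:
timeDomainDataToAudioLevel(new Uint8Array([127, 150, 191, 130]));
// -> (191 - 127) / 128 = 0.5

// Previous published level 0.9, new raw level 0.5:
// diff = 0.9 - 0.5 = 0.4 > 0.2, so the level drops by at most 0.2 per tick.
animateLevel(0.5, 0.9);  // -> 0.7

// Previous level 0.1, new raw level 0.8: diff = -0.7 < -0.4,
// so the level rises by at most 0.4 per tick.
animateLevel(0.8, 0.1);  // -> 0.5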

RTPStatsCollector.js

@ -13,6 +13,13 @@ function calculatePacketLoss(lostPackets, totalPackets) {
return Math.round((lostPackets/totalPackets)*100);
}
function getStatValue(item, name) {
if(!keyMap[RTC.browser][name])
throw "The property isn't supported!";
var key = keyMap[RTC.browser][name];
return RTC.browser == "chrome"? item.stat(key) : item[key];
}
/**
* Peer statistics data holder.
* @constructor
@ -124,9 +131,7 @@ PeerStats.transport = [];
* called on stats update.
* @constructor
*/
function StatsCollector(peerconnection, audioLevelsInterval,
audioLevelsUpdateCallback, statsInterval,
statsUpdateCallback)
function StatsCollector(peerconnection, audioLevelsInterval, statsInterval, eventEmitter)
{
this.peerconnection = peerconnection;
this.baselineAudioLevelsReport = null;
@ -134,6 +139,7 @@ function StatsCollector(peerconnection, audioLevelsInterval,
this.currentStatsReport = null;
this.baselineStatsReport = null;
this.audioLevelsIntervalId = null;
this.eventEmitter = eventEmitter;
/**
* Gather PeerConnection stats once every this many milliseconds.
@ -172,11 +178,10 @@ function StatsCollector(peerconnection, audioLevelsInterval,
this.statsIntervalMilis = statsInterval;
// Map of jids to PeerStats
this.jid2stats = {};
this.audioLevelsUpdateCallback = audioLevelsUpdateCallback;
this.statsUpdateCallback = statsUpdateCallback;
}
module.exports = StatsCollector;
/**
* Stops stats updates.
*/
@ -622,7 +627,7 @@ StatsCollector.prototype.processStatsReport = function () {
upload:
calculatePacketLoss(lostPackets.upload, totalPackets.upload)
};
this.statsUpdateCallback(
this.eventEmitter.emit("statistics.connectionstats",
{
"bitrate": PeerStats.bitrate,
"packetLoss": PeerStats.packetLoss,
@ -696,17 +701,10 @@ StatsCollector.prototype.processAudioLevelReport = function ()
audioLevel = audioLevel / 32767;
jidStats.setSsrcAudioLevel(ssrc, audioLevel);
if(jid != connection.emuc.myroomjid)
this.audioLevelsUpdateCallback(jid, audioLevel);
this.eventEmitter.emit("statistics.audioLevel", jid, audioLevel);
}
}
};
function getStatValue(item, name) {
if(!keyMap[RTC.browser][name])
throw "The property isn't supported!";
var key = keyMap[RTC.browser][name];
return RTC.browser == "chrome"? item.stat(key) : item[key];
}
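Moving getStatValue to the top of the file does not change its job: it hides the cross-browser differences in the legacy getStats() report items, where Chrome exposes values through an item.stat(name) method and Firefox through plain properties, with keyMap translating a neutral name into each browser's key. The results themselves no longer flow through constructor callbacks but over the shared event emitter, so a consumer looks roughly like the sketch below (assuming access to the emitter instance the collector was built with; statistics.js, shown next, wraps exactly these subscriptions in its add*Listener methods, and only the payload fields visible in this hunk are listed):

eventEmitter.on("statistics.connectionstats", function (stats) {
    // stats.bitrate and stats.packetLoss come from PeerStats; packet loss is
    // calculatePacketLoss(lost, total), e.g. Math.round((6 / 200) * 100) === 3.
    console.log("bitrate:", stats.bitrate, "packet loss:", stats.packetLoss);
});

eventEmitter.on("statistics.audioLevel", function (jid, audioLevel) {
    // Remote audio levels arrive scaled to 0..1 (divided by 32767 in the hunk above).
    console.log("audio level for", jid, audioLevel);
});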

statistics.js (new file)

@ -0,0 +1,132 @@
/**
* Created by hristo on 8/4/14.
*/
var LocalStats = require("./LocalStatsCollector.js");
var RTPStats = require("./RTPStatsCollector.js");
var EventEmitter = require("events");
//var StreamEventTypes = require("../service/RTC/StreamEventTypes.js");
//var XMPPEvents = require("../service/xmpp/XMPPEvents");
var eventEmitter = new EventEmitter();
var localStats = null;
var rtpStats = null;
var RTCService = null;
function stopLocal()
{
if(localStats)
{
localStats.stop();
localStats = null;
}
}
function stopRemote()
{
if(rtpStats)
{
rtpStats.stop();
eventEmitter.emit("statistics.stop");
rtpStats = null;
}
}
function startRemoteStats (peerconnection) {
if (config.enableRtpStats)
{
if(rtpStats)
{
rtpStats.stop();
rtpStats = null;
}
rtpStats = new RTPStats(peerconnection, 200, 2000, eventEmitter);
rtpStats.start();
}
}
var statistics =
{
/**
* Indicates that this audio level is for local jid.
* @type {string}
*/
LOCAL_JID: 'local',
addAudioLevelListener: function(listener)
{
eventEmitter.on("statistics.audioLevel", listener);
},
removeAudioLevelListener: function(listener)
{
eventEmitter.removeListener("statistics.audioLevel", listener);
},
addConnectionStatsListener: function(listener)
{
eventEmitter.on("statistics.connectionstats", listener);
},
removeConnectionStatsListener: function(listener)
{
eventEmitter.removeListener("statistics.connectionstats", listener);
},
addRemoteStatsStopListener: function(listener)
{
eventEmitter.on("statistics.stop", listener);
},
removeRemoteStatsStopListener: function(listener)
{
eventEmitter.removeListener("statistics.stop", listener);
},
stop: function () {
stopLocal();
stopRemote();
if(eventEmitter)
{
eventEmitter.removeAllListeners();
}
},
stopRemoteStatistics: function()
{
stopRemote();
},
onConfereceCreated: function (event) {
startRemoteStats(event.peerconnection);
},
onDisposeConference: function (onUnload) {
stopRemote();
if(onUnload) {
stopLocal();
eventEmitter.removeAllListeners();
}
},
onStreamCreated: function(stream)
{
if(stream.getAudioTracks().length === 0)
return;
localStats = new LocalStats(stream, 100, this,
eventEmitter);
localStats.start();
}
};
module.exports = statistics;
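statistics.js is the only surface the rest of the application needs: it owns the EventEmitter, constructs the two collectors, and maps lifecycle calls onto them. onStreamCreated starts local audio-level sampling only when the stream has audio tracks, onConfereceCreated starts remote RTP stats only when config.enableRtpStats is set, and onDisposeConference tears things down. A compressed usage sketch mirroring how app.js drives it in this commit; localStream and sess are placeholders for the local MediaStream and an object exposing a peerconnection property:

// Page code reaches the module through the global created by the standalone
// bundle (see the Makefile's -s flag); inside the bundle it would be require()'d.
statistics.addAudioLevelListener(function (jid, level) { /* update audio level UI */ });
statistics.addConnectionStatsListener(function (stats) { /* update connection indicator */ });
statistics.addRemoteStatsStopListener(function () { /* stop reporting remote stats */ });

statistics.onStreamCreated(localStream);   // starts LocalStatsCollector if the stream has audio
statistics.onConfereceCreated(sess);       // starts RTPStatsCollector when config.enableRtpStats is set
// ...
statistics.onDisposeConference(false);     // conference torn down: stop remote stats only
statistics.onDisposeConference(true);      // page unload: also stop local stats and drop listeners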