Merge branch 'multiple-tracks'
commit cfcf6fbc67
@@ -1,7 +1,8 @@
 /* global APP */
-var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
+var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
 var RTCEvents = require("../../service/RTC/RTCEvents");
 var RTCBrowserType = require("./RTCBrowserType");
+var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
 
 /**
  * This implements 'onended' callback normally fired by WebRTC after the stream
@@ -29,7 +30,7 @@ function LocalStream(stream, type, eventEmitter, videoType, isGUMStream) {
     if(isGUMStream === false)
         this.isGUMStream = isGUMStream;
     var self = this;
-    if(type == "audio") {
+    if (MediaStreamType.AUDIO_TYPE === type) {
         this.getTracks = function () {
             return self.stream.getAudioTracks();
         };
@@ -60,7 +61,11 @@ LocalStream.prototype.getOriginalStream = function()
 };
 
 LocalStream.prototype.isAudioStream = function () {
-    return this.type === "audio";
+    return MediaStreamType.AUDIO_TYPE === this.type;
+};
+
+LocalStream.prototype.isVideoStream = function () {
+    return MediaStreamType.VIDEO_TYPE === this.type;
 };
 
 LocalStream.prototype.setMute = function (mute)
@@ -11,7 +11,7 @@ var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
  *
  * @constructor
  */
-function MediaStream(data, ssrc, browser, eventEmitter, muted) {
+function MediaStream(data, ssrc, browser, eventEmitter, muted, type) {
 
     // XXX(gp) to minimize headaches in the future, we should build our
     // abstractions around tracks and not streams. ORTC is track based API.
@@ -23,18 +23,29 @@ function MediaStream(data, ssrc, browser, eventEmitter, muted) {
     // Also, we should be able to associate multiple SSRCs with a MediaTrack as
     // a track might have an associated RTX and FEC sources.
 
+    if (!type) {
+        console.log("Errrm...some code needs an update...");
+    }
+
     this.stream = data.stream;
     this.peerjid = data.peerjid;
     this.videoType = data.videoType;
     this.ssrc = ssrc;
-    this.type = (this.stream.getVideoTracks().length > 0)?
-        MediaStreamType.VIDEO_TYPE : MediaStreamType.AUDIO_TYPE;
+    this.type = type;
     this.muted = muted;
     this.eventEmitter = eventEmitter;
 }
 
+// FIXME duplicated with LocalStream methods - extract base class
+MediaStream.prototype.isAudioStream = function () {
+    return MediaStreamType.AUDIO_TYPE === this.type;
+};
 
-MediaStream.prototype.getOriginalStream = function() {
+MediaStream.prototype.isVideoStream = function () {
+    return MediaStreamType.VIDEO_TYPE === this.type;
+};
+
+MediaStream.prototype.getOriginalStream = function () {
     return this.stream;
 };
 
@@ -76,7 +76,7 @@ var RTC = {
         if(isMuted === true)
             localStream.setMute(true);
 
-        if(type == "audio") {
+        if (MediaStreamType.AUDIO_TYPE === type) {
            this.localAudio = localStream;
         } else {
             this.localVideo = localStream;
@@ -98,16 +98,27 @@ var RTC = {
             muted = pres.videoMuted;
         }
 
-        var remoteStream = new MediaStream(data, ssrc,
-            RTCBrowserType.getBrowserType(), eventEmitter, muted);
+        var self = this;
+        [MediaStreamType.AUDIO_TYPE, MediaStreamType.VIDEO_TYPE].forEach(
+            function (type) {
+                var tracks =
+                    type == MediaStreamType.AUDIO_TYPE
+                    ? data.stream.getAudioTracks() : data.stream.getVideoTracks();
+                if (!tracks || !Array.isArray(tracks) || !tracks.length) {
+                    console.log("Not creating a(n) " + type + " stream: no tracks");
+                    return;
+                }
 
-        if(!this.remoteStreams[jid]) {
-            this.remoteStreams[jid] = {};
-        }
-        this.remoteStreams[jid][remoteStream.type]= remoteStream;
-        eventEmitter.emit(StreamEventTypes.EVENT_TYPE_REMOTE_CREATED,
-            remoteStream);
-        return remoteStream;
+                var remoteStream = new MediaStream(data, ssrc,
+                    RTCBrowserType.getBrowserType(), eventEmitter, muted, type);
+
+                if (!self.remoteStreams[jid]) {
+                    self.remoteStreams[jid] = {};
+                }
+                self.remoteStreams[jid][type] = remoteStream;
+                eventEmitter.emit(StreamEventTypes.EVENT_TYPE_REMOTE_CREATED,
+                    remoteStream);
+            });
     },
     getPCConstraints: function () {
         return this.rtcUtils.pc_constraints;
@@ -218,7 +229,9 @@ var RTC = {
     changeLocalAudio: function (stream, callback) {
         var oldStream = this.localAudio.getOriginalStream();
         var newStream = this.rtcUtils.createStream(stream);
-        this.localAudio = this.createLocalStream(newStream, "audio", true);
+        this.localAudio
+            = this.createLocalStream(
+                newStream, MediaStreamType.AUDIO_TYPE, true);
         // Stop the stream
         this.stopMediaStream(oldStream);
         APP.xmpp.switchStreams(newStream, oldStream, callback, true);
@@ -2,6 +2,7 @@
    RTCPeerConnection, webkitMediaStream, webkitURL, webkitRTCPeerConnection,
    mozRTCIceCandidate, mozRTCSessionDescription, mozRTCPeerConnection */
 /* jshint -W101 */
+var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
 var RTCBrowserType = require("./RTCBrowserType");
 var Resolutions = require("../../service/RTC/Resolutions");
 var AdapterJS = require("./adapter.screenshare");
@@ -523,10 +524,12 @@ RTCUtils.prototype.handleLocalStream = function(stream, usageOptions) {
         videoGUM = (!usageOptions || usageOptions.video !== false);
 
 
-    this.service.createLocalStream(audioStream, "audio", null, null,
+    this.service.createLocalStream(
+        audioStream, MediaStreamType.AUDIO_TYPE, null, null,
         audioMuted, audioGUM);
 
-    this.service.createLocalStream(videoStream, "video", null, 'camera',
+    this.service.createLocalStream(
+        videoStream, MediaStreamType.VIDEO_TYPE, null, 'camera',
         videoMuted, videoGUM);
 };
 
@@ -26,6 +26,7 @@ var JitsiPopover = require("./util/JitsiPopover");
 var CQEvents = require("../../service/connectionquality/CQEvents");
 var DesktopSharingEventTypes
     = require("../../service/desktopsharing/DesktopSharingEventTypes");
+var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
 var RTCEvents = require("../../service/RTC/RTCEvents");
 var RTCBrowserType = require("../RTC/RTCBrowserType");
 var StreamEventTypes = require("../../service/RTC/StreamEventTypes");
@@ -111,14 +112,14 @@ function setupToolbars() {
 
 function streamHandler(stream, isMuted) {
     switch (stream.type) {
-        case "audio":
+        case MediaStreamType.AUDIO_TYPE:
            VideoLayout.changeLocalAudio(stream, isMuted);
             break;
-        case "video":
+        case MediaStreamType.VIDEO_TYPE:
             VideoLayout.changeLocalVideo(stream, isMuted);
             break;
-        case "stream":
-            VideoLayout.changeLocalStream(stream, isMuted);
+        default:
+            console.error("Unknown stream type: " + stream.type);
             break;
     }
 }
@@ -302,7 +302,8 @@ function createLargeVideoHTML()
         '<canvas id="activeSpeakerAudioLevel"></canvas>' +
         '</div>' +
         '<div id="largeVideoWrapper">' +
-        '<video id="largeVideo" autoplay oncontextmenu="return false;"></video>' +
+        '<video id="largeVideo" muted="true"' +
+            'autoplay oncontextmenu="return false;"></video>' +
         '</div id="largeVideoWrapper">' +
         '<span id="videoConnectionMessage"></span>';
     html += '</div>';
@@ -2,6 +2,7 @@
 var ConnectionIndicator = require("./ConnectionIndicator");
 var SmallVideo = require("./SmallVideo");
 var AudioLevels = require("../audio_levels/AudioLevels");
+var MediaStreamType = require("../../../service/RTC/MediaStreamTypes");
 var RTCBrowserType = require("../../RTC/RTCBrowserType");
 var UIUtils = require("../util/UIUtil");
 var XMPPEvents = require("../../../service/xmpp/XMPPEvents");
@@ -178,8 +179,9 @@ RemoteVideo.prototype.remove = function () {
 
 RemoteVideo.prototype.waitForPlayback = function (sel, stream) {
 
-    var isVideo = stream.getVideoTracks().length > 0;
-    if (!isVideo || stream.id === 'mixedmslabel') {
+    var webRtcStream = stream.getOriginalStream();
+    var isVideo = stream.isVideoStream();
+    if (!isVideo || webRtcStream.id === 'mixedmslabel') {
         return;
     }
 
@@ -191,7 +193,7 @@ RemoteVideo.prototype.waitForPlayback = function (sel, stream) {
     var onPlayingHandler = function () {
         // FIXME: why do i have to do this for FF?
         if (RTCBrowserType.isFirefox()) {
-            APP.RTC.attachMediaStream(sel, stream);
+            APP.RTC.attachMediaStream(sel, webRtcStream);
         }
         if (RTCBrowserType.isTemasysPluginUsed()) {
             sel = self.selectVideoElement();
@@ -212,7 +214,8 @@ RemoteVideo.prototype.addRemoteStreamElement = function (stream) {
         return;
 
     var self = this;
-    var isVideo = stream.getVideoTracks().length > 0;
+    var webRtcStream = stream.getOriginalStream();
+    var isVideo = stream.isVideoStream();
     var streamElement = SmallVideo.createStreamElement(stream);
     var newElementId = streamElement.id;
 
@@ -226,14 +229,14 @@ RemoteVideo.prototype.addRemoteStreamElement = function (stream) {
     if (!isVideo || (this.container.offsetParent !== null && isVideo)) {
         this.waitForPlayback(sel, stream);
 
-        APP.RTC.attachMediaStream(sel, stream);
+        APP.RTC.attachMediaStream(sel, webRtcStream);
     }
 
     APP.RTC.addMediaStreamInactiveHandler(
-        stream, function () {
+        webRtcStream, function () {
         console.log('stream ended', this);
 
-        self.removeRemoteStreamElement(stream, isVideo, newElementId);
+        self.removeRemoteStreamElement(webRtcStream, isVideo, newElementId);
     });
 
     // Add click handler.
@@ -4,6 +4,7 @@ var Avatar = require("../avatar/Avatar");
 var UIUtil = require("../util/UIUtil");
 var LargeVideo = require("./LargeVideo");
 var RTCBrowserType = require("../../RTC/RTCBrowserType");
+var MediaStreamType = require("../../../service/RTC/MediaStreamTypes");
 
 function SmallVideo() {
     this.isMuted = false;
@@ -105,19 +106,22 @@ SmallVideo.prototype.setPresenceStatus = function (statusMsg) {
  * Creates an audio or video element for a particular MediaStream.
  */
 SmallVideo.createStreamElement = function (stream) {
-    var isVideo = stream.getVideoTracks().length > 0;
+    var isVideo = stream.isVideoStream();
 
     var element = isVideo ? document.createElement('video')
         : document.createElement('audio');
+    if (isVideo) {
+        element.setAttribute("muted", "true");
+    }
 
     if (!RTCBrowserType.isIExplorer()) {
         element.autoplay = true;
     }
 
     element.id = (isVideo ? 'remoteVideo_' : 'remoteAudio_') +
-        APP.RTC.getStreamID(stream);
+        APP.RTC.getStreamID(stream.getOriginalStream());
 
-    element.onplay = function() {
+    element.onplay = function () {
         console.log("(TIME) Render " + (isVideo ? 'video' : 'audio') + ":\t",
             window.performance.now());
     };
@@ -55,10 +55,6 @@ var VideoLayout = (function (my) {
             lastNEndpointsCache.indexOf(resource) !== -1);
     };
 
-    my.changeLocalStream = function (stream, isMuted) {
-        VideoLayout.changeLocalVideo(stream, isMuted);
-    };
-
     my.changeLocalAudio = function(stream, isMuted) {
         if (isMuted)
             APP.UI.setAudioMuted(true, true);
@@ -187,8 +183,7 @@ var VideoLayout = (function (my) {
             VideoLayout.ensurePeerContainerExists(stream.peerjid);
 
             var resourceJid = Strophe.getResourceFromJid(stream.peerjid);
-            remoteVideos[resourceJid].addRemoteStreamElement(
-                stream.getOriginalStream());
+            remoteVideos[resourceJid].addRemoteStreamElement(stream);
         }
     };
 
@@ -562,7 +562,6 @@ JingleSessionPC.prototype.setRemoteDescription = function (elem, desctype) {
     if (config.webrtcIceUdpDisable) {
         this.remoteSDP.removeUdpCandidates = true;
     }
-
     this.remoteSDP.fromJingle(elem);
     this.readSsrcInfo($(elem).find(">content"));
     if (this.peerconnection.remoteDescription !== null) {
@@ -96,7 +96,7 @@ module.exports = function(XMPP, eventEmitter) {
     switch (action) {
         case 'session-initiate':
             console.log("(TIME) received session-initiate:\t",
-                window.performance.now());
+                window.performance.now(), iq);
             var startMuted = $(iq).find('jingle>startmuted');
             if (startMuted && startMuted.length > 0) {
                 var audioMuted = startMuted.attr("audio");
@@ -176,10 +176,12 @@ module.exports = function(XMPP, eventEmitter) {
             break;
         case 'addsource': // FIXME: proprietary, un-jingleish
         case 'source-add': // FIXME: proprietary
+            console.info("source-add", iq);
             sess.addSource($(iq).find('>jingle>content'));
             break;
         case 'removesource': // FIXME: proprietary, un-jingleish
         case 'source-remove': // FIXME: proprietary
+            console.info("source-remove", iq);
             sess.removeSource($(iq).find('>jingle>content'));
             break;
         default:
@@ -1,6 +1,6 @@
 var MediaStreamType = {
-    VIDEO_TYPE: "Video",
+    VIDEO_TYPE: "video",
 
-    AUDIO_TYPE: "Audio"
+    AUDIO_TYPE: "audio"
 };
 module.exports = MediaStreamType;
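
Taken together, the hunks above stop guessing a remote stream's kind from its video tracks: each remote WebRTC stream is wrapped once per track type, keyed first by peer JID and then by MediaStreamType ("audio" or "video"), and the UI talks to the wrapper (isVideoStream(), getOriginalStream()) instead of the raw stream. A minimal sketch of the resulting bookkeeping, written against names visible in the diff; the require path and the storeRemoteStream helper are illustrative only, not part of the commit:

    // Illustrative sketch; mirrors the per-type map the RTC module builds above.
    var MediaStreamType = require("../../service/RTC/MediaStreamTypes"); // path assumed

    // peer JID -> { audio: wrapper, video: wrapper }
    var remoteStreams = {};

    function storeRemoteStream(jid, type, remoteStream) {
        if (!remoteStreams[jid]) {
            remoteStreams[jid] = {};
        }
        // type is MediaStreamType.AUDIO_TYPE or MediaStreamType.VIDEO_TYPE,
        // so audio and video from the same peer no longer overwrite each other.
        remoteStreams[jid][type] = remoteStream;
    }

    // UI code can then stay type-agnostic, e.g.:
    // var wrapper = remoteStreams[jid][MediaStreamType.VIDEO_TYPE];
    // if (wrapper && wrapper.isVideoStream()) {
    //     APP.RTC.attachMediaStream(sel, wrapper.getOriginalStream());
    // }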