if (typeof Janus!="undefined") console.error("[TelsomeWebphone] Conflicto con la libreria Janus"); else { /* The MIT License (MIT) Copyright (c) 2016 Meetecho Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ // List of sessions Janus.sessions = {}; Janus.isExtensionEnabled = function() { if(navigator.mediaDevices && navigator.mediaDevices.getDisplayMedia) { // No need for the extension, getDisplayMedia is supported return true; } if(window.navigator.userAgent.match('Chrome')) { var chromever = parseInt(window.navigator.userAgent.match(/Chrome\/(.*) /)[1], 10); var maxver = 33; if(window.navigator.userAgent.match('Linux')) maxver = 35; // "known" crash in chrome 34 and 35 on linux if(chromever >= 26 && chromever <= maxver) { // Older versions of Chrome don't support this extension-based approach, so lie return true; } return Janus.extension.isInstalled(); } else { // Firefox of others, no need for the extension (but this doesn't mean it will work) return true; } }; var defaultExtension = { // Screensharing Chrome Extension ID extensionId: 'hapfgfdkleiggjjpfpenajgdnfckjpaj', isInstalled: function() { return document.querySelector('#janus-extension-installed') !== null; }, getScreen: function (callback) { var pending = window.setTimeout(function () { var error = new Error('NavigatorUserMediaError'); error.name = 'The required Chrome extension is not installed: click here to install it. 
(NOTE: this will need you to refresh the page)'; return callback(error); }, 1000); this.cache[pending] = callback; window.postMessage({ type: 'janusGetScreen', id: pending }, '*'); }, init: function () { var cache = {}; this.cache = cache; // Wait for events from the Chrome Extension window.addEventListener('message', function (event) { if(event.origin != window.location.origin) return; if(event.data.type == 'janusGotScreen' && cache[event.data.id]) { var callback = cache[event.data.id]; delete cache[event.data.id]; if (event.data.sourceId === '') { // user canceled var error = new Error('NavigatorUserMediaError'); error.name = 'You cancelled the request for permission, giving up...'; callback(error); } else { callback(null, event.data.sourceId); } } else if (event.data.type == 'janusGetScreenPending') { console.log('clearing ', event.data.id); window.clearTimeout(event.data.id); } }); } }; Janus.useDefaultDependencies = function (deps) { var f = (deps && deps.fetch) || fetch; var p = (deps && deps.Promise) || Promise; var socketCls = (deps && deps.WebSocket) || WebSocket; return { newWebSocket: function(server, proto) { return new socketCls(server, proto); }, extension: (deps && deps.extension) || defaultExtension, isArray: function(arr) { return Array.isArray(arr); }, webRTCAdapter: (deps && deps.adapter) || adapter, httpAPICall: function(url, options) { var fetchOptions = { method: options.verb, headers: { 'Accept': 'application/json, text/plain, */*' }, cache: 'no-cache' }; if(options.verb === "POST") { fetchOptions.headers['Content-Type'] = 'application/json'; } if(options.withCredentials !== undefined) { fetchOptions.credentials = options.withCredentials === true ? 'include' : (options.withCredentials ? options.withCredentials : 'omit'); } if(options.body) { fetchOptions.body = JSON.stringify(options.body); } var fetching = f(url, fetchOptions).catch(function(error) { return p.reject({message: 'Probably a network error, is the server down?', error: error}); }); /* * fetch() does not natively support timeouts. * Work around this by starting a timeout manually, and racing it agains the fetch() to see which thing resolves first. */ if(options.timeout) { var timeout = new p(function(resolve, reject) { var timerId = setTimeout(function() { clearTimeout(timerId); return reject({message: 'Request timed out', timeout: options.timeout}); }, options.timeout); }); fetching = p.race([fetching,timeout]); } fetching.then(function(response) { if(response.ok) { if(typeof(options.success) === typeof(Janus.noop)) { return response.json().then(function(parsed) { options.success(parsed); }).catch(function(error) { return p.reject({message: 'Failed to parse response body', error: error, response: response}); }); } } else { return p.reject({message: 'API call failed', response: response}); } }).catch(function(error) { if(typeof(options.error) === typeof(Janus.noop)) { options.error(error.message || '<< internal error >>', error); } }); return fetching; } } }; Janus.useOldDependencies = function (deps) { var jq = (deps && deps.jQuery) || jQuery; var socketCls = (deps && deps.WebSocket) || WebSocket; return { newWebSocket: function(server, proto) { return new socketCls(server, proto); }, isArray: function(arr) { return jq.isArray(arr); }, extension: (deps && deps.extension) || defaultExtension, webRTCAdapter: (deps && deps.adapter) || adapter, httpAPICall: function(url, options) { var payload = options.body !== undefined ? 
{ contentType: 'application/json', data: JSON.stringify(options.body) } : {}; var credentials = options.withCredentials !== undefined ? {xhrFields: {withCredentials: options.withCredentials}} : {}; return jq.ajax(jq.extend(payload, credentials, { url: url, type: options.verb, cache: false, dataType: 'json', async: options.async, timeout: options.timeout, success: function(result) { if(typeof(options.success) === typeof(Janus.noop)) { options.success(result); } }, error: function(xhr, status, err) { if(typeof(options.error) === typeof(Janus.noop)) { options.error(status, err); } } })); }, }; }; Janus.noop = function() {}; Janus.dataChanDefaultLabel = "JanusDataChannel"; // Note: in the future we may want to change this, e.g., as was // attempted in https://github.com/meetecho/janus-gateway/issues/1670 Janus.endOfCandidates = null; // Initialization Janus.init = function(options) { options = options || {}; options.callback = (typeof options.callback == "function") ? options.callback : Janus.noop; if(Janus.initDone) { // Already initialized options.callback(); } else { if(typeof console == "undefined" || typeof console.log == "undefined") console = { log: function() {} }; // Console logging (all debugging disabled by default) Janus.trace = Janus.noop; Janus.debug = Janus.noop; Janus.vdebug = Janus.noop; Janus.log = Janus.noop; Janus.warn = Janus.noop; Janus.error = Janus.noop; if(options.debug === true || options.debug === "all") { // Enable all debugging levels Janus.trace = console.trace.bind(console); Janus.debug = console.debug.bind(console); Janus.vdebug = console.debug.bind(console); Janus.log = console.log.bind(console); Janus.warn = console.warn.bind(console); Janus.error = console.error.bind(console); } else if(Array.isArray(options.debug)) { for(var d of options.debug) { switch(d) { case "trace": Janus.trace = console.trace.bind(console); break; case "debug": Janus.debug = console.debug.bind(console); break; case "vdebug": Janus.vdebug = console.debug.bind(console); break; case "log": Janus.log = console.log.bind(console); break; case "warn": Janus.warn = console.warn.bind(console); break; case "error": Janus.error = console.error.bind(console); break; default: console.error("Unknown debugging option '" + d + "' (supported: 'trace', 'debug', 'vdebug', 'log', warn', 'error')"); break; } } } Janus.log("Initializing library"); var usedDependencies = options.dependencies || Janus.useDefaultDependencies(); Janus.isArray = usedDependencies.isArray; Janus.webRTCAdapter = usedDependencies.webRTCAdapter; Janus.httpAPICall = usedDependencies.httpAPICall; Janus.newWebSocket = usedDependencies.newWebSocket; Janus.extension = usedDependencies.extension; Janus.extension.init(); // Helper method to enumerate devices Janus.listDevices = function(callback, config) { callback = (typeof callback == "function") ? 
callback : Janus.noop; if (config == null) config = { audio: true, video: true }; if(Janus.isGetUserMediaAvailable()) { navigator.mediaDevices.getUserMedia(config) .then(function(stream) { navigator.mediaDevices.enumerateDevices().then(function(devices) { Janus.debug(devices); callback(devices); // Get rid of the now useless stream try { var tracks = stream.getTracks(); for(var mst of tracks) { if(mst) mst.stop(); } } catch(e) {} }); }) .catch(function(err) { Janus.error(err); callback([]); }); } else { Janus.warn("navigator.mediaDevices unavailable"); callback([]); } } // Helper methods to attach/reattach a stream to a video element (previously part of adapter.js) Janus.attachMediaStream = function(element, stream) { try { element.srcObject = stream; } catch (e) { try { element.src = URL.createObjectURL(stream); } catch (e) { Janus.error("Error attaching stream to element"); } } }; Janus.reattachMediaStream = function(to, from) { try { to.srcObject = from.srcObject; } catch (e) { try { to.src = from.src; } catch (e) { Janus.error("Error reattaching stream to element"); } } }; // Detect tab close: make sure we don't loose existing onbeforeunload handlers // (note: for iOS we need to subscribe to a different event, 'pagehide', see // https://gist.github.com/thehunmonkgroup/6bee8941a49b86be31a787fe8f4b8cfe) var iOS = ['iPad', 'iPhone', 'iPod'].indexOf(navigator.platform) >= 0; var eventName = iOS ? 'pagehide' : 'beforeunload'; var oldOBF = window["on" + eventName]; window.addEventListener(eventName, function(event) { Janus.log("Closing window"); for(var s in Janus.sessions) { if(Janus.sessions[s] && Janus.sessions[s].destroyOnUnload) { Janus.log("Destroying session " + s); Janus.sessions[s].destroy({unload: true, notifyDestroyed: false}); } } if(oldOBF && typeof oldOBF == "function") oldOBF(); }); // If this is a Safari Technology Preview, check if VP8 is supported Janus.safariVp8 = false; if(Janus.webRTCAdapter.browserDetails.browser === 'safari' && Janus.webRTCAdapter.browserDetails.version >= 605) { // Let's see if RTCRtpSender.getCapabilities() is there if(RTCRtpSender && RTCRtpSender.getCapabilities && RTCRtpSender.getCapabilities("video") && RTCRtpSender.getCapabilities("video").codecs && RTCRtpSender.getCapabilities("video").codecs.length) { for(var codec of RTCRtpSender.getCapabilities("video").codecs) { if(codec && codec.mimeType && codec.mimeType.toLowerCase() === "video/vp8") { Janus.safariVp8 = true; break; } } if(Janus.safariVp8) { Janus.log("This version of Safari supports VP8"); } else { Janus.warn("This version of Safari does NOT support VP8: if you're using a Technology Preview, " + "try enabling the 'WebRTC VP8 codec' setting in the 'Experimental Features' Develop menu"); } } else { // We do it in a very ugly way, as there's no alternative... 
// We create a PeerConnection to see if VP8 is in an offer var testpc = new RTCPeerConnection({}, {}); testpc.createOffer({offerToReceiveVideo: true}).then(function(offer) { Janus.safariVp8 = offer.sdp.indexOf("VP8") !== -1; if(Janus.safariVp8) { Janus.log("This version of Safari supports VP8"); } else { Janus.warn("This version of Safari does NOT support VP8: if you're using a Technology Preview, " + "try enabling the 'WebRTC VP8 codec' setting in the 'Experimental Features' Develop menu"); } testpc.close(); testpc = null; }); } } // Check if this browser supports Unified Plan and transceivers // Based on https://codepen.io/anon/pen/ZqLwWV?editors=0010 Janus.unifiedPlan = false; if(Janus.webRTCAdapter.browserDetails.browser === 'firefox' && Janus.webRTCAdapter.browserDetails.version >= 59) { // Firefox definitely does, starting from version 59 Janus.unifiedPlan = true; } else if(Janus.webRTCAdapter.browserDetails.browser === 'chrome' && Janus.webRTCAdapter.browserDetails.version < 72) { // Chrome does, but it's only usable from version 72 on Janus.unifiedPlan = false; } else if(!window.RTCRtpTransceiver || !('currentDirection' in RTCRtpTransceiver.prototype)) { // Safari supports addTransceiver() but not Unified Plan when // currentDirection is not defined (see codepen above). Janus.unifiedPlan = false; } else { // Check if addTransceiver() throws an exception const tempPc = new RTCPeerConnection(); try { tempPc.addTransceiver('audio'); Janus.unifiedPlan = true; } catch (e) {} tempPc.close(); } Janus.initDone = true; options.callback(); } }; // Helper method to check whether WebRTC is supported by this browser Janus.isWebrtcSupported = function() { return window.RTCPeerConnection ? true : false; }; // Helper method to check whether devices can be accessed by this browser (e.g., not possible via plain HTTP) Janus.isGetUserMediaAvailable = function() { return (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) ? true : false; }; // Helper method to create random identifiers (e.g., transaction) Janus.randomString = function(len) { var charSet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; var randomString = ''; for (var i = 0; i < len; i++) { var randomPoz = Math.floor(Math.random() * charSet.length); randomString += charSet.substring(randomPoz,randomPoz+1); } return randomString; } function Janus(gatewayCallbacks) { gatewayCallbacks = gatewayCallbacks || {}; gatewayCallbacks.success = (typeof gatewayCallbacks.success == "function") ? gatewayCallbacks.success : Janus.noop; gatewayCallbacks.error = (typeof gatewayCallbacks.error == "function") ? gatewayCallbacks.error : Janus.noop; gatewayCallbacks.destroyed = (typeof gatewayCallbacks.destroyed == "function") ? 
gatewayCallbacks.destroyed : Janus.noop; if(!Janus.initDone) { gatewayCallbacks.error("Library not initialized"); return {}; } if(!Janus.isWebrtcSupported()) { gatewayCallbacks.error("WebRTC not supported by this browser"); return {}; } Janus.log("Library initialized: " + Janus.initDone); if(!gatewayCallbacks.server) { gatewayCallbacks.error("Invalid server url"); return {}; } var websockets = false; var ws = null; var wsHandlers = {}; var wsKeepaliveTimeoutId = null; var servers = null, serversIndex = 0; var server = gatewayCallbacks.server; if(Janus.isArray(server)) { Janus.log("Multiple servers provided (" + server.length + "), will use the first that works"); server = null; servers = gatewayCallbacks.server; Janus.debug(servers); } else { if(server.indexOf("ws") === 0) { websockets = true; Janus.log("Using WebSockets to contact Janus: " + server); } else { websockets = false; Janus.log("Using REST API to contact Janus: " + server); } } var iceServers = gatewayCallbacks.iceServers || [{urls: "stun:stun.l.google.com:19302"}]; var iceTransportPolicy = gatewayCallbacks.iceTransportPolicy; var bundlePolicy = gatewayCallbacks.bundlePolicy; // Whether IPv6 candidates should be gathered var ipv6Support = (gatewayCallbacks.ipv6 === true); // Whether we should enable the withCredentials flag for XHR requests var withCredentials = false; if(gatewayCallbacks.withCredentials !== undefined && gatewayCallbacks.withCredentials !== null) withCredentials = gatewayCallbacks.withCredentials === true; // Optional max events var maxev = 10; if(gatewayCallbacks.max_poll_events !== undefined && gatewayCallbacks.max_poll_events !== null) maxev = gatewayCallbacks.max_poll_events; if(maxev < 1) maxev = 1; // Token to use (only if the token based authentication mechanism is enabled) var token = null; if(gatewayCallbacks.token !== undefined && gatewayCallbacks.token !== null) token = gatewayCallbacks.token; // API secret to use (only if the shared API secret is enabled) var apisecret = null; if(gatewayCallbacks.apisecret !== undefined && gatewayCallbacks.apisecret !== null) apisecret = gatewayCallbacks.apisecret; // Whether we should destroy this session when onbeforeunload is called this.destroyOnUnload = true; if(gatewayCallbacks.destroyOnUnload !== undefined && gatewayCallbacks.destroyOnUnload !== null) this.destroyOnUnload = (gatewayCallbacks.destroyOnUnload === true); // Some timeout-related values var keepAlivePeriod = 25000; if(gatewayCallbacks.keepAlivePeriod !== undefined && gatewayCallbacks.keepAlivePeriod !== null) keepAlivePeriod = gatewayCallbacks.keepAlivePeriod; if(isNaN(keepAlivePeriod)) keepAlivePeriod = 25000; var longPollTimeout = 60000; if(gatewayCallbacks.longPollTimeout !== undefined && gatewayCallbacks.longPollTimeout !== null) longPollTimeout = gatewayCallbacks.longPollTimeout; if(isNaN(longPollTimeout)) longPollTimeout = 60000; // overrides for default maxBitrate values for simulcasting function getMaxBitrates(simulcastMaxBitrates) { var maxBitrates = { high: 900000, medium: 300000, low: 100000, }; if (simulcastMaxBitrates !== undefined && simulcastMaxBitrates !== null) { if (simulcastMaxBitrates.high) maxBitrates.high = simulcastMaxBitrates.high; if (simulcastMaxBitrates.medium) maxBitrates.medium = simulcastMaxBitrates.medium; if (simulcastMaxBitrates.low) maxBitrates.low = simulcastMaxBitrates.low; } return maxBitrates; } var connected = false; var sessionId = null; var pluginHandles = {}; var that = this; var retries = 0; var transactions = {}; createSession(gatewayCallbacks); // 
// Public methods
this.getServer = function() { return server; };
this.isConnected = function() { return connected; };
this.reconnect = function(callbacks) {
	callbacks = callbacks || {};
	callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop;
	callbacks.error = (typeof callbacks.error == "function") ? callbacks.error : Janus.noop;
	callbacks["reconnect"] = true;
	createSession(callbacks);
};
this.getSessionId = function() { return sessionId; };
this.destroy = function(callbacks) { destroySession(callbacks); };
this.attach = function(callbacks) { createHandle(callbacks); };

function eventHandler() {
	if(sessionId == null)
		return;
	Janus.debug('Long poll...');
	if(!connected) {
		Janus.warn("Is the server down? (connected=false)");
		return;
	}
	var longpoll = server + "/" + sessionId + "?rid=" + new Date().getTime();
	if(maxev)
		longpoll = longpoll + "&maxev=" + maxev;
	if(token)
		longpoll = longpoll + "&token=" + encodeURIComponent(token);
	if(apisecret)
		longpoll = longpoll + "&apisecret=" + encodeURIComponent(apisecret);
	Janus.httpAPICall(longpoll, {
		verb: 'GET',
		withCredentials: withCredentials,
		success: handleEvent,
		timeout: longPollTimeout,
		error: function(textStatus, errorThrown) {
			Janus.error(textStatus + ":", errorThrown);
			retries++;
			if(retries > 3) {
				// Did we just lose the server? :-(
				connected = false;
				gatewayCallbacks.error("Lost connection to the server (is it down?)");
				return;
			}
			eventHandler();
		}
	});
}

// Private event handler: this will trigger plugin callbacks, if set
function handleEvent(json, skipTimeout) {
	retries = 0;
	if(!websockets && sessionId !== undefined && sessionId !== null && skipTimeout !== true)
		eventHandler();
	if(!websockets && Janus.isArray(json)) {
		// We got an array: it means we passed a maxev > 1, iterate on all objects
		for(var i=0; i<json.length; i++) {
			handleEvent(json[i], true);
		}
		return;
	}

// Private method to create a data channel
function createDataChannel(handleId, dclabel, incoming, pendingData) {
	var pluginHandle = pluginHandles[handleId];
	if(!pluginHandle || !pluginHandle.webrtcStuff) {
		Janus.warn("Invalid handle");
		return;
	}
	var config = pluginHandle.webrtcStuff;
	var onDataChannelMessage = function(event) {
		Janus.log('Received message on data channel:', event);
		var label = event.target.label;
		pluginHandle.ondata(event.data, label);
	};
	var onDataChannelStateChange = function(event) {
		Janus.log('Received state change on data channel:', event);
		var label = event.target.label;
		var dcState = config.dataChannel[label] ? config.dataChannel[label].readyState : "null";
		Janus.log('State change on <' + label + '> data channel: ' + dcState);
		if(dcState === 'open') {
			// Any pending messages to send?
			if(config.dataChannel[label].pending && config.dataChannel[label].pending.length > 0) {
				Janus.log("Sending pending messages on <" + label + ">:", config.dataChannel[label].pending.length);
				for(var data of config.dataChannel[label].pending) {
					Janus.log("Sending data on data channel <" + label + ">");
					Janus.debug(data);
					config.dataChannel[label].send(data);
				}
				config.dataChannel[label].pending = [];
			}
			// Notify the open data channel
			pluginHandle.ondataopen(label);
		}
	}
	var onDataChannelError = function(error) {
		Janus.error('Got error on data channel:', error);
		// TODO
	}
	if(!incoming) {
		// FIXME Add options (ordered, maxRetransmits, etc.)
		config.dataChannel[dclabel] = config.pc.createDataChannel(dclabel, {ordered: true});
	} else {
		// The channel was created by Janus
		config.dataChannel[dclabel] = incoming;
	}
	config.dataChannel[dclabel].onmessage = onDataChannelMessage;
	config.dataChannel[dclabel].onopen = onDataChannelStateChange;
	config.dataChannel[dclabel].onclose = onDataChannelStateChange;
	config.dataChannel[dclabel].onerror = onDataChannelError;
	config.dataChannel[dclabel].pending = [];
	if(pendingData)
		config.dataChannel[dclabel].pending.push(pendingData);
}

// Private method to send a data channel message
function sendData(handleId, callbacks) {
	callbacks = callbacks || {};
	callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop;
	callbacks.error = (typeof callbacks.error == "function") ?
callbacks.error : Janus.noop; var pluginHandle = pluginHandles[handleId]; if(!pluginHandle || !pluginHandle.webrtcStuff) { Janus.warn("Invalid handle"); callbacks.error("Invalid handle"); return; } var config = pluginHandle.webrtcStuff; var data = callbacks.text || callbacks.data; if(!data) { Janus.warn("Invalid data"); callbacks.error("Invalid data"); return; } var label = callbacks.label ? callbacks.label : Janus.dataChanDefaultLabel; if(!config.dataChannel[label]) { // Create new data channel and wait for it to open createDataChannel(handleId, label, false, data); callbacks.success(); return; } if(config.dataChannel[label].readyState !== "open") { config.dataChannel[label].pending.push(data); callbacks.success(); return; } Janus.log("Sending data on data channel <" + label + ">"); Janus.debug(data); config.dataChannel[label].send(data); callbacks.success(); } // Private method to send a DTMF tone function sendDtmf(handleId, callbacks) { callbacks = callbacks || {}; callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop; callbacks.error = (typeof callbacks.error == "function") ? callbacks.error : Janus.noop; var pluginHandle = pluginHandles[handleId]; if(!pluginHandle || !pluginHandle.webrtcStuff) { Janus.warn("Invalid handle"); callbacks.error("Invalid handle"); return; } var config = pluginHandle.webrtcStuff; if(!config.dtmfSender) { // Create the DTMF sender the proper way, if possible if(config.pc) { var senders = config.pc.getSenders(); var audioSender = senders.find(function(sender) { return sender.track && sender.track.kind === 'audio'; }); if(!audioSender) { Janus.warn("Invalid DTMF configuration (no audio track)"); callbacks.error("Invalid DTMF configuration (no audio track)"); return; } config.dtmfSender = audioSender.dtmf; if(config.dtmfSender) { Janus.log("Created DTMF Sender"); config.dtmfSender.ontonechange = function(tone) { Janus.debug("Sent DTMF tone: " + tone.tone); }; } } if(!config.dtmfSender) { Janus.warn("Invalid DTMF configuration"); callbacks.error("Invalid DTMF configuration"); return; } } var dtmf = callbacks.dtmf; if(!dtmf) { Janus.warn("Invalid DTMF parameters"); callbacks.error("Invalid DTMF parameters"); return; } var tones = dtmf.tones; if(!tones) { Janus.warn("Invalid DTMF string"); callbacks.error("Invalid DTMF string"); return; } var duration = (typeof dtmf.duration === 'number') ? dtmf.duration : 500; // We choose 500ms as the default duration for a tone var gap = (typeof dtmf.gap === 'number') ? dtmf.gap : 50; // We choose 50ms as the default gap between tones Janus.debug("Sending DTMF string " + tones + " (duration " + duration + "ms, gap " + gap + "ms)"); config.dtmfSender.insertDTMF(tones, duration, gap); callbacks.success(); } // Private method to destroy a plugin handle function destroyHandle(handleId, callbacks) { callbacks = callbacks || {}; callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop; callbacks.error = (typeof callbacks.error == "function") ? 
callbacks.error : Janus.noop; var noRequest = (callbacks.noRequest === true); Janus.log("Destroying handle " + handleId + " (only-locally=" + noRequest + ")"); cleanupWebrtc(handleId); var pluginHandle = pluginHandles[handleId]; if(!pluginHandle || pluginHandle.detached) { // Plugin was already detached by Janus, calling detach again will return a handle not found error, so just exit here delete pluginHandles[handleId]; callbacks.success(); return; } if(noRequest) { // We're only removing the handle locally delete pluginHandles[handleId]; callbacks.success(); return; } if(!connected) { Janus.warn("Is the server down? (connected=false)"); callbacks.error("Is the server down? (connected=false)"); return; } var request = { "janus": "detach", "transaction": Janus.randomString(12) }; if(pluginHandle.token) request["token"] = pluginHandle.token; if(apisecret) request["apisecret"] = apisecret; if(websockets) { request["session_id"] = sessionId; request["handle_id"] = handleId; ws.send(JSON.stringify(request)); delete pluginHandles[handleId]; callbacks.success(); return; } Janus.httpAPICall(server + "/" + sessionId + "/" + handleId, { verb: 'POST', withCredentials: withCredentials, body: request, success: function(json) { Janus.log("Destroyed handle:"); Janus.debug(json); if(json["janus"] !== "success") { Janus.error("Ooops: " + json["error"].code + " " + json["error"].reason); // FIXME } delete pluginHandles[handleId]; callbacks.success(); }, error: function(textStatus, errorThrown) { Janus.error(textStatus + ":", errorThrown); // FIXME // We cleanup anyway delete pluginHandles[handleId]; callbacks.success(); } }); } // WebRTC stuff function streamsDone(handleId, jsep, media, callbacks, stream) { var pluginHandle = pluginHandles[handleId]; if(!pluginHandle || !pluginHandle.webrtcStuff) { Janus.warn("Invalid handle"); callbacks.error("Invalid handle"); return; } var config = pluginHandle.webrtcStuff; Janus.debug("streamsDone:", stream); if(stream) { Janus.debug(" -- Audio tracks:", stream.getAudioTracks()); Janus.debug(" -- Video tracks:", stream.getVideoTracks()); } // We're now capturing the new stream: check if we're updating or if it's a new thing var addTracks = false; if(!config.myStream || !media.update || config.streamExternal) { config.myStream = stream; addTracks = true; } else { // We only need to update the existing stream if(((!media.update && isAudioSendEnabled(media)) || (media.update && (media.addAudio || media.replaceAudio))) && stream.getAudioTracks() && stream.getAudioTracks().length) { config.myStream.addTrack(stream.getAudioTracks()[0]); if(Janus.unifiedPlan) { // Use Transceivers Janus.log((media.replaceAudio ? "Replacing" : "Adding") + " audio track:", stream.getAudioTracks()[0]); var audioTransceiver = null; var transceivers = config.pc.getTransceivers(); if(transceivers && transceivers.length > 0) { for(var t of transceivers) { if((t.sender && t.sender.track && t.sender.track.kind === "audio") || (t.receiver && t.receiver.track && t.receiver.track.kind === "audio")) { audioTransceiver = t; break; } } } if(audioTransceiver && audioTransceiver.sender) { audioTransceiver.sender.replaceTrack(stream.getAudioTracks()[0]); } else { config.pc.addTrack(stream.getAudioTracks()[0], stream); } } else { Janus.log((media.replaceAudio ? 
"Replacing" : "Adding") + " audio track:", stream.getAudioTracks()[0]); config.pc.addTrack(stream.getAudioTracks()[0], stream); } } if(((!media.update && isVideoSendEnabled(media)) || (media.update && (media.addVideo || media.replaceVideo))) && stream.getVideoTracks() && stream.getVideoTracks().length) { config.myStream.addTrack(stream.getVideoTracks()[0]); if(Janus.unifiedPlan) { // Use Transceivers Janus.log((media.replaceVideo ? "Replacing" : "Adding") + " video track:", stream.getVideoTracks()[0]); var videoTransceiver = null; var transceivers = config.pc.getTransceivers(); if(transceivers && transceivers.length > 0) { for(var t of transceivers) { if((t.sender && t.sender.track && t.sender.track.kind === "video") || (t.receiver && t.receiver.track && t.receiver.track.kind === "video")) { videoTransceiver = t; break; } } } if(videoTransceiver && videoTransceiver.sender) { videoTransceiver.sender.replaceTrack(stream.getVideoTracks()[0]); } else { config.pc.addTrack(stream.getVideoTracks()[0], stream); } } else { Janus.log((media.replaceVideo ? "Replacing" : "Adding") + " video track:", stream.getVideoTracks()[0]); config.pc.addTrack(stream.getVideoTracks()[0], stream); } } } // If we still need to create a PeerConnection, let's do that if(!config.pc) { var pc_config = {"iceServers": iceServers, "iceTransportPolicy": iceTransportPolicy, "bundlePolicy": bundlePolicy}; if(Janus.webRTCAdapter.browserDetails.browser === "chrome") { // For Chrome versions before 72, we force a plan-b semantic, and unified-plan otherwise pc_config["sdpSemantics"] = (Janus.webRTCAdapter.browserDetails.version < 72) ? "plan-b" : "unified-plan"; } var pc_constraints = { "optional": [{"DtlsSrtpKeyAgreement": true}] }; if(ipv6Support) { pc_constraints.optional.push({"googIPv6":true}); } // Any custom constraint to add? 
if(callbacks.rtcConstraints && typeof callbacks.rtcConstraints === 'object') { Janus.debug("Adding custom PeerConnection constraints:", callbacks.rtcConstraints); for(var i in callbacks.rtcConstraints) { pc_constraints.optional.push(callbacks.rtcConstraints[i]); } } if(Janus.webRTCAdapter.browserDetails.browser === "edge") { // This is Edge, enable BUNDLE explicitly pc_config.bundlePolicy = "max-bundle"; } Janus.log("Creating PeerConnection"); Janus.debug(pc_constraints); config.pc = new RTCPeerConnection(pc_config, pc_constraints); Janus.debug(config.pc); if(config.pc.getStats) { // FIXME config.volume = {}; config.bitrate.value = "0 kbits/sec"; } Janus.log("Preparing local SDP and gathering candidates (trickle=" + config.trickle + ")"); config.pc.oniceconnectionstatechange = function(e) { if(config.pc) pluginHandle.iceState(config.pc.iceConnectionState); }; config.pc.onicecandidate = function(event) { if (!event.candidate || (Janus.webRTCAdapter.browserDetails.browser === 'edge' && event.candidate.candidate.indexOf('endOfCandidates') > 0)) { Janus.log("End of candidates."); config.iceDone = true; if(config.trickle === true) { // Notify end of candidates sendTrickleCandidate(handleId, {"completed": true}); } else { // No trickle, time to send the complete SDP (including all candidates) sendSDP(handleId, callbacks); } } else { // JSON.stringify doesn't work on some WebRTC objects anymore // See https://code.google.com/p/chromium/issues/detail?id=467366 var candidate = { "candidate": event.candidate.candidate, "sdpMid": event.candidate.sdpMid, "sdpMLineIndex": event.candidate.sdpMLineIndex }; if(config.trickle === true) { // Send candidate sendTrickleCandidate(handleId, candidate); } } }; config.pc.ontrack = function(event) { Janus.log("Handling Remote Track"); Janus.debug(event); if(!event.streams) return; config.remoteStream = event.streams[0]; pluginHandle.onremotestream(config.remoteStream); if(event.track.onended) return; Janus.log("Adding onended callback to track:", event.track); event.track.onended = function(ev) { Janus.log("Remote track muted/removed:", ev); if(config.remoteStream) { config.remoteStream.removeTrack(ev.target); pluginHandle.onremotestream(config.remoteStream); } }; event.track.onmute = event.track.onended; event.track.onunmute = function(ev) { Janus.log("Remote track flowing again:", ev); try { config.remoteStream.addTrack(ev.target); pluginHandle.onremotestream(config.remoteStream); } catch(e) { Janus.error(e); }; }; }; } if(addTracks && stream) { Janus.log('Adding local stream'); var simulcast2 = (callbacks.simulcast2 === true); stream.getTracks().forEach(function(track) { Janus.log('Adding local track:', track); if(!simulcast2) { config.pc.addTrack(track, stream); } else { if(track.kind === "audio") { config.pc.addTrack(track, stream); } else { Janus.log('Enabling rid-based simulcasting:', track); const maxBitrates = getMaxBitrates(callbacks.simulcastMaxBitrates); config.pc.addTransceiver(track, { direction: "sendrecv", streams: [stream], sendEncodings: [ { rid: "h", active: true, maxBitrate: maxBitrates.high }, { rid: "m", active: true, maxBitrate: maxBitrates.medium, scaleResolutionDownBy: 2 }, { rid: "l", active: true, maxBitrate: maxBitrates.low, scaleResolutionDownBy: 4 } ] }); } } }); } // Any data channel to create? 
if(isDataEnabled(media) && !config.dataChannel[Janus.dataChanDefaultLabel]) { Janus.log("Creating data channel"); createDataChannel(handleId, Janus.dataChanDefaultLabel, false); config.pc.ondatachannel = function(event) { Janus.log("Data channel created by Janus:", event); createDataChannel(handleId, event.channel.label, event.channel); }; } // If there's a new local stream, let's notify the application if(config.myStream) pluginHandle.onlocalstream(config.myStream); // Create offer/answer now if(!jsep) { createOffer(handleId, media, callbacks); } else { config.pc.setRemoteDescription(jsep) .then(function() { Janus.log("Remote description accepted!"); config.remoteSdp = jsep.sdp; // Any trickle candidate we cached? if(config.candidates && config.candidates.length > 0) { for(var i = 0; i< config.candidates.length; i++) { var candidate = config.candidates[i]; Janus.debug("Adding remote candidate:", candidate); if(!candidate || candidate.completed === true) { // end-of-candidates config.pc.addIceCandidate(Janus.endOfCandidates); } else { // New candidate config.pc.addIceCandidate(candidate); } } config.candidates = []; } // Create the answer now createAnswer(handleId, media, callbacks); }, callbacks.error); } } function prepareWebrtc(handleId, offer, callbacks) { callbacks = callbacks || {}; callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop; callbacks.error = (typeof callbacks.error == "function") ? callbacks.error : webrtcError; var jsep = callbacks.jsep; if(offer && jsep) { Janus.error("Provided a JSEP to a createOffer"); callbacks.error("Provided a JSEP to a createOffer"); return; } else if(!offer && (!jsep || !jsep.type || !jsep.sdp)) { Janus.error("A valid JSEP is required for createAnswer"); callbacks.error("A valid JSEP is required for createAnswer"); return; } /* Check that callbacks.media is a (not null) Object */ callbacks.media = (typeof callbacks.media === 'object' && callbacks.media) ? callbacks.media : { audio: true, video: true }; var media = callbacks.media; var pluginHandle = pluginHandles[handleId]; if(!pluginHandle || !pluginHandle.webrtcStuff) { Janus.warn("Invalid handle"); callbacks.error("Invalid handle"); return; } var config = pluginHandle.webrtcStuff; config.trickle = isTrickleEnabled(callbacks.trickle); // Are we updating a session? if(!config.pc) { // Nope, new PeerConnection media.update = false; media.keepAudio = false; media.keepVideo = false; } else { Janus.log("Updating existing media session"); media.update = true; // Check if there's anything to add/remove/replace, or if we // can go directly to preparing the new SDP offer or answer if(callbacks.stream) { // External stream: is this the same as the one we were using before? 
if(callbacks.stream !== config.myStream) { Janus.log("Renegotiation involves a new external stream"); } } else { // Check if there are changes on audio if(media.addAudio) { media.keepAudio = false; media.replaceAudio = false; media.removeAudio = false; media.audioSend = true; if(config.myStream && config.myStream.getAudioTracks() && config.myStream.getAudioTracks().length) { Janus.error("Can't add audio stream, there already is one"); callbacks.error("Can't add audio stream, there already is one"); return; } } else if(media.removeAudio) { media.keepAudio = false; media.replaceAudio = false; media.addAudio = false; media.audioSend = false; } else if(media.replaceAudio) { media.keepAudio = false; media.addAudio = false; media.removeAudio = false; media.audioSend = true; } if(!config.myStream) { // No media stream: if we were asked to replace, it's actually an "add" if(media.replaceAudio) { media.keepAudio = false; media.replaceAudio = false; media.addAudio = true; media.audioSend = true; } if(isAudioSendEnabled(media)) { media.keepAudio = false; media.addAudio = true; } } else { if(!config.myStream.getAudioTracks() || config.myStream.getAudioTracks().length === 0) { // No audio track: if we were asked to replace, it's actually an "add" if(media.replaceAudio) { media.keepAudio = false; media.replaceAudio = false; media.addAudio = true; media.audioSend = true; } if(isAudioSendEnabled(media)) { media.keepVideo = false; media.addAudio = true; } } else { // We have an audio track: should we keep it as it is? if(isAudioSendEnabled(media) && !media.removeAudio && !media.replaceAudio) { media.keepAudio = true; } } } // Check if there are changes on video if(media.addVideo) { media.keepVideo = false; media.replaceVideo = false; media.removeVideo = false; media.videoSend = true; if(config.myStream && config.myStream.getVideoTracks() && config.myStream.getVideoTracks().length) { Janus.error("Can't add video stream, there already is one"); callbacks.error("Can't add video stream, there already is one"); return; } } else if(media.removeVideo) { media.keepVideo = false; media.replaceVideo = false; media.addVideo = false; media.videoSend = false; } else if(media.replaceVideo) { media.keepVideo = false; media.addVideo = false; media.removeVideo = false; media.videoSend = true; } if(!config.myStream) { // No media stream: if we were asked to replace, it's actually an "add" if(media.replaceVideo) { media.keepVideo = false; media.replaceVideo = false; media.addVideo = true; media.videoSend = true; } if(isVideoSendEnabled(media)) { media.keepVideo = false; media.addVideo = true; } } else { if(!config.myStream.getVideoTracks() || config.myStream.getVideoTracks().length === 0) { // No video track: if we were asked to replace, it's actually an "add" if(media.replaceVideo) { media.keepVideo = false; media.replaceVideo = false; media.addVideo = true; media.videoSend = true; } if(isVideoSendEnabled(media)) { media.keepVideo = false; media.addVideo = true; } } else { // We have a video track: should we keep it as it is? 
if(isVideoSendEnabled(media) && !media.removeVideo && !media.replaceVideo) { media.keepVideo = true; } } } // Data channels can only be added if(media.addData) media.data = true; } // If we're updating and keeping all tracks, let's skip the getUserMedia part if((isAudioSendEnabled(media) && media.keepAudio) && (isVideoSendEnabled(media) && media.keepVideo)) { pluginHandle.consentDialog(false); streamsDone(handleId, jsep, media, callbacks, config.myStream); return; } } // If we're updating, check if we need to remove/replace one of the tracks if(media.update && !config.streamExternal) { if(media.removeAudio || media.replaceAudio) { if(config.myStream && config.myStream.getAudioTracks() && config.myStream.getAudioTracks().length) { var s = config.myStream.getAudioTracks()[0]; Janus.log("Removing audio track:", s); config.myStream.removeTrack(s); try { s.stop(); } catch(e) {}; } if(config.pc.getSenders() && config.pc.getSenders().length) { var ra = true; if(media.replaceAudio && Janus.unifiedPlan) { // We can use replaceTrack ra = false; } if(ra) { for(var s of config.pc.getSenders()) { if(s && s.track && s.track.kind === "audio") { Janus.log("Removing audio sender:", s); config.pc.removeTrack(s); } } } } } if(media.removeVideo || media.replaceVideo) { if(config.myStream && config.myStream.getVideoTracks() && config.myStream.getVideoTracks().length) { var s = config.myStream.getVideoTracks()[0]; Janus.log("Removing video track:", s); config.myStream.removeTrack(s); try { s.stop(); } catch(e) {}; } if(config.pc.getSenders() && config.pc.getSenders().length) { var rv = true; if(media.replaceVideo && Janus.unifiedPlan) { // We can use replaceTrack rv = false; } if(rv) { for(var s of config.pc.getSenders()) { if(s && s.track && s.track.kind === "video") { Janus.log("Removing video sender:", s); config.pc.removeTrack(s); } } } } } } // Was a MediaStream object passed, or do we need to take care of that? 
if(callbacks.stream) { var stream = callbacks.stream; Janus.log("MediaStream provided by the application"); Janus.debug(stream); // If this is an update, let's check if we need to release the previous stream if(media.update) { if(config.myStream && config.myStream !== callbacks.stream && !config.streamExternal) { // We're replacing a stream we captured ourselves with an external one try { // Try a MediaStreamTrack.stop() for each track var tracks = config.myStream.getTracks(); for(var mst of tracks) { Janus.log(mst); if(mst) mst.stop(); } } catch(e) { // Do nothing if this fails } config.myStream = null; } } // Skip the getUserMedia part config.streamExternal = true; pluginHandle.consentDialog(false); streamsDone(handleId, jsep, media, callbacks, stream); return; } if(isAudioSendEnabled(media) || isVideoSendEnabled(media)) { if(!Janus.isGetUserMediaAvailable()) { callbacks.error("getUserMedia not available"); return; } var constraints = { mandatory: {}, optional: []}; pluginHandle.consentDialog(true); var audioSupport = isAudioSendEnabled(media); if(audioSupport && media && typeof media.audio === 'object') audioSupport = media.audio; var videoSupport = isVideoSendEnabled(media); if(videoSupport && media) { var simulcast = (callbacks.simulcast === true); var simulcast2 = (callbacks.simulcast2 === true); if((simulcast || simulcast2) && !jsep && !media.video) media.video = "hires"; if(media.video && media.video != 'screen' && media.video != 'window') { if(typeof media.video === 'object') { videoSupport = media.video; } else { var width = 0; var height = 0, maxHeight = 0; if(media.video === 'lowres') { // Small resolution, 4:3 height = 240; maxHeight = 240; width = 320; } else if(media.video === 'lowres-16:9') { // Small resolution, 16:9 height = 180; maxHeight = 180; width = 320; } else if(media.video === 'hires' || media.video === 'hires-16:9' || media.video === 'hdres') { // High(HD) resolution is only 16:9 height = 720; maxHeight = 720; width = 1280; } else if(media.video === 'fhdres') { // Full HD resolution is only 16:9 height = 1080; maxHeight = 1080; width = 1920; } else if(media.video === '4kres') { // 4K resolution is only 16:9 height = 2160; maxHeight = 2160; width = 3840; } else if(media.video === 'stdres') { // Normal resolution, 4:3 height = 480; maxHeight = 480; width = 640; } else if(media.video === 'stdres-16:9') { // Normal resolution, 16:9 height = 360; maxHeight = 360; width = 640; } else { Janus.log("Default video setting is stdres 4:3"); height = 480; maxHeight = 480; width = 640; } Janus.log("Adding media constraint:", media.video); videoSupport = { 'height': {'ideal': height}, 'width': {'ideal': width} }; Janus.log("Adding video constraint:", videoSupport); } } else if(media.video === 'screen' || media.video === 'window') { if(!media.screenshareFrameRate) { media.screenshareFrameRate = 3; } if(navigator.mediaDevices && navigator.mediaDevices.getDisplayMedia) { // The new experimental getDisplayMedia API is available, let's use that // https://groups.google.com/forum/#!topic/discuss-webrtc/Uf0SrR4uxzk // https://webrtchacks.com/chrome-screensharing-getdisplaymedia/ navigator.mediaDevices.getDisplayMedia({ video: true, audio: media.captureDesktopAudio }) .then(function(stream) { pluginHandle.consentDialog(false); if(isAudioSendEnabled(media) && !media.keepAudio) { navigator.mediaDevices.getUserMedia({ audio: true, video: false }) .then(function (audioStream) { stream.addTrack(audioStream.getAudioTracks()[0]); streamsDone(handleId, jsep, media, callbacks, stream); }) } 
else { streamsDone(handleId, jsep, media, callbacks, stream); } }, function (error) { pluginHandle.consentDialog(false); callbacks.error(error); }); return; } // We're going to try and use the extension for Chrome 34+, the old approach // for older versions of Chrome, or the experimental support in Firefox 33+ function callbackUserMedia (error, stream) { pluginHandle.consentDialog(false); if(error) { callbacks.error(error); } else { streamsDone(handleId, jsep, media, callbacks, stream); } }; function getScreenMedia(constraints, gsmCallback, useAudio) { Janus.log("Adding media constraint (screen capture)"); Janus.debug(constraints); navigator.mediaDevices.getUserMedia(constraints) .then(function(stream) { if(useAudio) { navigator.mediaDevices.getUserMedia({ audio: true, video: false }) .then(function (audioStream) { stream.addTrack(audioStream.getAudioTracks()[0]); gsmCallback(null, stream); }) } else { gsmCallback(null, stream); } }) .catch(function(error) { pluginHandle.consentDialog(false); gsmCallback(error); }); }; if(Janus.webRTCAdapter.browserDetails.browser === 'chrome') { var chromever = Janus.webRTCAdapter.browserDetails.version; var maxver = 33; if(window.navigator.userAgent.match('Linux')) maxver = 35; // "known" crash in chrome 34 and 35 on linux if(chromever >= 26 && chromever <= maxver) { // Chrome 26->33 requires some awkward chrome://flags manipulation constraints = { video: { mandatory: { googLeakyBucket: true, maxWidth: window.screen.width, maxHeight: window.screen.height, minFrameRate: media.screenshareFrameRate, maxFrameRate: media.screenshareFrameRate, chromeMediaSource: 'screen' } }, audio: isAudioSendEnabled(media) && !media.keepAudio }; getScreenMedia(constraints, callbackUserMedia); } else { // Chrome 34+ requires an extension Janus.extension.getScreen(function (error, sourceId) { if (error) { pluginHandle.consentDialog(false); return callbacks.error(error); } constraints = { audio: false, video: { mandatory: { chromeMediaSource: 'desktop', maxWidth: window.screen.width, maxHeight: window.screen.height, minFrameRate: media.screenshareFrameRate, maxFrameRate: media.screenshareFrameRate, }, optional: [ {googLeakyBucket: true}, {googTemporalLayeredScreencast: true} ] } }; constraints.video.mandatory.chromeMediaSourceId = sourceId; getScreenMedia(constraints, callbackUserMedia, isAudioSendEnabled(media) && !media.keepAudio); }); } } else if(Janus.webRTCAdapter.browserDetails.browser === 'firefox') { if(Janus.webRTCAdapter.browserDetails.version >= 33) { // Firefox 33+ has experimental support for screen sharing constraints = { video: { mozMediaSource: media.video, mediaSource: media.video }, audio: isAudioSendEnabled(media) && !media.keepAudio }; getScreenMedia(constraints, function (err, stream) { callbackUserMedia(err, stream); // Workaround for https://bugzilla.mozilla.org/show_bug.cgi?id=1045810 if (!err) { var lastTime = stream.currentTime; var polly = window.setInterval(function () { if(!stream) window.clearInterval(polly); if(stream.currentTime == lastTime) { window.clearInterval(polly); if(stream.onended) { stream.onended(); } } lastTime = stream.currentTime; }, 500); } }); } else { var error = new Error('NavigatorUserMediaError'); error.name = 'Your version of Firefox does not support screen sharing, please install Firefox 33 (or more recent versions)'; pluginHandle.consentDialog(false); callbacks.error(error); return; } } return; } } // If we got here, we're not screensharing if(!media || media.video !== 'screen') { // Check whether all media sources are 
// actually available or not
navigator.mediaDevices.enumerateDevices().then(function(devices) {
	var audioExist = devices.some(function(device) {
			return device.kind === 'audioinput';
		}),
		videoExist = isScreenSendEnabled(media) || devices.some(function(device) {
			return device.kind === 'videoinput';
		});
	// Check whether a missing device is really a problem
	var audioSend = isAudioSendEnabled(media);
	var videoSend = isVideoSendEnabled(media);
	var needAudioDevice = isAudioSendRequired(media);
	var needVideoDevice = isVideoSendRequired(media);
	if(audioSend || videoSend || needAudioDevice || needVideoDevice) {
		// We need to send either audio or video
		var haveAudioDevice = audioSend ? audioExist : false;
		var haveVideoDevice = videoSend ? videoExist : false;
		if(!haveAudioDevice && !haveVideoDevice) {
			// FIXME Should we really give up, or just assume recvonly for both?
			pluginHandle.consentDialog(false);
			callbacks.error('No capture device found');
			return false;
		} else if(!haveAudioDevice && needAudioDevice) {
			pluginHandle.consentDialog(false);
			callbacks.error('Audio capture is required, but no capture device found');
			return false;
		} else if(!haveVideoDevice && needVideoDevice) {
			pluginHandle.consentDialog(false);
			callbacks.error('Video capture is required, but no capture device found');
			return false;
		}
	}
	var gumConstraints = {
		audio: (audioExist && !media.keepAudio) ? audioSupport : false,
		video: (videoExist && !media.keepVideo) ? videoSupport : false
	};
	Janus.debug("getUserMedia constraints", gumConstraints);
	if (!gumConstraints.audio && !gumConstraints.video) {
		pluginHandle.consentDialog(false);
		streamsDone(handleId, jsep, media, callbacks, stream);
	} else {
		navigator.mediaDevices.getUserMedia(gumConstraints)
			.then(function(stream) {
				pluginHandle.consentDialog(false);
				streamsDone(handleId, jsep, media, callbacks, stream);
			}).catch(function(error) {
				pluginHandle.consentDialog(false);
				callbacks.error({code: error.code, name: error.name, message: error.message});
			});
	}
})
.catch(function(error) {
	pluginHandle.consentDialog(false);
	callbacks.error('enumerateDevices error', error);
});
	}
} else {
	// No need to do a getUserMedia, create offer/answer right away
	streamsDone(handleId, jsep, media, callbacks);
}
}

function prepareWebrtcPeer(handleId, callbacks) {
	callbacks = callbacks || {};
	callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop;
	callbacks.error = (typeof callbacks.error == "function") ? callbacks.error : webrtcError;
	var jsep = callbacks.jsep;
	var pluginHandle = pluginHandles[handleId];
	if(!pluginHandle || !pluginHandle.webrtcStuff) {
		Janus.warn("Invalid handle");
		callbacks.error("Invalid handle");
		return;
	}
	var config = pluginHandle.webrtcStuff;
	if(jsep) {
		if(!config.pc) {
			Janus.warn("Wait, no PeerConnection?? if this is an answer, use createAnswer and not handleRemoteJsep");
			callbacks.error("No PeerConnection: if this is an answer, use createAnswer and not handleRemoteJsep");
			return;
		}
		config.pc.setRemoteDescription(jsep)
			.then(function() {
				Janus.log("Remote description accepted!");
				config.remoteSdp = jsep.sdp;
				// Any trickle candidate we cached?
if(config.candidates && config.candidates.length > 0) { for(var i = 0; i< config.candidates.length; i++) { var candidate = config.candidates[i]; Janus.debug("Adding remote candidate:", candidate); if(!candidate || candidate.completed === true) { // end-of-candidates config.pc.addIceCandidate(Janus.endOfCandidates); } else { // New candidate config.pc.addIceCandidate(candidate); } } config.candidates = []; } // Done callbacks.success(); }, callbacks.error); } else { callbacks.error("Invalid JSEP"); } } function createOffer(handleId, media, callbacks) { callbacks = callbacks || {}; callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop; callbacks.error = (typeof callbacks.error == "function") ? callbacks.error : Janus.noop; callbacks.customizeSdp = (typeof callbacks.customizeSdp == "function") ? callbacks.customizeSdp : Janus.noop; var pluginHandle = pluginHandles[handleId]; if(!pluginHandle || !pluginHandle.webrtcStuff) { Janus.warn("Invalid handle"); callbacks.error("Invalid handle"); return; } var config = pluginHandle.webrtcStuff; var simulcast = (callbacks.simulcast === true); if(!simulcast) { Janus.log("Creating offer (iceDone=" + config.iceDone + ")"); } else { Janus.log("Creating offer (iceDone=" + config.iceDone + ", simulcast=" + simulcast + ")"); } // https://code.google.com/p/webrtc/issues/detail?id=3508 var mediaConstraints = {}; if(Janus.unifiedPlan) { // We can use Transceivers var audioTransceiver = null, videoTransceiver = null; var transceivers = config.pc.getTransceivers(); if(transceivers && transceivers.length > 0) { for(var t of transceivers) { if((t.sender && t.sender.track && t.sender.track.kind === "audio") || (t.receiver && t.receiver.track && t.receiver.track.kind === "audio")) { if(!audioTransceiver) audioTransceiver = t; continue; } if((t.sender && t.sender.track && t.sender.track.kind === "video") || (t.receiver && t.receiver.track && t.receiver.track.kind === "video")) { if(!videoTransceiver) videoTransceiver = t; continue; } } } // Handle audio (and related changes, if any) var audioSend = isAudioSendEnabled(media); var audioRecv = isAudioRecvEnabled(media); if(!audioSend && !audioRecv) { // Audio disabled: have we removed it? 
if(media.removeAudio && audioTransceiver) { if (audioTransceiver.setDirection) { audioTransceiver.setDirection("inactive"); } else { audioTransceiver.direction = "inactive"; } Janus.log("Setting audio transceiver to inactive:", audioTransceiver); } } else { // Take care of audio m-line if(audioSend && audioRecv) { if(audioTransceiver) { if (audioTransceiver.setDirection) { audioTransceiver.setDirection("sendrecv"); } else { audioTransceiver.direction = "sendrecv"; } Janus.log("Setting audio transceiver to sendrecv:", audioTransceiver); } } else if(audioSend && !audioRecv) { if(audioTransceiver) { if (audioTransceiver.setDirection) { audioTransceiver.setDirection("sendonly"); } else { audioTransceiver.direction = "sendonly"; } Janus.log("Setting audio transceiver to sendonly:", audioTransceiver); } } else if(!audioSend && audioRecv) { if(audioTransceiver) { if (audioTransceiver.setDirection) { audioTransceiver.setDirection("recvonly"); } else { audioTransceiver.direction = "recvonly"; } Janus.log("Setting audio transceiver to recvonly:", audioTransceiver); } else { // In theory, this is the only case where we might not have a transceiver yet audioTransceiver = config.pc.addTransceiver("audio", { direction: "recvonly" }); Janus.log("Adding recvonly audio transceiver:", audioTransceiver); } } } // Handle video (and related changes, if any) var videoSend = isVideoSendEnabled(media); var videoRecv = isVideoRecvEnabled(media); if(!videoSend && !videoRecv) { // Video disabled: have we removed it? if(media.removeVideo && videoTransceiver) { if (videoTransceiver.setDirection) { videoTransceiver.setDirection("inactive"); } else { videoTransceiver.direction = "inactive"; } Janus.log("Setting video transceiver to inactive:", videoTransceiver); } } else { // Take care of video m-line if(videoSend && videoRecv) { if(videoTransceiver) { if (videoTransceiver.setDirection) { videoTransceiver.setDirection("sendrecv"); } else { videoTransceiver.direction = "sendrecv"; } Janus.log("Setting video transceiver to sendrecv:", videoTransceiver); } } else if(videoSend && !videoRecv) { if(videoTransceiver) { if (videoTransceiver.setDirection) { videoTransceiver.setDirection("sendonly"); } else { videoTransceiver.direction = "sendonly"; } Janus.log("Setting video transceiver to sendonly:", videoTransceiver); } } else if(!videoSend && videoRecv) { if(videoTransceiver) { if (videoTransceiver.setDirection) { videoTransceiver.setDirection("recvonly"); } else { videoTransceiver.direction = "recvonly"; } Janus.log("Setting video transceiver to recvonly:", videoTransceiver); } else { // In theory, this is the only case where we might not have a transceiver yet videoTransceiver = config.pc.addTransceiver("video", { direction: "recvonly" }); Janus.log("Adding recvonly video transceiver:", videoTransceiver); } } } } else { mediaConstraints["offerToReceiveAudio"] = isAudioRecvEnabled(media); mediaConstraints["offerToReceiveVideo"] = isVideoRecvEnabled(media); } var iceRestart = (callbacks.iceRestart === true); if(iceRestart) { mediaConstraints["iceRestart"] = true; } Janus.debug(mediaConstraints); // Check if this is Firefox and we've been asked to do simulcasting var sendVideo = isVideoSendEnabled(media); if(sendVideo && simulcast && Janus.webRTCAdapter.browserDetails.browser === "firefox") { // FIXME Based on https://gist.github.com/voluntas/088bc3cc62094730647b Janus.log("Enabling Simulcasting for Firefox (RID)"); var sender = config.pc.getSenders().find(function(s) {return s.track.kind == "video"}); if(sender) { var 
parameters = sender.getParameters(); if(!parameters) parameters = {}; const maxBitrates = getMaxBitrates(callbacks.simulcastMaxBitrates); parameters.encodings = [ { rid: "h", active: true, maxBitrate: maxBitrates.high }, { rid: "m", active: true, maxBitrate: maxBitrates.medium, scaleResolutionDownBy: 2 }, { rid: "l", active: true, maxBitrate: maxBitrates.low, scaleResolutionDownBy: 4 } ]; sender.setParameters(parameters); } } config.pc.createOffer(mediaConstraints) .then(function(offer) { Janus.debug(offer); // JSON.stringify doesn't work on some WebRTC objects anymore // See https://code.google.com/p/chromium/issues/detail?id=467366 var jsep = { "type": offer.type, "sdp": offer.sdp }; callbacks.customizeSdp(jsep); offer.sdp = jsep.sdp; Janus.log("Setting local description"); if(sendVideo && simulcast) { // This SDP munging only works with Chrome (Safari STP may support it too) if(Janus.webRTCAdapter.browserDetails.browser === "chrome" || Janus.webRTCAdapter.browserDetails.browser === "safari") { Janus.log("Enabling Simulcasting for Chrome (SDP munging)"); offer.sdp = mungeSdpForSimulcasting(offer.sdp); } else if(Janus.webRTCAdapter.browserDetails.browser !== "firefox") { Janus.warn("simulcast=true, but this is not Chrome nor Firefox, ignoring"); } } config.mySdp = offer.sdp; config.pc.setLocalDescription(offer) .catch(callbacks.error); config.mediaConstraints = mediaConstraints; if(!config.iceDone && !config.trickle) { // Don't do anything until we have all candidates Janus.log("Waiting for all candidates..."); return; } Janus.log("Offer ready"); Janus.debug(callbacks); callbacks.success(offer); }, callbacks.error); } function createAnswer(handleId, media, callbacks) { callbacks = callbacks || {}; callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop; callbacks.error = (typeof callbacks.error == "function") ? callbacks.error : Janus.noop; callbacks.customizeSdp = (typeof callbacks.customizeSdp == "function") ? callbacks.customizeSdp : Janus.noop; var pluginHandle = pluginHandles[handleId]; if(!pluginHandle || !pluginHandle.webrtcStuff) { Janus.warn("Invalid handle"); callbacks.error("Invalid handle"); return; } var config = pluginHandle.webrtcStuff; var simulcast = (callbacks.simulcast === true); if(!simulcast) { Janus.log("Creating answer (iceDone=" + config.iceDone + ")"); } else { Janus.log("Creating answer (iceDone=" + config.iceDone + ", simulcast=" + simulcast + ")"); } var mediaConstraints = null; if(Janus.unifiedPlan) { // We can use Transceivers mediaConstraints = {}; var audioTransceiver = null, videoTransceiver = null; var transceivers = config.pc.getTransceivers(); if(transceivers && transceivers.length > 0) { for(var t of transceivers) { if((t.sender && t.sender.track && t.sender.track.kind === "audio") || (t.receiver && t.receiver.track && t.receiver.track.kind === "audio")) { if(!audioTransceiver) audioTransceiver = t; continue; } if((t.sender && t.sender.track && t.sender.track.kind === "video") || (t.receiver && t.receiver.track && t.receiver.track.kind === "video")) { if(!videoTransceiver) videoTransceiver = t; continue; } } } // Handle audio (and related changes, if any) var audioSend = isAudioSendEnabled(media); var audioRecv = isAudioRecvEnabled(media); if(!audioSend && !audioRecv) { // Audio disabled: have we removed it? 
if(media.removeAudio && audioTransceiver) { try { if (audioTransceiver.setDirection) { audioTransceiver.setDirection("inactive"); } else { audioTransceiver.direction = "inactive"; } Janus.log("Setting audio transceiver to inactive:", audioTransceiver); } catch(e) { Janus.error(e); } } } else { // Take care of audio m-line if(audioSend && audioRecv) { if(audioTransceiver) { try { if (audioTransceiver.setDirection) { audioTransceiver.setDirection("sendrecv"); } else { audioTransceiver.direction = "sendrecv"; } Janus.log("Setting audio transceiver to sendrecv:", audioTransceiver); } catch(e) { Janus.error(e); } } } else if(audioSend && !audioRecv) { try { if(audioTransceiver) { if (audioTransceiver.setDirection) { audioTransceiver.setDirection("sendonly"); } else { audioTransceiver.direction = "sendonly"; } Janus.log("Setting audio transceiver to sendonly:", audioTransceiver); } } catch(e) { Janus.error(e); } } else if(!audioSend && audioRecv) { if(audioTransceiver) { try { if (audioTransceiver.setDirection) { audioTransceiver.setDirection("recvonly"); } else { audioTransceiver.direction = "recvonly"; } Janus.log("Setting audio transceiver to recvonly:", audioTransceiver); } catch(e) { Janus.error(e); } } else { // In theory, this is the only case where we might not have a transceiver yet audioTransceiver = config.pc.addTransceiver("audio", { direction: "recvonly" }); Janus.log("Adding recvonly audio transceiver:", audioTransceiver); } } } // Handle video (and related changes, if any) var videoSend = isVideoSendEnabled(media); var videoRecv = isVideoRecvEnabled(media); if(!videoSend && !videoRecv) { // Video disabled: have we removed it? if(media.removeVideo && videoTransceiver) { try { if (videoTransceiver.setDirection) { videoTransceiver.setDirection("inactive"); } else { videoTransceiver.direction = "inactive"; } Janus.log("Setting video transceiver to inactive:", videoTransceiver); } catch(e) { Janus.error(e); } } } else { // Take care of video m-line if(videoSend && videoRecv) { if(videoTransceiver) { try { if (videoTransceiver.setDirection) { videoTransceiver.setDirection("sendrecv"); } else { videoTransceiver.direction = "sendrecv"; } Janus.log("Setting video transceiver to sendrecv:", videoTransceiver); } catch(e) { Janus.error(e); } } } else if(videoSend && !videoRecv) { if(videoTransceiver) { try { if (videoTransceiver.setDirection) { videoTransceiver.setDirection("sendonly"); } else { videoTransceiver.direction = "sendonly"; } Janus.log("Setting video transceiver to sendonly:", videoTransceiver); } catch(e) { Janus.error(e); } } } else if(!videoSend && videoRecv) { if(videoTransceiver) { try { if (videoTransceiver.setDirection) { videoTransceiver.setDirection("recvonly"); } else { videoTransceiver.direction = "recvonly"; } Janus.log("Setting video transceiver to recvonly:", videoTransceiver); } catch(e) { Janus.error(e); } } else { // In theory, this is the only case where we might not have a transceiver yet videoTransceiver = config.pc.addTransceiver("video", { direction: "recvonly" }); Janus.log("Adding recvonly video transceiver:", videoTransceiver); } } } } else { if(Janus.webRTCAdapter.browserDetails.browser === "firefox" || Janus.webRTCAdapter.browserDetails.browser === "edge") { mediaConstraints = { offerToReceiveAudio: isAudioRecvEnabled(media), offerToReceiveVideo: isVideoRecvEnabled(media) }; } else { mediaConstraints = { mandatory: { OfferToReceiveAudio: isAudioRecvEnabled(media), OfferToReceiveVideo: isVideoRecvEnabled(media) } }; } } Janus.debug(mediaConstraints); // 
Check if this is Firefox and we've been asked to do simulcasting var sendVideo = isVideoSendEnabled(media); if(sendVideo && simulcast && Janus.webRTCAdapter.browserDetails.browser === "firefox") { // FIXME Based on https://gist.github.com/voluntas/088bc3cc62094730647b Janus.log("Enabling Simulcasting for Firefox (RID)"); var sender = config.pc.getSenders()[1]; Janus.log(sender); var parameters = sender.getParameters(); Janus.log(parameters); const maxBitrates = getMaxBitrates(callbacks.simulcastMaxBitrates); sender.setParameters({encodings: [ { rid: "high", active: true, priority: "high", maxBitrate: maxBitrates.high }, { rid: "medium", active: true, priority: "medium", maxBitrate: maxBitrates.medium }, { rid: "low", active: true, priority: "low", maxBitrate: maxBitrates.low } ]}); } config.pc.createAnswer(mediaConstraints) .then(function(answer) { Janus.debug(answer); // JSON.stringify doesn't work on some WebRTC objects anymore // See https://code.google.com/p/chromium/issues/detail?id=467366 var jsep = { "type": answer.type, "sdp": answer.sdp }; callbacks.customizeSdp(jsep); answer.sdp = jsep.sdp; Janus.log("Setting local description"); if(sendVideo && simulcast) { // This SDP munging only works with Chrome if(Janus.webRTCAdapter.browserDetails.browser === "chrome") { // FIXME Apparently trying to simulcast when answering breaks video in Chrome... //~ Janus.log("Enabling Simulcasting for Chrome (SDP munging)"); //~ answer.sdp = mungeSdpForSimulcasting(answer.sdp); Janus.warn("simulcast=true, but this is an answer, and video breaks in Chrome if we enable it"); } else if(Janus.webRTCAdapter.browserDetails.browser !== "firefox") { Janus.warn("simulcast=true, but this is not Chrome nor Firefox, ignoring"); } } config.mySdp = answer.sdp; config.pc.setLocalDescription(answer) .catch(callbacks.error); config.mediaConstraints = mediaConstraints; if(!config.iceDone && !config.trickle) { // Don't do anything until we have all candidates Janus.log("Waiting for all candidates..."); return; } callbacks.success(answer); }, callbacks.error); } function sendSDP(handleId, callbacks) { callbacks = callbacks || {}; callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop; callbacks.error = (typeof callbacks.error == "function") ? callbacks.error : Janus.noop; var pluginHandle = pluginHandles[handleId]; if(!pluginHandle || !pluginHandle.webrtcStuff) { Janus.warn("Invalid handle, not sending anything"); return; } var config = pluginHandle.webrtcStuff; Janus.log("Sending offer/answer SDP..."); if(!config.mySdp) { Janus.warn("Local SDP instance is invalid, not sending anything..."); return; } config.mySdp = { "type": config.pc.localDescription.type, "sdp": config.pc.localDescription.sdp }; if(config.trickle === false) config.mySdp["trickle"] = false; Janus.debug(callbacks); config.sdpSent = true; callbacks.success(config.mySdp); } function getVolume(handleId, remote) { var pluginHandle = pluginHandles[handleId]; if(!pluginHandle || !pluginHandle.webrtcStuff) { Janus.warn("Invalid handle"); return 0; } var stream = remote ? 
"remote" : "local"; var config = pluginHandle.webrtcStuff; if(!config.volume[stream]) config.volume[stream] = { value: 0 }; // Start getting the volume, if getStats is supported if(config.pc.getStats && Janus.webRTCAdapter.browserDetails.browser === "chrome") { if(remote && !config.remoteStream) { Janus.warn("Remote stream unavailable"); return 0; } else if(!remote && !config.myStream) { Janus.warn("Local stream unavailable"); return 0; } if(!config.volume[stream].timer) { Janus.log("Starting " + stream + " volume monitor"); config.volume[stream].timer = setInterval(function() { config.pc.getStats() .then(function(stats) { var results = stats.result(); for(var i=0; i -1) && res.type === "inbound-rtp" && res.id.indexOf("rtcp") < 0) { // New stats inStats = true; } else if(res.type == 'ssrc' && res.bytesReceived && (res.googCodecName === "VP8" || res.googCodecName === "")) { // Older Chromer versions inStats = true; } // Parse stats now if(inStats) { config.bitrate.bsnow = res.bytesReceived; config.bitrate.tsnow = res.timestamp; if(config.bitrate.bsbefore === null || config.bitrate.tsbefore === null) { // Skip this round config.bitrate.bsbefore = config.bitrate.bsnow; config.bitrate.tsbefore = config.bitrate.tsnow; } else { // Calculate bitrate var timePassed = config.bitrate.tsnow - config.bitrate.tsbefore; if(Janus.webRTCAdapter.browserDetails.browser === "safari") timePassed = timePassed/1000; // Apparently the timestamp is in microseconds, in Safari var bitRate = Math.round((config.bitrate.bsnow - config.bitrate.bsbefore) * 8 / timePassed); if(Janus.webRTCAdapter.browserDetails.browser === "safari") bitRate = parseInt(bitRate/1000); config.bitrate.value = bitRate + ' kbits/sec'; //~ Janus.log("Estimated bitrate is " + config.bitrate.value); config.bitrate.bsbefore = config.bitrate.bsnow; config.bitrate.tsbefore = config.bitrate.tsnow; } } }); }); }, 1000); return "0 kbits/sec"; // We don't have a bitrate value yet } return config.bitrate.value; } else { Janus.warn("Getting the video bitrate unsupported by browser"); return "Feature unsupported by browser"; } } function webrtcError(error) { Janus.error("WebRTC error:", error); } function cleanupWebrtc(handleId, hangupRequest) { Janus.log("Cleaning WebRTC stuff"); var pluginHandle = pluginHandles[handleId]; if(!pluginHandle) { // Nothing to clean return; } var config = pluginHandle.webrtcStuff; if(config) { if(hangupRequest === true) { // Send a hangup request (we don't really care about the response) var request = { "janus": "hangup", "transaction": Janus.randomString(12) }; if(pluginHandle.token) request["token"] = pluginHandle.token; if(apisecret) request["apisecret"] = apisecret; Janus.debug("Sending hangup request (handle=" + handleId + "):"); Janus.debug(request); if(websockets) { request["session_id"] = sessionId; request["handle_id"] = handleId; ws.send(JSON.stringify(request)); } else { Janus.httpAPICall(server + "/" + sessionId + "/" + handleId, { verb: 'POST', withCredentials: withCredentials, body: request }); } } // Cleanup stack config.remoteStream = null; if(config.volume) { if(config.volume["local"] && config.volume["local"].timer) clearInterval(config.volume["local"].timer); if(config.volume["remote"] && config.volume["remote"].timer) clearInterval(config.volume["remote"].timer); } config.volume = {}; if(config.bitrate.timer) clearInterval(config.bitrate.timer); config.bitrate.timer = null; config.bitrate.bsnow = null; config.bitrate.bsbefore = null; config.bitrate.tsnow = null; config.bitrate.tsbefore = null; 
config.bitrate.value = null; try { // Try a MediaStreamTrack.stop() for each track if(!config.streamExternal && config.myStream) { Janus.log("Stopping local stream tracks"); var tracks = config.myStream.getTracks(); for(var mst of tracks) { Janus.log(mst); if(mst) mst.stop(); } } } catch(e) { // Do nothing if this fails } config.streamExternal = false; config.myStream = null; // Close PeerConnection try { config.pc.close(); } catch(e) { // Do nothing } config.pc = null; config.candidates = null; config.mySdp = null; config.remoteSdp = null; config.iceDone = false; config.dataChannel = {}; config.dtmfSender = null; } pluginHandle.oncleanup(); } // Helper method to munge an SDP to enable simulcasting (Chrome only) function mungeSdpForSimulcasting(sdp) { // Let's munge the SDP to add the attributes for enabling simulcasting // (based on https://gist.github.com/ggarber/a19b4c33510028b9c657) var lines = sdp.split("\r\n"); var video = false; var ssrc = [ -1 ], ssrc_fid = [ -1 ]; var cname = null, msid = null, mslabel = null, label = null; var insertAt = -1; for(var i=0; i -1) { // We're done, let's add the new attributes here insertAt = i; break; } } continue; } if(!video) continue; var fid = lines[i].match(/a=ssrc-group:FID (\d+) (\d+)/); if(fid) { ssrc[0] = fid[1]; ssrc_fid[0] = fid[2]; lines.splice(i, 1); i--; continue; } if(ssrc[0]) { var match = lines[i].match('a=ssrc:' + ssrc[0] + ' cname:(.+)') if(match) { cname = match[1]; } match = lines[i].match('a=ssrc:' + ssrc[0] + ' msid:(.+)') if(match) { msid = match[1]; } match = lines[i].match('a=ssrc:' + ssrc[0] + ' mslabel:(.+)') if(match) { mslabel = match[1]; } match = lines[i].match('a=ssrc:' + ssrc[0] + ' label:(.+)') if(match) { label = match[1]; } if(lines[i].indexOf('a=ssrc:' + ssrc_fid[0]) === 0) { lines.splice(i, 1); i--; continue; } if(lines[i].indexOf('a=ssrc:' + ssrc[0]) === 0) { lines.splice(i, 1); i--; continue; } } if(lines[i].length == 0) { lines.splice(i, 1); i--; continue; } } if(ssrc[0] < 0) { // Couldn't find a FID attribute, let's just take the first video SSRC we find insertAt = -1; video = false; for(var i=0; i -1) { // We're done, let's add the new attributes here insertAt = i; break; } } continue; } if(!video) continue; if(ssrc[0] < 0) { var value = lines[i].match(/a=ssrc:(\d+)/); if(value) { ssrc[0] = value[1]; lines.splice(i, 1); i--; continue; } } else { var match = lines[i].match('a=ssrc:' + ssrc[0] + ' cname:(.+)') if(match) { cname = match[1]; } match = lines[i].match('a=ssrc:' + ssrc[0] + ' msid:(.+)') if(match) { msid = match[1]; } match = lines[i].match('a=ssrc:' + ssrc[0] + ' mslabel:(.+)') if(match) { mslabel = match[1]; } match = lines[i].match('a=ssrc:' + ssrc[0] + ' label:(.+)') if(match) { label = match[1]; } if(lines[i].indexOf('a=ssrc:' + ssrc_fid[0]) === 0) { lines.splice(i, 1); i--; continue; } if(lines[i].indexOf('a=ssrc:' + ssrc[0]) === 0) { lines.splice(i, 1); i--; continue; } } if(lines[i].length == 0) { lines.splice(i, 1); i--; continue; } } } if(ssrc[0] < 0) { // Still nothing, let's just return the SDP we were asked to munge Janus.warn("Couldn't find the video SSRC, simulcasting NOT enabled"); return sdp; } if(insertAt < 0) { // Append at the end insertAt = lines.length; } // Generate a couple of SSRCs (for retransmissions too) // Note: should we check if there are conflicts, here? 
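// --- Editor's note: illustrative only (comment, never executed, not part of the
// original library). The munging below ends up appending, at insertAt, SSRC attributes
// for three simulcast layers plus their retransmission SSRCs, roughly of this shape
// (placeholder SSRC values; cname/msid/mslabel/label are the ones harvested above):
/*
a=ssrc-group:SIM 111111 222222 333333
a=ssrc-group:FID 111111 444444
a=ssrc-group:FID 222222 555555
a=ssrc-group:FID 333333 666666
a=ssrc:111111 cname:<cname>
a=ssrc:111111 msid:<msid>
a=ssrc:444444 cname:<cname>
...and the same cname/msid/mslabel/label lines for each of the six SSRCs
*/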
ssrc[1] = Math.floor(Math.random()*0xFFFFFFFF); ssrc[2] = Math.floor(Math.random()*0xFFFFFFFF); ssrc_fid[1] = Math.floor(Math.random()*0xFFFFFFFF); ssrc_fid[2] = Math.floor(Math.random()*0xFFFFFFFF); // Add attributes to the SDP for(var i=0; in&&(n=e.maxptime)}),0n.sctp.maxMessageSize)throw new TypeError("Message too large (can send a maximum of "+n.sctp.maxMessageSize+" bytes)");return i.apply(r,arguments)}}}}},{"./utils":14,sdp:2}],8:[function(e,t,r){"use strict";var i=e("../utils"),a=e("./filtericeservers"),o=e("rtcpeerconnection-shim");t.exports={shimGetUserMedia:e("./getusermedia"),shimPeerConnection:function(e){var t=i.detectBrowser(e);if(e.RTCIceGatherer&&(e.RTCIceCandidate||(e.RTCIceCandidate=function(e){return e}),e.RTCSessionDescription||(e.RTCSessionDescription=function(e){return e}),t.version<15025)){var r=Object.getOwnPropertyDescriptor(e.MediaStreamTrack.prototype,"enabled");Object.defineProperty(e.MediaStreamTrack.prototype,"enabled",{set:function(e){r.set.call(this,e);var t=new Event("enabled");t.enabled=e,this.dispatchEvent(t)}})}!e.RTCRtpSender||"dtmf"in e.RTCRtpSender.prototype||Object.defineProperty(e.RTCRtpSender.prototype,"dtmf",{get:function(){return void 0===this._dtmf&&("audio"===this.track.kind?this._dtmf=new e.RTCDtmfSender(this):"video"===this.track.kind&&(this._dtmf=null)),this._dtmf}}),e.RTCDtmfSender&&!e.RTCDTMFSender&&(e.RTCDTMFSender=e.RTCDtmfSender);var n=o(e,t.version);e.RTCPeerConnection=function(e){return e&&e.iceServers&&(e.iceServers=a(e.iceServers)),new n(e)},e.RTCPeerConnection.prototype=n.prototype},shimReplaceTrack:function(e){!e.RTCRtpSender||"replaceTrack"in e.RTCRtpSender.prototype||(e.RTCRtpSender.prototype.replaceTrack=e.RTCRtpSender.prototype.setTrack)}}},{"../utils":14,"./filtericeservers":9,"./getusermedia":10,"rtcpeerconnection-shim":1}],9:[function(e,t,r){"use strict";var a=e("../utils");t.exports=function(e,n){var i=!1;return(e=JSON.parse(JSON.stringify(e))).filter(function(e){if(e&&(e.urls||e.url)){var t=e.urls||e.url;e.url&&!e.urls&&a.deprecated("RTCIceServer.url","RTCIceServer.urls");var r="string"==typeof t;return r&&(t=[t]),t=t.filter(function(e){return 0===e.indexOf("turn:")&&-1!==e.indexOf("transport=udp")&&-1===e.indexOf("turn:[")&&!i?i=!0:0===e.indexOf("stun:")&&14393<=n&&-1===e.indexOf("?transport=udp")}),delete e.url,e.urls=r?t[0]:t,!!t.length}})}},{"../utils":14}],10:[function(e,t,r){"use strict";t.exports=function(e){var t=e&&e.navigator,r=t.mediaDevices.getUserMedia.bind(t.mediaDevices);t.mediaDevices.getUserMedia=function(e){return r(e).catch(function(e){return Promise.reject({name:{PermissionDeniedError:"NotAllowedError"}[(t=e).name]||t.name,message:t.message,constraint:t.constraint,toString:function(){return this.name}});var t})}}},{}],11:[function(e,t,r){"use strict";var n=e("../utils");t.exports={shimGetUserMedia:e("./getusermedia"),shimOnTrack:function(e){"object"!=typeof e||!e.RTCPeerConnection||"ontrack"in e.RTCPeerConnection.prototype||Object.defineProperty(e.RTCPeerConnection.prototype,"ontrack",{get:function(){return this._ontrack},set:function(e){this._ontrack&&(this.removeEventListener("track",this._ontrack),this.removeEventListener("addstream",this._ontrackpoly)),this.addEventListener("track",this._ontrack=e),this.addEventListener("addstream",this._ontrackpoly=function(r){r.stream.getTracks().forEach(function(e){var t=new 
Event("track");t.track=e,t.receiver={track:e},t.transceiver={receiver:t.receiver},t.streams=[r.stream],this.dispatchEvent(t)}.bind(this))}.bind(this))},enumerable:!0,configurable:!0}),"object"==typeof e&&e.RTCTrackEvent&&"receiver"in e.RTCTrackEvent.prototype&&!("transceiver"in e.RTCTrackEvent.prototype)&&Object.defineProperty(e.RTCTrackEvent.prototype,"transceiver",{get:function(){return{receiver:this.receiver}}})},shimSourceObject:function(e){"object"==typeof e&&(!e.HTMLMediaElement||"srcObject"in e.HTMLMediaElement.prototype||Object.defineProperty(e.HTMLMediaElement.prototype,"srcObject",{get:function(){return this.mozSrcObject},set:function(e){this.mozSrcObject=e}}))},shimPeerConnection:function(s){var c=n.detectBrowser(s);if("object"==typeof s&&(s.RTCPeerConnection||s.mozRTCPeerConnection)){s.RTCPeerConnection||(s.RTCPeerConnection=function(e,t){if(c.version<38&&e&&e.iceServers){for(var r=[],n=0;n=r&&parseInt(n[r],10)}t.exports={extractVersion:a,wrapPeerConnectionEvent:function(e,n,i){if(e.RTCPeerConnection){var t=e.RTCPeerConnection.prototype,a=t.addEventListener;t.addEventListener=function(e,r){if(e!==n)return a.apply(this,arguments);var t=function(e){var t=i(e);t&&r(t)};return this._eventMap=this._eventMap||{},this._eventMap[r]=t,a.apply(this,[e,t])};var o=t.removeEventListener;t.removeEventListener=function(e,t){if(e!==n||!this._eventMap||!this._eventMap[t])return o.apply(this,arguments);var r=this._eventMap[t];return delete this._eventMap[t],o.apply(this,[e,r])},Object.defineProperty(t,"on"+n,{get:function(){return this["_on"+n]},set:function(e){this["_on"+n]&&(this.removeEventListener(n,this["_on"+n]),delete this["_on"+n]),e&&this.addEventListener(n,this["_on"+n]=e)},enumerable:!0,configurable:!0})}},disableLog:function(e){return"boolean"!=typeof e?new Error("Argument type: "+typeof e+". Please use a boolean."):(n=e)?"adapter.js logging disabled":"adapter.js logging enabled"},disableWarnings:function(e){return"boolean"!=typeof e?new Error("Argument type: "+typeof e+". 
Please use a boolean."):(i=!e,"adapter.js deprecation warnings "+(e?"disabled":"enabled"))},log:function(){if("object"==typeof window){if(n)return;"undefined"!=typeof console&&"function"==typeof console.log&&console.log.apply(console,arguments)}},deprecated:function(e,t){i&&console.warn(e+" is deprecated, please use "+t+" instead.")},detectBrowser:function(e){var t=e&&e.navigator,r={browser:null,version:null};if(void 0===e||!e.navigator)return r.browser="Not a browser.",r;if(t.mozGetUserMedia)r.browser="firefox",r.version=a(t.userAgent,/Firefox\/(\d+)\./,1);else if(t.webkitGetUserMedia)r.browser="chrome",r.version=a(t.userAgent,/Chrom(e|ium)\/(\d+)\./,2);else if(t.mediaDevices&&t.userAgent.match(/Edge\/(\d+).(\d+)$/))r.browser="edge",r.version=a(t.userAgent,/Edge\/(\d+).(\d+)$/,2);else{if(!e.RTCPeerConnection||!t.userAgent.match(/AppleWebKit\/(\d+)\./))return r.browser="Not a supported browser.",r;r.browser="safari",r.version=a(t.userAgent,/AppleWebKit\/(\d+)\./,1)}return r}}},{}]},{},[3])(3)}); (function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.mp3MediaRecorder={}))})(this,function(a){'use strict';function b(a){var b=t.get(a);if(null==b)throw new TypeError("'this' is expected an Event object, but got another value.");return b}function c(a){return null==a.passiveListener?void(!a.event.cancelable||(a.canceled=!0,"function"==typeof a.event.preventDefault&&a.event.preventDefault())):void("undefined"!=typeof console&&"function"==typeof console.error&&console.error("Unable to preventDefault inside passive event listener invocation.",a.passiveListener))}function d(a,b){t.set(this,{eventTarget:a,event:b,eventPhase:2,currentTarget:a,canceled:!1,stopped:!1,immediateStopped:!1,passiveListener:null,timeStamp:b.timeStamp||Date.now()}),Object.defineProperty(this,"isTrusted",{value:!1,enumerable:!0});for(var c,d=Object.keys(b),f=0;fa.vmsg_flush(b))throw new Error("flush_failed");var c=new Uint32Array(g.buffer,b+4,1)[0],d=new Uint32Array(g.buffer,b+8,1)[0],e=new Uint8Array(g.buffer,c,d),f=new Blob([e],{type:"audio/mpeg"});return a.vmsg_free(b),f},o=function(d){if(i){c.set(d);var e=a.vmsg_encode(b,d.length);if(0>e)throw new Error("encoding_failed")}};f.onmessage=function(b){var c=b.data;try{switch(c.type){case"INIT_WORKER":{var d=l();k(c.wasmURL,d).then(function(b){a=b.instance.exports,f.postMessage({type:"WORKER_READY"})}).catch(function(a){f.postMessage({type:"ERROR",error:a.message})});break}case"START_RECORDING":{m(c.config),f.postMessage({type:"WORKER_RECORDING"});break}case"DATA_AVAILABLE":{o(c.data);break}case"STOP_RECORDING":{var e=n();f.postMessage({type:"BLOB_READY",blob:e});break}}}catch(a){f.postMessage({type:"ERROR",error:a.message})}}},E="audio/mpeg",F=window.AudioContext||window.webkitAudioContext,G=function(a){return(a.createGain||a.createGainNode).call(a)},H=function(a){return(a.createScriptProcessor||a.createJavaScriptNode).call(a,4096,1,1)};a.getMp3MediaRecorder=function(a){var b=new Blob(["("+D.toString()+")()"],{type:"application/javascript"}),c=new Worker(URL.createObjectURL(b)),d=function(a){function b(b,d){var e=this;void 0===d&&(d={});var f=d.audioContext;a.call(this),this.mimeType=E,this.state="inactive",this.audioBitsPerSecond=0,this.videoBitsPerSecond=0,this.onWorkerMessage=function(a){var b=a.data;switch(b.type){case y.WORKER_RECORDING:{var c=new Event("start");e.dispatchEvent(c),e.state="recording";break}case y.ERROR:{var d=new Error(b.error),f=new 
Event("error");f.error=d;var g=window.MediaRecorderErrorEvent?new MediaRecorderErrorEvent("error",{error:d}):f;e.dispatchEvent(g),e.state="inactive";break}case y.BLOB_READY:{var h=new Event("stop"),i=new Event("dataavailable");i.data=b.blob,i.timecode=Date.now();var j=window.BlobEvent?new BlobEvent("dataavailable",{data:b.blob,timecode:Date.now()}):i;e.dispatchEvent(j),e.dispatchEvent(h),e.state="inactive";break}}},this.stream=b,this.audioContext=f||new F,this.sourceNode=this.audioContext.createMediaStreamSource(b),this.gainNode=G(this.audioContext),this.gainNode.gain.value=1,this.processorNode=H(this.audioContext),this.sourceNode.connect(this.gainNode),this.gainNode.connect(this.processorNode),c.onmessage=this.onWorkerMessage}return a&&(b.__proto__=a),b.prototype=Object.create(a&&a.prototype),b.prototype.constructor=b,b.prototype.start=function(){if("inactive"!==this.state)throw this.getStateError("start");this.processorNode.onaudioprocess=function(a){c.postMessage(B(a.inputBuffer.getChannelData(0)))},this.processorNode.connect(this.audioContext.destination),this.audioContext.resume(),c.postMessage(A({sampleRate:this.audioContext.sampleRate}))},b.prototype.stop=function(){if("recording"!==this.state)throw this.getStateError("stop");this.processorNode.disconnect(),this.audioContext.suspend(),c.postMessage(C())},b.prototype.pause=function(){if("recording"!==this.state)throw this.getStateError("pause");this.audioContext.suspend(),this.state="paused",this.dispatchEvent(new Event("pause"))},b.prototype.resume=function(){if("paused"!==this.state)throw this.getStateError("resume");this.audioContext.resume(),this.state="recording",this.dispatchEvent(new Event("resume"))},b.prototype.requestData=function(){},b.prototype.getStateError=function(a){return new Error("Uncaught DOMException: Failed to execute '"+a+"' on 'MediaRecorder': The MediaRecorder's state is '"+this.state+"'.")},b}(s);return d.isTypeSupported=function(a){return a===E},q(d.prototype,"start"),q(d.prototype,"stop"),q(d.prototype,"pause"),q(d.prototype,"resume"),q(d.prototype,"dataavailable"),q(d.prototype,"error"),new Promise(function(b,e){var f=new URL(a.wasmURL,window.location.origin).href;c.postMessage(z(f)),c.onmessage=function(a){var c=a.data;if(c.type===y.WORKER_READY)b(d);else{var f=c.type===y.ERROR?c.error:"Unknown error occurred ";e(f)}}})},Object.defineProperty(a,"__esModule",{value:!0})}); //# sourceMappingURL=mp3-mediarecorder.umd.js.map // Last time updated: 2019-06-21 4:09:42 AM UTC // ________________________ // MultiStreamsMixer v1.2.2 // Open-Sourced: https://github.com/muaz-khan/MultiStreamsMixer // -------------------------------------------------- // Muaz Khan - www.MuazKhan.com // MIT License - www.WebRTC-Experiment.com/licence // -------------------------------------------------- function MultiStreamsMixer(arrayOfMediaStreams,elementClass){function setSrcObject(stream,element){"srcObject"in element?element.srcObject=stream:"mozSrcObject"in element?element.mozSrcObject=stream:element.srcObject=stream}function drawVideosToCanvas(){if(!isStopDrawingFrames){var videosLength=videos.length,fullcanvas=!1,remaining=[];if(videos.forEach(function(video){video.stream||(video.stream={}),video.stream.fullcanvas?fullcanvas=video:remaining.push(video)}),fullcanvas)canvas.width=fullcanvas.stream.width,canvas.height=fullcanvas.stream.height;else if(remaining.length){canvas.width=videosLength>1?2*remaining[0].width:remaining[0].width;var 
height=1;3!==videosLength&&4!==videosLength||(height=2),5!==videosLength&&6!==videosLength||(height=3),7!==videosLength&&8!==videosLength||(height=4),9!==videosLength&&10!==videosLength||(height=5),canvas.height=remaining[0].height*height}else canvas.width=self.width||360,canvas.height=self.height||240;fullcanvas&&fullcanvas instanceof HTMLVideoElement&&drawImage(fullcanvas),remaining.forEach(function(video,idx){drawImage(video,idx)}),setTimeout(drawVideosToCanvas,self.frameInterval)}}function drawImage(video,idx){if(!isStopDrawingFrames){var x=0,y=0,width=video.width,height=video.height;1===idx&&(x=video.width),2===idx&&(y=video.height),3===idx&&(x=video.width,y=video.height),4===idx&&(y=2*video.height),5===idx&&(x=video.width,y=2*video.height),6===idx&&(y=3*video.height),7===idx&&(x=video.width,y=3*video.height),"undefined"!=typeof video.stream.left&&(x=video.stream.left),"undefined"!=typeof video.stream.top&&(y=video.stream.top),"undefined"!=typeof video.stream.width&&(width=video.stream.width),"undefined"!=typeof video.stream.height&&(height=video.stream.height),context.drawImage(video,x,y,width,height),"function"==typeof video.stream.onRender&&video.stream.onRender(context,x,y,width,height,idx)}}function getMixedStream(){isStopDrawingFrames=!1;var mixedVideoStream=getMixedVideoStream(),mixedAudioStream=getMixedAudioStream();mixedAudioStream&&mixedAudioStream.getTracks().filter(function(t){return"audio"===t.kind}).forEach(function(track){mixedVideoStream.addTrack(track)});var fullcanvas;return arrayOfMediaStreams.forEach(function(stream){stream.fullcanvas&&(fullcanvas=!0)}),mixedVideoStream}function getMixedVideoStream(){resetVideoStreams();var capturedStream;"captureStream"in canvas?capturedStream=canvas.captureStream():"mozCaptureStream"in canvas?capturedStream=canvas.mozCaptureStream():self.disableLogs||console.error("Upgrade to latest Chrome or otherwise enable this flag: chrome://flags/#enable-experimental-web-platform-features");var videoStream=new MediaStream;return capturedStream.getTracks().filter(function(t){return"video"===t.kind}).forEach(function(track){videoStream.addTrack(track)}),canvas.stream=videoStream,videoStream}function getMixedAudioStream(){Storage.AudioContextConstructor||(Storage.AudioContextConstructor=new Storage.AudioContext),self.audioContext=Storage.AudioContextConstructor,self.audioSources=[],self.useGainNode===!0&&(self.gainNode=self.audioContext.createGain(),self.gainNode.connect(self.audioContext.destination),self.gainNode.gain.value=0);var audioTracksLength=0;if(arrayOfMediaStreams.forEach(function(stream){if(stream.getTracks().filter(function(t){return"audio"===t.kind}).length){audioTracksLength++;var audioSource=self.audioContext.createMediaStreamSource(stream);self.useGainNode===!0&&audioSource.connect(self.gainNode),self.audioSources.push(audioSource)}}),audioTracksLength)return self.audioDestination=self.audioContext.createMediaStreamDestination(),self.audioSources.forEach(function(audioSource){audioSource.connect(self.audioDestination)}),self.audioDestination.stream}function getVideo(stream){var video=document.createElement("video");return setSrcObject(stream,video),video.className=elementClass,video.muted=!0,video.volume=0,video.width=stream.width||self.width||360,video.height=stream.height||self.height||240,video.play(),video}function resetVideoStreams(streams){videos=[],streams=streams||arrayOfMediaStreams,streams.forEach(function(stream){if(stream.getTracks().filter(function(t){return"video"===t.kind}).length){var 
video=getVideo(stream);video.stream=stream,videos.push(video)}})}var browserFakeUserAgent="Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45";!function(that){"undefined"==typeof RecordRTC&&that&&"undefined"==typeof window&&"undefined"!=typeof global&&(global.navigator={userAgent:browserFakeUserAgent,getUserMedia:function(){}},global.console||(global.console={}),"undefined"!=typeof global.console.log&&"undefined"!=typeof global.console.error||(global.console.error=global.console.log=global.console.log||function(){console.log(arguments)}),"undefined"==typeof document&&(that.document={documentElement:{appendChild:function(){return""}}},document.createElement=document.captureStream=document.mozCaptureStream=function(){var obj={getContext:function(){return obj},play:function(){},pause:function(){},drawImage:function(){},toDataURL:function(){return""},style:{}};return obj},that.HTMLVideoElement=function(){}),"undefined"==typeof location&&(that.location={protocol:"file:",href:"",hash:""}),"undefined"==typeof screen&&(that.screen={width:0,height:0}),"undefined"==typeof URL&&(that.URL={createObjectURL:function(){return""},revokeObjectURL:function(){return""}}),that.window=global)}("undefined"!=typeof global?global:null),elementClass=elementClass||"multi-streams-mixer";var videos=[],isStopDrawingFrames=!1,canvas=document.createElement("canvas"),context=canvas.getContext("2d");canvas.style.opacity=0,canvas.style.position="absolute",canvas.style.zIndex=-1,canvas.style.top="-1000em",canvas.style.left="-1000em",canvas.className=elementClass,(document.body||document.documentElement).appendChild(canvas),this.disableLogs=!1,this.frameInterval=10,this.width=360,this.height=240,this.useGainNode=!0;var self=this,AudioContext=window.AudioContext;"undefined"==typeof AudioContext&&("undefined"!=typeof webkitAudioContext&&(AudioContext=webkitAudioContext),"undefined"!=typeof mozAudioContext&&(AudioContext=mozAudioContext));var URL=window.URL;"undefined"==typeof URL&&"undefined"!=typeof webkitURL&&(URL=webkitURL),"undefined"!=typeof navigator&&"undefined"==typeof navigator.getUserMedia&&("undefined"!=typeof navigator.webkitGetUserMedia&&(navigator.getUserMedia=navigator.webkitGetUserMedia),"undefined"!=typeof navigator.mozGetUserMedia&&(navigator.getUserMedia=navigator.mozGetUserMedia));var MediaStream=window.MediaStream;"undefined"==typeof MediaStream&&"undefined"!=typeof webkitMediaStream&&(MediaStream=webkitMediaStream),"undefined"!=typeof MediaStream&&"undefined"==typeof MediaStream.prototype.stop&&(MediaStream.prototype.stop=function(){this.getTracks().forEach(function(track){track.stop()})});var Storage={};"undefined"!=typeof AudioContext?Storage.AudioContext=AudioContext:"undefined"!=typeof webkitAudioContext&&(Storage.AudioContext=webkitAudioContext),this.startDrawingFrames=function(){drawVideosToCanvas()},this.appendStreams=function(streams){if(!streams)throw"First parameter is required.";streams instanceof Array||(streams=[streams]),streams.forEach(function(stream){var newStream=new MediaStream;if(stream.getTracks().filter(function(t){return"video"===t.kind}).length){var video=getVideo(stream);video.stream=stream,videos.push(video),newStream.addTrack(stream.getTracks().filter(function(t){return"video"===t.kind})[0])}if(stream.getTracks().filter(function(t){return"audio"===t.kind}).length){var 
audioSource=self.audioContext.createMediaStreamSource(stream);self.audioDestination=self.audioContext.createMediaStreamDestination(),audioSource.connect(self.audioDestination),newStream.addTrack(self.audioDestination.stream.getTracks().filter(function(t){return"audio"===t.kind})[0])}arrayOfMediaStreams.push(newStream)})},this.releaseStreams=function(){videos=[],isStopDrawingFrames=!0,self.gainNode&&(self.gainNode.disconnect(),self.gainNode=null),self.audioSources.length&&(self.audioSources.forEach(function(source){source.disconnect()}),self.audioSources=[]),self.audioDestination&&(self.audioDestination.disconnect(),self.audioDestination=null),self.audioContext&&self.audioContext.close(),self.audioContext=null,context.clearRect(0,0,canvas.width,canvas.height),canvas.stream&&(canvas.stream.stop(),canvas.stream=null)},this.resetVideoStreams=function(streams){!streams||streams instanceof Array||(streams=[streams]),resetVideoStreams(streams)},this.name="MultiStreamsMixer",this.toString=function(){return this.name},this.getMixedStream=getMixedStream}"undefined"==typeof RecordRTC&&("undefined"!=typeof module&&(module.exports=MultiStreamsMixer),"function"==typeof define&&define.amd&&define("MultiStreamsMixer",[],function(){return MultiStreamsMixer})); Notification.requestPermission().then(function(a){ if (a!="granted") console.error("Debes dar permisos para notificaciones del navegador si quieres ver las notificaciones de Telsome WebPhone"); }); var telsome_webphone={ jserver:"", version:"1.2.10", debug:false, isRegistered:false, init:false ,_ringtones:{},callid:null,config:{}, alt_orgs: [16817,117], servers: ["wss:\/\/habla2.telsome.es\/wsjanus","wss:\/\/habla2.telsome.es\/wsjanus"]}; telsome_webphone._helper = { subscriptions: {},sipcall_helper:[],blfstatus:{}}; telsome_webphone._helper.log = function(...args){ if (telsome_webphone.debug) console.log(...args); }; telsome_webphone._server = "https://www.telsome.es"; telsome_webphone._helper.setDB = function(name,value) { var ret = true; try { localStorage.setItem("_telsome_webphone_"+name,JSON.stringify(value)); }catch(e){ ret = false; telsome_webphone._helper.log(e); } return ret; } telsome_webphone._helper.getDB = function(name) { var value = localStorage.getItem("_telsome_webphone_"+name); try { value = JSON.parse(value); } catch(e){ value = null; telsome_webphone._helper.log(e); } return value; } telsome_webphone._helper.setCookie = function(name,value,days) { var expires = ""; if (typeof days=="undefined"){ days=10000; } if (days) { var date = new Date(); date.setTime(date.getTime() + (days*24*60*60*1000)); expires = "; expires=" + date.toUTCString(); } document.cookie = "_telsome_webphone_"+name + "=" + (value || "") + expires + "; path=/"; } telsome_webphone._helper.getCookie = function(name) { var nameEQ = "_telsome_webphone_"+name + "="; var ca = document.cookie.split(';'); for(var i=0;i < ca.length;i++) { var c = ca[i]; while (c.charAt(0)==' ') c = c.substring(1,c.length); if (c.indexOf(nameEQ) == 0) return c.substring(nameEQ.length,c.length); } return null; } telsome_webphone._helper.retrieveConfig=function(){ var elements={"dialer":1,"notifications":1,"visible":1,"blf":btoa(JSON.stringify({ })),"defaultTransfer":false,"callerid":false}; for (var c in elements){ telsome_webphone.config[c] = telsome_webphone._helper.getCookie("__telsome_webphone."+c); if (telsome_webphone.config[c]===null){ telsome_webphone.config[c]=elements[c]; telsome_webphone._helper.setCookie("__telsome_webphone."+c,elements[c]); } if (c=="blf") { 
telsome_webphone.config[c] = JSON.parse(atob(telsome_webphone.config[c])); } } }();
telsome_webphone._helper.setConfig=function(param,val){ telsome_webphone.config[param]=val; telsome_webphone._helper.saveConfig(); }
telsome_webphone._helper.saveConfig=function(){ var elements={"dialer":1,"notifications":1,"visible":1,"blf":1,"defaultTransfer":1,"callerid":false}; for (var c in elements){ if (c=="blf"){ telsome_webphone._helper.setCookie("__telsome_webphone."+c,btoa(JSON.stringify(telsome_webphone.config[c]))); } else { telsome_webphone._helper.setCookie("__telsome_webphone."+c,telsome_webphone.config[c]); } } };
telsome_webphone._helper.escapeHtml = function(text) { var map = { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#039;' }; return text.replace(/[&<>"']/g, function(m) { return map[m]; }); }
// Recordings
telsome_webphone._helper.Mp3MediaRecorder=null;
window.mp3MediaRecorder.getMp3MediaRecorder({ wasmURL: 'https://unpkg.com/vmsg@0.3.5/vmsg.wasm' }).then(recorderClass => { telsome_webphone._helper.Mp3MediaRecorder = recorderClass; });
telsome_webphone._helper.blobs=[]; telsome_webphone._helper.mediaStream=null; telsome_webphone._helper.recorder=null; telsome_webphone._helper.isRecording = false;
telsome_webphone._helper.stopRecording = function(){ telsome_webphone.$("#_telsome_webphone_settings_record").removeClass("recording"); if (telsome_webphone._helper.isRecording) telsome_webphone._helper.recorder.stop(); };
telsome_webphone._helper.startRecording = function(){ const audioMixer = new MultiStreamsMixer([telsome_webphone.remotestream, telsome_webphone.localstream]); var stream = audioMixer.getMixedStream(); telsome_webphone._helper.mediaStream = stream; telsome_webphone._helper.recorder = new telsome_webphone._helper.Mp3MediaRecorder(stream); telsome_webphone._helper.recorder.start(); telsome_webphone._helper.isRecording=true; telsome_webphone.$("#_telsome_webphone_settings_record").addClass("recording");
telsome_webphone._helper.recorder.onstart = e => { telsome_webphone._helper.log('Inicio de grabacion', e); telsome_webphone._helper.eventDispatch("recordStart",{"did":telsome_webphone._helper.did}); telsome_webphone._helper.blobs = []; };
telsome_webphone._helper.recorder.ondataavailable = e => { telsome_webphone._helper.log('Grabacion: ondataavailable', e); telsome_webphone._helper.blobs.push(e.data); };
telsome_webphone._helper.recorder.onstop = e => { telsome_webphone._helper.log('Grabacion: onstop', e); telsome_webphone._helper.mediaStream.getTracks().forEach(track => track.stop()); telsome_webphone._helper.isRecording=false; var d=new Date(); d = d.toISOString(); const mp3Blob = new Blob(telsome_webphone._helper.blobs, { type: 'audio/mpeg' }); const mp3BlobUrl = URL.createObjectURL(mp3Blob);
//var url = (window.URL || window.webkitURL).createObjectURL(mp3BlobUrl);
var link = document.getElementById("_telsome_webphone_recording"); link.href = mp3BlobUrl; link.download="Recording_"+telsome_webphone.ext.split("*")[1]+"_"+telsome_webphone._helper.did+"_"+d+".mp3"; telsome_webphone._helper.eventDispatch("recordStop",{"did":telsome_webphone._helper.did,"file":link.download}); link.click(); window.URL.revokeObjectURL(mp3BlobUrl); };
telsome_webphone._helper.recorder.onpause = e => { telsome_webphone._helper.log('Grabacion: onpause', e); };
telsome_webphone._helper.recorder.onresume = e => { telsome_webphone._helper.log('Grabacion: onresume', e); };
telsome_webphone._helper.recorder.onerror = e => { telsome_webphone._helper.log('Grabacion: onerror', e); }; };
// Sounds
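// --- Editor's note: illustrative sketch only (kept in a comment, never executed, not
// part of the original code). The recording helpers above boil down to: mix both legs
// of the call with MultiStreamsMixer, feed the mixed stream to a MediaRecorder-style
// recorder, collect the chunks, and hand back a downloadable Blob URL. A condensed,
// self-contained version (recordCall and onFileReady are hypothetical names; a plain
// MediaRecorder stands in for Mp3MediaRecorder, which mirrors the same interface):
/*
function recordCall(localStream, remoteStream, onFileReady) {
    var mixer = new MultiStreamsMixer([remoteStream, localStream]);
    var mixed = mixer.getMixedStream();
    var chunks = [];
    var rec = new MediaRecorder(mixed);
    rec.ondataavailable = function(e) { chunks.push(e.data); };
    rec.onstop = function() {
        mixed.getTracks().forEach(function(t) { t.stop(); });
        onFileReady(URL.createObjectURL(new Blob(chunks, { type: rec.mimeType })));
    };
    rec.start();
    return rec; // call rec.stop() later to finish and trigger onFileReady
}
*/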
telsome_webphone._helper.AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext;
telsome_webphone._helper.Tone = function(context, freq1, freq2) { this.context = context; this.status = 0; this.freq1 = freq1; this.freq2 = freq2; }
telsome_webphone._helper.Tone.prototype.setup = function(){ this.osc1 = telsome_webphone._helper.context.createOscillator(); this.osc2 = telsome_webphone._helper.context.createOscillator(); this.osc1.frequency.value = this.freq1; this.osc2.frequency.value = this.freq2; this.gainNode = this.context.createGain(); this.gainNode.gain.value = 0.25; this.filter = this.context.createBiquadFilter(); this.filter.type = "lowpass"; this.filter.frequency.value = 8000; this.osc1.connect(this.gainNode); this.osc2.connect(this.gainNode); this.gainNode.connect(this.filter); this.filter.connect(telsome_webphone._helper.context.destination); }
telsome_webphone._helper.Tone.prototype.start = function(){ this.setup(); this.osc1.start(0); this.osc2.start(0); this.status = 1; }
telsome_webphone._helper.Tone.prototype.stop = function(){ this.osc1.stop(0); this.osc2.stop(0); this.status = 0; }
telsome_webphone._helper.dtmfFrequencies = { "1": {f1: 697, f2: 1209}, "2": {f1: 697, f2: 1336}, "3": {f1: 697, f2: 1477}, "4": {f1: 770, f2: 1209}, "5": {f1: 770, f2: 1336}, "6": {f1: 770, f2: 1477}, "7": {f1: 852, f2: 1209}, "8": {f1: 852, f2: 1336}, "9": {f1: 852, f2: 1477}, "*": {f1: 941, f2: 1209}, "0": {f1: 941, f2: 1336}, "#": {f1: 941, f2: 1477} }
telsome_webphone._helper.context = new AudioContext();
// Create a new Tone instance. (We've initialised it with
// frequencies of 350 and 440 but it doesn't really matter
// what we choose because we will be changing them in the
// function below)
telsome_webphone._helper.dtmf = new telsome_webphone._helper.Tone(telsome_webphone._helper.context, 350, 440);
telsome_webphone._helper.dtmfPlay = function(dial){ var keyPressed = ""+dial; // this gets the number/character that was pressed
var frequencyPair = telsome_webphone._helper.dtmfFrequencies[keyPressed]; // this looks up which frequency pair we need
// this sets the freq1 and freq2 properties
telsome_webphone._helper.dtmf.freq1 = frequencyPair.f1; telsome_webphone._helper.dtmf.freq2 = frequencyPair.f2; if (telsome_webphone._helper.dtmf.status == 0){ telsome_webphone._helper.dtmf.start(); setTimeout(function(){telsome_webphone._helper.dtmf.stop();},200); } };
telsome_webphone._helper.eraseCookie=function(name) { document.cookie = "_telsome_webphone_"+name+'=; Max-Age=-99999999;'; }
telsome_webphone._helper.setCalling=function(isCalling) { if (isCalling){ telsome_webphone.$("#_telsome_webphone").addClass("calling"); telsome_webphone.$("#_telsome_webphone_output").attr("disabled","disabled"); telsome_webphone.$("#_telsome_webphone_transfer_info").hide(); telsome_webphone.$("#_telsome_webphone_transfer").removeClass("_telsome_webphone_open"); } else { telsome_webphone._helper.stopRecording(); if (telsome_webphone._ringtones && telsome_webphone._ringtones.incoming) {telsome_webphone._ringtones.incoming.pause();telsome_webphone._ringtones.incoming2.pause();}; telsome_webphone.$("#_telsome_webphone").removeClass("calling incall"); telsome_webphone.$("#_telsome_webphone_output").val("").change(); telsome_webphone.$("#_telsome_webphone_output").removeAttr("disabled"); if (telsome_webphone._helper.isHidden) telsome_webphone.hide(); telsome_webphone.$("#_telsome_webphone_transfer_info").hide();
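// --- Editor's note: illustrative sketch only (comment, never executed, not part of
// the original code). The Tone helper and dtmfFrequencies table above implement
// standard DTMF: every key is the sum of one low-group and one high-group sine
// (e.g. "5" = 770 Hz + 1336 Hz), played for roughly 200 ms. Condensed into a single
// hypothetical helper:
/*
function playDtmf(digit, audioCtx, durationMs) {
    var pair = telsome_webphone._helper.dtmfFrequencies[digit];
    if(!pair) return;
    var osc1 = audioCtx.createOscillator(), osc2 = audioCtx.createOscillator();
    osc1.frequency.value = pair.f1;
    osc2.frequency.value = pair.f2;
    var gain = audioCtx.createGain();
    gain.gain.value = 0.25;
    osc1.connect(gain);
    osc2.connect(gain);
    gain.connect(audioCtx.destination);
    osc1.start();
    osc2.start();
    setTimeout(function() { osc1.stop(); osc2.stop(); }, durationMs || 200);
}
// e.g. playDtmf("5", telsome_webphone._helper.context);
*/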
telsome_webphone.$("#_telsome_webphone_transfer").removeClass("_telsome_webphone_open"); } } telsome_webphone._helper.setInCall=function(isCalling,name) { if (isCalling){ if (telsome_webphone.$("#_telsome_webphone").is("._telsome_webphone_hidden")){telsome_webphone.show();} telsome_webphone.$("#_telsome_webphone_incominghangup").hide(); telsome_webphone.$("#_telsome_webphone_accept,#_telsome_webphone_reject").show(); telsome_webphone.$("#_telsome_webphone_incomingcalldid").text(name).attr("title",name); telsome_webphone.$("#_telsome_webphone_incall_gotodialer").hide(); telsome_webphone.$("#_telsome_webphone").addClass("incall"); try{ telsome_webphone._ringtones.incoming.play(); }catch(e){} } else { telsome_webphone.$("#_telsome_webphone").removeClass("incall"); } } telsome_webphone._helper.setInCall2=function(isCalling,name) { if (isCalling){ telsome_webphone.$("#_telsome_webphone_newcall_info").show().addClass("incall"); telsome_webphone.$("#_telsome_webphone_newcall_destination").val(name).attr("disabled","disabled"); telsome_webphone.$("#_telsome_webphone_call2").removeAttr("disabled"); try{ //tono 2 telsome_webphone._ringtones.incoming2.play(); }catch(e){} } else { telsome_webphone.$("#_telsome_webphone_newcall_destination").val("").removeAttr("disabled","disabled"); telsome_webphone.$("#_telsome_webphone_newcall_info").hide().removeClass("incall"); } } telsome_webphone._helper.notification=function(titulo,body,callback){ try{ if (!telsome_webphone.config.notifications) return; if (Notification.permission !== 'granted'){ Notification.requestPermission(); } else { if (telsome_webphone._active_notification) telsome_webphone._active_notification.close(); telsome_webphone._active_notification = new Notification(titulo, { icon: 'https://www.telsome.es/pub/other/favicon-64x64.png', body: body, }); if (typeof callback!="undefined" && callback) telsome_webphone._active_notification.onclick = callback; } } catch(e){return false;} } telsome_webphone._helper.setActiveCall = function (_callToActive,_value){ switch(_callToActive){ case 2: if (!_value){ telsome_webphone.$("#_telsome_webphone_newcall_info").slideUp(); } if (_value) { telsome_webphone.active2=telsome_webphone.active2?telsome_webphone.active2:_value; } else { telsome_webphone.active2=false; if (telsome_webphone.hold1) telsome_webphone._helper.unhold(1); } telsome_webphone.hold2=false; telsome_webphone.calling2=false; break; default: if (!_value && !telsome_webphone.active2){ telsome_webphone.$("#_telsome_webphone_newcall_info").slideUp(); } if (_value) { telsome_webphone.active1=telsome_webphone.active1?telsome_webphone.active1:_value; } else { telsome_webphone.active1=false; } telsome_webphone.hold1=false; telsome_webphone.calling1=false; break; } if (!_value){ // Si estabamos en conference, volver a poner el audio original en la otra llamada if (telsome_webphone.conference){ try{ switch(_callToActive){ case 2: if (telsome_webphone.active1){ telsome_webphone.sipcall.webrtcStuff.pc.getSenders()[0].replaceTrack(telsome_webphone.localstream); } break; default: if (telsome_webphone.active2){ telsome_webphone._helper.sipcall_helper[0].webrtcStuff.pc.getSenders()[0].replaceTrack(telsome_webphone.localstream2) } break; } } catch(e){console.error(e);} } telsome_webphone.conference=false; } telsome_webphone._helper.showOps(); } telsome_webphone._helper.setConference = function(){ // Habilitar conferencia si están las dos lineas if (!telsome_webphone.conference && telsome_webphone.active1 && telsome_webphone.active2){ 
telsome_webphone._helper.audioMixer1 = new MultiStreamsMixer([telsome_webphone.localstream, telsome_webphone.remotestream2]); telsome_webphone._helper.audioMixer2 = new MultiStreamsMixer([telsome_webphone.localstream2, telsome_webphone.remotestream]); // Buscar el stream de audio var tracks = telsome_webphone._helper.audioMixer1.getMixedStream().getTracks(); var audiotrack1=null; for(var i=0;i -1); doVideo = false;//(jsep.sdp.indexOf("m=video ") > -1); Janus.debug("Audio " + (doAudio ? "has" : "has NOT") + " been negotiated"); Janus.debug("Video " + (doVideo ? "has" : "has NOT") + " been negotiated"); } else { Janus.log("This call doesn't contain an offer... we'll need to provide one ourselves"); offerlessInvite = true; // In case you want to offer video when reacting to an offerless call, set this to true doVideo = false; } // Is this the result of a transfer? var transfer = ""; var referredBy = result["referred_by"]; if(referredBy) transfer = " (referred by " + referredBy + ")"; // Any security offered? A missing "srtp" attribute means plain RTP var rtpType = ""; var srtp = result["srtp"]; if(srtp === "sdes_optional") rtpType = " (SDES-SRTP offered)"; else if(srtp === "sdes_mandatory") rtpType = " (SDES-SRTP mandatory)"; telsome_webphone._helper.setCalling(true,did_entrante); telsome_webphone._helper.did2=did_entrante; telsome_webphone._helper.setInCall2(true,did_entrante); telsome_webphone._helper.aceptarLlamada2= function(){ telsome_webphone.$("#_telsome_webphone_newcall_info").removeClass("incall"); telsome_webphone._ringtones.incoming2.pause(); var sipcallAction = (offerlessInvite ? telsome_webphone._helper.sipcall_helper[0].createOffer : telsome_webphone._helper.sipcall_helper[0].createAnswer); sipcallAction( { jsep: jsep, media: { audio: doAudio, video: doVideo }, success: function(jsep) { Janus.debug("Got SDP " + jsep.type + "! audio=" + doAudio + ", video=" + doVideo); Janus.debug(jsep); var body = { request: "accept" }; telsome_webphone._helper.hold(1); telsome_webphone._helper.setActiveCall(2,2); telsome_webphone._helper.sipcall_helper[0].send({"message": body, "jsep": jsep}); }, error: function(error) { Janus.error("WebRTC error:", error); alert("WebRTC error... 
" + JSON.stringify(error)); // Don't keep the caller waiting any longer, but use a 480 instead of the default 486 to clarify the cause var body = { "request": "decline", "code": 480 }; telsome_webphone._helper.sipcall_helper[0].send({"message": body}); telsome_webphone._helper.setActiveCall(2,false); } }); } telsome_webphone._helper.eventDispatch("receivingCall",{"did":did_entrante}); telsome_webphone._helper.notification("Llamada entrante", "Llamada entrante de "+did_entrante + transfer + rtpType ); } else if(event === 'transferring') { telsome_webphone._helper.log("[Call2] Transfiriendo...",result); telsome_webphone._helper.log("Cerrando llamada 1"); telsome_webphone._helper.doHangup(); telsome_webphone._helper.log("OK, Cerrando llamada 2"); telsome_webphone._helper.doHangup2(); telsome_webphone._helper.log("OK, llamadas cerradas"); } else if (event==="registered"){ telsome_webphone._helper.log("[Call2] sipcall_helper["+idsipcall+"] registered"); if (onregister) onregister(); } else if(event === 'accepting') { // Response to an offerless INVITE, let's wait for an 'accepted' } else if(event === 'progress') { Janus.log("[Call2] There's early media from " + result["username"] + ", wairing for the call!"); Janus.log(jsep); // Call can start already: handle the remote answer if(jsep !== null && jsep !== undefined) { telsome_webphone._helper.sipcall_helper[0].handleRemoteJsep({jsep: jsep, error: telsome_webphone._helper.doHangup2 }); } } else if(event === 'accepted') { Janus.log("[Call2] "+result["username"] + " accepted the call!"); telsome_webphone._helper.eventDispatch("callStart",{"did":telsome_webphone._helper.did2,"callId":telsome_webphone.callid2}); telsome_webphone._helper.hold(1); telsome_webphone._helper.setActiveCall(2,1); Janus.log(jsep); // Call can start, now: handle the remote answer if(jsep !== null && jsep !== undefined) { telsome_webphone._helper.sipcall_helper[0].handleRemoteJsep({jsep: jsep, error: telsome_webphone._helper.doHangup2 }); } } else if(event === 'updatingcall') { // We got a re-INVITE: while we may prompt the user (e.g., // to notify about media changes), to keep things simple // we just accept the update and send an answer right away Janus.log("Got re-INVITE"); var doAudio = (jsep.sdp.indexOf("m=audio ") > -1), doVideo = (jsep.sdp.indexOf("m=video ") > -1); telsome_webphone._helper.sipcall_helper[0].createAnswer( { jsep: jsep, media: { audio: doAudio, video: doVideo }, success: function(jsep) { Janus.debug("[Call2] Got SDP " + jsep.type + "! audio=" + doAudio + ", video=" + doVideo); Janus.debug(jsep); var body = { request: "update" }; telsome_webphone._helper.sipcall_helper[0].send({"message": body, "jsep": jsep}); }, error: function(error) { Janus.error("[Call2] WebRTC error:", error); alert("[Call2] WebRTC error... 
" + JSON.stringify(error)); } }); } else if(event === 'transfer') { // We're being asked to transfer the call, ask the user what to do Janus.log("[Call2] Got transfer"); telsome_webphone._helper.log("Cerrando llamada 1"); telsome_webphone._helper.doHangup(); telsome_webphone._helper.log("OK, Cerrando llamada 2"); telsome_webphone._helper.doHangup2(); telsome_webphone._ringtones.incoming2.pause(); telsome_webphone._helper.log("OK, llamadas cerradas"); } else if(event === 'hangup') { //if (telsome_webphone.active1==2) return; Janus.log("[Call2] Call hung up (" + result["code"] + " " + result["reason"] + ")!"); var lastCallId=telsome_webphone.callid2; // Reset status //telsome_webphone._helper.sipcall_helper[0].hangup(); telsome_webphone._helper.doHangup2(); telsome_webphone._helper.setActiveCall(2,false); telsome_webphone._helper.unhold(1); var status="error"; switch (result["code"]){ case 486:status="busy";break; case 200:status="answer";break; case 480:status="noanswer";break; case 487:status="cancel";break; case 404:status="notfound";break; } telsome_webphone._ringtones.incoming2.pause(); telsome_webphone._helper.eventDispatch("callEnd",{"did":telsome_webphone._helper.did2,"status":status,"code":result["code"],"info":result["reason"],"callId":lastCallId}) } }, onlocalstream: function(stream) { Janus.debug("[Call2] ::: Got a local stream :::"); Janus.debug(stream); telsome_webphone.localstream2=stream; telsome_webphone.$('#videos').removeClass('hide').show(); if(telsome_webphone.$('#myvideo').length === 0) telsome_webphone.$('#_telsome_webphone_videoleft').append('