"// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nfunction EventEmitter() {\n this._events = this._events || {};\n this._maxListeners = this._maxListeners || undefined;\n}\nmodule.exports = EventEmitter;\n\n// Backwards-compat with node 0.10.x\nEventEmitter.EventEmitter = EventEmitter;\n\nEventEmitter.prototype._events = undefined;\nEventEmitter.prototype._maxListeners = undefined;\n\n// By default EventEmitters will print a warning if more than 10 listeners are\n// added to it. This is a useful default which helps finding memory leaks.\nEventEmitter.defaultMaxListeners = 10;\n\n// Obviously not all Emitters should be limited to 10. This function allows\n// that to be increased. Set to zero for unlimited.\nEventEmitter.prototype.setMaxListeners = function(n) {\n if (!isNumber(n) || n < 0 || isNaN(n))\n throw TypeError('n must be a positive number');\n this._maxListeners = n;\n return this;\n};\n\nEventEmitter.prototype.emit = function(type) {\n var er, handler, len, args, i, listeners;\n\n if (!this._events)\n this._events = {};\n\n // If there is no 'error' event listener then throw.\n if (type === 'error') {\n if (!this._events.error ||\n (isObject(this._events.error) && !this._events.error.length)) {\n er = arguments[1];\n if (er instanceof Error) {\n throw er; // Unhandled 'error' event\n }\n throw TypeError('Uncaught, unspecified \"error\" event.');\n }\n }\n\n handler = this._events[type];\n\n if (isUndefined(handler))\n return false;\n\n if (isFunction(handler)) {\n switch (arguments.length) {\n // fast cases\n case 1:\n handler.call(this);\n break;\n case 2:\n handler.call(this, arguments[1]);\n break;\n case 3:\n handler.call(this, arguments[1], arguments[2]);\n break;\n // slower\n default:\n args = Array.prototype.slice.call(arguments, 1);\n handler.apply(this, args);\n }\n } else if (isObject(handler)) {\n args = Array.prototype.slice.call(arguments, 1);\n listeners = handler.slice();\n len = listeners.length;\n for (i = 0; i < len; i++)\n listeners[i].apply(this, args);\n }\n\n return true;\n};\n\nEventEmitter.prototype.addListener = function(type, listener) {\n var m;\n\n if (!isFunction(listener))\n throw TypeError('listener must be a function');\n\n if (!this._events)\n this._events = {};\n\n // To avoid recursion in the case that type === \"newListener\"! 
Before\n // adding it to the listeners, first emit \"newListener\".\n if (this._events.newListener)\n this.emit('newListener', type,\n isFunction(listener.listener) ?\n listener.listener : listener);\n\n if (!this._events[type])\n // Optimize the case of one listener. Don't need the extra array object.\n this._events[type] = listener;\n else if (isObject(this._events[type]))\n // If we've already got an array, just append.\n this._even
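// A minimal usage sketch (illustrative, not part of the bundle) for the EventEmitter
// shim above: listeners registered with on/addListener are invoked synchronously by
// emit(), and setMaxListeners() raises the leak-warning threshold above the default 10.
// The 'frag' event name and its arguments are made up for the example.
var EventEmitter = require('events').EventEmitter;
var bus = new EventEmitter();
bus.setMaxListeners(20);
bus.on('frag', function (sn, duration) {
  console.log('fragment ' + sn + ' lasts ' + duration + 's');
});
bus.emit('frag', 42, 10); // returns true because at least one listener handled it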
"var bundleFn = arguments[3];\nvar sources = arguments[4];\nvar cache = arguments[5];\n\nvar stringify = JSON.stringify;\n\nmodule.exports = function (fn) {\n var keys = [];\n var wkey;\n var cacheKeys = Object.keys(cache);\n\n for (var i = 0, l = cacheKeys.length; i < l; i++) {\n var key = cacheKeys[i];\n var exp = cache[key].exports;\n // Using babel as a transpiler to use esmodule, the export will always\n // be an object with the default export as a property of it. To ensure\n // the existing api and babel esmodule exports are both supported we\n // check for both\n if (exp === fn || exp.default === fn) {\n wkey = key;\n break;\n }\n }\n\n if (!wkey) {\n wkey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);\n var wcache = {};\n for (var i = 0, l = cacheKeys.length; i < l; i++) {\n var key = cacheKeys[i];\n wcache[key] = key;\n }\n sources[wkey] = [\n Function(['require','module','exports'], '(' + fn + ')(self)'),\n wcache\n ];\n }\n var skey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);\n\n var scache = {}; scache[wkey] = wkey;\n sources[skey] = [\n Function(['require'], (\n // try to call default if defined to also support babel esmodule\n // exports\n 'var f = require(' + stringify(wkey) + ');' +\n '(f.default ? f.default : f)(self);'\n )),\n scache\n ];\n\n var src = '(' + bundleFn + ')({'\n + Object.keys(sources).map(function (key) {\n return stringify(key) + ':['\n + sources[key][0]\n + ',' + stringify(sources[key][1]) + ']'\n ;\n }).join(',')\n + '},{},[' + stringify(skey) + '])'\n ;\n\n var URL = window.URL || window.webkitURL || window.mozURL || window.msURL;\n\n return new Worker(URL.createObjectURL(\n new Blob([src], { type: 'text/javascript' })\n ));\n};\n",
"/*\n * simple ABR Controller\n * - compute next level based on last fragment bw heuristics\n * - implement an abandon rules triggered if we have less than 2 frag buffered and if computed bw shows that we risk buffer stalling\n */\n\nimport Event from '../events';\nimport EventHandler from '../event-handler';\nimport BufferHelper from '../helper/buffer-helper';\nimport {ErrorDetails} from '../errors';\nimport {logger} from '../utils/logger';\n\nclass AbrController extends EventHandler {\n\n constructor(hls) {\n super(hls, Event.FRAG_LOADING,\n Event.FRAG_LOAD_PROGRESS,\n Event.FRAG_LOADED,\n Event.ERROR);\n this.lastLoadedFragLevel = 0;\n this._autoLevelCapping = -1;\n this._nextAutoLevel = -1;\n this.hls = hls;\n this.onCheck = this.abandonRulesCheck.bind(this);\n }\n\n destroy() {\n this.clearTimer();\n EventHandler.prototype.destroy.call(this);\n }\n\n onFragLoading(data) {\n this.timer = setInterval(this.onCheck, 100);\n this.fragCurrent = data.frag;\n }\n\n onFragLoadProgress(data) {\n var stats = data.stats;\n // only update stats if first frag loading\n // if same frag is loaded multiple times, it might be in browser cache, and loaded quickly\n // and leading to wrong bw estimation\n if (stats.aborted === undefined && data.frag.loadCounter === 1) {\n this.lastfetchduration = (performance.now() - stats.trequest) / 1000;\n this.lastbw = (stats.loaded * 8) / this.lastfetchduration;\n //console.log(`fetchDuration:${this.lastfetchduration},bw:${(this.lastbw/1000).toFixed(0)}/${stats.aborted}`);\n }\n }\n\n abandonRulesCheck() {\n /*\n monitor fragment retrieval time...\n we compute expected time of arrival of the complete fragment.\n we compare it to expected time of buffer starvation\n */\n let hls = this.hls, v = hls.media,frag = this.fragCurrent;\n /* only monitor frag retrieval time if\n (video not paused OR first fragment being loaded(ready state === HAVE_NOTHING = 0)) AND autoswitching enabled AND not lowest level (=> means that we have several levels) */\n if (v && (!v.paused || !v.readyState) && frag.autoLevel && frag.level) {\n let requestDelay = performance.now() - frag.trequest;\n // monitor fragment load progress after half of expected fragment duration,to stabilize bitrate\n if (requestDelay > (500 * frag.duration)) {\n let loadRate = Math.max(1,frag.loaded * 1000 / requestDelay); // byte/s; at least 1 byte/s to avoid division by zero\n if (frag.expectedLen < frag.loaded) {\n frag.expectedLen = frag.loaded;\n }\n let pos = v.currentTime;\n let fragLoadedDelay = (frag.expectedLen - frag.loaded) / loadRate;\n let bufferStarvationDelay = BufferHelper.bufferInfo(v,pos,hls.config.maxBufferHole).end - pos;\n // consider emergency switch down only if we have less than 2 frag buffered AND\n // time to finish loading current fragment is bigger than buffer starvation delay\n // ie if we risk buffer starvation if bw does not increase quickly\n if (bufferStarvationDelay < 2*frag.duration && fragLoadedDelay > bufferStarvationDelay) {\n let fragLevelNextLoadedDelay, nextLoadLevel;\n // lets iterate through lower level and try to find the biggest one that could avoid rebuffering\n // we start from current level - 1 and we step down , until we find a matching level\n for (nextLoadLevel = frag.level - 1 ; nextLoadLevel >=0 ; nextLoadLevel--) {\n // compute time to load next fragment at lower level\n // 0.8 : consider only 80% of current bw to be conservative\n // 8 = bits per byte (bps/Bps)\n fragLevelNextLoadedDelay = frag.duration * hls.levels[nextLoadLevel].bitrate / (8 * 0.8 * 
loadRate);\n logger.log(`fragLoadedDelay/bufferStarvationDelay/fragLevelNextLoadedDelay[${nextLoadLevel}] :${fragLoadedDelay.toFixed(1)}/${bufferStarvationDelay.toFixed(1)}/${fragLevelNextLoadedDelay.toFixed(1)}`)
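// Worked example of the abandon rule above, with made-up numbers: with 1 MB of the
// current fragment still to load at a measured 250 kB/s, finishing it takes ~4 s;
// if only 2.5 s of media remain buffered (less than two fragment durations) the load
// risks a stall, and a lower level whose estimated load time fits the remaining
// buffer is considered instead.
var remainingBytes = 1e6, loadRate = 250e3;                  // bytes, bytes/s (assumed)
var fragLoadedDelay = remainingBytes / loadRate;             // 4 s to finish current frag
var bufferStarvationDelay = 2.5;                             // s of media left in buffer (assumed)
var fragDuration = 10, lowerLevelBitrate = 300e3;            // s, bits/s (assumed)
// estimated load time of the same duration at the lower level, using 80% of measured bandwidth:
var fragLevelNextLoadedDelay = fragDuration * lowerLevelBitrate / (8 * 0.8 * loadRate); // 1.875 s
var emergencySwitch = bufferStarvationDelay < 2 * fragDuration &&
                      fragLoadedDelay > bufferStarvationDelay;                          // true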
"/*\n * Buffer Controller\n*/\n\nimport Event from '../events';\nimport EventHandler from '../event-handler';\nimport {logger} from '../utils/logger';\nimport {ErrorTypes, ErrorDetails} from '../errors';\n\n\nclass BufferController extends EventHandler {\n\n constructor(hls) {\n super(hls,\n Event.MEDIA_ATTACHING,\n Event.MEDIA_DETACHING,\n Event.BUFFER_RESET,\n Event.BUFFER_APPENDING,\n Event.BUFFER_CODECS,\n Event.BUFFER_EOS,\n Event.BUFFER_FLUSHING);\n\n // Source Buffer listeners\n this.onsbue = this.onSBUpdateEnd.bind(this);\n this.onsbe = this.onSBUpdateError.bind(this);\n }\n\n destroy() {\n EventHandler.prototype.destroy.call(this);\n }\n\n onMediaAttaching(data) {\n var media = this.media = data.media;\n // setup the media source\n var ms = this.mediaSource = new MediaSource();\n //Media Source listeners\n this.onmso = this.onMediaSourceOpen.bind(this);\n this.onmse = this.onMediaSourceEnded.bind(this);\n this.onmsc = this.onMediaSourceClose.bind(this);\n ms.addEventListener('sourceopen', this.onmso);\n ms.addEventListener('sourceended', this.onmse);\n ms.addEventListener('sourceclose', this.onmsc);\n // link video and media Source\n media.src = URL.createObjectURL(ms);\n }\n\n onMediaDetaching() {\n var ms = this.mediaSource;\n if (ms) {\n if (ms.readyState === 'open') {\n try {\n // endOfStream could trigger exception if any sourcebuffer is in updating state\n // we don't really care about checking sourcebuffer state here,\n // as we are anyway detaching the MediaSource\n // let's just avoid this exception to propagate\n ms.endOfStream();\n } catch(err) {\n logger.warn(`onMediaDetaching:${err.message} while calling endOfStream`);\n }\n }\n ms.removeEventListener('sourceopen', this.onmso);\n ms.removeEventListener('sourceended', this.onmse);\n ms.removeEventListener('sourceclose', this.onmsc);\n // unlink MediaSource from video tag\n this.media.src = '';\n this.media.removeAttribute('src');\n this.mediaSource = null;\n this.media = null;\n this.pendingTracks = null;\n this.sourceBuffer = null;\n }\n this.onmso = this.onmse = this.onmsc = null;\n this.hls.trigger(Event.MEDIA_DETACHED);\n }\n\n onMediaSourceOpen() {\n logger.log('media source opened');\n this.hls.trigger(Event.MEDIA_ATTACHED, { media : this.media });\n // once received, don't listen anymore to sourceopen event\n this.mediaSource.removeEventListener('sourceopen', this.onmso);\n // if any buffer codecs pending, treat it here.\n var pendingTracks = this.pendingTracks;\n if (pendingTracks) {\n this.onBufferCodecs(pendingTracks);\n this.pendingTracks = null;\n this.doAppending();\n }\n }\n\n onMediaSourceClose() {\n logger.log('media source closed');\n }\n\n onMediaSourceEnded() {\n logger.log('media source ended');\n }\n\n\n onSBUpdateEnd() {\n\n if (this._needsFlush) {\n this.doFlush();\n }\n\n if (this._needsEos) {\n this.onBufferEos();\n }\n\n this.hls.trigger(Event.BUFFER_APPENDED);\n\n this.doAppending();\n }\n\n onSBUpdateError(event) {\n logger.error(`sourceBuffer error:${event}`);\n // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error\n // this error might not always be fatal (it is fatal if decode error is set, in that case\n // it will be followed by a mediaElement error ...)\n this.hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_APPENDING_ERROR, fatal: false});\n // we don't need to do more than that, as accordin to the spec, updateend will be fired just after\n }\n\n onBufferReset() {\n var sourceBuffer = this.sourceBuffer;\n if (sourceBuffer) {\n 
for(var type in sourceBuffer) {\n var sb = sourceBuffer[type];\n try {\n this.mediaSource.removeSourceBuffer(sb);\n sb.removeEventListener('update
"/*\n * Timeline Controller\n*/\n\nimport Event from '../events';\nimport EventHandler from '../event-handler';\nimport CEA708Interpreter from '../utils/cea-708-interpreter';\n\nclass TimelineController extends EventHandler {\n\n constructor(hls) {\n super(hls, Event.MEDIA_ATTACHING,\n Event.MEDIA_DETACHING,\n Event.FRAG_PARSING_USERDATA,\n Event.MANIFEST_LOADING,\n Event.FRAG_LOADED);\n\n this.hls = hls;\n this.config = hls.config;\n\n if (this.config.enableCEA708Captions)\n {\n this.cea708Interpreter = new CEA708Interpreter();\n }\n }\n\n destroy() {\n EventHandler.prototype.destroy.call(this);\n }\n\n onMediaAttaching(data) {\n var media = this.media = data.media;\n this.cea708Interpreter.attach(media);\n }\n\n onMediaDetaching() {\n this.cea708Interpreter.detach();\n }\n\n onManifestLoading()\n {\n this.lastPts = Number.POSITIVE_INFINITY;\n }\n\n onFragLoaded(data)\n {\n var pts = data.frag.start; //Number.POSITIVE_INFINITY;\n\n // if this is a frag for a previously loaded timerange, remove all captions\n // TODO: consider just removing captions for the timerange\n if (pts <= this.lastPts)\n {\n this.cea708Interpreter.clear();\n }\n\n this.lastPts = pts;\n }\n\n onFragParsingUserdata(data) {\n // push all of the CEA-708 messages into the interpreter\n // immediately. It will create the proper timestamps based on our PTS value\n for (var i=0; i<data.samples.length; i++)\n {\n this.cea708Interpreter.push(data.samples[i].pts, data.samples[i].bytes);\n }\n }\n}\n\nexport default TimelineController;\n",
"/*\n *\n * This file contains an adaptation of the AES decryption algorithm\n * from the Standford Javascript Cryptography Library. That work is\n * covered by the following copyright and permissions notice:\n *\n * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are\n * met:\n *\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above\n * copyright notice, this list of conditions and the following\n * disclaimer in the documentation and/or other materials provided\n * with the distribution.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\n * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR\n * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,\n * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE\n * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN\n * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n *\n * The views and conclusions contained in the software and documentation\n * are those of the authors and should not be interpreted as representing\n * official policies, either expressed or implied, of the authors.\n */\nclass AES {\n\n /**\n * Schedule out an AES key for both encryption and decryption. This\n * is a low-level class. Use a cipher mode to do bulk encryption.\n *\n * @constructor\n * @param key {Array} The key as an array of 4, 6 or 8 words.\n */\n constructor(key) {\n /**\n * The expanded S-box and inverse S-box tables. These will be computed\n * on the client so that we don't have to send them down the wire.\n *\n * There are two tables, _tables[0] is for encryption and\n * _tables[1] is for decryption.\n *\n * The first 4 sub-tables are the expanded S-box with MixColumns. The\n * last (_tables[01][4]) is the S-box itself.\n *\n * @private\n */\n this._tables = [[[],[],[],[],[]],[[],[],[],[],[]]];\n\n this._precompute();\n\n var i, j, tmp,\n encKey, decKey,\n sbox = this._tables[0][4], decTable = this._tables[1],\n keyLen = key.length, rcon = 1;\n\n if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {\n throw new Error('Invalid aes key size=' + keyLen);\n }\n\n encKey = key.slice(0);\n decKey = [];\n this._key = [encKey, decKey];\n\n // schedule encryption keys\n for (i = keyLen; i < 4 * keyLen + 28; i++) {\n tmp = encKey[i-1];\n\n // apply sbox\n if (i%keyLen === 0 || (keyLen === 8 && i%keyLen === 4)) {\n tmp = sbox[tmp>>>24]<<24 ^ sbox[tmp>>16&255]<<16 ^ sbox[tmp>>8&255]<<8 ^ sbox[tmp&255];\n\n // shift rows and add rcon\n if (i%keyLen === 0) {\n tmp = tmp<<8 ^ tmp>>>24 ^ rcon<<24;\n rcon = rcon<<1 ^ (rcon>>7)*283;\n }\n }\n\n encKey[i] = encKey[i-keyLen] ^ tmp;\n }\n\n // schedule decryption keys\n for (j = 0; i; j++, i--) {\n tmp = encKey[j&3 ? 
i : i - 4];\n if (i<=4 || j<4) {\n decKey[j] = tmp;\n } else {\n decKey[j] = decTable[0][sbox[tmp>>>24 ]] ^\n decTable[1][sbox[tmp>>16 & 255]] ^\n decTable[2][sbox[tmp>>8 & 255]] ^\n decTable[3][sbox[tmp & 255]];\n }\n }\n }\n\n /**\n * Expand the S-box tables.\n *\n * @private\n */\n _precompute() {\n var encTable = this._tables[0], decTable = this._tables[1],\n sbox = encTable[4], sboxInv = d
"/*\n *\n * This file contains an adaptation of the AES decryption algorithm\n * from the Standford Javascript Cryptography Library. That work is\n * covered by the following copyright and permissions notice:\n *\n * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are\n * met:\n *\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above\n * copyright notice, this list of conditions and the following\n * disclaimer in the documentation and/or other materials provided\n * with the distribution.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\n * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR\n * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,\n * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE\n * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN\n * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n *\n * The views and conclusions contained in the software and documentation\n * are those of the authors and should not be interpreted as representing\n * official policies, either expressed or implied, of the authors.\n */\n\nimport AES from './aes';\n\nclass AES128Decrypter {\n\n constructor(key, initVector) {\n this.key = key;\n this.iv = initVector;\n }\n\n /**\n * Convert network-order (big-endian) bytes into their little-endian\n * representation.\n */\n ntoh(word) {\n return (word << 24) |\n ((word & 0xff00) << 8) |\n ((word & 0xff0000) >> 8) |\n (word >>> 24);\n }\n\n\n /**\n * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.\n * @param encrypted {Uint8Array} the encrypted bytes\n * @param key {Uint32Array} the bytes of the decryption key\n * @param initVector {Uint32Array} the initialization vector (IV) to\n * use for the first round of CBC.\n * @return {Uint8Array} the decrypted bytes\n *\n * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard\n * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29\n * @see https://tools.ietf.org/html/rfc2315\n */\n doDecrypt(encrypted, key, initVector) {\n var\n // word-level access to the encrypted bytes\n encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2),\n\n decipher = new AES(Array.prototype.slice.call(key)),\n\n // byte and word-level access for the decrypted output\n decrypted = new Uint8Array(encrypted.byteLength),\n decrypted32 = new Int32Array(decrypted.buffer),\n\n // temporary variables for working with the IV, encrypted, and\n // decrypted data\n init0, init1, init2, init3,\n encrypted0, encrypted1, encrypted2, encrypted3,\n\n // iteration variable\n wordIx;\n\n // pull out the words of the IV to ensure we don't modify the\n // passed-in reference and easier access\n init0 = ~~initVector[0];\n init1 = ~~initVector[1];\n init2 = ~~initVector[2];\n init3 
= ~~initVector[3];\n\n // decrypt four word sequences, applying cipher-block chaining (CBC)\n // to each decrypted block\n for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {\n // convert big-endian (network order) words into little-endian\n // (javascript order)\n encrypted0 = ~~this.ntoh(encrypted32[wordIx]);\n encrypted1 = ~~this.ntoh(encrypted32[wordIx + 1]);\n
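// For comparison only, not the code path used by this library: browsers that expose
// the Web Crypto API can perform the same AES-128-CBC (PKCS#7) decryption natively.
// keyBytes, ivBytes and encryptedBytes are assumed Uint8Arrays supplied by the caller.
function webCryptoDecrypt(keyBytes, ivBytes, encryptedBytes) {
  return crypto.subtle.importKey('raw', keyBytes, { name: 'AES-CBC' }, false, ['decrypt'])
    .then(function (key) {
      return crypto.subtle.decrypt({ name: 'AES-CBC', iv: ivBytes }, key, encryptedBytes);
    })
    .then(function (plain) { return new Uint8Array(plain); });
}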
"/* inline demuxer.\n * probe fragments and instantiate appropriate demuxer depending on content type (TSDemuxer, AACDemuxer, ...)\n */\n\nimport Event from '../events';\nimport {ErrorTypes, ErrorDetails} from '../errors';\nimport AACDemuxer from '../demux/aacdemuxer';\nimport TSDemuxer from '../demux/tsdemuxer';\nimport MP4Remuxer from '../remux/mp4-remuxer';\nimport PassThroughRemuxer from '../remux/passthrough-remuxer';\n\nclass DemuxerInline {\n\n constructor(hls,typeSupported) {\n this.hls = hls;\n this.typeSupported = typeSupported;\n }\n\n destroy() {\n var demuxer = this.demuxer;\n if (demuxer) {\n demuxer.destroy();\n }\n }\n\n push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {\n var demuxer = this.demuxer;\n if (!demuxer) {\n var hls = this.hls;\n // probe for content type\n if (TSDemuxer.probe(data)) {\n if (this.typeSupported.mp2t === true) {\n demuxer = new TSDemuxer(hls,PassThroughRemuxer);\n } else {\n demuxer = new TSDemuxer(hls,MP4Remuxer);\n }\n } else if(AACDemuxer.probe(data)) {\n demuxer = new AACDemuxer(hls,MP4Remuxer);\n } else {\n hls.trigger(Event.ERROR, {type : ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: true, reason: 'no demux matching with content found'});\n return;\n }\n this.demuxer = demuxer;\n }\n demuxer.push(data,audioCodec,videoCodec,timeOffset,cc,level,sn,duration);\n }\n}\n\nexport default DemuxerInline;\n",
"/* demuxer web worker.\n * - listen to worker message, and trigger DemuxerInline upon reception of Fragments.\n * - provides MP4 Boxes back to main thread using [transferable objects](https://developers.google.com/web/updates/2011/12/Transferable-Objects-Lightning-Fast) in order to minimize message passing overhead.\n */\n\n import DemuxerInline from '../demux/demuxer-inline';\n import Event from '../events';\n import EventEmitter from 'events';\n\nvar DemuxerWorker = function (self) {\n // observer setup\n var observer = new EventEmitter();\n observer.trigger = function trigger (event, ...data) {\n observer.emit(event, event, ...data);\n };\n\n observer.off = function off (event, ...data) {\n observer.removeListener(event, ...data);\n };\n self.addEventListener('message', function (ev) {\n var data = ev.data;\n //console.log('demuxer cmd:' + data.cmd);\n switch (data.cmd) {\n case 'init':\n self.demuxer = new DemuxerInline(observer, data.typeSupported);\n break;\n case 'demux':\n self.demuxer.push(new Uint8Array(data.data), data.audioCodec, data.videoCodec, data.timeOffset, data.cc, data.level, data.sn, data.duration);\n break;\n default:\n break;\n }\n });\n\n // listen to events triggered by Demuxer\n observer.on(Event.FRAG_PARSING_INIT_SEGMENT, function(ev, data) {\n self.postMessage({event: ev, tracks : data.tracks, unique : data.unique });\n });\n\n observer.on(Event.FRAG_PARSING_DATA, function(ev, data) {\n var objData = {event: ev, type: data.type, startPTS: data.startPTS, endPTS: data.endPTS, startDTS: data.startDTS, endDTS: data.endDTS, data1: data.data1.buffer, data2: data.data2.buffer, nb: data.nb};\n // pass data1/data2 as transferable object (no copy)\n self.postMessage(objData, [objData.data1, objData.data2]);\n });\n\n observer.on(Event.FRAG_PARSED, function(event) {\n self.postMessage({event: event});\n });\n\n observer.on(Event.ERROR, function(event, data) {\n self.postMessage({event: event, data: data});\n });\n\n observer.on(Event.FRAG_PARSING_METADATA, function(event, data) {\n var objData = {event: event, samples: data.samples};\n self.postMessage(objData);\n });\n\n observer.on(Event.FRAG_PARSING_USERDATA, function(event, data) {\n var objData = {event: event, samples: data.samples};\n self.postMessage(objData);\n });\n\n};\n\nexport default DemuxerWorker;\n\n",
"import Event from '../events';\nimport DemuxerInline from '../demux/demuxer-inline';\nimport DemuxerWorker from '../demux/demuxer-worker';\nimport {logger} from '../utils/logger';\nimport Decrypter from '../crypt/decrypter';\n\nclass Demuxer {\n\n constructor(hls) {\n this.hls = hls;\n var typeSupported = {\n mp4 : MediaSource.isTypeSupported('video/mp4'),\n mp2t : hls.config.enableMP2TPassThrough && MediaSource.isTypeSupported('video/mp2t')\n };\n if (hls.config.enableWorker && (typeof(Worker) !== 'undefined')) {\n logger.log('demuxing in webworker');\n try {\n var work = require('webworkify');\n this.w = work(DemuxerWorker);\n this.onwmsg = this.onWorkerMessage.bind(this);\n this.w.addEventListener('message', this.onwmsg);\n this.w.postMessage({cmd: 'init', typeSupported : typeSupported});\n } catch(err) {\n logger.error('error while initializing DemuxerWorker, fallback on DemuxerInline');\n this.demuxer = new DemuxerInline(hls,typeSupported);\n }\n } else {\n this.demuxer = new DemuxerInline(hls,typeSupported);\n }\n this.demuxInitialized = true;\n }\n\n destroy() {\n if (this.w) {\n this.w.removeEventListener('message', this.onwmsg);\n this.w.terminate();\n this.w = null;\n } else {\n this.demuxer.destroy();\n this.demuxer = null;\n }\n if (this.decrypter) {\n this.decrypter.destroy();\n this.decrypter = null;\n }\n }\n\n pushDecrypted(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {\n if (this.w) {\n // post fragment payload as transferable objects (no copy)\n this.w.postMessage({cmd: 'demux', data: data, audioCodec: audioCodec, videoCodec: videoCodec, timeOffset: timeOffset, cc: cc, level: level, sn : sn, duration: duration}, [data]);\n } else {\n this.demuxer.push(new Uint8Array(data), audioCodec, videoCodec, timeOffset, cc, level, sn, duration);\n }\n }\n\n push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration, decryptdata) {\n if ((data.byteLength > 0) && (decryptdata != null) && (decryptdata.key != null) && (decryptdata.method === 'AES-128')) {\n if (this.decrypter == null) {\n this.decrypter = new Decrypter(this.hls);\n }\n\n var localthis = this;\n this.decrypter.decrypt(data, decryptdata.key, decryptdata.iv, function(decryptedData){\n localthis.pushDecrypted(decryptedData, audioCodec, videoCodec, timeOffset, cc, level, sn, duration);\n });\n } else {\n this.pushDecrypted(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration);\n }\n }\n\n onWorkerMessage(ev) {\n var data = ev.data;\n //console.log('onWorkerMessage:' + data.event);\n switch(data.event) {\n case Event.FRAG_PARSING_INIT_SEGMENT:\n var obj = {};\n obj.tracks = data.tracks;\n obj.unique = data.unique;\n this.hls.trigger(Event.FRAG_PARSING_INIT_SEGMENT, obj);\n break;\n case Event.FRAG_PARSING_DATA:\n this.hls.trigger(Event.FRAG_PARSING_DATA,{\n data1: new Uint8Array(data.data1),\n data2: new Uint8Array(data.data2),\n startPTS: data.startPTS,\n endPTS: data.endPTS,\n startDTS: data.startDTS,\n endDTS: data.endDTS,\n type: data.type,\n nb: data.nb\n });\n break;\n case Event.FRAG_PARSING_METADATA:\n this.hls.trigger(Event.FRAG_PARSING_METADATA, {\n samples: data.samples\n });\n break;\n case Event.FRAG_PARSING_USERDATA:\n this.hls.trigger(Event.FRAG_PARSING_USERDATA, {\n samples: data.samples\n });\n break;\n default:\n this.hls.trigger(data.event, data.data);\n break;\n }\n }\n}\n\nexport default Demuxer;\n\n",
"/**\n * Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.\n*/\n\nimport {logger} from '../utils/logger';\n\nclass ExpGolomb {\n\n constructor(data) {\n this.data = data;\n // the number of bytes left to examine in this.data\n this.bytesAvailable = this.data.byteLength;\n // the current word being examined\n this.word = 0; // :uint\n // the number of bits left to examine in the current word\n this.bitsAvailable = 0; // :uint\n }\n\n // ():void\n loadWord() {\n var\n position = this.data.byteLength - this.bytesAvailable,\n workingBytes = new Uint8Array(4),\n availableBytes = Math.min(4, this.bytesAvailable);\n if (availableBytes === 0) {\n throw new Error('no bytes available');\n }\n workingBytes.set(this.data.subarray(position, position + availableBytes));\n this.word = new DataView(workingBytes.buffer).getUint32(0);\n // track the amount of this.data that has been processed\n this.bitsAvailable = availableBytes * 8;\n this.bytesAvailable -= availableBytes;\n }\n\n // (count:int):void\n skipBits(count) {\n var skipBytes; // :int\n if (this.bitsAvailable > count) {\n this.word <<= count;\n this.bitsAvailable -= count;\n } else {\n count -= this.bitsAvailable;\n skipBytes = count >> 3;\n count -= (skipBytes >> 3);\n this.bytesAvailable -= skipBytes;\n this.loadWord();\n this.word <<= count;\n this.bitsAvailable -= count;\n }\n }\n\n // (size:int):uint\n readBits(size) {\n var\n bits = Math.min(this.bitsAvailable, size), // :uint\n valu = this.word >>> (32 - bits); // :uint\n if (size > 32) {\n logger.error('Cannot read more than 32 bits at a time');\n }\n this.bitsAvailable -= bits;\n if (this.bitsAvailable > 0) {\n this.word <<= bits;\n } else if (this.bytesAvailable > 0) {\n this.loadWord();\n }\n bits = size - bits;\n if (bits > 0) {\n return valu << bits | this.readBits(bits);\n } else {\n return valu;\n }\n }\n\n // ():uint\n skipLZ() {\n var leadingZeroCount; // :uint\n for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {\n if (0 !== (this.word & (0x80000000 >>> leadingZeroCount))) {\n // the first bit of working word is 1\n this.word <<= leadingZeroCount;\n this.bitsAvailable -= leadingZeroCount;\n return leadingZeroCount;\n }\n }\n // we exhausted word and still have not found a 1\n this.loadWord();\n return leadingZeroCount + this.skipLZ();\n }\n\n // ():void\n skipUEG() {\n this.skipBits(1 + this.skipLZ());\n }\n\n // ():void\n skipEG() {\n this.skipBits(1 + this.skipLZ());\n }\n\n // ():uint\n readUEG() {\n var clz = this.skipLZ(); // :uint\n return this.readBits(clz + 1) - 1;\n }\n\n // ():int\n readEG() {\n var valu = this.readUEG(); // :int\n if (0x01 & valu) {\n // the number is odd if the low order bit is set\n return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2\n } else {\n return -1 * (valu >>> 1); // divide by two then make it negative\n }\n }\n\n // Some convenience functions\n // :Boolean\n readBoolean() {\n return 1 === this.readBits(1);\n }\n\n // ():int\n readUByte() {\n return this.readBits(8);\n }\n\n // ():int\n readUShort() {\n return this.readBits(16);\n }\n // ():int\n readUInt() {\n return this.readBits(32);\n }\n\n /**\n * Advance the ExpGolomb decoder past a scaling list. 
The scaling\n * list is optionally transmitted as part of a sequence parameter\n * set and is not relevant to transmuxing.\n * @param count {number} the number of entries in this scaling list\n * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1\n */\n skipScalingList(count) {\n var\n lastScale = 8,\n nextScale = 8,\n j,\n deltaScale;\n for (j = 0; j < count; j++) {\n if (nextScale !== 0) {\n deltaScale = this.readEG();\n nextScale =
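// Worked example for the Exp-Golomb reader above, assuming ExpGolomb is in scope:
// the bit pattern 00101... encodes the unsigned value 4 (two leading zeros, then the
// 3-bit field 101 = 5, minus 1), which is what readUEG() returns.
var eg = new ExpGolomb(new Uint8Array([0x28, 0x00, 0x00, 0x00])); // 0x28 = 0010 1000
var ue = eg.readUEG(); // -> 4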
"/**\n * highly optimized TS demuxer:\n * parse PAT, PMT\n * extract PES packet from audio and video PIDs\n * extract AVC/H264 NAL units and AAC/ADTS samples from PES packet\n * trigger the remuxer upon parsing completion\n * it also tries to workaround as best as it can audio codec switch (HE-AAC to AAC and vice versa), without having to restart the MediaSource.\n * it also controls the remuxing process :\n * upon discontinuity or level switch detection, it will also notifies the remuxer so that it can reset its state.\n*/\n\n import ADTS from './adts';\n import Event from '../events';\n import ExpGolomb from './exp-golomb';\n// import Hex from '../utils/hex';\n import {logger} from '../utils/logger';\n import {ErrorTypes, ErrorDetails} from '../errors';\n\n class TSDemuxer {\n\n constructor(observer,remuxerClass) {\n this.observer = observer;\n this.remuxerClass = remuxerClass;\n this.lastCC = 0;\n this.remuxer = new this.remuxerClass(observer);\n }\n\n static probe(data) {\n // a TS fragment should contain at least 3 TS packets, a PAT, a PMT, and one PID, each starting with 0x47\n if (data.length >= 3*188 && data[0] === 0x47 && data[188] === 0x47 && data[2*188] === 0x47) {\n return true;\n } else {\n return false;\n }\n }\n\n switchLevel() {\n this.pmtParsed = false;\n this._pmtId = -1;\n this.lastAacPTS = null;\n this.aacOverFlow = null;\n this._avcTrack = {container : 'video/mp2t', type: 'video', id :-1, sequenceNumber: 0, samples : [], len : 0, nbNalu : 0};\n this._aacTrack = {container : 'video/mp2t', type: 'audio', id :-1, sequenceNumber: 0, samples : [], len : 0};\n this._id3Track = {type: 'id3', id :-1, sequenceNumber: 0, samples : [], len : 0};\n this._txtTrack = {type: 'text', id: -1, sequenceNumber: 0, samples: [], len: 0};\n this.remuxer.switchLevel();\n }\n\n insertDiscontinuity() {\n this.switchLevel();\n this.remuxer.insertDiscontinuity();\n }\n\n // feed incoming data to the front of the parsing pipeline\n push(data, audioCodec, videoCodec, timeOffset, cc, level, sn, duration) {\n var avcData, aacData, id3Data,\n start, len = data.length, stt, pid, atf, offset,\n codecsOnly = this.remuxer.passthrough;\n\n this.audioCodec = audioCodec;\n this.videoCodec = videoCodec;\n this.timeOffset = timeOffset;\n this._duration = duration;\n this.contiguous = false;\n if (cc !== this.lastCC) {\n logger.log('discontinuity detected');\n this.insertDiscontinuity();\n this.lastCC = cc;\n } else if (level !== this.lastLevel) {\n logger.log('level switch detected');\n this.switchLevel();\n this.lastLevel = level;\n } else if (sn === (this.lastSN+1)) {\n this.contiguous = true;\n }\n this.lastSN = sn;\n\n if(!this.contiguous) {\n // flush any partial content\n this.aacOverFlow = null;\n }\n\n var pmtParsed = this.pmtParsed,\n avcId = this._avcTrack.id,\n aacId = this._aacTrack.id,\n id3Id = this._id3Track.id;\n\n // don't parse last TS packet if incomplete\n len -= len % 188;\n // loop through TS packets\n for (start = 0; start < len; start += 188) {\n if (data[start] === 0x47) {\n stt = !!(data[start + 1] & 0x40);\n // pid is a 13-bit field starting at the last bit of TS[1]\n pid = ((data[start + 1] & 0x1f) << 8) + data[start + 2];\n atf = (data[start + 3] & 0x30) >> 4;\n // if an adaption field is present, its length is specified by the fifth byte of the TS packet header.\n if (atf > 1) {\n offset = start + 5 + data[start + 4];\n // continue if there is only adaptation field\n if (offset === (start + 188)) {\n continue;\n }\n } else {\n offset = start + 4;\n }\n if (pmtParsed) {\n if (pid 
=== avcId) {\n if (stt) {\n if (avcData) {\n this._parseAVCPES(this._parsePES(avcData));\n if (codecsOnly) {\n // if we hav
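// Worked example of the TS packet header fields parsed above (header bytes are made up):
// 0x47 0x40 0x11 0x10 -> sync byte present, payload_unit_start_indicator set,
// PID 0x0011, adaptation_field_control 1 (payload only).
var header = [0x47, 0x40, 0x11, 0x10];
var stt = !!(header[1] & 0x40);                  // true: start of a new PES packet
var pid = ((header[1] & 0x1f) << 8) + header[2]; // 17 (0x0011)
var atf = (header[3] & 0x30) >> 4;               // 1: no adaptation field, payload follows header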
"module.exports = {\n // fired before MediaSource is attaching to media element - data: { media }\n MEDIA_ATTACHING: 'hlsMediaAttaching',\n // fired when MediaSource has been succesfully attached to media element - data: { }\n MEDIA_ATTACHED: 'hlsMediaAttached',\n // fired before detaching MediaSource from media element - data: { }\n MEDIA_DETACHING: 'hlsMediaDetaching',\n // fired when MediaSource has been detached from media element - data: { }\n MEDIA_DETACHED: 'hlsMediaDetached',\n // fired when we buffer is going to be resetted\n BUFFER_RESET: 'hlsBufferReset',\n // fired when we know about the codecs that we need buffers for to push into - data: {tracks : { container, codec, levelCodec, initSegment, metadata }}\n BUFFER_CODECS: 'hlsBufferCodecs',\n // fired when we append a segment to the buffer - data: { segment: segment object }\n BUFFER_APPENDING: 'hlsBufferAppending',\n // fired when we are done with appending a media segment to the buffer\n BUFFER_APPENDED: 'hlsBufferAppended',\n // fired when the stream is finished and we want to notify the media buffer that there will be no more data\n BUFFER_EOS: 'hlsBufferEos',\n // fired when the media buffer should be flushed - data {startOffset, endOffset}\n BUFFER_FLUSHING: 'hlsBufferFlushing',\n // fired when the media has been flushed\n BUFFER_FLUSHED: 'hlsBufferFlushed',\n // fired to signal that a manifest loading starts - data: { url : manifestURL}\n MANIFEST_LOADING: 'hlsManifestLoading',\n // fired after manifest has been loaded - data: { levels : [available quality levels] , url : manifestURL, stats : { trequest, tfirst, tload, mtime}}\n MANIFEST_LOADED: 'hlsManifestLoaded',\n // fired after manifest has been parsed - data: { levels : [available quality levels] , firstLevel : index of first quality level appearing in Manifest}\n MANIFEST_PARSED: 'hlsManifestParsed',\n // fired when a level playlist loading starts - data: { url : level URL level : id of level being loaded}\n LEVEL_LOADING: 'hlsLevelLoading',\n // fired when a level playlist loading finishes - data: { details : levelDetails object, level : id of loaded level, stats : { trequest, tfirst, tload, mtime} }\n LEVEL_LOADED: 'hlsLevelLoaded',\n // fired when a level's details have been updated based on previous details, after it has been loaded. 
- data: { details : levelDetails object, level : id of updated level }\n LEVEL_UPDATED: 'hlsLevelUpdated',\n // fired when a level's PTS information has been updated after parsing a fragment - data: { details : levelDetails object, level : id of updated level, drift: PTS drift observed when parsing last fragment }\n LEVEL_PTS_UPDATED: 'hlsLevelPtsUpdated',\n // fired when a level switch is requested - data: { level : id of new level }\n LEVEL_SWITCH: 'hlsLevelSwitch',\n // fired when a fragment loading starts - data: { frag : fragment object}\n FRAG_LOADING: 'hlsFragLoading',\n // fired when a fragment loading is progressing - data: { frag : fragment object, { trequest, tfirst, loaded}}\n FRAG_LOAD_PROGRESS: 'hlsFragLoadProgress',\n // Identifier for fragment load aborting for emergency switch down - data: {frag : fragment object}\n FRAG_LOAD_EMERGENCY_ABORTED: 'hlsFragLoadEmergencyAborted',\n // fired when a fragment loading is completed - data: { frag : fragment object, payload : fragment payload, stats : { trequest, tfirst, tload, length}}\n FRAG_LOADED: 'hlsFragLoaded',\n // fired when Init Segment has been extracted from fragment - data: { moov : moov MP4 box, codecs : codecs found while parsing fragment}\n FRAG_PARSING_INIT_SEGMENT: 'hlsFragParsingInitSegment',\n // fired when parsing sei text is completed - data: { samples : [ sei samples pes ] }\n FRAG_PARSING_USERDATA: 'hlsFragParsingUserdata',\n // fired when parsing id3 is completed - data: { samples : [ id3 samples pes ] }\n FRAG_PARSING_METADATA: 'hlsFragParsingMetadata',\n // fired when data have been extracted from fragment - data: { data1 : moof MP4 box or TS fragments, data2 : mdat MP4 box or null}\n FRAG_PARSING_DATA: 'hlsFragParsingDa
"/**\n * Level Helper class, providing methods dealing with playlist sliding and drift\n*/\n\nimport {logger} from '../utils/logger';\n\nclass LevelHelper {\n\n static mergeDetails(oldDetails,newDetails) {\n var start = Math.max(oldDetails.startSN,newDetails.startSN)-newDetails.startSN,\n end = Math.min(oldDetails.endSN,newDetails.endSN)-newDetails.startSN,\n delta = newDetails.startSN - oldDetails.startSN,\n oldfragments = oldDetails.fragments,\n newfragments = newDetails.fragments,\n ccOffset =0,\n PTSFrag;\n\n // check if old/new playlists have fragments in common\n if ( end < start) {\n newDetails.PTSKnown = false;\n return;\n }\n // loop through overlapping SN and update startPTS , cc, and duration if any found\n for(var i = start ; i <= end ; i++) {\n var oldFrag = oldfragments[delta+i],\n newFrag = newfragments[i];\n ccOffset = oldFrag.cc - newFrag.cc;\n if (!isNaN(oldFrag.startPTS)) {\n newFrag.start = newFrag.startPTS = oldFrag.startPTS;\n newFrag.endPTS = oldFrag.endPTS;\n newFrag.duration = oldFrag.duration;\n PTSFrag = newFrag;\n }\n }\n\n if(ccOffset) {\n logger.log(`discontinuity sliding from playlist, take drift into account`);\n for(i = 0 ; i < newfragments.length ; i++) {\n newfragments[i].cc += ccOffset;\n }\n }\n\n // if at least one fragment contains PTS info, recompute PTS information for all fragments\n if(PTSFrag) {\n LevelHelper.updateFragPTS(newDetails,PTSFrag.sn,PTSFrag.startPTS,PTSFrag.endPTS);\n } else {\n // adjust start by sliding offset\n var sliding = oldfragments[delta].start;\n for(i = 0 ; i < newfragments.length ; i++) {\n newfragments[i].start += sliding;\n }\n }\n // if we are here, it means we have fragments overlapping between\n // old and new level. reliable PTS info is thus relying on old level\n newDetails.PTSKnown = oldDetails.PTSKnown;\n return;\n }\n\n static updateFragPTS(details,sn,startPTS,endPTS) {\n var fragIdx, fragments, frag, i;\n // exit if sn out of range\n if (sn < details.startSN || sn > details.endSN) {\n return 0;\n }\n fragIdx = sn - details.startSN;\n fragments = details.fragments;\n frag = fragments[fragIdx];\n if(!isNaN(frag.startPTS)) {\n startPTS = Math.min(startPTS,frag.startPTS);\n endPTS = Math.max(endPTS, frag.endPTS);\n }\n\n var drift = startPTS - frag.start;\n\n frag.start = frag.startPTS = startPTS;\n frag.endPTS = endPTS;\n frag.duration = endPTS - startPTS;\n // adjust fragment PTS/duration from seqnum-1 to frag 0\n for(i = fragIdx ; i > 0 ; i--) {\n LevelHelper.updatePTS(fragments,i,i-1);\n }\n\n // adjust fragment PTS/duration from seqnum to last frag\n for(i = fragIdx ; i < fragments.length - 1 ; i++) {\n LevelHelper.updatePTS(fragments,i,i+1);\n }\n details.PTSKnown = true;\n //logger.log(` frag start/end:${startPTS.toFixed(3)}/${endPTS.toFixed(3)}`);\n\n return drift;\n }\n\n static updatePTS(fragments,fromIdx, toIdx) {\n var fragFrom = fragments[fromIdx],fragTo = fragments[toIdx], fragToPTS = fragTo.startPTS;\n // if we know startPTS[toIdx]\n if(!isNaN(fragToPTS)) {\n // update fragment duration.\n // it helps to fix drifts between playlist reported duration and fragment real duration\n if (toIdx > fromIdx) {\n fragFrom.duration = fragToPTS-fragFrom.start;\n if(fragFrom.duration < 0) {\n logger.error(`negative duration computed for frag ${fragFrom.sn},level ${fragFrom.level}, there should be some duration drift between playlist and fragment!`);\n }\n } else {\n fragTo.duration = fragFrom.start - fragToPTS;\n if(fragTo.duration < 0) {\n logger.error(`negative duration computed for frag ${fragTo.sn},level 
${fragTo.level}, there should be some duration drift between playlist and fragment!`);\n }\n }\n
"// This is mostly for support of the es6 module export\n// syntax with the babel compiler, it looks like it doesnt support\n// function exports like we are used to in node/commonjs\nmodule.exports = require('./hls.js').default;\n",
"var BinarySearch = {\n /**\n * Searches for an item in an array which matches a certain condition.\n * This requires the condition to only match one item in the array,\n * and for the array to be ordered.\n *\n * @param {Array} list The array to search.\n * @param {Function} comparisonFunction\n * Called and provided a candidate item as the first argument.\n * Should return:\n * > -1 if the item should be located at a lower index than the provided item.\n * > 1 if the item should be located at a higher index than the provided item.\n * > 0 if the item is the item you're looking for.\n *\n * @return {*} The object if it is found or null otherwise.\n */\n search: function(list, comparisonFunction) {\n var minIndex = 0;\n var maxIndex = list.length - 1;\n var currentIndex = null;\n var currentElement = null;\n \n while (minIndex <= maxIndex) {\n currentIndex = (minIndex + maxIndex) / 2 | 0;\n currentElement = list[currentIndex];\n \n var comparisonResult = comparisonFunction(currentElement);\n if (comparisonResult > 0) {\n minIndex = currentIndex + 1;\n }\n else if (comparisonResult < 0) {\n maxIndex = currentIndex - 1;\n }\n else {\n return currentElement;\n }\n }\n \n return null;\n }\n};\n\nmodule.exports = BinarySearch;\n",
"var URLHelper = {\n\n // build an absolute URL from a relative one using the provided baseURL\n // if relativeURL is an absolute URL it will be returned as is.\n buildAbsoluteURL: function(baseURL, relativeURL) {\n // remove any remaining space and CRLF\n relativeURL = relativeURL.trim();\n if (/^[a-z]+:/i.test(relativeURL)) {\n // complete url, not relative\n return relativeURL;\n }\n\n var relativeURLQuery = null;\n var relativeURLHash = null;\n\n var relativeURLHashSplit = /^([^#]*)(.*)$/.exec(relativeURL);\n if (relativeURLHashSplit) {\n relativeURLHash = relativeURLHashSplit[2];\n relativeURL = relativeURLHashSplit[1];\n }\n var relativeURLQuerySplit = /^([^\\?]*)(.*)$/.exec(relativeURL);\n if (relativeURLQuerySplit) {\n relativeURLQuery = relativeURLQuerySplit[2];\n relativeURL = relativeURLQuerySplit[1];\n }\n\n var baseURLHashSplit = /^([^#]*)(.*)$/.exec(baseURL);\n if (baseURLHashSplit) {\n baseURL = baseURLHashSplit[1];\n }\n var baseURLQuerySplit = /^([^\\?]*)(.*)$/.exec(baseURL);\n if (baseURLQuerySplit) {\n baseURL = baseURLQuerySplit[1];\n }\n\n var baseURLDomainSplit = /^((([a-z]+):)?\\/\\/[a-z0-9\\.\\-_~]+(:[0-9]+)?\\/)(.*)$/i.exec(baseURL);\n var baseURLProtocol = baseURLDomainSplit[3];\n var baseURLDomain = baseURLDomainSplit[1];\n var baseURLPath = baseURLDomainSplit[5];\n\n var builtURL = null;\n if (/^\\/\\//.test(relativeURL)) {\n builtURL = baseURLProtocol+'://'+URLHelper.buildAbsolutePath('', relativeURL.substring(2));\n }\n else if (/^\\//.test(relativeURL)) {\n builtURL = baseURLDomain+URLHelper.buildAbsolutePath('', relativeURL.substring(1));\n }\n else {\n builtURL = URLHelper.buildAbsolutePath(baseURLDomain+baseURLPath, relativeURL);\n }\n\n // put the query and hash parts back\n if (relativeURLQuery) {\n builtURL += relativeURLQuery;\n }\n if (relativeURLHash) {\n builtURL += relativeURLHash;\n }\n return builtURL;\n },\n\n // build an absolute path using the provided basePath\n // adapted from https://developer.mozilla.org/en-US/docs/Web/API/document/cookie#Using_relative_URLs_in_the_path_parameter\n // this does not handle the case where relativePath is \"/\" or \"//\". These cases should be handled outside this.\n buildAbsolutePath: function(basePath, relativePath) {\n var sRelPath = relativePath;\n var nUpLn, sDir = '', sPath = basePath.replace(/[^\\/]*$/, sRelPath.replace(/(\\/|^)(?:\\.?\\/+)+/g, '$1'));\n for (var nEnd, nStart = 0; nEnd = sPath.indexOf('/../', nStart), nEnd > -1; nStart = nEnd + nUpLn) {\n nUpLn = /^\\/(?:\\.\\.\\/)*/.exec(sPath.slice(nEnd))[0].length;\n sDir = (sDir + sPath.substring(nStart, nEnd)).replace(new RegExp('(?:\\\\\\/+[^\\\\\\/]*){0,' + ((nUpLn - 1) / 3) + '}$'), '/');\n }\n return sDir + sPath.substr(nStart);\n }\n};\n\nmodule.exports = URLHelper;\n",