diff --git a/dist/hls-demo.js b/dist/hls-demo.js
new file mode 100644
index 00000000000..51cb4fb9b41
--- /dev/null
+++ b/dist/hls-demo.js
@@ -0,0 +1,1657 @@
+typeof window !== "undefined" &&
+(function webpackUniversalModuleDefinition(root, factory) {
+ if(typeof exports === 'object' && typeof module === 'object')
+ module.exports = factory();
+ else if(typeof define === 'function' && define.amd)
+ define([], factory);
+ else if(typeof exports === 'object')
+ exports["HlsDemo"] = factory();
+ else
+ root["HlsDemo"] = factory();
+})(this, function() {
// NOTE(review): auto-generated webpack 4 runtime (module cache + require shim).
// Do not hand-edit — regenerate from the build instead.
return /******/ (function(modules) { // webpackBootstrap
/******/ 	// The module cache
/******/ 	var installedModules = {};
/******/
/******/ 	// The require function
/******/ 	function __webpack_require__(moduleId) {
/******/
/******/ 		// Check if module is in cache
/******/ 		if(installedModules[moduleId]) {
/******/ 			return installedModules[moduleId].exports;
/******/ 		}
/******/ 		// Create a new module (and put it into the cache)
/******/ 		var module = installedModules[moduleId] = {
/******/ 			i: moduleId,
/******/ 			l: false,
/******/ 			exports: {}
/******/ 		};
/******/
/******/ 		// Execute the module function
/******/ 		modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ 		// Flag the module as loaded
/******/ 		module.l = true;
/******/
/******/ 		// Return the exports of the module
/******/ 		return module.exports;
/******/ 	}
/******/
/******/
/******/ 	// expose the modules object (__webpack_modules__)
/******/ 	__webpack_require__.m = modules;
/******/
/******/ 	// expose the module cache
/******/ 	__webpack_require__.c = installedModules;
/******/
/******/ 	// define getter function for harmony exports
/******/ 	__webpack_require__.d = function(exports, name, getter) {
/******/ 		if(!__webpack_require__.o(exports, name)) {
/******/ 			Object.defineProperty(exports, name, { enumerable: true, get: getter });
/******/ 		}
/******/ 	};
/******/
/******/ 	// define __esModule on exports
/******/ 	__webpack_require__.r = function(exports) {
/******/ 		if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/ 			Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
/******/ 		}
/******/ 		Object.defineProperty(exports, '__esModule', { value: true });
/******/ 	};
/******/
/******/ 	// create a fake namespace object
/******/ 	// mode & 1: value is a module id, require it
/******/ 	// mode & 2: merge all properties of value into the ns
/******/ 	// mode & 4: return value when already ns object
/******/ 	// mode & 8|1: behave like require
/******/ 	__webpack_require__.t = function(value, mode) {
/******/ 		if(mode & 1) value = __webpack_require__(value);
/******/ 		if(mode & 8) return value;
/******/ 		if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
/******/ 		var ns = Object.create(null);
/******/ 		__webpack_require__.r(ns);
/******/ 		Object.defineProperty(ns, 'default', { enumerable: true, value: value });
/******/ 		if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
/******/ 		return ns;
/******/ 	};
/******/
/******/ 	// getDefaultExport function for compatibility with non-harmony modules
/******/ 	__webpack_require__.n = function(module) {
/******/ 		var getter = module && module.__esModule ?
/******/ 			function getDefault() { return module['default']; } :
/******/ 			function getModuleExports() { return module; };
/******/ 		__webpack_require__.d(getter, 'a', getter);
/******/ 		return getter;
/******/ 	};
/******/
/******/ 	// Object.prototype.hasOwnProperty.call
/******/ 	__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ 	// __webpack_public_path__
/******/ 	__webpack_require__.p = "/dist/";
/******/
/******/
/******/ 	// Load entry module and return exports
/******/ 	return __webpack_require__(__webpack_require__.s = "./demo/main.js");
/******/ })
/************************************************************************/
/******/ ({
+
+/***/ "./demo/main.js":
+/*!**********************************!*\
+ !*** ./demo/main.js + 1 modules ***!
+ \**********************************/
+/*! no exports provided */
+/***/ (function(module, __webpack_exports__, __webpack_require__) {
+
+"use strict";
+__webpack_require__.r(__webpack_exports__);
+
+// CONCATENATED MODULE: ./demo/demo-utils.js
/**
 * Recursively returns a copy of `obj` with keys sorted alphabetically at
 * every level, so JSON.stringify output is stable for display/diffing.
 *
 * @param {*} obj - Any value; non-objects are returned unchanged.
 * @returns {*} A key-sorted deep copy for objects, the input otherwise.
 */
function sortObject(obj) {
  // `typeof null === 'object'`, so without the explicit null check a null
  // value would be silently converted into an empty object.
  if (obj === null || typeof obj !== 'object') {
    return obj;
  }

  var temp = {};
  // Object.keys only visits own enumerable keys (the original for..in could
  // also pick up inherited keys); sort once, then copy in order.
  var keys = Object.keys(obj).sort();

  for (var i = 0; i < keys.length; i++) {
    temp[keys[i]] = sortObject(obj[keys[i]]);
  }

  return temp;
}
/**
 * Copies `text` to the clipboard using the legacy execCommand('copy') flow:
 * stage the text in a temporary <textarea>, select it, copy, then clean up.
 *
 * @param {string} text - The text to place on the clipboard.
 */
function copyTextToClipboard(text) {
  var textArea = document.createElement('textarea');
  textArea.value = text;
  document.body.appendChild(textArea);
  textArea.select();

  try {
    // execCommand may throw in some browsers; report success either way.
    var ok = document.execCommand('copy');
    console.log('Copying text command was ' + (ok ? 'successful' : 'unsuccessful'));
  } catch (err) {
    console.log('Oops, unable to copy');
  }

  document.body.removeChild(textArea);
}
+// CONCATENATED MODULE: ./demo/main.js
+
// localStorage keys used by the config editor persistence feature.
var STORAGE_KEYS = {
  Editor_Persistence: 'hlsjs:config-editor-persist',
  Hls_Config: 'hlsjs:config'
};

var testStreams = __webpack_require__(/*! ../tests/test-streams */ "./tests/test-streams.js");

// Default to the "bbb" test stream; `src` / `demoConfig` URL params override.
// NOTE(review): getURLParam is presumably defined in a sibling demo script or
// later in this module — not visible in this chunk; confirm before relying on it.
var defaultTestStreamUrl = testStreams['bbb'].url;
var sourceURL = decodeURIComponent(getURLParam('src', defaultTestStreamUrl));
var demoConfig = getURLParam('demoConfig', null);

// demoConfig travels base64-encoded JSON in the URL; fall back to {}.
if (demoConfig) {
  demoConfig = JSON.parse(atob(demoConfig));
} else {
  demoConfig = {};
}

// Demo-level hls.js defaults; user config (editor) can override all of these.
var hlsjsDefaults = {
  debug: true,
  enableWorker: true,
  liveBackBufferLength: 60 * 15
};
var enableStreaming = getDemoConfigPropOrDefault('enableStreaming', true);
var autoRecoverError = getDemoConfigPropOrDefault('autoRecoverError', true);
var levelCapping = getDemoConfigPropOrDefault('levelCapping', -1);
// -1 disables metrics-history trimming (see trimEventHistory).
var limitMetrics = getDemoConfigPropOrDefault('limitMetrics', -1);
var dumpfMP4 = getDemoConfigPropOrDefault('dumpfMP4', false);
// Index into events.video of the in-progress "buffering" event, -1 when none.
var bufferingIdx = -1;
var selectedTestStream = null;
var video = $('#video')[0];
var startTime = Date.now();
var lastSeekingIdx;
var lastStartPosition;
var lastDuration;
var lastAudioTrackSwitchingIdx;
var hls;
var url;
var events;
var stats;
var tracks;
var fmp4Data;
var configPersistenceEnabled = false;
var configEditor = null;
// Wire up the demo UI once the DOM is ready, then start the initial load.
$(document).ready(function () {
  setupConfigEditor();
  Object.keys(testStreams).forEach(function (key) {
    var stream = testStreams[key];
    var option = new Option(stream.description, key);
    $('#streamSelect').append(option);
  });
  $('#streamSelect').change(function () {
    selectedTestStream = testStreams[$('#streamSelect').val()];
    var streamUrl = selectedTestStream.url;
    $('#streamURL').val(streamUrl);
    loadSelectedStream();
  });
  $('#streamURL').change(function () {
    // Hand-typed URL: no stream-specific config applies.
    selectedTestStream = null;
    loadSelectedStream();
  });
  $('#videoSize').change(function () {
    $('#video').width($('#videoSize').val());
    $('#bufferedCanvas').width($('#videoSize').val());
  });
  $('#enableStreaming').click(function () {
    enableStreaming = this.checked;
    loadSelectedStream();
  });
  $('#autoRecoverError').click(function () {
    autoRecoverError = this.checked;
    onDemoConfigChanged();
  });
  $('#dumpfMP4').click(function () {
    dumpfMP4 = this.checked;
    onDemoConfigChanged();
  });
  $('#limitMetrics').change(function () {
    limitMetrics = this.value;
    onDemoConfigChanged();
  });
  $('#levelCapping').change(function () {
    levelCapping = this.value;
    onDemoConfigChanged();
  });
  // Push the (possibly URL-supplied) demo config values into the controls.
  $('#limitMetrics').val(limitMetrics);
  $('#enableStreaming').prop('checked', enableStreaming);
  $('#autoRecoverError').prop('checked', autoRecoverError);
  $('#dumpfMP4').prop('checked', dumpfMP4);
  $('#levelCapping').val(levelCapping);
  $('h2').append(' v' + Hls.version + '');
  $('#currentVersion').html('Hls version:' + Hls.version);
  $('#streamURL').val(sourceURL);
  // Keep the demo quiet by default.
  video.volume = 0.05;
  hideAllTabs();
  // NOTE(review): windowSliding is not defined in this chunk — presumably a
  // global from the metrics script; confirm.
  $('#metricsButtonWindow').toggle(windowSliding);
  $('#metricsButtonFixed').toggle(!windowSliding);
  loadSelectedStream();
});
+
/**
 * Resets the per-load metrics accumulators and exposes the handlers that the
 * page's inline on* attributes and the metrics page reach through `window`.
 */
function setupGlobals() {
  // Fresh event timeline for this load; t0 anchors all relative timestamps.
  events = {
    url: url,
    t0: performance.now(),
    load: [],
    buffer: [],
    video: [],
    level: [],
    bitrate: []
  }; // actual values, only on window
  window.events = events;

  window.recoverDecodingErrorDate = null;
  window.recoverSwapAudioCodecDate = null;

  // Captured fMP4 segments per track type (filled when dumpfMP4 is on).
  fmp4Data = {
    'audio': [],
    'video': []
  };
  window.fmp4Data = fmp4Data;

  // Handlers referenced from inline onclick/onchange attributes in the HTML.
  window.onClickBufferedRange = onClickBufferedRange;
  window.updateLevelInfo = updateLevelInfo;
  window.onDemoConfigChanged = onDemoConfigChanged;
  window.createfMP4 = createfMP4;
  window.goToMetricsPermaLink = goToMetricsPermaLink;
  window.toggleTab = toggleTab;
  window.applyConfigEditorValue = applyConfigEditorValue;
}
+
/**
 * Trims `target` in place to at most `limit` entries, discarding the oldest
 * (front) elements. A negative limit disables trimming entirely.
 *
 * @param {Array} target - Array mutated in place.
 * @param {number} limit - Maximum length to keep; < 0 means unlimited.
 */
function trimArray(target, limit) {
  if (limit < 0) {
    return;
  }

  var excess = target.length - limit;
  if (excess > 0) {
    // One splice instead of repeated shift() calls; same surviving elements.
    target.splice(0, excess);
  }
}
+
/**
 * Applies the global `limitMetrics` cap to every event-history array so the
 * metrics timeline does not grow without bound. No-op when the cap is < 0.
 */
function trimEventHistory() {
  var limit = limitMetrics;

  if (limit < 0) {
    return;
  }

  ['load', 'buffer', 'video', 'level', 'bitrate'].forEach(function (key) {
    trimArray(events[key], limit);
  });
}
+
/**
 * Tears down any previous Hls instance, builds the effective config
 * (demo defaults < editor config < stream-specific config), creates a new
 * Hls instance for the URL in #streamURL, and registers every demo event
 * handler (metrics collection, stats, error reporting, video element events).
 */
function loadSelectedStream() {
  if (!Hls.isSupported()) {
    handleUnsupported();
    return;
  }

  url = $('#streamURL').val();
  setupGlobals();
  hideCanvas();

  // Dispose the previous instance and its buffer-polling timer before reload.
  if (hls) {
    hls.destroy();

    if (hls.bufferTimer) {
      clearInterval(hls.bufferTimer);
      hls.bufferTimer = undefined;
    }

    hls = null;
  }

  if (!enableStreaming) {
    logStatus('Streaming disabled');
    return;
  }

  logStatus('Loading ' + url); // Extending both a demo-specific config and the user config which can override all

  var hlsConfig = $.extend({}, hlsjsDefaults, getEditorValue({
    parse: true
  }));

  if (selectedTestStream && selectedTestStream.config) {
    console.info('[loadSelectedStream] extending hls config with stream-specific config: ', selectedTestStream.config);
    $.extend(hlsConfig, selectedTestStream.config);
    updateConfigEditorValue(hlsConfig);
  }

  onDemoConfigChanged();
  console.log('Using Hls.js config:', hlsConfig);
  // Exposed on window so the console and inline handlers can poke at it.
  window.hls = hls = new Hls(hlsConfig);
  logStatus('Loading manifest and attaching video element...');
  hls.loadSource(url);
  hls.autoLevelCapping = levelCapping;
  hls.attachMedia(video);
  hls.on(Hls.Events.MEDIA_ATTACHED, function () {
    logStatus('Media element attached');
    bufferingIdx = -1;
    events.video.push({
      time: performance.now() - events.t0,
      type: 'Media attached'
    });
    trimEventHistory();
  });
  hls.on(Hls.Events.MEDIA_DETACHED, function () {
    logStatus('Media element detached');
    bufferingIdx = -1;
    tracks = [];
    events.video.push({
      time: performance.now() - events.t0,
      type: 'Media detached'
    });
    trimEventHistory();
  });
  hls.on(Hls.Events.FRAG_PARSING_INIT_SEGMENT, function (event, data) {
    showCanvas();
    // NOTE(review): `var event` shadows the handler's `event` parameter here
    // (and in several handlers below) — harmless but worth cleaning up.
    var event = {
      time: performance.now() - events.t0,
      type: data.id + ' init segment'
    };
    events.video.push(event);
    trimEventHistory();
  });
  hls.on(Hls.Events.FRAG_PARSING_METADATA, function (event, data) {//console.log("Id3 samples ", data.samples);
  });
  hls.on(Hls.Events.LEVEL_SWITCHING, function (event, data) {
    events.level.push({
      time: performance.now() - events.t0,
      id: data.level,
      bitrate: Math.round(hls.levels[data.level].bitrate / 1000)
    });
    trimEventHistory();
    updateLevelInfo();
  });
  // First MANIFEST_PARSED handler: record the load-timing event.
  hls.on(Hls.Events.MANIFEST_PARSED, function (event, data) {
    var event = {
      type: 'manifest',
      name: '',
      start: 0,
      end: data.levels.length,
      time: data.stats.trequest - events.t0,
      latency: data.stats.tfirst - data.stats.trequest,
      load: data.stats.tload - data.stats.tfirst,
      duration: data.stats.tload - data.stats.tfirst
    };
    events.load.push(event);
    trimEventHistory();
    refreshCanvas();
  });
  // Second MANIFEST_PARSED handler: (re)initialize the stats object.
  hls.on(Hls.Events.MANIFEST_PARSED, function (event, data) {
    logStatus('No of quality levels found: ' + hls.levels.length);
    logStatus('Manifest successfully loaded');
    stats = {
      levelNb: data.levels.length,
      levelParsed: 0
    };
    trimEventHistory();
    updateLevelInfo();
  });
  hls.on(Hls.Events.AUDIO_TRACKS_UPDATED, function (event, data) {
    logStatus('No of audio tracks found: ' + data.audioTracks.length);
    updateAudioTrackInfo();
  });
  hls.on(Hls.Events.AUDIO_TRACK_SWITCHING, function (event, data) {
    logStatus('Audio track switching...');
    updateAudioTrackInfo();
    var event = {
      time: performance.now() - events.t0,
      type: 'audio switching',
      name: '@' + data.id
    };
    events.video.push(event);
    trimEventHistory();
    // Remembered so AUDIO_TRACK_SWITCHED can back-fill the switch duration.
    lastAudioTrackSwitchingIdx = events.video.length - 1;
  });
  hls.on(Hls.Events.AUDIO_TRACK_SWITCHED, function (event, data) {
    logStatus('Audio track switched');
    updateAudioTrackInfo();
    var event = {
      time: performance.now() - events.t0,
      type: 'audio switched',
      name: '@' + data.id
    };

    if (lastAudioTrackSwitchingIdx !== undefined) {
      events.video[lastAudioTrackSwitchingIdx].duration = event.time - events.video[lastAudioTrackSwitchingIdx].time;
      lastAudioTrackSwitchingIdx = undefined;
    }

    events.video.push(event);
    trimEventHistory();
  });
  hls.on(Hls.Events.LEVEL_LOADED, function (event, data) {
    events.isLive = data.details.live;
    var event = {
      type: 'level',
      id: data.level,
      start: data.details.startSN,
      end: data.details.endSN,
      time: data.stats.trequest - events.t0,
      latency: data.stats.tfirst - data.stats.trequest,
      load: data.stats.tload - data.stats.tfirst,
      parsing: data.stats.tparsed - data.stats.tload,
      duration: data.stats.tload - data.stats.tfirst
    };
    var parsingDuration = data.stats.tparsed - data.stats.tload;

    // NOTE(review): `this` inside these handlers is presumably the Hls
    // instance (hls.js invokes listeners with the instance as context);
    // running sums are stashed on it — confirm before refactoring.
    if (stats.levelParsed) {
      this.sumLevelParsingMs += parsingDuration;
    } else {
      this.sumLevelParsingMs = parsingDuration;
    }

    stats.levelParsed++;
    stats.levelParsingUs = Math.round(1000 * this.sumLevelParsingMs / stats.levelParsed); //console.log('parsing level duration :' + stats.levelParsingUs + 'us,count:' + stats.levelParsed);

    events.load.push(event);
    trimEventHistory();
    refreshCanvas();
  });
  hls.on(Hls.Events.AUDIO_TRACK_LOADED, function (event, data) {
    events.isLive = data.details.live;
    var event = {
      type: 'audio track',
      id: data.id,
      start: data.details.startSN,
      end: data.details.endSN,
      time: data.stats.trequest - events.t0,
      latency: data.stats.tfirst - data.stats.trequest,
      load: data.stats.tload - data.stats.tfirst,
      parsing: data.stats.tparsed - data.stats.tload,
      duration: data.stats.tload - data.stats.tfirst
    };
    events.load.push(event);
    trimEventHistory();
    refreshCanvas();
  });
  hls.on(Hls.Events.FRAG_BUFFERED, function (event, data) {
    // Per-fragment load/parse/buffer timings; bw is in kbps (bytes*8/ms).
    var event = {
      type: data.frag.type + ' fragment',
      id: data.frag.level,
      id2: data.frag.sn,
      time: data.stats.trequest - events.t0,
      latency: data.stats.tfirst - data.stats.trequest,
      load: data.stats.tload - data.stats.tfirst,
      parsing: data.stats.tparsed - data.stats.tload,
      buffer: data.stats.tbuffered - data.stats.tparsed,
      duration: data.stats.tbuffered - data.stats.tfirst,
      bw: Math.round(8 * data.stats.total / (data.stats.tbuffered - data.stats.trequest)),
      size: data.stats.total
    };
    events.load.push(event);
    events.bitrate.push({
      time: performance.now() - events.t0,
      bitrate: event.bw,
      duration: data.frag.duration,
      level: event.id
    });

    // Start the 100ms buffer-polling loop on the first buffered fragment.
    if (hls.bufferTimer === undefined) {
      events.buffer.push({
        time: 0,
        buffer: 0,
        pos: 0
      });
      hls.bufferTimer = window.setInterval(checkBuffer, 100);
    }

    trimEventHistory();
    refreshCanvas();
    updateLevelInfo();
    var latency = data.stats.tfirst - data.stats.trequest,
        parsing = data.stats.tparsed - data.stats.tload,
        process = data.stats.tbuffered - data.stats.trequest,
        bitrate = Math.round(8 * data.stats.length / (data.stats.tbuffered - data.stats.tfirst));

    // Min/max/avg bookkeeping; the else branch initializes on first fragment.
    if (stats.fragBuffered) {
      stats.fragMinLatency = Math.min(stats.fragMinLatency, latency);
      stats.fragMaxLatency = Math.max(stats.fragMaxLatency, latency);
      stats.fragMinProcess = Math.min(stats.fragMinProcess, process);
      stats.fragMaxProcess = Math.max(stats.fragMaxProcess, process);
      stats.fragMinKbps = Math.min(stats.fragMinKbps, bitrate);
      stats.fragMaxKbps = Math.max(stats.fragMaxKbps, bitrate);
      stats.autoLevelCappingMin = Math.min(stats.autoLevelCappingMin, hls.autoLevelCapping);
      stats.autoLevelCappingMax = Math.max(stats.autoLevelCappingMax, hls.autoLevelCapping);
      stats.fragBuffered++;
    } else {
      stats.fragMinLatency = stats.fragMaxLatency = latency;
      stats.fragMinProcess = stats.fragMaxProcess = process;
      stats.fragMinKbps = stats.fragMaxKbps = bitrate;
      stats.fragBuffered = 1;
      stats.fragBufferedBytes = 0;
      stats.autoLevelCappingMin = stats.autoLevelCappingMax = hls.autoLevelCapping;
      this.sumLatency = 0;
      this.sumKbps = 0;
      this.sumProcess = 0;
      this.sumParsing = 0;
    }

    stats.fraglastLatency = latency;
    this.sumLatency += latency;
    stats.fragAvgLatency = Math.round(this.sumLatency / stats.fragBuffered);
    stats.fragLastProcess = process;
    this.sumProcess += process;
    this.sumParsing += parsing;
    stats.fragAvgProcess = Math.round(this.sumProcess / stats.fragBuffered);
    stats.fragLastKbps = bitrate;
    this.sumKbps += bitrate;
    stats.fragAvgKbps = Math.round(this.sumKbps / stats.fragBuffered);
    stats.fragBufferedBytes += data.stats.total;
    stats.fragparsingKbps = Math.round(8 * stats.fragBufferedBytes / this.sumParsing);
    stats.fragparsingMs = Math.round(this.sumParsing);
    stats.autoLevelCappingLast = hls.autoLevelCapping;
  });
  hls.on(Hls.Events.LEVEL_SWITCHED, function (event, data) {
    var event = {
      time: performance.now() - events.t0,
      type: 'level switched',
      name: data.level
    };
    events.video.push(event);
    trimEventHistory();
    refreshCanvas();
    updateLevelInfo();
  });
  hls.on(Hls.Events.FRAG_CHANGED, function (event, data) {
    var event = {
      time: performance.now() - events.t0,
      type: 'frag changed',
      name: data.frag.sn + ' @ ' + data.frag.level
    };
    events.video.push(event);
    trimEventHistory();
    refreshCanvas();
    updateLevelInfo();
    stats.tagList = data.frag.tagList;
    var level = data.frag.level,
        autoLevel = data.frag.autoLevel;

    if (stats.levelStart === undefined) {
      stats.levelStart = level;
    }

    // Separate min/max/switch-count tracking for auto vs manual level modes.
    if (autoLevel) {
      if (stats.fragChangedAuto) {
        stats.autoLevelMin = Math.min(stats.autoLevelMin, level);
        stats.autoLevelMax = Math.max(stats.autoLevelMax, level);
        stats.fragChangedAuto++;

        if (this.levelLastAuto && level !== stats.autoLevelLast) {
          stats.autoLevelSwitch++;
        }
      } else {
        stats.autoLevelMin = stats.autoLevelMax = level;
        stats.autoLevelSwitch = 0;
        stats.fragChangedAuto = 1;
        this.sumAutoLevel = 0;
      }

      this.sumAutoLevel += level;
      stats.autoLevelAvg = Math.round(1000 * this.sumAutoLevel / stats.fragChangedAuto) / 1000;
      stats.autoLevelLast = level;
    } else {
      if (stats.fragChangedManual) {
        stats.manualLevelMin = Math.min(stats.manualLevelMin, level);
        stats.manualLevelMax = Math.max(stats.manualLevelMax, level);
        stats.fragChangedManual++;

        if (!this.levelLastAuto && level !== stats.manualLevelLast) {
          stats.manualLevelSwitch++;
        }
      } else {
        stats.manualLevelMin = stats.manualLevelMax = level;
        stats.manualLevelSwitch = 0;
        stats.fragChangedManual = 1;
      }

      stats.manualLevelLast = level;
    }

    this.levelLastAuto = autoLevel;
  });
  hls.on(Hls.Events.FRAG_LOAD_EMERGENCY_ABORTED, function (event, data) {
    if (stats) {
      if (stats.fragLoadEmergencyAborted === undefined) {
        stats.fragLoadEmergencyAborted = 1;
      } else {
        stats.fragLoadEmergencyAborted++;
      }
    }
  });
  hls.on(Hls.Events.FRAG_DECRYPTED, function (event, data) {
    if (!stats.fragDecrypted) {
      stats.fragDecrypted = 0;
      this.totalDecryptTime = 0;
      stats.fragAvgDecryptTime = 0;
    }

    stats.fragDecrypted++;
    this.totalDecryptTime += data.stats.tdecrypt - data.stats.tstart;
    stats.fragAvgDecryptTime = this.totalDecryptTime / stats.fragDecrypted;
  });
  hls.on(Hls.Events.ERROR, function (event, data) {
    console.warn('Error event:', data);

    // Per-detail user-facing messaging; fatal handling follows below.
    switch (data.details) {
      case Hls.ErrorDetails.MANIFEST_LOAD_ERROR:
        try {
          // NOTE(review): the string literals below were mangled by diff
          // extraction (original markup, likely <br> tags, was stripped,
          // splitting the literals across raw newlines) — restore before use.
          $('#errorOut').html('Cannot load ' + url + '
HTTP response code:' + data.response.code + '
' + data.response.text);

          if (data.response.code === 0) {
            $('#errorOut').append('This might be a CORS issue, consider installing Allow-Control-Allow-Origin Chrome Extension');
          }
        } catch (err) {
          // NOTE(review): same mangled-literal problem as above.
          $('#errorOut').html('Cannot load ' + url + '
Response body: ' + data.response.text);
        }

        break;

      case Hls.ErrorDetails.MANIFEST_LOAD_TIMEOUT:
        logError('Timeout while loading manifest');
        break;

      case Hls.ErrorDetails.MANIFEST_PARSING_ERROR:
        logError('Error while parsing manifest:' + data.reason);
        break;

      case Hls.ErrorDetails.LEVEL_LOAD_ERROR:
        logError('Error while loading level playlist');
        break;

      case Hls.ErrorDetails.LEVEL_LOAD_TIMEOUT:
        logError('Timeout while loading level playlist');
        break;

      case Hls.ErrorDetails.LEVEL_SWITCH_ERROR:
        logError('Error while trying to switch to level ' + data.level);
        break;

      case Hls.ErrorDetails.FRAG_LOAD_ERROR:
        logError('Error while loading fragment ' + data.frag.url);
        break;

      case Hls.ErrorDetails.FRAG_LOAD_TIMEOUT:
        logError('Timeout while loading fragment ' + data.frag.url);
        break;

      case Hls.ErrorDetails.FRAG_LOOP_LOADING_ERROR:
        logError('Fragment-loop loading error');
        break;

      case Hls.ErrorDetails.FRAG_DECRYPT_ERROR:
        logError('Decrypting error:' + data.reason);
        break;

      case Hls.ErrorDetails.FRAG_PARSING_ERROR:
        logError('Parsing error:' + data.reason);
        break;

      case Hls.ErrorDetails.KEY_LOAD_ERROR:
        logError('Error while loading key ' + data.frag.decryptdata.uri);
        break;

      case Hls.ErrorDetails.KEY_LOAD_TIMEOUT:
        logError('Timeout while loading key ' + data.frag.decryptdata.uri);
        break;

      case Hls.ErrorDetails.BUFFER_APPEND_ERROR:
        logError('Buffer append error');
        break;

      case Hls.ErrorDetails.BUFFER_ADD_CODEC_ERROR:
        logError('Buffer add codec error for ' + data.mimeType + ':' + data.err.message);
        break;

      case Hls.ErrorDetails.BUFFER_APPENDING_ERROR:
        logError('Buffer appending error');
        break;

      case Hls.ErrorDetails.BUFFER_STALLED_ERROR:
        logError('Buffer stalled error');
        break;

      default:
        break;
    }

    if (data.fatal) {
      console.error('Fatal error :' + data.details);

      switch (data.type) {
        case Hls.ErrorTypes.MEDIA_ERROR:
          handleMediaError();
          break;

        case Hls.ErrorTypes.NETWORK_ERROR:
          logError('A network error occured');
          break;

        default:
          // Unrecoverable: tear the instance down.
          logError('An unrecoverable error occured');
          hls.destroy();
          break;
      }
    }

    if (!stats) {
      stats = {};
    } // track all errors independently


    if (stats[data.details] === undefined) {
      stats[data.details] = 1;
    } else {
      stats[data.details] += 1;
    } // track fatal error


    if (data.fatal) {
      if (stats.fatalError === undefined) {
        stats.fatalError = 1;
      } else {
        stats.fatalError += 1;
      }
    }

    $('#statisticsOut').text(JSON.stringify(sortObject(stats), null, '\t'));
  });
  hls.on(Hls.Events.BUFFER_CREATED, function (event, data) {
    tracks = data.tracks;
  });
  hls.on(Hls.Events.BUFFER_APPENDING, function (event, data) {
    if (dumpfMP4) {
      fmp4Data[data.type].push(data.data);
    }
  });
  hls.on(Hls.Events.FPS_DROP, function (event, data) {
    var evt = {
      time: performance.now() - events.t0,
      type: 'frame drop',
      name: data.currentDropped + '/' + data.currentDecoded
    };
    events.video.push(evt);
    trimEventHistory();

    if (stats) {
      if (stats.fpsDropEvent === undefined) {
        stats.fpsDropEvent = 1;
      } else {
        stats.fpsDropEvent++;
      }

      stats.fpsTotalDroppedFrames = data.totalDroppedFrames;
    }
  });
  // All interesting media element events funnel into one timeline logger.
  video.addEventListener('resize', handleVideoEvent);
  video.addEventListener('seeking', handleVideoEvent);
  video.addEventListener('seeked', handleVideoEvent);
  video.addEventListener('pause', handleVideoEvent);
  video.addEventListener('play', handleVideoEvent);
  video.addEventListener('canplay', handleVideoEvent);
  video.addEventListener('canplaythrough', handleVideoEvent);
  video.addEventListener('ended', handleVideoEvent);
  video.addEventListener('playing', handleVideoEvent);
  video.addEventListener('error', handleVideoEvent);
  video.addEventListener('loadedmetadata', handleVideoEvent);
  video.addEventListener('loadeddata', handleVideoEvent);
  video.addEventListener('durationchange', handleVideoEvent);
}
+
/**
 * Shown when Hls.isSupported() is false. Firefox historically shipped
 * MediaSource behind about:config flags, so Firefox users get actionable
 * instructions; everyone else gets a generic "MSE not supported" message.
 *
 * Fix: the Firefox message was a single HTML string (with <br>/<b> markup)
 * that had been split across raw newlines — a syntax error — and is
 * reconstructed here as one literal.
 */
function handleUnsupported() {
  if (navigator.userAgent.toLowerCase().indexOf('firefox') !== -1) {
    logStatus('You are using Firefox, it looks like MediaSource is not enabled,<br>please ensure the following keys are set appropriately in <b>about:config</b><br>media.mediasource.enabled=true<br>media.mediasource.mp4.enabled=true<br><b>media.mediasource.whitelist=false</b>');
  } else {
    logStatus('Your Browser does not support MediaSourceExtension / MP4 mediasource');
  }
}
+
/**
 * Central listener for all media element events: derives a display payload
 * per event type, appends it to the events.video timeline, and back-fills
 * seek durations when a 'seeked' follows a 'seeking'.
 *
 * The switch below relies on DELIBERATE fall-throughs: the playback-state
 * cases record currentTime, and 'playing' etc. fall into the 'pause' group
 * after updating lastStartPosition. Do not add breaks without checking.
 */
function handleVideoEvent(evt) {
  var data = '';

  switch (evt.type) {
    case 'durationchange':
      if (evt.target.duration - lastDuration <= 0.5) {
        // some browsers report several duration change events with almost the same value ... avoid spamming video events
        return;
      }

      lastDuration = evt.target.duration;
      data = Math.round(evt.target.duration * 1000);
      break;

    case 'resize':
      data = evt.target.videoWidth + '/' + evt.target.videoHeight;
      break;

    case 'loadedmetadata':
    case 'loadeddata':
    case 'canplay':
    case 'canplaythrough':
    case 'ended':
    case 'seeking':
    case 'seeked':
    case 'play':
    case 'playing':
      // Remember where playback (re)started; used by checkBuffer to avoid
      // flagging "buffering" right after a start/seek.
      lastStartPosition = evt.target.currentTime;
      // intentional fall-through: these events also log currentTime below.

    case 'pause':
    case 'waiting':
    case 'stalled':
    case 'error':
      data = Math.round(evt.target.currentTime * 1000);

      if (evt.type === 'error') {
        var errorTxt,
            mediaError = evt.currentTarget.error;

        switch (mediaError.code) {
          case mediaError.MEDIA_ERR_ABORTED:
            errorTxt = 'You aborted the video playback';
            break;

          case mediaError.MEDIA_ERR_DECODE:
            errorTxt = 'The video playback was aborted due to a corruption problem or because the video used features your browser did not support';
            handleMediaError();
            break;

          case mediaError.MEDIA_ERR_NETWORK:
            errorTxt = 'A network error caused the video download to fail part-way';
            break;

          case mediaError.MEDIA_ERR_SRC_NOT_SUPPORTED:
            errorTxt = 'The video could not be loaded, either because the server or network failed or because the format is not supported';
            break;
        }

        if (mediaError.message) {
          errorTxt += ' - ' + mediaError.message;
        }

        logStatus(errorTxt);
        console.error(errorTxt);
      }

      break;

    default:
      break;
  }

  var event = {
    time: performance.now() - events.t0,
    type: evt.type,
    name: data
  };
  events.video.push(event);

  if (evt.type === 'seeking') {
    lastSeekingIdx = events.video.length - 1;
  }

  // NOTE(review): assumes a 'seeked' is always preceded by a 'seeking' in
  // this session; lastSeekingIdx would be stale/undefined otherwise.
  if (evt.type === 'seeked') {
    events.video[lastSeekingIdx].duration = event.time - events.video[lastSeekingIdx].time;
  }

  trimEventHistory();
}
+
/**
 * Attempts staged recovery from a fatal media error when auto-recovery is
 * enabled: first recoverMediaError(), then (if tried within the last 3s)
 * swapAudioCodec() + recoverMediaError(), then give up.
 */
function handleMediaError() {
  if (!autoRecoverError) {
    return;
  }

  var now = performance.now();

  if (!recoverDecodingErrorDate || now - recoverDecodingErrorDate > 3000) {
    // First attempt (or >3s since the last one): plain media-error recovery.
    recoverDecodingErrorDate = performance.now();
    $('#statusOut').append(', trying to recover media error.');
    hls.recoverMediaError();
  } else if (!recoverSwapAudioCodecDate || now - recoverSwapAudioCodecDate > 3000) {
    // Recovery failed recently: also swap the audio codec before retrying.
    recoverSwapAudioCodecDate = performance.now();
    $('#statusOut').append(', trying to swap audio codec and recover media error.');
    hls.swapAudioCodec();
    hls.recoverMediaError();
  } else {
    // Both strategies failed within the window: stop retrying.
    $('#statusOut').append(', cannot recover. Last media error recovery failed.');
  }
}
+
/**
 * Formats a TimeRanges-like object as "[start, end] [start, end] ..."
 * (each segment followed by a single space).
 *
 * @param {TimeRanges} r - Anything with length/start(i)/end(i).
 * @returns {string} The formatted ranges, '' when empty.
 */
function timeRangesToString(r) {
  var parts = [];

  for (var i = 0; i < r.length; i++) {
    parts.push('[' + r.start(i) + ', ' + r.end(i) + '] ');
  }

  return parts.join('');
}
+
/**
 * Polled every 100ms (see FRAG_BUFFERED handler): paints the buffered ranges
 * onto #bufferedCanvas, detects buffering stalls for the events.video
 * timeline, appends a buffer/position sample (with slope-based dedup), and
 * refreshes the textual buffer/statistics panels.
 */
function checkBuffer() {
  var v = $('#video')[0];
  var canvas = $('#bufferedCanvas')[0];
  var ctx = canvas.getContext('2d');
  var r = v.buffered;
  var bufferingDuration;
  ctx.fillStyle = 'black';
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  ctx.fillStyle = 'gray';

  if (r) {
    // Keep the canvas backing store in sync with the displayed width.
    if (!canvas.width || canvas.width !== v.clientWidth) {
      canvas.width = v.clientWidth;
    }

    // NOTE(review): bufferLen is declared here and again in the for-loop
    // initializer below — `var` hoisting makes it the same binding.
    var pos = v.currentTime,
        bufferLen;

    for (var i = 0, bufferLen = 0; i < r.length; i++) {
      var start = r.start(i) / v.duration * canvas.width;
      var end = r.end(i) / v.duration * canvas.width;
      ctx.fillRect(start, 3, Math.max(2, end - start), 10);

      if (pos >= r.start(i) && pos < r.end(i)) {
        // play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
        bufferLen = r.end(i) - pos;
      }
    } // check if we are in buffering / or playback ended state


    if (bufferLen <= 0.1 && v.paused === false && pos - lastStartPosition > 0.5) {
      // don't create buffering event if we are at the end of the playlist, don't report ended for live playlist
      // (deliberately empty then-branch: end-of-VOD-playlist is not buffering)
      if (lastDuration - pos <= 0.5 && events.isLive === false) {} else {
        // we are not at the end of the playlist ... real buffering
        if (bufferingIdx !== -1) {
          // Still buffering: extend the open buffering event's duration.
          bufferingDuration = performance.now() - events.t0 - events.video[bufferingIdx].time;
          events.video[bufferingIdx].duration = bufferingDuration;
          events.video[bufferingIdx].name = bufferingDuration;
        } else {
          events.video.push({
            type: 'buffering',
            time: performance.now() - events.t0
          });
          trimEventHistory(); // we are in buffering state

          bufferingIdx = events.video.length - 1;
        }
      }
    }

    if (bufferLen > 0.1 && bufferingIdx != -1) {
      // Buffer refilled: close the open buffering event.
      bufferingDuration = performance.now() - events.t0 - events.video[bufferingIdx].time;
      events.video[bufferingIdx].duration = bufferingDuration;
      events.video[bufferingIdx].name = bufferingDuration; // we are out of buffering state

      bufferingIdx = -1;
    } // update buffer/position for current Time


    var event = {
      time: performance.now() - events.t0,
      buffer: Math.round(bufferLen * 1000),
      pos: Math.round(pos * 1000)
    };
    var bufEvents = events.buffer,
        bufEventLen = bufEvents.length;

    // Drop the middle sample when three consecutive samples are (nearly)
    // collinear, to keep the timeline compact.
    if (bufEventLen > 1) {
      var event0 = bufEvents[bufEventLen - 2],
          event1 = bufEvents[bufEventLen - 1];
      var slopeBuf0 = (event0.buffer - event1.buffer) / (event0.time - event1.time);
      var slopeBuf1 = (event1.buffer - event.buffer) / (event1.time - event.time);
      var slopePos0 = (event0.pos - event1.pos) / (event0.time - event1.time);
      var slopePos1 = (event1.pos - event.pos) / (event1.time - event.time); // compute slopes. if less than 30% difference, remove event1

      if ((slopeBuf0 === slopeBuf1 || Math.abs(slopeBuf0 / slopeBuf1 - 1) <= 0.3) && (slopePos0 === slopePos1 || Math.abs(slopePos0 / slopePos1 - 1) <= 0.3)) {
        bufEvents.pop();
      }
    }

    events.buffer.push(event);
    trimEventHistory();
    refreshCanvas();
    var log = 'Duration: ' + v.duration + '\n' + 'Buffered: ' + timeRangesToString(v.buffered) + '\n' + 'Seekable: ' + timeRangesToString(v.seekable) + '\n' + 'Played: ' + timeRangesToString(v.played) + '\n';

    if (hls.media) {
      for (var type in tracks) {
        log += 'Buffer for ' + type + ' contains: ' + timeRangesToString(tracks[type].buffer.buffered) + '\n';
      }

      // Feature-detect getVideoPlaybackQuality; fall back to the WebKit count.
      var videoPlaybackQuality = v.getVideoPlaybackQuality;

      if (videoPlaybackQuality && typeof videoPlaybackQuality === typeof Function) {
        log += 'Dropped frames: ' + v.getVideoPlaybackQuality().droppedVideoFrames + '\n';
        log += 'Corrupted frames:' + v.getVideoPlaybackQuality().corruptedVideoFrames + '\n';
      } else if (v.webkitDroppedFrameCount) {
        log += 'Dropped frames:' + v.webkitDroppedFrameCount + '\n';
      }
    }

    $('#bufferedOut').text(log);
    $('#statisticsOut').text(JSON.stringify(sortObject(stats), null, '\t'));
    // Blue playhead marker at the current position.
    ctx.fillStyle = 'blue';
    var x = v.currentTime / v.duration * canvas.width;
    ctx.fillRect(x, 0, 2, 15);
  }
}
+
+function showCanvas() {
+ showMetrics();
+ $('#bufferedOut').show();
+ $('#bufferedCanvas').show();
+}
+
+function hideCanvas() {
+ hideMetrics();
+ $('#bufferedOut').hide();
+ $('#bufferedCanvas').hide();
+}
+
+function getMetrics() {
+ var json = JSON.stringify(events);
+ var jsonpacked = jsonpack.pack(json); // console.log('packing JSON from ' + json.length + ' to ' + jsonpacked.length + ' bytes');
+
+ return btoa(jsonpacked);
+}
+
+function copyMetricsToClipBoard() {
+ copyTextToClipboard(getMetrics());
+}
+
+function goToMetrics() {
+ var url = document.URL;
+ url = url.substr(0, url.lastIndexOf('/') + 1) + 'metrics.html';
+ window.open(url, '_blank');
+}
+
+function goToMetricsPermaLink() {
+ var url = document.URL;
+ var b64 = getMetrics();
+ url = url.substr(0, url.lastIndexOf('/') + 1) + 'metrics.html#data=' + b64;
+ window.open(url, '_blank');
+}
+
+function minsecs(ts) {
+ var m = Math.floor(Math.floor(ts % 3600) / 60);
+ var s = Math.floor(ts % 60);
+ return m + ':' + (s < 10 ? '0' : '') + s;
+}
+
+function onClickBufferedRange(event) {
+ var canvas = $('#bufferedCanvas')[0];
+ var v = $('#video')[0];
+ var target = (event.clientX - canvas.offsetLeft) / canvas.width * v.duration;
+ v.currentTime = target;
+}
+
+function updateLevelInfo() {
+ if (!hls.levels) {
+ return;
+ }
+
+ var button_template = '';
+ var html2 = button_template;
+
+ if (hls.autoLevelEnabled) {
+ html2 += button_enabled;
+ } else {
+ html2 += button_disabled;
+ }
+
+ html2 += 'onclick="hls.loadLevel=-1">auto';
+ var html3 = button_template;
+
+ if (hls.autoLevelCapping === -1) {
+ html3 += button_enabled;
+ } else {
+ html3 += button_disabled;
+ }
+
+ html3 += 'onclick="levelCapping=hls.autoLevelCapping=-1;updateLevelInfo();onDemoConfigChanged();">auto';
+ var html4 = button_template;
+
+ if (hls.autoLevelEnabled) {
+ html4 += button_enabled;
+ } else {
+ html4 += button_disabled;
+ }
+
+ html4 += 'onclick="hls.nextLevel=-1">auto';
+
+ for (var i = 0; i < hls.levels.length; i++) {
+ html1 += button_template;
+
+ if (hls.currentLevel === i) {
+ html1 += button_enabled;
+ } else {
+ html1 += button_disabled;
+ }
+
+ var levelName = i;
+ var label = level2label(i);
+
+ if (label) {
+ levelName += ' (' + level2label(i) + 'p)';
+ }
+
+ html1 += 'onclick="hls.currentLevel=' + i + '">' + levelName + '';
+ html2 += button_template;
+
+ if (hls.loadLevel === i) {
+ html2 += button_enabled;
+ } else {
+ html2 += button_disabled;
+ }
+
+ html2 += 'onclick="hls.loadLevel=' + i + '">' + levelName + '';
+ html3 += button_template;
+
+ if (hls.autoLevelCapping === i) {
+ html3 += button_enabled;
+ } else {
+ html3 += button_disabled;
+ }
+
+ html3 += 'onclick="levelCapping=hls.autoLevelCapping=' + i + ';updateLevelInfo();onDemoConfigChanged();">' + levelName + '';
+ html4 += button_template;
+
+ if (hls.nextLevel === i) {
+ html4 += button_enabled;
+ } else {
+ html4 += button_disabled;
+ }
+
+ html4 += 'onclick="hls.nextLevel=' + i + '">' + levelName + '';
+ }
+
+ var v = $('#video')[0];
+
+ if (v.videoWidth && v.videoHeight) {
+ $('#currentResolution').html(v.videoWidth + ' x ' + v.videoHeight);
+ }
+
+ if ($('#currentLevelControl').html() != html1) {
+ $('#currentLevelControl').html(html1);
+ }
+
+ if ($('#loadLevelControl').html() != html2) {
+ $('#loadLevelControl').html(html2);
+ }
+
+ if ($('#levelCappingControl').html() != html3) {
+ $('#levelCappingControl').html(html3);
+ }
+
+ if ($('#nextLevelControl').html() != html4) {
+ $('#nextLevelControl').html(html4);
+ }
+}
+
+function updateAudioTrackInfo() {
+ var button_template = '';
+ }
+
+ $('#audioTrackControl').html(html1);
+}
+
+function level2label(index) {
+ if (hls && hls.levels.length - 1 >= index) {
+ var level = hls.levels[index];
+
+ if (level.name) {
+ return level.name;
+ } else {
+ if (level.height) {
+ return level.height + 'p / ' + Math.round(level.bitrate / 1024) + 'kb';
+ } else {
+ if (level.bitrate) {
+ return Math.round(level.bitrate / 1024) + 'kb';
+ } else {
+ return null;
+ }
+ }
+ }
+ }
+}
+
+function getDemoConfigPropOrDefault(propName, defaultVal) {
+ return typeof demoConfig[propName] !== 'undefined' ? demoConfig[propName] : defaultVal;
+}
+
+function getURLParam(sParam, defaultValue) {
+ var sPageURL = window.location.search.substring(1);
+ var sURLVariables = sPageURL.split('&');
+
+ for (var i = 0; i < sURLVariables.length; i++) {
+ var sParameterName = sURLVariables[i].split('=');
+
+ if (sParameterName[0] == sParam) {
+ return 'undefined' == sParameterName[1] ? undefined : 'false' == sParameterName[1] ? false : sParameterName[1];
+ }
+ }
+
+ return defaultValue;
+}
+
+function onDemoConfigChanged() {
+ demoConfig = {
+ enableStreaming: enableStreaming,
+ autoRecoverError: autoRecoverError,
+ dumpfMP4: dumpfMP4,
+ levelCapping: levelCapping,
+ limitMetrics: limitMetrics
+ };
+
+ if (configPersistenceEnabled) {
+ persistEditorValue();
+ }
+
+ var serializedDemoConfig = btoa(JSON.stringify(demoConfig));
+ var baseURL = document.URL.split('?')[0];
+ var streamURL = $('#streamURL').val();
+ var permalinkURL = baseURL + "?src=" + encodeURIComponent(streamURL) + "&demoConfig=" + serializedDemoConfig;
+ $('#StreamPermalink').html("" + permalinkURL + "");
+}
+
+function onConfigPersistenceChanged(event) {
+ configPersistenceEnabled = event.target.checked;
+ localStorage.setItem(STORAGE_KEYS.Editor_Persistence, JSON.stringify(configPersistenceEnabled));
+
+ if (configPersistenceEnabled) {
+ persistEditorValue();
+ } else {
+ localStorage.removeItem(STORAGE_KEYS.Hls_Config);
+ }
+}
+
+function getEditorValue(options) {
+ options = $.extend({
+ parse: false
+ }, options || {});
+ var value = configEditor.session.getValue();
+
+ if (options.parse) {
+ try {
+ value = JSON.parse(value);
+ } catch (e) {
+ console.warn('[getEditorValue] could not parse editor value', e);
+ value = {};
+ }
+ }
+
+ return value;
+}
+
+function getPersistedHlsConfig() {
+ var value = localStorage.getItem(STORAGE_KEYS.Hls_Config);
+
+ if (value === null) {
+ return value;
+ }
+
+ try {
+ value = JSON.parse(value);
+ } catch (e) {
+ console.warn('[getPersistedHlsConfig] could not hls config json', e);
+ value = {};
+ }
+
+ return value;
+}
+
+function persistEditorValue() {
+ localStorage.setItem(STORAGE_KEYS.Hls_Config, getEditorValue());
+}
+
+function setupConfigEditor() {
+ configEditor = ace.edit('config-editor');
+ configEditor.setTheme('ace/theme/github');
+ configEditor.session.setMode('ace/mode/json');
+ var contents = hlsjsDefaults;
+ var shouldRestorePersisted = JSON.parse(localStorage.getItem(STORAGE_KEYS.Editor_Persistence)) === true;
+
+ if (shouldRestorePersisted) {
+ $.extend(contents, getPersistedHlsConfig());
+ }
+
+ var elPersistence = document.querySelector('#configPersistence');
+ elPersistence.addEventListener('change', onConfigPersistenceChanged);
+ elPersistence.checked = shouldRestorePersisted;
+ configPersistenceEnabled = shouldRestorePersisted;
+ updateConfigEditorValue(contents);
+}
+
+function updateConfigEditorValue(obj) {
+ var json = JSON.stringify(obj, null, 2);
+ configEditor.session.setValue(json);
+}
+
+function applyConfigEditorValue() {
+ onDemoConfigChanged();
+ loadSelectedStream();
+}
+
+function createfMP4(type) {
+ if (fmp4Data[type].length) {
+ var blob = new Blob([arrayConcat(fmp4Data[type])], {
+ type: 'application/octet-stream'
+ });
+ var filename = type + '-' + new Date().toISOString() + '.mp4';
+ saveAs(blob, filename); //$('body').append('Download ' + filename + ' track
');
+ }
+}
+
+function arrayConcat(inputArray) {
+ var totalLength = inputArray.reduce(function (prev, cur) {
+ return prev + cur.length;
+ }, 0);
+ var result = new Uint8Array(totalLength);
+ var offset = 0;
+ inputArray.forEach(function (element) {
+ result.set(element, offset);
+ offset += element.length;
+ });
+ return result;
+}
+
+function hideAllTabs() {
+ $('#playbackControlTab').hide();
+ $('#qualityLevelControlTab').hide();
+ $('#audioTrackControlTab').hide();
+ $('#metricsDisplayTab').hide();
+ $('#statsDisplayTab').hide();
+}
+
+function toggleTab(tabElId) {
+ hideAllTabs();
+ hideMetrics();
+ $('#' + tabElId).show();
+}
+
+function appendLog(textElId, message) {
+ var el = $('#' + textElId);
+ var logText = el.text();
+
+ if (logText.length) {
+ logText += '\n';
+ }
+
+ var timestamp = (Date.now() - startTime) / 1000;
+ var newMessage = timestamp + ' | ' + message;
+ logText += newMessage; // update
+
+ el.text(logText);
+}
+
+function logStatus(message) {
+ appendLog('statusOut', message);
+}
+
+function logError(message) {
+ appendLog('errorOut', message);
+}
+
+/***/ }),
+
+/***/ "./tests/test-streams.js":
+/*!*******************************!*\
+ !*** ./tests/test-streams.js ***!
+ \*******************************/
+/*! no static exports found */
+/*! ModuleConcatenation bailout: Module is not an ECMAScript module */
+/***/ (function(module, exports) {
+
+/**
+ * Create test stream
+ * @param {string} url
+ * @param {string} description
+ * @param {boolean} [live]
+ * @param {boolean} [abr]
+ * @param {string[]} [blacklist_ua]
+ * @returns {{url: string, description: string, live: boolean, abr: boolean, blacklist_ua: string[]}}
+ */
+function createTestStream(url, description, live, abr, blacklist_ua) {
+ if (live === void 0) {
+ live = false;
+ }
+
+ if (abr === void 0) {
+ abr = true;
+ }
+
+ if (blacklist_ua === void 0) {
+ blacklist_ua = [];
+ }
+
+ return {
+ url: url,
+ description: description,
+ live: live,
+ abr: abr,
+ blacklist_ua: blacklist_ua
+ };
+}
+/**
+ * @param {Object} target
+ * @param {Object} [config]
+ * @returns {{url: string, description: string, live: boolean, abr: boolean, blacklist_ua: string[]}}
+ */
+
+
+function createTestStreamWithConfig(target, config) {
+ if (typeof target !== 'object') {
+ throw new Error('target should be object');
+ }
+
+ var testStream = createTestStream(target.url, target.description, target.live, target.abr, target.blacklist_ua);
+ testStream.config = config;
+ return testStream;
+}
+
+module.exports = {
+ bbb: createTestStreamWithConfig({
+ url: 'https://test-streams.mux.dev/x36xhzz/x36xhzz.m3u8',
+ description: 'Big Buck Bunny - adaptive qualities'
+ }, {
+ // try to workaround test failing because of slow seek on Chrome/Win10
+ nudgeMaxRetry: 5
+ }),
+ bigBuckBunny480p: {
+ 'url': 'https://test-streams.mux.dev/x36xhzz/url_6/193039199_mp4_h264_aac_hq_7.m3u8',
+ 'description': 'Big Buck Bunny - 480p only',
+ 'live': false,
+ 'abr': false,
+ 'blacklist_ua': ['internet explorer']
+ },
+ arte: {
+ 'url': 'https://test-streams.mux.dev/test_001/stream.m3u8',
+ 'description': 'ARTE China,ABR',
+ 'live': false,
+ 'abr': true
+ },
+ deltatreDAI: {
+ 'url': 'https://test-streams.mux.dev/dai-discontinuity-deltatre/manifest.m3u8',
+ 'description': 'Ad-insertion in event stream',
+ 'live': false,
+ 'abr': false,
+ 'blacklist_ua': ['internet explorer']
+ },
+ issue666: {
+ 'url': 'https://test-streams.mux.dev/issue666/playlists/cisq0gim60007xzvi505emlxx.m3u8',
+ 'description': 'hls.js/issues/666',
+ 'live': false,
+ 'abr': false,
+ 'blacklist_ua': ['internet explorer']
+ },
+
+ /* // went offline for us :( would be good to replace this for regression test with something mimicking the issue
+ issue649: {
+ 'url': 'https://cdn3.screen9.com/media/c/W/cW87csHkxsgu5TV1qs78aA_auto_hls.m3u8?auth=qlUjeCtbVdtkDfZYrtveTIVUXX1yuSqgF8wfWabzKpX72r-d5upW88-FHuyRRdnZA_1PKRTGAtTt_6Z-aj22kw',
+ 'description': 'hls.js/issues/649',
+ 'live': false,
+ 'abr': false
+ },
+ */
+ closedCaptions: {
+ 'url': 'https://playertest.longtailvideo.com/adaptive/captions/playlist.m3u8',
+ 'description': 'CNN special report, with CC',
+ 'live': false,
+ 'abr': false,
+ 'blacklist_ua': ['safari']
+ },
+ oceansAES: {
+ 'url': 'https://playertest.longtailvideo.com/adaptive/oceans_aes/oceans_aes.m3u8',
+ 'description': 'AES encrypted,ABR',
+ 'live': false,
+ 'abr': true
+ },
+
+ /*
+ bbbAES: {
+ 'url': 'https://test-streams.mux.dev/bbbAES/playlists/sample_aes/index.m3u8',
+ 'description': 'SAMPLE-AES encrypted',
+ 'live': false,
+ 'abr': false
+ },
+ */
+ mp3Audio: {
+ 'url': 'https://player.webvideocore.net/CL1olYogIrDWvwqiIKK7eLBkzvO18gwo9ERMzsyXzwt_t-ya8ygf2kQBZww38JJT/8i4vvznv8408.m3u8',
+ 'description': 'MP3 VOD demo',
+ 'live': false,
+ 'abr': false,
+ 'blacklist_ua': ['safari']
+ },
+ mpegAudioOnly: {
+ 'url': 'https://pl.streamingvideoprovider.com/mp3-playlist/playlist.m3u8',
+ 'description': 'MPEG Audio Only demo',
+ 'live': false,
+ 'abr': false,
+ 'blacklist_ua': ['internet explorer', 'MicrosoftEdge', 'safari', 'firefox']
+ },
+ fmp4: {
+ 'url': 'https://storage.googleapis.com/shaka-demo-assets/angel-one-hls/hls.m3u8',
+ 'description': 'HLS fMP4 Angel-One multiple audio-tracks',
+ 'live': false,
+ 'abr': false,
+ 'blacklist_ua': ['safari', 'internet explorer']
+ },
+ fmp4Bitmovin: {
+ 'url': 'https://bitdash-a.akamaihd.net/content/MI201109210084_1/m3u8s-fmp4/f08e80da-bf1d-4e3d-8899-f0f6155f6efa.m3u8',
+ 'description': 'HLS fMP4 by Bitmovin',
+ 'live': false,
+ 'abr': true,
+ 'blacklist_ua': ['safari', 'internet explorer']
+ },
+ offset_pts: {
+ 'url': 'https://test-streams.mux.dev/pts_shift/master.m3u8',
+ 'description': 'DK Turntable, PTS shifted by 2.3s',
+ 'live': false,
+ 'abr': false
+ },
+
+ /*
+ uspHLSAteam: createTestStream(
+ 'http://demo.unified-streaming.com/video/ateam/ateam.ism/ateam.m3u8?session_id=27199',
+ 'A-Team movie trailer - HLS by Unified Streaming Platform'
+ ),
+ */
+ angelOneShakaWidevine: createTestStreamWithConfig({
+ url: 'https://storage.googleapis.com/shaka-demo-assets/angel-one-widevine-hls/hls.m3u8',
+ description: 'Shaka-packager Widevine DRM (EME) HLS-fMP4 - Angel One Demo',
+ blacklist_ua: ['firefox', 'safari', 'internet explorer']
+ }, {
+ widevineLicenseUrl: 'http://cwip-shaka-proxy.appspot.com/no_auth',
+ emeEnabled: true
+ }),
+ audioOnlyMultipleLevels: {
+ 'url': 'https://s3.amazonaws.com/qa.jwplayer.com/~alex/121628/new_master.m3u8',
+ 'description': 'Multiple non-alternate audio levels',
+ 'live': false,
+ 'abr': false
+ },
+ pdtDuplicate: {
+ url: 'https://playertest.longtailvideo.com/adaptive/artbeats/manifest.m3u8',
+ description: 'Stream with duplicate sequential PDT values'
+ },
+ pdtLargeGap: {
+ url: 'https://playertest.longtailvideo.com/adaptive/boxee/playlist.m3u8',
+ description: 'PDTs with large gaps following discontinuities'
+ },
+ pdtBadValues: {
+ url: 'https://playertest.longtailvideo.com/adaptive/progdatime/playlist2.m3u8',
+ description: 'PDTs with bad values'
+ },
+ pdtOneValue: {
+ url: 'https://playertest.longtailvideo.com/adaptive/aviion/manifest.m3u8',
+ description: 'One PDT, no discontinuities'
+ },
+ noTrackIntersection: {
+ url: 'https://s3.amazonaws.com/qa.jwplayer.com/~alex/123633/new_master.m3u8',
+ description: 'Audio/video track PTS values do not intersect; 10 second start gap'
+ },
+ // altAudioNoVideoCodecSignaled: {
+ // url: 'https://d35u71x3nb8v2y.cloudfront.net/4b711b97-513c-4d36-ad29-298ab23a2e5e/3cbf1114-b2f4-4320-afb3-f0f7eeeb8630/playlist.m3u8',
+ // description: 'Alternate audio track, but no video codec is signaled in the master manifest'
+ // },
+ altAudioAndTracks: {
+ url: 'https://wowzaec2demo.streamlock.net/vod-multitrack/_definst_/smil:ElephantsDream/elephantsdream2.smil/playlist.m3u',
+ description: 'Alternate audio tracks, and multiple VTT tracks'
+ },
+ altAudioWithPdtAndStartGap: {
+ url: 'https://playertest.longtailvideo.com/adaptive/hls-test-streams/test-audio-pdt/playlist.m3u8',
+ description: 'PDT before each segment, 1.59s start gap',
+ abr: true,
+ startSeek: true
+ }
+};
+
+/***/ })
+
+/******/ })["default"];
+});
+//# sourceMappingURL=hls-demo.js.map
\ No newline at end of file
diff --git a/dist/hls.js b/dist/hls.js
index a8cb326475f..bf54b61b9aa 100644
--- a/dist/hls.js
+++ b/dist/hls.js
@@ -828,6 +828,18 @@ module.exports = function (moduleId, options) {
}
+/***/ }),
+
+/***/ "./src/controller/stream-controller.js":
+/*!*********************************************!*\
+ !*** ./src/controller/stream-controller.js ***!
+ \*********************************************/
+/*! no static exports found */
+/*! ModuleConcatenation bailout: Module is not an ECMAScript module */
+/***/ (function(module, exports) {
+
+throw new Error("Module build failed (from ./node_modules/babel-loader/lib/index.js):\nSyntaxError: /Users/savlan/work/hls.js/src/controller/stream-controller.js: Support for the experimental syntax 'optionalChaining' isn't currently enabled (102:44):\n\n\u001b[0m \u001b[90m 100 | \u001b[39m \u001b[36mbreak\u001b[39m\u001b[33m;\u001b[39m\u001b[0m\n\u001b[0m \u001b[90m 101 | \u001b[39m \u001b[36mcase\u001b[39m \u001b[33mState\u001b[39m\u001b[33m.\u001b[39m\u001b[33mWAITING_LEVEL\u001b[39m\u001b[33m:\u001b[39m\u001b[0m\n\u001b[0m\u001b[31m\u001b[1m>\u001b[22m\u001b[39m\u001b[90m 102 | \u001b[39m \u001b[36mvar\u001b[39m details \u001b[33m=\u001b[39m \u001b[36mthis\u001b[39m\u001b[33m.\u001b[39mlevels[\u001b[36mthis\u001b[39m\u001b[33m.\u001b[39mlevel]\u001b[33m?\u001b[39m\u001b[33m.\u001b[39mdetails\u001b[33m;\u001b[39m\u001b[0m\n\u001b[0m \u001b[90m | \u001b[39m \u001b[31m\u001b[1m^\u001b[22m\u001b[39m\u001b[0m\n\u001b[0m \u001b[90m 103 | \u001b[39m \u001b[90m// check if playlist is already loaded (must be current level for live)\u001b[39m\u001b[0m\n\u001b[0m \u001b[90m 104 | \u001b[39m \u001b[36mif\u001b[39m (details \u001b[33m&&\u001b[39m (\u001b[33m!\u001b[39mdetails\u001b[33m.\u001b[39mlive \u001b[33m||\u001b[39m \u001b[36mthis\u001b[39m\u001b[33m.\u001b[39mlevelLastLoaded \u001b[33m===\u001b[39m \u001b[36mthis\u001b[39m\u001b[33m.\u001b[39mlevel)) {\u001b[0m\n\u001b[0m \u001b[90m 105 | \u001b[39m \u001b[36mthis\u001b[39m\u001b[33m.\u001b[39mstate \u001b[33m=\u001b[39m \u001b[33mState\u001b[39m\u001b[33m.\u001b[39m\u001b[33mIDLE\u001b[39m\u001b[33m;\u001b[39m\u001b[0m\n\nAdd @babel/plugin-proposal-optional-chaining (https://git.io/vb4Sk) to the 'plugins' section of your Babel config to enable transformation.\n at Parser.raise (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:6322:17)\n at Parser.expectPlugin (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:7643:18)\n at Parser.parseSubscript 
(/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8420:12)\n at Parser.parseSubscripts (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8406:19)\n at Parser.parseExprSubscripts (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8395:17)\n at Parser.parseMaybeUnary (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8365:21)\n at Parser.parseExprOps (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8252:23)\n at Parser.parseMaybeConditional (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8225:23)\n at Parser.parseMaybeAssign (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8172:21)\n at Parser.parseVar (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10415:26)\n at Parser.parseVarStatement (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10234:10)\n at Parser.parseStatementContent (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9830:21)\n at Parser.parseStatement (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9763:17)\n at Parser.parseSwitchStatement (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10170:36)\n at Parser.parseStatementContent (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9814:21)\n at Parser.parseStatement (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9763:17)\n at Parser.parseBlockOrModuleBlockBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10340:25)\n at Parser.parseBlockBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10327:10)\n at Parser.parseBlock (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10311:10)\n at Parser.parseFunctionBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9382:24)\n at Parser.parseFunctionBodyAndFinish (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9352:10)\n at 
Parser.parseMethod (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9306:10)\n at Parser.pushClassMethod (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10743:30)\n at Parser.parseClassMemberWithIsStatic (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10668:12)\n at Parser.parseClassMember (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10607:10)\n at /Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10562:14\n at Parser.withTopicForbiddingContext (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9657:14)\n at Parser.parseClassBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10539:10)\n at Parser.parseClass (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10513:22)\n at Parser.parseStatementContent (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9805:21)\n at Parser.parseStatement (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9763:17)\n at Parser.parseBlockOrModuleBlockBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10340:25)\n at Parser.parseBlockBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10327:10)\n at Parser.parseTopLevel (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9692:10)\n at Parser.parse (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:11209:17)\n at parse (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:11245:38)\n at parser (/Users/savlan/work/hls.js/node_modules/@babel/core/lib/transformation/normalize-file.js:170:34)\n at normalizeFile (/Users/savlan/work/hls.js/node_modules/@babel/core/lib/transformation/normalize-file.js:138:11)\n at runSync (/Users/savlan/work/hls.js/node_modules/@babel/core/lib/transformation/index.js:44:43)\n at runAsync (/Users/savlan/work/hls.js/node_modules/@babel/core/lib/transformation/index.js:35:14)\n at 
/Users/savlan/work/hls.js/node_modules/@babel/core/lib/transform.js:34:34\n at processTicksAndRejections (internal/process/task_queues.js:79:11)");
+
/***/ }),
/***/ "./src/crypt/decrypter.js":
@@ -2364,6 +2376,7 @@ function () {
// import Hex from '../utils/hex';
+
// We are using fixed track IDs for driving the MP4 remuxer
// instead of following the TS PIDs.
// There is no reason not to do this and some browsers/SourceBuffer-demuxers
@@ -2545,7 +2558,7 @@ function () {
switch (pid) {
case avcId:
if (stt) {
- if (avcData && (pes = parsePES(avcData)) && pes.pts !== undefined) {
+ if (avcData && (pes = parsePES(avcData))) {
parseAVCPES(pes, false);
}
@@ -2564,7 +2577,7 @@ function () {
case audioId:
if (stt) {
- if (audioData && (pes = parsePES(audioData)) && pes.pts !== undefined) {
+ if (audioData && (pes = parsePES(audioData))) {
if (audioTrack.isAAC) {
parseAACPES(pes);
} else {
@@ -2587,7 +2600,7 @@ function () {
case id3Id:
if (stt) {
- if (id3Data && (pes = parsePES(id3Data)) && pes.pts !== undefined) {
+ if (id3Data && (pes = parsePES(id3Data))) {
parseID3PES(pes);
}
@@ -2672,7 +2685,7 @@ function () {
} // try to parse last PES packets
- if (avcData && (pes = parsePES(avcData)) && pes.pts !== undefined) {
+ if (avcData && (pes = parsePES(avcData))) {
parseAVCPES(pes, true);
avcTrack.pesData = null;
} else {
@@ -2680,7 +2693,7 @@ function () {
avcTrack.pesData = avcData;
}
- if (audioData && (pes = parsePES(audioData)) && pes.pts !== undefined) {
+ if (audioData && (pes = parsePES(audioData))) {
if (audioTrack.isAAC) {
parseAACPES(pes);
} else {
@@ -2697,7 +2710,7 @@ function () {
audioTrack.pesData = audioData;
}
- if (id3Data && (pes = parsePES(id3Data)) && pes.pts !== undefined) {
+ if (id3Data && (pes = parsePES(id3Data))) {
parseID3PES(pes);
id3Track.pesData = null;
} else {
@@ -2924,6 +2937,11 @@ function () {
pesHdrLen = frag[8]; // 9 bytes : 6 bytes for PES header + 3 bytes for PES extension
payloadStartOffset = pesHdrLen + 9;
+
+ if (stream.size <= payloadStartOffset) {
+ return null;
+ }
+
stream.size -= payloadStartOffset; // reassemble PES packet
pesData = new Uint8Array(stream.size);
@@ -2968,11 +2986,24 @@ function () {
_proto.pushAccesUnit = function pushAccesUnit(avcSample, avcTrack) {
if (avcSample.units.length && avcSample.frame) {
var samples = avcTrack.samples;
- var nbSamples = samples.length; // only push AVC sample if starting with a keyframe is not mandatory OR
+ var nbSamples = samples.length; // if sample does not have PTS/DTS, patch with last sample PTS/DTS
+
+ if (isNaN(avcSample.pts)) {
+ if (nbSamples) {
+ var lastSample = samples[nbSamples - 1];
+ avcSample.pts = lastSample.pts;
+ avcSample.dts = lastSample.dts;
+ } else {
+ // dropping samples, no timestamp found
+ avcTrack.dropped++;
+ return;
+ }
+ } // only push AVC sample if starting with a keyframe is not mandatory OR
// if keyframe already found in this fragment OR
// keyframe found in last fragment (track.sps) AND
// samples already appended (we already found a keyframe in this fragment) OR fragment is contiguous
+
if (!this.config.forceKeyFrameOnDiscontinuity || avcSample.key === true || avcTrack.sps && (nbSamples || this.contiguous)) {
avcSample.id = nbSamples;
samples.push(avcSample);
@@ -3141,7 +3172,6 @@ function () {
if (payloadSize > 16) {
var uuidStrArray = [];
- var userDataPayloadBytes = [];
for (i = 0; i < 16; i++) {
uuidStrArray.push(expGolombDecoder.readUByte().toString(16));
@@ -3151,16 +3181,19 @@ function () {
}
}
- for (i = 16; i < payloadSize; i++) {
- userDataPayloadBytes.push(expGolombDecoder.readUByte());
+ var length = payloadSize - 16;
+ var userDataPayloadBytes = new Uint8Array(length);
+
+ for (i = 0; i < length; i++) {
+ userDataPayloadBytes[i] = expGolombDecoder.readUByte();
}
_this._insertSampleInOrder(_this._txtTrack.samples, {
pts: pes.pts,
payloadType: payloadType,
uuid: uuidStrArray.join(''),
- userData: String.fromCharCode.apply(null, userDataPayloadBytes),
- userDataBytes: userDataPayloadBytes
+ userDataBytes: userDataPayloadBytes,
+ userData: Object(id3["utf8ArrayToStr"])(userDataPayloadBytes.buffer)
});
}
} else if (payloadSize < expGolombDecoder.bytesAvailable) {
@@ -3535,18 +3568,21 @@ function () {
while (offset < len) {
- if (isHeader(data, offset) && offset + 5 < len) {
- var frame = appendFrame(track, data, offset, pts, frameIndex);
+ if (isHeader(data, offset)) {
+ if (offset + 5 < len) {
+ var frame = appendFrame(track, data, offset, pts, frameIndex);
- if (frame) {
- // logger.log(`${Math.round(frame.sample.pts)} : AAC`);
- offset += frame.length;
- stamp = frame.sample.pts;
- frameIndex++;
- } else {
- // logger.log('Unable to parse AAC frame');
- break;
- }
+ if (frame) {
+ offset += frame.length;
+ stamp = frame.sample.pts;
+ frameIndex++;
+ continue;
+ }
+ } // We are at an ADTS header, but do not have enough data for a frame
+ // Remaining data will be added to aacOverFlow
+
+
+ break;
} else {
// nothing found, keep looking
offset++;
@@ -6440,11 +6476,13 @@ var ErrorDetails;
"use strict";
__webpack_require__.r(__webpack_exports__);
+var _HlsEvents;
+
/**
* @readonly
* @enum {string}
*/
-var HlsEvents = {
+var HlsEvents = (_HlsEvents = {
// fired before MediaSource is attaching to media element - data: { media }
MEDIA_ATTACHING: 'hlsMediaAttaching',
// fired when MediaSource has been succesfully attached to media element - data: { }
@@ -6549,16 +6587,17 @@ var HlsEvents = {
KEY_LOADED: 'hlsKeyLoaded',
// fired upon stream controller state transitions - data: { previousState, nextState }
STREAM_STATE_TRANSITION: 'hlsStreamStateTransition'
-};
+}, _HlsEvents["LIVE_BACK_BUFFER_REACHED"] = 'hlsLiveBackBufferReached', _HlsEvents);
/* harmony default export */ __webpack_exports__["default"] = (HlsEvents);
/***/ }),
/***/ "./src/hls.ts":
/*!*********************************!*\
- !*** ./src/hls.ts + 50 modules ***!
+ !*** ./src/hls.ts + 48 modules ***!
\*********************************/
/*! exports provided: default */
+/*! ModuleConcatenation bailout: Cannot concat with ./src/controller/stream-controller.js (<- Module is not an ECMAScript module) */
/*! ModuleConcatenation bailout: Cannot concat with ./src/crypt/decrypter.js because of ./src/demux/demuxer-worker.js */
/*! ModuleConcatenation bailout: Cannot concat with ./src/demux/demuxer-inline.js because of ./src/demux/demuxer-worker.js */
/*! ModuleConcatenation bailout: Cannot concat with ./src/demux/id3.js because of ./src/demux/demuxer-worker.js */
@@ -8843,5982 +8882,4194 @@ function (_EventHandler) {
return FragmentTracker;
}(event_handler);
-// CONCATENATED MODULE: ./src/utils/binary-search.ts
-var BinarySearch = {
- /**
- * Searches for an item in an array which matches a certain condition.
- * This requires the condition to only match one item in the array,
- * and for the array to be ordered.
- *
- * @param {Array} list The array to search.
- * @param {BinarySearchComparison} comparisonFn
- * Called and provided a candidate item as the first argument.
- * Should return:
- * > -1 if the item should be located at a lower index than the provided item.
- * > 1 if the item should be located at a higher index than the provided item.
- * > 0 if the item is the item you're looking for.
- *
- * @return {T | null} The object if it is found or null otherwise.
- */
- search: function search(list, comparisonFn) {
- var minIndex = 0;
- var maxIndex = list.length - 1;
- var currentIndex = null;
- var currentElement = null;
+// EXTERNAL MODULE: ./src/controller/stream-controller.js
+var stream_controller = __webpack_require__("./src/controller/stream-controller.js");
+var stream_controller_default = /*#__PURE__*/__webpack_require__.n(stream_controller);
+
+// CONCATENATED MODULE: ./src/controller/level-helper.js
+
+
- while (minIndex <= maxIndex) {
- currentIndex = (minIndex + maxIndex) / 2 | 0;
- currentElement = list[currentIndex];
- var comparisonResult = comparisonFn(currentElement);
- if (comparisonResult > 0) {
- minIndex = currentIndex + 1;
- } else if (comparisonResult < 0) {
- maxIndex = currentIndex - 1;
- } else {
- return currentElement;
- }
- }
- return null;
- }
-};
-/* harmony default export */ var binary_search = (BinarySearch);
-// CONCATENATED MODULE: ./src/utils/buffer-helper.ts
/**
- * @module BufferHelper
+ * @module LevelHelper
*
- * Providing methods dealing with buffer length retrieval for example.
+ * Providing methods dealing with playlist sliding and drift
*
- * In general, a helper around HTML5 MediaElement TimeRanges gathered from `buffered` property.
+ * TODO: Create an actual `Level` class/model that deals with all this logic in an object-oriented-manner.
*
- * Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered
-*/
-var BufferHelper =
-/*#__PURE__*/
-function () {
- function BufferHelper() {}
-
- /**
- * Return true if `media`'s buffered include `position`
- * @param {Bufferable} media
- * @param {number} position
- * @returns {boolean}
- */
- BufferHelper.isBuffered = function isBuffered(media, position) {
- try {
- if (media) {
- var buffered = media.buffered;
+ * */
- for (var i = 0; i < buffered.length; i++) {
- if (position >= buffered.start(i) && position <= buffered.end(i)) {
- return true;
- }
- }
+function addGroupId(level, type, id) {
+ switch (type) {
+ case 'audio':
+ if (!level.audioGroupIds) {
+ level.audioGroupIds = [];
}
- } catch (error) {// this is to catch
- // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
- // This SourceBuffer has been removed from the parent media source
- }
-
- return false;
- };
-
- BufferHelper.bufferInfo = function bufferInfo(media, pos, maxHoleDuration) {
- try {
- if (media) {
- var vbuffered = media.buffered;
- var buffered = [];
- var i;
- for (i = 0; i < vbuffered.length; i++) {
- buffered.push({
- start: vbuffered.start(i),
- end: vbuffered.end(i)
- });
- }
+ level.audioGroupIds.push(id);
+ break;
- return this.bufferedInfo(buffered, pos, maxHoleDuration);
+ case 'text':
+ if (!level.textGroupIds) {
+ level.textGroupIds = [];
}
- } catch (error) {// this is to catch
- // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
- // This SourceBuffer has been removed from the parent media source
- }
- return {
- len: 0,
- start: pos,
- end: pos,
- nextStart: undefined
- };
- };
+ level.textGroupIds.push(id);
+ break;
+ }
+}
+function updatePTS(fragments, fromIdx, toIdx) {
+ var fragFrom = fragments[fromIdx],
+ fragTo = fragments[toIdx],
+ fragToPTS = fragTo.startPTS; // if we know startPTS[toIdx]
- BufferHelper.bufferedInfo = function bufferedInfo(buffered, pos, maxHoleDuration) {
- // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
- buffered.sort(function (a, b) {
- var diff = a.start - b.start;
+ if (Object(number_isFinite["isFiniteNumber"])(fragToPTS)) {
+ // update fragment duration.
+ // it helps to fix drifts between playlist reported duration and fragment real duration
+ if (toIdx > fromIdx) {
+ fragFrom.duration = fragToPTS - fragFrom.start;
- if (diff) {
- return diff;
- } else {
- return b.end - a.end;
+ if (fragFrom.duration < 0) {
+ logger["logger"].warn("negative duration computed for frag " + fragFrom.sn + ",level " + fragFrom.level + ", there should be some duration drift between playlist and fragment!");
}
- });
- var buffered2 = []; // there might be some small holes between buffer time range
- // consider that holes smaller than maxHoleDuration are irrelevant and build another
- // buffer time range representations that discards those holes
+ } else {
+ fragTo.duration = fragFrom.start - fragToPTS;
- for (var i = 0; i < buffered.length; i++) {
- var buf2len = buffered2.length;
-
- if (buf2len) {
- var buf2end = buffered2[buf2len - 1].end; // if small hole (value between 0 or maxHoleDuration ) or overlapping (negative)
-
- if (buffered[i].start - buf2end < maxHoleDuration) {
- // merge overlapping time ranges
- // update lastRange.end only if smaller than item.end
- // e.g. [ 1, 15] with [ 2,8] => [ 1,15] (no need to modify lastRange.end)
- // whereas [ 1, 8] with [ 2,15] => [ 1,15] ( lastRange should switch from [1,8] to [1,15])
- if (buffered[i].end > buf2end) {
- buffered2[buf2len - 1].end = buffered[i].end;
- }
- } else {
- // big hole
- buffered2.push(buffered[i]);
- }
- } else {
- // first value
- buffered2.push(buffered[i]);
+ if (fragTo.duration < 0) {
+ logger["logger"].warn("negative duration computed for frag " + fragTo.sn + ",level " + fragTo.level + ", there should be some duration drift between playlist and fragment!");
}
}
+ } else {
+ // we dont know startPTS[toIdx]
+ if (toIdx > fromIdx) {
+ fragTo.start = fragFrom.start + fragFrom.duration;
+ } else {
+ fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0);
+ }
+ }
+}
+function updateFragPTSDTS(details, frag, startPTS, endPTS, startDTS, endDTS) {
+ // update frag PTS/DTS
+ var maxStartPTS = startPTS;
- var bufferLen = 0; // bufferStartNext can possibly be undefined based on the conditional logic below
+ if (Object(number_isFinite["isFiniteNumber"])(frag.startPTS)) {
+ // delta PTS between audio and video
+ var deltaPTS = Math.abs(frag.startPTS - startPTS);
- var bufferStartNext; // bufferStart and bufferEnd are buffer boundaries around current video position
+ if (!Object(number_isFinite["isFiniteNumber"])(frag.deltaPTS)) {
+ frag.deltaPTS = deltaPTS;
+ } else {
+ frag.deltaPTS = Math.max(deltaPTS, frag.deltaPTS);
+ }
- var bufferStart = pos;
- var bufferEnd = pos;
+ maxStartPTS = Math.max(startPTS, frag.startPTS);
+ startPTS = Math.min(startPTS, frag.startPTS);
+ endPTS = Math.max(endPTS, frag.endPTS);
+ startDTS = Math.min(startDTS, frag.startDTS);
+ endDTS = Math.max(endDTS, frag.endDTS);
+ }
- for (var _i = 0; _i < buffered2.length; _i++) {
- var start = buffered2[_i].start,
- end = buffered2[_i].end; // logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
+ var drift = startPTS - frag.start;
+ frag.start = frag.startPTS = startPTS;
+ frag.maxStartPTS = maxStartPTS;
+ frag.endPTS = endPTS;
+ frag.startDTS = startDTS;
+ frag.endDTS = endDTS;
+ frag.duration = endPTS - startPTS;
+ var sn = frag.sn; // exit if sn out of range
- if (pos + maxHoleDuration >= start && pos < end) {
- // play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
- bufferStart = start;
- bufferEnd = end;
- bufferLen = bufferEnd - pos;
- } else if (pos + maxHoleDuration < start) {
- bufferStartNext = start;
- break;
- }
- }
+ if (!details || sn < details.startSN || sn > details.endSN) {
+ return 0;
+ }
- return {
- len: bufferLen,
- start: bufferStart,
- end: bufferEnd,
- nextStart: bufferStartNext
- };
- };
+ var fragIdx, fragments, i;
+ fragIdx = sn - details.startSN;
+ fragments = details.fragments; // update frag reference in fragments array
+ // rationale is that fragments array might not contain this frag object.
+ // this will happen if playlist has been refreshed between frag loading and call to updateFragPTSDTS()
+ // if we don't update frag, we won't be able to propagate PTS info on the playlist
+ // resulting in invalid sliding computation
- return BufferHelper;
-}();
-// EXTERNAL MODULE: ./node_modules/eventemitter3/index.js
-var eventemitter3 = __webpack_require__("./node_modules/eventemitter3/index.js");
+ fragments[fragIdx] = frag; // adjust fragment PTS/duration from seqnum-1 to frag 0
-// EXTERNAL MODULE: ./node_modules/webworkify-webpack/index.js
-var webworkify_webpack = __webpack_require__("./node_modules/webworkify-webpack/index.js");
+ for (i = fragIdx; i > 0; i--) {
+ updatePTS(fragments, i, i - 1);
+ } // adjust fragment PTS/duration from seqnum to last frag
-// EXTERNAL MODULE: ./src/demux/demuxer-inline.js + 12 modules
-var demuxer_inline = __webpack_require__("./src/demux/demuxer-inline.js");
-// CONCATENATED MODULE: ./src/utils/mediasource-helper.ts
-/**
- * MediaSource helper
- */
-function getMediaSource() {
- return window.MediaSource || window.WebKitMediaSource;
+ for (i = fragIdx; i < fragments.length - 1; i++) {
+ updatePTS(fragments, i, i + 1);
+ }
+
+ details.PTSKnown = true;
+ return drift;
}
-// EXTERNAL MODULE: ./src/utils/get-self-scope.js
-var get_self_scope = __webpack_require__("./src/utils/get-self-scope.js");
+function mergeDetails(oldDetails, newDetails) {
+ // potentially retrieve cached initsegment
+ if (newDetails.initSegment && oldDetails.initSegment) {
+ newDetails.initSegment = oldDetails.initSegment;
+ } // check if old/new playlists have fragments in common
+ // loop through overlapping SN and update startPTS , cc, and duration if any found
-// CONCATENATED MODULE: ./src/observer.ts
-function observer_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+ var ccOffset = 0;
+ var PTSFrag;
+ mapFragmentIntersection(oldDetails, newDetails, function (oldFrag, newFrag) {
+ ccOffset = oldFrag.cc - newFrag.cc;
+
+ if (Object(number_isFinite["isFiniteNumber"])(oldFrag.startPTS)) {
+ newFrag.start = newFrag.startPTS = oldFrag.startPTS;
+ newFrag.endPTS = oldFrag.endPTS;
+ newFrag.duration = oldFrag.duration;
+ newFrag.backtracked = oldFrag.backtracked;
+ newFrag.dropped = oldFrag.dropped;
+ PTSFrag = newFrag;
+ } // PTS is known when there are overlapping segments
-/**
- * Simple adapter sub-class of Nodejs-like EventEmitter.
- */
-var Observer =
-/*#__PURE__*/
-function (_EventEmitter) {
- observer_inheritsLoose(Observer, _EventEmitter);
+ newDetails.PTSKnown = true;
+ });
- function Observer() {
- return _EventEmitter.apply(this, arguments) || this;
+ if (!newDetails.PTSKnown) {
+ return;
}
- var _proto = Observer.prototype;
+ if (ccOffset) {
+ logger["logger"].log('discontinuity sliding from playlist, take drift into account');
+ var newFragments = newDetails.fragments;
- /**
- * We simply want to pass along the event-name itself
- * in every call to a handler, which is the purpose of our `trigger` method
- * extending the standard API.
- */
- _proto.trigger = function trigger(event) {
- for (var _len = arguments.length, data = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
- data[_key - 1] = arguments[_key];
+ for (var i = 0; i < newFragments.length; i++) {
+ newFragments[i].cc += ccOffset;
}
-
- this.emit.apply(this, [event, event].concat(data));
- };
-
- return Observer;
-}(eventemitter3["EventEmitter"]);
-// CONCATENATED MODULE: ./src/demux/demuxer.js
+ } // if at least one fragment contains PTS info, recompute PTS information for all fragments
+ if (PTSFrag) {
+ updateFragPTSDTS(newDetails, PTSFrag, PTSFrag.startPTS, PTSFrag.endPTS, PTSFrag.startDTS, PTSFrag.endDTS);
+ } else {
+ // ensure that delta is within oldFragments range
+ // also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61])
+ // in that case we also need to adjust start offset of all fragments
+ adjustSliding(oldDetails, newDetails);
+ } // if we are here, it means we have fragments overlapping between
+ // old and new level. reliable PTS info is thus relying on old level
+ newDetails.PTSKnown = oldDetails.PTSKnown;
+}
+function mergeSubtitlePlaylists(oldPlaylist, newPlaylist, referenceStart) {
+ if (referenceStart === void 0) {
+ referenceStart = 0;
+ }
+ var lastIndex = -1;
+ mapFragmentIntersection(oldPlaylist, newPlaylist, function (oldFrag, newFrag, index) {
+ newFrag.start = oldFrag.start;
+ lastIndex = index;
+ });
+ var frags = newPlaylist.fragments;
+ if (lastIndex < 0) {
+ frags.forEach(function (frag) {
+ frag.start += referenceStart;
+ });
+ return;
+ }
+ for (var i = lastIndex + 1; i < frags.length; i++) {
+ frags[i].start = frags[i - 1].start + frags[i - 1].duration;
+ }
+}
+function mapFragmentIntersection(oldPlaylist, newPlaylist, intersectionFn) {
+ if (!oldPlaylist || !newPlaylist) {
+ return;
+ }
+ var start = Math.max(oldPlaylist.startSN, newPlaylist.startSN) - newPlaylist.startSN;
+ var end = Math.min(oldPlaylist.endSN, newPlaylist.endSN) - newPlaylist.startSN;
+ var delta = newPlaylist.startSN - oldPlaylist.startSN;
- // see https://stackoverflow.com/a/11237259/589493
+ for (var i = start; i <= end; i++) {
+ var oldFrag = oldPlaylist.fragments[delta + i];
+ var newFrag = newPlaylist.fragments[i];
-var global = Object(get_self_scope["getSelfScope"])(); // safeguard for code that might run both on worker and main thread
+ if (!oldFrag || !newFrag) {
+ break;
+ }
-var demuxer_MediaSource = getMediaSource() || {
- isTypeSupported: function isTypeSupported() {
- return false;
+ intersectionFn(oldFrag, newFrag, i);
}
-};
+}
+function adjustSliding(oldPlaylist, newPlaylist) {
+ var delta = newPlaylist.startSN - oldPlaylist.startSN;
+ var oldFragments = oldPlaylist.fragments;
+ var newFragments = newPlaylist.fragments;
-var demuxer_Demuxer =
-/*#__PURE__*/
-function () {
- function Demuxer(hls, id) {
- var _this = this;
+ if (delta < 0 || delta > oldFragments.length) {
+ return;
+ }
- this.hls = hls;
- this.id = id;
- var observer = this.observer = new Observer();
- var config = hls.config;
+ for (var i = 0; i < newFragments.length; i++) {
+ newFragments[i].start += oldFragments[delta].start;
+ }
+}
+function computeReloadInterval(currentPlaylist, newPlaylist, lastRequestTime) {
+ var reloadInterval = 1000 * (newPlaylist.averagetargetduration ? newPlaylist.averagetargetduration : newPlaylist.targetduration);
+ var minReloadInterval = reloadInterval / 2;
- var forwardMessage = function forwardMessage(ev, data) {
- data = data || {};
- data.frag = _this.frag;
- data.id = _this.id;
- hls.trigger(ev, data);
- }; // forward events to main thread
+ if (currentPlaylist && newPlaylist.endSN === currentPlaylist.endSN) {
+ // follow HLS Spec, If the client reloads a Playlist file and finds that it has not
+ // changed then it MUST wait for a period of one-half the target
+ // duration before retrying.
+ reloadInterval = minReloadInterval;
+ }
+ if (lastRequestTime) {
+ reloadInterval = Math.max(minReloadInterval, reloadInterval - (window.performance.now() - lastRequestTime));
+ } // in any case, don't reload more than half of target duration
- observer.on(events["default"].FRAG_DECRYPTED, forwardMessage);
- observer.on(events["default"].FRAG_PARSING_INIT_SEGMENT, forwardMessage);
- observer.on(events["default"].FRAG_PARSING_DATA, forwardMessage);
- observer.on(events["default"].FRAG_PARSED, forwardMessage);
- observer.on(events["default"].ERROR, forwardMessage);
- observer.on(events["default"].FRAG_PARSING_METADATA, forwardMessage);
- observer.on(events["default"].FRAG_PARSING_USERDATA, forwardMessage);
- observer.on(events["default"].INIT_PTS_FOUND, forwardMessage);
- var typeSupported = {
- mp4: demuxer_MediaSource.isTypeSupported('video/mp4'),
- mpeg: demuxer_MediaSource.isTypeSupported('audio/mpeg'),
- mp3: demuxer_MediaSource.isTypeSupported('audio/mp4; codecs="mp3"')
- }; // navigator.vendor is not always available in Web Worker
- // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
- var vendor = navigator.vendor;
+ return Math.round(reloadInterval);
+}
+// CONCATENATED MODULE: ./src/controller/level-controller.js
+function level_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
- if (config.enableWorker && typeof Worker !== 'undefined') {
- logger["logger"].log('demuxing in webworker');
- var w;
+function level_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) level_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) level_controller_defineProperties(Constructor, staticProps); return Constructor; }
- try {
- w = this.w = webworkify_webpack(/*require.resolve*/(/*! ../demux/demuxer-worker.js */ "./src/demux/demuxer-worker.js"));
- this.onwmsg = this.onWorkerMessage.bind(this);
- w.addEventListener('message', this.onwmsg);
+function level_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
- w.onerror = function (event) {
- hls.trigger(events["default"].ERROR, {
- type: errors["ErrorTypes"].OTHER_ERROR,
- details: errors["ErrorDetails"].INTERNAL_EXCEPTION,
- fatal: true,
- event: 'demuxerWorker',
- err: {
- message: event.message + ' (' + event.filename + ':' + event.lineno + ')'
- }
- });
- };
+/*
+ * Level Controller
+*/
- w.postMessage({
- cmd: 'init',
- typeSupported: typeSupported,
- vendor: vendor,
- id: id,
- config: JSON.stringify(config)
- });
- } catch (err) {
- logger["logger"].warn('Error in worker:', err);
- logger["logger"].error('Error while initializing DemuxerWorker, fallback on DemuxerInline');
- if (w) {
- // revoke the Object URL that was used to create demuxer worker, so as not to leak it
- global.URL.revokeObjectURL(w.objectURL);
- }
- this.demuxer = new demuxer_inline["default"](observer, typeSupported, config, vendor);
- this.w = undefined;
- }
- } else {
- this.demuxer = new demuxer_inline["default"](observer, typeSupported, config, vendor);
- }
- }
- var _proto = Demuxer.prototype;
- _proto.destroy = function destroy() {
- var w = this.w;
- if (w) {
- w.removeEventListener('message', this.onwmsg);
- w.terminate();
- this.w = null;
- } else {
- var demuxer = this.demuxer;
+var level_controller_window = window,
+ level_controller_performance = level_controller_window.performance;
+var chromeOrFirefox;
- if (demuxer) {
- demuxer.destroy();
- this.demuxer = null;
- }
- }
+var level_controller_LevelController =
+/*#__PURE__*/
+function (_EventHandler) {
+ level_controller_inheritsLoose(LevelController, _EventHandler);
- var observer = this.observer;
+ function LevelController(hls) {
+ var _this;
- if (observer) {
- observer.removeAllListeners();
- this.observer = null;
- }
- };
-
- _proto.push = function push(data, initSegment, audioCodec, videoCodec, frag, duration, accurateTimeOffset, defaultInitPTS) {
- var w = this.w;
- var timeOffset = Object(number_isFinite["isFiniteNumber"])(frag.startPTS) ? frag.startPTS : frag.start;
- var decryptdata = frag.decryptdata;
- var lastFrag = this.frag;
- var discontinuity = !(lastFrag && frag.cc === lastFrag.cc);
- var trackSwitch = !(lastFrag && frag.level === lastFrag.level);
- var nextSN = lastFrag && frag.sn === lastFrag.sn + 1;
- var contiguous = !trackSwitch && nextSN;
-
- if (discontinuity) {
- logger["logger"].log(this.id + ":discontinuity detected");
- }
-
- if (trackSwitch) {
- logger["logger"].log(this.id + ":switch detected");
- }
+ _this = _EventHandler.call(this, hls, events["default"].MANIFEST_LOADED, events["default"].LEVEL_LOADED, events["default"].AUDIO_TRACK_SWITCHED, events["default"].FRAG_LOADED, events["default"].ERROR) || this;
+ _this.canload = false;
+ _this.currentLevelIndex = null;
+ _this.manualLevelIndex = -1;
+ _this.timer = null;
+ chromeOrFirefox = /chrome|firefox/.test(navigator.userAgent.toLowerCase());
+ return _this;
+ }
- this.frag = frag;
+ var _proto = LevelController.prototype;
- if (w) {
- // post fragment payload as transferable objects for ArrayBuffer (no copy)
- w.postMessage({
- cmd: 'demux',
- data: data,
- decryptdata: decryptdata,
- initSegment: initSegment,
- audioCodec: audioCodec,
- videoCodec: videoCodec,
- timeOffset: timeOffset,
- discontinuity: discontinuity,
- trackSwitch: trackSwitch,
- contiguous: contiguous,
- duration: duration,
- accurateTimeOffset: accurateTimeOffset,
- defaultInitPTS: defaultInitPTS
- }, data instanceof ArrayBuffer ? [data] : []);
- } else {
- var demuxer = this.demuxer;
+ _proto.onHandlerDestroying = function onHandlerDestroying() {
+ this.clearTimer();
+ this.manualLevelIndex = -1;
+ };
- if (demuxer) {
- demuxer.push(data, decryptdata, initSegment, audioCodec, videoCodec, timeOffset, discontinuity, trackSwitch, contiguous, duration, accurateTimeOffset, defaultInitPTS);
- }
+ _proto.clearTimer = function clearTimer() {
+ if (this.timer !== null) {
+ clearTimeout(this.timer);
+ this.timer = null;
}
};
- _proto.onWorkerMessage = function onWorkerMessage(ev) {
- var data = ev.data,
- hls = this.hls;
-
- switch (data.event) {
- case 'init':
- // revoke the Object URL that was used to create demuxer worker, so as not to leak it
- global.URL.revokeObjectURL(this.w.objectURL);
- break;
- // special case for FRAG_PARSING_DATA: data1 and data2 are transferable objects
+ _proto.startLoad = function startLoad() {
+ var levels = this._levels;
+ this.canload = true;
+ this.levelRetryCount = 0; // clean up live level details to force reload them, and reset load errors
- case events["default"].FRAG_PARSING_DATA:
- data.data.data1 = new Uint8Array(data.data1);
+ if (levels) {
+ levels.forEach(function (level) {
+ level.loadError = 0;
+ var levelDetails = level.details;
- if (data.data2) {
- data.data.data2 = new Uint8Array(data.data2);
+ if (levelDetails && levelDetails.live) {
+ level.details = undefined;
}
+ });
+ } // speed up live playlist refresh if timer exists
- /* falls through */
- default:
- data.data = data.data || {};
- data.data.frag = this.frag;
- data.data.id = this.id;
- hls.trigger(data.event, data.data);
- break;
+ if (this.timer !== null) {
+ this.loadLevel();
}
};
- return Demuxer;
-}();
-
-/* harmony default export */ var demux_demuxer = (demuxer_Demuxer);
-// CONCATENATED MODULE: ./src/controller/level-helper.js
-
+ _proto.stopLoad = function stopLoad() {
+ this.canload = false;
+ };
+ _proto.onManifestLoaded = function onManifestLoaded(data) {
+ var levels = [];
+ var audioTracks = [];
+ var bitrateStart;
+ var levelSet = {};
+ var levelFromSet = null;
+ var videoCodecFound = false;
+ var audioCodecFound = false; // regroup redundant levels together
+ data.levels.forEach(function (level) {
+ var attributes = level.attrs;
+ level.loadError = 0;
+ level.fragmentError = false;
+ videoCodecFound = videoCodecFound || !!level.videoCodec;
+ audioCodecFound = audioCodecFound || !!level.audioCodec; // erase audio codec info if browser does not support mp4a.40.34.
+ // demuxer will autodetect codec and fallback to mpeg/audio
+ if (chromeOrFirefox && level.audioCodec && level.audioCodec.indexOf('mp4a.40.34') !== -1) {
+ level.audioCodec = undefined;
+ }
-/**
- * @module LevelHelper
- *
- * Providing methods dealing with playlist sliding and drift
- *
- * TODO: Create an actual `Level` class/model that deals with all this logic in an object-oriented-manner.
- *
- * */
+ levelFromSet = levelSet[level.bitrate]; // FIXME: we would also have to match the resolution here
-function addGroupId(level, type, id) {
- switch (type) {
- case 'audio':
- if (!level.audioGroupIds) {
- level.audioGroupIds = [];
+ if (!levelFromSet) {
+ level.url = [level.url];
+ level.urlId = 0;
+ levelSet[level.bitrate] = level;
+ levels.push(level);
+ } else {
+ levelFromSet.url.push(level.url);
}
- level.audioGroupIds.push(id);
- break;
+ if (attributes) {
+ if (attributes.AUDIO) {
+ audioCodecFound = true;
+ addGroupId(levelFromSet || level, 'audio', attributes.AUDIO);
+ }
- case 'text':
- if (!level.textGroupIds) {
- level.textGroupIds = [];
+ if (attributes.SUBTITLES) {
+ addGroupId(levelFromSet || level, 'text', attributes.SUBTITLES);
+ }
}
+ }); // remove audio-only level if we also have levels with audio+video codecs signalled
- level.textGroupIds.push(id);
- break;
- }
-}
-function updatePTS(fragments, fromIdx, toIdx) {
- var fragFrom = fragments[fromIdx],
- fragTo = fragments[toIdx],
- fragToPTS = fragTo.startPTS; // if we know startPTS[toIdx]
-
- if (Object(number_isFinite["isFiniteNumber"])(fragToPTS)) {
- // update fragment duration.
- // it helps to fix drifts between playlist reported duration and fragment real duration
- if (toIdx > fromIdx) {
- fragFrom.duration = fragToPTS - fragFrom.start;
+ if (videoCodecFound && audioCodecFound) {
+ levels = levels.filter(function (_ref) {
+ var videoCodec = _ref.videoCodec;
+ return !!videoCodec;
+ });
+ } // only keep levels with supported audio/video codecs
- if (fragFrom.duration < 0) {
- logger["logger"].warn("negative duration computed for frag " + fragFrom.sn + ",level " + fragFrom.level + ", there should be some duration drift between playlist and fragment!");
- }
- } else {
- fragTo.duration = fragFrom.start - fragToPTS;
- if (fragTo.duration < 0) {
- logger["logger"].warn("negative duration computed for frag " + fragTo.sn + ",level " + fragTo.level + ", there should be some duration drift between playlist and fragment!");
- }
- }
- } else {
- // we dont know startPTS[toIdx]
- if (toIdx > fromIdx) {
- fragTo.start = fragFrom.start + fragFrom.duration;
- } else {
- fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0);
- }
- }
-}
-function updateFragPTSDTS(details, frag, startPTS, endPTS, startDTS, endDTS) {
- // update frag PTS/DTS
- var maxStartPTS = startPTS;
+ levels = levels.filter(function (_ref2) {
+ var audioCodec = _ref2.audioCodec,
+ videoCodec = _ref2.videoCodec;
+ return (!audioCodec || isCodecSupportedInMp4(audioCodec, 'audio')) && (!videoCodec || isCodecSupportedInMp4(videoCodec, 'video'));
+ });
- if (Object(number_isFinite["isFiniteNumber"])(frag.startPTS)) {
- // delta PTS between audio and video
- var deltaPTS = Math.abs(frag.startPTS - startPTS);
+ if (data.audioTracks) {
+ audioTracks = data.audioTracks.filter(function (track) {
+ return !track.audioCodec || isCodecSupportedInMp4(track.audioCodec, 'audio');
+ }); // Reassign id's after filtering since they're used as array indices
- if (!Object(number_isFinite["isFiniteNumber"])(frag.deltaPTS)) {
- frag.deltaPTS = deltaPTS;
- } else {
- frag.deltaPTS = Math.max(deltaPTS, frag.deltaPTS);
+ audioTracks.forEach(function (track, index) {
+ track.id = index;
+ });
}
- maxStartPTS = Math.max(startPTS, frag.startPTS);
- startPTS = Math.min(startPTS, frag.startPTS);
- endPTS = Math.max(endPTS, frag.endPTS);
- startDTS = Math.min(startDTS, frag.startDTS);
- endDTS = Math.max(endDTS, frag.endDTS);
- }
+ if (levels.length > 0) {
+ // start bitrate is the first bitrate of the manifest
+ bitrateStart = levels[0].bitrate; // sort level on bitrate
- var drift = startPTS - frag.start;
- frag.start = frag.startPTS = startPTS;
- frag.maxStartPTS = maxStartPTS;
- frag.endPTS = endPTS;
- frag.startDTS = startDTS;
- frag.endDTS = endDTS;
- frag.duration = endPTS - startPTS;
- var sn = frag.sn; // exit if sn out of range
+ levels.sort(function (a, b) {
+ return a.bitrate - b.bitrate;
+ });
+ this._levels = levels; // find index of first level in sorted levels
- if (!details || sn < details.startSN || sn > details.endSN) {
- return 0;
- }
+ for (var i = 0; i < levels.length; i++) {
+ if (levels[i].bitrate === bitrateStart) {
+ this._firstLevel = i;
+ logger["logger"].log("manifest loaded," + levels.length + " level(s) found, first bitrate:" + bitrateStart);
+ break;
+ }
+ } // Audio is only alternate if manifest include a URI along with the audio group tag
- var fragIdx, fragments, i;
- fragIdx = sn - details.startSN;
- fragments = details.fragments; // update frag reference in fragments array
- // rationale is that fragments array might not contain this frag object.
- // this will happen if playlist has been refreshed between frag loading and call to updateFragPTSDTS()
- // if we don't update frag, we won't be able to propagate PTS info on the playlist
- // resulting in invalid sliding computation
- fragments[fragIdx] = frag; // adjust fragment PTS/duration from seqnum-1 to frag 0
-
- for (i = fragIdx; i > 0; i--) {
- updatePTS(fragments, i, i - 1);
- } // adjust fragment PTS/duration from seqnum to last frag
-
-
- for (i = fragIdx; i < fragments.length - 1; i++) {
- updatePTS(fragments, i, i + 1);
- }
-
- details.PTSKnown = true;
- return drift;
-}
-function mergeDetails(oldDetails, newDetails) {
- // potentially retrieve cached initsegment
- if (newDetails.initSegment && oldDetails.initSegment) {
- newDetails.initSegment = oldDetails.initSegment;
- } // check if old/new playlists have fragments in common
- // loop through overlapping SN and update startPTS , cc, and duration if any found
-
-
- var ccOffset = 0;
- var PTSFrag;
- mapFragmentIntersection(oldDetails, newDetails, function (oldFrag, newFrag) {
- ccOffset = oldFrag.cc - newFrag.cc;
-
- if (Object(number_isFinite["isFiniteNumber"])(oldFrag.startPTS)) {
- newFrag.start = newFrag.startPTS = oldFrag.startPTS;
- newFrag.endPTS = oldFrag.endPTS;
- newFrag.duration = oldFrag.duration;
- newFrag.backtracked = oldFrag.backtracked;
- newFrag.dropped = oldFrag.dropped;
- PTSFrag = newFrag;
- } // PTS is known when there are overlapping segments
-
-
- newDetails.PTSKnown = true;
- });
-
- if (!newDetails.PTSKnown) {
- return;
- }
-
- if (ccOffset) {
- logger["logger"].log('discontinuity sliding from playlist, take drift into account');
- var newFragments = newDetails.fragments;
-
- for (var i = 0; i < newFragments.length; i++) {
- newFragments[i].cc += ccOffset;
- }
- } // if at least one fragment contains PTS info, recompute PTS information for all fragments
-
-
- if (PTSFrag) {
- updateFragPTSDTS(newDetails, PTSFrag, PTSFrag.startPTS, PTSFrag.endPTS, PTSFrag.startDTS, PTSFrag.endDTS);
- } else {
- // ensure that delta is within oldFragments range
- // also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61])
- // in that case we also need to adjust start offset of all fragments
- adjustSliding(oldDetails, newDetails);
- } // if we are here, it means we have fragments overlapping between
- // old and new level. reliable PTS info is thus relying on old level
-
-
- newDetails.PTSKnown = oldDetails.PTSKnown;
-}
-function mergeSubtitlePlaylists(oldPlaylist, newPlaylist, referenceStart) {
- if (referenceStart === void 0) {
- referenceStart = 0;
- }
-
- var lastIndex = -1;
- mapFragmentIntersection(oldPlaylist, newPlaylist, function (oldFrag, newFrag, index) {
- newFrag.start = oldFrag.start;
- lastIndex = index;
- });
- var frags = newPlaylist.fragments;
-
- if (lastIndex < 0) {
- frags.forEach(function (frag) {
- frag.start += referenceStart;
- });
- return;
- }
-
- for (var i = lastIndex + 1; i < frags.length; i++) {
- frags[i].start = frags[i - 1].start + frags[i - 1].duration;
- }
-}
-function mapFragmentIntersection(oldPlaylist, newPlaylist, intersectionFn) {
- if (!oldPlaylist || !newPlaylist) {
- return;
- }
-
- var start = Math.max(oldPlaylist.startSN, newPlaylist.startSN) - newPlaylist.startSN;
- var end = Math.min(oldPlaylist.endSN, newPlaylist.endSN) - newPlaylist.startSN;
- var delta = newPlaylist.startSN - oldPlaylist.startSN;
-
- for (var i = start; i <= end; i++) {
- var oldFrag = oldPlaylist.fragments[delta + i];
- var newFrag = newPlaylist.fragments[i];
-
- if (!oldFrag || !newFrag) {
- break;
- }
-
- intersectionFn(oldFrag, newFrag, i);
- }
-}
-function adjustSliding(oldPlaylist, newPlaylist) {
- var delta = newPlaylist.startSN - oldPlaylist.startSN;
- var oldFragments = oldPlaylist.fragments;
- var newFragments = newPlaylist.fragments;
-
- if (delta < 0 || delta > oldFragments.length) {
- return;
- }
-
- for (var i = 0; i < newFragments.length; i++) {
- newFragments[i].start += oldFragments[delta].start;
- }
-}
-function computeReloadInterval(currentPlaylist, newPlaylist, lastRequestTime) {
- var reloadInterval = 1000 * (newPlaylist.averagetargetduration ? newPlaylist.averagetargetduration : newPlaylist.targetduration);
- var minReloadInterval = reloadInterval / 2;
-
- if (currentPlaylist && newPlaylist.endSN === currentPlaylist.endSN) {
- // follow HLS Spec, If the client reloads a Playlist file and finds that it has not
- // changed then it MUST wait for a period of one-half the target
- // duration before retrying.
- reloadInterval = minReloadInterval;
- }
-
- if (lastRequestTime) {
- reloadInterval = Math.max(minReloadInterval, reloadInterval - (window.performance.now() - lastRequestTime));
- } // in any case, don't reload more than half of target duration
-
-
- return Math.round(reloadInterval);
-}
-// CONCATENATED MODULE: ./src/utils/time-ranges.ts
-/**
- * TimeRanges to string helper
- */
-var TimeRanges = {
- toString: function toString(r) {
- var log = '';
- var len = r.length;
-
- for (var i = 0; i < len; i++) {
- log += '[' + r.start(i).toFixed(3) + ',' + r.end(i).toFixed(3) + ']';
- }
-
- return log;
- }
-};
-/* harmony default export */ var time_ranges = (TimeRanges);
-// CONCATENATED MODULE: ./src/utils/discontinuities.js
-
-
-
-function findFirstFragWithCC(fragments, cc) {
- var firstFrag = null;
-
- for (var i = 0; i < fragments.length; i += 1) {
- var currentFrag = fragments[i];
-
- if (currentFrag && currentFrag.cc === cc) {
- firstFrag = currentFrag;
- break;
- }
- }
-
- return firstFrag;
-}
-function findFragWithCC(fragments, CC) {
- return binary_search.search(fragments, function (candidate) {
- if (candidate.cc < CC) {
- return 1;
- } else if (candidate.cc > CC) {
- return -1;
- } else {
- return 0;
- }
- });
-}
-function shouldAlignOnDiscontinuities(lastFrag, lastLevel, details) {
- var shouldAlign = false;
-
- if (lastLevel && lastLevel.details && details) {
- if (details.endCC > details.startCC || lastFrag && lastFrag.cc < details.startCC) {
- shouldAlign = true;
- }
- }
-
- return shouldAlign;
-} // Find the first frag in the previous level which matches the CC of the first frag of the new level
-
-function findDiscontinuousReferenceFrag(prevDetails, curDetails) {
- var prevFrags = prevDetails.fragments;
- var curFrags = curDetails.fragments;
-
- if (!curFrags.length || !prevFrags.length) {
- logger["logger"].log('No fragments to align');
- return;
- }
-
- var prevStartFrag = findFirstFragWithCC(prevFrags, curFrags[0].cc);
-
- if (!prevStartFrag || prevStartFrag && !prevStartFrag.startPTS) {
- logger["logger"].log('No frag in previous level to align on');
- return;
- }
-
- return prevStartFrag;
-}
-function adjustPts(sliding, details) {
- details.fragments.forEach(function (frag) {
- if (frag) {
- var start = frag.start + sliding;
- frag.start = frag.startPTS = start;
- frag.endPTS = start + frag.duration;
- }
- });
- details.PTSKnown = true;
-}
-/**
- * Using the parameters of the last level, this function computes PTS' of the new fragments so that they form a
- * contiguous stream with the last fragments.
- * The PTS of a fragment lets Hls.js know where it fits into a stream - by knowing every PTS, we know which fragment to
- * download at any given time. PTS is normally computed when the fragment is demuxed, so taking this step saves us time
- * and an extra download.
- * @param lastFrag
- * @param lastLevel
- * @param details
- */
-
-function alignStream(lastFrag, lastLevel, details) {
- alignDiscontinuities(lastFrag, details, lastLevel);
-
- if (!details.PTSKnown && lastLevel) {
- // If the PTS wasn't figured out via discontinuity sequence that means there was no CC increase within the level.
- // Aligning via Program Date Time should therefore be reliable, since PDT should be the same within the same
- // discontinuity sequence.
- alignPDT(details, lastLevel.details);
- }
-}
-/**
- * Computes the PTS if a new level's fragments using the PTS of a fragment in the last level which shares the same
- * discontinuity sequence.
- * @param lastLevel - The details of the last loaded level
- * @param details - The details of the new level
- */
-
-function alignDiscontinuities(lastFrag, details, lastLevel) {
- if (shouldAlignOnDiscontinuities(lastFrag, lastLevel, details)) {
- var referenceFrag = findDiscontinuousReferenceFrag(lastLevel.details, details);
-
- if (referenceFrag) {
- logger["logger"].log('Adjusting PTS using last level due to CC increase within current level');
- adjustPts(referenceFrag.start, details);
- }
- }
-}
-/**
- * Computes the PTS of a new level's fragments using the difference in Program Date Time from the last level.
- * @param details - The details of the new level
- * @param lastDetails - The details of the last loaded level
- */
-
-function alignPDT(details, lastDetails) {
- if (lastDetails && lastDetails.fragments.length) {
- if (!details.hasProgramDateTime || !lastDetails.hasProgramDateTime) {
- return;
- } // if last level sliding is 1000 and its first frag PROGRAM-DATE-TIME is 2017-08-20 1:10:00 AM
- // and if new details first frag PROGRAM DATE-TIME is 2017-08-20 1:10:08 AM
- // then we can deduce that playlist B sliding is 1000+8 = 1008s
-
-
- var lastPDT = lastDetails.fragments[0].programDateTime;
- var newPDT = details.fragments[0].programDateTime; // date diff is in ms. frag.start is in seconds
-
- var sliding = (newPDT - lastPDT) / 1000 + lastDetails.fragments[0].start;
-
- if (Object(number_isFinite["isFiniteNumber"])(sliding)) {
- logger["logger"].log("adjusting PTS using programDateTime delta, sliding:" + sliding.toFixed(3));
- adjustPts(sliding, details);
- }
- }
-}
-// CONCATENATED MODULE: ./src/controller/fragment-finders.ts
-
-
-
-/**
- * Returns first fragment whose endPdt value exceeds the given PDT.
- * @param {Array} fragments - The array of candidate fragments
- * @param {number|null} [PDTValue = null] - The PDT value which must be exceeded
- * @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start/end can be within in order to be considered contiguous
- * @returns {*|null} fragment - The best matching fragment
- */
-function findFragmentByPDT(fragments, PDTValue, maxFragLookUpTolerance) {
- if (PDTValue === null || !Array.isArray(fragments) || !fragments.length || !Object(number_isFinite["isFiniteNumber"])(PDTValue)) {
- return null;
- } // if less than start
-
-
- var startPDT = fragments[0].programDateTime;
-
- if (PDTValue < (startPDT || 0)) {
- return null;
- }
-
- var endPDT = fragments[fragments.length - 1].endProgramDateTime;
-
- if (PDTValue >= (endPDT || 0)) {
- return null;
- }
-
- maxFragLookUpTolerance = maxFragLookUpTolerance || 0;
-
- for (var seg = 0; seg < fragments.length; ++seg) {
- var frag = fragments[seg];
-
- if (pdtWithinToleranceTest(PDTValue, maxFragLookUpTolerance, frag)) {
- return frag;
- }
- }
-
- return null;
-}
-/**
- * Finds a fragment based on the SN of the previous fragment; or based on the needs of the current buffer.
- * This method compensates for small buffer gaps by applying a tolerance to the start of any candidate fragment, thus
- * breaking any traps which would cause the same fragment to be continuously selected within a small range.
- * @param {*} fragPrevious - The last frag successfully appended
- * @param {Array} fragments - The array of candidate fragments
- * @param {number} [bufferEnd = 0] - The end of the contiguous buffered range the playhead is currently within
- * @param {number} maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous
- * @returns {*} foundFrag - The best matching fragment
- */
-
-function findFragmentByPTS(fragPrevious, fragments, bufferEnd, maxFragLookUpTolerance) {
- if (bufferEnd === void 0) {
- bufferEnd = 0;
- }
-
- if (maxFragLookUpTolerance === void 0) {
- maxFragLookUpTolerance = 0;
- }
-
- var fragNext = fragPrevious ? fragments[fragPrevious.sn - fragments[0].sn + 1] : null; // Prefer the next fragment if it's within tolerance
-
- if (fragNext && !fragment_finders_fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, fragNext)) {
- return fragNext;
- }
-
- return binary_search.search(fragments, fragment_finders_fragmentWithinToleranceTest.bind(null, bufferEnd, maxFragLookUpTolerance));
-}
-/**
- * The test function used by the findFragmentBySn's BinarySearch to look for the best match to the current buffer conditions.
- * @param {*} candidate - The fragment to test
- * @param {number} [bufferEnd = 0] - The end of the current buffered range the playhead is currently within
- * @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start can be within in order to be considered contiguous
- * @returns {number} - 0 if it matches, 1 if too low, -1 if too high
- */
-
-function fragment_finders_fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, candidate) {
- if (bufferEnd === void 0) {
- bufferEnd = 0;
- }
-
- if (maxFragLookUpTolerance === void 0) {
- maxFragLookUpTolerance = 0;
- }
-
- // offset should be within fragment boundary - config.maxFragLookUpTolerance
- // this is to cope with situations like
- // bufferEnd = 9.991
- // frag[Ø] : [0,10]
- // frag[1] : [10,20]
- // bufferEnd is within frag[0] range ... although what we are expecting is to return frag[1] here
- // frag start frag start+duration
- // |-----------------------------|
- // <---> <--->
- // ...--------><-----------------------------><---------....
- // previous frag matching fragment next frag
- // return -1 return 0 return 1
- // logger.log(`level/sn/start/end/bufEnd:${level}/${candidate.sn}/${candidate.start}/${(candidate.start+candidate.duration)}/${bufferEnd}`);
- // Set the lookup tolerance to be small enough to detect the current segment - ensures we don't skip over very small segments
- var candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0));
-
- if (candidate.start + candidate.duration - candidateLookupTolerance <= bufferEnd) {
- return 1;
- } else if (candidate.start - candidateLookupTolerance > bufferEnd && candidate.start) {
- // if maxFragLookUpTolerance will have negative value then don't return -1 for first element
- return -1;
- }
-
- return 0;
-}
-/**
- * The test function used by the findFragmentByPdt's BinarySearch to look for the best match to the current buffer conditions.
- * This function tests the candidate's program date time values, as represented in Unix time
- * @param {*} candidate - The fragment to test
- * @param {number} [pdtBufferEnd = 0] - The Unix time representing the end of the current buffered range
- * @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start can be within in order to be considered contiguous
- * @returns {boolean} True if contiguous, false otherwise
- */
-
-function pdtWithinToleranceTest(pdtBufferEnd, maxFragLookUpTolerance, candidate) {
- var candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0)) * 1000; // endProgramDateTime can be null, default to zero
-
- var endProgramDateTime = candidate.endProgramDateTime || 0;
- return endProgramDateTime - candidateLookupTolerance > pdtBufferEnd;
-}
-// CONCATENATED MODULE: ./src/controller/gap-controller.js
-
-
-
-
-var stallDebounceInterval = 1000;
-var jumpThreshold = 0.5; // tolerance needed as some browsers stalls playback before reaching buffered range end
-
-var gap_controller_GapController =
-/*#__PURE__*/
-function () {
- function GapController(config, media, fragmentTracker, hls) {
- this.config = config;
- this.media = media;
- this.fragmentTracker = fragmentTracker;
- this.hls = hls;
- this.stallReported = false;
- }
- /**
- * Checks if the playhead is stuck within a gap, and if so, attempts to free it.
- * A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
- * @param lastCurrentTime
- * @param buffered
- */
-
-
- var _proto = GapController.prototype;
-
- _proto.poll = function poll(lastCurrentTime, buffered) {
- var config = this.config,
- media = this.media;
- var currentTime = media.currentTime;
- var tnow = window.performance.now();
-
- if (currentTime !== lastCurrentTime) {
- // The playhead is now moving, but was previously stalled
- if (this.stallReported) {
- logger["logger"].warn("playback not stuck anymore @" + currentTime + ", after " + Math.round(tnow - this.stalled) + "ms");
- this.stallReported = false;
- }
-
- this.stalled = null;
- this.nudgeRetry = 0;
- return;
- }
-
- if (media.ended || !media.buffered.length || media.readyState > 2) {
- return;
- }
-
- if (media.seeking && BufferHelper.isBuffered(media, currentTime)) {
- return;
- } // The playhead isn't moving but it should be
- // Allow some slack time to for small stalls to resolve themselves
-
-
- var stalledDuration = tnow - this.stalled;
- var bufferInfo = BufferHelper.bufferInfo(media, currentTime, config.maxBufferHole);
-
- if (!this.stalled) {
- this.stalled = tnow;
- return;
- } else if (stalledDuration >= stallDebounceInterval) {
- // Report stalling after trying to fix
- this._reportStall(bufferInfo.len);
- }
-
- this._tryFixBufferStall(bufferInfo, stalledDuration);
- }
- /**
- * Detects and attempts to fix known buffer stalling issues.
- * @param bufferInfo - The properties of the current buffer.
- * @param stalledDuration - The amount of time Hls.js has been stalling for.
- * @private
- */
- ;
-
- _proto._tryFixBufferStall = function _tryFixBufferStall(bufferInfo, stalledDuration) {
- var config = this.config,
- fragmentTracker = this.fragmentTracker,
- media = this.media;
- var currentTime = media.currentTime;
- var partial = fragmentTracker.getPartialFragment(currentTime);
-
- if (partial) {
- // Try to skip over the buffer hole caused by a partial fragment
- // This method isn't limited by the size of the gap between buffered ranges
- this._trySkipBufferHole(partial);
- }
-
- if (bufferInfo.len > jumpThreshold && stalledDuration > config.highBufferWatchdogPeriod * 1000) {
- // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
- // We only try to jump the hole if it's under the configured size
- // Reset stalled so to rearm watchdog timer
- this.stalled = null;
-
- this._tryNudgeBuffer();
- }
- }
- /**
- * Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
- * @param bufferLen - The playhead distance from the end of the current buffer segment.
- * @private
- */
- ;
-
- _proto._reportStall = function _reportStall(bufferLen) {
- var hls = this.hls,
- media = this.media,
- stallReported = this.stallReported;
-
- if (!stallReported) {
- // Report stalled error once
- this.stallReported = true;
- logger["logger"].warn("Playback stalling at @" + media.currentTime + " due to low buffer");
- hls.trigger(events["default"].ERROR, {
- type: errors["ErrorTypes"].MEDIA_ERROR,
- details: errors["ErrorDetails"].BUFFER_STALLED_ERROR,
- fatal: false,
- buffer: bufferLen
- });
- }
- }
- /**
- * Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
- * @param partial - The partial fragment found at the current time (where playback is stalling).
- * @private
- */
- ;
-
- _proto._trySkipBufferHole = function _trySkipBufferHole(partial) {
- var hls = this.hls,
- media = this.media;
- var currentTime = media.currentTime;
- var lastEndTime = 0; // Check if currentTime is between unbuffered regions of partial fragments
-
- for (var i = 0; i < media.buffered.length; i++) {
- var startTime = media.buffered.start(i);
-
- if (currentTime >= lastEndTime && currentTime < startTime) {
- media.currentTime = Math.max(startTime, media.currentTime + 0.1);
- logger["logger"].warn("skipping hole, adjusting currentTime from " + currentTime + " to " + media.currentTime);
- this.stalled = null;
- hls.trigger(events["default"].ERROR, {
- type: errors["ErrorTypes"].MEDIA_ERROR,
- details: errors["ErrorDetails"].BUFFER_SEEK_OVER_HOLE,
- fatal: false,
- reason: "fragment loaded with buffer holes, seeking from " + currentTime + " to " + media.currentTime,
- frag: partial
- });
- return;
- }
-
- lastEndTime = media.buffered.end(i);
- }
- }
- /**
- * Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
- * @private
- */
- ;
-
- _proto._tryNudgeBuffer = function _tryNudgeBuffer() {
- var config = this.config,
- hls = this.hls,
- media = this.media;
- var currentTime = media.currentTime;
- var nudgeRetry = (this.nudgeRetry || 0) + 1;
- this.nudgeRetry = nudgeRetry;
-
- if (nudgeRetry < config.nudgeMaxRetry) {
- var targetTime = currentTime + nudgeRetry * config.nudgeOffset;
- logger["logger"].log("adjust currentTime from " + currentTime + " to " + targetTime); // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
-
- media.currentTime = targetTime;
- hls.trigger(events["default"].ERROR, {
- type: errors["ErrorTypes"].MEDIA_ERROR,
- details: errors["ErrorDetails"].BUFFER_NUDGE_ON_STALL,
- fatal: false
- });
- } else {
- logger["logger"].error("still stuck in high buffer @" + currentTime + " after " + config.nudgeMaxRetry + ", raise fatal error");
- hls.trigger(events["default"].ERROR, {
- type: errors["ErrorTypes"].MEDIA_ERROR,
- details: errors["ErrorDetails"].BUFFER_STALLED_ERROR,
- fatal: true
- });
- }
- };
-
- return GapController;
-}();
-
-
-// CONCATENATED MODULE: ./src/task-loop.ts
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function task_loop_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
-
-
-
-/**
- * Sub-class specialization of EventHandler base class.
- *
- * TaskLoop allows to schedule a task function being called (optionnaly repeatedly) on the main loop,
- * scheduled asynchroneously, avoiding recursive calls in the same tick.
- *
- * The task itself is implemented in `doTick`. It can be requested and called for single execution
- * using the `tick` method.
- *
- * It will be assured that the task execution method (`tick`) only gets called once per main loop "tick",
- * no matter how often it gets requested for execution. Execution in further ticks will be scheduled accordingly.
- *
- * If further execution requests have already been scheduled on the next tick, it can be checked with `hasNextTick`,
- * and cancelled with `clearNextTick`.
- *
- * The task can be scheduled as an interval repeatedly with a period as parameter (see `setInterval`, `clearInterval`).
- *
- * Sub-classes need to implement the `doTick` method which will effectively have the task execution routine.
- *
- * Further explanations:
- *
- * The baseclass has a `tick` method that will schedule the doTick call. It may be called synchroneously
- * only for a stack-depth of one. On re-entrant calls, sub-sequent calls are scheduled for next main loop ticks.
- *
- * When the task execution (`tick` method) is called in re-entrant way this is detected and
- * we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
- * task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
- */
-var TaskLoop =
-/*#__PURE__*/
-function (_EventHandler) {
- task_loop_inheritsLoose(TaskLoop, _EventHandler);
-
- function TaskLoop(hls) {
- var _this;
-
- for (var _len = arguments.length, events = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
- events[_key - 1] = arguments[_key];
- }
-
- _this = _EventHandler.call.apply(_EventHandler, [this, hls].concat(events)) || this;
- _this._boundTick = void 0;
- _this._tickTimer = null;
- _this._tickInterval = null;
- _this._tickCallCount = 0;
- _this._boundTick = _this.tick.bind(_assertThisInitialized(_this));
- return _this;
- }
- /**
- * @override
- */
-
-
- var _proto = TaskLoop.prototype;
-
- _proto.onHandlerDestroying = function onHandlerDestroying() {
- // clear all timers before unregistering from event bus
- this.clearNextTick();
- this.clearInterval();
- }
- /**
- * @returns {boolean}
- */
- ;
-
- _proto.hasInterval = function hasInterval() {
- return !!this._tickInterval;
- }
- /**
- * @returns {boolean}
- */
- ;
-
- _proto.hasNextTick = function hasNextTick() {
- return !!this._tickTimer;
- }
- /**
- * @param {number} millis Interval time (ms)
- * @returns {boolean} True when interval has been scheduled, false when already scheduled (no effect)
- */
- ;
-
- _proto.setInterval = function setInterval(millis) {
- if (!this._tickInterval) {
- this._tickInterval = self.setInterval(this._boundTick, millis);
- return true;
- }
-
- return false;
- }
- /**
- * @returns {boolean} True when interval was cleared, false when none was set (no effect)
- */
- ;
-
- _proto.clearInterval = function clearInterval() {
- if (this._tickInterval) {
- self.clearInterval(this._tickInterval);
- this._tickInterval = null;
- return true;
- }
-
- return false;
- }
- /**
- * @returns {boolean} True when timeout was cleared, false when none was set (no effect)
- */
- ;
-
- _proto.clearNextTick = function clearNextTick() {
- if (this._tickTimer) {
- self.clearTimeout(this._tickTimer);
- this._tickTimer = null;
- return true;
- }
-
- return false;
- }
- /**
- * Will call the subclass doTick implementation in this main loop tick
- * or in the next one (via setTimeout(,0)) in case it has already been called
- * in this tick (in case this is a re-entrant call).
- */
- ;
-
- _proto.tick = function tick() {
- this._tickCallCount++;
-
- if (this._tickCallCount === 1) {
- this.doTick(); // re-entrant call to tick from previous doTick call stack
- // -> schedule a call on the next main loop iteration to process this task processing request
-
- if (this._tickCallCount > 1) {
- // make sure only one timer exists at any time at max
- this.clearNextTick();
- this._tickTimer = self.setTimeout(this._boundTick, 0);
- }
-
- this._tickCallCount = 0;
- }
- }
- /**
- * For subclass to implement task logic
- * @abstract
- */
- ;
-
- _proto.doTick = function doTick() {};
-
- return TaskLoop;
-}(event_handler);
-
-
-// CONCATENATED MODULE: ./src/controller/base-stream-controller.js
-
-
-function base_stream_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
-
-
-
-
-
-var State = {
- STOPPED: 'STOPPED',
- STARTING: 'STARTING',
- IDLE: 'IDLE',
- PAUSED: 'PAUSED',
- KEY_LOADING: 'KEY_LOADING',
- FRAG_LOADING: 'FRAG_LOADING',
- FRAG_LOADING_WAITING_RETRY: 'FRAG_LOADING_WAITING_RETRY',
- WAITING_TRACK: 'WAITING_TRACK',
- PARSING: 'PARSING',
- PARSED: 'PARSED',
- BUFFER_FLUSHING: 'BUFFER_FLUSHING',
- ENDED: 'ENDED',
- ERROR: 'ERROR',
- WAITING_INIT_PTS: 'WAITING_INIT_PTS',
- WAITING_LEVEL: 'WAITING_LEVEL'
-};
-
-var base_stream_controller_BaseStreamController =
-/*#__PURE__*/
-function (_TaskLoop) {
- base_stream_controller_inheritsLoose(BaseStreamController, _TaskLoop);
-
- function BaseStreamController() {
- return _TaskLoop.apply(this, arguments) || this;
- }
-
- var _proto = BaseStreamController.prototype;
-
- _proto.doTick = function doTick() {};
-
- _proto.startLoad = function startLoad() {};
-
- _proto.stopLoad = function stopLoad() {
- var frag = this.fragCurrent;
-
- if (frag) {
- if (frag.loader) {
- frag.loader.abort();
- }
-
- this.fragmentTracker.removeFragment(frag);
- }
-
- if (this.demuxer) {
- this.demuxer.destroy();
- this.demuxer = null;
- }
-
- this.fragCurrent = null;
- this.fragPrevious = null;
- this.clearInterval();
- this.clearNextTick();
- this.state = State.STOPPED;
- };
-
- _proto._streamEnded = function _streamEnded(bufferInfo, levelDetails) {
- var fragCurrent = this.fragCurrent,
- fragmentTracker = this.fragmentTracker; // we just got done loading the final fragment and there is no other buffered range after ...
- // rationale is that in case there are any buffered ranges after, it means that there are unbuffered portion in between
- // so we should not switch to ENDED in that case, to be able to buffer them
- // dont switch to ENDED if we need to backtrack last fragment
-
- if (!levelDetails.live && fragCurrent && !fragCurrent.backtracked && fragCurrent.sn === levelDetails.endSN && !bufferInfo.nextStart) {
- var fragState = fragmentTracker.getState(fragCurrent);
- return fragState === FragmentState.PARTIAL || fragState === FragmentState.OK;
- }
-
- return false;
- };
-
- _proto.onMediaSeeking = function onMediaSeeking() {
- var config = this.config,
- media = this.media,
- mediaBuffer = this.mediaBuffer,
- state = this.state;
- var currentTime = media ? media.currentTime : null;
- var bufferInfo = BufferHelper.bufferInfo(mediaBuffer || media, currentTime, this.config.maxBufferHole);
-
- if (Object(number_isFinite["isFiniteNumber"])(currentTime)) {
- logger["logger"].log("media seeking to " + currentTime.toFixed(3));
- }
-
- if (state === State.FRAG_LOADING) {
- var fragCurrent = this.fragCurrent; // check if we are seeking to a unbuffered area AND if frag loading is in progress
-
- if (bufferInfo.len === 0 && fragCurrent) {
- var tolerance = config.maxFragLookUpTolerance;
- var fragStartOffset = fragCurrent.start - tolerance;
- var fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance; // check if we seek position will be out of currently loaded frag range : if out cancel frag load, if in, don't do anything
-
- if (currentTime < fragStartOffset || currentTime > fragEndOffset) {
- if (fragCurrent.loader) {
- logger["logger"].log('seeking outside of buffer while fragment load in progress, cancel fragment load');
- fragCurrent.loader.abort();
- }
-
- this.fragCurrent = null;
- this.fragPrevious = null; // switch to IDLE state to load new fragment
-
- this.state = State.IDLE;
- } else {
- logger["logger"].log('seeking outside of buffer but within currently loaded fragment range');
- }
- }
- } else if (state === State.ENDED) {
- // if seeking to unbuffered area, clean up fragPrevious
- if (bufferInfo.len === 0) {
- this.fragPrevious = null;
- this.fragCurrent = null;
- } // switch to IDLE state to check for potential new fragment
-
-
- this.state = State.IDLE;
- }
-
- if (media) {
- this.lastCurrentTime = currentTime;
- } // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
-
-
- if (!this.loadedmetadata) {
- this.nextLoadPosition = this.startPosition = currentTime;
- } // tick to speed up processing
-
-
- this.tick();
- };
-
- _proto.onMediaEnded = function onMediaEnded() {
- // reset startPosition and lastCurrentTime to restart playback @ stream beginning
- this.startPosition = this.lastCurrentTime = 0;
- };
-
- _proto.onHandlerDestroying = function onHandlerDestroying() {
- this.stopLoad();
-
- _TaskLoop.prototype.onHandlerDestroying.call(this);
- };
-
- _proto.onHandlerDestroyed = function onHandlerDestroyed() {
- this.state = State.STOPPED;
- this.fragmentTracker = null;
- };
-
- _proto.computeLivePosition = function computeLivePosition(sliding, levelDetails) {
- var targetLatency = this.config.liveSyncDuration !== undefined ? this.config.liveSyncDuration : this.config.liveSyncDurationCount * levelDetails.targetduration;
- return sliding + Math.max(0, levelDetails.totalduration - targetLatency);
- };
-
- return BaseStreamController;
-}(TaskLoop);
-
-
-// CONCATENATED MODULE: ./src/controller/stream-controller.js
-
-
-
-
-
-
-function stream_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function stream_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) stream_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) stream_controller_defineProperties(Constructor, staticProps); return Constructor; }
-
-function stream_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
-
-/*
- * Stream Controller
-*/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-var TICK_INTERVAL = 100; // how often to tick in ms
-
-var stream_controller_StreamController =
-/*#__PURE__*/
-function (_BaseStreamController) {
- stream_controller_inheritsLoose(StreamController, _BaseStreamController);
-
- function StreamController(hls, fragmentTracker) {
- var _this;
-
- _this = _BaseStreamController.call(this, hls, events["default"].MEDIA_ATTACHED, events["default"].MEDIA_DETACHING, events["default"].MANIFEST_LOADING, events["default"].MANIFEST_PARSED, events["default"].LEVEL_LOADED, events["default"].KEY_LOADED, events["default"].FRAG_LOADED, events["default"].FRAG_LOAD_EMERGENCY_ABORTED, events["default"].FRAG_PARSING_INIT_SEGMENT, events["default"].FRAG_PARSING_DATA, events["default"].FRAG_PARSED, events["default"].ERROR, events["default"].AUDIO_TRACK_SWITCHING, events["default"].AUDIO_TRACK_SWITCHED, events["default"].BUFFER_CREATED, events["default"].BUFFER_APPENDED, events["default"].BUFFER_FLUSHED) || this;
- _this.fragmentTracker = fragmentTracker;
- _this.config = hls.config;
- _this.audioCodecSwap = false;
- _this._state = State.STOPPED;
- _this.stallReported = false;
- _this.gapController = null;
- return _this;
- }
-
- var _proto = StreamController.prototype;
-
- _proto.startLoad = function startLoad(startPosition) {
- if (this.levels) {
- var lastCurrentTime = this.lastCurrentTime,
- hls = this.hls;
- this.stopLoad();
- this.setInterval(TICK_INTERVAL);
- this.level = -1;
- this.fragLoadError = 0;
-
- if (!this.startFragRequested) {
- // determine load level
- var startLevel = hls.startLevel;
-
- if (startLevel === -1) {
- // -1 : guess start Level by doing a bitrate test by loading first fragment of lowest quality level
- startLevel = 0;
- this.bitrateTest = true;
- } // set new level to playlist loader : this will trigger start level load
- // hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
-
-
- this.level = hls.nextLoadLevel = startLevel;
- this.loadedmetadata = false;
- } // if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime
-
-
- if (lastCurrentTime > 0 && startPosition === -1) {
- logger["logger"].log("override startPosition with lastCurrentTime @" + lastCurrentTime.toFixed(3));
- startPosition = lastCurrentTime;
- }
-
- this.state = State.IDLE;
- this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition;
- this.tick();
- } else {
- this.forceStartLoad = true;
- this.state = State.STOPPED;
- }
- };
-
- _proto.stopLoad = function stopLoad() {
- this.forceStartLoad = false;
-
- _BaseStreamController.prototype.stopLoad.call(this);
- };
-
- _proto.doTick = function doTick() {
- switch (this.state) {
- case State.BUFFER_FLUSHING:
- // in buffer flushing state, reset fragLoadError counter
- this.fragLoadError = 0;
- break;
-
- case State.IDLE:
- this._doTickIdle();
-
- break;
-
- case State.WAITING_LEVEL:
- var level = this.levels[this.level]; // check if playlist is already loaded
-
- if (level && level.details) {
- this.state = State.IDLE;
- }
-
- break;
-
- case State.FRAG_LOADING_WAITING_RETRY:
- var now = window.performance.now();
- var retryDate = this.retryDate; // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
-
- if (!retryDate || now >= retryDate || this.media && this.media.seeking) {
- logger["logger"].log('mediaController: retryDate reached, switch back to IDLE state');
- this.state = State.IDLE;
- }
-
- break;
-
- case State.ERROR:
- case State.STOPPED:
- case State.FRAG_LOADING:
- case State.PARSING:
- case State.PARSED:
- case State.ENDED:
- break;
-
- default:
- break;
- } // check buffer
-
-
- this._checkBuffer(); // check/update current fragment
-
-
- this._checkFragmentChanged();
- } // Ironically the "idle" state is the on we do the most logic in it seems ....
- // NOTE: Maybe we could rather schedule a check for buffer length after half of the currently
- // played segment, or on pause/play/seek instead of naively checking every 100ms?
- ;
-
- _proto._doTickIdle = function _doTickIdle() {
- var hls = this.hls,
- config = hls.config,
- media = this.media; // if start level not parsed yet OR
- // if video not attached AND start fragment already requested OR start frag prefetch disable
- // exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment
-
- if (this.levelLastLoaded === undefined || !media && (this.startFragRequested || !config.startFragPrefetch)) {
- return;
- } // if we have not yet loaded any fragment, start loading from start position
-
-
- var pos;
-
- if (this.loadedmetadata) {
- pos = media.currentTime;
- } else {
- pos = this.nextLoadPosition;
- } // determine next load level
-
-
- var level = hls.nextLoadLevel,
- levelInfo = this.levels[level];
-
- if (!levelInfo) {
- return;
- }
-
- var levelBitrate = levelInfo.bitrate,
- maxBufLen; // compute max Buffer Length that we could get from this load level, based on level bitrate.
-
- if (levelBitrate) {
- maxBufLen = Math.max(8 * config.maxBufferSize / levelBitrate, config.maxBufferLength);
- } else {
- maxBufLen = config.maxBufferLength;
- }
-
- maxBufLen = Math.min(maxBufLen, config.maxMaxBufferLength); // determine next candidate fragment to be loaded, based on current position and end of buffer position
- // ensure up to `config.maxMaxBufferLength` of buffer upfront
-
- var bufferInfo = BufferHelper.bufferInfo(this.mediaBuffer ? this.mediaBuffer : media, pos, config.maxBufferHole),
- bufferLen = bufferInfo.len; // Stay idle if we are still with buffer margins
-
- if (bufferLen >= maxBufLen) {
- return;
- } // if buffer length is less than maxBufLen try to load a new fragment ...
-
-
- logger["logger"].trace("buffer length of " + bufferLen.toFixed(3) + " is below max of " + maxBufLen.toFixed(3) + ". checking for more payload ..."); // set next load level : this will trigger a playlist load if needed
-
- this.level = hls.nextLoadLevel = level;
- var levelDetails = levelInfo.details; // if level info not retrieved yet, switch state and wait for level retrieval
- // if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load
- // a useless and outdated fragment (that might even introduce load error if it is already out of the live playlist)
-
- if (!levelDetails || levelDetails.live && this.levelLastLoaded !== level) {
- this.state = State.WAITING_LEVEL;
- return;
- }
-
- if (this._streamEnded(bufferInfo, levelDetails)) {
- var data = {};
-
- if (this.altAudio) {
- data.type = 'video';
- }
-
- this.hls.trigger(events["default"].BUFFER_EOS, data);
- this.state = State.ENDED;
- return;
- } // if we have the levelDetails for the selected variant, lets continue enrichen our stream (load keys/fragments or trigger EOS, etc..)
-
-
- this._fetchPayloadOrEos(pos, bufferInfo, levelDetails);
- };
-
- _proto._fetchPayloadOrEos = function _fetchPayloadOrEos(pos, bufferInfo, levelDetails) {
- var fragPrevious = this.fragPrevious,
- level = this.level,
- fragments = levelDetails.fragments,
- fragLen = fragments.length; // empty playlist
-
- if (fragLen === 0) {
- return;
- } // find fragment index, contiguous with end of buffer position
-
-
- var start = fragments[0].start,
- end = fragments[fragLen - 1].start + fragments[fragLen - 1].duration,
- bufferEnd = bufferInfo.end,
- frag;
-
- if (levelDetails.initSegment && !levelDetails.initSegment.data) {
- frag = levelDetails.initSegment;
- } else {
- // in case of live playlist we need to ensure that requested position is not located before playlist start
- if (levelDetails.live) {
- var initialLiveManifestSize = this.config.initialLiveManifestSize;
-
- if (fragLen < initialLiveManifestSize) {
- logger["logger"].warn("Can not start playback of a level, reason: not enough fragments " + fragLen + " < " + initialLiveManifestSize);
- return;
- }
-
- frag = this._ensureFragmentAtLivePoint(levelDetails, bufferEnd, start, end, fragPrevious, fragments, fragLen); // if it explicitely returns null don't load any fragment and exit function now
-
- if (frag === null) {
- return;
- }
- } else {
- // VoD playlist: if bufferEnd before start of playlist, load first fragment
- if (bufferEnd < start) {
- frag = fragments[0];
- }
- }
- }
-
- if (!frag) {
- frag = this._findFragment(start, fragPrevious, fragLen, fragments, bufferEnd, end, levelDetails);
- }
-
- if (frag) {
- if (frag.encrypted) {
- logger["logger"].log("Loading key for " + frag.sn + " of [" + levelDetails.startSN + " ," + levelDetails.endSN + "],level " + level);
-
- this._loadKey(frag);
- } else {
- logger["logger"].log("Loading " + frag.sn + " of [" + levelDetails.startSN + " ," + levelDetails.endSN + "],level " + level + ", currentTime:" + pos.toFixed(3) + ",bufferEnd:" + bufferEnd.toFixed(3));
-
- this._loadFragment(frag);
- }
- }
- };
-
- _proto._ensureFragmentAtLivePoint = function _ensureFragmentAtLivePoint(levelDetails, bufferEnd, start, end, fragPrevious, fragments, fragLen) {
- var config = this.hls.config,
- media = this.media;
- var frag; // check if requested position is within seekable boundaries :
- // logger.log(`start/pos/bufEnd/seeking:${start.toFixed(3)}/${pos.toFixed(3)}/${bufferEnd.toFixed(3)}/${this.media.seeking}`);
-
- var maxLatency = config.liveMaxLatencyDuration !== undefined ? config.liveMaxLatencyDuration : config.liveMaxLatencyDurationCount * levelDetails.targetduration;
-
- if (bufferEnd < Math.max(start - config.maxFragLookUpTolerance, end - maxLatency)) {
- var liveSyncPosition = this.liveSyncPosition = this.computeLivePosition(start, levelDetails);
- logger["logger"].log("buffer end: " + bufferEnd.toFixed(3) + " is located too far from the end of live sliding playlist, reset currentTime to : " + liveSyncPosition.toFixed(3));
- bufferEnd = liveSyncPosition;
-
- if (media && media.readyState && media.duration > liveSyncPosition) {
- media.currentTime = liveSyncPosition;
- }
-
- this.nextLoadPosition = liveSyncPosition;
- } // if end of buffer greater than live edge, don't load any fragment
- // this could happen if live playlist intermittently slides in the past.
- // level 1 loaded [182580161,182580167]
- // level 1 loaded [182580162,182580169]
- // Loading 182580168 of [182580162 ,182580169],level 1 ..
- // Loading 182580169 of [182580162 ,182580169],level 1 ..
- // level 1 loaded [182580162,182580168] <============= here we should have bufferEnd > end. in that case break to avoid reloading 182580168
- // level 1 loaded [182580164,182580171]
- //
- // don't return null in case media not loaded yet (readystate === 0)
-
-
- if (levelDetails.PTSKnown && bufferEnd > end && media && media.readyState) {
- return null;
- }
-
- if (this.startFragRequested && !levelDetails.PTSKnown) {
- /* we are switching level on live playlist, but we don't have any PTS info for that quality level ...
- try to load frag matching with next SN.
- even if SN are not synchronized between playlists, loading this frag will help us
- compute playlist sliding and find the right one after in case it was not the right consecutive one */
- if (fragPrevious) {
- if (levelDetails.hasProgramDateTime) {
- // Relies on PDT in order to switch bitrates (Support EXT-X-DISCONTINUITY without EXT-X-DISCONTINUITY-SEQUENCE)
- logger["logger"].log("live playlist, switching playlist, load frag with same PDT: " + fragPrevious.programDateTime);
- frag = findFragmentByPDT(fragments, fragPrevious.endProgramDateTime, config.maxFragLookUpTolerance);
- } else {
- // Uses buffer and sequence number to calculate switch segment (required if using EXT-X-DISCONTINUITY-SEQUENCE)
- var targetSN = fragPrevious.sn + 1;
-
- if (targetSN >= levelDetails.startSN && targetSN <= levelDetails.endSN) {
- var fragNext = fragments[targetSN - levelDetails.startSN];
-
- if (fragPrevious.cc === fragNext.cc) {
- frag = fragNext;
- logger["logger"].log("live playlist, switching playlist, load frag with next SN: " + frag.sn);
- }
- } // next frag SN not available (or not with same continuity counter)
- // look for a frag sharing the same CC
-
-
- if (!frag) {
- frag = binary_search.search(fragments, function (frag) {
- return fragPrevious.cc - frag.cc;
- });
-
- if (frag) {
- logger["logger"].log("live playlist, switching playlist, load frag with same CC: " + frag.sn);
- }
- }
- }
- }
-
- if (!frag) {
- /* we have no idea about which fragment should be loaded.
- so let's load mid fragment. it will help computing playlist sliding and find the right one
- */
- frag = fragments[Math.min(fragLen - 1, Math.round(fragLen / 2))];
- logger["logger"].log("live playlist, switching playlist, unknown, load middle frag : " + frag.sn);
- }
- }
-
- return frag;
- };
-
- _proto._findFragment = function _findFragment(start, fragPrevious, fragLen, fragments, bufferEnd, end, levelDetails) {
- var config = this.hls.config;
- var frag;
-
- if (bufferEnd < end) {
- var lookupTolerance = bufferEnd > end - config.maxFragLookUpTolerance ? 0 : config.maxFragLookUpTolerance; // Remove the tolerance if it would put the bufferEnd past the actual end of stream
- // Uses buffer and sequence number to calculate switch segment (required if using EXT-X-DISCONTINUITY-SEQUENCE)
-
- frag = findFragmentByPTS(fragPrevious, fragments, bufferEnd, lookupTolerance);
- } else {
- // reach end of playlist
- frag = fragments[fragLen - 1];
- }
-
- if (frag) {
- var curSNIdx = frag.sn - levelDetails.startSN;
- var sameLevel = fragPrevious && frag.level === fragPrevious.level;
- var prevFrag = fragments[curSNIdx - 1];
- var nextFrag = fragments[curSNIdx + 1]; // logger.log('find SN matching with pos:' + bufferEnd + ':' + frag.sn);
-
- if (fragPrevious && frag.sn === fragPrevious.sn) {
- if (sameLevel && !frag.backtracked) {
- if (frag.sn < levelDetails.endSN) {
- var deltaPTS = fragPrevious.deltaPTS; // if there is a significant delta between audio and video, larger than max allowed hole,
- // and if previous remuxed fragment did not start with a keyframe. (fragPrevious.dropped)
- // let's try to load previous fragment again to get last keyframe
- // then we will reload again current fragment (that way we should be able to fill the buffer hole ...)
-
- if (deltaPTS && deltaPTS > config.maxBufferHole && fragPrevious.dropped && curSNIdx) {
- frag = prevFrag;
- logger["logger"].warn('SN just loaded, with large PTS gap between audio and video, maybe frag is not starting with a keyframe ? load previous one to try to overcome this');
- } else {
- frag = nextFrag;
- logger["logger"].log("SN just loaded, load next one: " + frag.sn, frag);
- }
- } else {
- frag = null;
- }
- } else if (frag.backtracked) {
- // Only backtrack a max of 1 consecutive fragment to prevent sliding back too far when little or no frags start with keyframes
- if (nextFrag && nextFrag.backtracked) {
- logger["logger"].warn("Already backtracked from fragment " + nextFrag.sn + ", will not backtrack to fragment " + frag.sn + ". Loading fragment " + nextFrag.sn);
- frag = nextFrag;
- } else {
- // If a fragment has dropped frames and it's in a same level/sequence, load the previous fragment to try and find the keyframe
- // Reset the dropped count now since it won't be reset until we parse the fragment again, which prevents infinite backtracking on the same segment
- logger["logger"].warn('Loaded fragment with dropped frames, backtracking 1 segment to find a keyframe');
- frag.dropped = 0;
-
- if (prevFrag) {
- frag = prevFrag;
- frag.backtracked = true;
- } else if (curSNIdx) {
- // can't backtrack on very first fragment
- frag = null;
- }
- }
- }
- }
- }
-
- return frag;
- };
-
- _proto._loadKey = function _loadKey(frag) {
- this.state = State.KEY_LOADING;
- this.hls.trigger(events["default"].KEY_LOADING, {
- frag: frag
- });
- };
-
- _proto._loadFragment = function _loadFragment(frag) {
- // Check if fragment is not loaded
- var fragState = this.fragmentTracker.getState(frag);
- this.fragCurrent = frag;
-
- if (frag.sn !== 'initSegment') {
- this.startFragRequested = true;
- } // Don't update nextLoadPosition for fragments which are not buffered
-
-
- if (Object(number_isFinite["isFiniteNumber"])(frag.sn) && !frag.bitrateTest) {
- this.nextLoadPosition = frag.start + frag.duration;
- } // Allow backtracked fragments to load
-
-
- if (frag.backtracked || fragState === FragmentState.NOT_LOADED || fragState === FragmentState.PARTIAL) {
- frag.autoLevel = this.hls.autoLevelEnabled;
- frag.bitrateTest = this.bitrateTest;
- this.hls.trigger(events["default"].FRAG_LOADING, {
- frag: frag
- }); // lazy demuxer init, as this could take some time ... do it during frag loading
-
- if (!this.demuxer) {
- this.demuxer = new demux_demuxer(this.hls, 'main');
- }
-
- this.state = State.FRAG_LOADING;
- } else if (fragState === FragmentState.APPENDING) {
- // Lower the buffer size and try again
- if (this._reduceMaxBufferLength(frag.duration)) {
- this.fragmentTracker.removeFragment(frag);
- }
- }
- };
-
- _proto.getBufferedFrag = function getBufferedFrag(position) {
- return this.fragmentTracker.getBufferedFrag(position, PlaylistLevelType.MAIN);
- };
-
- _proto.followingBufferedFrag = function followingBufferedFrag(frag) {
- if (frag) {
- // try to get range of next fragment (500ms after this range)
- return this.getBufferedFrag(frag.endPTS + 0.5);
- }
-
- return null;
- };
-
- _proto._checkFragmentChanged = function _checkFragmentChanged() {
- var fragPlayingCurrent,
- currentTime,
- video = this.media;
-
- if (video && video.readyState && video.seeking === false) {
- currentTime = video.currentTime;
- /* if video element is in seeked state, currentTime can only increase.
- (assuming that playback rate is positive ...)
- As sometimes currentTime jumps back to zero after a
- media decode error, check this, to avoid seeking back to
- wrong position after a media decode error
- */
-
- if (currentTime > this.lastCurrentTime) {
- this.lastCurrentTime = currentTime;
- }
-
- if (BufferHelper.isBuffered(video, currentTime)) {
- fragPlayingCurrent = this.getBufferedFrag(currentTime);
- } else if (BufferHelper.isBuffered(video, currentTime + 0.1)) {
- /* ensure that FRAG_CHANGED event is triggered at startup,
- when first video frame is displayed and playback is paused.
- add a tolerance of 100ms, in case current position is not buffered,
- check if current pos+100ms is buffered and use that buffer range
- for FRAG_CHANGED event reporting */
- fragPlayingCurrent = this.getBufferedFrag(currentTime + 0.1);
- }
-
- if (fragPlayingCurrent) {
- var fragPlaying = fragPlayingCurrent;
-
- if (fragPlaying !== this.fragPlaying) {
- this.hls.trigger(events["default"].FRAG_CHANGED, {
- frag: fragPlaying
- });
- var fragPlayingLevel = fragPlaying.level;
-
- if (!this.fragPlaying || this.fragPlaying.level !== fragPlayingLevel) {
- this.hls.trigger(events["default"].LEVEL_SWITCHED, {
- level: fragPlayingLevel
- });
- }
-
- this.fragPlaying = fragPlaying;
- }
- }
- }
- }
- /*
- on immediate level switch :
- - pause playback if playing
- - cancel any pending load request
- - and trigger a buffer flush
- */
- ;
-
- _proto.immediateLevelSwitch = function immediateLevelSwitch() {
- logger["logger"].log('immediateLevelSwitch');
-
- if (!this.immediateSwitch) {
- this.immediateSwitch = true;
- var media = this.media,
- previouslyPaused;
-
- if (media) {
- previouslyPaused = media.paused;
- media.pause();
- } else {
- // don't restart playback after instant level switch in case media not attached
- previouslyPaused = true;
- }
-
- this.previouslyPaused = previouslyPaused;
- }
-
- var fragCurrent = this.fragCurrent;
-
- if (fragCurrent && fragCurrent.loader) {
- fragCurrent.loader.abort();
- }
-
- this.fragCurrent = null; // flush everything
-
- this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
- }
- /**
- * on immediate level switch end, after new fragment has been buffered:
- * - nudge video decoder by slightly adjusting video currentTime (if currentTime buffered)
- * - resume the playback if needed
- */
- ;
-
- _proto.immediateLevelSwitchEnd = function immediateLevelSwitchEnd() {
- var media = this.media;
-
- if (media && media.buffered.length) {
- this.immediateSwitch = false;
-
- if (BufferHelper.isBuffered(media, media.currentTime)) {
- // only nudge if currentTime is buffered
- media.currentTime -= 0.0001;
- }
-
- if (!this.previouslyPaused) {
- media.play();
- }
- }
- }
- /**
- * try to switch ASAP without breaking video playback:
- * in order to ensure smooth but quick level switching,
- * we need to find the next flushable buffer range
- * we should take into account new segment fetch time
- */
- ;
-
- _proto.nextLevelSwitch = function nextLevelSwitch() {
- var media = this.media; // ensure that media is defined and that metadata are available (to retrieve currentTime)
-
- if (media && media.readyState) {
- var fetchdelay, fragPlayingCurrent, nextBufferedFrag;
- fragPlayingCurrent = this.getBufferedFrag(media.currentTime);
-
- if (fragPlayingCurrent && fragPlayingCurrent.startPTS > 1) {
- // flush buffer preceding current fragment (flush until current fragment start offset)
- // minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ...
- this.flushMainBuffer(0, fragPlayingCurrent.startPTS - 1);
- }
-
- if (!media.paused) {
- // add a safety delay of 1s
- var nextLevelId = this.hls.nextLoadLevel,
- nextLevel = this.levels[nextLevelId],
- fragLastKbps = this.fragLastKbps;
-
- if (fragLastKbps && this.fragCurrent) {
- fetchdelay = this.fragCurrent.duration * nextLevel.bitrate / (1000 * fragLastKbps) + 1;
- } else {
- fetchdelay = 0;
- }
- } else {
- fetchdelay = 0;
- } // logger.log('fetchdelay:'+fetchdelay);
- // find buffer range that will be reached once new fragment will be fetched
-
-
- nextBufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
-
- if (nextBufferedFrag) {
- // we can flush buffer range following this one without stalling playback
- nextBufferedFrag = this.followingBufferedFrag(nextBufferedFrag);
-
- if (nextBufferedFrag) {
- // if we are here, we can also cancel any loading/demuxing in progress, as they are useless
- var fragCurrent = this.fragCurrent;
-
- if (fragCurrent && fragCurrent.loader) {
- fragCurrent.loader.abort();
- }
-
- this.fragCurrent = null; // start flush position is the start PTS of next buffered frag.
- // we use frag.naxStartPTS which is max(audio startPTS, video startPTS).
- // in case there is a small PTS Delta between audio and video, using maxStartPTS avoids flushing last samples from current fragment
-
- this.flushMainBuffer(nextBufferedFrag.maxStartPTS, Number.POSITIVE_INFINITY);
- }
- }
- }
- };
-
- _proto.flushMainBuffer = function flushMainBuffer(startOffset, endOffset) {
- this.state = State.BUFFER_FLUSHING;
- var flushScope = {
- startOffset: startOffset,
- endOffset: endOffset
- }; // if alternate audio tracks are used, only flush video, otherwise flush everything
-
- if (this.altAudio) {
- flushScope.type = 'video';
- }
-
- this.hls.trigger(events["default"].BUFFER_FLUSHING, flushScope);
- };
-
- _proto.onMediaAttached = function onMediaAttached(data) {
- var media = this.media = this.mediaBuffer = data.media;
- this.onvseeking = this.onMediaSeeking.bind(this);
- this.onvseeked = this.onMediaSeeked.bind(this);
- this.onvended = this.onMediaEnded.bind(this);
- media.addEventListener('seeking', this.onvseeking);
- media.addEventListener('seeked', this.onvseeked);
- media.addEventListener('ended', this.onvended);
- var config = this.config;
-
- if (this.levels && config.autoStartLoad) {
- this.hls.startLoad(config.startPosition);
+ this.hls.trigger(events["default"].MANIFEST_PARSED, {
+ levels: levels,
+ audioTracks: audioTracks,
+ firstLevel: this._firstLevel,
+ stats: data.stats,
+ audio: audioCodecFound,
+ video: videoCodecFound,
+ altAudio: audioTracks.some(function (t) {
+ return !!t.url;
+ })
+ });
+ } else {
+ this.hls.trigger(events["default"].ERROR, {
+ type: errors["ErrorTypes"].MEDIA_ERROR,
+ details: errors["ErrorDetails"].MANIFEST_INCOMPATIBLE_CODECS_ERROR,
+ fatal: true,
+ url: this.hls.url,
+ reason: 'no level with compatible codecs found in manifest'
+ });
}
-
- this.gapController = new gap_controller_GapController(config, media, this.fragmentTracker, this.hls);
};
- _proto.onMediaDetaching = function onMediaDetaching() {
- var media = this.media;
+ _proto.setLevelInternal = function setLevelInternal(newLevel) {
+ var levels = this._levels;
+ var hls = this.hls; // check if level idx is valid
- if (media && media.ended) {
- logger["logger"].log('MSE detaching and video ended, reset startPosition');
- this.startPosition = this.lastCurrentTime = 0;
- } // reset fragment backtracked flag
+ if (newLevel >= 0 && newLevel < levels.length) {
+ // stopping live reloading timer if any
+ this.clearTimer();
+ if (this.currentLevelIndex !== newLevel) {
+ logger["logger"].log("switching to level " + newLevel);
+ this.currentLevelIndex = newLevel;
+ var levelProperties = levels[newLevel];
+ levelProperties.level = newLevel;
+ hls.trigger(events["default"].LEVEL_SWITCHING, levelProperties);
+ }
- var levels = this.levels;
+ var level = levels[newLevel];
+ var levelDetails = level.details; // check if we need to load playlist for this level
- if (levels) {
- levels.forEach(function (level) {
- if (level.details) {
- level.details.fragments.forEach(function (fragment) {
- fragment.backtracked = undefined;
- });
- }
+ if (!levelDetails || levelDetails.live) {
+ // level not retrieved yet, or live playlist we need to (re)load it
+ var urlId = level.urlId;
+ hls.trigger(events["default"].LEVEL_LOADING, {
+ url: level.url[urlId],
+ level: newLevel,
+ id: urlId
+ });
+ }
+ } else {
+ // invalid level id given, trigger error
+ hls.trigger(events["default"].ERROR, {
+ type: errors["ErrorTypes"].OTHER_ERROR,
+ details: errors["ErrorDetails"].LEVEL_SWITCH_ERROR,
+ level: newLevel,
+ fatal: false,
+ reason: 'invalid level idx'
});
- } // remove video listeners
-
-
- if (media) {
- media.removeEventListener('seeking', this.onvseeking);
- media.removeEventListener('seeked', this.onvseeked);
- media.removeEventListener('ended', this.onvended);
- this.onvseeking = this.onvseeked = this.onvended = null;
}
-
- this.media = this.mediaBuffer = null;
- this.loadedmetadata = false;
- this.stopLoad();
};
- _proto.onMediaSeeked = function onMediaSeeked() {
- var media = this.media,
- currentTime = media ? media.currentTime : undefined;
-
- if (Object(number_isFinite["isFiniteNumber"])(currentTime)) {
- logger["logger"].log("media seeked to " + currentTime.toFixed(3));
- } // tick to speed up FRAGMENT_PLAYING triggering
-
-
- this.tick();
- };
+ _proto.onError = function onError(data) {
+ if (data.fatal) {
+ if (data.type === errors["ErrorTypes"].NETWORK_ERROR) {
+ this.clearTimer();
+ }
- _proto.onManifestLoading = function onManifestLoading() {
- // reset buffer on manifest loading
- logger["logger"].log('trigger BUFFER_RESET');
- this.hls.trigger(events["default"].BUFFER_RESET);
- this.fragmentTracker.removeAllFragments();
- this.stalled = false;
- this.startPosition = this.lastCurrentTime = 0;
- };
+ return;
+ }
- _proto.onManifestParsed = function onManifestParsed(data) {
- var aac = false,
- heaac = false,
- codec;
- data.levels.forEach(function (level) {
- // detect if we have different kind of audio codecs used amongst playlists
- codec = level.audioCodec;
+ var levelError = false,
+ fragmentError = false;
+ var levelIndex; // try to recover not fatal errors
- if (codec) {
- if (codec.indexOf('mp4a.40.2') !== -1) {
- aac = true;
- }
+ switch (data.details) {
+ case errors["ErrorDetails"].FRAG_LOAD_ERROR:
+ case errors["ErrorDetails"].FRAG_LOAD_TIMEOUT:
+ case errors["ErrorDetails"].KEY_LOAD_ERROR:
+ case errors["ErrorDetails"].KEY_LOAD_TIMEOUT:
+ levelIndex = data.frag.level;
+ fragmentError = true;
+ break;
- if (codec.indexOf('mp4a.40.5') !== -1) {
- heaac = true;
- }
- }
- });
- this.audioCodecSwitch = aac && heaac;
+ case errors["ErrorDetails"].LEVEL_LOAD_ERROR:
+ case errors["ErrorDetails"].LEVEL_LOAD_TIMEOUT:
+ levelIndex = data.context.level;
+ levelError = true;
+ break;
- if (this.audioCodecSwitch) {
- logger["logger"].log('both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC');
+ case errors["ErrorDetails"].REMUX_ALLOC_ERROR:
+ levelIndex = data.level;
+ levelError = true;
+ break;
}
- this.levels = data.levels;
- this.startFragRequested = false;
- var config = this.config;
-
- if (config.autoStartLoad || this.forceStartLoad) {
- this.hls.startLoad(config.startPosition);
+ if (levelIndex !== undefined) {
+ this.recoverLevel(data, levelIndex, levelError, fragmentError);
}
- };
+ }
+ /**
+ * Switch to a redundant stream if any available.
+ * If redundant stream is not available, emergency switch down if ABR mode is enabled.
+ *
+ * @param {Object} errorEvent
+ * @param {Number} levelIndex current level index
+ * @param {Boolean} levelError
+ * @param {Boolean} fragmentError
+ */
+ // FIXME Find a better abstraction where fragment/level retry management is well decoupled
+ ;
- _proto.onLevelLoaded = function onLevelLoaded(data) {
- var newDetails = data.details;
- var newLevelId = data.level;
- var lastLevel = this.levels[this.levelLastLoaded];
- var curLevel = this.levels[newLevelId];
- var duration = newDetails.totalduration;
- var sliding = 0;
- logger["logger"].log("level " + newLevelId + " loaded [" + newDetails.startSN + "," + newDetails.endSN + "],duration:" + duration);
+ _proto.recoverLevel = function recoverLevel(errorEvent, levelIndex, levelError, fragmentError) {
+ var _this2 = this;
- if (newDetails.live) {
- var curDetails = curLevel.details;
+ var config = this.hls.config;
+ var errorDetails = errorEvent.details;
+ var level = this._levels[levelIndex];
+ var redundantLevels, delay, nextLevel;
+ level.loadError++;
+ level.fragmentError = fragmentError;
- if (curDetails && newDetails.fragments.length > 0) {
- // we already have details for that level, merge them
- mergeDetails(curDetails, newDetails);
- sliding = newDetails.fragments[0].start;
- this.liveSyncPosition = this.computeLivePosition(sliding, curDetails);
+ if (levelError) {
+ if (this.levelRetryCount + 1 <= config.levelLoadingMaxRetry) {
+ // exponential backoff capped to max retry timeout
+ delay = Math.min(Math.pow(2, this.levelRetryCount) * config.levelLoadingRetryDelay, config.levelLoadingMaxRetryTimeout); // Schedule level reload
- if (newDetails.PTSKnown && Object(number_isFinite["isFiniteNumber"])(sliding)) {
- logger["logger"].log("live playlist sliding:" + sliding.toFixed(3));
- } else {
- logger["logger"].log('live playlist - outdated PTS, unknown sliding');
- alignStream(this.fragPrevious, lastLevel, newDetails);
- }
+ this.timer = setTimeout(function () {
+ return _this2.loadLevel();
+ }, delay); // boolean used to inform stream controller not to switch back to IDLE on non fatal error
+
+ errorEvent.levelRetry = true;
+ this.levelRetryCount++;
+ logger["logger"].warn("level controller, " + errorDetails + ", retry in " + delay + " ms, current retry count is " + this.levelRetryCount);
} else {
- logger["logger"].log('live playlist - first load, unknown sliding');
- newDetails.PTSKnown = false;
- alignStream(this.fragPrevious, lastLevel, newDetails);
- }
- } else {
- newDetails.PTSKnown = false;
- } // override level info
+ logger["logger"].error("level controller, cannot recover from " + errorDetails + " error");
+ this.currentLevelIndex = null; // stopping live reloading timer if any
+ this.clearTimer(); // switch error to fatal
- curLevel.details = newDetails;
- this.levelLastLoaded = newLevelId;
- this.hls.trigger(events["default"].LEVEL_UPDATED, {
- details: newDetails,
- level: newLevelId
- });
+ errorEvent.fatal = true;
+ return;
+ }
+ } // Try any redundant streams if available for both errors: level and fragment
+ // If level.loadError reaches redundantLevels it means that we tried them all, no hope => let's switch down
- if (this.startFragRequested === false) {
- // compute start position if set to -1. use it straight away if value is defined
- if (this.startPosition === -1 || this.lastCurrentTime === -1) {
- // first, check if start time offset has been set in playlist, if yes, use this value
- var startTimeOffset = newDetails.startTimeOffset;
- if (Object(number_isFinite["isFiniteNumber"])(startTimeOffset)) {
- if (startTimeOffset < 0) {
- logger["logger"].log("negative start time offset " + startTimeOffset + ", count from end of last fragment");
- startTimeOffset = sliding + duration + startTimeOffset;
- }
+ if (levelError || fragmentError) {
+ redundantLevels = level.url.length;
- logger["logger"].log("start time offset found in playlist, adjust startPosition to " + startTimeOffset);
- this.startPosition = startTimeOffset;
- } else {
- // if live playlist, set start position to be fragment N-this.config.liveSyncDurationCount (usually 3)
- if (newDetails.live) {
- this.startPosition = this.computeLivePosition(sliding, newDetails);
- logger["logger"].log("configure startPosition to " + this.startPosition);
- } else {
- this.startPosition = 0;
- }
+ if (redundantLevels > 1 && level.loadError < redundantLevels) {
+ level.urlId = (level.urlId + 1) % redundantLevels;
+ level.details = undefined;
+ logger["logger"].warn("level controller, " + errorDetails + " for level " + levelIndex + ": switching to redundant URL-id " + level.urlId); // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
+ // console.log('New video quality level audio group id:', level.attrs.AUDIO);
+ } else {
+ // Search for available level
+ if (this.manualLevelIndex === -1) {
+ // When lowest level has been reached, let's start hunt from the top
+ nextLevel = levelIndex === 0 ? this._levels.length - 1 : levelIndex - 1;
+ logger["logger"].warn("level controller, " + errorDetails + ": switch to " + nextLevel);
+ this.hls.nextAutoLevel = this.currentLevelIndex = nextLevel;
+ } else if (fragmentError) {
+ // Allow fragment retry as long as configuration allows.
+ // reset this._level so that another call to set level() will trigger again a frag load
+ logger["logger"].warn("level controller, " + errorDetails + ": reload a fragment");
+ this.currentLevelIndex = null;
}
-
- this.lastCurrentTime = this.startPosition;
}
+ }
+ } // reset errors on the successful load of a fragment
+ ;
- this.nextLoadPosition = this.startPosition;
- } // only switch batck to IDLE state if we were waiting for level to start downloading a new fragment
-
-
- if (this.state === State.WAITING_LEVEL) {
- this.state = State.IDLE;
- } // trigger handler right now
-
+ _proto.onFragLoaded = function onFragLoaded(_ref3) {
+ var frag = _ref3.frag;
- this.tick();
- };
+ if (frag !== undefined && frag.type === 'main') {
+ var level = this._levels[frag.level];
- _proto.onKeyLoaded = function onKeyLoaded() {
- if (this.state === State.KEY_LOADING) {
- this.state = State.IDLE;
- this.tick();
+ if (level !== undefined) {
+ level.fragmentError = false;
+ level.loadError = 0;
+ this.levelRetryCount = 0;
+ }
}
};
- _proto.onFragLoaded = function onFragLoaded(data) {
- var fragCurrent = this.fragCurrent,
- hls = this.hls,
- levels = this.levels,
- media = this.media;
- var fragLoaded = data.frag;
-
- if (this.state === State.FRAG_LOADING && fragCurrent && fragLoaded.type === 'main' && fragLoaded.level === fragCurrent.level && fragLoaded.sn === fragCurrent.sn) {
- var stats = data.stats;
- var currentLevel = levels[fragCurrent.level];
- var details = currentLevel.details; // reset frag bitrate test in any case after frag loaded event
- // if this frag was loaded to perform a bitrate test AND if hls.nextLoadLevel is greater than 0
- // then this means that we should be able to load a fragment at a higher quality level
-
- this.bitrateTest = false;
- this.stats = stats;
- logger["logger"].log("Loaded " + fragCurrent.sn + " of [" + details.startSN + " ," + details.endSN + "],level " + fragCurrent.level);
-
- if (fragLoaded.bitrateTest && hls.nextLoadLevel) {
- // switch back to IDLE state ... we just loaded a fragment to determine adequate start bitrate and initialize autoswitch algo
- this.state = State.IDLE;
- this.startFragRequested = false;
- stats.tparsed = stats.tbuffered = window.performance.now();
- hls.trigger(events["default"].FRAG_BUFFERED, {
- stats: stats,
- frag: fragCurrent,
- id: 'main'
- });
- this.tick();
- } else if (fragLoaded.sn === 'initSegment') {
- this.state = State.IDLE;
- stats.tparsed = stats.tbuffered = window.performance.now();
- details.initSegment.data = data.payload;
- hls.trigger(events["default"].FRAG_BUFFERED, {
- stats: stats,
- frag: fragCurrent,
- id: 'main'
- });
- this.tick();
- } else {
- logger["logger"].log("Parsing " + fragCurrent.sn + " of [" + details.startSN + " ," + details.endSN + "],level " + fragCurrent.level + ", cc " + fragCurrent.cc);
- this.state = State.PARSING;
- this.pendingBuffering = true;
- this.appended = false; // Bitrate test frags are not usually buffered so the fragment tracker ignores them. If Hls.js decides to buffer
- // it (and therefore ends up at this line), then the fragment tracker needs to be manually informed.
-
- if (fragLoaded.bitrateTest) {
- fragLoaded.bitrateTest = false;
- this.fragmentTracker.onFragLoaded({
- frag: fragLoaded
- });
- } // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live) and if media is not seeking (this is to overcome potential timestamp drifts between playlists and fragments)
+ _proto.onLevelLoaded = function onLevelLoaded(data) {
+ var _this3 = this;
+ var level = data.level,
+ details = data.details; // only process level loaded events matching with expected level
- var accurateTimeOffset = !(media && media.seeking) && (details.PTSKnown || !details.live);
- var initSegmentData = details.initSegment ? details.initSegment.data : [];
+ if (level !== this.currentLevelIndex) {
+ return;
+ }
- var audioCodec = this._getAudioCodec(currentLevel); // transmux the MPEG-TS data to ISO-BMFF segments
+ var curLevel = this._levels[level]; // reset level load error counter on successful level loaded only if there is no issues with fragments
+ if (!curLevel.fragmentError) {
+ curLevel.loadError = 0;
+ this.levelRetryCount = 0;
+ } // if current playlist is a live playlist, arm a timer to reload it
- var demuxer = this.demuxer = this.demuxer || new demux_demuxer(this.hls, 'main');
- demuxer.push(data.payload, initSegmentData, audioCodec, currentLevel.videoCodec, fragCurrent, details.totalduration, accurateTimeOffset);
- }
- }
- this.fragLoadError = 0;
+ if (details.live) {
+ var reloadInterval = computeReloadInterval(curLevel.details, details, data.stats.trequest);
+ logger["logger"].log("live playlist, reload in " + Math.round(reloadInterval) + " ms");
+ this.timer = setTimeout(function () {
+ return _this3.loadLevel();
+ }, reloadInterval);
+ } else {
+ this.clearTimer();
+ }
};
- _proto.onFragParsingInitSegment = function onFragParsingInitSegment(data) {
- var fragCurrent = this.fragCurrent;
- var fragNew = data.frag;
+ _proto.onAudioTrackSwitched = function onAudioTrackSwitched(data) {
+ var audioGroupId = this.hls.audioTracks[data.id].groupId;
+ var currentLevel = this.hls.levels[this.currentLevelIndex];
- if (fragCurrent && data.id === 'main' && fragNew.sn === fragCurrent.sn && fragNew.level === fragCurrent.level && this.state === State.PARSING) {
- var tracks = data.tracks,
- trackName,
- track; // if audio track is expected to come from audio stream controller, discard any coming from main
+ if (!currentLevel) {
+ return;
+ }
- if (tracks.audio && this.altAudio) {
- delete tracks.audio;
- } // include levelCodec in audio and video tracks
+ if (currentLevel.audioGroupIds) {
+ var urlId = -1;
+ for (var i = 0; i < currentLevel.audioGroupIds.length; i++) {
+ if (currentLevel.audioGroupIds[i] === audioGroupId) {
+ urlId = i;
+ break;
+ }
+ }
- track = tracks.audio;
+ if (urlId !== currentLevel.urlId) {
+ currentLevel.urlId = urlId;
+ this.startLoad();
+ }
+ }
+ };
- if (track) {
- var audioCodec = this.levels[this.level].audioCodec,
- ua = navigator.userAgent.toLowerCase();
+ _proto.loadLevel = function loadLevel() {
+ logger["logger"].debug('call to loadLevel');
- if (audioCodec && this.audioCodecSwap) {
- logger["logger"].log('swapping playlist audio codec');
+ if (this.currentLevelIndex !== null && this.canload) {
+ var levelObject = this._levels[this.currentLevelIndex];
- if (audioCodec.indexOf('mp4a.40.5') !== -1) {
- audioCodec = 'mp4a.40.2';
- } else {
- audioCodec = 'mp4a.40.5';
- }
- } // in case AAC and HE-AAC audio codecs are signalled in manifest
- // force HE-AAC , as it seems that most browsers prefers that way,
- // except for mono streams OR on FF
- // these conditions might need to be reviewed ...
+ if (typeof levelObject === 'object' && levelObject.url.length > 0) {
+ var level = this.currentLevelIndex;
+ var id = levelObject.urlId;
+ var url = levelObject.url[id];
+ logger["logger"].log("Attempt loading level index " + level + " with URL-id " + id); // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
+ // console.log('New video quality level audio group id:', levelObject.attrs.AUDIO, level);
+ this.hls.trigger(events["default"].LEVEL_LOADING, {
+ url: url,
+ level: level,
+ id: id
+ });
+ }
+ }
+ };
- if (this.audioCodecSwitch) {
- // don't force HE-AAC if mono stream
- if (track.metadata.channelCount !== 1 && // don't force HE-AAC if firefox
- ua.indexOf('firefox') === -1) {
- audioCodec = 'mp4a.40.5';
- }
- } // HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise
+ level_controller_createClass(LevelController, [{
+ key: "levels",
+ get: function get() {
+ return this._levels;
+ }
+ }, {
+ key: "level",
+ get: function get() {
+ return this.currentLevelIndex;
+ },
+ set: function set(newLevel) {
+ var levels = this._levels;
+ if (levels) {
+ newLevel = Math.min(newLevel, levels.length - 1);
- if (ua.indexOf('android') !== -1 && track.container !== 'audio/mpeg') {
- // Exclude mpeg audio
- audioCodec = 'mp4a.40.2';
- logger["logger"].log("Android: force audio codec to " + audioCodec);
+ if (this.currentLevelIndex !== newLevel || !levels[newLevel].details) {
+ this.setLevelInternal(newLevel);
}
-
- track.levelCodec = audioCodec;
- track.id = data.id;
}
+ }
+ }, {
+ key: "manualLevel",
+ get: function get() {
+ return this.manualLevelIndex;
+ },
+ set: function set(newLevel) {
+ this.manualLevelIndex = newLevel;
- track = tracks.video;
+ if (this._startLevel === undefined) {
+ this._startLevel = newLevel;
+ }
- if (track) {
- track.levelCodec = this.levels[this.level].videoCodec;
- track.id = data.id;
+ if (newLevel !== -1) {
+ this.level = newLevel;
}
+ }
+ }, {
+ key: "firstLevel",
+ get: function get() {
+ return this._firstLevel;
+ },
+ set: function set(newLevel) {
+ this._firstLevel = newLevel;
+ }
+ }, {
+ key: "startLevel",
+ get: function get() {
+ // hls.startLevel takes precedence over config.startLevel
+ // if none of these values are defined, fallback on this._firstLevel (first quality level appearing in variant manifest)
+ if (this._startLevel === undefined) {
+ var configStartLevel = this.hls.config.startLevel;
- this.hls.trigger(events["default"].BUFFER_CODECS, tracks); // loop through tracks that are going to be provided to bufferController
+ if (configStartLevel !== undefined) {
+ return configStartLevel;
+ } else {
+ return this._firstLevel;
+ }
+ } else {
+ return this._startLevel;
+ }
+ },
+ set: function set(newLevel) {
+ this._startLevel = newLevel;
+ }
+ }, {
+ key: "nextLoadLevel",
+ get: function get() {
+ if (this.manualLevelIndex !== -1) {
+ return this.manualLevelIndex;
+ } else {
+ return this.hls.nextAutoLevel;
+ }
+ },
+ set: function set(nextLevel) {
+ this.level = nextLevel;
- for (trackName in tracks) {
- track = tracks[trackName];
- logger["logger"].log("main track:" + trackName + ",container:" + track.container + ",codecs[level/parsed]=[" + track.levelCodec + "/" + track.codec + "]");
- var initSegment = track.initSegment;
+ if (this.manualLevelIndex === -1) {
+ this.hls.nextAutoLevel = nextLevel;
+ }
+ }
+ }]);
- if (initSegment) {
- this.appended = true; // arm pending Buffering flag before appending a segment
+ return LevelController;
+}(event_handler);
- this.pendingBuffering = true;
- this.hls.trigger(events["default"].BUFFER_APPENDING, {
- type: trackName,
- data: initSegment,
- parent: 'main',
- content: 'initSegment'
- });
- }
- } // trigger handler right now
+// EXTERNAL MODULE: ./src/demux/id3.js
+var id3 = __webpack_require__("./src/demux/id3.js");
- this.tick();
- }
- };
+// CONCATENATED MODULE: ./src/utils/texttrack-utils.ts
+function sendAddTrackEvent(track, videoEl) {
+ var event;
- _proto.onFragParsingData = function onFragParsingData(data) {
- var _this2 = this;
+ try {
+ event = new Event('addtrack');
+ } catch (err) {
+ // for IE11
+ event = document.createEvent('Event');
+ event.initEvent('addtrack', false, false);
+ }
- var fragCurrent = this.fragCurrent;
- var fragNew = data.frag;
+ event.track = track;
+ videoEl.dispatchEvent(event);
+}
+function clearCurrentCues(track) {
+ if (track && track.cues) {
+ while (track.cues.length > 0) {
+ track.removeCue(track.cues[0]);
+ }
+ }
+}
+/**
- if (fragCurrent && data.id === 'main' && fragNew.sn === fragCurrent.sn && fragNew.level === fragCurrent.level && !(data.type === 'audio' && this.altAudio) && // filter out main audio if audio track is loaded through audio stream controller
- this.state === State.PARSING) {
- var level = this.levels[this.level],
- frag = fragCurrent;
+ * Given a list of Cues, finds the closest cue matching the given time.
+ * Modified verison of binary search O(log(n)).
+ *
+ * @export
+ * @param {(TextTrackCueList | TextTrackCue[])} cues - List of cues.
+ * @param {number} time - Target time, to find closest cue to.
+ * @returns {TextTrackCue}
+ */
- if (!Object(number_isFinite["isFiniteNumber"])(data.endPTS)) {
- data.endPTS = data.startPTS + fragCurrent.duration;
- data.endDTS = data.startDTS + fragCurrent.duration;
- }
+function getClosestCue(cues, time) {
+ // If the offset is less than the first element, the first element is the closest.
+ if (time < cues[0].endTime) {
+ return cues[0];
+ } // If the offset is greater than the last cue, the last is the closest.
- if (data.hasAudio === true) {
- frag.addElementaryStream(ElementaryStreamTypes.AUDIO);
- }
- if (data.hasVideo === true) {
- frag.addElementaryStream(ElementaryStreamTypes.VIDEO);
- }
+ if (time > cues[cues.length - 1].endTime) {
+ return cues[cues.length - 1];
+ }
- logger["logger"].log("Parsed " + data.type + ",PTS:[" + data.startPTS.toFixed(3) + "," + data.endPTS.toFixed(3) + "],DTS:[" + data.startDTS.toFixed(3) + "/" + data.endDTS.toFixed(3) + "],nb:" + data.nb + ",dropped:" + (data.dropped || 0)); // Detect gaps in a fragment and try to fix it by finding a keyframe in the previous fragment (see _findFragments)
+ var left = 0;
+ var right = cues.length - 1;
- if (data.type === 'video') {
- frag.dropped = data.dropped;
+ while (left <= right) {
+ var mid = Math.floor((right + left) / 2);
- if (frag.dropped) {
- if (!frag.backtracked) {
- var levelDetails = level.details;
+ if (time < cues[mid].endTime) {
+ right = mid - 1;
+ } else if (time > cues[mid].endTime) {
+ left = mid + 1;
+ } else {
+ // If it's not lower or higher, it must be equal.
+ return cues[mid];
+ }
+ } // At this point, left and right have swapped.
+ // No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
- if (levelDetails && frag.sn === levelDetails.startSN) {
- logger["logger"].warn('missing video frame(s) on first frag, appending with gap', frag.sn);
- } else {
- logger["logger"].warn('missing video frame(s), backtracking fragment', frag.sn); // Return back to the IDLE state without appending to buffer
- // Causes findFragments to backtrack a segment and find the keyframe
- // Audio fragments arriving before video sets the nextLoadPosition, causing _findFragments to skip the backtracked fragment
- this.fragmentTracker.removeFragment(frag);
- frag.backtracked = true;
- this.nextLoadPosition = data.startPTS;
- this.state = State.IDLE;
- this.fragPrevious = frag;
- this.tick();
- return;
- }
- } else {
- logger["logger"].warn('Already backtracked on this fragment, appending with the gap', frag.sn);
- }
- } else {
- // Only reset the backtracked flag if we've loaded the frag without any dropped frames
- frag.backtracked = false;
- }
- }
+ return cues[left].endTime - time < time - cues[right].endTime ? cues[left] : cues[right];
+}
+// CONCATENATED MODULE: ./src/controller/id3-track-controller.js
+function id3_track_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
- var drift = updateFragPTSDTS(level.details, frag, data.startPTS, data.endPTS, data.startDTS, data.endDTS),
- hls = this.hls;
- hls.trigger(events["default"].LEVEL_PTS_UPDATED, {
- details: level.details,
- level: this.level,
- drift: drift,
- type: data.type,
- start: data.startPTS,
- end: data.endPTS
- }); // has remuxer dropped video frames located before first keyframe ?
-
- [data.data1, data.data2].forEach(function (buffer) {
- // only append in PARSING state (rationale is that an appending error could happen synchronously on first segment appending)
- // in that case it is useless to append following segments
- if (buffer && buffer.length && _this2.state === State.PARSING) {
- _this2.appended = true; // arm pending Buffering flag before appending a segment
-
- _this2.pendingBuffering = true;
- hls.trigger(events["default"].BUFFER_APPENDING, {
- type: data.type,
- data: buffer,
- parent: 'main',
- content: 'data'
- });
- }
- }); // trigger handler right now
+/*
+ * id3 metadata track controller
+*/
- this.tick();
- }
- };
- _proto.onFragParsed = function onFragParsed(data) {
- var fragCurrent = this.fragCurrent;
- var fragNew = data.frag;
- if (fragCurrent && data.id === 'main' && fragNew.sn === fragCurrent.sn && fragNew.level === fragCurrent.level && this.state === State.PARSING) {
- this.stats.tparsed = window.performance.now();
- this.state = State.PARSED;
- this._checkAppendedParsed();
- }
- };
- _proto.onAudioTrackSwitching = function onAudioTrackSwitching(data) {
- // if any URL found on new audio track, it is an alternate audio track
- var altAudio = !!data.url,
- trackId = data.id; // if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered
- // don't do anything if we switch to alt audio: audio stream controller is handling it.
- // we will just have to change buffer scheduling on audioTrackSwitched
- if (!altAudio) {
- if (this.mediaBuffer !== this.media) {
- logger["logger"].log('switching on main audio, use media.buffered to schedule main fragment loading');
- this.mediaBuffer = this.media;
- var fragCurrent = this.fragCurrent; // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
-
- if (fragCurrent.loader) {
- logger["logger"].log('switching to main audio track, cancel main fragment load');
- fragCurrent.loader.abort();
- }
+var id3_track_controller_ID3TrackController =
+/*#__PURE__*/
+function (_EventHandler) {
+ id3_track_controller_inheritsLoose(ID3TrackController, _EventHandler);
- this.fragCurrent = null;
- this.fragPrevious = null; // destroy demuxer to force init segment generation (following audio switch)
+ function ID3TrackController(hls) {
+ var _this;
- if (this.demuxer) {
- this.demuxer.destroy();
- this.demuxer = null;
- } // switch to IDLE state to load new fragment
+ _this = _EventHandler.call(this, hls, events["default"].MEDIA_ATTACHED, events["default"].MEDIA_DETACHING, events["default"].FRAG_PARSING_METADATA, events["default"].LIVE_BACK_BUFFER_REACHED) || this;
+ _this.id3Track = undefined;
+ _this.media = undefined;
+ return _this;
+ }
+ var _proto = ID3TrackController.prototype;
- this.state = State.IDLE;
- }
+ _proto.destroy = function destroy() {
+ event_handler.prototype.destroy.call(this);
+ } // Add ID3 metatadata text track.
+ ;
- var hls = this.hls; // switching to main audio, flush all audio and trigger track switched
+ _proto.onMediaAttached = function onMediaAttached(data) {
+ this.media = data.media;
- hls.trigger(events["default"].BUFFER_FLUSHING, {
- startOffset: 0,
- endOffset: Number.POSITIVE_INFINITY,
- type: 'audio'
- });
- hls.trigger(events["default"].AUDIO_TRACK_SWITCHED, {
- id: trackId
- });
- this.altAudio = false;
- }
+ if (!this.media) {}
};
- _proto.onAudioTrackSwitched = function onAudioTrackSwitched(data) {
- var trackId = data.id,
- altAudio = !!this.hls.audioTracks[trackId].url;
+ _proto.onMediaDetaching = function onMediaDetaching() {
+ clearCurrentCues(this.id3Track);
+ this.id3Track = undefined;
+ this.media = undefined;
+ };
- if (altAudio) {
- var videoBuffer = this.videoBuffer; // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
+ _proto.getID3Track = function getID3Track(textTracks) {
+ for (var i = 0; i < textTracks.length; i++) {
+ var textTrack = textTracks[i];
- if (videoBuffer && this.mediaBuffer !== videoBuffer) {
- logger["logger"].log('switching on alternate audio, use video.buffered to schedule main fragment loading');
- this.mediaBuffer = videoBuffer;
+ if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
+ // send 'addtrack' when reusing the textTrack for metadata,
+ // same as what we do for captions
+ sendAddTrackEvent(textTrack, this.media);
+ return textTrack;
}
}
- this.altAudio = altAudio;
- this.tick();
+ return this.media.addTextTrack('metadata', 'id3');
};
- _proto.onBufferCreated = function onBufferCreated(data) {
- var tracks = data.tracks,
- mediaTrack,
- name,
- alternate = false;
-
- for (var type in tracks) {
- var track = tracks[type];
-
- if (track.id === 'main') {
- name = type;
- mediaTrack = track; // keep video source buffer reference
-
- if (type === 'video') {
- this.videoBuffer = tracks[type].buffer;
- }
- } else {
- alternate = true;
- }
- }
+ _proto.onLiveBackBufferReached = function onLiveBackBufferReached(_ref) {
+ var bufferEnd = _ref.bufferEnd;
- if (alternate && mediaTrack) {
- logger["logger"].log("alternate track found, use " + name + ".buffered to schedule main fragment loading");
- this.mediaBuffer = mediaTrack.buffer;
- } else {
- this.mediaBuffer = this.media;
+ if (!this.id3Track || !this.id3Track.cues || !this.id3Track.cues.length) {
+ return;
}
- };
-
- _proto.onBufferAppended = function onBufferAppended(data) {
- if (data.parent === 'main') {
- var state = this.state;
- if (state === State.PARSING || state === State.PARSED) {
- // check if all buffers have been appended
- this.pendingBuffering = data.pending > 0;
+ var foundCue = getClosestCue(this.id3Track.cues, bufferEnd);
- this._checkAppendedParsed();
- }
+ if (!foundCue) {
+ return;
}
- };
- _proto._checkAppendedParsed = function _checkAppendedParsed() {
- // trigger handler right now
- if (this.state === State.PARSED && (!this.appended || !this.pendingBuffering)) {
- var frag = this.fragCurrent;
+ var removeCues = true;
- if (frag) {
- var media = this.mediaBuffer ? this.mediaBuffer : this.media;
- logger["logger"].log("main buffered : " + time_ranges.toString(media.buffered));
- this.fragPrevious = frag;
- var stats = this.stats;
- stats.tbuffered = window.performance.now(); // we should get rid of this.fragLastKbps
+ while (removeCues) {
+ var cue = this.id3Track.cues[0];
- this.fragLastKbps = Math.round(8 * stats.total / (stats.tbuffered - stats.tfirst));
- this.hls.trigger(events["default"].FRAG_BUFFERED, {
- stats: stats,
- frag: frag,
- id: 'main'
- });
- this.state = State.IDLE;
+ if (!this.id3Track.cues.length || cue.id === foundCue.id) {
+ removeCues = false;
+ return;
}
- this.tick();
+ this.id3Track.removeCue(cue);
}
};
- _proto.onError = function onError(data) {
- var frag = data.frag || this.fragCurrent; // don't handle frag error not related to main fragment
-
- if (frag && frag.type !== 'main') {
- return;
- } // 0.5 : tolerance needed as some browsers stalls playback before reaching buffered end
-
+ _proto.onFragParsingMetadata = function onFragParsingMetadata(data) {
+ var fragment = data.frag;
+ var samples = data.samples; // create track dynamically
- var mediaBuffered = !!this.media && BufferHelper.isBuffered(this.media, this.media.currentTime) && BufferHelper.isBuffered(this.media, this.media.currentTime + 0.5);
+ if (!this.id3Track) {
+ this.id3Track = this.getID3Track(this.media.textTracks);
+ this.id3Track.mode = 'hidden';
+ } // Attempt to recreate Safari functionality by creating
+ // WebKitDataCue objects when available and store the decoded
+ // ID3 data in the value property of the cue
- switch (data.details) {
- case errors["ErrorDetails"].FRAG_LOAD_ERROR:
- case errors["ErrorDetails"].FRAG_LOAD_TIMEOUT:
- case errors["ErrorDetails"].KEY_LOAD_ERROR:
- case errors["ErrorDetails"].KEY_LOAD_TIMEOUT:
- if (!data.fatal) {
- // keep retrying until the limit will be reached
- if (this.fragLoadError + 1 <= this.config.fragLoadingMaxRetry) {
- // exponential backoff capped to config.fragLoadingMaxRetryTimeout
- var delay = Math.min(Math.pow(2, this.fragLoadError) * this.config.fragLoadingRetryDelay, this.config.fragLoadingMaxRetryTimeout);
- logger["logger"].warn("mediaController: frag loading failed, retry in " + delay + " ms");
- this.retryDate = window.performance.now() + delay; // retry loading state
- // if loadedmetadata is not set, it means that we are emergency switch down on first frag
- // in that case, reset startFragRequested flag
-
- if (!this.loadedmetadata) {
- this.startFragRequested = false;
- this.nextLoadPosition = this.startPosition;
- }
- this.fragLoadError++;
- this.state = State.FRAG_LOADING_WAITING_RETRY;
- } else {
- logger["logger"].error("mediaController: " + data.details + " reaches max retry, redispatch as fatal ..."); // switch error to fatal
+ var Cue = window.WebKitDataCue || window.VTTCue || window.TextTrackCue;
- data.fatal = true;
- this.state = State.ERROR;
- }
- }
+ for (var i = 0; i < samples.length; i++) {
+ var frames = id3["default"].getID3Frames(samples[i].data);
- break;
+ if (frames) {
+ var startTime = samples[i].pts;
+ var endTime = i < samples.length - 1 ? samples[i + 1].pts : fragment.endPTS;
- case errors["ErrorDetails"].LEVEL_LOAD_ERROR:
- case errors["ErrorDetails"].LEVEL_LOAD_TIMEOUT:
- if (this.state !== State.ERROR) {
- if (data.fatal) {
- // if fatal error, stop processing
- this.state = State.ERROR;
- logger["logger"].warn("streamController: " + data.details + ",switch to " + this.state + " state ...");
- } else {
- // in case of non fatal error while loading level, if level controller is not retrying to load level , switch back to IDLE
- if (!data.levelRetry && this.state === State.WAITING_LEVEL) {
- this.state = State.IDLE;
- }
- }
+ if (startTime === endTime) {
+ // Give a slight bump to the endTime if it's equal to startTime to avoid a SyntaxError in IE
+ endTime += 0.0001;
+ } else if (startTime > endTime) {
+ logger["logger"].warn('detected an id3 sample with endTime < startTime, adjusting endTime to (startTime + 0.25)');
+ endTime = startTime + 0.25;
}
- break;
-
- case errors["ErrorDetails"].BUFFER_FULL_ERROR:
- // if in appending state
- if (data.parent === 'main' && (this.state === State.PARSING || this.state === State.PARSED)) {
- // reduce max buf len if current position is buffered
- if (mediaBuffered) {
- this._reduceMaxBufferLength(this.config.maxBufferLength);
-
- this.state = State.IDLE;
- } else {
- // current position is not buffered, but browser is still complaining about buffer full error
- // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
- // in that case flush the whole buffer to recover
- logger["logger"].warn('buffer full error also media.currentTime is not buffered, flush everything');
- this.fragCurrent = null; // flush everything
+ for (var j = 0; j < frames.length; j++) {
+ var frame = frames[j]; // Safari doesn't put the timestamp frame in the TextTrack
- this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
+ if (!id3["default"].isTimeStampFrame(frame)) {
+ var cue = new Cue(startTime, endTime, '');
+ cue.value = frame;
+ this.id3Track.addCue(cue);
}
}
-
- break;
-
- default:
- break;
+ }
}
};
- _proto._reduceMaxBufferLength = function _reduceMaxBufferLength(minLength) {
- var config = this.config;
+ _proto.onLiveBackBufferReached = function onLiveBackBufferReached(_ref2) {
+ var bufferEnd = _ref2.bufferEnd;
+ var id3Track = this.id3Track;
- if (config.maxMaxBufferLength >= minLength) {
- // reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
- config.maxMaxBufferLength /= 2;
- logger["logger"].warn("main:reduce max buffer length to " + config.maxMaxBufferLength + "s");
- return true;
+ if (!id3Track || !id3Track.cues || !id3Track.cues.length) {
+ return;
}
- return false;
- }
- /**
- * Checks the health of the buffer and attempts to resolve playback stalls.
- * @private
- */
- ;
-
- _proto._checkBuffer = function _checkBuffer() {
- var media = this.media;
+ var foundCue = getClosestCue(id3Track.cues, bufferEnd);
- if (!media || media.readyState === 0) {
- // Exit early if we don't have media or if the media hasn't bufferd anything yet (readyState 0)
+ if (!foundCue) {
return;
}
- var mediaBuffer = this.mediaBuffer ? this.mediaBuffer : media;
- var buffered = mediaBuffer.buffered;
-
- if (!this.loadedmetadata && buffered.length) {
- this.loadedmetadata = true;
-
- this._seekToStartPos();
- } else if (this.immediateSwitch) {
- this.immediateLevelSwitchEnd();
- } else {
- this.gapController.poll(this.lastCurrentTime, buffered);
+ while (id3Track.cues[0] !== foundCue) {
+ id3Track.removeCue(id3Track.cues[0]);
}
};
- _proto.onFragLoadEmergencyAborted = function onFragLoadEmergencyAborted() {
- this.state = State.IDLE; // if loadedmetadata is not set, it means that we are emergency switch down on first frag
- // in that case, reset startFragRequested flag
-
- if (!this.loadedmetadata) {
- this.startFragRequested = false;
- this.nextLoadPosition = this.startPosition;
- }
-
- this.tick();
- };
+ return ID3TrackController;
+}(event_handler);
- _proto.onBufferFlushed = function onBufferFlushed() {
- /* after successful buffer flushing, filter flushed fragments from bufferedFrags
- use mediaBuffered instead of media (so that we will check against video.buffered ranges in case of alt audio track)
- */
- var media = this.mediaBuffer ? this.mediaBuffer : this.media;
+/* harmony default export */ var id3_track_controller = (id3_track_controller_ID3TrackController);
+// CONCATENATED MODULE: ./src/utils/mediasource-helper.ts
+/**
+ * MediaSource helper
+ */
+function getMediaSource() {
+ return window.MediaSource || window.WebKitMediaSource;
+}
+// CONCATENATED MODULE: ./src/is-supported.ts
- if (media) {
- // filter fragments potentially evicted from buffer. this is to avoid memleak on live streams
- this.fragmentTracker.detectEvictedFragments(ElementaryStreamTypes.VIDEO, media.buffered);
- } // move to IDLE once flush complete. this should trigger new fragment loading
+function is_supported_isSupported() {
+ var mediaSource = getMediaSource();
+ if (!mediaSource) {
+ return false;
+ }
- this.state = State.IDLE; // reset reference to frag
+ var sourceBuffer = self.SourceBuffer || self.WebKitSourceBuffer;
+ var isTypeSupported = mediaSource && typeof mediaSource.isTypeSupported === 'function' && mediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"'); // if SourceBuffer is exposed ensure its API is valid
+ // safari and old version of Chrome doe not expose SourceBuffer globally so checking SourceBuffer.prototype is impossible
- this.fragPrevious = null;
- };
+ var sourceBufferValidAPI = !sourceBuffer || sourceBuffer.prototype && typeof sourceBuffer.prototype.appendBuffer === 'function' && typeof sourceBuffer.prototype.remove === 'function';
+ return !!isTypeSupported && !!sourceBufferValidAPI;
+}
+// CONCATENATED MODULE: ./src/utils/buffer-helper.ts
+/**
+ * @module BufferHelper
+ *
+ * Providing methods dealing with buffer length retrieval for example.
+ *
+ * In general, a helper around HTML5 MediaElement TimeRanges gathered from `buffered` property.
+ *
+ * Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered
+*/
+var BufferHelper =
+/*#__PURE__*/
+function () {
+ function BufferHelper() {}
- _proto.swapAudioCodec = function swapAudioCodec() {
- this.audioCodecSwap = !this.audioCodecSwap;
- }
/**
- * Seeks to the set startPosition if not equal to the mediaElement's current time.
- * @private
+ * Return true if `media`'s buffered include `position`
+ * @param {Bufferable} media
+ * @param {number} position
+ * @returns {boolean}
*/
- ;
-
- _proto._seekToStartPos = function _seekToStartPos() {
- var media = this.media;
- var currentTime = media.currentTime; // only adjust currentTime if different from startPosition or if startPosition not buffered
- // at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
-
- var startPosition = media.seeking ? currentTime : this.startPosition; // if currentTime not matching with expected startPosition or startPosition not buffered but close to first buffered
-
- if (currentTime !== startPosition) {
- // if startPosition not buffered, let's seek to buffered.start(0)
- logger["logger"].log("target start position not buffered, seek to buffered.start(0) " + startPosition + " from current time " + currentTime + " ");
- media.currentTime = startPosition;
- }
- };
-
- _proto._getAudioCodec = function _getAudioCodec(currentLevel) {
- var audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
-
- if (this.audioCodecSwap) {
- logger["logger"].log('swapping playlist audio codec');
+ BufferHelper.isBuffered = function isBuffered(media, position) {
+ try {
+ if (media) {
+ var buffered = media.buffered;
- if (audioCodec) {
- if (audioCodec.indexOf('mp4a.40.5') !== -1) {
- audioCodec = 'mp4a.40.2';
- } else {
- audioCodec = 'mp4a.40.5';
+ for (var i = 0; i < buffered.length; i++) {
+ if (position >= buffered.start(i) && position <= buffered.end(i)) {
+ return true;
+ }
}
}
+ } catch (error) {// this is to catch
+ // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
+ // This SourceBuffer has been removed from the parent media source
}
- return audioCodec;
- };
-
- stream_controller_createClass(StreamController, [{
- key: "state",
- set: function set(nextState) {
- if (this.state !== nextState) {
- var previousState = this.state;
- this._state = nextState;
- logger["logger"].log("main stream:" + previousState + "->" + nextState);
- this.hls.trigger(events["default"].STREAM_STATE_TRANSITION, {
- previousState: previousState,
- nextState: nextState
- });
- }
- },
- get: function get() {
- return this._state;
- }
- }, {
- key: "currentLevel",
- get: function get() {
- var media = this.media;
-
+ return false;
+ };
+
+ BufferHelper.bufferInfo = function bufferInfo(media, pos, maxHoleDuration) {
+ try {
if (media) {
- var frag = this.getBufferedFrag(media.currentTime);
+ var vbuffered = media.buffered;
+ var buffered = [];
+ var i;
- if (frag) {
- return frag.level;
+ for (i = 0; i < vbuffered.length; i++) {
+ buffered.push({
+ start: vbuffered.start(i),
+ end: vbuffered.end(i)
+ });
}
- }
-
- return -1;
- }
- }, {
- key: "nextBufferedFrag",
- get: function get() {
- var media = this.media;
- if (media) {
- // first get end range of current fragment
- return this.followingBufferedFrag(this.getBufferedFrag(media.currentTime));
- } else {
- return null;
+ return this.bufferedInfo(buffered, pos, maxHoleDuration);
}
+ } catch (error) {// this is to catch
+ // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
+ // This SourceBuffer has been removed from the parent media source
}
- }, {
- key: "nextLevel",
- get: function get() {
- var frag = this.nextBufferedFrag;
- if (frag) {
- return frag.level;
+ return {
+ len: 0,
+ start: pos,
+ end: pos,
+ nextStart: undefined
+ };
+ };
+
+ BufferHelper.bufferedInfo = function bufferedInfo(buffered, pos, maxHoleDuration) {
+ // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
+ buffered.sort(function (a, b) {
+ var diff = a.start - b.start;
+
+ if (diff) {
+ return diff;
} else {
- return -1;
+ return b.end - a.end;
}
+ });
+ var buffered2 = [];
+
+ if (maxHoleDuration) {
+ // there might be some small holes between buffer time range
+ // consider that holes smaller than maxHoleDuration are irrelevant and build another
+ // buffer time range representations that discards those holes
+ for (var i = 0; i < buffered.length; i++) {
+ var buf2len = buffered2.length;
+
+ if (buf2len) {
+ var buf2end = buffered2[buf2len - 1].end; // if small hole (value between 0 or maxHoleDuration ) or overlapping (negative)
+
+ if (buffered[i].start - buf2end < maxHoleDuration) {
+ // merge overlapping time ranges
+ // update lastRange.end only if smaller than item.end
+ // e.g. [ 1, 15] with [ 2,8] => [ 1,15] (no need to modify lastRange.end)
+ // whereas [ 1, 8] with [ 2,15] => [ 1,15] ( lastRange should switch from [1,8] to [1,15])
+ if (buffered[i].end > buf2end) {
+ buffered2[buf2len - 1].end = buffered[i].end;
+ }
+ } else {
+ // big hole
+ buffered2.push(buffered[i]);
+ }
+ } else {
+ // first value
+ buffered2.push(buffered[i]);
+ }
+ }
+ } else {
+ buffered2 = buffered;
}
- }, {
- key: "liveSyncPosition",
- get: function get() {
- return this._liveSyncPosition;
- },
- set: function set(value) {
- this._liveSyncPosition = value;
- }
- }]);
-
- return StreamController;
-}(base_stream_controller_BaseStreamController);
-
-/* harmony default export */ var stream_controller = (stream_controller_StreamController);
-// CONCATENATED MODULE: ./src/controller/level-controller.js
-function level_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function level_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) level_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) level_controller_defineProperties(Constructor, staticProps); return Constructor; }
-
-function level_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
-
-/*
- * Level Controller
-*/
+ var bufferLen = 0; // bufferStartNext can possibly be undefined based on the conditional logic below
+ var bufferStartNext; // bufferStart and bufferEnd are buffer boundaries around current video position
+ var bufferStart = pos;
+ var bufferEnd = pos;
+ for (var _i = 0; _i < buffered2.length; _i++) {
+ var start = buffered2[_i].start,
+ end = buffered2[_i].end; // logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
+ if (pos + maxHoleDuration >= start && pos < end) {
+ // play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
+ bufferStart = start;
+ bufferEnd = end;
+ bufferLen = bufferEnd - pos;
+ } else if (pos + maxHoleDuration < start) {
+ bufferStartNext = start;
+ break;
+ }
+ }
-var level_controller_window = window,
- level_controller_performance = level_controller_window.performance;
-var chromeOrFirefox;
+ return {
+ len: bufferLen,
+ start: bufferStart,
+ end: bufferEnd,
+ nextStart: bufferStartNext
+ };
+ };
-var level_controller_LevelController =
+ return BufferHelper;
+}();
+// CONCATENATED MODULE: ./src/utils/ewma.ts
+/*
+ * compute an Exponential Weighted moving average
+ * - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
+ * - heavily inspired from shaka-player
+ */
+var EWMA =
/*#__PURE__*/
-function (_EventHandler) {
- level_controller_inheritsLoose(LevelController, _EventHandler);
-
- function LevelController(hls) {
- var _this;
-
- _this = _EventHandler.call(this, hls, events["default"].MANIFEST_LOADED, events["default"].LEVEL_LOADED, events["default"].AUDIO_TRACK_SWITCHED, events["default"].FRAG_LOADED, events["default"].ERROR) || this;
- _this.canload = false;
- _this.currentLevelIndex = null;
- _this.manualLevelIndex = -1;
- _this.timer = null;
- chromeOrFirefox = /chrome|firefox/.test(navigator.userAgent.toLowerCase());
- return _this;
+function () {
+ // About half of the estimated value will be from the last |halfLife| samples by weight.
+ function EWMA(halfLife) {
+ this.alpha_ = void 0;
+ this.estimate_ = void 0;
+ this.totalWeight_ = void 0;
+ // Larger values of alpha expire historical data more slowly.
+ this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0;
+ this.estimate_ = 0;
+ this.totalWeight_ = 0;
}
- var _proto = LevelController.prototype;
+ var _proto = EWMA.prototype;
- _proto.onHandlerDestroying = function onHandlerDestroying() {
- this.clearTimer();
- this.manualLevelIndex = -1;
+ _proto.sample = function sample(weight, value) {
+ var adjAlpha = Math.pow(this.alpha_, weight);
+ this.estimate_ = value * (1 - adjAlpha) + adjAlpha * this.estimate_;
+ this.totalWeight_ += weight;
};
- _proto.clearTimer = function clearTimer() {
- if (this.timer !== null) {
- clearTimeout(this.timer);
- this.timer = null;
+ _proto.getTotalWeight = function getTotalWeight() {
+ return this.totalWeight_;
+ };
+
+ _proto.getEstimate = function getEstimate() {
+ if (this.alpha_) {
+ var zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_);
+ return this.estimate_ / zeroFactor;
+ } else {
+ return this.estimate_;
}
};
- _proto.startLoad = function startLoad() {
- var levels = this._levels;
- this.canload = true;
- this.levelRetryCount = 0; // clean up live level details to force reload them, and reset load errors
+ return EWMA;
+}();
- if (levels) {
- levels.forEach(function (level) {
- level.loadError = 0;
- var levelDetails = level.details;
+/* harmony default export */ var ewma = (EWMA);
+// CONCATENATED MODULE: ./src/utils/ewma-bandwidth-estimator.ts
+/*
+ * EWMA Bandwidth Estimator
+ * - heavily inspired from shaka-player
+ * Tracks bandwidth samples and estimates available bandwidth.
+ * Based on the minimum of two exponentially-weighted moving averages with
+ * different half-lives.
+ */
- if (levelDetails && levelDetails.live) {
- level.details = undefined;
- }
- });
- } // speed up live playlist refresh if timer exists
+var ewma_bandwidth_estimator_EwmaBandWidthEstimator =
+/*#__PURE__*/
+function () {
+ // TODO(typescript-hls)
+ function EwmaBandWidthEstimator(hls, slow, fast, defaultEstimate) {
+ this.hls = void 0;
+ this.defaultEstimate_ = void 0;
+ this.minWeight_ = void 0;
+ this.minDelayMs_ = void 0;
+ this.slow_ = void 0;
+ this.fast_ = void 0;
+ this.hls = hls;
+ this.defaultEstimate_ = defaultEstimate;
+ this.minWeight_ = 0.001;
+ this.minDelayMs_ = 50;
+ this.slow_ = new ewma(slow);
+ this.fast_ = new ewma(fast);
+ }
+
+ var _proto = EwmaBandWidthEstimator.prototype;
- if (this.timer !== null) {
- this.loadLevel();
- }
+ _proto.sample = function sample(durationMs, numBytes) {
+ durationMs = Math.max(durationMs, this.minDelayMs_);
+ var numBits = 8 * numBytes,
+ // weight is duration in seconds
+ durationS = durationMs / 1000,
+ // value is bandwidth in bits/s
+ bandwidthInBps = numBits / durationS;
+ this.fast_.sample(durationS, bandwidthInBps);
+ this.slow_.sample(durationS, bandwidthInBps);
};
- _proto.stopLoad = function stopLoad() {
- this.canload = false;
+ _proto.canEstimate = function canEstimate() {
+ var fast = this.fast_;
+ return fast && fast.getTotalWeight() >= this.minWeight_;
};
- _proto.onManifestLoaded = function onManifestLoaded(data) {
- var levels = [];
- var audioTracks = [];
- var bitrateStart;
- var levelSet = {};
- var levelFromSet = null;
- var videoCodecFound = false;
- var audioCodecFound = false; // regroup redundant levels together
+ _proto.getEstimate = function getEstimate() {
+ if (this.canEstimate()) {
+ // console.log('slow estimate:'+ Math.round(this.slow_.getEstimate()));
+ // console.log('fast estimate:'+ Math.round(this.fast_.getEstimate()));
+ // Take the minimum of these two estimates. This should have the effect of
+ // adapting down quickly, but up more slowly.
+ return Math.min(this.fast_.getEstimate(), this.slow_.getEstimate());
+ } else {
+ return this.defaultEstimate_;
+ }
+ };
- data.levels.forEach(function (level) {
- var attributes = level.attrs;
- level.loadError = 0;
- level.fragmentError = false;
- videoCodecFound = videoCodecFound || !!level.videoCodec;
- audioCodecFound = audioCodecFound || !!level.audioCodec; // erase audio codec info if browser does not support mp4a.40.34.
- // demuxer will autodetect codec and fallback to mpeg/audio
+ _proto.destroy = function destroy() {};
+
+ return EwmaBandWidthEstimator;
+}();
- if (chromeOrFirefox && level.audioCodec && level.audioCodec.indexOf('mp4a.40.34') !== -1) {
- level.audioCodec = undefined;
- }
+/* harmony default export */ var ewma_bandwidth_estimator = (ewma_bandwidth_estimator_EwmaBandWidthEstimator);
+// CONCATENATED MODULE: ./src/controller/abr-controller.js
- levelFromSet = levelSet[level.bitrate]; // FIXME: we would also have to match the resolution here
- if (!levelFromSet) {
- level.url = [level.url];
- level.urlId = 0;
- levelSet[level.bitrate] = level;
- levels.push(level);
- } else {
- levelFromSet.url.push(level.url);
- }
- if (attributes) {
- if (attributes.AUDIO) {
- audioCodecFound = true;
- addGroupId(levelFromSet || level, 'audio', attributes.AUDIO);
- }
+function abr_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
- if (attributes.SUBTITLES) {
- addGroupId(levelFromSet || level, 'text', attributes.SUBTITLES);
- }
- }
- }); // remove audio-only level if we also have levels with audio+video codecs signalled
+function abr_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) abr_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) abr_controller_defineProperties(Constructor, staticProps); return Constructor; }
- if (videoCodecFound && audioCodecFound) {
- levels = levels.filter(function (_ref) {
- var videoCodec = _ref.videoCodec;
- return !!videoCodec;
- });
- } // only keep levels with supported audio/video codecs
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+function abr_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
- levels = levels.filter(function (_ref2) {
- var audioCodec = _ref2.audioCodec,
- videoCodec = _ref2.videoCodec;
- return (!audioCodec || isCodecSupportedInMp4(audioCodec, 'audio')) && (!videoCodec || isCodecSupportedInMp4(videoCodec, 'video'));
- });
+/*
+ * simple ABR Controller
+ * - compute next level based on last fragment bw heuristics
+ * - implement an abandon rules triggered if we have less than 2 frag buffered and if computed bw shows that we risk buffer stalling
+ */
- if (data.audioTracks) {
- audioTracks = data.audioTracks.filter(function (track) {
- return !track.audioCodec || isCodecSupportedInMp4(track.audioCodec, 'audio');
- }); // Reassign id's after filtering since they're used as array indices
- audioTracks.forEach(function (track, index) {
- track.id = index;
- });
- }
- if (levels.length > 0) {
- // start bitrate is the first bitrate of the manifest
- bitrateStart = levels[0].bitrate; // sort level on bitrate
- levels.sort(function (a, b) {
- return a.bitrate - b.bitrate;
- });
- this._levels = levels; // find index of first level in sorted levels
- for (var i = 0; i < levels.length; i++) {
- if (levels[i].bitrate === bitrateStart) {
- this._firstLevel = i;
- logger["logger"].log("manifest loaded," + levels.length + " level(s) found, first bitrate:" + bitrateStart);
- break;
- }
- } // Audio is only alternate if manifest include a URI along with the audio group tag
+var abr_controller_window = window,
+ abr_controller_performance = abr_controller_window.performance;
- this.hls.trigger(events["default"].MANIFEST_PARSED, {
- levels: levels,
- audioTracks: audioTracks,
- firstLevel: this._firstLevel,
- stats: data.stats,
- audio: audioCodecFound,
- video: videoCodecFound,
- altAudio: audioTracks.some(function (t) {
- return !!t.url;
- })
- });
- } else {
- this.hls.trigger(events["default"].ERROR, {
- type: errors["ErrorTypes"].MEDIA_ERROR,
- details: errors["ErrorDetails"].MANIFEST_INCOMPATIBLE_CODECS_ERROR,
- fatal: true,
- url: this.hls.url,
- reason: 'no level with compatible codecs found in manifest'
- });
- }
+var abr_controller_AbrController =
+/*#__PURE__*/
+function (_EventHandler) {
+ abr_controller_inheritsLoose(AbrController, _EventHandler);
+
+ function AbrController(hls) {
+ var _this;
+
+ _this = _EventHandler.call(this, hls, events["default"].FRAG_LOADING, events["default"].FRAG_LOADED, events["default"].FRAG_BUFFERED, events["default"].ERROR) || this;
+ _this.lastLoadedFragLevel = 0;
+ _this._nextAutoLevel = -1;
+ _this.hls = hls;
+ _this.timer = null;
+ _this._bwEstimator = null;
+ _this.onCheck = _this._abandonRulesCheck.bind(_assertThisInitialized(_this));
+ return _this;
+ }
+
+ var _proto = AbrController.prototype;
+
+ _proto.destroy = function destroy() {
+ this.clearTimer();
+ event_handler.prototype.destroy.call(this);
};
- _proto.setLevelInternal = function setLevelInternal(newLevel) {
- var levels = this._levels;
- var hls = this.hls; // check if level idx is valid
+ _proto.onFragLoading = function onFragLoading(data) {
+ var frag = data.frag;
- if (newLevel >= 0 && newLevel < levels.length) {
- // stopping live reloading timer if any
- this.clearTimer();
+ if (frag.type === 'main') {
+ if (!this.timer) {
+ this.fragCurrent = frag;
+ this.timer = setInterval(this.onCheck, 100);
+ } // lazy init of BwEstimator, rationale is that we use different params for Live/VoD
+ // so we need to wait for stream manifest / playlist type to instantiate it.
- if (this.currentLevelIndex !== newLevel) {
- logger["logger"].log("switching to level " + newLevel);
- this.currentLevelIndex = newLevel;
- var levelProperties = levels[newLevel];
- levelProperties.level = newLevel;
- hls.trigger(events["default"].LEVEL_SWITCHING, levelProperties);
- }
- var level = levels[newLevel];
- var levelDetails = level.details; // check if we need to load playlist for this level
+ if (!this._bwEstimator) {
+ var hls = this.hls;
+ var config = hls.config;
+ var level = frag.level;
+ var isLive = hls.levels[level].details.live;
+ var ewmaFast;
+ var ewmaSlow;
- if (!levelDetails || levelDetails.live) {
- // level not retrieved yet, or live playlist we need to (re)load it
- var urlId = level.urlId;
- hls.trigger(events["default"].LEVEL_LOADING, {
- url: level.url[urlId],
- level: newLevel,
- id: urlId
- });
+ if (isLive) {
+ ewmaFast = config.abrEwmaFastLive;
+ ewmaSlow = config.abrEwmaSlowLive;
+ } else {
+ ewmaFast = config.abrEwmaFastVoD;
+ ewmaSlow = config.abrEwmaSlowVoD;
+ }
+
+ this._bwEstimator = new ewma_bandwidth_estimator(hls, ewmaSlow, ewmaFast, config.abrEwmaDefaultEstimate);
}
- } else {
- // invalid level id given, trigger error
- hls.trigger(events["default"].ERROR, {
- type: errors["ErrorTypes"].OTHER_ERROR,
- details: errors["ErrorDetails"].LEVEL_SWITCH_ERROR,
- level: newLevel,
- fatal: false,
- reason: 'invalid level idx'
- });
}
};
- _proto.onError = function onError(data) {
- if (data.fatal) {
- if (data.type === errors["ErrorTypes"].NETWORK_ERROR) {
- this.clearTimer();
- }
+ _proto._abandonRulesCheck = function _abandonRulesCheck() {
+ /*
+ monitor fragment retrieval time...
+ we compute expected time of arrival of the complete fragment.
+ we compare it to expected time of buffer starvation
+ */
+ var hls = this.hls;
+ var video = hls.media;
+ var frag = this.fragCurrent;
+ if (!frag) {
return;
}
- var levelError = false,
- fragmentError = false;
- var levelIndex; // try to recover not fatal errors
-
- switch (data.details) {
- case errors["ErrorDetails"].FRAG_LOAD_ERROR:
- case errors["ErrorDetails"].FRAG_LOAD_TIMEOUT:
- case errors["ErrorDetails"].KEY_LOAD_ERROR:
- case errors["ErrorDetails"].KEY_LOAD_TIMEOUT:
- levelIndex = data.frag.level;
- fragmentError = true;
- break;
+ var loader = frag.loader;
+ var minAutoLevel = hls.minAutoLevel; // if loader has been destroyed or loading has been aborted, stop timer and return
- case errors["ErrorDetails"].LEVEL_LOAD_ERROR:
- case errors["ErrorDetails"].LEVEL_LOAD_TIMEOUT:
- levelIndex = data.context.level;
- levelError = true;
- break;
+ if (!loader || loader.stats && loader.stats.aborted) {
+ logger["logger"].warn('frag loader destroy or aborted, disarm abandonRules');
+ this.clearTimer(); // reset forced auto level value so that next level will be selected
- case errors["ErrorDetails"].REMUX_ALLOC_ERROR:
- levelIndex = data.level;
- levelError = true;
- break;
+ this._nextAutoLevel = -1;
+ return;
}
- if (levelIndex !== undefined) {
- this.recoverLevel(data, levelIndex, levelError, fragmentError);
- }
- }
- /**
- * Switch to a redundant stream if any available.
- * If redundant stream is not available, emergency switch down if ABR mode is enabled.
- *
- * @param {Object} errorEvent
- * @param {Number} levelIndex current level index
- * @param {Boolean} levelError
- * @param {Boolean} fragmentError
- */
- // FIXME Find a better abstraction where fragment/level retry management is well decoupled
- ;
+ var stats = loader.stats;
+ /* only monitor frag retrieval time if
+ (video not paused OR first fragment being loaded(ready state === HAVE_NOTHING = 0)) AND autoswitching enabled AND not lowest level (=> means that we have several levels) */
- _proto.recoverLevel = function recoverLevel(errorEvent, levelIndex, levelError, fragmentError) {
- var _this2 = this;
+ if (video && stats && (!video.paused && video.playbackRate !== 0 || !video.readyState) && frag.autoLevel && frag.level) {
+ var requestDelay = abr_controller_performance.now() - stats.trequest;
+ var playbackRate = Math.abs(video.playbackRate); // monitor fragment load progress after half of expected fragment duration,to stabilize bitrate
- var config = this.hls.config;
- var errorDetails = errorEvent.details;
- var level = this._levels[levelIndex];
- var redundantLevels, delay, nextLevel;
- level.loadError++;
- level.fragmentError = fragmentError;
+ if (requestDelay > 500 * frag.duration / playbackRate) {
+ var levels = hls.levels;
+ var loadRate = Math.max(1, stats.bw ? stats.bw / 8 : stats.loaded * 1000 / requestDelay); // byte/s; at least 1 byte/s to avoid division by zero
+ // compute expected fragment length using frag duration and level bitrate. also ensure that expected len is gte than already loaded size
- if (levelError) {
- if (this.levelRetryCount + 1 <= config.levelLoadingMaxRetry) {
- // exponential backoff capped to max retry timeout
- delay = Math.min(Math.pow(2, this.levelRetryCount) * config.levelLoadingRetryDelay, config.levelLoadingMaxRetryTimeout); // Schedule level reload
+ var level = levels[frag.level];
+ var levelBitrate = level.realBitrate ? Math.max(level.realBitrate, level.bitrate) : level.bitrate;
+ var expectedLen = stats.total ? stats.total : Math.max(stats.loaded, Math.round(frag.duration * levelBitrate / 8));
+ var pos = video.currentTime;
+ var fragLoadedDelay = (expectedLen - stats.loaded) / loadRate;
+ var bufferStarvationDelay = (BufferHelper.bufferInfo(video, pos, hls.config.maxBufferHole).end - pos) / playbackRate; // consider emergency switch down only if we have less than 2 frag buffered AND
+ // time to finish loading current fragment is bigger than buffer starvation delay
+ // ie if we risk buffer starvation if bw does not increase quickly
+
+ if (bufferStarvationDelay < 2 * frag.duration / playbackRate && fragLoadedDelay > bufferStarvationDelay) {
+ var fragLevelNextLoadedDelay;
+ var nextLoadLevel; // lets iterate through lower level and try to find the biggest one that could avoid rebuffering
+ // we start from current level - 1 and we step down , until we find a matching level
- this.timer = setTimeout(function () {
- return _this2.loadLevel();
- }, delay); // boolean used to inform stream controller not to switch back to IDLE on non fatal error
+ for (nextLoadLevel = frag.level - 1; nextLoadLevel > minAutoLevel; nextLoadLevel--) {
+ // compute time to load next fragment at lower level
+ // 0.8 : consider only 80% of current bw to be conservative
+ // 8 = bits per byte (bps/Bps)
+ var levelNextBitrate = levels[nextLoadLevel].realBitrate ? Math.max(levels[nextLoadLevel].realBitrate, levels[nextLoadLevel].bitrate) : levels[nextLoadLevel].bitrate;
- errorEvent.levelRetry = true;
- this.levelRetryCount++;
- logger["logger"].warn("level controller, " + errorDetails + ", retry in " + delay + " ms, current retry count is " + this.levelRetryCount);
- } else {
- logger["logger"].error("level controller, cannot recover from " + errorDetails + " error");
- this.currentLevelIndex = null; // stopping live reloading timer if any
+ var _fragLevelNextLoadedDelay = frag.duration * levelNextBitrate / (8 * 0.8 * loadRate);
- this.clearTimer(); // switch error to fatal
+ if (_fragLevelNextLoadedDelay < bufferStarvationDelay) {
+ // we found a lower level that be rebuffering free with current estimated bw !
+ break;
+ }
+ } // only emergency switch down if it takes less time to load new fragment at lowest level instead
+ // of finishing loading current one ...
- errorEvent.fatal = true;
- return;
- }
- } // Try any redundant streams if available for both errors: level and fragment
- // If level.loadError reaches redundantLevels it means that we tried them all, no hope => let's switch down
+ if (fragLevelNextLoadedDelay < fragLoadedDelay) {
+ logger["logger"].warn("loading too slow, abort fragment loading and switch to level " + nextLoadLevel + ":fragLoadedDelay[" + nextLoadLevel + "] 1 && level.loadError < redundantLevels) {
- level.urlId = (level.urlId + 1) % redundantLevels;
- level.details = undefined;
- logger["logger"].warn("level controller, " + errorDetails + " for level " + levelIndex + ": switching to redundant URL-id " + level.urlId); // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
- // console.log('New video quality level audio group id:', level.attrs.AUDIO);
- } else {
- // Search for available level
- if (this.manualLevelIndex === -1) {
- // When lowest level has been reached, let's start hunt from the top
- nextLevel = levelIndex === 0 ? this._levels.length - 1 : levelIndex - 1;
- logger["logger"].warn("level controller, " + errorDetails + ": switch to " + nextLevel);
- this.hls.nextAutoLevel = this.currentLevelIndex = nextLevel;
- } else if (fragmentError) {
- // Allow fragment retry as long as configuration allows.
- // reset this._level so that another call to set level() will trigger again a frag load
- logger["logger"].warn("level controller, " + errorDetails + ": reload a fragment");
- this.currentLevelIndex = null;
- }
- }
- }
- } // reset errors on the successful load of a fragment
- ;
+ this._bwEstimator.sample(requestDelay, stats.loaded); // abort fragment loading
- _proto.onFragLoaded = function onFragLoaded(_ref3) {
- var frag = _ref3.frag;
- if (frag !== undefined && frag.type === 'main') {
- var level = this._levels[frag.level];
+ loader.abort(); // stop abandon rules timer
- if (level !== undefined) {
- level.fragmentError = false;
- level.loadError = 0;
- this.levelRetryCount = 0;
+ this.clearTimer();
+ hls.trigger(events["default"].FRAG_LOAD_EMERGENCY_ABORTED, {
+ frag: frag,
+ stats: stats
+ });
+ }
+ }
}
}
};
- _proto.onLevelLoaded = function onLevelLoaded(data) {
- var _this3 = this;
+ _proto.onFragLoaded = function onFragLoaded(data) {
+ var frag = data.frag;
- var level = data.level,
- details = data.details; // only process level loaded events matching with expected level
+ if (frag.type === 'main' && Object(number_isFinite["isFiniteNumber"])(frag.sn)) {
+ // stop monitoring bw once frag loaded
+ this.clearTimer(); // store level id after successful fragment load
- if (level !== this.currentLevelIndex) {
- return;
- }
+ this.lastLoadedFragLevel = frag.level; // reset forced auto level value so that next level will be selected
- var curLevel = this._levels[level]; // reset level load error counter on successful level loaded only if there is no issues with fragments
+ this._nextAutoLevel = -1; // compute level average bitrate
- if (!curLevel.fragmentError) {
- curLevel.loadError = 0;
- this.levelRetryCount = 0;
- } // if current playlist is a live playlist, arm a timer to reload it
+ if (this.hls.config.abrMaxWithRealBitrate) {
+ var level = this.hls.levels[frag.level];
+ var loadedBytes = (level.loaded ? level.loaded.bytes : 0) + data.stats.loaded;
+ var loadedDuration = (level.loaded ? level.loaded.duration : 0) + data.frag.duration;
+ level.loaded = {
+ bytes: loadedBytes,
+ duration: loadedDuration
+ };
+ level.realBitrate = Math.round(8 * loadedBytes / loadedDuration);
+ } // if fragment has been loaded to perform a bitrate test,
- if (details.live) {
- var reloadInterval = computeReloadInterval(curLevel.details, details, data.stats.trequest);
- logger["logger"].log("live playlist, reload in " + Math.round(reloadInterval) + " ms");
- this.timer = setTimeout(function () {
- return _this3.loadLevel();
- }, reloadInterval);
- } else {
- this.clearTimer();
+ if (data.frag.bitrateTest) {
+ var stats = data.stats;
+ stats.tparsed = stats.tbuffered = stats.tload;
+ this.onFragBuffered(data);
+ }
}
};
- _proto.onAudioTrackSwitched = function onAudioTrackSwitched(data) {
- var audioGroupId = this.hls.audioTracks[data.id].groupId;
- var currentLevel = this.hls.levels[this.currentLevelIndex];
+ _proto.onFragBuffered = function onFragBuffered(data) {
+ var stats = data.stats;
+ var frag = data.frag; // only update stats on first frag buffering
+ // if same frag is loaded multiple times, it might be in browser cache, and loaded quickly
+ // and leading to wrong bw estimation
+ // on bitrate test, also only update stats once (if tload = tbuffered == on FRAG_LOADED)
- if (!currentLevel) {
- return;
- }
+ if (stats.aborted !== true && frag.type === 'main' && Object(number_isFinite["isFiniteNumber"])(frag.sn) && (!frag.bitrateTest || stats.tload === stats.tbuffered)) {
+ // use tparsed-trequest instead of tbuffered-trequest to compute fragLoadingProcessing; rationale is that buffer appending only happens once media is attached
+ // in case we use config.startFragPrefetch while media is not attached yet, fragment might be parsed while media not attached yet, but it will only be buffered on media attached
+ // as a consequence it could happen really late in the process. meaning that appending duration might appears huge ... leading to underestimated throughput estimation
+ var fragLoadingProcessingMs = stats.tparsed - stats.trequest;
+ logger["logger"].log("latency/loading/parsing/append/kbps:" + Math.round(stats.tfirst - stats.trequest) + "/" + Math.round(stats.tload - stats.tfirst) + "/" + Math.round(stats.tparsed - stats.tload) + "/" + Math.round(stats.tbuffered - stats.tparsed) + "/" + Math.round(8 * stats.loaded / (stats.tbuffered - stats.trequest)));
- if (currentLevel.audioGroupIds) {
- var urlId = -1;
+ this._bwEstimator.sample(fragLoadingProcessingMs, stats.loaded);
- for (var i = 0; i < currentLevel.audioGroupIds.length; i++) {
- if (currentLevel.audioGroupIds[i] === audioGroupId) {
- urlId = i;
- break;
- }
- }
+ stats.bwEstimate = this._bwEstimator.getEstimate(); // if fragment has been loaded to perform a bitrate test, (hls.startLevel = -1), store bitrate test delay duration
- if (urlId !== currentLevel.urlId) {
- currentLevel.urlId = urlId;
- this.startLoad();
+ if (frag.bitrateTest) {
+ this.bitrateTestDelay = fragLoadingProcessingMs / 1000;
+ } else {
+ this.bitrateTestDelay = 0;
}
}
};
- _proto.loadLevel = function loadLevel() {
- logger["logger"].debug('call to loadLevel');
-
- if (this.currentLevelIndex !== null && this.canload) {
- var levelObject = this._levels[this.currentLevelIndex];
-
- if (typeof levelObject === 'object' && levelObject.url.length > 0) {
- var level = this.currentLevelIndex;
- var id = levelObject.urlId;
- var url = levelObject.url[id];
- logger["logger"].log("Attempt loading level index " + level + " with URL-id " + id); // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
- // console.log('New video quality level audio group id:', levelObject.attrs.AUDIO, level);
+ _proto.onError = function onError(data) {
+ // stop timer in case of frag loading error
+ switch (data.details) {
+ case errors["ErrorDetails"].FRAG_LOAD_ERROR:
+ case errors["ErrorDetails"].FRAG_LOAD_TIMEOUT:
+ this.clearTimer();
+ break;
- this.hls.trigger(events["default"].LEVEL_LOADING, {
- url: url,
- level: level,
- id: id
- });
- }
+ default:
+ break;
}
};
- level_controller_createClass(LevelController, [{
- key: "levels",
- get: function get() {
- return this._levels;
- }
- }, {
- key: "level",
- get: function get() {
- return this.currentLevelIndex;
- },
- set: function set(newLevel) {
- var levels = this._levels;
-
- if (levels) {
- newLevel = Math.min(newLevel, levels.length - 1);
-
- if (this.currentLevelIndex !== newLevel || !levels[newLevel].details) {
- this.setLevelInternal(newLevel);
- }
- }
- }
- }, {
- key: "manualLevel",
- get: function get() {
- return this.manualLevelIndex;
- },
- set: function set(newLevel) {
- this.manualLevelIndex = newLevel;
-
- if (this._startLevel === undefined) {
- this._startLevel = newLevel;
- }
+ _proto.clearTimer = function clearTimer() {
+ clearInterval(this.timer);
+ this.timer = null;
+ } // return next auto level
+ ;
- if (newLevel !== -1) {
- this.level = newLevel;
- }
- }
- }, {
- key: "firstLevel",
- get: function get() {
- return this._firstLevel;
- },
- set: function set(newLevel) {
- this._firstLevel = newLevel;
- }
- }, {
- key: "startLevel",
- get: function get() {
- // hls.startLevel takes precedence over config.startLevel
- // if none of these values are defined, fallback on this._firstLevel (first quality level appearing in variant manifest)
- if (this._startLevel === undefined) {
- var configStartLevel = this.hls.config.startLevel;
+ _proto._findBestLevel = function _findBestLevel(currentLevel, currentFragDuration, currentBw, minAutoLevel, maxAutoLevel, maxFetchDuration, bwFactor, bwUpFactor, levels) {
+ for (var i = maxAutoLevel; i >= minAutoLevel; i--) {
+ var levelInfo = levels[i];
- if (configStartLevel !== undefined) {
- return configStartLevel;
- } else {
- return this._firstLevel;
- }
- } else {
- return this._startLevel;
- }
- },
- set: function set(newLevel) {
- this._startLevel = newLevel;
- }
- }, {
- key: "nextLoadLevel",
- get: function get() {
- if (this.manualLevelIndex !== -1) {
- return this.manualLevelIndex;
+ if (!levelInfo) {
+ continue;
+ }
+
+ var levelDetails = levelInfo.details;
+ var avgDuration = levelDetails ? levelDetails.totalduration / levelDetails.fragments.length : currentFragDuration;
+ var live = levelDetails ? levelDetails.live : false;
+ var adjustedbw = void 0; // follow algorithm captured from stagefright :
+ // https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
+ // Pick the highest bandwidth stream below or equal to estimated bandwidth.
+ // consider only 80% of the available bandwidth, but if we are switching up,
+ // be even more conservative (70%) to avoid overestimating and immediately
+ // switching back.
+
+ if (i <= currentLevel) {
+ adjustedbw = bwFactor * currentBw;
} else {
- return this.hls.nextAutoLevel;
+ adjustedbw = bwUpFactor * currentBw;
}
- },
- set: function set(nextLevel) {
- this.level = nextLevel;
- if (this.manualLevelIndex === -1) {
- this.hls.nextAutoLevel = nextLevel;
+ var bitrate = levels[i].realBitrate ? Math.max(levels[i].realBitrate, levels[i].bitrate) : levels[i].bitrate;
+ var fetchDuration = bitrate * avgDuration / adjustedbw;
+ logger["logger"].trace("level/adjustedbw/bitrate/avgDuration/maxFetchDuration/fetchDuration: " + i + "/" + Math.round(adjustedbw) + "/" + bitrate + "/" + avgDuration + "/" + maxFetchDuration + "/" + fetchDuration); // if adjusted bw is greater than level bitrate AND
+
+ if (adjustedbw > bitrate && ( // fragment fetchDuration unknown OR live stream OR fragment fetchDuration less than max allowed fetch duration, then this level matches
+ // we don't account for max Fetch Duration for live streams, this is to avoid switching down when near the edge of live sliding window ...
+ // special case to support startLevel = -1 (bitrateTest) on live streams : in that case we should not exit loop so that _findBestLevel will return -1
+ !fetchDuration || live && !this.bitrateTestDelay || fetchDuration < maxFetchDuration)) {
+ // as we are looping from highest to lowest, this will return the best achievable quality level
+ return i;
}
- }
- }]);
+ } // not enough time budget even with quality level 0 ... rebuffering might happen
- return LevelController;
-}(event_handler);
+ return -1;
+ };
-// EXTERNAL MODULE: ./src/demux/id3.js
-var id3 = __webpack_require__("./src/demux/id3.js");
+ abr_controller_createClass(AbrController, [{
+ key: "nextAutoLevel",
+ get: function get() {
+ var forcedAutoLevel = this._nextAutoLevel;
+ var bwEstimator = this._bwEstimator; // in case next auto level has been forced, and bw not available or not reliable, return forced value
-// CONCATENATED MODULE: ./src/utils/texttrack-utils.ts
-function sendAddTrackEvent(track, videoEl) {
- var event;
+ if (forcedAutoLevel !== -1 && (!bwEstimator || !bwEstimator.canEstimate())) {
+ return forcedAutoLevel;
+ } // compute next level using ABR logic
- try {
- event = new Event('addtrack');
- } catch (err) {
- // for IE11
- event = document.createEvent('Event');
- event.initEvent('addtrack', false, false);
- }
- event.track = track;
- videoEl.dispatchEvent(event);
-}
-function clearCurrentCues(track) {
- if (track && track.cues) {
- while (track.cues.length > 0) {
- track.removeCue(track.cues[0]);
- }
- }
-}
-/**
- * Given a list of Cues, finds the closest cue matching the given time.
- * Modified verison of binary search O(log(n)).
- *
- * @export
- * @param {(TextTrackCueList | TextTrackCue[])} cues - List of cues.
- * @param {number} time - Target time, to find closest cue to.
- * @returns {TextTrackCue}
- */
+ var nextABRAutoLevel = this._nextABRAutoLevel; // if forced auto level has been defined, use it to cap ABR computed quality level
-function texttrack_utils_getClosestCue(cues, time) {
- // If the offset is less than the first element, the first element is the closest.
- if (time < cues[0].endTime) {
- return cues[0];
- } // If the offset is greater than the last cue, the last is the closest.
+ if (forcedAutoLevel !== -1) {
+ nextABRAutoLevel = Math.min(forcedAutoLevel, nextABRAutoLevel);
+ }
+ return nextABRAutoLevel;
+ },
+ set: function set(nextLevel) {
+ this._nextAutoLevel = nextLevel;
+ }
+ }, {
+ key: "_nextABRAutoLevel",
+ get: function get() {
+ var hls = this.hls;
+ var maxAutoLevel = hls.maxAutoLevel,
+ levels = hls.levels,
+ config = hls.config,
+ minAutoLevel = hls.minAutoLevel;
+ var video = hls.media;
+ var currentLevel = this.lastLoadedFragLevel;
+ var currentFragDuration = this.fragCurrent ? this.fragCurrent.duration : 0;
+ var pos = video ? video.currentTime : 0; // playbackRate is the absolute value of the playback rate; if video.playbackRate is 0, we use 1 to load as
+ // if we're playing back at the normal rate.
- if (time > cues[cues.length - 1].endTime) {
- return cues[cues.length - 1];
- }
+ var playbackRate = video && video.playbackRate !== 0 ? Math.abs(video.playbackRate) : 1.0;
+ var avgbw = this._bwEstimator ? this._bwEstimator.getEstimate() : config.abrEwmaDefaultEstimate; // bufferStarvationDelay is the wall-clock time left until the playback buffer is exhausted.
- var left = 0;
- var right = cues.length - 1;
+ var bufferStarvationDelay = (BufferHelper.bufferInfo(video, pos, config.maxBufferHole).end - pos) / playbackRate; // First, look to see if we can find a level matching with our avg bandwidth AND that could also guarantee no rebuffering at all
- while (left <= right) {
- var mid = Math.floor((right + left) / 2);
+ var bestLevel = this._findBestLevel(currentLevel, currentFragDuration, avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, config.abrBandWidthFactor, config.abrBandWidthUpFactor, levels);
- if (time < cues[mid].endTime) {
- right = mid - 1;
- } else if (time > cues[mid].endTime) {
- left = mid + 1;
- } else {
- // If it's not lower or higher, it must be equal.
- return cues[mid];
- }
- } // At this point, left and right have swapped.
- // No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
+ if (bestLevel >= 0) {
+ return bestLevel;
+ } else {
+ logger["logger"].trace('rebuffering expected to happen, lets try to find a quality level minimizing the rebuffering'); // not possible to get rid of rebuffering ... let's try to find level that will guarantee less than maxStarvationDelay of rebuffering
+ // if no matching level found, logic will return 0
+ var maxStarvationDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxStarvationDelay) : config.maxStarvationDelay;
+ var bwFactor = config.abrBandWidthFactor;
+ var bwUpFactor = config.abrBandWidthUpFactor;
- return cues[left].endTime - time < time - cues[right].endTime ? cues[left] : cues[right];
-}
-// CONCATENATED MODULE: ./src/controller/id3-track-controller.js
-function id3_track_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+ if (bufferStarvationDelay === 0) {
+ // in case buffer is empty, let's check if previous fragment was loaded to perform a bitrate test
+ var bitrateTestDelay = this.bitrateTestDelay;
-/*
- * id3 metadata track controller
-*/
+ if (bitrateTestDelay) {
+ // if it is the case, then we need to adjust our max starvation delay using maxLoadingDelay config value
+ // max video loading delay used in automatic start level selection :
+ // in that mode ABR controller will ensure that video loading time (ie the time to fetch the first fragment at lowest quality level +
+ // the time to fetch the fragment at the appropriate quality level is less than ```maxLoadingDelay``` )
+ // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
+ var maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
+ maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
+ logger["logger"].trace("bitrate test took " + Math.round(1000 * bitrateTestDelay) + "ms, set first fragment max fetchDuration to " + Math.round(1000 * maxStarvationDelay) + " ms"); // don't use conservative factor on bitrate test
+ bwFactor = bwUpFactor = 1;
+ }
+ }
+ bestLevel = this._findBestLevel(currentLevel, currentFragDuration, avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay + maxStarvationDelay, bwFactor, bwUpFactor, levels);
+ return Math.max(bestLevel, 0);
+ }
+ }
+ }]);
+ return AbrController;
+}(event_handler);
+/* harmony default export */ var abr_controller = (abr_controller_AbrController);
+// CONCATENATED MODULE: ./src/controller/buffer-controller.ts
-var id3_track_controller_ID3TrackController =
-/*#__PURE__*/
-function (_EventHandler) {
- id3_track_controller_inheritsLoose(ID3TrackController, _EventHandler);
+function buffer_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
- function ID3TrackController(hls) {
- var _this;
+/*
+ * Buffer Controller
+ */
- _this = _EventHandler.call(this, hls, events["default"].MEDIA_ATTACHED, events["default"].MEDIA_DETACHING, events["default"].FRAG_PARSING_METADATA, events["default"].LIVE_BACK_BUFFER_REACHED) || this;
- _this.id3Track = undefined;
- _this.media = undefined;
- return _this;
- }
- var _proto = ID3TrackController.prototype;
- _proto.destroy = function destroy() {
- event_handler.prototype.destroy.call(this);
- } // Add ID3 metatadata text track.
- ;
- _proto.onMediaAttached = function onMediaAttached(data) {
- this.media = data.media;
- if (!this.media) {}
- };
+var buffer_controller_MediaSource = getMediaSource();
- _proto.onMediaDetaching = function onMediaDetaching() {
- clearCurrentCues(this.id3Track);
- this.id3Track = undefined;
- this.media = undefined;
- };
+var buffer_controller_BufferController =
+/*#__PURE__*/
+function (_EventHandler) {
+ buffer_controller_inheritsLoose(BufferController, _EventHandler);
- _proto.getID3Track = function getID3Track(textTracks) {
- for (var i = 0; i < textTracks.length; i++) {
- var textTrack = textTracks[i];
+ // the value that we have set mediasource.duration to
+ // (the actual duration may be tweaked slighly by the browser)
+ // the value that we want to set mediaSource.duration to
+ // the target duration of the current media playlist
+ // current stream state: true - for live broadcast, false - for VoD content
+ // cache the self generated object url to detect hijack of video tag
+ // signals that the sourceBuffers need to be flushed
+ // signals that mediaSource should have endOfStream called
+ // this is optional because this property is removed from the class sometimes
+ // The number of BUFFER_CODEC events received before any sourceBuffers are created
+ // The total number of BUFFER_CODEC events received
+ // A reference to the attached media element
+ // A reference to the active media source
+ // List of pending segments to be appended to source buffer
+ // A guard to see if we are currently appending to the source buffer
+ // counters
+ function BufferController(hls) {
+ var _this;
- if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
- // send 'addtrack' when reusing the textTrack for metadata,
- // same as what we do for captions
- sendAddTrackEvent(textTrack, this.media);
- return textTrack;
- }
- }
+ _this = _EventHandler.call(this, hls, events["default"].MEDIA_ATTACHING, events["default"].MEDIA_DETACHING, events["default"].MANIFEST_PARSED, events["default"].BUFFER_RESET, events["default"].BUFFER_APPENDING, events["default"].BUFFER_CODECS, events["default"].BUFFER_EOS, events["default"].BUFFER_FLUSHING, events["default"].LEVEL_PTS_UPDATED, events["default"].LEVEL_UPDATED) || this;
+ _this._msDuration = null;
+ _this._levelDuration = null;
+ _this._levelTargetDuration = 10;
+ _this._live = null;
+ _this._objectUrl = null;
+ _this._needsFlush = false;
+ _this._needsEos = false;
+ _this.config = void 0;
+ _this.audioTimestampOffset = void 0;
+ _this.bufferCodecEventsExpected = 0;
+ _this._bufferCodecEventsTotal = 0;
+ _this.media = null;
+ _this.mediaSource = null;
+ _this.segments = [];
+ _this.parent = void 0;
+ _this.appending = false;
+ _this.appended = 0;
+ _this.appendError = 0;
+ _this.flushBufferCounter = 0;
+ _this.tracks = {};
+ _this.pendingTracks = {};
+ _this.sourceBuffer = {};
+ _this.flushRange = [];
- return this.media.addTextTrack('metadata', 'id3');
- };
+ _this._onMediaSourceOpen = function () {
+ logger["logger"].log('media source opened');
- _proto.onLiveBackBufferReached = function onLiveBackBufferReached(_ref) {
- var bufferEnd = _ref.bufferEnd;
+ _this.hls.trigger(events["default"].MEDIA_ATTACHED, {
+ media: _this.media
+ });
- if (!this.id3Track || !this.id3Track.cues || !this.id3Track.cues.length) {
- return;
- }
+ var mediaSource = _this.mediaSource;
- var foundCue = getClosestCue(this.id3Track.cues, bufferEnd);
+ if (mediaSource) {
+ // once received, don't listen anymore to sourceopen event
+ mediaSource.removeEventListener('sourceopen', _this._onMediaSourceOpen);
+ }
- if (!foundCue) {
- return;
- }
+ _this.checkPendingTracks();
+ };
- var removeCues = true;
+ _this._onMediaSourceClose = function () {
+ logger["logger"].log('media source closed');
+ };
- while (removeCues) {
- var cue = this.id3Track.cues[0];
+ _this._onMediaSourceEnded = function () {
+ logger["logger"].log('media source ended');
+ };
- if (!this.id3Track.cues.length || cue.id === foundCue.id) {
- removeCues = false;
- return;
+ _this._onSBUpdateEnd = function () {
+ // update timestampOffset
+ if (_this.audioTimestampOffset && _this.sourceBuffer.audio) {
+ var audioBuffer = _this.sourceBuffer.audio;
+ logger["logger"].warn("change mpeg audio timestamp offset from " + audioBuffer.timestampOffset + " to " + _this.audioTimestampOffset);
+ audioBuffer.timestampOffset = _this.audioTimestampOffset;
+ delete _this.audioTimestampOffset;
}
- this.id3Track.removeCue(cue);
- }
- };
+ if (_this._needsFlush) {
+ _this.doFlush();
+ }
- _proto.onFragParsingMetadata = function onFragParsingMetadata(data) {
- var fragment = data.frag;
- var samples = data.samples; // create track dynamically
+ if (_this._needsEos) {
+ _this.checkEos();
+ }
- if (!this.id3Track) {
- this.id3Track = this.getID3Track(this.media.textTracks);
- this.id3Track.mode = 'hidden';
- } // Attempt to recreate Safari functionality by creating
- // WebKitDataCue objects when available and store the decoded
- // ID3 data in the value property of the cue
+ _this.appending = false;
+ var parent = _this.parent; // count nb of pending segments waiting for appending on this sourcebuffer
+ var pending = _this.segments.reduce(function (counter, segment) {
+ return segment.parent === parent ? counter + 1 : counter;
+ }, 0); // this.sourceBuffer is better to use than media.buffered as it is closer to the PTS data from the fragments
- var Cue = window.WebKitDataCue || window.VTTCue || window.TextTrackCue;
- for (var i = 0; i < samples.length; i++) {
- var frames = id3["default"].getID3Frames(samples[i].data);
+ var timeRanges = {};
+ var sbSet = _this.sourceBuffer;
- if (frames) {
- var startTime = samples[i].pts;
- var endTime = i < samples.length - 1 ? samples[i + 1].pts : fragment.endPTS;
+ for (var streamType in sbSet) {
+ var sb = sbSet[streamType];
- if (startTime === endTime) {
- // Give a slight bump to the endTime if it's equal to startTime to avoid a SyntaxError in IE
- endTime += 0.0001;
- } else if (startTime > endTime) {
- logger["logger"].warn('detected an id3 sample with endTime < startTime, adjusting endTime to (startTime + 0.25)');
- endTime = startTime + 0.25;
+ if (!sb) {
+ throw Error("handling source buffer update end error: source buffer for " + streamType + " uninitilized and unable to update buffered TimeRanges.");
}
- for (var j = 0; j < frames.length; j++) {
- var frame = frames[j]; // Safari doesn't put the timestamp frame in the TextTrack
-
- if (!id3["default"].isTimeStampFrame(frame)) {
- var cue = new Cue(startTime, endTime, '');
- cue.value = frame;
- this.id3Track.addCue(cue);
- }
- }
+ timeRanges[streamType] = sb.buffered;
}
- }
- };
- return ID3TrackController;
-}(event_handler);
+ _this.hls.trigger(events["default"].BUFFER_APPENDED, {
+ parent: parent,
+ pending: pending,
+ timeRanges: timeRanges
+ }); // don't append in flushing mode
-/* harmony default export */ var id3_track_controller = (id3_track_controller_ID3TrackController);
-// CONCATENATED MODULE: ./src/is-supported.ts
-function is_supported_isSupported() {
- var mediaSource = getMediaSource();
+ if (!_this._needsFlush) {
+ _this.doAppending();
+ }
- if (!mediaSource) {
- return false;
- }
+ _this.updateMediaElementDuration(); // appending goes first
- var sourceBuffer = SourceBuffer || window.WebKitSourceBuffer;
- var isTypeSupported = mediaSource && typeof mediaSource.isTypeSupported === 'function' && mediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"'); // if SourceBuffer is exposed ensure its API is valid
- // safari and old version of Chrome doe not expose SourceBuffer globally so checking SourceBuffer.prototype is impossible
- var sourceBufferValidAPI = !sourceBuffer || sourceBuffer.prototype && typeof sourceBuffer.prototype.appendBuffer === 'function' && typeof sourceBuffer.prototype.remove === 'function';
- return !!isTypeSupported && !!sourceBufferValidAPI;
-}
-// CONCATENATED MODULE: ./src/utils/ewma.ts
-/*
- * compute an Exponential Weighted moving average
- * - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
- * - heavily inspired from shaka-player
- */
-var EWMA =
-/*#__PURE__*/
-function () {
- // About half of the estimated value will be from the last |halfLife| samples by weight.
- function EWMA(halfLife) {
- this.alpha_ = void 0;
- this.estimate_ = void 0;
- this.totalWeight_ = void 0;
- // Larger values of alpha expire historical data more slowly.
- this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0;
- this.estimate_ = 0;
- this.totalWeight_ = 0;
- }
+ if (pending === 0) {
+ _this.flushLiveBackBuffer();
+ }
+ };
- var _proto = EWMA.prototype;
+ _this._onSBUpdateError = function (event) {
+ logger["logger"].error('sourceBuffer error:', event); // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
+ // this error might not always be fatal (it is fatal if decode error is set, in that case
+ // it will be followed by a mediaElement error ...)
- _proto.sample = function sample(weight, value) {
- var adjAlpha = Math.pow(this.alpha_, weight);
- this.estimate_ = value * (1 - adjAlpha) + adjAlpha * this.estimate_;
- this.totalWeight_ += weight;
- };
+ _this.hls.trigger(events["default"].ERROR, {
+ type: errors["ErrorTypes"].MEDIA_ERROR,
+ details: errors["ErrorDetails"].BUFFER_APPENDING_ERROR,
+ fatal: false
+ }); // we don't need to do more than that, as accordin to the spec, updateend will be fired just after
- _proto.getTotalWeight = function getTotalWeight() {
- return this.totalWeight_;
- };
+ };
- _proto.getEstimate = function getEstimate() {
- if (this.alpha_) {
- var zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_);
- return this.estimate_ / zeroFactor;
- } else {
- return this.estimate_;
- }
- };
+ _this.config = hls.config;
+ return _this;
+ }
- return EWMA;
-}();
+ var _proto = BufferController.prototype;
-/* harmony default export */ var ewma = (EWMA);
-// CONCATENATED MODULE: ./src/utils/ewma-bandwidth-estimator.ts
-/*
- * EWMA Bandwidth Estimator
- * - heavily inspired from shaka-player
- * Tracks bandwidth samples and estimates available bandwidth.
- * Based on the minimum of two exponentially-weighted moving averages with
- * different half-lives.
- */
+ _proto.destroy = function destroy() {
+ event_handler.prototype.destroy.call(this);
+ };
+ _proto.onLevelPtsUpdated = function onLevelPtsUpdated(data) {
+ var type = data.type;
+ var audioTrack = this.tracks.audio; // Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
+ // in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset`
+ // is greater than 100ms (this is enough to handle seek for VOD or level change for LIVE videos). At the time of change we issue
+ // `SourceBuffer.abort()` and adjusting `SourceBuffer.timestampOffset` if `SourceBuffer.updating` is false or awaiting `updateend`
+ // event if SB is in updating state.
+ // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
-var ewma_bandwidth_estimator_EwmaBandWidthEstimator =
-/*#__PURE__*/
-function () {
- // TODO(typescript-hls)
- function EwmaBandWidthEstimator(hls, slow, fast, defaultEstimate) {
- this.hls = void 0;
- this.defaultEstimate_ = void 0;
- this.minWeight_ = void 0;
- this.minDelayMs_ = void 0;
- this.slow_ = void 0;
- this.fast_ = void 0;
- this.hls = hls;
- this.defaultEstimate_ = defaultEstimate;
- this.minWeight_ = 0.001;
- this.minDelayMs_ = 50;
- this.slow_ = new ewma(slow);
- this.fast_ = new ewma(fast);
- }
+ if (type === 'audio' && audioTrack && audioTrack.container === 'audio/mpeg') {
+ // Chrome audio mp3 track
+ var audioBuffer = this.sourceBuffer.audio;
- var _proto = EwmaBandWidthEstimator.prototype;
+ if (!audioBuffer) {
+ throw Error('Level PTS Updated and source buffer for audio uninitalized');
+ }
- _proto.sample = function sample(durationMs, numBytes) {
- durationMs = Math.max(durationMs, this.minDelayMs_);
- var numBits = 8 * numBytes,
- // weight is duration in seconds
- durationS = durationMs / 1000,
- // value is bandwidth in bits/s
- bandwidthInBps = numBits / durationS;
- this.fast_.sample(durationS, bandwidthInBps);
- this.slow_.sample(durationS, bandwidthInBps);
- };
+ var delta = Math.abs(audioBuffer.timestampOffset - data.start); // adjust timestamp offset if time delta is greater than 100ms
- _proto.canEstimate = function canEstimate() {
- var fast = this.fast_;
- return fast && fast.getTotalWeight() >= this.minWeight_;
- };
+ if (delta > 0.1) {
+ var updating = audioBuffer.updating;
- _proto.getEstimate = function getEstimate() {
- if (this.canEstimate()) {
- // console.log('slow estimate:'+ Math.round(this.slow_.getEstimate()));
- // console.log('fast estimate:'+ Math.round(this.fast_.getEstimate()));
- // Take the minimum of these two estimates. This should have the effect of
- // adapting down quickly, but up more slowly.
- return Math.min(this.fast_.getEstimate(), this.slow_.getEstimate());
- } else {
- return this.defaultEstimate_;
+ try {
+ audioBuffer.abort();
+ } catch (err) {
+ logger["logger"].warn('can not abort audio buffer: ' + err);
+ }
+
+ if (!updating) {
+ logger["logger"].warn('change mpeg audio timestamp offset from ' + audioBuffer.timestampOffset + ' to ' + data.start);
+ audioBuffer.timestampOffset = data.start;
+ } else {
+ this.audioTimestampOffset = data.start;
+ }
+ }
}
};
- _proto.destroy = function destroy() {};
-
- return EwmaBandWidthEstimator;
-}();
-
-/* harmony default export */ var ewma_bandwidth_estimator = (ewma_bandwidth_estimator_EwmaBandWidthEstimator);
-// CONCATENATED MODULE: ./src/controller/abr-controller.js
-
+ _proto.onManifestParsed = function onManifestParsed(data) {
+ // in case of alt audio 2 BUFFER_CODECS events will be triggered, one per stream controller
+ // sourcebuffers will be created all at once when the expected nb of tracks will be reached
+ // in case alt audio is not used, only one BUFFER_CODEC event will be fired from main stream controller
+ // it will contain the expected nb of source buffers, no need to compute it
+ this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = data.altAudio ? 2 : 1;
+ logger["logger"].log(this.bufferCodecEventsExpected + " bufferCodec event(s) expected");
+ };
+ _proto.onMediaAttaching = function onMediaAttaching(data) {
+ var media = this.media = data.media;
-function abr_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+ if (media && buffer_controller_MediaSource) {
+ // setup the media source
+ var ms = this.mediaSource = new buffer_controller_MediaSource(); // Media Source listeners
-function abr_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) abr_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) abr_controller_defineProperties(Constructor, staticProps); return Constructor; }
+ ms.addEventListener('sourceopen', this._onMediaSourceOpen);
+ ms.addEventListener('sourceended', this._onMediaSourceEnded);
+ ms.addEventListener('sourceclose', this._onMediaSourceClose); // link video and media Source
-function abr_controller_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+ media.src = window.URL.createObjectURL(ms); // cache the locally generated object url
-function abr_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+ this._objectUrl = media.src;
+ }
+ };
-/*
- * simple ABR Controller
- * - compute next level based on last fragment bw heuristics
- * - implement an abandon rules triggered if we have less than 2 frag buffered and if computed bw shows that we risk buffer stalling
- */
+ _proto.onMediaDetaching = function onMediaDetaching() {
+ logger["logger"].log('media source detaching');
+ var ms = this.mediaSource;
+ if (ms) {
+ if (ms.readyState === 'open') {
+ try {
+ // endOfStream could trigger exception if any sourcebuffer is in updating state
+ // we don't really care about checking sourcebuffer state here,
+ // as we are anyway detaching the MediaSource
+ // let's just avoid this exception to propagate
+ ms.endOfStream();
+ } catch (err) {
+ logger["logger"].warn("onMediaDetaching:" + err.message + " while calling endOfStream");
+ }
+ }
+ ms.removeEventListener('sourceopen', this._onMediaSourceOpen);
+ ms.removeEventListener('sourceended', this._onMediaSourceEnded);
+ ms.removeEventListener('sourceclose', this._onMediaSourceClose); // Detach properly the MediaSource from the HTMLMediaElement as
+ // suggested in https://github.com/w3c/media-source/issues/53.
+ if (this.media) {
+ if (this._objectUrl) {
+ window.URL.revokeObjectURL(this._objectUrl);
+ } // clean up video tag src only if it's our own url. some external libraries might
+ // hijack the video tag and change its 'src' without destroying the Hls instance first
+ if (this.media.src === this._objectUrl) {
+ this.media.removeAttribute('src');
+ this.media.load();
+ } else {
+ logger["logger"].warn('media.src was changed by a third party - skip cleanup');
+ }
+ }
-var abr_controller_window = window,
- abr_controller_performance = abr_controller_window.performance;
+ this.mediaSource = null;
+ this.media = null;
+ this._objectUrl = null;
+ this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
+ this.pendingTracks = {};
+ this.tracks = {};
+ this.sourceBuffer = {};
+ this.flushRange = [];
+ this.segments = [];
+ this.appended = 0;
+ }
-var abr_controller_AbrController =
-/*#__PURE__*/
-function (_EventHandler) {
- abr_controller_inheritsLoose(AbrController, _EventHandler);
+ this.hls.trigger(events["default"].MEDIA_DETACHED);
+ };
- function AbrController(hls) {
- var _this;
+ _proto.checkPendingTracks = function checkPendingTracks() {
+ var bufferCodecEventsExpected = this.bufferCodecEventsExpected,
+ pendingTracks = this.pendingTracks; // Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
+ // This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
+ // data has been appended to existing ones.
+ // 2 tracks is the max (one for audio, one for video). If we've reach this max go ahead and create the buffers.
- _this = _EventHandler.call(this, hls, events["default"].FRAG_LOADING, events["default"].FRAG_LOADED, events["default"].FRAG_BUFFERED, events["default"].ERROR) || this;
- _this.lastLoadedFragLevel = 0;
- _this._nextAutoLevel = -1;
- _this.hls = hls;
- _this.timer = null;
- _this._bwEstimator = null;
- _this.onCheck = _this._abandonRulesCheck.bind(abr_controller_assertThisInitialized(_this));
- return _this;
- }
+ var pendingTracksCount = Object.keys(pendingTracks).length;
- var _proto = AbrController.prototype;
+ if (pendingTracksCount && !bufferCodecEventsExpected || pendingTracksCount === 2) {
+ // ok, let's create them now !
+ this.createSourceBuffers(pendingTracks);
+ this.pendingTracks = {}; // append any pending segments now !
- _proto.destroy = function destroy() {
- this.clearTimer();
- event_handler.prototype.destroy.call(this);
+ this.doAppending();
+ }
};
- _proto.onFragLoading = function onFragLoading(data) {
- var frag = data.frag;
-
- if (frag.type === 'main') {
- if (!this.timer) {
- this.fragCurrent = frag;
- this.timer = setInterval(this.onCheck, 100);
- } // lazy init of BwEstimator, rationale is that we use different params for Live/VoD
- // so we need to wait for stream manifest / playlist type to instantiate it.
+ _proto.onBufferReset = function onBufferReset() {
+ var sourceBuffer = this.sourceBuffer;
+ for (var type in sourceBuffer) {
+ var sb = sourceBuffer[type];
- if (!this._bwEstimator) {
- var hls = this.hls;
- var config = hls.config;
- var level = frag.level;
- var isLive = hls.levels[level].details.live;
- var ewmaFast;
- var ewmaSlow;
+ try {
+ if (sb) {
+ if (this.mediaSource) {
+ this.mediaSource.removeSourceBuffer(sb);
+ }
- if (isLive) {
- ewmaFast = config.abrEwmaFastLive;
- ewmaSlow = config.abrEwmaSlowLive;
- } else {
- ewmaFast = config.abrEwmaFastVoD;
- ewmaSlow = config.abrEwmaSlowVoD;
+ sb.removeEventListener('updateend', this._onSBUpdateEnd);
+ sb.removeEventListener('error', this._onSBUpdateError);
}
-
- this._bwEstimator = new ewma_bandwidth_estimator(hls, ewmaSlow, ewmaFast, config.abrEwmaDefaultEstimate);
- }
+ } catch (err) {}
}
+
+ this.sourceBuffer = {};
+ this.flushRange = [];
+ this.segments = [];
+ this.appended = 0;
};
- _proto._abandonRulesCheck = function _abandonRulesCheck() {
- /*
- monitor fragment retrieval time...
- we compute expected time of arrival of the complete fragment.
- we compare it to expected time of buffer starvation
- */
- var hls = this.hls;
- var video = hls.media;
- var frag = this.fragCurrent;
+ _proto.onBufferCodecs = function onBufferCodecs(tracks) {
+ var _this2 = this;
- if (!frag) {
+ // if source buffer(s) not created yet, appended buffer tracks in this.pendingTracks
+ // if sourcebuffers already created, do nothing ...
+ if (Object.keys(this.sourceBuffer).length) {
return;
}
- var loader = frag.loader;
- var minAutoLevel = hls.minAutoLevel; // if loader has been destroyed or loading has been aborted, stop timer and return
-
- if (!loader || loader.stats && loader.stats.aborted) {
- logger["logger"].warn('frag loader destroy or aborted, disarm abandonRules');
- this.clearTimer(); // reset forced auto level value so that next level will be selected
+ Object.keys(tracks).forEach(function (trackName) {
+ _this2.pendingTracks[trackName] = tracks[trackName];
+ });
+ this.bufferCodecEventsExpected = Math.max(this.bufferCodecEventsExpected - 1, 0);
- this._nextAutoLevel = -1;
- return;
+ if (this.mediaSource && this.mediaSource.readyState === 'open') {
+ this.checkPendingTracks();
}
+ };
- var stats = loader.stats;
- /* only monitor frag retrieval time if
- (video not paused OR first fragment being loaded(ready state === HAVE_NOTHING = 0)) AND autoswitching enabled AND not lowest level (=> means that we have several levels) */
-
- if (video && stats && (!video.paused && video.playbackRate !== 0 || !video.readyState) && frag.autoLevel && frag.level) {
- var requestDelay = abr_controller_performance.now() - stats.trequest;
- var playbackRate = Math.abs(video.playbackRate); // monitor fragment load progress after half of expected fragment duration,to stabilize bitrate
-
- if (requestDelay > 500 * frag.duration / playbackRate) {
- var levels = hls.levels;
- var loadRate = Math.max(1, stats.bw ? stats.bw / 8 : stats.loaded * 1000 / requestDelay); // byte/s; at least 1 byte/s to avoid division by zero
- // compute expected fragment length using frag duration and level bitrate. also ensure that expected len is gte than already loaded size
-
- var level = levels[frag.level];
- var levelBitrate = level.realBitrate ? Math.max(level.realBitrate, level.bitrate) : level.bitrate;
- var expectedLen = stats.total ? stats.total : Math.max(stats.loaded, Math.round(frag.duration * levelBitrate / 8));
- var pos = video.currentTime;
- var fragLoadedDelay = (expectedLen - stats.loaded) / loadRate;
- var bufferStarvationDelay = (BufferHelper.bufferInfo(video, pos, hls.config.maxBufferHole).end - pos) / playbackRate; // consider emergency switch down only if we have less than 2 frag buffered AND
- // time to finish loading current fragment is bigger than buffer starvation delay
- // ie if we risk buffer starvation if bw does not increase quickly
+ _proto.createSourceBuffers = function createSourceBuffers(tracks) {
+ var sourceBuffer = this.sourceBuffer,
+ mediaSource = this.mediaSource;
- if (bufferStarvationDelay < 2 * frag.duration / playbackRate && fragLoadedDelay > bufferStarvationDelay) {
- var fragLevelNextLoadedDelay;
- var nextLoadLevel; // lets iterate through lower level and try to find the biggest one that could avoid rebuffering
- // we start from current level - 1 and we step down , until we find a matching level
+ if (!mediaSource) {
+ throw Error('createSourceBuffers called when mediaSource was null');
+ }
- for (nextLoadLevel = frag.level - 1; nextLoadLevel > minAutoLevel; nextLoadLevel--) {
- // compute time to load next fragment at lower level
- // 0.8 : consider only 80% of current bw to be conservative
- // 8 = bits per byte (bps/Bps)
- var levelNextBitrate = levels[nextLoadLevel].realBitrate ? Math.max(levels[nextLoadLevel].realBitrate, levels[nextLoadLevel].bitrate) : levels[nextLoadLevel].bitrate;
+ for (var trackName in tracks) {
+ if (!sourceBuffer[trackName]) {
+ var track = tracks[trackName];
- var _fragLevelNextLoadedDelay = frag.duration * levelNextBitrate / (8 * 0.8 * loadRate);
+ if (!track) {
+ throw Error("source buffer exists for track " + trackName + ", however track does not");
+ } // use levelCodec as first priority
- if (_fragLevelNextLoadedDelay < bufferStarvationDelay) {
- // we found a lower level that be rebuffering free with current estimated bw !
- break;
- }
- } // only emergency switch down if it takes less time to load new fragment at lowest level instead
- // of finishing loading current one ...
+ var codec = track.levelCodec || track.codec;
+ var mimeType = track.container + ";codecs=" + codec;
+ logger["logger"].log("creating sourceBuffer(" + mimeType + ")");
- if (fragLevelNextLoadedDelay < fragLoadedDelay) {
- logger["logger"].warn("loading too slow, abort fragment loading and switch to level " + nextLoadLevel + ":fragLoadedDelay[" + nextLoadLevel + "]= minAutoLevel; i--) {
- var levelInfo = levels[i];
+ if (!isFinite(liveBackBufferLength) || liveBackBufferLength < 0) {
+ return;
+ }
- if (!levelInfo) {
- continue;
- }
+ if (!this.media) {
+ logger["logger"].error('flushLiveBackBuffer called without attaching media');
+ return;
+ }
- var levelDetails = levelInfo.details;
- var avgDuration = levelDetails ? levelDetails.totalduration / levelDetails.fragments.length : currentFragDuration;
- var live = levelDetails ? levelDetails.live : false;
- var adjustedbw = void 0; // follow algorithm captured from stagefright :
- // https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
- // Pick the highest bandwidth stream below or equal to estimated bandwidth.
- // consider only 80% of the available bandwidth, but if we are switching up,
- // be even more conservative (70%) to avoid overestimating and immediately
- // switching back.
+ var currentTime = this.media.currentTime;
+ var sourceBuffer = this.sourceBuffer;
+ var bufferTypes = Object.keys(sourceBuffer);
+ var targetBackBufferPosition = currentTime - Math.max(liveBackBufferLength, this._levelTargetDuration);
- if (i <= currentLevel) {
- adjustedbw = bwFactor * currentBw;
- } else {
- adjustedbw = bwUpFactor * currentBw;
- }
+ for (var index = bufferTypes.length - 1; index >= 0; index--) {
+ var bufferType = bufferTypes[index];
+ var sb = sourceBuffer[bufferType];
- var bitrate = levels[i].realBitrate ? Math.max(levels[i].realBitrate, levels[i].bitrate) : levels[i].bitrate;
- var fetchDuration = bitrate * avgDuration / adjustedbw;
- logger["logger"].trace("level/adjustedbw/bitrate/avgDuration/maxFetchDuration/fetchDuration: " + i + "/" + Math.round(adjustedbw) + "/" + bitrate + "/" + avgDuration + "/" + maxFetchDuration + "/" + fetchDuration); // if adjusted bw is greater than level bitrate AND
+ if (sb) {
+ var buffered = sb.buffered; // when target buffer start exceeds actual buffer start
- if (adjustedbw > bitrate && ( // fragment fetchDuration unknown OR live stream OR fragment fetchDuration less than max allowed fetch duration, then this level matches
- // we don't account for max Fetch Duration for live streams, this is to avoid switching down when near the edge of live sliding window ...
- // special case to support startLevel = -1 (bitrateTest) on live streams : in that case we should not exit loop so that _findBestLevel will return -1
- !fetchDuration || live && !this.bitrateTestDelay || fetchDuration < maxFetchDuration)) {
- // as we are looping from highest to lowest, this will return the best achievable quality level
- return i;
+ if (buffered.length > 0 && targetBackBufferPosition > buffered.start(0)) {
+ // remove buffer up until current time minus minimum back buffer length (removing buffer too close to current
+ // time will lead to playback freezing)
+ // credits for level target duration - https://github.com/videojs/http-streaming/blob/3132933b6aa99ddefab29c10447624efd6fd6e52/src/segment-loader.js#L91
+ if (this.removeBufferRange(bufferType, sb, 0, targetBackBufferPosition)) {
+ this.hls.trigger(events["default"].LIVE_BACK_BUFFER_REACHED, {
+ bufferEnd: targetBackBufferPosition
+ });
+ }
+ }
}
- } // not enough time budget even with quality level 0 ... rebuffering might happen
-
-
- return -1;
+ }
};
- abr_controller_createClass(AbrController, [{
- key: "nextAutoLevel",
- get: function get() {
- var forcedAutoLevel = this._nextAutoLevel;
- var bwEstimator = this._bwEstimator; // in case next auto level has been forced, and bw not available or not reliable, return forced value
+ _proto.onLevelUpdated = function onLevelUpdated(_ref) {
+ var details = _ref.details;
- if (forcedAutoLevel !== -1 && (!bwEstimator || !bwEstimator.canEstimate())) {
- return forcedAutoLevel;
- } // compute next level using ABR logic
+ if (details.fragments.length > 0) {
+ this._levelDuration = details.totalduration + details.fragments[0].start;
+ this._levelTargetDuration = details.averagetargetduration || details.targetduration || 10;
+ this._live = details.live;
+ this.updateMediaElementDuration();
+ }
+ }
+ /**
+ * Update Media Source duration to current level duration or override to Infinity if configuration parameter
+ * 'liveDurationInfinity` is set to `true`
+ * More details: https://github.com/video-dev/hls.js/issues/355
+ */
+ ;
+ _proto.updateMediaElementDuration = function updateMediaElementDuration() {
+ var config = this.config;
+ var duration;
- var nextABRAutoLevel = this._nextABRAutoLevel; // if forced auto level has been defined, use it to cap ABR computed quality level
+ if (this._levelDuration === null || !this.media || !this.mediaSource || !this.sourceBuffer || this.media.readyState === 0 || this.mediaSource.readyState !== 'open') {
+ return;
+ }
- if (forcedAutoLevel !== -1) {
- nextABRAutoLevel = Math.min(forcedAutoLevel, nextABRAutoLevel);
- }
+ for (var type in this.sourceBuffer) {
+ var sb = this.sourceBuffer[type];
- return nextABRAutoLevel;
- },
- set: function set(nextLevel) {
- this._nextAutoLevel = nextLevel;
+ if (sb && sb.updating === true) {
+ // can't set duration whilst a buffer is updating
+ return;
+ }
}
- }, {
- key: "_nextABRAutoLevel",
- get: function get() {
- var hls = this.hls;
- var maxAutoLevel = hls.maxAutoLevel,
- levels = hls.levels,
- config = hls.config,
- minAutoLevel = hls.minAutoLevel;
- var video = hls.media;
- var currentLevel = this.lastLoadedFragLevel;
- var currentFragDuration = this.fragCurrent ? this.fragCurrent.duration : 0;
- var pos = video ? video.currentTime : 0; // playbackRate is the absolute value of the playback rate; if video.playbackRate is 0, we use 1 to load as
- // if we're playing back at the normal rate.
- var playbackRate = video && video.playbackRate !== 0 ? Math.abs(video.playbackRate) : 1.0;
- var avgbw = this._bwEstimator ? this._bwEstimator.getEstimate() : config.abrEwmaDefaultEstimate; // bufferStarvationDelay is the wall-clock time left until the playback buffer is exhausted.
+ duration = this.media.duration; // initialise to the value that the media source is reporting
+
+ if (this._msDuration === null) {
+ this._msDuration = this.mediaSource.duration;
+ }
- var bufferStarvationDelay = (BufferHelper.bufferInfo(video, pos, config.maxBufferHole).end - pos) / playbackRate; // First, look to see if we can find a level matching with our avg bandwidth AND that could also guarantee no rebuffering at all
+ if (this._live === true && config.liveDurationInfinity === true) {
+ // Override duration to Infinity
+ logger["logger"].log('Media Source duration is set to Infinity');
+ this._msDuration = this.mediaSource.duration = Infinity;
+ } else if (this._levelDuration > this._msDuration && this._levelDuration > duration || !Object(number_isFinite["isFiniteNumber"])(duration)) {
+ // levelDuration was the last value we set.
+ // not using mediaSource.duration as the browser may tweak this value
+ // only update Media Source duration if its value increase, this is to avoid
+ // flushing already buffered portion when switching between quality level
+ logger["logger"].log("Updating Media Source duration to " + this._levelDuration.toFixed(3));
+ this._msDuration = this.mediaSource.duration = this._levelDuration;
+ }
+ };
- var bestLevel = this._findBestLevel(currentLevel, currentFragDuration, avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, config.abrBandWidthFactor, config.abrBandWidthUpFactor, levels);
+ _proto.doFlush = function doFlush() {
+ // loop through all buffer ranges to flush
+ while (this.flushRange.length) {
+ var range = this.flushRange[0]; // flushBuffer will abort any buffer append in progress and flush Audio/Video Buffer
- if (bestLevel >= 0) {
- return bestLevel;
+ if (this.flushBuffer(range.start, range.end, range.type)) {
+ // range flushed, remove from flush array
+ this.flushRange.shift();
+ this.flushBufferCounter = 0;
} else {
- logger["logger"].trace('rebuffering expected to happen, lets try to find a quality level minimizing the rebuffering'); // not possible to get rid of rebuffering ... let's try to find level that will guarantee less than maxStarvationDelay of rebuffering
- // if no matching level found, logic will return 0
+ this._needsFlush = true; // avoid looping, wait for SB update end to retrigger a flush
- var maxStarvationDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxStarvationDelay) : config.maxStarvationDelay;
- var bwFactor = config.abrBandWidthFactor;
- var bwUpFactor = config.abrBandWidthUpFactor;
+ return;
+ }
+ }
- if (bufferStarvationDelay === 0) {
- // in case buffer is empty, let's check if previous fragment was loaded to perform a bitrate test
- var bitrateTestDelay = this.bitrateTestDelay;
+ if (this.flushRange.length === 0) {
+ // everything flushed
+ this._needsFlush = false; // let's recompute this.appended, which is used to avoid flush looping
- if (bitrateTestDelay) {
- // if it is the case, then we need to adjust our max starvation delay using maxLoadingDelay config value
- // max video loading delay used in automatic start level selection :
- // in that mode ABR controller will ensure that video loading time (ie the time to fetch the first fragment at lowest quality level +
- // the time to fetch the fragment at the appropriate quality level is less than ```maxLoadingDelay``` )
- // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
- var maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
- maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
- logger["logger"].trace("bitrate test took " + Math.round(1000 * bitrateTestDelay) + "ms, set first fragment max fetchDuration to " + Math.round(1000 * maxStarvationDelay) + " ms"); // don't use conservative factor on bitrate test
+ var appended = 0;
+ var sourceBuffer = this.sourceBuffer;
- bwFactor = bwUpFactor = 1;
+ try {
+ for (var type in sourceBuffer) {
+ var sb = sourceBuffer[type];
+
+ if (sb) {
+ appended += sb.buffered.length;
}
}
-
- bestLevel = this._findBestLevel(currentLevel, currentFragDuration, avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay + maxStarvationDelay, bwFactor, bwUpFactor, levels);
- return Math.max(bestLevel, 0);
+ } catch (error) {
+ // error could be thrown while accessing buffered, in case sourcebuffer has already been removed from MediaSource
+ // this is harmess at this stage, catch this to avoid reporting an internal exception
+ logger["logger"].error('error while accessing sourceBuffer.buffered');
}
+
+ this.appended = appended;
+ this.hls.trigger(events["default"].BUFFER_FLUSHED);
}
- }]);
+ };
- return AbrController;
-}(event_handler);
+ _proto.doAppending = function doAppending() {
+ var config = this.config,
+ hls = this.hls,
+ segments = this.segments,
+ sourceBuffer = this.sourceBuffer;
-/* harmony default export */ var abr_controller = (abr_controller_AbrController);
-// CONCATENATED MODULE: ./src/controller/buffer-controller.ts
+ if (!Object.keys(sourceBuffer).length) {
+ // early exit if no source buffers have been initialized yet
+ return;
+ }
+ if (!this.media || this.media.error) {
+ this.segments = [];
+ logger["logger"].error('trying to append although a media error occured, flush segment and abort');
+ return;
+ }
-function buffer_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+ if (this.appending) {
+ // logger.log(`sb appending in progress`);
+ return;
+ }
-/*
- * Buffer Controller
- */
+ var segment = segments.shift();
+ if (!segment) {
+ // handle undefined shift
+ return;
+ }
+ try {
+ var sb = sourceBuffer[segment.type];
+ if (!sb) {
+ // in case we don't have any source buffer matching with this segment type,
+ // it means that Mediasource fails to create sourcebuffer
+ // discard this segment, and trigger update end
+ this._onSBUpdateEnd();
+ return;
+ }
-var buffer_controller_MediaSource = getMediaSource();
+ if (sb.updating) {
+ // if we are still updating the source buffer from the last segment, place this back at the front of the queue
+ segments.unshift(segment);
+ return;
+ } // reset sourceBuffer ended flag before appending segment
-var buffer_controller_BufferController =
-/*#__PURE__*/
-function (_EventHandler) {
- buffer_controller_inheritsLoose(BufferController, _EventHandler);
- // the value that we have set mediasource.duration to
- // (the actual duration may be tweaked slighly by the browser)
- // the value that we want to set mediaSource.duration to
- // the target duration of the current media playlist
- // current stream state: true - for live broadcast, false - for VoD content
- // cache the self generated object url to detect hijack of video tag
- // signals that the sourceBuffers need to be flushed
- // signals that mediaSource should have endOfStream called
- // this is optional because this property is removed from the class sometimes
- // The number of BUFFER_CODEC events received before any sourceBuffers are created
- // A reference to the attached media element
- // A reference to the active media source
- // List of pending segments to be appended to source buffer
- // A guard to see if we are currently appending to the source buffer
- // counters
- function BufferController(hls) {
- var _this;
+ sb.ended = false; // logger.log(`appending ${segment.content} ${type} SB, size:${segment.data.length}, ${segment.parent}`);
- _this = _EventHandler.call(this, hls, events["default"].MEDIA_ATTACHING, events["default"].MEDIA_DETACHING, events["default"].MANIFEST_PARSED, events["default"].BUFFER_RESET, events["default"].BUFFER_APPENDING, events["default"].BUFFER_CODECS, events["default"].BUFFER_EOS, events["default"].BUFFER_FLUSHING, events["default"].LEVEL_PTS_UPDATED, events["default"].LEVEL_UPDATED) || this;
- _this._msDuration = null;
- _this._levelDuration = null;
- _this._levelTargetDuration = 10;
- _this._live = null;
- _this._objectUrl = null;
- _this._needsFlush = false;
- _this._needsEos = false;
- _this.config = void 0;
- _this.audioTimestampOffset = void 0;
- _this.bufferCodecEventsExpected = 0;
- _this.media = null;
- _this.mediaSource = null;
- _this.segments = [];
- _this.parent = void 0;
- _this.appending = false;
- _this.appended = 0;
- _this.appendError = 0;
- _this.flushBufferCounter = 0;
- _this.tracks = {};
- _this.pendingTracks = {};
- _this.sourceBuffer = {};
- _this.flushRange = [];
+ this.parent = segment.parent;
+ sb.appendBuffer(segment.data);
+ this.appendError = 0;
+ this.appended++;
+ this.appending = true;
+ } catch (err) {
+ // in case any error occured while appending, put back segment in segments table
+ logger["logger"].error("error while trying to append buffer:" + err.message);
+ segments.unshift(segment);
+ var event = {
+ type: errors["ErrorTypes"].MEDIA_ERROR,
+ parent: segment.parent,
+ details: '',
+ fatal: false
+ };
- _this._onMediaSourceOpen = function () {
- logger["logger"].log('media source opened');
+ if (err.code === 22) {
+ // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
+ // let's stop appending any segments, and report BUFFER_FULL_ERROR error
+ this.segments = [];
+ event.details = errors["ErrorDetails"].BUFFER_FULL_ERROR;
+ } else {
+ this.appendError++;
+ event.details = errors["ErrorDetails"].BUFFER_APPEND_ERROR;
+ /* with UHD content, we could get loop of quota exceeded error until
+ browser is able to evict some data from sourcebuffer. retrying help recovering this
+ */
- _this.hls.trigger(events["default"].MEDIA_ATTACHED, {
- media: _this.media
- });
+ if (this.appendError > config.appendErrorMaxRetry) {
+ logger["logger"].log("fail " + config.appendErrorMaxRetry + " times to append segment in sourceBuffer");
+ this.segments = [];
+ event.fatal = true;
+ }
+ }
- var mediaSource = _this.mediaSource;
+ hls.trigger(events["default"].ERROR, event);
+ }
+ }
+ /*
+ flush specified buffered range,
+ return true once range has been flushed.
+ as sourceBuffer.remove() is asynchronous, flushBuffer will be retriggered on sourceBuffer update end
+ */
+ ;
- if (mediaSource) {
- // once received, don't listen anymore to sourceopen event
- mediaSource.removeEventListener('sourceopen', _this._onMediaSourceOpen);
- }
+ _proto.flushBuffer = function flushBuffer(startOffset, endOffset, sbType) {
+ var sourceBuffer = this.sourceBuffer; // exit if no sourceBuffers are initialized
- _this.checkPendingTracks();
- };
+ if (!Object.keys(sourceBuffer).length) {
+ return true;
+ }
- _this._onMediaSourceClose = function () {
- logger["logger"].log('media source closed');
- };
+ var currentTime = 'null';
- _this._onMediaSourceEnded = function () {
- logger["logger"].log('media source ended');
- };
+ if (this.media) {
+ currentTime = this.media.currentTime.toFixed(3);
+ }
- _this._onSBUpdateEnd = function () {
- // update timestampOffset
- if (_this.audioTimestampOffset && _this.sourceBuffer.audio) {
- var audioBuffer = _this.sourceBuffer.audio;
- logger["logger"].warn("change mpeg audio timestamp offset from " + audioBuffer.timestampOffset + " to " + _this.audioTimestampOffset);
- audioBuffer.timestampOffset = _this.audioTimestampOffset;
- delete _this.audioTimestampOffset;
- }
+ logger["logger"].log("flushBuffer,pos/start/end: " + currentTime + "/" + startOffset + "/" + endOffset); // safeguard to avoid infinite looping : don't try to flush more than the nb of appended segments
- if (_this._needsFlush) {
- _this.doFlush();
- }
+ if (this.flushBufferCounter >= this.appended) {
+ logger["logger"].warn('abort flushing too many retries');
+ return true;
+ }
- if (_this._needsEos) {
- _this.checkEos();
+ var sb = sourceBuffer[sbType]; // we are going to flush buffer, mark source buffer as 'not ended'
+
+ if (sb) {
+ sb.ended = false;
+
+ if (!sb.updating) {
+ if (this.removeBufferRange(sbType, sb, startOffset, endOffset)) {
+ this.flushBufferCounter++;
+ return false;
+ }
+ } else {
+ logger["logger"].warn('cannot flush, sb updating in progress');
+ return false;
}
+ }
- _this.appending = false;
- var parent = _this.parent; // count nb of pending segments waiting for appending on this sourcebuffer
+ logger["logger"].log('buffer flushed'); // everything flushed !
- var pending = _this.segments.reduce(function (counter, segment) {
- return segment.parent === parent ? counter + 1 : counter;
- }, 0); // this.sourceBuffer is better to use than media.buffered as it is closer to the PTS data from the fragments
+ return true;
+ }
+ /**
+ * Removes first buffered range from provided source buffer that lies within given start and end offsets.
+ *
+ * @param {string} type Type of the source buffer, logging purposes only.
+ * @param {SourceBuffer} sb Target SourceBuffer instance.
+ * @param {number} startOffset
+ * @param {number} endOffset
+ *
+ * @returns {boolean} True when source buffer remove requested.
+ */
+ ;
+ _proto.removeBufferRange = function removeBufferRange(type, sb, startOffset, endOffset) {
+ try {
+ for (var i = 0; i < sb.buffered.length; i++) {
+ var bufStart = sb.buffered.start(i);
+ var bufEnd = sb.buffered.end(i);
+ var removeStart = Math.max(bufStart, startOffset);
+ var removeEnd = Math.min(bufEnd, endOffset);
+ /* sometimes sourcebuffer.remove() does not flush
+ the exact expected time range.
+ to avoid rounding issues/infinite loop,
+ only flush buffer range of length greater than 500ms.
+ */
- var timeRanges = {};
- var sbSet = _this.sourceBuffer;
+ if (Math.min(removeEnd, bufEnd) - removeStart > 0.5) {
+ var currentTime = 'null';
- for (var streamType in sbSet) {
- var sb = sbSet[streamType];
+ if (this.media) {
+ currentTime = this.media.currentTime.toString();
+ }
- if (!sb) {
- throw Error("handling source buffer update end error: source buffer for " + streamType + " uninitilized and unable to update buffered TimeRanges.");
+ logger["logger"].log("sb remove " + type + " [" + removeStart + "," + removeEnd + "], of [" + bufStart + "," + bufEnd + "], pos:" + currentTime);
+ sb.remove(removeStart, removeEnd);
+ return true;
}
-
- timeRanges[streamType] = sb.buffered;
}
+ } catch (error) {
+ logger["logger"].warn('removeBufferRange failed', error);
+ }
- _this.hls.trigger(events["default"].BUFFER_APPENDED, {
- parent: parent,
- pending: pending,
- timeRanges: timeRanges
- }); // don't append in flushing mode
+ return false;
+ };
+
+ return BufferController;
+}(event_handler);
+/* harmony default export */ var buffer_controller = (buffer_controller_BufferController);
+// CONCATENATED MODULE: ./src/controller/cap-level-controller.js
+function cap_level_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
- if (!_this._needsFlush) {
- _this.doAppending();
- }
+function cap_level_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) cap_level_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) cap_level_controller_defineProperties(Constructor, staticProps); return Constructor; }
- _this.updateMediaElementDuration(); // appending goes first
+function cap_level_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+/*
+ * cap stream level to media size dimension controller
+*/
- if (pending === 0) {
- _this.flushLiveBackBuffer();
- }
- };
- _this._onSBUpdateError = function (event) {
- logger["logger"].error('sourceBuffer error:', event); // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
- // this error might not always be fatal (it is fatal if decode error is set, in that case
- // it will be followed by a mediaElement error ...)
- _this.hls.trigger(events["default"].ERROR, {
- type: errors["ErrorTypes"].MEDIA_ERROR,
- details: errors["ErrorDetails"].BUFFER_APPENDING_ERROR,
- fatal: false
- }); // we don't need to do more than that, as accordin to the spec, updateend will be fired just after
+var cap_level_controller_CapLevelController =
+/*#__PURE__*/
+function (_EventHandler) {
+ cap_level_controller_inheritsLoose(CapLevelController, _EventHandler);
- };
+ function CapLevelController(hls) {
+ var _this;
- _this.config = hls.config;
+ _this = _EventHandler.call(this, hls, events["default"].FPS_DROP_LEVEL_CAPPING, events["default"].MEDIA_ATTACHING, events["default"].MANIFEST_PARSED, events["default"].BUFFER_CODECS, events["default"].MEDIA_DETACHING) || this;
+ _this.autoLevelCapping = Number.POSITIVE_INFINITY;
+ _this.firstLevel = null;
+ _this.levels = [];
+ _this.media = null;
+ _this.restrictedLevels = [];
+ _this.timer = null;
return _this;
}
- var _proto = BufferController.prototype;
+ var _proto = CapLevelController.prototype;
_proto.destroy = function destroy() {
- event_handler.prototype.destroy.call(this);
+ if (this.hls.config.capLevelToPlayerSize) {
+ this.media = null;
+ this.stopCapping();
+ }
};
- _proto.onLevelPtsUpdated = function onLevelPtsUpdated(data) {
- var type = data.type;
- var audioTrack = this.tracks.audio; // Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
- // in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset`
- // is greater than 100ms (this is enough to handle seek for VOD or level change for LIVE videos). At the time of change we issue
- // `SourceBuffer.abort()` and adjusting `SourceBuffer.timestampOffset` if `SourceBuffer.updating` is false or awaiting `updateend`
- // event if SB is in updating state.
- // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
-
- if (type === 'audio' && audioTrack && audioTrack.container === 'audio/mpeg') {
- // Chrome audio mp3 track
- var audioBuffer = this.sourceBuffer.audio;
+ _proto.onFpsDropLevelCapping = function onFpsDropLevelCapping(data) {
+ // Don't add a restricted level more than once
+ if (CapLevelController.isLevelAllowed(data.droppedLevel, this.restrictedLevels)) {
+ this.restrictedLevels.push(data.droppedLevel);
+ }
+ };
- if (!audioBuffer) {
- throw Error('Level PTS Updated and source buffer for audio uninitalized');
- }
+ _proto.onMediaAttaching = function onMediaAttaching(data) {
+ this.media = data.media instanceof window.HTMLVideoElement ? data.media : null;
+ };
- var delta = Math.abs(audioBuffer.timestampOffset - data.start); // adjust timestamp offset if time delta is greater than 100ms
+ _proto.onManifestParsed = function onManifestParsed(data) {
+ var hls = this.hls;
+ this.restrictedLevels = [];
+ this.levels = data.levels;
+ this.firstLevel = data.firstLevel;
- if (delta > 0.1) {
- var updating = audioBuffer.updating;
+ if (hls.config.capLevelToPlayerSize && data.video) {
+ // Start capping immediately if the manifest has signaled video codecs
+ this.startCapping();
+ }
+ } // Only activate capping when playing a video stream; otherwise, multi-bitrate audio-only streams will be restricted
+ // to the first level
+ ;
- try {
- audioBuffer.abort();
- } catch (err) {
- logger["logger"].warn('can not abort audio buffer: ' + err);
- }
+ _proto.onBufferCodecs = function onBufferCodecs(data) {
+ var hls = this.hls;
- if (!updating) {
- logger["logger"].warn('change mpeg audio timestamp offset from ' + audioBuffer.timestampOffset + ' to ' + data.start);
- audioBuffer.timestampOffset = data.start;
- } else {
- this.audioTimestampOffset = data.start;
- }
- }
+ if (hls.config.capLevelToPlayerSize && data.video) {
+ // If the manifest did not signal a video codec capping has been deferred until we're certain video is present
+ this.startCapping();
}
};
- _proto.onManifestParsed = function onManifestParsed(data) {
- // in case of alt audio 2 BUFFER_CODECS events will be triggered, one per stream controller
- // sourcebuffers will be created all at once when the expected nb of tracks will be reached
- // in case alt audio is not used, only one BUFFER_CODEC event will be fired from main stream controller
- // it will contain the expected nb of source buffers, no need to compute it
- this.bufferCodecEventsExpected = data.altAudio ? 2 : 1;
- logger["logger"].log(this.bufferCodecEventsExpected + " bufferCodec event(s) expected");
+ _proto.onLevelsUpdated = function onLevelsUpdated(data) {
+ this.levels = data.levels;
};
- _proto.onMediaAttaching = function onMediaAttaching(data) {
- var media = this.media = data.media;
+ _proto.onMediaDetaching = function onMediaDetaching() {
+ this.stopCapping();
+ };
- if (media && buffer_controller_MediaSource) {
- // setup the media source
- var ms = this.mediaSource = new buffer_controller_MediaSource(); // Media Source listeners
+ _proto.detectPlayerSize = function detectPlayerSize() {
+ if (this.media) {
+ var levelsLength = this.levels ? this.levels.length : 0;
- ms.addEventListener('sourceopen', this._onMediaSourceOpen);
- ms.addEventListener('sourceended', this._onMediaSourceEnded);
- ms.addEventListener('sourceclose', this._onMediaSourceClose); // link video and media Source
+ if (levelsLength) {
+ var hls = this.hls;
+ hls.autoLevelCapping = this.getMaxLevel(levelsLength - 1);
- media.src = window.URL.createObjectURL(ms); // cache the locally generated object url
+ if (hls.autoLevelCapping > this.autoLevelCapping) {
+ // if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
+ // usually happen when the user go to the fullscreen mode.
+ hls.streamController.nextLevelSwitch();
+ }
- this._objectUrl = media.src;
+ this.autoLevelCapping = hls.autoLevelCapping;
+ }
}
- };
+ }
+ /*
+ * returns level should be the one with the dimensions equal or greater than the media (player) dimensions (so the video will be downscaled)
+ */
+ ;
- _proto.onMediaDetaching = function onMediaDetaching() {
- logger["logger"].log('media source detaching');
- var ms = this.mediaSource;
+ _proto.getMaxLevel = function getMaxLevel(capLevelIndex) {
+ var _this2 = this;
- if (ms) {
- if (ms.readyState === 'open') {
- try {
- // endOfStream could trigger exception if any sourcebuffer is in updating state
- // we don't really care about checking sourcebuffer state here,
- // as we are anyway detaching the MediaSource
- // let's just avoid this exception to propagate
- ms.endOfStream();
- } catch (err) {
- logger["logger"].warn("onMediaDetaching:" + err.message + " while calling endOfStream");
- }
- }
+ if (!this.levels) {
+ return -1;
+ }
- ms.removeEventListener('sourceopen', this._onMediaSourceOpen);
- ms.removeEventListener('sourceended', this._onMediaSourceEnded);
- ms.removeEventListener('sourceclose', this._onMediaSourceClose); // Detach properly the MediaSource from the HTMLMediaElement as
- // suggested in https://github.com/w3c/media-source/issues/53.
+ var validLevels = this.levels.filter(function (level, index) {
+ return CapLevelController.isLevelAllowed(index, _this2.restrictedLevels) && index <= capLevelIndex;
+ });
+ return CapLevelController.getMaxLevelByMediaSize(validLevels, this.mediaWidth, this.mediaHeight);
+ };
- if (this.media) {
- if (this._objectUrl) {
- window.URL.revokeObjectURL(this._objectUrl);
- } // clean up video tag src only if it's our own url. some external libraries might
- // hijack the video tag and change its 'src' without destroying the Hls instance first
+ _proto.startCapping = function startCapping() {
+ if (this.timer) {
+ // Don't reset capping if started twice; this can happen if the manifest signals a video codec
+ return;
+ }
+ this.autoLevelCapping = Number.POSITIVE_INFINITY;
+ this.hls.firstLevel = this.getMaxLevel(this.firstLevel);
+ clearInterval(this.timer);
+ this.timer = setInterval(this.detectPlayerSize.bind(this), 1000);
+ this.detectPlayerSize();
+ };
+
+ _proto.stopCapping = function stopCapping() {
+ this.restrictedLevels = [];
+ this.firstLevel = null;
+ this.autoLevelCapping = Number.POSITIVE_INFINITY;
- if (this.media.src === this._objectUrl) {
- this.media.removeAttribute('src');
- this.media.load();
- } else {
- logger["logger"].warn('media.src was changed by a third party - skip cleanup');
- }
- }
+ if (this.timer) {
+ this.timer = clearInterval(this.timer);
+ this.timer = null;
+ }
+ };
- this.mediaSource = null;
- this.media = null;
- this._objectUrl = null;
- this.pendingTracks = {};
- this.tracks = {};
- this.sourceBuffer = {};
- this.flushRange = [];
- this.segments = [];
- this.appended = 0;
+ CapLevelController.isLevelAllowed = function isLevelAllowed(level, restrictedLevels) {
+ if (restrictedLevels === void 0) {
+ restrictedLevels = [];
}
- this.hls.trigger(events["default"].MEDIA_DETACHED);
+ return restrictedLevels.indexOf(level) === -1;
};
- _proto.checkPendingTracks = function checkPendingTracks() {
- var bufferCodecEventsExpected = this.bufferCodecEventsExpected,
- pendingTracks = this.pendingTracks; // Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
- // This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
- // data has been appended to existing ones.
- // 2 tracks is the max (one for audio, one for video). If we've reach this max go ahead and create the buffers.
+ CapLevelController.getMaxLevelByMediaSize = function getMaxLevelByMediaSize(levels, width, height) {
+ if (!levels || levels && !levels.length) {
+ return -1;
+ } // Levels can have the same dimensions but differing bandwidths - since levels are ordered, we can look to the next
+ // to determine whether we've chosen the greatest bandwidth for the media's dimensions
- var pendingTracksCount = Object.keys(pendingTracks).length;
- if (pendingTracksCount && !bufferCodecEventsExpected || pendingTracksCount === 2) {
- // ok, let's create them now !
- this.createSourceBuffers(pendingTracks);
- this.pendingTracks = {}; // append any pending segments now !
+ var atGreatestBandiwdth = function atGreatestBandiwdth(curLevel, nextLevel) {
+ if (!nextLevel) {
+ return true;
+ }
- this.doAppending();
- }
- };
+ return curLevel.width !== nextLevel.width || curLevel.height !== nextLevel.height;
+ }; // If we run through the loop without breaking, the media's dimensions are greater than every level, so default to
+ // the max level
- _proto.onBufferReset = function onBufferReset() {
- var sourceBuffer = this.sourceBuffer;
- for (var type in sourceBuffer) {
- var sb = sourceBuffer[type];
+ var maxLevelIndex = levels.length - 1;
- try {
- if (sb) {
- if (this.mediaSource) {
- this.mediaSource.removeSourceBuffer(sb);
- }
+ for (var i = 0; i < levels.length; i += 1) {
+ var level = levels[i];
- sb.removeEventListener('updateend', this._onSBUpdateEnd);
- sb.removeEventListener('error', this._onSBUpdateError);
- }
- } catch (err) {}
+ if ((level.width >= width || level.height >= height) && atGreatestBandiwdth(level, levels[i + 1])) {
+ maxLevelIndex = i;
+ break;
+ }
}
- this.sourceBuffer = {};
- this.flushRange = [];
- this.segments = [];
- this.appended = 0;
+ return maxLevelIndex;
};
- _proto.onBufferCodecs = function onBufferCodecs(tracks) {
- var _this2 = this;
+ cap_level_controller_createClass(CapLevelController, [{
+ key: "mediaWidth",
+ get: function get() {
+ var width;
+ var media = this.media;
- // if source buffer(s) not created yet, appended buffer tracks in this.pendingTracks
- // if sourcebuffers already created, do nothing ...
- if (Object.keys(this.sourceBuffer).length) {
- return;
+ if (media) {
+ width = media.width || media.clientWidth || media.offsetWidth;
+ width *= CapLevelController.contentScaleFactor;
+ }
+
+ return width;
}
+ }, {
+ key: "mediaHeight",
+ get: function get() {
+ var height;
+ var media = this.media;
- Object.keys(tracks).forEach(function (trackName) {
- _this2.pendingTracks[trackName] = tracks[trackName];
- });
- this.bufferCodecEventsExpected = Math.max(this.bufferCodecEventsExpected - 1, 0);
+ if (media) {
+ height = media.height || media.clientHeight || media.offsetHeight;
+ height *= CapLevelController.contentScaleFactor;
+ }
- if (this.mediaSource && this.mediaSource.readyState === 'open') {
- this.checkPendingTracks();
+ return height;
}
- };
+ }], [{
+ key: "contentScaleFactor",
+ get: function get() {
+ var pixelRatio = 1;
- _proto.createSourceBuffers = function createSourceBuffers(tracks) {
- var sourceBuffer = this.sourceBuffer,
- mediaSource = this.mediaSource;
+ try {
+ pixelRatio = window.devicePixelRatio;
+ } catch (e) {}
- if (!mediaSource) {
- throw Error('createSourceBuffers called when mediaSource was null');
+ return pixelRatio;
}
+ }]);
- for (var trackName in tracks) {
- if (!sourceBuffer[trackName]) {
- var track = tracks[trackName];
+ return CapLevelController;
+}(event_handler);
- if (!track) {
- throw Error("source buffer exists for track " + trackName + ", however track does not");
- } // use levelCodec as first priority
+/* harmony default export */ var cap_level_controller = (cap_level_controller_CapLevelController);
+// CONCATENATED MODULE: ./src/controller/fps-controller.js
+function fps_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+/*
+ * FPS Controller
+*/
- var codec = track.levelCodec || track.codec;
- var mimeType = track.container + ";codecs=" + codec;
- logger["logger"].log("creating sourceBuffer(" + mimeType + ")");
- try {
- var sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
- sb.addEventListener('updateend', this._onSBUpdateEnd);
- sb.addEventListener('error', this._onSBUpdateError);
- this.tracks[trackName] = {
- buffer: sb,
- codec: codec,
- id: track.id,
- container: track.container,
- levelCodec: track.levelCodec
- };
- } catch (err) {
- logger["logger"].error("error while trying to add sourceBuffer:" + err.message);
- this.hls.trigger(events["default"].ERROR, {
- type: errors["ErrorTypes"].MEDIA_ERROR,
- details: errors["ErrorDetails"].BUFFER_ADD_CODEC_ERROR,
- fatal: false,
- err: err,
- mimeType: mimeType
- });
- }
- }
- }
- this.hls.trigger(events["default"].BUFFER_CREATED, {
- tracks: this.tracks
- });
- };
+var fps_controller_window = window,
+ fps_controller_performance = fps_controller_window.performance;
- _proto.onBufferAppending = function onBufferAppending(data) {
- if (!this._needsFlush) {
- if (!this.segments) {
- this.segments = [data];
- } else {
- this.segments.push(data);
- }
+var fps_controller_FPSController =
+/*#__PURE__*/
+function (_EventHandler) {
+ fps_controller_inheritsLoose(FPSController, _EventHandler);
- this.doAppending();
- }
- } // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
- // an undefined data.type will mark all buffers as EOS.
- ;
+ function FPSController(hls) {
+ return _EventHandler.call(this, hls, events["default"].MEDIA_ATTACHING) || this;
+ }
- _proto.onBufferEos = function onBufferEos(data) {
- for (var type in this.sourceBuffer) {
- if (!data.type || data.type === type) {
- var sb = this.sourceBuffer[type];
+ var _proto = FPSController.prototype;
- if (sb && !sb.ended) {
- sb.ended = true;
- logger["logger"].log(type + " sourceBuffer now EOS");
- }
- }
+ _proto.destroy = function destroy() {
+ if (this.timer) {
+ clearInterval(this.timer);
}
- this.checkEos();
- } // if all source buffers are marked as ended, signal endOfStream() to MediaSource.
- ;
+ this.isVideoPlaybackQualityAvailable = false;
+ };
- _proto.checkEos = function checkEos() {
- var sourceBuffer = this.sourceBuffer,
- mediaSource = this.mediaSource;
+ _proto.onMediaAttaching = function onMediaAttaching(data) {
+ var config = this.hls.config;
- if (!mediaSource || mediaSource.readyState !== 'open') {
- this._needsEos = false;
- return;
+ if (config.capLevelOnFPSDrop) {
+ var video = this.video = data.media instanceof window.HTMLVideoElement ? data.media : null;
+
+ if (typeof video.getVideoPlaybackQuality === 'function') {
+ this.isVideoPlaybackQualityAvailable = true;
+ }
+
+ clearInterval(this.timer);
+ this.timer = setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
}
+ };
- for (var type in sourceBuffer) {
- var sb = sourceBuffer[type];
- if (!sb) continue;
+ _proto.checkFPS = function checkFPS(video, decodedFrames, droppedFrames) {
+ var currentTime = fps_controller_performance.now();
- if (!sb.ended) {
- return;
+ if (decodedFrames) {
+ if (this.lastTime) {
+ var currentPeriod = currentTime - this.lastTime,
+ currentDropped = droppedFrames - this.lastDroppedFrames,
+ currentDecoded = decodedFrames - this.lastDecodedFrames,
+ droppedFPS = 1000 * currentDropped / currentPeriod,
+ hls = this.hls;
+ hls.trigger(events["default"].FPS_DROP, {
+ currentDropped: currentDropped,
+ currentDecoded: currentDecoded,
+ totalDroppedFrames: droppedFrames
+ });
+
+ if (droppedFPS > 0) {
+ // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
+ if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
+ var currentLevel = hls.currentLevel;
+ logger["logger"].warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
+
+ if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
+ currentLevel = currentLevel - 1;
+ hls.trigger(events["default"].FPS_DROP_LEVEL_CAPPING, {
+ level: currentLevel,
+ droppedLevel: hls.currentLevel
+ });
+ hls.autoLevelCapping = currentLevel;
+ hls.streamController.nextLevelSwitch();
+ }
+ }
+ }
}
- if (sb.updating) {
- this._needsEos = true;
- return;
- }
+ this.lastTime = currentTime;
+ this.lastDroppedFrames = droppedFrames;
+ this.lastDecodedFrames = decodedFrames;
}
+ };
- logger["logger"].log('all media data are available, signal endOfStream() to MediaSource and stop loading fragment'); // Notify the media element that it now has all of the media data
+ _proto.checkFPSInterval = function checkFPSInterval() {
+ var video = this.video;
- try {
- mediaSource.endOfStream();
- } catch (e) {
- logger["logger"].warn('exception while calling mediaSource.endOfStream()');
+ if (video) {
+ if (this.isVideoPlaybackQualityAvailable) {
+ var videoPlaybackQuality = video.getVideoPlaybackQuality();
+ this.checkFPS(video, videoPlaybackQuality.totalVideoFrames, videoPlaybackQuality.droppedVideoFrames);
+ } else {
+ this.checkFPS(video, video.webkitDecodedFrameCount, video.webkitDroppedFrameCount);
+ }
}
-
- this._needsEos = false;
};
- _proto.onBufferFlushing = function onBufferFlushing(data) {
- if (data.type) {
- this.flushRange.push({
- start: data.startOffset,
- end: data.endOffset,
- type: data.type
- });
- } else {
- this.flushRange.push({
- start: data.startOffset,
- end: data.endOffset,
- type: 'video'
- });
- this.flushRange.push({
- start: data.startOffset,
- end: data.endOffset,
- type: 'audio'
- });
- } // attempt flush immediately
+ return FPSController;
+}(event_handler);
+/* harmony default export */ var fps_controller = (fps_controller_FPSController);
+// CONCATENATED MODULE: ./src/utils/xhr-loader.js
+/**
+ * XHR based logger
+*/
- this.flushBufferCounter = 0;
- this.doFlush();
- };
+var xhr_loader_window = window,
+ xhr_loader_performance = xhr_loader_window.performance,
+ xhr_loader_XMLHttpRequest = xhr_loader_window.XMLHttpRequest;
- _proto.flushLiveBackBuffer = function flushLiveBackBuffer() {
- // clear back buffer for live only
- if (!this._live) {
- return;
+var xhr_loader_XhrLoader =
+/*#__PURE__*/
+function () {
+ function XhrLoader(config) {
+ if (config && config.xhrSetup) {
+ this.xhrSetup = config.xhrSetup;
}
+ }
- var liveBackBufferLength = this.config.liveBackBufferLength;
+ var _proto = XhrLoader.prototype;
- if (!isFinite(liveBackBufferLength) || liveBackBufferLength < 0) {
- return;
- }
+ _proto.destroy = function destroy() {
+ this.abort();
+ this.loader = null;
+ };
- if (!this.media) {
- logger["logger"].error('flushLiveBackBuffer called without attaching media');
- return;
+ _proto.abort = function abort() {
+ var loader = this.loader;
+
+ if (loader && loader.readyState !== 4) {
+ this.stats.aborted = true;
+ loader.abort();
}
- var currentTime = this.media.currentTime;
- var sourceBuffer = this.sourceBuffer;
- var bufferTypes = Object.keys(sourceBuffer);
- var targetBackBufferPosition = currentTime - Math.max(liveBackBufferLength, this._levelTargetDuration);
+ window.clearTimeout(this.requestTimeout);
+ this.requestTimeout = null;
+ window.clearTimeout(this.retryTimeout);
+ this.retryTimeout = null;
+ };
- for (var index = bufferTypes.length - 1; index >= 0; index--) {
- var bufferType = bufferTypes[index];
- var sb = sourceBuffer[bufferType];
+ _proto.load = function load(context, config, callbacks) {
+ this.context = context;
+ this.config = config;
+ this.callbacks = callbacks;
+ this.stats = {
+ trequest: xhr_loader_performance.now(),
+ retry: 0
+ };
+ this.retryDelay = config.retryDelay;
+ this.loadInternal();
+ };
- if (sb) {
- var buffered = sb.buffered; // when target buffer start exceeds actual buffer start
+ _proto.loadInternal = function loadInternal() {
+ var xhr,
+ context = this.context;
+ xhr = this.loader = new xhr_loader_XMLHttpRequest();
+ var stats = this.stats;
+ stats.tfirst = 0;
+ stats.loaded = 0;
+ var xhrSetup = this.xhrSetup;
- if (buffered.length > 0 && targetBackBufferPosition > buffered.start(0)) {
- // remove buffer up until current time minus minimum back buffer length (removing buffer too close to current
- // time will lead to playback freezing)
- // credits for level target duration - https://github.com/videojs/http-streaming/blob/3132933b6aa99ddefab29c10447624efd6fd6e52/src/segment-loader.js#L91
- this.removeBufferRange(bufferType, sb, 0, targetBackBufferPosition);
- this.hls.trigger(events["default"].LIVE_BACK_BUFFER_REACHED, {
- bufferEnd: targetBackBufferPosition
- });
+ try {
+ if (xhrSetup) {
+ try {
+ xhrSetup(xhr, context.url);
+ } catch (e) {
+ // fix xhrSetup: (xhr, url) => {xhr.setRequestHeader("Content-Language", "test");}
+ // not working, as xhr.setRequestHeader expects xhr.readyState === OPEN
+ xhr.open('GET', context.url, true);
+ xhrSetup(xhr, context.url);
}
}
- }
- };
- _proto.onLevelUpdated = function onLevelUpdated(_ref) {
- var details = _ref.details;
+ if (!xhr.readyState) {
+ xhr.open('GET', context.url, true);
+ }
+ } catch (e) {
+ // IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
+ this.callbacks.onError({
+ code: xhr.status,
+ text: e.message
+ }, context, xhr);
+ return;
+ }
- if (details.fragments.length > 0) {
- this._levelDuration = details.totalduration + details.fragments[0].start;
- this._levelTargetDuration = details.averagetargetduration || details.targetduration || 10;
- this._live = details.live;
- this.updateMediaElementDuration();
+ if (context.rangeEnd) {
+ xhr.setRequestHeader('Range', 'bytes=' + context.rangeStart + '-' + (context.rangeEnd - 1));
}
- }
- /**
- * Update Media Source duration to current level duration or override to Infinity if configuration parameter
- * 'liveDurationInfinity` is set to `true`
- * More details: https://github.com/video-dev/hls.js/issues/355
- */
- ;
- _proto.updateMediaElementDuration = function updateMediaElementDuration() {
- var config = this.config;
- var duration;
+ xhr.onreadystatechange = this.readystatechange.bind(this);
+ xhr.onprogress = this.loadprogress.bind(this);
+ xhr.responseType = context.responseType; // setup timeout before we perform request
- if (this._levelDuration === null || !this.media || !this.mediaSource || !this.sourceBuffer || this.media.readyState === 0 || this.mediaSource.readyState !== 'open') {
- return;
- }
+ this.requestTimeout = window.setTimeout(this.loadtimeout.bind(this), this.config.timeout);
+ xhr.send();
+ };
- for (var type in this.sourceBuffer) {
- var sb = this.sourceBuffer[type];
+ _proto.readystatechange = function readystatechange(event) {
+ var xhr = event.currentTarget,
+ readyState = xhr.readyState,
+ stats = this.stats,
+ context = this.context,
+ config = this.config; // don't proceed if xhr has been aborted
- if (sb && sb.updating === true) {
- // can't set duration whilst a buffer is updating
- return;
- }
- }
+ if (stats.aborted) {
+ return;
+ } // >= HEADERS_RECEIVED
- duration = this.media.duration; // initialise to the value that the media source is reporting
- if (this._msDuration === null) {
- this._msDuration = this.mediaSource.duration;
- }
+ if (readyState >= 2) {
+ // clear xhr timeout and rearm it if readyState less than 4
+ window.clearTimeout(this.requestTimeout);
- if (this._live === true && config.liveDurationInfinity === true) {
- // Override duration to Infinity
- logger["logger"].log('Media Source duration is set to Infinity');
- this._msDuration = this.mediaSource.duration = Infinity;
- } else if (this._levelDuration > this._msDuration && this._levelDuration > duration || !Object(number_isFinite["isFiniteNumber"])(duration)) {
- // levelDuration was the last value we set.
- // not using mediaSource.duration as the browser may tweak this value
- // only update Media Source duration if its value increase, this is to avoid
- // flushing already buffered portion when switching between quality level
- logger["logger"].log("Updating Media Source duration to " + this._levelDuration.toFixed(3));
- this._msDuration = this.mediaSource.duration = this._levelDuration;
- }
- };
+ if (stats.tfirst === 0) {
+ stats.tfirst = Math.max(xhr_loader_performance.now(), stats.trequest);
+ }
- _proto.doFlush = function doFlush() {
- // loop through all buffer ranges to flush
- while (this.flushRange.length) {
- var range = this.flushRange[0]; // flushBuffer will abort any buffer append in progress and flush Audio/Video Buffer
+ if (readyState === 4) {
+ var status = xhr.status; // http status between 200 to 299 are all successful
- if (this.flushBuffer(range.start, range.end, range.type)) {
- // range flushed, remove from flush array
- this.flushRange.shift();
- this.flushBufferCounter = 0;
- } else {
- this._needsFlush = true; // avoid looping, wait for SB update end to retrigger a flush
+ if (status >= 200 && status < 300) {
+ stats.tload = Math.max(stats.tfirst, xhr_loader_performance.now());
+ var data, len;
- return;
- }
- }
+ if (context.responseType === 'arraybuffer') {
+ data = xhr.response;
+ len = data.byteLength;
+ } else {
+ data = xhr.responseText;
+ len = data.length;
+ }
- if (this.flushRange.length === 0) {
- // everything flushed
- this._needsFlush = false; // let's recompute this.appended, which is used to avoid flush looping
+ stats.loaded = stats.total = len;
+ var response = {
+ url: xhr.responseURL,
+ data: data
+ };
+ this.callbacks.onSuccess(response, stats, context, xhr);
+ } else {
+ // if max nb of retries reached or if http status between 400 and 499 (such error cannot be recovered, retrying is useless), return error
+ if (stats.retry >= config.maxRetry) {
+ logger["logger"].error(status + " while loading " + context.url);
+ this.callbacks.onError({
+ code: status,
+ text: xhr.statusText
+ }, context, xhr);
+ } else {
+ // retry
+ logger["logger"].warn(status + " while loading " + context.url + ", retrying in " + this.retryDelay + "..."); // aborts and resets internal state
- var appended = 0;
- var sourceBuffer = this.sourceBuffer;
+ this.destroy(); // schedule retry
- try {
- for (var type in sourceBuffer) {
- var sb = sourceBuffer[type];
+ this.retryTimeout = window.setTimeout(this.loadInternal.bind(this), this.retryDelay); // set exponential backoff
- if (sb) {
- appended += sb.buffered.length;
+ this.retryDelay = Math.min(2 * this.retryDelay, config.maxRetryDelay);
+ stats.retry++;
}
}
- } catch (error) {
- // error could be thrown while accessing buffered, in case sourcebuffer has already been removed from MediaSource
- // this is harmess at this stage, catch this to avoid reporting an internal exception
- logger["logger"].error('error while accessing sourceBuffer.buffered');
+ } else {
+ // readyState >= 2 AND readyState !==4 (readyState = HEADERS_RECEIVED || LOADING) rearm timeout as xhr not finished yet
+ this.requestTimeout = window.setTimeout(this.loadtimeout.bind(this), config.timeout);
}
-
- this.appended = appended;
- this.hls.trigger(events["default"].BUFFER_FLUSHED);
}
};
- _proto.doAppending = function doAppending() {
- var config = this.config,
- hls = this.hls,
- segments = this.segments,
- sourceBuffer = this.sourceBuffer;
+ _proto.loadtimeout = function loadtimeout() {
+ logger["logger"].warn("timeout while loading " + this.context.url);
+ this.callbacks.onTimeout(this.stats, this.context, null);
+ };
- if (!Object.keys(sourceBuffer).length) {
- // early exit if no source buffers have been initialized yet
- return;
- }
+ _proto.loadprogress = function loadprogress(event) {
+ var xhr = event.currentTarget,
+ stats = this.stats;
+ stats.loaded = event.loaded;
- if (!this.media || this.media.error) {
- this.segments = [];
- logger["logger"].error('trying to append although a media error occured, flush segment and abort');
- return;
+ if (event.lengthComputable) {
+ stats.total = event.total;
}
- if (this.appending) {
- // logger.log(`sb appending in progress`);
- return;
+ var onProgress = this.callbacks.onProgress;
+
+ if (onProgress) {
+ // third arg is to provide on progress data
+ onProgress(stats, this.context, null, xhr);
}
+ };
- var segment = segments.shift();
+ return XhrLoader;
+}();
- if (!segment) {
- // handle undefined shift
- return;
- }
+/* harmony default export */ var xhr_loader = (xhr_loader_XhrLoader);
+// CONCATENATED MODULE: ./src/task-loop.ts
+function task_loop_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
- try {
- var sb = sourceBuffer[segment.type];
+function task_loop_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
- if (!sb) {
- // in case we don't have any source buffer matching with this segment type,
- // it means that Mediasource fails to create sourcebuffer
- // discard this segment, and trigger update end
- this._onSBUpdateEnd();
- return;
- }
- if (sb.updating) {
- // if we are still updating the source buffer from the last segment, place this back at the front of the queue
- segments.unshift(segment);
- return;
- } // reset sourceBuffer ended flag before appending segment
+/**
+ * Sub-class specialization of EventHandler base class.
+ *
+ * TaskLoop allows to schedule a task function being called (optionnaly repeatedly) on the main loop,
+ * scheduled asynchroneously, avoiding recursive calls in the same tick.
+ *
+ * The task itself is implemented in `doTick`. It can be requested and called for single execution
+ * using the `tick` method.
+ *
+ * It will be assured that the task execution method (`tick`) only gets called once per main loop "tick",
+ * no matter how often it gets requested for execution. Execution in further ticks will be scheduled accordingly.
+ *
+ * If further execution requests have already been scheduled on the next tick, it can be checked with `hasNextTick`,
+ * and cancelled with `clearNextTick`.
+ *
+ * The task can be scheduled as an interval repeatedly with a period as parameter (see `setInterval`, `clearInterval`).
+ *
+ * Sub-classes need to implement the `doTick` method which will effectively have the task execution routine.
+ *
+ * Further explanations:
+ *
+ * The baseclass has a `tick` method that will schedule the doTick call. It may be called synchroneously
+ * only for a stack-depth of one. On re-entrant calls, sub-sequent calls are scheduled for next main loop ticks.
+ *
+ * When the task execution (`tick` method) is called in re-entrant way this is detected and
+ * we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
+ * task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
+ */
+var TaskLoop =
+/*#__PURE__*/
+function (_EventHandler) {
+ task_loop_inheritsLoose(TaskLoop, _EventHandler);
+ function TaskLoop(hls) {
+ var _this;
- sb.ended = false; // logger.log(`appending ${segment.content} ${type} SB, size:${segment.data.length}, ${segment.parent}`);
+ for (var _len = arguments.length, events = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
+ events[_key - 1] = arguments[_key];
+ }
- this.parent = segment.parent;
- sb.appendBuffer(segment.data);
- this.appendError = 0;
- this.appended++;
- this.appending = true;
- } catch (err) {
- // in case any error occured while appending, put back segment in segments table
- logger["logger"].error("error while trying to append buffer:" + err.message);
- segments.unshift(segment);
- var event = {
- type: errors["ErrorTypes"].MEDIA_ERROR,
- parent: segment.parent,
- details: '',
- fatal: false
- };
+ _this = _EventHandler.call.apply(_EventHandler, [this, hls].concat(events)) || this;
+ _this._boundTick = void 0;
+ _this._tickTimer = null;
+ _this._tickInterval = null;
+ _this._tickCallCount = 0;
+ _this._boundTick = _this.tick.bind(task_loop_assertThisInitialized(_this));
+ return _this;
+ }
+ /**
+ * @override
+ */
- if (err.code === 22) {
- // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
- // let's stop appending any segments, and report BUFFER_FULL_ERROR error
- this.segments = [];
- event.details = errors["ErrorDetails"].BUFFER_FULL_ERROR;
- } else {
- this.appendError++;
- event.details = errors["ErrorDetails"].BUFFER_APPEND_ERROR;
- /* with UHD content, we could get loop of quota exceeded error until
- browser is able to evict some data from sourcebuffer. retrying help recovering this
- */
- if (this.appendError > config.appendErrorMaxRetry) {
- logger["logger"].log("fail " + config.appendErrorMaxRetry + " times to append segment in sourceBuffer");
- this.segments = [];
- event.fatal = true;
- }
- }
+ var _proto = TaskLoop.prototype;
- hls.trigger(events["default"].ERROR, event);
- }
+ _proto.onHandlerDestroying = function onHandlerDestroying() {
+ // clear all timers before unregistering from event bus
+ this.clearNextTick();
+ this.clearInterval();
}
- /*
- flush specified buffered range,
- return true once range has been flushed.
- as sourceBuffer.remove() is asynchronous, flushBuffer will be retriggered on sourceBuffer update end
- */
+ /**
+ * @returns {boolean}
+ */
;
- _proto.flushBuffer = function flushBuffer(startOffset, endOffset, sbType) {
- var sourceBuffer = this.sourceBuffer; // exit if no sourceBuffers are initialized
+ _proto.hasInterval = function hasInterval() {
+ return !!this._tickInterval;
+ }
+ /**
+ * @returns {boolean}
+ */
+ ;
- if (!Object.keys(sourceBuffer).length) {
+ _proto.hasNextTick = function hasNextTick() {
+ return !!this._tickTimer;
+ }
+ /**
+ * @param {number} millis Interval time (ms)
+ * @returns {boolean} True when interval has been scheduled, false when already scheduled (no effect)
+ */
+ ;
+
+ _proto.setInterval = function setInterval(millis) {
+ if (!this._tickInterval) {
+ this._tickInterval = self.setInterval(this._boundTick, millis);
return true;
}
- var currentTime = 'null';
+ return false;
+ }
+ /**
+ * @returns {boolean} True when interval was cleared, false when none was set (no effect)
+ */
+ ;
- if (this.media) {
- currentTime = this.media.currentTime.toFixed(3);
+ _proto.clearInterval = function clearInterval() {
+ if (this._tickInterval) {
+ self.clearInterval(this._tickInterval);
+ this._tickInterval = null;
+ return true;
}
- logger["logger"].log("flushBuffer,pos/start/end: " + currentTime + "/" + startOffset + "/" + endOffset); // safeguard to avoid infinite looping : don't try to flush more than the nb of appended segments
+ return false;
+ }
+ /**
+ * @returns {boolean} True when timeout was cleared, false when none was set (no effect)
+ */
+ ;
- if (this.flushBufferCounter >= this.appended) {
- logger["logger"].warn('abort flushing too many retries');
+ _proto.clearNextTick = function clearNextTick() {
+ if (this._tickTimer) {
+ self.clearTimeout(this._tickTimer);
+ this._tickTimer = null;
return true;
}
- var sb = sourceBuffer[sbType]; // we are going to flush buffer, mark source buffer as 'not ended'
+ return false;
+ }
+ /**
+ * Will call the subclass doTick implementation in this main loop tick
+ * or in the next one (via setTimeout(,0)) in case it has already been called
+ * in this tick (in case this is a re-entrant call).
+ */
+ ;
- if (sb) {
- sb.ended = false;
+ _proto.tick = function tick() {
+ this._tickCallCount++;
- if (!sb.updating) {
- if (this.removeBufferRange(sbType, sb, startOffset, endOffset)) {
- this.flushBufferCounter++;
- return false;
- }
- } else {
- logger["logger"].warn('cannot flush, sb updating in progress');
- return false;
- }
- }
+ if (this._tickCallCount === 1) {
+ this.doTick(); // re-entrant call to tick from previous doTick call stack
+ // -> schedule a call on the next main loop iteration to process this task processing request
- logger["logger"].log('buffer flushed'); // everything flushed !
+ if (this._tickCallCount > 1) {
+ // make sure only one timer exists at any time at max
+ this.clearNextTick();
+ this._tickTimer = self.setTimeout(this._boundTick, 0);
+ }
- return true;
+ this._tickCallCount = 0;
+ }
}
/**
- * Removes first buffered range from provided source buffer that lies within given start and end offsets.
- *
- * @param {string} type Type of the source buffer, logging purposes only.
- * @param {SourceBuffer} sb Target SourceBuffer instance.
- * @param {number} startOffset
- * @param {number} endOffset
- *
- * @returns {boolean} True when source buffer remove requested.
+ * For subclass to implement task logic
+ * @abstract
*/
;
- _proto.removeBufferRange = function removeBufferRange(type, sb, startOffset, endOffset) {
- try {
- for (var i = 0; i < sb.buffered.length; i++) {
- var bufStart = sb.buffered.start(i);
- var bufEnd = sb.buffered.end(i);
- var removeStart = Math.max(bufStart, startOffset);
- var removeEnd = Math.min(bufEnd, endOffset);
- /* sometimes sourcebuffer.remove() does not flush
- the exact expected time range.
- to avoid rounding issues/infinite loop,
- only flush buffer range of length greater than 500ms.
- */
-
- if (Math.min(removeEnd, bufEnd) - removeStart > 0.5) {
- var currentTime = 'null';
+ _proto.doTick = function doTick() {};
- if (this.media) {
- currentTime = this.media.currentTime.toString();
- }
+ return TaskLoop;
+}(event_handler);
- logger["logger"].log("sb remove " + type + " [" + removeStart + "," + removeEnd + "], of [" + bufStart + "," + bufEnd + "], pos:" + currentTime);
- sb.remove(removeStart, removeEnd);
- return true;
- }
- }
- } catch (error) {
- logger["logger"].warn('removeBufferRange failed', error);
- }
- return false;
- };
+// CONCATENATED MODULE: ./src/controller/audio-track-controller.js
+function audio_track_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
- return BufferController;
-}(event_handler);
+function audio_track_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) audio_track_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) audio_track_controller_defineProperties(Constructor, staticProps); return Constructor; }
-/* harmony default export */ var buffer_controller = (buffer_controller_BufferController);
-// CONCATENATED MODULE: ./src/controller/cap-level-controller.js
-function cap_level_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+function audio_track_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
-function cap_level_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) cap_level_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) cap_level_controller_defineProperties(Constructor, staticProps); return Constructor; }
-function cap_level_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
-/*
- * cap stream level to media size dimension controller
-*/
+/**
+ * @class AudioTrackController
+ * @implements {EventHandler}
+ *
+ * Handles main manifest and audio-track metadata loaded,
+ * owns and exposes the selectable audio-tracks data-models.
+ *
+ * Exposes internal interface to select available audio-tracks.
+ *
+ * Handles errors on loading audio-track playlists. Manages fallback mechanism
+ * with redundants tracks (group-IDs).
+ *
+ * Handles level-loading and group-ID switches for video (fallback on video levels),
+ * and eventually adapts the audio-track group-ID to match.
+ *
+ * @fires AUDIO_TRACK_LOADING
+ * @fires AUDIO_TRACK_SWITCHING
+ * @fires AUDIO_TRACKS_UPDATED
+ * @fires ERROR
+ *
+ */
-var cap_level_controller_CapLevelController =
+var audio_track_controller_AudioTrackController =
/*#__PURE__*/
-function (_EventHandler) {
- cap_level_controller_inheritsLoose(CapLevelController, _EventHandler);
+function (_TaskLoop) {
+ audio_track_controller_inheritsLoose(AudioTrackController, _TaskLoop);
- function CapLevelController(hls) {
+ function AudioTrackController(hls) {
var _this;
- _this = _EventHandler.call(this, hls, events["default"].FPS_DROP_LEVEL_CAPPING, events["default"].MEDIA_ATTACHING, events["default"].MANIFEST_PARSED, events["default"].BUFFER_CODECS, events["default"].MEDIA_DETACHING) || this;
- _this.autoLevelCapping = Number.POSITIVE_INFINITY;
- _this.firstLevel = null;
- _this.levels = [];
- _this.media = null;
- _this.restrictedLevels = [];
- _this.timer = null;
+ _this = _TaskLoop.call(this, hls, events["default"].MANIFEST_LOADING, events["default"].MANIFEST_PARSED, events["default"].AUDIO_TRACK_LOADED, events["default"].AUDIO_TRACK_SWITCHED, events["default"].LEVEL_LOADED, events["default"].ERROR) || this;
+ /**
+ * @private
+ * Currently selected index in `tracks`
+ * @member {number} trackId
+ */
+
+ _this._trackId = -1;
+ /**
+ * @private
+ * If should select tracks according to default track attribute
+ * @member {boolean} _selectDefaultTrack
+ */
+
+ _this._selectDefaultTrack = true;
+ /**
+ * @public
+ * All tracks available
+ * @member {AudioTrack[]}
+ */
+
+ _this.tracks = [];
+ /**
+ * @public
+ * List of blacklisted audio track IDs (that have caused failure)
+ * @member {number[]}
+ */
+
+ _this.trackIdBlacklist = Object.create(null);
+ /**
+ * @public
+ * The currently running group ID for audio
+ * (we grab this on manifest-parsed and new level-loaded)
+ * @member {string}
+ */
+
+ _this.audioGroupId = null;
return _this;
}
+ /**
+ * Reset audio tracks on new manifest loading.
+ */
- var _proto = CapLevelController.prototype;
- _proto.destroy = function destroy() {
- if (this.hls.config.capLevelToPlayerSize) {
- this.media = null;
- this.stopCapping();
- }
- };
+ var _proto = AudioTrackController.prototype;
- _proto.onFpsDropLevelCapping = function onFpsDropLevelCapping(data) {
- // Don't add a restricted level more than once
- if (CapLevelController.isLevelAllowed(data.droppedLevel, this.restrictedLevels)) {
- this.restrictedLevels.push(data.droppedLevel);
+ _proto.onManifestLoading = function onManifestLoading() {
+ this.tracks = [];
+ this._trackId = -1;
+ this._selectDefaultTrack = true;
+ }
+ /**
+ * Store tracks data from manifest parsed data.
+ *
+ * Trigger AUDIO_TRACKS_UPDATED event.
+ *
+ * @param {*} data
+ */
+ ;
+
+ _proto.onManifestParsed = function onManifestParsed(data) {
+ var tracks = this.tracks = data.audioTracks || [];
+ this.hls.trigger(events["default"].AUDIO_TRACKS_UPDATED, {
+ audioTracks: tracks
+ });
+
+ this._selectAudioGroup(this.hls.nextLoadLevel);
+ }
+ /**
+ * Store track details of loaded track in our data-model.
+ *
+ * Set-up metadata update interval task for live-mode streams.
+ *
+ * @param {*} data
+ */
+ ;
+
+ _proto.onAudioTrackLoaded = function onAudioTrackLoaded(data) {
+ if (data.id >= this.tracks.length) {
+ logger["logger"].warn('Invalid audio track id:', data.id);
+ return;
}
- };
- _proto.onMediaAttaching = function onMediaAttaching(data) {
- this.media = data.media instanceof window.HTMLVideoElement ? data.media : null;
- };
+ logger["logger"].log("audioTrack " + data.id + " loaded");
+ this.tracks[data.id].details = data.details; // check if current playlist is a live playlist
+ // and if we have already our reload interval setup
- _proto.onManifestParsed = function onManifestParsed(data) {
- var hls = this.hls;
- this.restrictedLevels = [];
- this.levels = data.levels;
- this.firstLevel = data.firstLevel;
+ if (data.details.live && !this.hasInterval()) {
+ // if live playlist we will have to reload it periodically
+ // set reload period to playlist target duration
+ var updatePeriodMs = data.details.targetduration * 1000;
+ this.setInterval(updatePeriodMs);
+ }
- if (hls.config.capLevelToPlayerSize && data.video) {
- // Start capping immediately if the manifest has signaled video codecs
- this.startCapping();
+ if (!data.details.live && this.hasInterval()) {
+ // playlist is not live and timer is scheduled: cancel it
+ this.clearInterval();
}
- } // Only activate capping when playing a video stream; otherwise, multi-bitrate audio-only streams will be restricted
- // to the first level
+ }
+ /**
+ * Update the internal group ID to any audio-track we may have set manually
+ * or because of a failure-handling fallback.
+ *
+ * Quality-levels should update to that group ID in this case.
+ *
+ * @param {*} data
+ */
;
- _proto.onBufferCodecs = function onBufferCodecs(data) {
- var hls = this.hls;
+ _proto.onAudioTrackSwitched = function onAudioTrackSwitched(data) {
+ var audioGroupId = this.tracks[data.id].groupId;
- if (hls.config.capLevelToPlayerSize && data.video) {
- // If the manifest did not signal a video codec capping has been deferred until we're certain video is present
- this.startCapping();
+ if (audioGroupId && this.audioGroupId !== audioGroupId) {
+ this.audioGroupId = audioGroupId;
}
- };
+ }
+ /**
+ * When a level gets loaded, if it has redundant audioGroupIds (in the same ordinality as it's redundant URLs)
+ * we are setting our audio-group ID internally to the one set, if it is different from the group ID currently set.
+ *
+ * If group-ID got update, we re-select the appropriate audio-track with this group-ID matching the currently
+ * selected one (based on NAME property).
+ *
+ * @param {*} data
+ */
+ ;
- _proto.onLevelsUpdated = function onLevelsUpdated(data) {
- this.levels = data.levels;
- };
+ _proto.onLevelLoaded = function onLevelLoaded(data) {
+ this._selectAudioGroup(data.level);
+ }
+ /**
+ * Handle network errors loading audio track manifests
+ * and also pausing on any netwok errors.
+ *
+ * @param {ErrorEventData} data
+ */
+ ;
- _proto.onMediaDetaching = function onMediaDetaching() {
- this.stopCapping();
- };
+ _proto.onError = function onError(data) {
+ // Only handle network errors
+ if (data.type !== errors["ErrorTypes"].NETWORK_ERROR) {
+ return;
+ } // If fatal network error, cancel update task
- _proto.detectPlayerSize = function detectPlayerSize() {
- if (this.media) {
- var levelsLength = this.levels ? this.levels.length : 0;
- if (levelsLength) {
- var hls = this.hls;
- hls.autoLevelCapping = this.getMaxLevel(levelsLength - 1);
+ if (data.fatal) {
+ this.clearInterval();
+ } // If not an audio-track loading error don't handle further
- if (hls.autoLevelCapping > this.autoLevelCapping) {
- // if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
- // usually happen when the user go to the fullscreen mode.
- hls.streamController.nextLevelSwitch();
- }
- this.autoLevelCapping = hls.autoLevelCapping;
- }
+ if (data.details !== errors["ErrorDetails"].AUDIO_TRACK_LOAD_ERROR) {
+ return;
}
+
+ logger["logger"].warn('Network failure on audio-track id:', data.context.id);
+
+ this._handleLoadError();
}
- /*
- * returns level should be the one with the dimensions equal or greater than the media (player) dimensions (so the video will be downscaled)
- */
+ /**
+ * @type {AudioTrack[]} Audio-track list we own
+ */
;
- _proto.getMaxLevel = function getMaxLevel(capLevelIndex) {
- var _this2 = this;
+ /**
+ * @private
+ * @param {number} newId
+ */
+ _proto._setAudioTrack = function _setAudioTrack(newId) {
+ // noop on same audio track id as already set
+ if (this._trackId === newId && this.tracks[this._trackId].details) {
+ logger["logger"].debug('Same id as current audio-track passed, and track details available -> no-op');
+ return;
+ } // check if level idx is valid
- if (!this.levels) {
- return -1;
+
+ if (newId < 0 || newId >= this.tracks.length) {
+ logger["logger"].warn('Invalid id passed to audio-track controller');
+ return;
}
- var validLevels = this.levels.filter(function (level, index) {
- return CapLevelController.isLevelAllowed(index, _this2.restrictedLevels) && index <= capLevelIndex;
+ var audioTrack = this.tracks[newId];
+ logger["logger"].log("Now switching to audio-track index " + newId); // stopping live reloading timer if any
+
+ this.clearInterval();
+ this._trackId = newId;
+ var url = audioTrack.url,
+ type = audioTrack.type,
+ id = audioTrack.id;
+ this.hls.trigger(events["default"].AUDIO_TRACK_SWITCHING, {
+ id: id,
+ type: type,
+ url: url
});
- return CapLevelController.getMaxLevelByMediaSize(validLevels, this.mediaWidth, this.mediaHeight);
- };
- _proto.startCapping = function startCapping() {
- if (this.timer) {
- // Don't reset capping if started twice; this can happen if the manifest signals a video codec
- return;
- }
+ this._loadTrackDetailsIfNeeded(audioTrack);
+ }
+ /**
+ * @override
+ */
+ ;
- this.autoLevelCapping = Number.POSITIVE_INFINITY;
- this.hls.firstLevel = this.getMaxLevel(this.firstLevel);
- clearInterval(this.timer);
- this.timer = setInterval(this.detectPlayerSize.bind(this), 1000);
- this.detectPlayerSize();
- };
+ _proto.doTick = function doTick() {
+ this._updateTrack(this._trackId);
+ }
+ /**
+ * @param levelId
+ * @private
+ */
+ ;
- _proto.stopCapping = function stopCapping() {
- this.restrictedLevels = [];
- this.firstLevel = null;
- this.autoLevelCapping = Number.POSITIVE_INFINITY;
+ _proto._selectAudioGroup = function _selectAudioGroup(levelId) {
+ var levelInfo = this.hls.levels[levelId];
- if (this.timer) {
- this.timer = clearInterval(this.timer);
- this.timer = null;
+ if (!levelInfo || !levelInfo.audioGroupIds) {
+ return;
}
- };
- CapLevelController.isLevelAllowed = function isLevelAllowed(level, restrictedLevels) {
- if (restrictedLevels === void 0) {
- restrictedLevels = [];
- }
+ var audioGroupId = levelInfo.audioGroupIds[levelInfo.urlId];
- return restrictedLevels.indexOf(level) === -1;
- };
+ if (this.audioGroupId !== audioGroupId) {
+ this.audioGroupId = audioGroupId;
- CapLevelController.getMaxLevelByMediaSize = function getMaxLevelByMediaSize(levels, width, height) {
- if (!levels || levels && !levels.length) {
- return -1;
- } // Levels can have the same dimensions but differing bandwidths - since levels are ordered, we can look to the next
- // to determine whether we've chosen the greatest bandwidth for the media's dimensions
+ this._selectInitialAudioTrack();
+ }
+ }
+ /**
+ * Select initial track
+ * @private
+ */
+ ;
+ _proto._selectInitialAudioTrack = function _selectInitialAudioTrack() {
+ var _this2 = this;
- var atGreatestBandiwdth = function atGreatestBandiwdth(curLevel, nextLevel) {
- if (!nextLevel) {
- return true;
- }
+ var tracks = this.tracks;
- return curLevel.width !== nextLevel.width || curLevel.height !== nextLevel.height;
- }; // If we run through the loop without breaking, the media's dimensions are greater than every level, so default to
- // the max level
+ if (!tracks.length) {
+ return;
+ }
+ var currentAudioTrack = this.tracks[this._trackId];
+ var name = null;
- var maxLevelIndex = levels.length - 1;
+ if (currentAudioTrack) {
+ name = currentAudioTrack.name;
+ } // Pre-select default tracks if there are any
- for (var i = 0; i < levels.length; i += 1) {
- var level = levels[i];
- if ((level.width >= width || level.height >= height) && atGreatestBandiwdth(level, levels[i + 1])) {
- maxLevelIndex = i;
- break;
+ if (this._selectDefaultTrack) {
+ var defaultTracks = tracks.filter(function (track) {
+ return track.default;
+ });
+
+ if (defaultTracks.length) {
+ tracks = defaultTracks;
+ } else {
+ logger["logger"].warn('No default audio tracks defined');
}
}
- return maxLevelIndex;
- };
+ var trackFound = false;
- cap_level_controller_createClass(CapLevelController, [{
- key: "mediaWidth",
- get: function get() {
- var width;
- var media = this.media;
+ var traverseTracks = function traverseTracks() {
+ // Select track with right group ID
+ tracks.forEach(function (track) {
+ if (trackFound) {
+ return;
+ } // We need to match the (pre-)selected group ID
+ // and the NAME of the current track.
- if (media) {
- width = media.width || media.clientWidth || media.offsetWidth;
- width *= CapLevelController.contentScaleFactor;
- }
- return width;
- }
- }, {
- key: "mediaHeight",
- get: function get() {
- var height;
- var media = this.media;
+ if ((!_this2.audioGroupId || track.groupId === _this2.audioGroupId) && (!name || name === track.name)) {
+ // If there was a previous track try to stay with the same `NAME`.
+ // It should be unique across tracks of same group, and consistent through redundant track groups.
+ _this2._setAudioTrack(track.id);
- if (media) {
- height = media.height || media.clientHeight || media.offsetHeight;
- height *= CapLevelController.contentScaleFactor;
- }
+ trackFound = true;
+ }
+ });
+ };
- return height;
- }
- }], [{
- key: "contentScaleFactor",
- get: function get() {
- var pixelRatio = 1;
+ traverseTracks();
- try {
- pixelRatio = window.devicePixelRatio;
- } catch (e) {}
+ if (!trackFound) {
+ name = null;
+ traverseTracks();
+ }
- return pixelRatio;
+ if (!trackFound) {
+ logger["logger"].error("No track found for running audio group-ID: " + this.audioGroupId);
+ this.hls.trigger(events["default"].ERROR, {
+ type: errors["ErrorTypes"].MEDIA_ERROR,
+ details: errors["ErrorDetails"].AUDIO_TRACK_LOAD_ERROR,
+ fatal: true
+ });
}
- }]);
+ }
+ /**
+ * @private
+ * @param {AudioTrack} audioTrack
+ * @returns {boolean}
+ */
+ ;
- return CapLevelController;
-}(event_handler);
+ _proto._needsTrackLoading = function _needsTrackLoading(audioTrack) {
+ var details = audioTrack.details,
+ url = audioTrack.url;
-/* harmony default export */ var cap_level_controller = (cap_level_controller_CapLevelController);
-// CONCATENATED MODULE: ./src/controller/fps-controller.js
-function fps_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+ if (!details || details.live) {
+ // check if we face an audio track embedded in main playlist (audio track without URI attribute)
+ return !!url;
+ }
-/*
- * FPS Controller
-*/
+ return false;
+ }
+ /**
+ * @private
+ * @param {AudioTrack} audioTrack
+ */
+ ;
+ _proto._loadTrackDetailsIfNeeded = function _loadTrackDetailsIfNeeded(audioTrack) {
+ if (this._needsTrackLoading(audioTrack)) {
+ var url = audioTrack.url,
+ id = audioTrack.id; // track not retrieved yet, or live playlist we need to (re)load it
+
+ logger["logger"].log("loading audio-track playlist for id: " + id);
+ this.hls.trigger(events["default"].AUDIO_TRACK_LOADING, {
+ url: url,
+ id: id
+ });
+ }
+ }
+ /**
+ * @private
+ * @param {number} newId
+ */
+ ;
+ _proto._updateTrack = function _updateTrack(newId) {
+ // check if level idx is valid
+ if (newId < 0 || newId >= this.tracks.length) {
+ return;
+ } // stopping live reloading timer if any
-var fps_controller_window = window,
- fps_controller_performance = fps_controller_window.performance;
-var fps_controller_FPSController =
-/*#__PURE__*/
-function (_EventHandler) {
- fps_controller_inheritsLoose(FPSController, _EventHandler);
+ this.clearInterval();
+ this._trackId = newId;
+ logger["logger"].log("trying to update audio-track " + newId);
+ var audioTrack = this.tracks[newId];
- function FPSController(hls) {
- return _EventHandler.call(this, hls, events["default"].MEDIA_ATTACHING) || this;
+ this._loadTrackDetailsIfNeeded(audioTrack);
}
+ /**
+ * @private
+ */
+ ;
- var _proto = FPSController.prototype;
+ _proto._handleLoadError = function _handleLoadError() {
+ // First, let's black list current track id
+ this.trackIdBlacklist[this._trackId] = true; // Let's try to fall back on a functional audio-track with the same group ID
- _proto.destroy = function destroy() {
- if (this.timer) {
- clearInterval(this.timer);
- }
+ var previousId = this._trackId;
+ var _this$tracks$previous = this.tracks[previousId],
+ name = _this$tracks$previous.name,
+ language = _this$tracks$previous.language,
+ groupId = _this$tracks$previous.groupId;
+ logger["logger"].warn("Loading failed on audio track id: " + previousId + ", group-id: " + groupId + ", name/language: \"" + name + "\" / \"" + language + "\""); // Find a non-blacklisted track ID with the same NAME
+ // At least a track that is not blacklisted, thus on another group-ID.
- this.isVideoPlaybackQualityAvailable = false;
- };
+ var newId = previousId;
- _proto.onMediaAttaching = function onMediaAttaching(data) {
- var config = this.hls.config;
+ for (var i = 0; i < this.tracks.length; i++) {
+ if (this.trackIdBlacklist[i]) {
+ continue;
+ }
- if (config.capLevelOnFPSDrop) {
- var video = this.video = data.media instanceof window.HTMLVideoElement ? data.media : null;
+ var newTrack = this.tracks[i];
- if (typeof video.getVideoPlaybackQuality === 'function') {
- this.isVideoPlaybackQualityAvailable = true;
+ if (newTrack.name === name) {
+ newId = i;
+ break;
}
-
- clearInterval(this.timer);
- this.timer = setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
}
- };
- _proto.checkFPS = function checkFPS(video, decodedFrames, droppedFrames) {
- var currentTime = fps_controller_performance.now();
+ if (newId === previousId) {
+ logger["logger"].warn("No fallback audio-track found for name/language: \"" + name + "\" / \"" + language + "\"");
+ return;
+ }
- if (decodedFrames) {
- if (this.lastTime) {
- var currentPeriod = currentTime - this.lastTime,
- currentDropped = droppedFrames - this.lastDroppedFrames,
- currentDecoded = decodedFrames - this.lastDecodedFrames,
- droppedFPS = 1000 * currentDropped / currentPeriod,
- hls = this.hls;
- hls.trigger(events["default"].FPS_DROP, {
- currentDropped: currentDropped,
- currentDecoded: currentDecoded,
- totalDroppedFrames: droppedFrames
- });
+ logger["logger"].log('Attempting audio-track fallback id:', newId, 'group-id:', this.tracks[newId].groupId);
- if (droppedFPS > 0) {
- // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
- if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
- var currentLevel = hls.currentLevel;
- logger["logger"].warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
+ this._setAudioTrack(newId);
+ };
- if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
- currentLevel = currentLevel - 1;
- hls.trigger(events["default"].FPS_DROP_LEVEL_CAPPING, {
- level: currentLevel,
- droppedLevel: hls.currentLevel
- });
- hls.autoLevelCapping = currentLevel;
- hls.streamController.nextLevelSwitch();
- }
- }
- }
- }
+ audio_track_controller_createClass(AudioTrackController, [{
+ key: "audioTracks",
+ get: function get() {
+ return this.tracks;
+ }
+ /**
+ * @type {number} Index into audio-tracks list of currently selected track.
+ */
- this.lastTime = currentTime;
- this.lastDroppedFrames = droppedFrames;
- this.lastDecodedFrames = decodedFrames;
+ }, {
+ key: "audioTrack",
+ get: function get() {
+ return this._trackId;
}
- };
+ /**
+ * Select current track by index
+ */
+ ,
+ set: function set(newId) {
+ this._setAudioTrack(newId); // If audio track is selected from API then don't choose from the manifest default track
- _proto.checkFPSInterval = function checkFPSInterval() {
- var video = this.video;
- if (video) {
- if (this.isVideoPlaybackQualityAvailable) {
- var videoPlaybackQuality = video.getVideoPlaybackQuality();
- this.checkFPS(video, videoPlaybackQuality.totalVideoFrames, videoPlaybackQuality.droppedVideoFrames);
- } else {
- this.checkFPS(video, video.webkitDecodedFrameCount, video.webkitDroppedFrameCount);
- }
+ this._selectDefaultTrack = false;
}
- };
+ }]);
- return FPSController;
-}(event_handler);
+ return AudioTrackController;
+}(TaskLoop);
-/* harmony default export */ var fps_controller = (fps_controller_FPSController);
-// CONCATENATED MODULE: ./src/utils/xhr-loader.js
-/**
- * XHR based logger
-*/
+/* harmony default export */ var audio_track_controller = (audio_track_controller_AudioTrackController);
+// CONCATENATED MODULE: ./src/utils/binary-search.ts
+var BinarySearch = {
+ /**
+ * Searches for an item in an array which matches a certain condition.
+ * This requires the condition to only match one item in the array,
+ * and for the array to be ordered.
+ *
+ * @param {Array} list The array to search.
+ * @param {BinarySearchComparison} comparisonFn
+ * Called and provided a candidate item as the first argument.
+ * Should return:
+ * > -1 if the item should be located at a lower index than the provided item.
+ * > 1 if the item should be located at a higher index than the provided item.
+ * > 0 if the item is the item you're looking for.
+ *
+ * @return {T | null} The object if it is found or null otherwise.
+ */
+ search: function search(list, comparisonFn) {
+ var minIndex = 0;
+ var maxIndex = list.length - 1;
+ var currentIndex = null;
+ var currentElement = null;
-var xhr_loader_window = window,
- xhr_loader_performance = xhr_loader_window.performance,
- xhr_loader_XMLHttpRequest = xhr_loader_window.XMLHttpRequest;
+ while (minIndex <= maxIndex) {
+ currentIndex = (minIndex + maxIndex) / 2 | 0;
+ currentElement = list[currentIndex];
+ var comparisonResult = comparisonFn(currentElement);
-var xhr_loader_XhrLoader =
-/*#__PURE__*/
-function () {
- function XhrLoader(config) {
- if (config && config.xhrSetup) {
- this.xhrSetup = config.xhrSetup;
+ if (comparisonResult > 0) {
+ minIndex = currentIndex + 1;
+ } else if (comparisonResult < 0) {
+ maxIndex = currentIndex - 1;
+ } else {
+ return currentElement;
+ }
}
- }
- var _proto = XhrLoader.prototype;
+ return null;
+ }
+};
+/* harmony default export */ var binary_search = (BinarySearch);
+// EXTERNAL MODULE: ./node_modules/eventemitter3/index.js
+var eventemitter3 = __webpack_require__("./node_modules/eventemitter3/index.js");
- _proto.destroy = function destroy() {
- this.abort();
- this.loader = null;
- };
+// EXTERNAL MODULE: ./node_modules/webworkify-webpack/index.js
+var webworkify_webpack = __webpack_require__("./node_modules/webworkify-webpack/index.js");
- _proto.abort = function abort() {
- var loader = this.loader;
+// EXTERNAL MODULE: ./src/demux/demuxer-inline.js + 12 modules
+var demuxer_inline = __webpack_require__("./src/demux/demuxer-inline.js");
- if (loader && loader.readyState !== 4) {
- this.stats.aborted = true;
- loader.abort();
- }
+// EXTERNAL MODULE: ./src/utils/get-self-scope.js
+var get_self_scope = __webpack_require__("./src/utils/get-self-scope.js");
- window.clearTimeout(this.requestTimeout);
- this.requestTimeout = null;
- window.clearTimeout(this.retryTimeout);
- this.retryTimeout = null;
- };
+// CONCATENATED MODULE: ./src/observer.ts
+function observer_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
- _proto.load = function load(context, config, callbacks) {
- this.context = context;
- this.config = config;
- this.callbacks = callbacks;
- this.stats = {
- trequest: xhr_loader_performance.now(),
- retry: 0
- };
- this.retryDelay = config.retryDelay;
- this.loadInternal();
- };
- _proto.loadInternal = function loadInternal() {
- var xhr,
- context = this.context;
- xhr = this.loader = new xhr_loader_XMLHttpRequest();
- var stats = this.stats;
- stats.tfirst = 0;
- stats.loaded = 0;
- var xhrSetup = this.xhrSetup;
+/**
+ * Simple adapter sub-class of Nodejs-like EventEmitter.
+ */
- try {
- if (xhrSetup) {
- try {
- xhrSetup(xhr, context.url);
- } catch (e) {
- // fix xhrSetup: (xhr, url) => {xhr.setRequestHeader("Content-Language", "test");}
- // not working, as xhr.setRequestHeader expects xhr.readyState === OPEN
- xhr.open('GET', context.url, true);
- xhrSetup(xhr, context.url);
- }
- }
+var Observer =
+/*#__PURE__*/
+function (_EventEmitter) {
+ observer_inheritsLoose(Observer, _EventEmitter);
- if (!xhr.readyState) {
- xhr.open('GET', context.url, true);
- }
- } catch (e) {
- // IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
- this.callbacks.onError({
- code: xhr.status,
- text: e.message
- }, context, xhr);
- return;
- }
+ function Observer() {
+ return _EventEmitter.apply(this, arguments) || this;
+ }
- if (context.rangeEnd) {
- xhr.setRequestHeader('Range', 'bytes=' + context.rangeStart + '-' + (context.rangeEnd - 1));
- }
+ var _proto = Observer.prototype;
- xhr.onreadystatechange = this.readystatechange.bind(this);
- xhr.onprogress = this.loadprogress.bind(this);
- xhr.responseType = context.responseType; // setup timeout before we perform request
+ /**
+ * We simply want to pass along the event-name itself
+ * in every call to a handler, which is the purpose of our `trigger` method
+ * extending the standard API.
+ */
+ _proto.trigger = function trigger(event) {
+ for (var _len = arguments.length, data = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
+ data[_key - 1] = arguments[_key];
+ }
- this.requestTimeout = window.setTimeout(this.loadtimeout.bind(this), this.config.timeout);
- xhr.send();
+ this.emit.apply(this, [event, event].concat(data));
};
- _proto.readystatechange = function readystatechange(event) {
- var xhr = event.currentTarget,
- readyState = xhr.readyState,
- stats = this.stats,
- context = this.context,
- config = this.config; // don't proceed if xhr has been aborted
+ return Observer;
+}(eventemitter3["EventEmitter"]);
+// CONCATENATED MODULE: ./src/demux/demuxer.js
- if (stats.aborted) {
- return;
- } // >= HEADERS_RECEIVED
- if (readyState >= 2) {
- // clear xhr timeout and rearm it if readyState less than 4
- window.clearTimeout(this.requestTimeout);
- if (stats.tfirst === 0) {
- stats.tfirst = Math.max(xhr_loader_performance.now(), stats.trequest);
- }
- if (readyState === 4) {
- var status = xhr.status; // http status between 200 to 299 are all successful
- if (status >= 200 && status < 300) {
- stats.tload = Math.max(stats.tfirst, xhr_loader_performance.now());
- var data, len;
- if (context.responseType === 'arraybuffer') {
- data = xhr.response;
- len = data.byteLength;
- } else {
- data = xhr.responseText;
- len = data.length;
- }
- stats.loaded = stats.total = len;
- var response = {
- url: xhr.responseURL,
- data: data
- };
- this.callbacks.onSuccess(response, stats, context, xhr);
- } else {
- // if max nb of retries reached or if http status between 400 and 499 (such error cannot be recovered, retrying is useless), return error
- if (stats.retry >= config.maxRetry) {
- logger["logger"].error(status + " while loading " + context.url);
- this.callbacks.onError({
- code: status,
- text: xhr.statusText
- }, context, xhr);
- } else {
- // retry
- logger["logger"].warn(status + " while loading " + context.url + ", retrying in " + this.retryDelay + "..."); // aborts and resets internal state
- this.destroy(); // schedule retry
+ // see https://stackoverflow.com/a/11237259/589493
- this.retryTimeout = window.setTimeout(this.loadInternal.bind(this), this.retryDelay); // set exponential backoff
+var global = Object(get_self_scope["getSelfScope"])(); // safeguard for code that might run both on worker and main thread
- this.retryDelay = Math.min(2 * this.retryDelay, config.maxRetryDelay);
- stats.retry++;
- }
- }
- } else {
- // readyState >= 2 AND readyState !==4 (readyState = HEADERS_RECEIVED || LOADING) rearm timeout as xhr not finished yet
- this.requestTimeout = window.setTimeout(this.loadtimeout.bind(this), config.timeout);
- }
- }
- };
+var demuxer_MediaSource = getMediaSource() || {
+ isTypeSupported: function isTypeSupported() {
+ return false;
+ }
+};
- _proto.loadtimeout = function loadtimeout() {
- logger["logger"].warn("timeout while loading " + this.context.url);
- this.callbacks.onTimeout(this.stats, this.context, null);
- };
+var demuxer_Demuxer =
+/*#__PURE__*/
+function () {
+ function Demuxer(hls, id) {
+ var _this = this;
- _proto.loadprogress = function loadprogress(event) {
- var xhr = event.currentTarget,
- stats = this.stats;
- stats.loaded = event.loaded;
+ this.hls = hls;
+ this.id = id;
+ var observer = this.observer = new Observer();
+ var config = hls.config;
- if (event.lengthComputable) {
- stats.total = event.total;
- }
+ var forwardMessage = function forwardMessage(ev, data) {
+ data = data || {};
+ data.frag = _this.frag;
+ data.id = _this.id;
+ hls.trigger(ev, data);
+ }; // forward events to main thread
- var onProgress = this.callbacks.onProgress;
- if (onProgress) {
- // third arg is to provide on progress data
- onProgress(stats, this.context, null, xhr);
- }
- };
+ observer.on(events["default"].FRAG_DECRYPTED, forwardMessage);
+ observer.on(events["default"].FRAG_PARSING_INIT_SEGMENT, forwardMessage);
+ observer.on(events["default"].FRAG_PARSING_DATA, forwardMessage);
+ observer.on(events["default"].FRAG_PARSED, forwardMessage);
+ observer.on(events["default"].ERROR, forwardMessage);
+ observer.on(events["default"].FRAG_PARSING_METADATA, forwardMessage);
+ observer.on(events["default"].FRAG_PARSING_USERDATA, forwardMessage);
+ observer.on(events["default"].INIT_PTS_FOUND, forwardMessage);
+ var typeSupported = {
+ mp4: demuxer_MediaSource.isTypeSupported('video/mp4'),
+ mpeg: demuxer_MediaSource.isTypeSupported('audio/mpeg'),
+ mp3: demuxer_MediaSource.isTypeSupported('audio/mp4; codecs="mp3"')
+ }; // navigator.vendor is not always available in Web Worker
+ // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
- return XhrLoader;
-}();
+ var vendor = navigator.vendor;
-/* harmony default export */ var xhr_loader = (xhr_loader_XhrLoader);
-// CONCATENATED MODULE: ./src/controller/audio-track-controller.js
-function audio_track_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+ if (config.enableWorker && typeof Worker !== 'undefined') {
+ logger["logger"].log('demuxing in webworker');
+ var w;
-function audio_track_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) audio_track_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) audio_track_controller_defineProperties(Constructor, staticProps); return Constructor; }
+ try {
+ w = this.w = webworkify_webpack(/*require.resolve*/(/*! ../demux/demuxer-worker.js */ "./src/demux/demuxer-worker.js"));
+ this.onwmsg = this.onWorkerMessage.bind(this);
+ w.addEventListener('message', this.onwmsg);
-function audio_track_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+ w.onerror = function (event) {
+ hls.trigger(events["default"].ERROR, {
+ type: errors["ErrorTypes"].OTHER_ERROR,
+ details: errors["ErrorDetails"].INTERNAL_EXCEPTION,
+ fatal: true,
+ event: 'demuxerWorker',
+ err: {
+ message: event.message + ' (' + event.filename + ':' + event.lineno + ')'
+ }
+ });
+ };
+
+ w.postMessage({
+ cmd: 'init',
+ typeSupported: typeSupported,
+ vendor: vendor,
+ id: id,
+ config: JSON.stringify(config)
+ });
+ } catch (err) {
+ logger["logger"].warn('Error in worker:', err);
+ logger["logger"].error('Error while initializing DemuxerWorker, fallback on DemuxerInline');
+
+ if (w) {
+ // revoke the Object URL that was used to create demuxer worker, so as not to leak it
+ global.URL.revokeObjectURL(w.objectURL);
+ }
+ this.demuxer = new demuxer_inline["default"](observer, typeSupported, config, vendor);
+ this.w = undefined;
+ }
+ } else {
+ this.demuxer = new demuxer_inline["default"](observer, typeSupported, config, vendor);
+ }
+ }
+ var _proto = Demuxer.prototype;
+ _proto.destroy = function destroy() {
+ var w = this.w;
+ if (w) {
+ w.removeEventListener('message', this.onwmsg);
+ w.terminate();
+ this.w = null;
+ } else {
+ var demuxer = this.demuxer;
-/**
- * @class AudioTrackController
- * @implements {EventHandler}
- *
- * Handles main manifest and audio-track metadata loaded,
- * owns and exposes the selectable audio-tracks data-models.
- *
- * Exposes internal interface to select available audio-tracks.
- *
- * Handles errors on loading audio-track playlists. Manages fallback mechanism
- * with redundants tracks (group-IDs).
- *
- * Handles level-loading and group-ID switches for video (fallback on video levels),
- * and eventually adapts the audio-track group-ID to match.
- *
- * @fires AUDIO_TRACK_LOADING
- * @fires AUDIO_TRACK_SWITCHING
- * @fires AUDIO_TRACKS_UPDATED
- * @fires ERROR
- *
- */
+ if (demuxer) {
+ demuxer.destroy();
+ this.demuxer = null;
+ }
+ }
-var audio_track_controller_AudioTrackController =
-/*#__PURE__*/
-function (_TaskLoop) {
- audio_track_controller_inheritsLoose(AudioTrackController, _TaskLoop);
+ var observer = this.observer;
- function AudioTrackController(hls) {
- var _this;
+ if (observer) {
+ observer.removeAllListeners();
+ this.observer = null;
+ }
+ };
- _this = _TaskLoop.call(this, hls, events["default"].MANIFEST_LOADING, events["default"].MANIFEST_PARSED, events["default"].AUDIO_TRACK_LOADED, events["default"].AUDIO_TRACK_SWITCHED, events["default"].LEVEL_LOADED, events["default"].ERROR) || this;
- /**
- * @private
- * Currently selected index in `tracks`
- * @member {number} trackId
- */
+ _proto.push = function push(data, initSegment, audioCodec, videoCodec, frag, duration, accurateTimeOffset, defaultInitPTS) {
+ var w = this.w;
+ var timeOffset = Object(number_isFinite["isFiniteNumber"])(frag.startPTS) ? frag.startPTS : frag.start;
+ var decryptdata = frag.decryptdata;
+ var lastFrag = this.frag;
+ var discontinuity = !(lastFrag && frag.cc === lastFrag.cc);
+ var trackSwitch = !(lastFrag && frag.level === lastFrag.level);
+ var nextSN = lastFrag && frag.sn === lastFrag.sn + 1;
+ var contiguous = !trackSwitch && nextSN;
- _this._trackId = -1;
- /**
- * @private
- * If should select tracks according to default track attribute
- * @member {boolean} _selectDefaultTrack
- */
+ if (discontinuity) {
+ logger["logger"].log(this.id + ":discontinuity detected");
+ }
- _this._selectDefaultTrack = true;
- /**
- * @public
- * All tracks available
- * @member {AudioTrack[]}
- */
+ if (trackSwitch) {
+ logger["logger"].log(this.id + ":switch detected");
+ }
- _this.tracks = [];
- /**
- * @public
- * List of blacklisted audio track IDs (that have caused failure)
- * @member {number[]}
- */
+ this.frag = frag;
- _this.trackIdBlacklist = Object.create(null);
- /**
- * @public
- * The currently running group ID for audio
- * (we grab this on manifest-parsed and new level-loaded)
- * @member {string}
- */
+ if (w) {
+ // post fragment payload as transferable objects for ArrayBuffer (no copy)
+ w.postMessage({
+ cmd: 'demux',
+ data: data,
+ decryptdata: decryptdata,
+ initSegment: initSegment,
+ audioCodec: audioCodec,
+ videoCodec: videoCodec,
+ timeOffset: timeOffset,
+ discontinuity: discontinuity,
+ trackSwitch: trackSwitch,
+ contiguous: contiguous,
+ duration: duration,
+ accurateTimeOffset: accurateTimeOffset,
+ defaultInitPTS: defaultInitPTS
+ }, data instanceof ArrayBuffer ? [data] : []);
+ } else {
+ var demuxer = this.demuxer;
- _this.audioGroupId = null;
- return _this;
- }
- /**
- * Reset audio tracks on new manifest loading.
- */
+ if (demuxer) {
+ demuxer.push(data, decryptdata, initSegment, audioCodec, videoCodec, timeOffset, discontinuity, trackSwitch, contiguous, duration, accurateTimeOffset, defaultInitPTS);
+ }
+ }
+ };
+ _proto.onWorkerMessage = function onWorkerMessage(ev) {
+ var data = ev.data,
+ hls = this.hls;
- var _proto = AudioTrackController.prototype;
+ switch (data.event) {
+ case 'init':
+ // revoke the Object URL that was used to create demuxer worker, so as not to leak it
+ global.URL.revokeObjectURL(this.w.objectURL);
+ break;
+ // special case for FRAG_PARSING_DATA: data1 and data2 are transferable objects
- _proto.onManifestLoading = function onManifestLoading() {
- this.tracks = [];
- this._trackId = -1;
- this._selectDefaultTrack = true;
- }
- /**
- * Store tracks data from manifest parsed data.
- *
- * Trigger AUDIO_TRACKS_UPDATED event.
- *
- * @param {*} data
- */
- ;
+ case events["default"].FRAG_PARSING_DATA:
+ data.data.data1 = new Uint8Array(data.data1);
- _proto.onManifestParsed = function onManifestParsed(data) {
- var tracks = this.tracks = data.audioTracks || [];
- this.hls.trigger(events["default"].AUDIO_TRACKS_UPDATED, {
- audioTracks: tracks
- });
- }
- /**
- * Store track details of loaded track in our data-model.
- *
- * Set-up metadata update interval task for live-mode streams.
- *
- * @param {} data
- */
- ;
+ if (data.data2) {
+ data.data.data2 = new Uint8Array(data.data2);
+ }
- _proto.onAudioTrackLoaded = function onAudioTrackLoaded(data) {
- if (data.id >= this.tracks.length) {
- logger["logger"].warn('Invalid audio track id:', data.id);
- return;
+ /* falls through */
+
+ default:
+ data.data = data.data || {};
+ data.data.frag = this.frag;
+ data.data.id = this.id;
+ hls.trigger(data.event, data.data);
+ break;
}
+ };
- logger["logger"].log("audioTrack " + data.id + " loaded");
- this.tracks[data.id].details = data.details; // check if current playlist is a live playlist
- // and if we have already our reload interval setup
+ return Demuxer;
+}();
- if (data.details.live && !this.hasInterval()) {
- // if live playlist we will have to reload it periodically
- // set reload period to playlist target duration
- var updatePeriodMs = data.details.targetduration * 1000;
- this.setInterval(updatePeriodMs);
- }
+/* harmony default export */ var demux_demuxer = (demuxer_Demuxer);
+// CONCATENATED MODULE: ./src/utils/time-ranges.ts
+/**
+ * TimeRanges to string helper
+ */
+var TimeRanges = {
+ toString: function toString(r) {
+ var log = '';
+ var len = r.length;
- if (!data.details.live && this.hasInterval()) {
- // playlist is not live and timer is scheduled: cancel it
- this.clearInterval();
+ for (var i = 0; i < len; i++) {
+ log += '[' + r.start(i).toFixed(3) + ',' + r.end(i).toFixed(3) + ']';
}
+
+ return log;
}
- /**
- * Update the internal group ID to any audio-track we may have set manually
- * or because of a failure-handling fallback.
- *
- * Quality-levels should update to that group ID in this case.
- *
- * @param {*} data
- */
- ;
+};
+/* harmony default export */ var time_ranges = (TimeRanges);
+// CONCATENATED MODULE: ./src/utils/discontinuities.js
- _proto.onAudioTrackSwitched = function onAudioTrackSwitched(data) {
- var audioGroupId = this.tracks[data.id].groupId;
- if (audioGroupId && this.audioGroupId !== audioGroupId) {
- this.audioGroupId = audioGroupId;
- }
- }
- /**
- * When a level gets loaded, if it has redundant audioGroupIds (in the same ordinality as it's redundant URLs)
- * we are setting our audio-group ID internally to the one set, if it is different from the group ID currently set.
- *
- * If group-ID got update, we re-select the appropriate audio-track with this group-ID matching the currently
- * selected one (based on NAME property).
- *
- * @param {*} data
- */
- ;
- _proto.onLevelLoaded = function onLevelLoaded(data) {
- // FIXME: crashes because currentLevel is undefined
- // const levelInfo = this.hls.levels[this.hls.currentLevel];
- var levelInfo = this.hls.levels[data.level];
+function findFirstFragWithCC(fragments, cc) {
+ var firstFrag = null;
- if (!levelInfo.audioGroupIds) {
- return;
- }
+ for (var i = 0; i < fragments.length; i += 1) {
+ var currentFrag = fragments[i];
- var audioGroupId = levelInfo.audioGroupIds[levelInfo.urlId];
+ if (currentFrag && currentFrag.cc === cc) {
+ firstFrag = currentFrag;
+ break;
+ }
+ }
- if (this.audioGroupId !== audioGroupId) {
- this.audioGroupId = audioGroupId;
+ return firstFrag;
+}
+function findFragWithCC(fragments, CC) {
+ return binary_search.search(fragments, function (candidate) {
+ if (candidate.cc < CC) {
+ return 1;
+ } else if (candidate.cc > CC) {
+ return -1;
+ } else {
+ return 0;
+ }
+ });
+}
+function shouldAlignOnDiscontinuities(lastFrag, lastLevel, details) {
+ var shouldAlign = false;
- this._selectInitialAudioTrack();
+ if (lastLevel && lastLevel.details && details) {
+ if (details.endCC > details.startCC || lastFrag && lastFrag.cc < details.startCC) {
+ shouldAlign = true;
}
}
- /**
- * Handle network errors loading audio track manifests
- * and also pausing on any netwok errors.
- *
- * @param {ErrorEventData} data
- */
- ;
- _proto.onError = function onError(data) {
- // Only handle network errors
- if (data.type !== errors["ErrorTypes"].NETWORK_ERROR) {
- return;
- } // If fatal network error, cancel update task
+ return shouldAlign;
+} // Find the first frag in the previous level which matches the CC of the first frag of the new level
+
+function findDiscontinuousReferenceFrag(prevDetails, curDetails) {
+ var prevFrags = prevDetails.fragments;
+ var curFrags = curDetails.fragments;
+ if (!curFrags.length || !prevFrags.length) {
+ logger["logger"].log('No fragments to align');
+ return;
+ }
- if (data.fatal) {
- this.clearInterval();
- } // If not an audio-track loading error don't handle further
+ var prevStartFrag = findFirstFragWithCC(prevFrags, curFrags[0].cc);
+ if (!prevStartFrag || prevStartFrag && !prevStartFrag.startPTS) {
+ logger["logger"].log('No frag in previous level to align on');
+ return;
+ }
- if (data.details !== errors["ErrorDetails"].AUDIO_TRACK_LOAD_ERROR) {
- return;
+ return prevStartFrag;
+}
+function adjustPts(sliding, details) {
+ details.fragments.forEach(function (frag) {
+ if (frag) {
+ var start = frag.start + sliding;
+ frag.start = frag.startPTS = start;
+ frag.endPTS = start + frag.duration;
}
+ });
+ details.PTSKnown = true;
+}
+/**
+ * Using the parameters of the last level, this function computes PTS' of the new fragments so that they form a
+ * contiguous stream with the last fragments.
+ * The PTS of a fragment lets Hls.js know where it fits into a stream - by knowing every PTS, we know which fragment to
+ * download at any given time. PTS is normally computed when the fragment is demuxed, so taking this step saves us time
+ * and an extra download.
+ * @param lastFrag
+ * @param lastLevel
+ * @param details
+ */
- logger["logger"].warn('Network failure on audio-track id:', data.context.id);
+function alignStream(lastFrag, lastLevel, details) {
+ alignDiscontinuities(lastFrag, details, lastLevel);
- this._handleLoadError();
+ if (!details.PTSKnown && lastLevel) {
+ // If the PTS wasn't figured out via discontinuity sequence that means there was no CC increase within the level.
+ // Aligning via Program Date Time should therefore be reliable, since PDT should be the same within the same
+ // discontinuity sequence.
+ alignPDT(details, lastLevel.details);
}
- /**
- * @type {AudioTrack[]} Audio-track list we own
- */
- ;
+}
+/**
+ * Computes the PTS if a new level's fragments using the PTS of a fragment in the last level which shares the same
+ * discontinuity sequence.
+ * @param lastLevel - The details of the last loaded level
+ * @param details - The details of the new level
+ */
- /**
- * @private
- * @param {number} newId
- */
- _proto._setAudioTrack = function _setAudioTrack(newId) {
- // noop on same audio track id as already set
- if (this._trackId === newId && this.tracks[this._trackId].details) {
- logger["logger"].debug('Same id as current audio-track passed, and track details available -> no-op');
- return;
- } // check if level idx is valid
+function alignDiscontinuities(lastFrag, details, lastLevel) {
+ if (shouldAlignOnDiscontinuities(lastFrag, lastLevel, details)) {
+ var referenceFrag = findDiscontinuousReferenceFrag(lastLevel.details, details);
+ if (referenceFrag) {
+ logger["logger"].log('Adjusting PTS using last level due to CC increase within current level');
+ adjustPts(referenceFrag.start, details);
+ }
+ }
+}
+/**
+ * Computes the PTS of a new level's fragments using the difference in Program Date Time from the last level.
+ * @param details - The details of the new level
+ * @param lastDetails - The details of the last loaded level
+ */
- if (newId < 0 || newId >= this.tracks.length) {
- logger["logger"].warn('Invalid id passed to audio-track controller');
+function alignPDT(details, lastDetails) {
+ if (lastDetails && lastDetails.fragments.length) {
+ if (!details.hasProgramDateTime || !lastDetails.hasProgramDateTime) {
return;
- }
+ } // if last level sliding is 1000 and its first frag PROGRAM-DATE-TIME is 2017-08-20 1:10:00 AM
+ // and if new details first frag PROGRAM DATE-TIME is 2017-08-20 1:10:08 AM
+ // then we can deduce that playlist B sliding is 1000+8 = 1008s
- var audioTrack = this.tracks[newId];
- logger["logger"].log("Now switching to audio-track index " + newId); // stopping live reloading timer if any
- this.clearInterval();
- this._trackId = newId;
- var url = audioTrack.url,
- type = audioTrack.type,
- id = audioTrack.id;
- this.hls.trigger(events["default"].AUDIO_TRACK_SWITCHING, {
- id: id,
- type: type,
- url: url
- });
+ var lastPDT = lastDetails.fragments[0].programDateTime;
+ var newPDT = details.fragments[0].programDateTime; // date diff is in ms. frag.start is in seconds
- this._loadTrackDetailsIfNeeded(audioTrack);
- }
- /**
- * @override
- */
- ;
+ var sliding = (newPDT - lastPDT) / 1000 + lastDetails.fragments[0].start;
- _proto.doTick = function doTick() {
- this._updateTrack(this._trackId);
+ if (Object(number_isFinite["isFiniteNumber"])(sliding)) {
+ logger["logger"].log("adjusting PTS using programDateTime delta, sliding:" + sliding.toFixed(3));
+ adjustPts(sliding, details);
+ }
}
- /**
- * Select initial track
- * @private
- */
- ;
+}
+// CONCATENATED MODULE: ./src/controller/base-stream-controller.js
- _proto._selectInitialAudioTrack = function _selectInitialAudioTrack() {
- var _this2 = this;
- var tracks = this.tracks;
+function base_stream_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
- if (!tracks.length) {
- return;
- }
- var currentAudioTrack = this.tracks[this._trackId];
- var name = null;
- if (currentAudioTrack) {
- name = currentAudioTrack.name;
- } // Pre-select default tracks if there are any
- if (this._selectDefaultTrack) {
- var defaultTracks = tracks.filter(function (track) {
- return track.default;
- });
+var State = {
+ STOPPED: 'STOPPED',
+ STARTING: 'STARTING',
+ IDLE: 'IDLE',
+ PAUSED: 'PAUSED',
+ KEY_LOADING: 'KEY_LOADING',
+ FRAG_LOADING: 'FRAG_LOADING',
+ FRAG_LOADING_WAITING_RETRY: 'FRAG_LOADING_WAITING_RETRY',
+ WAITING_TRACK: 'WAITING_TRACK',
+ PARSING: 'PARSING',
+ PARSED: 'PARSED',
+ BUFFER_FLUSHING: 'BUFFER_FLUSHING',
+ ENDED: 'ENDED',
+ ERROR: 'ERROR',
+ WAITING_INIT_PTS: 'WAITING_INIT_PTS',
+ WAITING_LEVEL: 'WAITING_LEVEL'
+};
- if (defaultTracks.length) {
- tracks = defaultTracks;
- } else {
- logger["logger"].warn('No default audio tracks defined');
- }
- }
+var base_stream_controller_BaseStreamController =
+/*#__PURE__*/
+function (_TaskLoop) {
+ base_stream_controller_inheritsLoose(BaseStreamController, _TaskLoop);
- var trackFound = false;
+ function BaseStreamController() {
+ return _TaskLoop.apply(this, arguments) || this;
+ }
- var traverseTracks = function traverseTracks() {
- // Select track with right group ID
- tracks.forEach(function (track) {
- if (trackFound) {
- return;
- } // We need to match the (pre-)selected group ID
- // and the NAME of the current track.
+ var _proto = BaseStreamController.prototype;
+ _proto.doTick = function doTick() {};
- if ((!_this2.audioGroupId || track.groupId === _this2.audioGroupId) && (!name || name === track.name)) {
- // If there was a previous track try to stay with the same `NAME`.
- // It should be unique across tracks of same group, and consistent through redundant track groups.
- _this2._setAudioTrack(track.id);
+ _proto.startLoad = function startLoad() {};
- trackFound = true;
- }
- });
- };
+ _proto.stopLoad = function stopLoad() {
+ var frag = this.fragCurrent;
- traverseTracks();
+ if (frag) {
+ if (frag.loader) {
+ frag.loader.abort();
+ }
- if (!trackFound) {
- name = null;
- traverseTracks();
+ this.fragmentTracker.removeFragment(frag);
}
- if (!trackFound) {
- logger["logger"].error("No track found for running audio group-ID: " + this.audioGroupId);
- this.hls.trigger(events["default"].ERROR, {
- type: errors["ErrorTypes"].MEDIA_ERROR,
- details: errors["ErrorDetails"].AUDIO_TRACK_LOAD_ERROR,
- fatal: true
- });
+ if (this.demuxer) {
+ this.demuxer.destroy();
+ this.demuxer = null;
}
- }
- /**
- * @private
- * @param {AudioTrack} audioTrack
- * @returns {boolean}
- */
- ;
- _proto._needsTrackLoading = function _needsTrackLoading(audioTrack) {
- var details = audioTrack.details,
- url = audioTrack.url;
+ this.fragCurrent = null;
+ this.fragPrevious = null;
+ this.clearInterval();
+ this.clearNextTick();
+ this.state = State.STOPPED;
+ };
- if (!details || details.live) {
- // check if we face an audio track embedded in main playlist (audio track without URI attribute)
- return !!url;
+ _proto._streamEnded = function _streamEnded(bufferInfo, levelDetails) {
+ var fragCurrent = this.fragCurrent,
+ fragmentTracker = this.fragmentTracker; // we just got done loading the final fragment and there is no other buffered range after ...
+ // rationale is that in case there are any buffered ranges after, it means that there are unbuffered portion in between
+ // so we should not switch to ENDED in that case, to be able to buffer them
+ // dont switch to ENDED if we need to backtrack last fragment
+
+ if (!levelDetails.live && fragCurrent && !fragCurrent.backtracked && fragCurrent.sn === levelDetails.endSN && !bufferInfo.nextStart) {
+ var fragState = fragmentTracker.getState(fragCurrent);
+ return fragState === FragmentState.PARTIAL || fragState === FragmentState.OK;
}
return false;
- }
- /**
- * @private
- * @param {AudioTrack} audioTrack
- */
- ;
+ };
- _proto._loadTrackDetailsIfNeeded = function _loadTrackDetailsIfNeeded(audioTrack) {
- if (this._needsTrackLoading(audioTrack)) {
- var url = audioTrack.url,
- id = audioTrack.id; // track not retrieved yet, or live playlist we need to (re)load it
+ _proto.onMediaSeeking = function onMediaSeeking() {
+ var config = this.config,
+ media = this.media,
+ mediaBuffer = this.mediaBuffer,
+ state = this.state;
+ var currentTime = media ? media.currentTime : null;
+ var bufferInfo = BufferHelper.bufferInfo(mediaBuffer || media, currentTime, this.config.maxBufferHole);
- logger["logger"].log("loading audio-track playlist for id: " + id);
- this.hls.trigger(events["default"].AUDIO_TRACK_LOADING, {
- url: url,
- id: id
- });
+ if (Object(number_isFinite["isFiniteNumber"])(currentTime)) {
+ logger["logger"].log("media seeking to " + currentTime.toFixed(3));
}
- }
- /**
- * @private
- * @param {number} newId
- */
- ;
-
- _proto._updateTrack = function _updateTrack(newId) {
- // check if level idx is valid
- if (newId < 0 || newId >= this.tracks.length) {
- return;
- } // stopping live reloading timer if any
-
-
- this.clearInterval();
- this._trackId = newId;
- logger["logger"].log("trying to update audio-track " + newId);
- var audioTrack = this.tracks[newId];
- this._loadTrackDetailsIfNeeded(audioTrack);
- }
- /**
- * @private
- */
- ;
+ if (state === State.FRAG_LOADING) {
+ var fragCurrent = this.fragCurrent; // check if we are seeking to a unbuffered area AND if frag loading is in progress
- _proto._handleLoadError = function _handleLoadError() {
- // First, let's black list current track id
- this.trackIdBlacklist[this._trackId] = true; // Let's try to fall back on a functional audio-track with the same group ID
+ if (bufferInfo.len === 0 && fragCurrent) {
+ var tolerance = config.maxFragLookUpTolerance;
+ var fragStartOffset = fragCurrent.start - tolerance;
+ var fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance; // check if we seek position will be out of currently loaded frag range : if out cancel frag load, if in, don't do anything
- var previousId = this._trackId;
- var _this$tracks$previous = this.tracks[previousId],
- name = _this$tracks$previous.name,
- language = _this$tracks$previous.language,
- groupId = _this$tracks$previous.groupId;
- logger["logger"].warn("Loading failed on audio track id: " + previousId + ", group-id: " + groupId + ", name/language: \"" + name + "\" / \"" + language + "\""); // Find a non-blacklisted track ID with the same NAME
- // At least a track that is not blacklisted, thus on another group-ID.
+ if (currentTime < fragStartOffset || currentTime > fragEndOffset) {
+ if (fragCurrent.loader) {
+ logger["logger"].log('seeking outside of buffer while fragment load in progress, cancel fragment load');
+ fragCurrent.loader.abort();
+ }
- var newId = previousId;
+ this.fragCurrent = null;
+ this.fragPrevious = null; // switch to IDLE state to load new fragment
- for (var i = 0; i < this.tracks.length; i++) {
- if (this.trackIdBlacklist[i]) {
- continue;
+ this.state = State.IDLE;
+ } else {
+ logger["logger"].log('seeking outside of buffer but within currently loaded fragment range');
+ }
}
+ } else if (state === State.ENDED) {
+ // if seeking to unbuffered area, clean up fragPrevious
+ if (bufferInfo.len === 0) {
+ this.fragPrevious = null;
+ this.fragCurrent = null;
+ } // switch to IDLE state to check for potential new fragment
- var newTrack = this.tracks[i];
- if (newTrack.name === name) {
- newId = i;
- break;
- }
+ this.state = State.IDLE;
}
- if (newId === previousId) {
- logger["logger"].warn("No fallback audio-track found for name/language: \"" + name + "\" / \"" + language + "\"");
- return;
- }
+ if (media) {
+ this.lastCurrentTime = currentTime;
+ } // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
- logger["logger"].log('Attempting audio-track fallback id:', newId, 'group-id:', this.tracks[newId].groupId);
- this._setAudioTrack(newId);
+ if (!this.loadedmetadata) {
+ this.nextLoadPosition = this.startPosition = currentTime;
+ } // tick to speed up processing
+
+
+ this.tick();
};
- audio_track_controller_createClass(AudioTrackController, [{
- key: "audioTracks",
- get: function get() {
- return this.tracks;
- }
- /**
- * @type {number} Index into audio-tracks list of currently selected track.
- */
+ _proto.onMediaEnded = function onMediaEnded() {
+ // reset startPosition and lastCurrentTime to restart playback @ stream beginning
+ this.startPosition = this.lastCurrentTime = 0;
+ };
- }, {
- key: "audioTrack",
- get: function get() {
- return this._trackId;
- }
- /**
- * Select current track by index
- */
- ,
- set: function set(newId) {
- this._setAudioTrack(newId); // If audio track is selected from API then don't choose from the manifest default track
+ _proto.onHandlerDestroying = function onHandlerDestroying() {
+ this.stopLoad();
+ _TaskLoop.prototype.onHandlerDestroying.call(this);
+ };
- this._selectDefaultTrack = false;
- }
- }]);
+ _proto.onHandlerDestroyed = function onHandlerDestroyed() {
+ this.state = State.STOPPED;
+ this.fragmentTracker = null;
+ };
- return AudioTrackController;
+ _proto.computeLivePosition = function computeLivePosition(sliding, levelDetails) {
+ var targetLatency = this.config.liveSyncDuration !== undefined ? this.config.liveSyncDuration : this.config.liveSyncDurationCount * levelDetails.targetduration;
+ return sliding + Math.max(0, levelDetails.totalduration - targetLatency);
+ };
+
+ return BaseStreamController;
}(TaskLoop);
-/* harmony default export */ var audio_track_controller = (audio_track_controller_AudioTrackController);
+
// CONCATENATED MODULE: ./src/controller/audio-stream-controller.js
@@ -14847,7 +13098,7 @@ function audio_stream_controller_inheritsLoose(subClass, superClass) { subClass.
var audio_stream_controller_window = window,
audio_stream_controller_performance = audio_stream_controller_window.performance;
-var audio_stream_controller_TICK_INTERVAL = 100; // how often to tick in ms
+var TICK_INTERVAL = 100; // how often to tick in ms
var audio_stream_controller_AudioStreamController =
/*#__PURE__*/
@@ -14894,7 +13145,7 @@ function (_BaseStreamController) {
if (this.tracks) {
var lastCurrentTime = this.lastCurrentTime;
this.stopLoad();
- this.setInterval(audio_stream_controller_TICK_INTERVAL);
+ this.setInterval(TICK_INTERVAL);
this.fragLoadError = 0;
if (lastCurrentTime > 0 && startPosition === -1) {
@@ -15231,6 +13482,7 @@ function (_BaseStreamController) {
this.media = this.mediaBuffer = this.videoBuffer = null;
this.loadedmetadata = false;
+ this.fragmentTracker.removeAllFragments();
this.stopLoad();
};
@@ -15254,7 +13506,7 @@ function (_BaseStreamController) {
}
} else {
// switching to audio track, start timer if not already started
- this.setInterval(audio_stream_controller_TICK_INTERVAL);
+ this.setInterval(TICK_INTERVAL);
} // should we switch tracks ?
@@ -18334,7 +16586,7 @@ function (_EventHandler) {
function TimelineController(hls) {
var _this;
- _this = _EventHandler.call(this, hls, events["default"].MEDIA_ATTACHING, events["default"].MEDIA_DETACHING, events["default"].FRAG_PARSING_USERDATA, events["default"].FRAG_DECRYPTED, events["default"].MANIFEST_LOADING, events["default"].MANIFEST_LOADED, events["default"].FRAG_LOADED, events["default"].LEVEL_SWITCHING, events["default"].INIT_PTS_FOUND) || this;
+ _this = _EventHandler.call(this, hls, events["default"].MEDIA_ATTACHING, events["default"].MEDIA_DETACHING, events["default"].FRAG_PARSING_USERDATA, events["default"].FRAG_DECRYPTED, events["default"].MANIFEST_LOADING, events["default"].MANIFEST_LOADED, events["default"].FRAG_LOADED, events["default"].INIT_PTS_FOUND) || this;
_this.media = null;
_this.config = void 0;
_this.enabled = true;
@@ -18577,10 +16829,6 @@ function (_EventHandler) {
}
};
- _proto.onLevelSwitching = function onLevelSwitching() {
- this.enabled = this.hls.currentLevel.closedCaptions !== 'NONE';
- };
-
_proto.onFragLoaded = function onFragLoaded(data) {
var frag = data.frag,
payload = data.payload;
@@ -18673,7 +16921,12 @@ function (_EventHandler) {
if (!currentTrack.cues.getCueById(cue.id)) {
try {
currentTrack.addCue(cue);
+
+ if (!currentTrack.cues.getCueById(cue.id)) {
+ throw new Error("addCue is failed for: " + cue);
+ }
} catch (err) {
+ logger["logger"].debug("Failed occurred on adding cues: " + err);
var textTrackCue = new window.TextTrackCue(cue.startTime, cue.endTime, cue.text);
textTrackCue.id = cue.id;
currentTrack.addCue(textTrackCue);
@@ -18721,7 +16974,7 @@ function (_EventHandler) {
};
_proto.onFragParsingUserdata = function onFragParsingUserdata(data) {
- if (!this.enabled || !this.config.enableCEA708Captions) {
+ if (!this.enabled || !this.cea608Parser) {
return;
} // If the event contains captions (found in the bytes property), push all bytes into the parser immediately
// It will create the proper timestamps based on the PTS value
@@ -18782,6 +17035,7 @@ function intersection(x1, x2, y1, y2) {
+
function subtitle_track_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function subtitle_track_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) subtitle_track_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) subtitle_track_controller_defineProperties(Constructor, staticProps); return Constructor; }
@@ -18793,6 +17047,7 @@ function subtitle_track_controller_inheritsLoose(subClass, superClass) { subClas
+
var subtitle_track_controller_SubtitleTrackController =
/*#__PURE__*/
function (_EventHandler) {
@@ -18864,6 +17119,17 @@ function (_EventHandler) {
this.media.textTracks.removeEventListener('change', this.trackChangeListener);
}
+ if (Object(number_isFinite["isFiniteNumber"])(this.subtitleTrack)) {
+ this.queuedDefaultTrack = this.subtitleTrack;
+ }
+
+ var textTracks = filterSubtitleTracks(this.media.textTracks); // Clear loaded cues on media detachment from tracks
+
+ textTracks.forEach(function (track) {
+ clearCurrentCues(track);
+ }); // Disable all subtitle tracks before detachment so when reattached only tracks in that content are enabled.
+
+ this.subtitleTrack = -1;
this.media = null;
} // Fired whenever a new manifest is loaded.
;
@@ -19087,6 +17353,132 @@ function filterSubtitleTracks(textTrackList) {
// EXTERNAL MODULE: ./src/crypt/decrypter.js + 3 modules
var decrypter = __webpack_require__("./src/crypt/decrypter.js");
+// CONCATENATED MODULE: ./src/controller/fragment-finders.ts
+
+
+
+/**
+ * Returns first fragment whose endPdt value exceeds the given PDT.
+ * @param {Array} fragments - The array of candidate fragments
+ * @param {number|null} [PDTValue = null] - The PDT value which must be exceeded
+ * @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start/end can be within in order to be considered contiguous
+ * @returns {*|null} fragment - The best matching fragment
+ */
+function findFragmentByPDT(fragments, PDTValue, maxFragLookUpTolerance) {
+ if (PDTValue === null || !Array.isArray(fragments) || !fragments.length || !Object(number_isFinite["isFiniteNumber"])(PDTValue)) {
+ return null;
+ } // if less than start
+
+
+ var startPDT = fragments[0].programDateTime;
+
+ if (PDTValue < (startPDT || 0)) {
+ return null;
+ }
+
+ var endPDT = fragments[fragments.length - 1].endProgramDateTime;
+
+ if (PDTValue >= (endPDT || 0)) {
+ return null;
+ }
+
+ maxFragLookUpTolerance = maxFragLookUpTolerance || 0;
+
+ for (var seg = 0; seg < fragments.length; ++seg) {
+ var frag = fragments[seg];
+
+ if (pdtWithinToleranceTest(PDTValue, maxFragLookUpTolerance, frag)) {
+ return frag;
+ }
+ }
+
+ return null;
+}
+/**
+ * Finds a fragment based on the SN of the previous fragment; or based on the needs of the current buffer.
+ * This method compensates for small buffer gaps by applying a tolerance to the start of any candidate fragment, thus
+ * breaking any traps which would cause the same fragment to be continuously selected within a small range.
+ * @param {*} fragPrevious - The last frag successfully appended
+ * @param {Array} fragments - The array of candidate fragments
+ * @param {number} [bufferEnd = 0] - The end of the contiguous buffered range the playhead is currently within
+ * @param {number} maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous
+ * @returns {*} foundFrag - The best matching fragment
+ */
+
+function findFragmentByPTS(fragPrevious, fragments, bufferEnd, maxFragLookUpTolerance) {
+ if (bufferEnd === void 0) {
+ bufferEnd = 0;
+ }
+
+ if (maxFragLookUpTolerance === void 0) {
+ maxFragLookUpTolerance = 0;
+ }
+
+ var fragNext = fragPrevious ? fragments[fragPrevious.sn - fragments[0].sn + 1] : null; // Prefer the next fragment if it's within tolerance
+
+ if (fragNext && !fragment_finders_fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, fragNext)) {
+ return fragNext;
+ }
+
+ return binary_search.search(fragments, fragment_finders_fragmentWithinToleranceTest.bind(null, bufferEnd, maxFragLookUpTolerance));
+}
+/**
+ * The test function used by the findFragmentBySn's BinarySearch to look for the best match to the current buffer conditions.
+ * @param {*} candidate - The fragment to test
+ * @param {number} [bufferEnd = 0] - The end of the current buffered range the playhead is currently within
+ * @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start can be within in order to be considered contiguous
+ * @returns {number} - 0 if it matches, 1 if too low, -1 if too high
+ */
+
+function fragment_finders_fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, candidate) {
+ if (bufferEnd === void 0) {
+ bufferEnd = 0;
+ }
+
+ if (maxFragLookUpTolerance === void 0) {
+ maxFragLookUpTolerance = 0;
+ }
+
+ // offset should be within fragment boundary - config.maxFragLookUpTolerance
+ // this is to cope with situations like
+ // bufferEnd = 9.991
+ // frag[Ø] : [0,10]
+ // frag[1] : [10,20]
+ // bufferEnd is within frag[0] range ... although what we are expecting is to return frag[1] here
+ // frag start frag start+duration
+ // |-----------------------------|
+ // <---> <--->
+ // ...--------><-----------------------------><---------....
+ // previous frag matching fragment next frag
+ // return -1 return 0 return 1
+ // logger.log(`level/sn/start/end/bufEnd:${level}/${candidate.sn}/${candidate.start}/${(candidate.start+candidate.duration)}/${bufferEnd}`);
+ // Set the lookup tolerance to be small enough to detect the current segment - ensures we don't skip over very small segments
+ var candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0));
+
+ if (candidate.start + candidate.duration - candidateLookupTolerance <= bufferEnd) {
+ return 1;
+ } else if (candidate.start - candidateLookupTolerance > bufferEnd && candidate.start) {
+ // if maxFragLookUpTolerance will have negative value then don't return -1 for first element
+ return -1;
+ }
+
+ return 0;
+}
+/**
+ * The test function used by the findFragmentByPdt's BinarySearch to look for the best match to the current buffer conditions.
+ * This function tests the candidate's program date time values, as represented in Unix time
+ * @param {*} candidate - The fragment to test
+ * @param {number} [pdtBufferEnd = 0] - The Unix time representing the end of the current buffered range
+ * @param {number} [maxFragLookUpTolerance = 0] - The amount of time that a fragment's start can be within in order to be considered contiguous
+ * @returns {boolean} True if contiguous, false otherwise
+ */
+
+function pdtWithinToleranceTest(pdtBufferEnd, maxFragLookUpTolerance, candidate) {
+ var candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0)) * 1000; // endProgramDateTime can be null, default to zero
+
+ var endProgramDateTime = candidate.endProgramDateTime || 0;
+ return endProgramDateTime - candidateLookupTolerance > pdtBufferEnd;
+}
// CONCATENATED MODULE: ./src/controller/subtitle-stream-controller.js
function subtitle_stream_controller_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
@@ -19180,7 +17572,18 @@ function (_BaseStreamController) {
};
_proto.onMediaDetaching = function onMediaDetaching() {
+ var _this2 = this;
+
+ if (!this.media) {
+ return;
+ }
+
this.media.removeEventListener('seeking', this._onMediaSeeking);
+ this.fragmentTracker.removeAllFragments();
+ this.currentTrackId = -1;
+ this.tracks.forEach(function (track) {
+ _this2.tracksBuffered[track.id] = [];
+ });
this.media = null;
this.state = State.STOPPED;
} // If something goes wrong, proceed to next frag, if we were processing one.
@@ -19198,20 +17601,20 @@ function (_BaseStreamController) {
;
_proto.onSubtitleTracksUpdated = function onSubtitleTracksUpdated(data) {
- var _this2 = this;
+ var _this3 = this;
logger["logger"].log('subtitle tracks updated');
this.tracksBuffered = [];
this.tracks = data.subtitleTracks;
this.tracks.forEach(function (track) {
- _this2.tracksBuffered[track.id] = [];
+ _this3.tracksBuffered[track.id] = [];
});
};
_proto.onSubtitleTrackSwitch = function onSubtitleTrackSwitch(data) {
this.currentTrackId = data.id;
- if (!this.tracks || this.currentTrackId === -1) {
+ if (!this.tracks || !this.tracks.length || this.currentTrackId === -1) {
this.clearInterval();
return;
} // Check if track has the necessary details to load fragments
@@ -19327,6 +17730,30 @@ function (_BaseStreamController) {
if (!foundFrag) {
foundFrag = findFragmentByPTS(fragPrevious, fragments, bufferEnd, maxFragLookUpTolerance);
}
+
+ if (!foundFrag && trackDetails.live && fragPrevious && fragPrevious.start < fragments[0].start) {
+ /*
+ below is a real world example of what can happen in production.
+
+ fragPrevious s:04:08:44.000Z, e:04:08:49.000Z -- was found by PDT
+
+ # response on subtitle/en/playlist.m3u8 on Nth call
+ fragments[0]: s:04:08:24.000Z, e:04:08:29.000Z -- s:4532.900002, e:4537.900002
+ fragments[1]: s:04:08:29.000Z, e:04:08:34.000Z -- s:4537.900002, e:4542.900002
+ fragments[2]: s:04:08:34.000Z, e:04:08:39.000Z -- s:4542.900002, e:4547.900002
+ fragments[3]: s:04:08:39.000Z, e:04:08:44.000Z -- s:4547.900002, e:4552.900002
+ fragments[4]: s:04:08:44.000Z, e:04:08:49.000Z -- s:4552.900002, e:4557.900002
+ # response on subtitle/en/playlist.m3u8 on (N+1)th call
+ fragments[0]: s:04:08:54.000Z, e:04:08:59.000Z -- s:4562.900002, e:4567.900002
+ fragments[1]: s:04:08:59.000Z, e:04:09:04.000Z -- s:4567.900002, e:4572.900002
+ fragments[2]: s:04:09:04.000Z, e:04:09:09.000Z -- s:4572.900002, e:4577.900002
+ fragments[3]: s:04:09:09.000Z, e:04:09:14.000Z -- s:4577.900002, e:4582.900002
+ fragments[4]: s:04:09:14.000Z, e:04:09:19.000Z -- s:4582.900002, e:4587.900002
+ # notice the gap from e:04:08:49.000Z to s:04:08:54.000Z, code below is to fix this issue
+ */
+ foundFrag = fragments[0];
+ logger["logger"].warn("Gap detected in live subtitle playlist, using next available fragment {start: " + foundFrag.start + "}");
+ }
} else {
foundFrag = fragments[fragLen - 1];
}
@@ -19617,7 +18044,7 @@ function (_EventHandler) {
logger["logger"].log('Got EME message event, creating license request');
this._requestLicense(message, function (data) {
- logger["logger"].log('Received license data, updating key-session');
+ logger["logger"].log("Received license data (length: " + (data ? data.byteLength : data) + "), updating key-session");
keySession.update(data);
});
}
@@ -20301,7 +18728,7 @@ function (_Observer) {
* @member {StreamController} streamController
*/
- var streamController = _this.streamController = new stream_controller(hls_assertThisInitialized(_this), fragmentTracker);
+ var streamController = _this.streamController = new stream_controller_default.a(hls_assertThisInitialized(_this), fragmentTracker);
var networkControllers = [levelController, streamController]; // optional audio stream controller
/**
diff --git a/dist/hls.light.js b/dist/hls.light.js
new file mode 100644
index 00000000000..5c58dc7e3b0
--- /dev/null
+++ b/dist/hls.light.js
@@ -0,0 +1,8005 @@
+typeof window !== "undefined" &&
+(function webpackUniversalModuleDefinition(root, factory) {
+ if(typeof exports === 'object' && typeof module === 'object')
+ module.exports = factory();
+ else if(typeof define === 'function' && define.amd)
+ define([], factory);
+ else if(typeof exports === 'object')
+ exports["Hls"] = factory();
+ else
+ root["Hls"] = factory();
+})(this, function() {
+return /******/ (function(modules) { // webpackBootstrap
+/******/ // The module cache
+/******/ var installedModules = {};
+/******/
+/******/ // The require function
+/******/ function __webpack_require__(moduleId) {
+/******/
+/******/ // Check if module is in cache
+/******/ if(installedModules[moduleId]) {
+/******/ return installedModules[moduleId].exports;
+/******/ }
+/******/ // Create a new module (and put it into the cache)
+/******/ var module = installedModules[moduleId] = {
+/******/ i: moduleId,
+/******/ l: false,
+/******/ exports: {}
+/******/ };
+/******/
+/******/ // Execute the module function
+/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
+/******/
+/******/ // Flag the module as loaded
+/******/ module.l = true;
+/******/
+/******/ // Return the exports of the module
+/******/ return module.exports;
+/******/ }
+/******/
+/******/
+/******/ // expose the modules object (__webpack_modules__)
+/******/ __webpack_require__.m = modules;
+/******/
+/******/ // expose the module cache
+/******/ __webpack_require__.c = installedModules;
+/******/
+/******/ // define getter function for harmony exports
+/******/ __webpack_require__.d = function(exports, name, getter) {
+/******/ if(!__webpack_require__.o(exports, name)) {
+/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
+/******/ }
+/******/ };
+/******/
+/******/ // define __esModule on exports
+/******/ __webpack_require__.r = function(exports) {
+/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
+/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
+/******/ }
+/******/ Object.defineProperty(exports, '__esModule', { value: true });
+/******/ };
+/******/
+/******/ // create a fake namespace object
+/******/ // mode & 1: value is a module id, require it
+/******/ // mode & 2: merge all properties of value into the ns
+/******/ // mode & 4: return value when already ns object
+/******/ // mode & 8|1: behave like require
+/******/ __webpack_require__.t = function(value, mode) {
+/******/ if(mode & 1) value = __webpack_require__(value);
+/******/ if(mode & 8) return value;
+/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
+/******/ var ns = Object.create(null);
+/******/ __webpack_require__.r(ns);
+/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
+/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
+/******/ return ns;
+/******/ };
+/******/
+/******/ // getDefaultExport function for compatibility with non-harmony modules
+/******/ __webpack_require__.n = function(module) {
+/******/ var getter = module && module.__esModule ?
+/******/ function getDefault() { return module['default']; } :
+/******/ function getModuleExports() { return module; };
+/******/ __webpack_require__.d(getter, 'a', getter);
+/******/ return getter;
+/******/ };
+/******/
+/******/ // Object.prototype.hasOwnProperty.call
+/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
+/******/
+/******/ // __webpack_public_path__
+/******/ __webpack_require__.p = "/dist/";
+/******/
+/******/
+/******/ // Load entry module and return exports
+/******/ return __webpack_require__(__webpack_require__.s = "./src/hls.ts");
+/******/ })
+/************************************************************************/
+/******/ ({
+
+/***/ "./node_modules/eventemitter3/index.js":
+/*!*********************************************!*\
+ !*** ./node_modules/eventemitter3/index.js ***!
+ \*********************************************/
+/*! no static exports found */
+/*! ModuleConcatenation bailout: Module is not an ECMAScript module */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+var has = Object.prototype.hasOwnProperty
+ , prefix = '~';
+
+/**
+ * Constructor to create a storage for our `EE` objects.
+ * An `Events` instance is a plain object whose properties are event names.
+ *
+ * @constructor
+ * @private
+ */
+function Events() {}
+
+//
+// We try to not inherit from `Object.prototype`. In some engines creating an
+// instance in this way is faster than calling `Object.create(null)` directly.
+// If `Object.create(null)` is not supported we prefix the event names with a
+// character to make sure that the built-in object properties are not
+// overridden or used as an attack vector.
+//
+if (Object.create) {
+ Events.prototype = Object.create(null);
+
+ //
+ // This hack is needed because the `__proto__` property is still inherited in
+ // some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5.
+ //
+ if (!new Events().__proto__) prefix = false;
+}
+
+/**
+ * Representation of a single event listener.
+ *
+ * @param {Function} fn The listener function.
+ * @param {*} context The context to invoke the listener with.
+ * @param {Boolean} [once=false] Specify if the listener is a one-time listener.
+ * @constructor
+ * @private
+ */
+function EE(fn, context, once) {
+ this.fn = fn;
+ this.context = context;
+ this.once = once || false;
+}
+
+/**
+ * Add a listener for a given event.
+ *
+ * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
+ * @param {(String|Symbol)} event The event name.
+ * @param {Function} fn The listener function.
+ * @param {*} context The context to invoke the listener with.
+ * @param {Boolean} once Specify if the listener is a one-time listener.
+ * @returns {EventEmitter}
+ * @private
+ */
+function addListener(emitter, event, fn, context, once) {
+ if (typeof fn !== 'function') {
+ throw new TypeError('The listener must be a function');
+ }
+
+ var listener = new EE(fn, context || emitter, once)
+ , evt = prefix ? prefix + event : event;
+
+ if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++;
+ else if (!emitter._events[evt].fn) emitter._events[evt].push(listener);
+ else emitter._events[evt] = [emitter._events[evt], listener];
+
+ return emitter;
+}
+
+/**
+ * Clear event by name.
+ *
+ * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
+ * @param {(String|Symbol)} evt The Event name.
+ * @private
+ */
+function clearEvent(emitter, evt) {
+ if (--emitter._eventsCount === 0) emitter._events = new Events();
+ else delete emitter._events[evt];
+}
+
+/**
+ * Minimal `EventEmitter` interface that is molded against the Node.js
+ * `EventEmitter` interface.
+ *
+ * @constructor
+ * @public
+ */
+function EventEmitter() {
+ this._events = new Events();
+ this._eventsCount = 0;
+}
+
+/**
+ * Return an array listing the events for which the emitter has registered
+ * listeners.
+ *
+ * @returns {Array}
+ * @public
+ */
+EventEmitter.prototype.eventNames = function eventNames() {
+ var names = []
+ , events
+ , name;
+
+ if (this._eventsCount === 0) return names;
+
+ for (name in (events = this._events)) {
+ if (has.call(events, name)) names.push(prefix ? name.slice(1) : name);
+ }
+
+ if (Object.getOwnPropertySymbols) {
+ return names.concat(Object.getOwnPropertySymbols(events));
+ }
+
+ return names;
+};
+
+/**
+ * Return the listeners registered for a given event.
+ *
+ * @param {(String|Symbol)} event The event name.
+ * @returns {Array} The registered listeners.
+ * @public
+ */
+EventEmitter.prototype.listeners = function listeners(event) {
+ var evt = prefix ? prefix + event : event
+ , handlers = this._events[evt];
+
+ if (!handlers) return [];
+ if (handlers.fn) return [handlers.fn];
+
+ for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {
+ ee[i] = handlers[i].fn;
+ }
+
+ return ee;
+};
+
+/**
+ * Return the number of listeners listening to a given event.
+ *
+ * @param {(String|Symbol)} event The event name.
+ * @returns {Number} The number of listeners.
+ * @public
+ */
+EventEmitter.prototype.listenerCount = function listenerCount(event) {
+ var evt = prefix ? prefix + event : event
+ , listeners = this._events[evt];
+
+ if (!listeners) return 0;
+ if (listeners.fn) return 1;
+ return listeners.length;
+};
+
+/**
+ * Calls each of the listeners registered for a given event.
+ *
+ * @param {(String|Symbol)} event The event name.
+ * @returns {Boolean} `true` if the event had listeners, else `false`.
+ * @public
+ */
+EventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {
+ var evt = prefix ? prefix + event : event;
+
+ if (!this._events[evt]) return false;
+
+ var listeners = this._events[evt]
+ , len = arguments.length
+ , args
+ , i;
+
+ if (listeners.fn) {
+ if (listeners.once) this.removeListener(event, listeners.fn, undefined, true);
+
+ switch (len) {
+ case 1: return listeners.fn.call(listeners.context), true;
+ case 2: return listeners.fn.call(listeners.context, a1), true;
+ case 3: return listeners.fn.call(listeners.context, a1, a2), true;
+ case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true;
+ case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;
+ case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;
+ }
+
+ for (i = 1, args = new Array(len -1); i < len; i++) {
+ args[i - 1] = arguments[i];
+ }
+
+ listeners.fn.apply(listeners.context, args);
+ } else {
+ var length = listeners.length
+ , j;
+
+ for (i = 0; i < length; i++) {
+ if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true);
+
+ switch (len) {
+ case 1: listeners[i].fn.call(listeners[i].context); break;
+ case 2: listeners[i].fn.call(listeners[i].context, a1); break;
+ case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break;
+ case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break;
+ default:
+ if (!args) for (j = 1, args = new Array(len -1); j < len; j++) {
+ args[j - 1] = arguments[j];
+ }
+
+ listeners[i].fn.apply(listeners[i].context, args);
+ }
+ }
+ }
+
+ return true;
+};
+
+/**
+ * Add a listener for a given event.
+ *
+ * @param {(String|Symbol)} event The event name.
+ * @param {Function} fn The listener function.
+ * @param {*} [context=this] The context to invoke the listener with.
+ * @returns {EventEmitter} `this`.
+ * @public
+ */
+EventEmitter.prototype.on = function on(event, fn, context) {
+ return addListener(this, event, fn, context, false);
+};
+
+/**
+ * Add a one-time listener for a given event.
+ *
+ * @param {(String|Symbol)} event The event name.
+ * @param {Function} fn The listener function.
+ * @param {*} [context=this] The context to invoke the listener with.
+ * @returns {EventEmitter} `this`.
+ * @public
+ */
+EventEmitter.prototype.once = function once(event, fn, context) {
+ return addListener(this, event, fn, context, true);
+};
+
+/**
+ * Remove the listeners of a given event.
+ *
+ * @param {(String|Symbol)} event The event name.
+ * @param {Function} fn Only remove the listeners that match this function.
+ * @param {*} context Only remove the listeners that have this context.
+ * @param {Boolean} once Only remove one-time listeners.
+ * @returns {EventEmitter} `this`.
+ * @public
+ */
+EventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) {
+ var evt = prefix ? prefix + event : event;
+
+ if (!this._events[evt]) return this;
+ if (!fn) {
+ clearEvent(this, evt);
+ return this;
+ }
+
+ var listeners = this._events[evt];
+
+ if (listeners.fn) {
+ if (
+ listeners.fn === fn &&
+ (!once || listeners.once) &&
+ (!context || listeners.context === context)
+ ) {
+ clearEvent(this, evt);
+ }
+ } else {
+ for (var i = 0, events = [], length = listeners.length; i < length; i++) {
+ if (
+ listeners[i].fn !== fn ||
+ (once && !listeners[i].once) ||
+ (context && listeners[i].context !== context)
+ ) {
+ events.push(listeners[i]);
+ }
+ }
+
+ //
+ // Reset the array, or remove it completely if we have no more listeners.
+ //
+ if (events.length) this._events[evt] = events.length === 1 ? events[0] : events;
+ else clearEvent(this, evt);
+ }
+
+ return this;
+};
+
+/**
+ * Remove all listeners, or those of the specified event.
+ *
+ * @param {(String|Symbol)} [event] The event name.
+ * @returns {EventEmitter} `this`.
+ * @public
+ */
+EventEmitter.prototype.removeAllListeners = function removeAllListeners(event) {
+ var evt;
+
+ if (event) {
+ evt = prefix ? prefix + event : event;
+ if (this._events[evt]) clearEvent(this, evt);
+ } else {
+ this._events = new Events();
+ this._eventsCount = 0;
+ }
+
+ return this;
+};
+
+//
+// Alias methods names because people roll like that.
+//
+EventEmitter.prototype.off = EventEmitter.prototype.removeListener;
+EventEmitter.prototype.addListener = EventEmitter.prototype.on;
+
+//
+// Expose the prefix.
+//
+EventEmitter.prefixed = prefix;
+
+//
+// Allow `EventEmitter` to be imported as module namespace.
+//
+EventEmitter.EventEmitter = EventEmitter;
+
+//
+// Expose the module.
+//
+if (true) {
+ module.exports = EventEmitter;
+}
+
+
+/***/ }),
+
+/***/ "./node_modules/url-toolkit/src/url-toolkit.js":
+/*!*****************************************************!*\
+ !*** ./node_modules/url-toolkit/src/url-toolkit.js ***!
+ \*****************************************************/
+/*! no static exports found */
+/*! ModuleConcatenation bailout: Module is not an ECMAScript module */
+/***/ (function(module, exports, __webpack_require__) {
+
+// see https://tools.ietf.org/html/rfc1808
+
+/* jshint ignore:start */
+(function(root) {
+/* jshint ignore:end */
+
+ var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/\?#]*\/)*.*?)??(;.*?)?(\?.*?)?(#.*?)?$/;
+ var FIRST_SEGMENT_REGEX = /^([^\/?#]*)(.*)$/;
+ var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
+ var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/).*?(?=\/)/g;
+
+ var URLToolkit = { // jshint ignore:line
+ // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
+ // E.g
+ // With opts.alwaysNormalize = false (default, spec compliant)
+ // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
+ // With opts.alwaysNormalize = true (not spec compliant)
+ // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
+ buildAbsoluteURL: function(baseURL, relativeURL, opts) {
+ opts = opts || {};
+ // remove any remaining space and CRLF
+ baseURL = baseURL.trim();
+ relativeURL = relativeURL.trim();
+ if (!relativeURL) {
+ // 2a) If the embedded URL is entirely empty, it inherits the
+ // entire base URL (i.e., is set equal to the base URL)
+ // and we are done.
+ if (!opts.alwaysNormalize) {
+ return baseURL;
+ }
+ var basePartsForNormalise = URLToolkit.parseURL(baseURL);
+ if (!basePartsForNormalise) {
+ throw new Error('Error trying to parse base URL.');
+ }
+ basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
+ return URLToolkit.buildURLFromParts(basePartsForNormalise);
+ }
+ var relativeParts = URLToolkit.parseURL(relativeURL);
+ if (!relativeParts) {
+ throw new Error('Error trying to parse relative URL.');
+ }
+ if (relativeParts.scheme) {
+ // 2b) If the embedded URL starts with a scheme name, it is
+ // interpreted as an absolute URL and we are done.
+ if (!opts.alwaysNormalize) {
+ return relativeURL;
+ }
+ relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
+ return URLToolkit.buildURLFromParts(relativeParts);
+ }
+ var baseParts = URLToolkit.parseURL(baseURL);
+ if (!baseParts) {
+ throw new Error('Error trying to parse base URL.');
+ }
+ if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
+ // If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc
+ // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
+ var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
+ baseParts.netLoc = pathParts[1];
+ baseParts.path = pathParts[2];
+ }
+ if (baseParts.netLoc && !baseParts.path) {
+ baseParts.path = '/';
+ }
+ var builtParts = {
+ // 2c) Otherwise, the embedded URL inherits the scheme of
+ // the base URL.
+ scheme: baseParts.scheme,
+ netLoc: relativeParts.netLoc,
+ path: null,
+ params: relativeParts.params,
+ query: relativeParts.query,
+ fragment: relativeParts.fragment
+ };
+ if (!relativeParts.netLoc) {
+ // 3) If the embedded URL's is non-empty, we skip to
+ // Step 7. Otherwise, the embedded URL inherits the
+ // (if any) of the base URL.
+ builtParts.netLoc = baseParts.netLoc;
+ // 4) If the embedded URL path is preceded by a slash "/", the
+ // path is not relative and we skip to Step 7.
+ if (relativeParts.path[0] !== '/') {
+ if (!relativeParts.path) {
+ // 5) If the embedded URL path is empty (and not preceded by a
+ // slash), then the embedded URL inherits the base URL path
+ builtParts.path = baseParts.path;
+ // 5a) if the embedded URL's is non-empty, we skip to
+ // step 7; otherwise, it inherits the of the base
+ // URL (if any) and
+ if (!relativeParts.params) {
+ builtParts.params = baseParts.params;
+ // 5b) if the embedded URL's is non-empty, we skip to
+ // step 7; otherwise, it inherits the of the base
+ // URL (if any) and we skip to step 7.
+ if (!relativeParts.query) {
+ builtParts.query = baseParts.query;
+ }
+ }
+ } else {
+ // 6) The last segment of the base URL's path (anything
+ // following the rightmost slash "/", or the entire path if no
+ // slash is present) is removed and the embedded URL's path is
+ // appended in its place.
+ var baseURLPath = baseParts.path;
+ var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
+ builtParts.path = URLToolkit.normalizePath(newPath);
+ }
+ }
+ }
+ if (builtParts.path === null) {
+ builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
+ }
+ return URLToolkit.buildURLFromParts(builtParts);
+ },
+ parseURL: function(url) {
+ var parts = URL_REGEX.exec(url);
+ if (!parts) {
+ return null;
+ }
+ return {
+ scheme: parts[1] || '',
+ netLoc: parts[2] || '',
+ path: parts[3] || '',
+ params: parts[4] || '',
+ query: parts[5] || '',
+ fragment: parts[6] || ''
+ };
+ },
+ normalizePath: function(path) {
+ // The following operations are
+ // then applied, in order, to the new path:
+ // 6a) All occurrences of "./", where "." is a complete path
+ // segment, are removed.
+ // 6b) If the path ends with "." as a complete path segment,
+ // that "." is removed.
+ path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, '');
+ // 6c) All occurrences of "/../", where is a
+ // complete path segment not equal to "..", are removed.
+ // Removal of these path segments is performed iteratively,
+ // removing the leftmost matching pattern on each iteration,
+ // until no matching pattern remains.
+ // 6d) If the path ends with "/..", where is a
+ // complete path segment not equal to "..", that
+ // "/.." is removed.
+ while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {} // jshint ignore:line
+ return path.split('').reverse().join('');
+ },
+ buildURLFromParts: function(parts) {
+ return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
+ }
+ };
+
+/* jshint ignore:start */
+ if(true)
+ module.exports = URLToolkit;
+ else {}
+})(this);
+/* jshint ignore:end */
+
+
+/***/ }),
+
+/***/ "./src/controller/stream-controller.js":
+/*!*********************************************!*\
+ !*** ./src/controller/stream-controller.js ***!
+ \*********************************************/
+/*! no static exports found */
+/*! ModuleConcatenation bailout: Module is not an ECMAScript module */
+/***/ (function(module, exports) {
+
+throw new Error("Module build failed (from ./node_modules/babel-loader/lib/index.js):\nSyntaxError: /Users/savlan/work/hls.js/src/controller/stream-controller.js: Support for the experimental syntax 'optionalChaining' isn't currently enabled (102:44):\n\n\u001b[0m \u001b[90m 100 | \u001b[39m \u001b[36mbreak\u001b[39m\u001b[33m;\u001b[39m\u001b[0m\n\u001b[0m \u001b[90m 101 | \u001b[39m \u001b[36mcase\u001b[39m \u001b[33mState\u001b[39m\u001b[33m.\u001b[39m\u001b[33mWAITING_LEVEL\u001b[39m\u001b[33m:\u001b[39m\u001b[0m\n\u001b[0m\u001b[31m\u001b[1m>\u001b[22m\u001b[39m\u001b[90m 102 | \u001b[39m \u001b[36mvar\u001b[39m details \u001b[33m=\u001b[39m \u001b[36mthis\u001b[39m\u001b[33m.\u001b[39mlevels[\u001b[36mthis\u001b[39m\u001b[33m.\u001b[39mlevel]\u001b[33m?\u001b[39m\u001b[33m.\u001b[39mdetails\u001b[33m;\u001b[39m\u001b[0m\n\u001b[0m \u001b[90m | \u001b[39m \u001b[31m\u001b[1m^\u001b[22m\u001b[39m\u001b[0m\n\u001b[0m \u001b[90m 103 | \u001b[39m \u001b[90m// check if playlist is already loaded (must be current level for live)\u001b[39m\u001b[0m\n\u001b[0m \u001b[90m 104 | \u001b[39m \u001b[36mif\u001b[39m (details \u001b[33m&&\u001b[39m (\u001b[33m!\u001b[39mdetails\u001b[33m.\u001b[39mlive \u001b[33m||\u001b[39m \u001b[36mthis\u001b[39m\u001b[33m.\u001b[39mlevelLastLoaded \u001b[33m===\u001b[39m \u001b[36mthis\u001b[39m\u001b[33m.\u001b[39mlevel)) {\u001b[0m\n\u001b[0m \u001b[90m 105 | \u001b[39m \u001b[36mthis\u001b[39m\u001b[33m.\u001b[39mstate \u001b[33m=\u001b[39m \u001b[33mState\u001b[39m\u001b[33m.\u001b[39m\u001b[33mIDLE\u001b[39m\u001b[33m;\u001b[39m\u001b[0m\n\nAdd @babel/plugin-proposal-optional-chaining (https://git.io/vb4Sk) to the 'plugins' section of your Babel config to enable transformation.\n at Parser.raise (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:6322:17)\n at Parser.expectPlugin (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:7643:18)\n at Parser.parseSubscript 
(/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8420:12)\n at Parser.parseSubscripts (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8406:19)\n at Parser.parseExprSubscripts (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8395:17)\n at Parser.parseMaybeUnary (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8365:21)\n at Parser.parseExprOps (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8252:23)\n at Parser.parseMaybeConditional (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8225:23)\n at Parser.parseMaybeAssign (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:8172:21)\n at Parser.parseVar (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10415:26)\n at Parser.parseVarStatement (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10234:10)\n at Parser.parseStatementContent (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9830:21)\n at Parser.parseStatement (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9763:17)\n at Parser.parseSwitchStatement (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10170:36)\n at Parser.parseStatementContent (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9814:21)\n at Parser.parseStatement (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9763:17)\n at Parser.parseBlockOrModuleBlockBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10340:25)\n at Parser.parseBlockBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10327:10)\n at Parser.parseBlock (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10311:10)\n at Parser.parseFunctionBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9382:24)\n at Parser.parseFunctionBodyAndFinish (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9352:10)\n at 
Parser.parseMethod (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9306:10)\n at Parser.pushClassMethod (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10743:30)\n at Parser.parseClassMemberWithIsStatic (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10668:12)\n at Parser.parseClassMember (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10607:10)\n at /Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10562:14\n at Parser.withTopicForbiddingContext (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9657:14)\n at Parser.parseClassBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10539:10)\n at Parser.parseClass (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10513:22)\n at Parser.parseStatementContent (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9805:21)\n at Parser.parseStatement (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9763:17)\n at Parser.parseBlockOrModuleBlockBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10340:25)\n at Parser.parseBlockBody (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:10327:10)\n at Parser.parseTopLevel (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:9692:10)\n at Parser.parse (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:11209:17)\n at parse (/Users/savlan/work/hls.js/node_modules/@babel/parser/lib/index.js:11245:38)\n at parser (/Users/savlan/work/hls.js/node_modules/@babel/core/lib/transformation/normalize-file.js:170:34)\n at normalizeFile (/Users/savlan/work/hls.js/node_modules/@babel/core/lib/transformation/normalize-file.js:138:11)\n at runSync (/Users/savlan/work/hls.js/node_modules/@babel/core/lib/transformation/index.js:44:43)\n at runAsync (/Users/savlan/work/hls.js/node_modules/@babel/core/lib/transformation/index.js:35:14)\n at 
/Users/savlan/work/hls.js/node_modules/@babel/core/lib/transform.js:34:34\n at processTicksAndRejections (internal/process/task_queues.js:79:11)");
+
+/***/ }),
+
+/***/ "./src/empty.js":
+/*!**********************!*\
+ !*** ./src/empty.js ***!
+ \**********************/
+/*! no static exports found */
+/*! ModuleConcatenation bailout: Module is not an ECMAScript module */
+/***/ (function(module, exports) {
+
+// This file is inserted as a shim for modules which we do not want to include into the distro.
+// This replacement is done in the "resolve" section of the webpack config.
+module.exports = void 0;
+
+/***/ }),
+
+/***/ "./src/hls.ts":
+/*!*********************************!*\
+ !*** ./src/hls.ts + 36 modules ***!
+ \*********************************/
+/*! exports provided: default */
+/*! ModuleConcatenation bailout: Cannot concat with ./src/controller/stream-controller.js (<- Module is not an ECMAScript module) */
+/*! ModuleConcatenation bailout: Cannot concat with ./node_modules/eventemitter3/index.js (<- Module is not an ECMAScript module) */
+/*! ModuleConcatenation bailout: Cannot concat with ./node_modules/url-toolkit/src/url-toolkit.js (<- Module is not an ECMAScript module) */
+/***/ (function(module, __webpack_exports__, __webpack_require__) {
+
+"use strict";
+__webpack_require__.r(__webpack_exports__);
+
+// EXTERNAL MODULE: ./node_modules/url-toolkit/src/url-toolkit.js
+var url_toolkit = __webpack_require__("./node_modules/url-toolkit/src/url-toolkit.js");
+
+// CONCATENATED MODULE: ./src/errors.ts
/**
 * Top-level error categories attached to ERROR event payloads.
 * @enum {string}
 */
var ErrorTypes = {
  NETWORK_ERROR: 'networkError',
  MEDIA_ERROR: 'mediaError',
  KEY_SYSTEM_ERROR: 'keySystemError',
  MUX_ERROR: 'muxError',
  OTHER_ERROR: 'otherError'
};

/**
 * Fine-grained error identifiers; each maps to one ErrorTypes category.
 * @enum {string}
 * @typedef {string} ErrorDetail
 */
var ErrorDetails = {
  KEY_SYSTEM_NO_KEYS: 'keySystemNoKeys',
  KEY_SYSTEM_NO_ACCESS: 'keySystemNoAccess',
  KEY_SYSTEM_NO_SESSION: 'keySystemNoSession',
  KEY_SYSTEM_LICENSE_REQUEST_FAILED: 'keySystemLicenseRequestFailed',
  KEY_SYSTEM_NO_INIT_DATA: 'keySystemNoInitData',
  MANIFEST_LOAD_ERROR: 'manifestLoadError',
  MANIFEST_LOAD_TIMEOUT: 'manifestLoadTimeOut',
  MANIFEST_PARSING_ERROR: 'manifestParsingError',
  MANIFEST_INCOMPATIBLE_CODECS_ERROR: 'manifestIncompatibleCodecsError',
  LEVEL_LOAD_ERROR: 'levelLoadError',
  LEVEL_LOAD_TIMEOUT: 'levelLoadTimeOut',
  LEVEL_SWITCH_ERROR: 'levelSwitchError',
  AUDIO_TRACK_LOAD_ERROR: 'audioTrackLoadError',
  AUDIO_TRACK_LOAD_TIMEOUT: 'audioTrackLoadTimeOut',
  FRAG_LOAD_ERROR: 'fragLoadError',
  FRAG_LOAD_TIMEOUT: 'fragLoadTimeOut',
  FRAG_DECRYPT_ERROR: 'fragDecryptError',
  FRAG_PARSING_ERROR: 'fragParsingError',
  REMUX_ALLOC_ERROR: 'remuxAllocError',
  KEY_LOAD_ERROR: 'keyLoadError',
  KEY_LOAD_TIMEOUT: 'keyLoadTimeOut',
  BUFFER_ADD_CODEC_ERROR: 'bufferAddCodecError',
  BUFFER_APPEND_ERROR: 'bufferAppendError',
  BUFFER_APPENDING_ERROR: 'bufferAppendingError',
  BUFFER_STALLED_ERROR: 'bufferStalledError',
  BUFFER_FULL_ERROR: 'bufferFullError',
  BUFFER_SEEK_OVER_HOLE: 'bufferSeekOverHole',
  BUFFER_NUDGE_ON_STALL: 'bufferNudgeOnStall',
  INTERNAL_EXCEPTION: 'internalException'
};
+// CONCATENATED MODULE: ./src/polyfills/number-isFinite.js
// Number.isFinite ponyfill: true only for finite primitive numbers.
// Unlike the global isFinite(), it never coerces (so '42' is rejected).
var isFiniteNumber = Number.isFinite || function (value) {
  if (typeof value !== 'number') {
    return false;
  }
  return isFinite(value);
};
+// CONCATENATED MODULE: ./src/events.js
var _HlsEvents;

/**
 * All event names fired on an Hls instance. Payload shapes are described
 * per key; every value is the literal event-name string.
 * @readonly
 * @enum {string}
 */
var HlsEvents = _HlsEvents = {
  // media element attach lifecycle
  MEDIA_ATTACHING: 'hlsMediaAttaching', // before MediaSource attaches - { media }
  MEDIA_ATTACHED: 'hlsMediaAttached', // MediaSource attached - { }
  MEDIA_DETACHING: 'hlsMediaDetaching', // before MediaSource detaches - { }
  MEDIA_DETACHED: 'hlsMediaDetached', // MediaSource detached - { }
  // SourceBuffer lifecycle
  BUFFER_RESET: 'hlsBufferReset', // buffer is about to be reset - { }
  BUFFER_CODECS: 'hlsBufferCodecs', // codecs known - { tracks: { container, codec, levelCodec, initSegment, metadata } }
  BUFFER_CREATED: 'hlsBufferCreated', // source buffers created - { tracks }
  BUFFER_APPENDING: 'hlsBufferAppending', // segment append requested - { segment }
  BUFFER_APPENDED: 'hlsBufferAppended', // segment append done - { parent, pending }
  BUFFER_EOS: 'hlsBufferEos', // stream finished, no more data - { }
  BUFFER_FLUSHING: 'hlsBufferFlushing', // flush requested - { startOffset, endOffset }
  BUFFER_FLUSHED: 'hlsBufferFlushed', // flush done - { }
  // manifest lifecycle
  MANIFEST_LOADING: 'hlsManifestLoading', // { url }
  MANIFEST_LOADED: 'hlsManifestLoaded', // { levels, audioTracks, url, stats }
  MANIFEST_PARSED: 'hlsManifestParsed', // { levels, firstLevel }
  // quality-level lifecycle
  LEVEL_SWITCHING: 'hlsLevelSwitching', // switch requested - { level }
  LEVEL_SWITCHED: 'hlsLevelSwitched', // switch effective - { level }
  LEVEL_LOADING: 'hlsLevelLoading', // { url, level }
  LEVEL_LOADED: 'hlsLevelLoaded', // { details, level, stats }
  LEVEL_UPDATED: 'hlsLevelUpdated', // { details, level }
  LEVEL_PTS_UPDATED: 'hlsLevelPtsUpdated', // { details, level, drift }
  LIVE_BACK_BUFFER_REACHED: 'hlsLiveBackBufferReached', // liveBackBufferLength hit - { bufferEnd }
  // audio tracks
  AUDIO_TRACKS_UPDATED: 'hlsAudioTracksUpdated', // { audioTracks }
  AUDIO_TRACK_SWITCHING: 'hlsAudioTrackSwitching', // { id }
  AUDIO_TRACK_SWITCHED: 'hlsAudioTrackSwitched', // { id }
  AUDIO_TRACK_LOADING: 'hlsAudioTrackLoading', // { url, id }
  AUDIO_TRACK_LOADED: 'hlsAudioTrackLoaded', // { details, id, stats }
  // subtitle tracks
  SUBTITLE_TRACKS_UPDATED: 'hlsSubtitleTracksUpdated', // { subtitleTracks }
  SUBTITLE_TRACK_SWITCH: 'hlsSubtitleTrackSwitch', // { id }
  SUBTITLE_TRACK_LOADING: 'hlsSubtitleTrackLoading', // { url, id }
  SUBTITLE_TRACK_LOADED: 'hlsSubtitleTrackLoaded', // { details, id, stats }
  SUBTITLE_FRAG_PROCESSED: 'hlsSubtitleFragProcessed', // { success, frag }
  // fragment lifecycle
  INIT_PTS_FOUND: 'hlsInitPtsFound', // first timestamp found - { id, initPTS, frag }
  FRAG_LOADING: 'hlsFragLoading', // { frag }
  FRAG_LOAD_PROGRESS: 'hlsFragLoadProgress', // { frag, stats }
  FRAG_LOAD_EMERGENCY_ABORTED: 'hlsFragLoadEmergencyAborted', // aborted for emergency switch down - { frag }
  FRAG_LOADED: 'hlsFragLoaded', // { frag, payload, stats }
  FRAG_DECRYPTED: 'hlsFragDecrypted', // { id, frag, payload, stats }
  FRAG_PARSING_INIT_SEGMENT: 'hlsFragParsingInitSegment', // { id, frag, moov, codecs }
  FRAG_PARSING_USERDATA: 'hlsFragParsingUserdata', // sei text parsed - { id, frag, samples }
  FRAG_PARSING_METADATA: 'hlsFragParsingMetadata', // id3 parsed - { id, frag, samples }
  FRAG_PARSING_DATA: 'hlsFragParsingData', // { id, frag, data1, data2 }
  FRAG_PARSED: 'hlsFragParsed', // { id, frag }
  FRAG_BUFFERED: 'hlsFragBuffered', // { id, frag, stats }
  FRAG_CHANGED: 'hlsFragChanged', // frag at playback position changed - { id, frag }
  // fps monitoring
  FPS_DROP: 'hlsFpsDrop', // { currentDropped, currentDecoded, totalDroppedFrames }
  FPS_DROP_LEVEL_CAPPING: 'hlsFpsDropLevelCapping', // { level, droppedlevel }
  // errors / teardown
  ERROR: 'hlsError', // { type, details, fatal, ... }
  DESTROYING: 'hlsDestroying', // instance teardown started - { }
  // decrypt keys
  KEY_LOADING: 'hlsKeyLoading', // { frag }
  KEY_LOADED: 'hlsKeyLoaded', // { frag, payload, stats }
  // stream controller state machine
  STREAM_STATE_TRANSITION: 'hlsStreamStateTransition' // { previousState, nextState }
};
/* harmony default export */ var events = (HlsEvents);
+// CONCATENATED MODULE: ./src/utils/get-self-scope.js
// Return the global scope object: `self` in workers, `window` elsewhere.
// see https://stackoverflow.com/a/11237259/589493
function getSelfScope() {
  /* eslint-disable-next-line no-undef */
  return typeof window === 'undefined' ? self : window;
}
+// CONCATENATED MODULE: ./src/utils/logger.js
+
+
// Shared no-op used to silence individual log levels.
function noop() {}

// Logger whose every level is a no-op; stays in effect until enableLogs()
// swaps in real console-backed functions.
var fakeLogger = {
  trace: noop,
  debug: noop,
  log: noop,
  warn: noop,
  info: noop,
  error: noop
};

// The logger currently in effect (silenced by default).
var exportedLogger = fakeLogger;

// Prefix a message with its log level, e.g. ('warn', 'hi') -> '[warn] > hi'.
function formatMsg(type, msg) {
  return '[' + type + '] > ' + msg;
}
+
var logger_global = getSelfScope();

// Build a logging function for `type` ('log', 'warn', ...) that delegates to
// the corresponding console method, prefixing the first argument with the
// level. Returns noop when the console lacks that method.
function consolePrintFn(type) {
  var func = logger_global.console[type];

  if (!func) {
    return noop;
  }

  return function () {
    var args = Array.prototype.slice.call(arguments);

    if (args[0]) {
      args[0] = formatMsg(type, args[0]);
    }

    func.apply(logger_global.console, args);
  };
}
+
// Install one function per requested log level on exportedLogger: a custom
// function from debugConfig when provided, else a console-backed printer.
function exportLoggerFunctions(debugConfig) {
  var functions = Array.prototype.slice.call(arguments, 1);

  functions.forEach(function (type) {
    var custom = debugConfig[type];
    exportedLogger[type] = custom ? custom.bind(debugConfig) : consolePrintFn(type);
  });
}
+
// Enable logging: pass `true` for console output, an object to supply custom
// per-level functions, anything else to silence the logger.
var enableLogs = function enableLogs(debugConfig) {
  // NOTE(review): parentheses make the original operator precedence explicit —
  // the console check only guards the `debugConfig === true` branch; an object
  // config always enters. Confirm this asymmetry is intended.
  if ((logger_global.console && debugConfig === true) || typeof debugConfig === 'object') {
    exportLoggerFunctions(debugConfig,
    // 'trace' is intentionally omitted here to hard-disable that level
    'debug', 'log', 'info', 'warn', 'error');

    // Some browsers don't allow bind on the console object; probe once and
    // fall back to the silent logger if it blows up.
    try {
      exportedLogger.log();
    } catch (e) {
      exportedLogger = fakeLogger;
    }
  } else {
    exportedLogger = fakeLogger;
  }
};

// Snapshot of the logger taken at module-evaluation time.
var logger = exportedLogger;
+// CONCATENATED MODULE: ./src/event-handler.ts
+/*
+*
+* All objects in the event handling chain should inherit from this class
+*
+*/
+
+
+
// Event names that handlers may never subscribe to directly; registering one
// throws in registerListeners below.
var FORBIDDEN_EVENT_NAMES = {
  'hlsEventGeneric': true,
  'hlsHandlerDestroying': true,
  'hlsHandlerDestroyed': true
};

// Base class for every object in the event-handling chain. Subclasses pass
// the hls instance plus the event names they handle; each event `hlsFooBar`
// is dispatched to a subclass method named `onFooBar`.
var event_handler_EventHandler =
/*#__PURE__*/
function () {
  // @param hls - owning Hls instance (provides on/off/trigger)
  // remaining arguments: event-name strings this handler subscribes to
  function EventHandler(hls) {
    this.hls = void 0;
    this.handledEvents = void 0;
    this.useGenericHandler = void 0;
    this.hls = hls;
    // bind once so on/off receive the identical function reference
    this.onEvent = this.onEvent.bind(this);

    // collect the rest arguments (transpiled rest-parameter loop)
    for (var _len = arguments.length, events = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
      events[_key - 1] = arguments[_key];
    }

    this.handledEvents = events;
    this.useGenericHandler = true;
    this.registerListeners();
  }

  var _proto = EventHandler.prototype;

  // Tear down: subclass pre-hook, unsubscribe, subclass post-hook.
  _proto.destroy = function destroy() {
    this.onHandlerDestroying();
    this.unregisterListeners();
    this.onHandlerDestroyed();
  };

  // Lifecycle hooks for subclasses; intentionally empty here.
  _proto.onHandlerDestroying = function onHandlerDestroying() {};

  _proto.onHandlerDestroyed = function onHandlerDestroyed() {};

  // True when this instance is wired to handle events (non-empty event list
  // and a callable onEvent).
  _proto.isEventHandler = function isEventHandler() {
    return typeof this.handledEvents === 'object' && this.handledEvents.length && typeof this.onEvent === 'function';
  };

  // Subscribe onEvent to every handled event; rejects forbidden names.
  _proto.registerListeners = function registerListeners() {
    if (this.isEventHandler()) {
      this.handledEvents.forEach(function (event) {
        if (FORBIDDEN_EVENT_NAMES[event]) {
          throw new Error('Forbidden event-name: ' + event);
        }

        this.hls.on(event, this.onEvent);
      }, this);
    }
  };

  // Unsubscribe onEvent from every handled event.
  _proto.unregisterListeners = function unregisterListeners() {
    if (this.isEventHandler()) {
      this.handledEvents.forEach(function (event) {
        this.hls.off(event, this.onEvent);
      }, this);
    }
  }
  /**
   * arguments: event (string), data (any)
   */
  ;

  _proto.onEvent = function onEvent(event, data) {
    this.onEventGeneric(event, data);
  };

  // Map the event name to a subclass method ('hlsFooBar' -> onFooBar) and
  // invoke it; any throw is logged and re-emitted as a non-fatal
  // INTERNAL_EXCEPTION error event instead of propagating.
  _proto.onEventGeneric = function onEventGeneric(event, data) {
    var eventToFunction = function eventToFunction(event, data) {
      var funcName = 'on' + event.replace('hls', '');

      if (typeof this[funcName] !== 'function') {
        throw new Error("Event " + event + " has no generic handler in this " + this.constructor.name + " class (tried " + funcName + ")");
      }

      return this[funcName].bind(this, data);
    };

    try {
      eventToFunction.call(this, event, data).call();
    } catch (err) {
      logger.error("An internal error happened while handling event " + event + ". Error message: \"" + err.message + "\". Here is a stacktrace:", err);
      this.hls.trigger(events.ERROR, {
        type: ErrorTypes.OTHER_ERROR,
        details: ErrorDetails.INTERNAL_EXCEPTION,
        fatal: false,
        event: event,
        err: err
      });
    }
  };

  return EventHandler;
}();

/* harmony default export */ var event_handler = (event_handler_EventHandler);
+// CONCATENATED MODULE: ./src/types/loader.ts
+/**
+ * `type` property values for this loaders' context object
+ * @enum
+ *
+ */
/**
 * `type` values carried on a playlist loader's context object.
 * @enum {string}
 */
var PlaylistContextType = {
  MANIFEST: 'manifest',
  LEVEL: 'level',
  AUDIO_TRACK: 'audioTrack',
  SUBTITLE_TRACK: 'subtitleTrack'
};

/**
 * Kinds of playlist level.
 * @enum {string}
 */
var PlaylistLevelType = {
  MAIN: 'main',
  AUDIO: 'audio',
  SUBTITLE: 'subtitle'
};
+// CONCATENATED MODULE: ./src/demux/mp4demuxer.js
+/**
+ * MP4 demuxer
+ */
+
+
+var UINT32_MAX = Math.pow(2, 32) - 1;
+
+var mp4demuxer_MP4Demuxer =
+/*#__PURE__*/
+function () {
+ function MP4Demuxer(observer, remuxer) {
+ this.observer = observer;
+ this.remuxer = remuxer;
+ }
+
+ var _proto = MP4Demuxer.prototype;
+
+ _proto.resetTimeStamp = function resetTimeStamp(initPTS) {
+ this.initPTS = initPTS;
+ };
+
+ _proto.resetInitSegment = function resetInitSegment(initSegment, audioCodec, videoCodec, duration) {
+ // jshint unused:false
+ if (initSegment && initSegment.byteLength) {
+ var initData = this.initData = MP4Demuxer.parseInitSegment(initSegment); // default audio codec if nothing specified
+ // TODO : extract that from initsegment
+
+ if (audioCodec == null) {
+ audioCodec = 'mp4a.40.5';
+ }
+
+ if (videoCodec == null) {
+ videoCodec = 'avc1.42e01e';
+ }
+
+ var tracks = {};
+
+ if (initData.audio && initData.video) {
+ tracks.audiovideo = {
+ container: 'video/mp4',
+ codec: audioCodec + ',' + videoCodec,
+ initSegment: duration ? initSegment : null
+ };
+ } else {
+ if (initData.audio) {
+ tracks.audio = {
+ container: 'audio/mp4',
+ codec: audioCodec,
+ initSegment: duration ? initSegment : null
+ };
+ }
+
+ if (initData.video) {
+ tracks.video = {
+ container: 'video/mp4',
+ codec: videoCodec,
+ initSegment: duration ? initSegment : null
+ };
+ }
+ }
+
+ this.observer.trigger(events.FRAG_PARSING_INIT_SEGMENT, {
+ tracks: tracks
+ });
+ } else {
+ if (audioCodec) {
+ this.audioCodec = audioCodec;
+ }
+
+ if (videoCodec) {
+ this.videoCodec = videoCodec;
+ }
+ }
+ };
+
+ MP4Demuxer.probe = function probe(data) {
+ // ensure we find a moof box in the first 16 kB
+ return MP4Demuxer.findBox({
+ data: data,
+ start: 0,
+ end: Math.min(data.length, 16384)
+ }, ['moof']).length > 0;
+ };
+
+ MP4Demuxer.bin2str = function bin2str(buffer) {
+ return String.fromCharCode.apply(null, buffer);
+ };
+
+ MP4Demuxer.readUint16 = function readUint16(buffer, offset) {
+ if (buffer.data) {
+ offset += buffer.start;
+ buffer = buffer.data;
+ }
+
+ var val = buffer[offset] << 8 | buffer[offset + 1];
+ return val < 0 ? 65536 + val : val;
+ };
+
+ MP4Demuxer.readUint32 = function readUint32(buffer, offset) {
+ if (buffer.data) {
+ offset += buffer.start;
+ buffer = buffer.data;
+ }
+
+ var val = buffer[offset] << 24 | buffer[offset + 1] << 16 | buffer[offset + 2] << 8 | buffer[offset + 3];
+ return val < 0 ? 4294967296 + val : val;
+ };
+
+ MP4Demuxer.writeUint32 = function writeUint32(buffer, offset, value) {
+ if (buffer.data) {
+ offset += buffer.start;
+ buffer = buffer.data;
+ }
+
+ buffer[offset] = value >> 24;
+ buffer[offset + 1] = value >> 16 & 0xff;
+ buffer[offset + 2] = value >> 8 & 0xff;
+ buffer[offset + 3] = value & 0xff;
+ } // Find the data for a box specified by its path
+ ;
+
  // Walk the MP4 box tree and collect every box matching `path` (an array of
  // 4-char type codes, e.g. ['moov', 'trak']). `data` is either a raw byte
  // array or a {data, start, end} slice from a previous findBox call.
  // Returns an array of {data, start, end} slices pointing at each match's
  // payload (start skips the 8-byte size+type header).
  // NOTE(review): returns null (not []) for an empty path — callers must not
  // rely on a uniform array return in that case; confirm intended.
  MP4Demuxer.findBox = function findBox(data, path) {
    var results = [],
        i,
        size,
        type,
        end,
        subresults,
        start,
        endbox;

    if (data.data) {
      start = data.start;
      end = data.end;
      data = data.data;
    } else {
      start = 0;
      end = data.byteLength;
    }

    if (!path.length) {
      // short-circuit the search for empty paths
      return null;
    }

    for (i = start; i < end;) {
      size = MP4Demuxer.readUint32(data, i);
      type = MP4Demuxer.bin2str(data.subarray(i + 4, i + 8));
      // size <= 1 means "box extends to the end of the enclosing range"
      endbox = size > 1 ? i + size : end;

      if (type === path[0]) {
        if (path.length === 1) {
          // this is the end of the path and we've found the box we were
          // looking for
          results.push({
            data: data,
            start: i + 8,
            end: endbox
          });
        } else {
          // recursively search for the next box along the path
          subresults = MP4Demuxer.findBox({
            data: data,
            start: i + 8,
            end: endbox
          }, path.slice(1));

          if (subresults.length) {
            results = results.concat(subresults);
          }
        }
      }

      i = endbox;
    } // we've finished searching all of data


    return results;
  };
+
  // Parse the 'sidx' (segment index) box of an init segment into a list of
  // subsegment byte-ranges and durations. Returns null when no sidx box is
  // present.
  // NOTE(review): the hierarchical-reference branch below exits with a bare
  // `return;` (undefined) rather than null — confirm callers treat both alike.
  MP4Demuxer.parseSegmentIndex = function parseSegmentIndex(initSegment) {
    var moov = MP4Demuxer.findBox(initSegment, ['moov'])[0];
    var moovEndOffset = moov ? moov.end : null; // we need this in case we need to chop of garbage of the end of current data

    var index = 0;
    var sidx = MP4Demuxer.findBox(initSegment, ['sidx']);
    var references;

    if (!sidx || !sidx[0]) {
      return null;
    }

    references = [];
    sidx = sidx[0];
    var version = sidx.data[0]; // set initial offset, we skip the reference ID (not needed)

    // version 0 uses 32-bit time fields, version 1 uses 64-bit
    index = version === 0 ? 8 : 16;
    var timescale = MP4Demuxer.readUint32(sidx, index);
    index += 4; // TODO: parse earliestPresentationTime and firstOffset
    // usually zero in our case

    var earliestPresentationTime = 0;
    var firstOffset = 0;

    if (version === 0) {
      index += 8;
    } else {
      index += 16;
    } // skip reserved


    index += 2;
    // subsegments start right after the sidx box itself
    var startByte = sidx.end + firstOffset;
    var referencesCount = MP4Demuxer.readUint16(sidx, index);
    index += 2;

    for (var i = 0; i < referencesCount; i++) {
      var referenceIndex = index;
      var referenceInfo = MP4Demuxer.readUint32(sidx, referenceIndex);
      referenceIndex += 4;
      // low 31 bits: referenced size; top bit: 1 = points at another sidx
      var referenceSize = referenceInfo & 0x7FFFFFFF;
      var referenceType = (referenceInfo & 0x80000000) >>> 31;

      if (referenceType === 1) {
        console.warn('SIDX has hierarchical references (not supported)');
        return;
      }

      var subsegmentDuration = MP4Demuxer.readUint32(sidx, referenceIndex);
      referenceIndex += 4;
      references.push({
        referenceSize: referenceSize,
        subsegmentDuration: subsegmentDuration,
        // unscaled
        info: {
          duration: subsegmentDuration / timescale,
          start: startByte,
          end: startByte + referenceSize - 1
        }
      });
      startByte += referenceSize; // Skipping 1 bit for |startsWithSap|, 3 bits for |sapType|, and 28 bits
      // for |sapDelta|.

      referenceIndex += 4; // skip to next ref

      index = referenceIndex;
    }

    return {
      earliestPresentationTime: earliestPresentationTime,
      timescale: timescale,
      version: version,
      referencesCount: referencesCount,
      references: references,
      moovEndOffset: moovEndOffset
    };
  }
+ /**
+ * Parses an MP4 initialization segment and extracts stream type and
+ * timescale values for any declared tracks. Timescale values indicate the
+ * number of clock ticks per second to assume for time-based values
+ * elsewhere in the MP4.
+ *
+ * To determine the start time of an MP4, you need two pieces of
+ * information: the timescale unit and the earliest base media decode
+ * time. Multiple timescales can be specified within an MP4 but the
+ * base media decode time is always expressed in the timescale from
+ * the media header box for the track:
+ * ```
+ * moov > trak > mdia > mdhd.timescale
+ * moov > trak > mdia > hdlr
+ * ```
+ * @param init {Uint8Array} the bytes of the init segment
+ * @return {object} a hash of track type to timescale values or null if
+ * the init segment is malformed.
+ */
+ ;
+
+ MP4Demuxer.parseInitSegment = function parseInitSegment(initSegment) {
+ var result = [];
+ var traks = MP4Demuxer.findBox(initSegment, ['moov', 'trak']);
+ traks.forEach(function (trak) {
+ var tkhd = MP4Demuxer.findBox(trak, ['tkhd'])[0];
+
+ if (tkhd) {
+ var version = tkhd.data[tkhd.start];
+ var index = version === 0 ? 12 : 20;
+ var trackId = MP4Demuxer.readUint32(tkhd, index);
+ var mdhd = MP4Demuxer.findBox(trak, ['mdia', 'mdhd'])[0];
+
+ if (mdhd) {
+ version = mdhd.data[mdhd.start];
+ index = version === 0 ? 12 : 20;
+ var timescale = MP4Demuxer.readUint32(mdhd, index);
+ var hdlr = MP4Demuxer.findBox(trak, ['mdia', 'hdlr'])[0];
+
+ if (hdlr) {
+ var hdlrType = MP4Demuxer.bin2str(hdlr.data.subarray(hdlr.start + 8, hdlr.start + 12));
+ var type = {
+ 'soun': 'audio',
+ 'vide': 'video'
+ }[hdlrType];
+
+ if (type) {
+ // extract codec info. TODO : parse codec details to be able to build MIME type
+ var codecBox = MP4Demuxer.findBox(trak, ['mdia', 'minf', 'stbl', 'stsd']);
+
+ if (codecBox.length) {
+ codecBox = codecBox[0];
+ var codecType = MP4Demuxer.bin2str(codecBox.data.subarray(codecBox.start + 12, codecBox.start + 16));
+ logger.log("MP4Demuxer:" + type + ":" + codecType + " found");
+ }
+
+ result[trackId] = {
+ timescale: timescale,
+ type: type
+ };
+ result[type] = {
+ timescale: timescale,
+ id: trackId
+ };
+ }
+ }
+ }
+ }
+ });
+ return result;
+ }
+ /**
+ * Determine the base media decode start time, in seconds, for an MP4
+ * fragment. If multiple fragments are specified, the earliest time is
+ * returned.
+ *
+ * The base media decode time can be parsed from track fragment
+ * metadata:
+ * ```
+ * moof > traf > tfdt.baseMediaDecodeTime
+ * ```
+ * It requires the timescale value from the mdhd to interpret.
+ *
+ * @param timescale {object} a hash of track ids to timescale values.
+ * @return {number} the earliest base media decode start time for the
+ * fragment, in seconds
+ */
+ ;
+
  // Earliest base media decode time across every track fragment, in seconds.
  // `initData` is the parseInitSegment result (track id -> {timescale});
  // `fragment` is the raw moof-bearing payload. Falls back to 0 when no
  // finite time can be computed (e.g. no traf boxes: Math.min of an empty
  // list is Infinity).
  MP4Demuxer.getStartDTS = function getStartDTS(initData, fragment) {
    var trafs, baseTimes, result; // we need info from two childrend of each track fragment box

    trafs = MP4Demuxer.findBox(fragment, ['moof', 'traf']); // determine the start times for each track

    baseTimes = [].concat.apply([], trafs.map(function (traf) {
      return MP4Demuxer.findBox(traf, ['tfhd']).map(function (tfhd) {
        var id, scale, baseTime; // get the track id from the tfhd

        id = MP4Demuxer.readUint32(tfhd, 4); // assume a 90kHz clock if no timescale was specified

        scale = initData[id].timescale || 90e3; // get the base media decode time from the tfdt

        baseTime = MP4Demuxer.findBox(traf, ['tfdt']).map(function (tfdt) {
          var version, result;
          version = tfdt.data[tfdt.start];
          result = MP4Demuxer.readUint32(tfdt, 4);

          // version 1 stores a 64-bit time: combine the two 32-bit words
          // (as a float; precision loss above 2^53 ticks)
          if (version === 1) {
            result *= Math.pow(2, 32);
            result += MP4Demuxer.readUint32(tfdt, 8);
          }

          return result;
        })[0]; // convert base time to seconds

        return baseTime / scale;
      });
    })); // return the minimum

    result = Math.min.apply(null, baseTimes);
    return isFinite(result) ? result : 0;
  };
+
+ MP4Demuxer.offsetStartDTS = function offsetStartDTS(initData, fragment, timeOffset) {
+ MP4Demuxer.findBox(fragment, ['moof', 'traf']).map(function (traf) {
+ return MP4Demuxer.findBox(traf, ['tfhd']).map(function (tfhd) {
+ // get the track id from the tfhd
+ var id = MP4Demuxer.readUint32(tfhd, 4); // assume a 90kHz clock if no timescale was specified
+
+ var timescale = initData[id].timescale || 90e3; // get the base media decode time from the tfdt
+
+ MP4Demuxer.findBox(traf, ['tfdt']).map(function (tfdt) {
+ var version = tfdt.data[tfdt.start];
+ var baseMediaDecodeTime = MP4Demuxer.readUint32(tfdt, 4);
+
+ if (version === 0) {
+ MP4Demuxer.writeUint32(tfdt, 4, baseMediaDecodeTime - timeOffset * timescale);
+ } else {
+ baseMediaDecodeTime *= Math.pow(2, 32);
+ baseMediaDecodeTime += MP4Demuxer.readUint32(tfdt, 8);
+ baseMediaDecodeTime -= timeOffset * timescale;
+ baseMediaDecodeTime = Math.max(baseMediaDecodeTime, 0);
+ var upper = Math.floor(baseMediaDecodeTime / (UINT32_MAX + 1));
+ var lower = Math.floor(baseMediaDecodeTime % (UINT32_MAX + 1));
+ MP4Demuxer.writeUint32(tfdt, 4, upper);
+ MP4Demuxer.writeUint32(tfdt, 8, lower);
+ }
+ });
+ });
+ });
+ } // feed incoming data to the front of the parsing pipeline
+ ;
+
  /**
   * Demux one fMP4 fragment: derive initPTS on first call from the fragment's
   * start DTS, shift every tfdt in place by initPTS, then hand the data to the
   * passthrough remuxer.
   * @param data {Uint8Array} fragment payload
   * @param timeOffset {number} expected fragment start time, in seconds
   * @param contiguous {boolean} whether this fragment follows the previous one
   * @param accurateTimeOffset {boolean} whether timeOffset is exact
   */
  _proto.append = function append(data, timeOffset, contiguous, accurateTimeOffset) {
    var initData = this.initData;

    // Lazily (re-)parse the init segment if we have none yet.
    if (!initData) {
      this.resetInitSegment(data, this.audioCodec, this.videoCodec, false);
      initData = this.initData;
    }

    var startDTS,
        initPTS = this.initPTS;

    if (initPTS === undefined) {
      var _startDTS = MP4Demuxer.getStartDTS(initData, data);

      // initPTS anchors the timeline: fragment start minus playlist offset.
      this.initPTS = initPTS = _startDTS - timeOffset;
      this.observer.trigger(events.INIT_PTS_FOUND, {
        initPTS: initPTS
      });
    }

    // Rewrite the tfdt boxes in place, then recompute the (shifted) start.
    MP4Demuxer.offsetStartDTS(initData, data, initPTS);
    startDTS = MP4Demuxer.getStartDTS(initData, data);
    this.remuxer.remux(initData.audio, initData.video, null, null, startDTS, contiguous, accurateTimeOffset, data);
  };

  // Nothing to clean up; required by the demuxer interface.
  _proto.destroy = function destroy() {};
+
+ return MP4Demuxer;
+}();
+
+/* harmony default export */ var mp4demuxer = (mp4demuxer_MP4Demuxer);
+// CONCATENATED MODULE: ./src/loader/level-key.ts
// Babel class helper: install every descriptor in `props` onto `target`,
// normalising enumerable/configurable/writable per class-member semantics.
function _defineProperties(target, props) {
  var idx;
  var desc;

  for (idx = 0; idx < props.length; idx++) {
    desc = props[idx];
    desc.enumerable = desc.enumerable || false;
    desc.configurable = true;
    if ("value" in desc) {
      desc.writable = true;
    }
    Object.defineProperty(target, desc.key, desc);
  }
}

// Babel class helper: attach prototype members and static members, then
// return the constructor so the call site can assign it directly.
function _createClass(Constructor, protoProps, staticProps) {
  if (protoProps) {
    _defineProperties(Constructor.prototype, protoProps);
  }
  if (staticProps) {
    _defineProperties(Constructor, staticProps);
  }
  return Constructor;
}
+
+
+
var level_key_LevelKey =
/*#__PURE__*/
function () {
  /**
   * Decryption key descriptor for a playlist / fragment.
   * @param {string} baseURI - URI the key URI is resolved against
   * @param {string} relativeURI - key URI exactly as written in the playlist
   */
  function LevelKey(baseURI, relativeURI) {
    this._uri = null;          // memoized absolute key URI (see `uri` getter)
    this.baseuri = baseURI;    // resolution base
    this.reluri = relativeURI; // playlist-relative key URI
    this.method = null;        // e.g. 'AES-128'
    this.key = null;           // key payload once fetched
    this.iv = null;            // initialization vector, when signalled
  }

  _createClass(LevelKey, [{
    key: "uri",
    get: function get() {
      // Resolve and cache the absolute key URI on first access.
      if (!this._uri && this.reluri) {
        this._uri = Object(url_toolkit["buildAbsoluteURL"])(this.baseuri, this.reluri, {
          alwaysNormalize: true
        });
      }

      return this._uri;
    }
  }]);

  return LevelKey;
}();
+
+
+// CONCATENATED MODULE: ./src/loader/fragment.ts
+
+
+
// Babel class helper (fragment module copy): apply each descriptor to
// `target` with class-member defaults for the flags.
function fragment_defineProperties(target, props) {
  var n;
  var d;

  for (n = 0; n < props.length; n++) {
    d = props[n];
    d.enumerable = d.enumerable || false;
    d.configurable = true;
    if ("value" in d) {
      d.writable = true;
    }
    Object.defineProperty(target, d.key, d);
  }
}

// Babel class helper (fragment module copy): wire prototype and static
// members onto the constructor and hand it back.
function fragment_createClass(Constructor, protoProps, staticProps) {
  if (protoProps) {
    fragment_defineProperties(Constructor.prototype, protoProps);
  }
  if (staticProps) {
    fragment_defineProperties(Constructor, staticProps);
  }
  return Constructor;
}
+
+
+
+
// Transpiled TS enum: the kinds of elementary streams a fragment can carry.
var ElementaryStreamTypes;

(function (types) {
  types.AUDIO = "audio";
  types.VIDEO = "video";
})(ElementaryStreamTypes || (ElementaryStreamTypes = {}));
+
var fragment_Fragment =
/*#__PURE__*/
function () {
  /**
   * One media segment (or init segment) of a playlist. Holds playlist
   * metadata (sn, cc, duration, byte range, program date time, encryption
   * info) and lazily resolves its absolute URL / decrypt data on access.
   */
  function Fragment() {
    var _this$_elementaryStre;

    // Lazily resolved absolute URL (see the `url` getter).
    this._url = null;
    // Two-element [start, end) byte range, set from EXT-X-BYTERANGE.
    this._byteRange = null;
    // Lazily built decryption info (see the `decryptdata` getter).
    this._decryptdata = null;
    // Which elementary streams were found in this fragment while demuxing.
    this._elementaryStreams = (_this$_elementaryStre = {}, _this$_elementaryStre[ElementaryStreamTypes.AUDIO] = false, _this$_elementaryStre[ElementaryStreamTypes.VIDEO] = false, _this$_elementaryStre);
    this.deltaPTS = 0;
    this.rawProgramDateTime = null;
    this.programDateTime = null;
    this.title = null;
    this.tagList = [];
    // The fields below are filled in by the level playlist parser.
    this.cc = void 0;
    this.type = void 0;
    this.relurl = void 0;
    this.baseurl = void 0;
    this.duration = void 0;
    this.start = void 0;
    this.sn = 0; // media sequence number, or the string 'initSegment'
    this.urlId = 0;
    this.level = 0;
    this.levelkey = void 0;
    this.loader = void 0;
  }

  var _proto = Fragment.prototype;

  // setByteRange converts a EXT-X-BYTERANGE attribute into a two element array
  _proto.setByteRange = function setByteRange(value, previousFrag) {
    // EXT-X-BYTERANGE is "<length>[@<offset>]"; without an offset the range
    // continues from the previous fragment's end offset.
    var params = value.split('@', 2);
    var byteRange = [];

    if (params.length === 1) {
      byteRange[0] = previousFrag ? previousFrag.byteRangeEndOffset : 0;
    } else {
      byteRange[0] = parseInt(params[1]);
    }

    byteRange[1] = parseInt(params[0]) + byteRange[0];
    this._byteRange = byteRange;
  };

  /**
   * Mark the given elementary stream as present in this fragment.
   * @param {ElementaryStreamTypes} type
   */
  _proto.addElementaryStream = function addElementaryStream(type) {
    this._elementaryStreams[type] = true;
  }
  /**
   * @param {ElementaryStreamTypes} type
   */
  ;

  _proto.hasElementaryStream = function hasElementaryStream(type) {
    return this._elementaryStreams[type] === true;
  }
  /**
   * Utility method for parseLevelPlaylist to create an initialization vector for a given segment
   * @param {number} segmentNumber - segment number to generate IV with
   * @returns {Uint8Array}
   */
  ;

  _proto.createInitializationVector = function createInitializationVector(segmentNumber) {
    // 16-byte IV: zeros with the segment number big-endian in the last 4 bytes.
    var uint8View = new Uint8Array(16);

    for (var i = 12; i < 16; i++) {
      uint8View[i] = segmentNumber >> 8 * (15 - i) & 0xff;
    }

    return uint8View;
  }
  /**
   * Utility method for parseLevelPlaylist to get a fragment's decryption data from the currently parsed encryption key data
   * @param levelkey - a playlist's encryption info
   * @param segmentNumber - the fragment's segment number
   * @returns {LevelKey} - an object to be applied as a fragment's decryptdata
   */
  ;

  _proto.setDecryptDataFromLevelKey = function setDecryptDataFromLevelKey(levelkey, segmentNumber) {
    var decryptdata = levelkey;

    // When the key has no explicit IV, clone the key info and derive the IV
    // from the segment number.
    if (levelkey && levelkey.method && levelkey.uri && !levelkey.iv) {
      decryptdata = new level_key_LevelKey(levelkey.baseuri, levelkey.reluri);
      decryptdata.method = levelkey.method;
      decryptdata.iv = this.createInitializationVector(segmentNumber);
    }

    return decryptdata;
  };

  fragment_createClass(Fragment, [{
    key: "url",
    get: function get() {
      // Resolve relurl against baseurl once and cache the result.
      if (!this._url && this.relurl) {
        this._url = Object(url_toolkit["buildAbsoluteURL"])(this.baseurl, this.relurl, {
          alwaysNormalize: true
        });
      }

      return this._url;
    },
    set: function set(value) {
      this._url = value;
    }
  }, {
    key: "byteRange",
    get: function get() {
      // Empty array when no EXT-X-BYTERANGE was set.
      if (!this._byteRange) {
        return [];
      }

      return this._byteRange;
    }
    /**
     * @type {number}
     */

  }, {
    key: "byteRangeStartOffset",
    get: function get() {
      return this.byteRange[0];
    }
  }, {
    key: "byteRangeEndOffset",
    get: function get() {
      return this.byteRange[1];
    }
  }, {
    key: "decryptdata",
    get: function get() {
      // No key info at all: nothing to decrypt.
      if (!this.levelkey && !this._decryptdata) {
        return null;
      }

      if (!this._decryptdata && this.levelkey) {
        var sn = this.sn;

        if (typeof sn !== 'number') {
          // We are fetching decryption data for a initialization segment
          // If the segment was encrypted with AES-128
          // It must have an IV defined. We cannot substitute the Segment Number in.
          if (this.levelkey && this.levelkey.method === 'AES-128' && !this.levelkey.iv) {
            logger.warn("missing IV for initialization segment with method=\"" + this.levelkey.method + "\" - compliance issue");
          }
          /*
          Be converted to a Number.
          'initSegment' will become NaN.
          NaN, which when converted through ToInt32() -> +0.
          ---
          Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
          */


          sn = 0;
        }

        this._decryptdata = this.setDecryptDataFromLevelKey(this.levelkey, sn);
      }

      return this._decryptdata;
    }
  }, {
    key: "endProgramDateTime",
    get: function get() {
      // programDateTime is a unix timestamp in ms; add duration (s -> ms).
      if (this.programDateTime === null) {
        return null;
      }

      if (!isFiniteNumber(this.programDateTime)) {
        return null;
      }

      var duration = !isFiniteNumber(this.duration) ? 0 : this.duration;
      return this.programDateTime + duration * 1000;
    }
  }, {
    key: "encrypted",
    get: function get() {
      // Considered encrypted while a key URI is known but the key payload
      // has not been fetched yet.
      return !!(this.decryptdata && this.decryptdata.uri !== null && this.decryptdata.key === null);
    }
  }]);

  return Fragment;
}();
+
+
+// CONCATENATED MODULE: ./src/loader/level.js
+
+
// Babel class helper (level module copy): apply descriptors with
// class-member flag defaults.
function level_defineProperties(target, props) {
  for (var j = 0; j < props.length; j++) {
    var d = props[j];
    d.enumerable = d.enumerable || false;
    d.configurable = true;
    if ("value" in d) {
      d.writable = true;
    }
    Object.defineProperty(target, d.key, d);
  }
}

// Babel class helper (level module copy): attach prototype/static members
// and return the constructor.
function level_createClass(Constructor, protoProps, staticProps) {
  if (protoProps) {
    level_defineProperties(Constructor.prototype, protoProps);
  }
  if (staticProps) {
    level_defineProperties(Constructor, staticProps);
  }
  return Constructor;
}

var level_Level =
/*#__PURE__*/
function () {
  /**
   * Parsed representation of one media playlist (quality level).
   * @param {string} baseUrl - playlist URL, used to resolve fragment URIs
   */
  function Level(baseUrl) {
    // Please keep properties in alphabetical order
    this.endCC = 0;
    this.endSN = 0;
    this.fragments = [];
    this.initSegment = null;
    this.live = true;
    this.needSidxRanges = false;
    this.startCC = 0;
    this.startSN = 0;
    this.startTimeOffset = null;
    this.targetduration = 0;
    this.totalduration = 0;
    this.type = null;
    this.url = baseUrl;
    this.version = null;
  }

  level_createClass(Level, [{
    key: "hasProgramDateTime",
    // True when the first fragment carries a finite EXT-X-PROGRAM-DATE-TIME.
    get: function get() {
      var first = this.fragments[0];
      return !!(first && isFiniteNumber(first.programDateTime));
    }
  }]);

  return Level;
}();
+
+
+// CONCATENATED MODULE: ./src/utils/attr-list.js
// Matches "WIDTHxHEIGHT" decimal resolution values.
var DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/; // eslint-disable-line no-useless-escape

// Matches one NAME=VALUE pair (value optionally double-quoted), comma separated.
var ATTR_LIST_REGEX = /\s*(.+?)\s*=((?:\".*?\")|.*?)(?:,|$)/g; // eslint-disable-line no-useless-escape
// adapted from https://github.com/kanongil/node-m3u8parse/blob/master/attrlist.js

var AttrList =
/*#__PURE__*/
function () {
  /**
   * Attribute list of an EXT-X-* tag. Every parsed attribute becomes an own
   * property of the instance; accessor helpers coerce values on demand.
   * @param {string|Object} attrs - raw attribute string or pre-parsed map
   */
  function AttrList(attrs) {
    if (typeof attrs === 'string') {
      attrs = AttrList.parseAttrList(attrs);
    }

    for (var attr in attrs) {
      if (attrs.hasOwnProperty(attr)) {
        this[attr] = attrs[attr];
      }
    }
  }

  var _proto = AttrList.prototype;

  // Decimal integer; anything beyond Number.MAX_SAFE_INTEGER collapses to Infinity.
  _proto.decimalInteger = function decimalInteger(attrName) {
    var parsed = parseInt(this[attrName], 10);
    return parsed > Number.MAX_SAFE_INTEGER ? Infinity : parsed;
  };

  // Hex attribute ("0x...") as a big-endian byte array, or null when absent.
  _proto.hexadecimalInteger = function hexadecimalInteger(attrName) {
    if (!this[attrName]) {
      return null;
    }

    var hex = (this[attrName] || '0x').slice(2);
    // Left-pad to a whole number of bytes.
    hex = (hex.length & 1 ? '0' : '') + hex;
    var bytes = new Uint8Array(hex.length / 2);

    for (var i = 0; i < hex.length / 2; i++) {
      bytes[i] = parseInt(hex.slice(i * 2, i * 2 + 2), 16);
    }

    return bytes;
  };

  // Hex attribute as a plain number; overflows to Infinity past MAX_SAFE_INTEGER.
  _proto.hexadecimalIntegerAsNumber = function hexadecimalIntegerAsNumber(attrName) {
    var parsed = parseInt(this[attrName], 16);
    return parsed > Number.MAX_SAFE_INTEGER ? Infinity : parsed;
  };

  _proto.decimalFloatingPoint = function decimalFloatingPoint(attrName) {
    return parseFloat(this[attrName]);
  };

  // Enumerated strings are returned verbatim.
  _proto.enumeratedString = function enumeratedString(attrName) {
    return this[attrName];
  };

  // "WxH" attribute as { width, height }, or undefined when malformed.
  _proto.decimalResolution = function decimalResolution(attrName) {
    var match = DECIMAL_RESOLUTION_REGEX.exec(this[attrName]);

    if (match === null) {
      return undefined;
    }

    return {
      width: parseInt(match[1], 10),
      height: parseInt(match[2], 10)
    };
  };

  // Parse a raw attribute string into a plain { NAME: value } map,
  // stripping surrounding quotes from quoted values.
  AttrList.parseAttrList = function parseAttrList(input) {
    var attrs = {};
    var match;
    // The regex is global and therefore stateful: always rewind it first.
    ATTR_LIST_REGEX.lastIndex = 0;

    while ((match = ATTR_LIST_REGEX.exec(input)) !== null) {
      var value = match[2];
      var quote = '"';

      if (value.indexOf(quote) === 0 && value.lastIndexOf(quote) === value.length - 1) {
        value = value.slice(1, -1);
      }

      attrs[match[1]] = value;
    }

    return attrs;
  };

  return AttrList;
}();

/* harmony default export */ var attr_list = (AttrList);
+// CONCATENATED MODULE: ./src/utils/codecs.ts
+// from http://mp4ra.org/codecs.html
// Lookup tables of known 4CC sample-entry codes, keyed by track type
// (source list: http://mp4ra.org/codecs.html).
var sampleEntryCodesISO = {
  audio: {
    'a3ds': true,
    'ac-3': true,
    'ac-4': true,
    'alac': true,
    'alaw': true,
    'dra1': true,
    'dts+': true,
    'dts-': true,
    'dtsc': true,
    'dtse': true,
    'dtsh': true,
    'ec-3': true,
    'enca': true,
    'g719': true,
    'g726': true,
    'm4ae': true,
    'mha1': true,
    'mha2': true,
    'mhm1': true,
    'mhm2': true,
    'mlpa': true,
    'mp4a': true,
    'raw ': true,
    'Opus': true,
    'samr': true,
    'sawb': true,
    'sawp': true,
    'sevc': true,
    'sqcp': true,
    'ssmv': true,
    'twos': true,
    'ulaw': true
  },
  video: {
    'avc1': true,
    'avc2': true,
    'avc3': true,
    'avc4': true,
    'avcp': true,
    'drac': true,
    'dvav': true,
    'dvhe': true,
    'encv': true,
    'hev1': true,
    'hvc1': true,
    'mjp2': true,
    'mp4v': true,
    'mvc1': true,
    'mvc2': true,
    'mvc3': true,
    'mvc4': true,
    'resv': true,
    'rv60': true,
    's263': true,
    'svc1': true,
    'svc2': true,
    'vc-1': true,
    'vp08': true,
    'vp09': true
  }
};

/**
 * Whether `codec` (e.g. "mp4a.40.2") is a known codec of the given track
 * type. Only the leading 4CC of the codec string is significant.
 * @param {string} codec
 * @param {'audio'|'video'} type
 * @returns {boolean}
 */
function isCodecType(codec, type) {
  var codes = sampleEntryCodesISO[type];

  if (!codes) {
    return false;
  }

  return codes[codec.slice(0, 4)] === true;
}
+
/**
 * Whether the browser's MediaSource can play `codec` in an fMP4 container.
 * @param {string} codec - RFC 6381 codec string
 * @param {'audio'|'video'} [type] - container subtype; defaults to 'video'
 * @returns {boolean}
 */
function isCodecSupportedInMp4(codec, type) {
  var mimeType = (type || 'video') + "/mp4;codecs=\"" + codec + "\"";
  return MediaSource.isTypeSupported(mimeType);
}
+
+
+// CONCATENATED MODULE: ./src/loader/m3u8-parser.ts
+
+
+
+
+
+
+
+
+
+
+
+/**
+ * M3U8 parser
+ * @module
+ */
+// https://regex101.com is your friend
// https://regex101.com is your friend
// One EXT-X-STREAM-INF tag plus the level URI on the following line.
var MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\n\r]*)[\r\n]+([^\r\n]+)/g;
var MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
// Fast path: a single combined regex handles the common per-segment tags in
// one pass; unmatched tags fall through to LEVEL_PLAYLIST_REGEX_SLOW below.
var LEVEL_PLAYLIST_REGEX_FAST = new RegExp([/#EXTINF:\s*(\d*(?:\.\d+)?)(?:,(.*)\s+)?/.source, // duration (#EXTINF:,), group 1 => duration, group 2 => title
/|(?!#)([\S+ ?]+)/.source, // segment URI, group 3 => the URI (note newline is not eaten)
/|#EXT-X-BYTERANGE:*(.+)/.source, // next segment's byterange, group 4 => range spec (x@y)
/|#EXT-X-PROGRAM-DATE-TIME:(.+)/.source, // next segment's program date/time group 5 => the datetime spec
/|#.*/.source // All other non-segment oriented tags will match with all groups empty
].join(''), 'g');
// Slow path: one alternation per playlist-level tag; group i holds the tag
// name, groups i+1/i+2 its value(s).
var LEVEL_PLAYLIST_REGEX_SLOW = /(?:(?:#(EXTM3U))|(?:#EXT-X-(PLAYLIST-TYPE):(.+))|(?:#EXT-X-(MEDIA-SEQUENCE): *(\d+))|(?:#EXT-X-(TARGETDURATION): *(\d+))|(?:#EXT-X-(KEY):(.+))|(?:#EXT-X-(START):(.+))|(?:#EXT-X-(ENDLIST))|(?:#EXT-X-(DISCONTINUITY-SEQ)UENCE:(\d+))|(?:#EXT-X-(DIS)CONTINUITY))|(?:#EXT-X-(VERSION):(\d+))|(?:#EXT-X-(MAP):(.+))|(?:(#)([^:]*):(.*))|(?:(#)(.*))(?:.*)\r?\n?/;
// Used to sniff fMP4 playlists that lack an EXT-X-MAP init segment.
var MP4_REGEX_SUFFIX = /\.(mp4|m4s|m4v|m4a)$/i;
+
var m3u8_parser_M3U8Parser =
/*#__PURE__*/
function () {
  function M3U8Parser() {}

  /**
   * Find the audio group whose id matches mediaGroupId.
   * @returns the matching group, or undefined when none matches
   */
  M3U8Parser.findGroup = function findGroup(groups, mediaGroupId) {
    for (var i = 0; i < groups.length; i++) {
      var group = groups[i];

      if (group.id === mediaGroupId) {
        return group;
      }
    }
  };

  /**
   * Convert an "avc1.<profile>.<level>" codec string into the hex
   * "avc1.PPCCLL" (avcoti) form expected by MediaSource.
   */
  M3U8Parser.convertAVC1ToAVCOTI = function convertAVC1ToAVCOTI(codec) {
    var avcdata = codec.split('.');
    var result;

    if (avcdata.length > 2) {
      result = avcdata.shift() + '.';
      result += parseInt(avcdata.shift()).toString(16);
      // Level byte zero-padded to 4 hex chars total with the profile byte.
      result += ('000' + parseInt(avcdata.shift()).toString(16)).substr(-4);
    } else {
      result = codec;
    }

    return result;
  };

  // Resolve url against baseUrl into a normalized absolute URL.
  M3U8Parser.resolve = function resolve(url, baseUrl) {
    return url_toolkit["buildAbsoluteURL"](baseUrl, url, {
      alwaysNormalize: true
    });
  };

  /**
   * Parse a master playlist string into an array of level objects
   * (url, bitrate, resolution, codecs, name, ...).
   */
  M3U8Parser.parseMasterPlaylist = function parseMasterPlaylist(string, baseurl) {
    // TODO(typescript-level)
    var levels = [];
    MASTER_PLAYLIST_REGEX.lastIndex = 0; // TODO(typescript-level)

    // Pick one video and one audio codec per level (preferring avc1/mp4a);
    // whatever remains unmatched is recorded as unknownCodecs.
    function setCodecs(codecs, level) {
      ['video', 'audio'].forEach(function (type) {
        var filtered = codecs.filter(function (codec) {
          return isCodecType(codec, type);
        });

        if (filtered.length) {
          var preferred = filtered.filter(function (codec) {
            return codec.lastIndexOf('avc1', 0) === 0 || codec.lastIndexOf('mp4a', 0) === 0;
          });
          level[type + "Codec"] = preferred.length > 0 ? preferred[0] : filtered[0]; // remove from list

          codecs = codecs.filter(function (codec) {
            return filtered.indexOf(codec) === -1;
          });
        }
      });
      level.unknownCodecs = codecs;
    }

    var result;

    while ((result = MASTER_PLAYLIST_REGEX.exec(string)) != null) {
      // TODO(typescript-level)
      var level = {};
      var attrs = level.attrs = new attr_list(result[1]);
      level.url = M3U8Parser.resolve(result[2], baseurl);
      var resolution = attrs.decimalResolution('RESOLUTION');

      if (resolution) {
        level.width = resolution.width;
        level.height = resolution.height;
      }

      // AVERAGE-BANDWIDTH is preferred over the peak BANDWIDTH when present.
      level.bitrate = attrs.decimalInteger('AVERAGE-BANDWIDTH') || attrs.decimalInteger('BANDWIDTH');
      level.name = attrs.NAME;
      setCodecs([].concat((attrs.CODECS || '').split(/[ ,]+/)), level);

      if (level.videoCodec && level.videoCodec.indexOf('avc1') !== -1) {
        level.videoCodec = M3U8Parser.convertAVC1ToAVCOTI(level.videoCodec);
      }

      levels.push(level);
    }

    return levels;
  };

  /**
   * Parse EXT-X-MEDIA tags of the given TYPE (AUDIO/SUBTITLES/...) out of a
   * master playlist into an array of media-track descriptors.
   */
  M3U8Parser.parseMasterPlaylistMedia = function parseMasterPlaylistMedia(string, baseurl, type, audioGroups) {
    if (audioGroups === void 0) {
      audioGroups = [];
    }

    var result;
    var medias = [];
    var id = 0;
    MASTER_PLAYLIST_MEDIA_REGEX.lastIndex = 0;

    while ((result = MASTER_PLAYLIST_MEDIA_REGEX.exec(string)) !== null) {
      var attrs = new attr_list(result[1]);

      if (attrs.TYPE === type) {
        var media = {
          id: id++,
          groupId: attrs['GROUP-ID'],
          name: attrs.NAME || attrs.LANGUAGE,
          type: type,
          default: attrs.DEFAULT === 'YES',
          autoselect: attrs.AUTOSELECT === 'YES',
          forced: attrs.FORCED === 'YES',
          lang: attrs.LANGUAGE
        };

        if (attrs.URI) {
          media.url = M3U8Parser.resolve(attrs.URI, baseurl);
        }

        if (audioGroups.length) {
          // If there are audio groups signalled in the manifest, let's look for a matching codec string for this track
          var groupCodec = M3U8Parser.findGroup(audioGroups, media.groupId); // If we don't find the track signalled, lets use the first audio groups codec we have
          // Acting as a best guess

          media.audioCodec = groupCodec ? groupCodec.codec : audioGroups[0].codec;
        }

        medias.push(media);
      }
    }

    return medias;
  };

  /**
   * Parse a media (level) playlist into a Level populated with Fragment
   * objects. Runs the fast combined regex over each line and falls back to
   * the slow per-tag regex for anything it does not recognise.
   */
  M3U8Parser.parseLevelPlaylist = function parseLevelPlaylist(string, baseurl, id, type, levelUrlId) {
    var currentSN = 0;
    var totalduration = 0;
    var level = new level_Level(baseurl);
    var discontinuityCounter = 0;
    var prevFrag = null;
    var frag = new fragment_Fragment();
    var result;
    var i;
    var levelkey;
    // Index of the first fragment carrying a PDT tag, for backfilling later.
    var firstPdtIndex = null;
    LEVEL_PLAYLIST_REGEX_FAST.lastIndex = 0;

    while ((result = LEVEL_PLAYLIST_REGEX_FAST.exec(string)) !== null) {
      var duration = result[1];

      if (duration) {
        // INF
        frag.duration = parseFloat(duration); // avoid sliced strings https://github.com/video-dev/hls.js/issues/939

        var title = (' ' + result[2]).slice(1);
        frag.title = title || null;
        frag.tagList.push(title ? ['INF', duration, title] : ['INF', duration]);
      } else if (result[3]) {
        // url
        // A URI line completes the pending fragment; push it and start a new one.
        if (isFiniteNumber(frag.duration)) {
          var sn = currentSN++;
          frag.type = type;
          frag.start = totalduration;

          if (levelkey) {
            frag.levelkey = levelkey;
          }

          frag.sn = sn;
          frag.level = id;
          frag.cc = discontinuityCounter;
          frag.urlId = levelUrlId;
          frag.baseurl = baseurl; // avoid sliced strings https://github.com/video-dev/hls.js/issues/939

          frag.relurl = (' ' + result[3]).slice(1);
          assignProgramDateTime(frag, prevFrag);
          level.fragments.push(frag);
          prevFrag = frag;
          totalduration += frag.duration;
          frag = new fragment_Fragment();
        }
      } else if (result[4]) {
        // X-BYTERANGE
        var data = (' ' + result[4]).slice(1);

        if (prevFrag) {
          frag.setByteRange(data, prevFrag);
        } else {
          frag.setByteRange(data);
        }
      } else if (result[5]) {
        // PROGRAM-DATE-TIME
        // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
        frag.rawProgramDateTime = (' ' + result[5]).slice(1);
        frag.tagList.push(['PROGRAM-DATE-TIME', frag.rawProgramDateTime]);

        if (firstPdtIndex === null) {
          firstPdtIndex = level.fragments.length;
        }
      } else {
        // Unrecognised by the fast regex: re-match with the per-tag slow one.
        result = result[0].match(LEVEL_PLAYLIST_REGEX_SLOW);

        if (!result) {
          logger.warn('No matches on slow regex match for level playlist!');
          continue;
        }

        // Locate the first non-empty capture group: it names the tag.
        for (i = 1; i < result.length; i++) {
          if (typeof result[i] !== 'undefined') {
            break;
          }
        } // avoid sliced strings https://github.com/video-dev/hls.js/issues/939


        var value1 = (' ' + result[i + 1]).slice(1);
        var value2 = (' ' + result[i + 2]).slice(1);

        switch (result[i]) {
          case '#':
            frag.tagList.push(value2 ? [value1, value2] : [value1]);
            break;

          case 'PLAYLIST-TYPE':
            level.type = value1.toUpperCase();
            break;

          case 'MEDIA-SEQUENCE':
            currentSN = level.startSN = parseInt(value1);
            break;

          case 'TARGETDURATION':
            level.targetduration = parseFloat(value1);
            break;

          case 'VERSION':
            level.version = parseInt(value1);
            break;

          case 'EXTM3U':
            break;

          case 'ENDLIST':
            level.live = false;
            break;

          case 'DIS':
            discontinuityCounter++;
            frag.tagList.push(['DIS']);
            break;

          case 'DISCONTINUITY-SEQ':
            discontinuityCounter = parseInt(value1);
            break;

          case 'KEY':
            {
              // https://tools.ietf.org/html/draft-pantos-http-live-streaming-08#section-3.4.4
              // The key applies to this and all subsequent fragments.
              var decryptparams = value1;
              var keyAttrs = new attr_list(decryptparams);
              var decryptmethod = keyAttrs.enumeratedString('METHOD');
              var decrypturi = keyAttrs.URI;
              var decryptiv = keyAttrs.hexadecimalInteger('IV');

              if (decryptmethod) {
                levelkey = new level_key_LevelKey(baseurl, decrypturi);

                if (decrypturi && ['AES-128', 'SAMPLE-AES', 'SAMPLE-AES-CENC'].indexOf(decryptmethod) >= 0) {
                  levelkey.method = decryptmethod;
                  levelkey.key = null; // Initialization Vector (IV)

                  levelkey.iv = decryptiv;
                }
              }

              break;
            }

          case 'START':
            {
              var startAttrs = new attr_list(value1);
              var startTimeOffset = startAttrs.decimalFloatingPoint('TIME-OFFSET'); // TIME-OFFSET can be 0

              if (isFiniteNumber(startTimeOffset)) {
                level.startTimeOffset = startTimeOffset;
              }

              break;
            }

          case 'MAP':
            {
              // EXT-X-MAP: the pending fragment becomes the init segment.
              var mapAttrs = new attr_list(value1);
              frag.relurl = mapAttrs.URI;

              if (mapAttrs.BYTERANGE) {
                frag.setByteRange(mapAttrs.BYTERANGE);
              }

              frag.baseurl = baseurl;
              frag.level = id;
              frag.type = type;
              frag.sn = 'initSegment';
              level.initSegment = frag;
              frag = new fragment_Fragment();
              frag.rawProgramDateTime = level.initSegment.rawProgramDateTime;
              break;
            }

          default:
            logger.warn("line parsed but not handled: " + result);
            break;
        }
      }
    }

    frag = prevFrag; // logger.log('found ' + level.fragments.length + ' fragments');

    // Drop a trailing fragment that never received a URI line.
    if (frag && !frag.relurl) {
      level.fragments.pop();
      totalduration -= frag.duration;
    }

    level.totalduration = totalduration;
    level.averagetargetduration = totalduration / level.fragments.length;
    level.endSN = currentSN - 1;
    level.startCC = level.fragments[0] ? level.fragments[0].cc : 0;
    level.endCC = discontinuityCounter;

    if (!level.initSegment && level.fragments.length) {
      // this is a bit lurky but HLS really has no other way to tell us
      // if the fragments are TS or MP4, except if we download them :/
      // but this is to be able to handle SIDX.
      if (level.fragments.every(function (frag) {
        return MP4_REGEX_SUFFIX.test(frag.relurl);
      })) {
        logger.warn('MP4 fragments found but no init segment (probably no MAP, incomplete M3U8), trying to fetch SIDX');
        frag = new fragment_Fragment();
        frag.relurl = level.fragments[0].relurl;
        frag.baseurl = baseurl;
        frag.level = id;
        frag.type = type;
        frag.sn = 'initSegment';
        level.initSegment = frag;
        level.needSidxRanges = true;
      }
    }
    /**
     * Backfill any missing PDT values
     "If the first EXT-X-PROGRAM-DATE-TIME tag in a Playlist appears after
     one or more Media Segment URIs, the client SHOULD extrapolate
     backward from that tag (using EXTINF durations and/or media
     timestamps) to associate dates with those segments."
     * We have already extrapolated forward, but all fragments up to the first instance of PDT do not have their PDTs
     * computed.
     */


    if (firstPdtIndex) {
      backfillProgramDateTimes(level.fragments, firstPdtIndex);
    }

    return level;
  };

  return M3U8Parser;
}();
+
+
+
/**
 * Extrapolate programDateTime backwards from fragments[startIndex] down to
 * index 0 using each fragment's duration, as RFC 8216 recommends when the
 * first EXT-X-PROGRAM-DATE-TIME tag appears after some segment URIs.
 * @param {Array} fragments - fragments mutated in place
 * @param {number} startIndex - index of the first fragment that has a PDT
 */
function backfillProgramDateTimes(fragments, startIndex) {
  var anchor = fragments[startIndex];

  for (var idx = startIndex - 1; idx >= 0; idx--) {
    var current = fragments[idx];
    // Duration is in seconds, programDateTime in milliseconds.
    current.programDateTime = anchor.programDateTime - current.duration * 1000;
    anchor = current;
  }
}
+
// Derive frag.programDateTime: prefer the fragment's own raw
// EXT-X-PROGRAM-DATE-TIME value, else extrapolate from the previous
// fragment's end time; normalise both fields to null when the result is
// not a finite timestamp (e.g. unparseable date string).
function assignProgramDateTime(frag, prevFrag) {
  if (frag.rawProgramDateTime) {
    frag.programDateTime = Date.parse(frag.rawProgramDateTime);
  } else if (prevFrag && prevFrag.programDateTime) {
    frag.programDateTime = prevFrag.endProgramDateTime;
  }

  if (!isFiniteNumber(frag.programDateTime)) {
    frag.programDateTime = null;
    frag.rawProgramDateTime = null;
  }
}
+// CONCATENATED MODULE: ./src/loader/playlist-loader.ts
+
+
+
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+
+/**
+ * PlaylistLoader - delegate for media manifest/playlist loading tasks. Takes care of parsing media to internal data-models.
+ *
+ * Once loaded, dispatches events with parsed data-models of manifest/levels/audio/subtitle tracks.
+ *
+ * Uses loader(s) set in config to do actual internal loading of resource tasks.
+ *
+ * @module
+ *
+ */
+
+
+
+
+
+
+
// Alias window.performance locally; used for load-timing stats below.
var _window = window,
    performance = _window.performance;
+/**
+ * @constructor
+ */
+
+var playlist_loader_PlaylistLoader =
+/*#__PURE__*/
+function (_EventHandler) {
+ _inheritsLoose(PlaylistLoader, _EventHandler);
+
+ /**
+ * @constructs
+ * @param {Hls} hls
+ */
  function PlaylistLoader(hls) {
    var _this;

    // Subscribe to every event that should trigger a playlist load.
    _this = _EventHandler.call(this, hls, events.MANIFEST_LOADING, events.LEVEL_LOADING, events.AUDIO_TRACK_LOADING, events.SUBTITLE_TRACK_LOADING) || this;
    // One internal loader instance per context type (manifest/level/track).
    _this.loaders = {};
    return _this;
  }
+ /**
+ * @param {PlaylistContextType} type
+ * @returns {boolean}
+ */
+
+
  PlaylistLoader.canHaveQualityLevels = function canHaveQualityLevels(type) {
    // Only manifest/level contexts carry quality levels; alternate audio and
    // subtitle track playlists do not.
    return type !== PlaylistContextType.AUDIO_TRACK && type !== PlaylistContextType.SUBTITLE_TRACK;
  }
  /**
   * Map context.type to LevelType
   * @param {PlaylistLoaderContext} context
   * @returns {LevelType}
   */
  ;

  PlaylistLoader.mapContextToLevelType = function mapContextToLevelType(context) {
    var type = context.type;

    switch (type) {
      case PlaylistContextType.AUDIO_TRACK:
        return PlaylistLevelType.AUDIO;

      case PlaylistContextType.SUBTITLE_TRACK:
        return PlaylistLevelType.SUBTITLE;

      default:
        // Manifest and level contexts both map to the main level type.
        return PlaylistLevelType.MAIN;
    }
  };
+
+ PlaylistLoader.getResponseUrl = function getResponseUrl(response, context) {
+ var url = response.url; // responseURL not supported on some browsers (it is used to detect URL redirection)
+ // data-uri mode also not supported (but no need to detect redirection)
+
+ if (url === undefined || url.indexOf('data:') === 0) {
+ // fallback to initial URL
+ url = context.url;
+ }
+
+ return url;
+ }
+ /**
+ * Returns defaults or configured loader-type overloads (pLoader and loader config params)
+ * Default loader is XHRLoader (see utils)
+ * @param {PlaylistLoaderContext} context
+ * @returns {Loader} or other compatible configured overload
+ */
+ ;
+
  var _proto = PlaylistLoader.prototype;

  _proto.createInternalLoader = function createInternalLoader(context) {
    var config = this.hls.config;
    var PLoader = config.pLoader;
    var Loader = config.loader; // TODO(typescript-config): Verify once config is typed that InternalLoader always returns a Loader

    // A custom playlist loader (pLoader) takes precedence over the generic one.
    var InternalLoader = PLoader || Loader;
    var loader = new InternalLoader(config); // TODO - Do we really need to assign the instance or if the dep has been lost

    // Track the loader on the context and cache it per context type.
    context.loader = loader;
    this.loaders[context.type] = loader;
    return loader;
  };
+
  // Return the cached loader for this context's type, if any.
  _proto.getInternalLoader = function getInternalLoader(context) {
    return this.loaders[context.type];
  };

  // Forget the cached loader for a context type (it is not destroyed here).
  _proto.resetInternalLoader = function resetInternalLoader(contextType) {
    if (this.loaders[contextType]) {
      delete this.loaders[contextType];
    }
  }
+ /**
+ * Call `destroy` on all internal loader instances mapped (one per context type)
+ */
+ ;
+
  _proto.destroyInternalLoaders = function destroyInternalLoaders() {
    // Destroy and deregister every cached loader, one per context type.
    for (var contextType in this.loaders) {
      var loader = this.loaders[contextType];

      if (loader) {
        loader.destroy();
      }

      this.resetInternalLoader(contextType);
    }
  };

  // Tear down all loaders, then detach from the event bus.
  _proto.destroy = function destroy() {
    this.destroyInternalLoaders();

    _EventHandler.prototype.destroy.call(this);
  };
+
  // Event handler: kick off a master manifest load.
  _proto.onManifestLoading = function onManifestLoading(data) {
    this.load({
      url: data.url,
      type: PlaylistContextType.MANIFEST,
      level: 0,
      id: null,
      responseType: 'text'
    });
  };

  // Event handler: kick off a quality-level playlist load.
  _proto.onLevelLoading = function onLevelLoading(data) {
    this.load({
      url: data.url,
      type: PlaylistContextType.LEVEL,
      level: data.level,
      id: data.id,
      responseType: 'text'
    });
  };

  // Event handler: kick off an alternate audio-track playlist load.
  _proto.onAudioTrackLoading = function onAudioTrackLoading(data) {
    this.load({
      url: data.url,
      type: PlaylistContextType.AUDIO_TRACK,
      level: null,
      id: data.id,
      responseType: 'text'
    });
  };

  // Event handler: kick off a subtitle-track playlist load.
  _proto.onSubtitleTrackLoading = function onSubtitleTrackLoading(data) {
    this.load({
      url: data.url,
      type: PlaylistContextType.SUBTITLE_TRACK,
      level: null,
      id: data.id,
      responseType: 'text'
    });
  };
+
  /**
   * Start loading the playlist described by `context`, reusing or aborting
   * an existing loader for the same context type as appropriate.
   * @returns {boolean} false when an identical request is already ongoing
   */
  _proto.load = function load(context) {
    var config = this.hls.config;
    logger.debug("Loading playlist of type " + context.type + ", level: " + context.level + ", id: " + context.id); // Check if a loader for this context already exists

    var loader = this.getInternalLoader(context);

    if (loader) {
      var loaderContext = loader.context;

      if (loaderContext && loaderContext.url === context.url) {
        // same URL can't overlap
        logger.trace('playlist request ongoing');
        return false;
      } else {
        // Different URL for the same context type: cancel the stale request.
        logger.warn("aborting previous loader for type: " + context.type);
        loader.abort();
      }
    }

    var maxRetry;
    var timeout;
    var retryDelay;
    var maxRetryDelay; // apply different configs for retries depending on
    // context (manifest, level, audio/subs playlist)

    switch (context.type) {
      case PlaylistContextType.MANIFEST:
        maxRetry = config.manifestLoadingMaxRetry;
        timeout = config.manifestLoadingTimeOut;
        retryDelay = config.manifestLoadingRetryDelay;
        maxRetryDelay = config.manifestLoadingMaxRetryTimeout;
        break;

      case PlaylistContextType.LEVEL:
        // Disable internal loader retry logic, since we are managing retries in Level Controller
        maxRetry = 0;
        maxRetryDelay = 0;
        retryDelay = 0;
        timeout = config.levelLoadingTimeOut; // TODO Introduce retry settings for audio-track and subtitle-track, it should not use level retry config

        break;

      default:
        // Audio/subtitle tracks currently reuse the level retry settings.
        maxRetry = config.levelLoadingMaxRetry;
        timeout = config.levelLoadingTimeOut;
        retryDelay = config.levelLoadingRetryDelay;
        maxRetryDelay = config.levelLoadingMaxRetryTimeout;
        break;
    }

    loader = this.createInternalLoader(context);
    var loaderConfig = {
      timeout: timeout,
      maxRetry: maxRetry,
      retryDelay: retryDelay,
      maxRetryDelay: maxRetryDelay
    };
    var loaderCallbacks = {
      onSuccess: this.loadsuccess.bind(this),
      onError: this.loaderror.bind(this),
      onTimeout: this.loadtimeout.bind(this)
    };
    logger.debug("Calling internal loader delegate for URL: " + context.url);
    loader.load(context, loaderConfig, loaderCallbacks);
    return true;
  };
+
/**
 * Shared success callback for every playlist request.
 * Dispatches SIDX responses to the SIDX handler, validates the body is an
 * M3U8, then routes to the master- or track/level-playlist handler.
 * @param {Object} response - loader response; data must be a string (responseType 'text')
 * @param {Object} stats - load statistics; tload is stamped here
 * @param {Object} context - the context passed to load()
 * @param {*} [networkDetails=null] - underlying XHR/fetch object
 * @throws {Error} if the response data is not a string
 */
_proto.loadsuccess = function loadsuccess(response, stats, context, networkDetails) {
  if (networkDetails === void 0) {
    networkDetails = null;
  }

  if (context.isSidxRequest) {
    // SIDX follow-up request: patch byte ranges then finish as a loaded playlist
    this._handleSidxRequest(response, context);

    this._handlePlaylistLoaded(response, stats, context, networkDetails);

    return;
  }

  // request finished: free the per-type internal loader slot
  this.resetInternalLoader(context.type);

  if (typeof response.data !== 'string') {
    throw new Error('expected responseType of "text" for PlaylistLoader');
  }

  var string = response.data;
  stats.tload = performance.now(); // stats.mtime = new Date(target.getResponseHeader('Last-Modified'));
  // Validate if it is an M3U8 at all

  if (string.indexOf('#EXTM3U') !== 0) {
    this._handleManifestParsingError(response, context, 'no EXTM3U delimiter', networkDetails);

    return;
  } // Check if chunk-list or master. handle empty chunk list case (first EXTINF not signaled, but TARGETDURATION present)


  if (string.indexOf('#EXTINF:') > 0 || string.indexOf('#EXT-X-TARGETDURATION:') > 0) {
    this._handleTrackOrLevelPlaylist(response, stats, context, networkDetails);
  } else {
    this._handleMasterPlaylist(response, stats, context, networkDetails);
  }
};
+
/**
 * Shared error callback: forwards to the common network-error handler
 * with the timeout flag cleared and the failing response attached.
 */
_proto.loaderror = function loaderror(response, context, networkDetails) {
  if (networkDetails === undefined) {
    networkDetails = null;
  }

  this._handleNetworkError(context, networkDetails, false, response);
};
+
/**
 * Shared timeout callback: forwards to the common network-error handler
 * with the timeout flag set (no response object is available).
 */
// TODO(typescript-config): networkDetails can currently be a XHR or Fetch impl,
// but with custom loaders it could be generic investigate this further when config is typed
_proto.loadtimeout = function loadtimeout(stats, context, networkDetails) {
  if (networkDetails === undefined) {
    networkDetails = null;
  }

  this._handleNetworkError(context, networkDetails, true);
};
+
/**
 * Parses a master playlist: extracts quality levels plus AUDIO/SUBTITLES
 * rendition groups, synthesizes a "main" audio track when the levels signal
 * an audio codec but no rendition, and fires MANIFEST_LOADED.
 */
_proto._handleMasterPlaylist = function _handleMasterPlaylist(response, stats, context, networkDetails) {
  var hls = this.hls;
  var string = response.data;
  var url = PlaylistLoader.getResponseUrl(response, context);
  var levels = m3u8_parser_M3U8Parser.parseMasterPlaylist(string, url);

  if (!levels.length) {
    // a master playlist without variants is unusable
    this._handleManifestParsingError(response, context, 'no level found in manifest', networkDetails);

    return;
  } // multi level playlist, parse level info


  // pair each level's AUDIO group id with its audio codec for media parsing
  var audioGroups = levels.map(function (level) {
    return {
      id: level.attrs.AUDIO,
      codec: level.audioCodec
    };
  });
  var audioTracks = m3u8_parser_M3U8Parser.parseMasterPlaylistMedia(string, url, 'AUDIO', audioGroups);
  var subtitles = m3u8_parser_M3U8Parser.parseMasterPlaylistMedia(string, url, 'SUBTITLES');

  if (audioTracks.length) {
    // check if we have found an audio track embedded in main playlist (audio track without URI attribute)
    var embeddedAudioFound = false;
    audioTracks.forEach(function (audioTrack) {
      if (!audioTrack.url) {
        embeddedAudioFound = true;
      }
    }); // if no embedded audio track defined, but audio codec signaled in quality level,
    // we need to signal this main audio track this could happen with playlists with
    // alt audio rendition in which quality levels (main)
    // contains both audio+video. but with mixed audio track not signaled

    if (embeddedAudioFound === false && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
      logger.log('audio codec signaled in quality level, but no embedded audio track signaled, create one');
      audioTracks.unshift({
        type: 'main',
        name: 'main',
        default: false,
        autoselect: false,
        forced: false,
        id: -1
      });
    }
  }

  hls.trigger(events.MANIFEST_LOADED, {
    levels: levels,
    audioTracks: audioTracks,
    subtitles: subtitles,
    url: url,
    stats: stats,
    networkDetails: networkDetails
  });
};
+
/**
 * Parses a chunk-list (level / audio-track / subtitle-track playlist).
 * If the request was of MANIFEST type (media playlist served directly),
 * also fires MANIFEST_LOADED with a synthetic single-level structure.
 * Issues a follow-up ranged request for the SIDX box when needed, otherwise
 * attaches the parsed details to the context and completes the load.
 */
_proto._handleTrackOrLevelPlaylist = function _handleTrackOrLevelPlaylist(response, stats, context, networkDetails) {
  var hls = this.hls;
  var id = context.id,
      level = context.level,
      type = context.type;
  var url = PlaylistLoader.getResponseUrl(response, context); // if the values are null, they will result in the else conditional

  var levelUrlId = isFiniteNumber(id) ? id : 0;
  var levelId = isFiniteNumber(level) ? level : levelUrlId;
  var levelType = PlaylistLoader.mapContextToLevelType(context);
  var levelDetails = m3u8_parser_M3U8Parser.parseLevelPlaylist(response.data, url, levelId, levelType, levelUrlId); // set stats on level structure
  // TODO(jstackhouse): why? mixing concerns, is it just treated as value bag?

  levelDetails.tload = stats.tload; // We have done our first request (Manifest-type) and receive
  // not a master playlist but a chunk-list (track/level)
  // We fire the manifest-loaded event anyway with the parsed level-details
  // by creating a single-level structure for it.

  if (type === PlaylistContextType.MANIFEST) {
    var singleLevel = {
      url: url,
      details: levelDetails
    };
    hls.trigger(events.MANIFEST_LOADED, {
      levels: [singleLevel],
      audioTracks: [],
      url: url,
      stats: stats,
      networkDetails: networkDetails
    });
  } // save parsing time


  stats.tparsed = performance.now(); // in case we need SIDX ranges
  // return early after calling load for
  // the SIDX box.

  if (levelDetails.needSidxRanges) {
    // request only the first 2KB of the init segment; enough to read the sidx box
    var sidxUrl = levelDetails.initSegment.url;
    this.load({
      url: sidxUrl,
      isSidxRequest: true,
      type: type,
      level: level,
      levelDetails: levelDetails,
      id: id,
      rangeStart: 0,
      rangeEnd: 2048,
      responseType: 'arraybuffer'
    });
    return;
  } // extend the context with the new levelDetails property


  context.levelDetails = levelDetails;

  this._handlePlaylistLoaded(response, stats, context, networkDetails);
};
+
/**
 * Applies SIDX segment-index information onto the previously parsed level
 * details: fills each fragment's byte range (when not already set) and the
 * init segment's byte range ("moovEnd@0").
 * @throws {Error} if the response was not fetched as an array buffer
 */
_proto._handleSidxRequest = function _handleSidxRequest(response, context) {
  if (typeof response.data === 'string') {
    throw new Error('sidx request must be made with responseType of array buffer');
  }

  var sidxInfo = mp4demuxer.parseSegmentIndex(new Uint8Array(response.data)); // if provided fragment does not contain sidx, early return

  if (!sidxInfo) {
    return;
  }

  var sidxReferences = sidxInfo.references;
  var levelDetails = context.levelDetails;
  sidxReferences.forEach(function (segmentRef, index) {
    var segRefInfo = segmentRef.info;

    if (!levelDetails) {
      return;
    }

    // NOTE(review): assumes sidx references and playlist fragments are index-aligned;
    // frag would be undefined if the sidx has more references than fragments — confirm upstream guarantees this
    var frag = levelDetails.fragments[index];

    if (frag.byteRange.length === 0) {
      // byte range string format is "<length>@<offset>"
      frag.setByteRange(String(1 + segRefInfo.end - segRefInfo.start) + '@' + String(segRefInfo.start));
    }
  });

  if (levelDetails) {
    levelDetails.initSegment.setByteRange(String(sidxInfo.moovEndOffset) + '@0');
  }
};
+
/**
 * Emits a fatal MANIFEST_PARSING_ERROR on the hls event bus.
 * @param {Object} response - the response whose body failed to parse
 * @param {Object} context - the playlist load context
 * @param {string} reason - human-readable parse failure description
 * @param {*} networkDetails - underlying XHR/fetch object
 */
_proto._handleManifestParsingError = function _handleManifestParsingError(response, context, reason, networkDetails) {
  var errorEvent = {
    type: ErrorTypes.NETWORK_ERROR,
    details: ErrorDetails.MANIFEST_PARSING_ERROR,
    fatal: true,
    url: response.url,
    reason: reason,
    networkDetails: networkDetails
  };
  this.hls.trigger(events.ERROR, errorEvent);
};
+
/**
 * Common handler for playlist load errors and timeouts: maps the context
 * type to an error-detail constant (manifest failures are fatal, the rest
 * are not), aborts and resets any in-flight loader for that context, and
 * fires an ERROR event with the collected information.
 * @param {Object} context - the playlist load context
 * @param {*} networkDetails - underlying XHR/fetch object
 * @param {boolean} [timeout=false] - true when this is a timeout, not an error
 * @param {Object} [response=null] - failing response, if one was received
 */
_proto._handleNetworkError = function _handleNetworkError(context, networkDetails, timeout, response) {
  if (timeout === void 0) {
    timeout = false;
  }

  if (response === void 0) {
    response = null;
  }

  // Fixed typo in the logged message: "occured" -> "occurred"
  logger.info("A network error occurred while loading a " + context.type + "-type playlist");
  var details;
  var fatal;
  var loader = this.getInternalLoader(context);

  switch (context.type) {
    case PlaylistContextType.MANIFEST:
      details = timeout ? ErrorDetails.MANIFEST_LOAD_TIMEOUT : ErrorDetails.MANIFEST_LOAD_ERROR;
      fatal = true;
      break;

    case PlaylistContextType.LEVEL:
      details = timeout ? ErrorDetails.LEVEL_LOAD_TIMEOUT : ErrorDetails.LEVEL_LOAD_ERROR;
      fatal = false;
      break;

    case PlaylistContextType.AUDIO_TRACK:
      details = timeout ? ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT : ErrorDetails.AUDIO_TRACK_LOAD_ERROR;
      fatal = false;
      break;

    default:
      // details = ...? (subtitle-track errors currently carry no detail code)
      fatal = false;
  }

  if (loader) {
    loader.abort();
    this.resetInternalLoader(context.type);
  } // TODO(typescript-events): when error events are handled, type this


  var errorData = {
    type: ErrorTypes.NETWORK_ERROR,
    details: details,
    fatal: fatal,
    url: context.url,
    loader: loader,
    context: context,
    networkDetails: networkDetails
  };

  if (response) {
    errorData.response = response;
  }

  this.hls.trigger(events.ERROR, errorData);
};
+
/**
 * Final step of a successful playlist load: validates the parsed details
 * carry a target duration, then fires the event matching the context type
 * (LEVEL_LOADED for quality-level contexts, otherwise the matching
 * track-loaded event).
 */
_proto._handlePlaylistLoaded = function _handlePlaylistLoaded(response, stats, context, networkDetails) {
  var type = context.type;
  var levelDetails = context.levelDetails;

  if (!levelDetails || !levelDetails.targetduration) {
    // a chunk-list without EXT-X-TARGETDURATION is not playable
    this._handleManifestParsingError(response, context, 'invalid target duration', networkDetails);
    return;
  }

  if (PlaylistLoader.canHaveQualityLevels(context.type)) {
    this.hls.trigger(events.LEVEL_LOADED, {
      details: levelDetails,
      level: context.level || 0,
      id: context.id || 0,
      stats: stats,
      networkDetails: networkDetails
    });
  } else if (type === PlaylistContextType.AUDIO_TRACK) {
    this.hls.trigger(events.AUDIO_TRACK_LOADED, {
      details: levelDetails,
      id: context.id,
      stats: stats,
      networkDetails: networkDetails
    });
  } else if (type === PlaylistContextType.SUBTITLE_TRACK) {
    this.hls.trigger(events.SUBTITLE_TRACK_LOADED, {
      details: levelDetails,
      id: context.id,
      stats: stats,
      networkDetails: networkDetails
    });
  }
};
+
+ return PlaylistLoader;
+}(event_handler);
+
+/* harmony default export */ var playlist_loader = (playlist_loader_PlaylistLoader);
+// CONCATENATED MODULE: ./src/loader/fragment-loader.js
+
+
+
/**
 * Babel "loose" inheritance helper: wires subClass to extend superClass.
 * Instance methods are inherited via a prototype created off
 * superClass.prototype; static members are inherited by re-pointing the
 * constructor's own prototype link. Uses Object.setPrototypeOf instead of
 * assigning the deprecated `__proto__` accessor.
 * @param {Function} subClass - constructor to turn into a derived class
 * @param {Function} superClass - constructor to derive from
 */
function fragment_loader_inheritsLoose(subClass, superClass) {
  subClass.prototype = Object.create(superClass.prototype);
  subClass.prototype.constructor = subClass;
  Object.setPrototypeOf(subClass, superClass);
}
+
+/*
+ * Fragment Loader
+*/
+
+
+
+
+
/**
 * FragmentLoader: listens for FRAG_LOADING events, drives the configured
 * loader implementation (config.fLoader or config.loader) to fetch media
 * fragments, and re-emits FRAG_LOADED / FRAG_LOAD_PROGRESS / ERROR events.
 * One loader instance is kept per fragment type.
 */
var fragment_loader_FragmentLoader =
/*#__PURE__*/
function (_EventHandler) {
  fragment_loader_inheritsLoose(FragmentLoader, _EventHandler);

  function FragmentLoader(hls) {
    var _this;

    _this = _EventHandler.call(this, hls, events.FRAG_LOADING) || this;
    // map of fragment type -> active loader instance
    _this.loaders = {};
    return _this;
  }

  var _proto = FragmentLoader.prototype;

  // Tears down every active loader, then the base event handler.
  _proto.destroy = function destroy() {
    var loaders = this.loaders;

    for (var loaderName in loaders) {
      var loader = loaders[loaderName];

      if (loader) {
        loader.destroy();
      }
    }

    this.loaders = {};

    _EventHandler.prototype.destroy.call(this);
  };

  // FRAG_LOADING handler: aborts any previous loader for the same fragment
  // type and starts a new ranged arraybuffer request for data.frag.
  _proto.onFragLoading = function onFragLoading(data) {
    var frag = data.frag,
        type = frag.type,
        loaders = this.loaders,
        config = this.hls.config,
        FragmentILoader = config.fLoader,
        DefaultILoader = config.loader; // reset fragment state

    frag.loaded = 0;
    var loader = loaders[type];

    if (loader) {
      logger.warn("abort previous fragment loader for type: " + type);
      loader.abort();
    }

    // prefer the custom fragment loader (config.fLoader) when provided
    loader = loaders[type] = frag.loader = config.fLoader ? new FragmentILoader(config) : new DefaultILoader(config);
    var loaderContext, loaderConfig, loaderCallbacks;
    loaderContext = {
      url: frag.url,
      frag: frag,
      responseType: 'arraybuffer',
      progressData: false
    };
    var start = frag.byteRangeStartOffset,
        end = frag.byteRangeEndOffset;

    // only issue a ranged request when both byte-range bounds are finite
    if (isFiniteNumber(start) && isFiniteNumber(end)) {
      loaderContext.rangeStart = start;
      loaderContext.rangeEnd = end;
    }

    // retries are disabled here; retry logic lives in the stream controller
    loaderConfig = {
      timeout: config.fragLoadingTimeOut,
      maxRetry: 0,
      retryDelay: 0,
      maxRetryDelay: config.fragLoadingMaxRetryTimeout
    };
    loaderCallbacks = {
      onSuccess: this.loadsuccess.bind(this),
      onError: this.loaderror.bind(this),
      onTimeout: this.loadtimeout.bind(this),
      onProgress: this.loadprogress.bind(this)
    };
    loader.load(loaderContext, loaderConfig, loaderCallbacks);
  };

  // Success callback: detaches the loader and emits FRAG_LOADED with the payload.
  _proto.loadsuccess = function loadsuccess(response, stats, context, networkDetails) {
    if (networkDetails === void 0) {
      networkDetails = null;
    }

    var payload = response.data,
        frag = context.frag; // detach fragment loader on load success

    frag.loader = undefined;
    this.loaders[frag.type] = undefined;
    this.hls.trigger(events.FRAG_LOADED, {
      payload: payload,
      frag: frag,
      stats: stats,
      networkDetails: networkDetails
    });
  };

  // Error callback: aborts the loader and emits a non-fatal FRAG_LOAD_ERROR.
  _proto.loaderror = function loaderror(response, context, networkDetails) {
    if (networkDetails === void 0) {
      networkDetails = null;
    }

    var frag = context.frag;
    var loader = frag.loader;

    if (loader) {
      loader.abort();
    }

    this.loaders[frag.type] = undefined;
    this.hls.trigger(events.ERROR, {
      type: ErrorTypes.NETWORK_ERROR,
      details: ErrorDetails.FRAG_LOAD_ERROR,
      fatal: false,
      frag: context.frag,
      response: response,
      networkDetails: networkDetails
    });
  };

  // Timeout callback: aborts the loader and emits a non-fatal FRAG_LOAD_TIMEOUT.
  _proto.loadtimeout = function loadtimeout(stats, context, networkDetails) {
    if (networkDetails === void 0) {
      networkDetails = null;
    }

    var frag = context.frag;
    var loader = frag.loader;

    if (loader) {
      loader.abort();
    }

    this.loaders[frag.type] = undefined;
    this.hls.trigger(events.ERROR, {
      type: ErrorTypes.NETWORK_ERROR,
      details: ErrorDetails.FRAG_LOAD_TIMEOUT,
      fatal: false,
      frag: context.frag,
      networkDetails: networkDetails
    });
  } // data will be used for progressive parsing
  ;

  // Progress callback: tracks bytes loaded and emits FRAG_LOAD_PROGRESS.
  _proto.loadprogress = function loadprogress(stats, context, data, networkDetails) {
    if (networkDetails === void 0) {
      networkDetails = null;
    }

    // jshint ignore:line
    var frag = context.frag;
    frag.loaded = stats.loaded;
    this.hls.trigger(events.FRAG_LOAD_PROGRESS, {
      frag: frag,
      stats: stats,
      networkDetails: networkDetails
    });
  };

  return FragmentLoader;
}(event_handler);
+
+/* harmony default export */ var fragment_loader = (fragment_loader_FragmentLoader);
+// CONCATENATED MODULE: ./src/loader/key-loader.ts
/**
 * Babel "loose" inheritance helper: wires subClass to extend superClass.
 * Instance methods are inherited via a prototype created off
 * superClass.prototype; static members are inherited by re-pointing the
 * constructor's own prototype link. Uses Object.setPrototypeOf instead of
 * assigning the deprecated `__proto__` accessor.
 * @param {Function} subClass - constructor to turn into a derived class
 * @param {Function} superClass - constructor to derive from
 */
function key_loader_inheritsLoose(subClass, superClass) {
  subClass.prototype = Object.create(superClass.prototype);
  subClass.prototype.constructor = subClass;
  Object.setPrototypeOf(subClass, superClass);
}
+
+/*
+ * Decrypt key Loader
+*/
+
+
+
+
+
/**
 * KeyLoader: listens for KEY_LOADING events and fetches AES decryption keys
 * for encrypted fragments. The last fetched key is cached by URI so repeated
 * fragments sharing a key do not trigger another network request.
 */
var key_loader_KeyLoader =
/*#__PURE__*/
function (_EventHandler) {
  key_loader_inheritsLoose(KeyLoader, _EventHandler);

  function KeyLoader(hls) {
    var _this;

    _this = _EventHandler.call(this, hls, events.KEY_LOADING) || this;
    // map of fragment type -> active loader instance
    _this.loaders = {};
    // cached key bytes and the URI they were fetched from
    _this.decryptkey = null;
    _this.decrypturl = null;
    return _this;
  }

  var _proto = KeyLoader.prototype;

  // Tears down every active loader, then the base event handler.
  _proto.destroy = function destroy() {
    for (var loaderName in this.loaders) {
      var loader = this.loaders[loaderName];

      if (loader) {
        loader.destroy();
      }
    }

    this.loaders = {};

    _EventHandler.prototype.destroy.call(this);
  };

  // KEY_LOADING handler: serves the cached key when the URI matches,
  // otherwise aborts any stale request and fetches the key bytes.
  _proto.onKeyLoading = function onKeyLoading(data) {
    var frag = data.frag;
    var type = frag.type;
    var loader = this.loaders[type];

    if (!frag.decryptdata) {
      logger.warn('Missing decryption data on fragment in onKeyLoading');
      return;
    } // Load the key if the uri is different from previous one, or if the decrypt key has not yet been retrieved


    var uri = frag.decryptdata.uri;

    if (uri !== this.decrypturl || this.decryptkey === null) {
      var config = this.hls.config;

      if (loader) {
        logger.warn("abort previous key loader for type:" + type);
        loader.abort();
      }

      if (!uri) {
        logger.warn('key uri is falsy');
        return;
      }

      frag.loader = this.loaders[type] = new config.loader(config);
      this.decrypturl = uri;
      this.decryptkey = null;
      var loaderContext = {
        url: uri,
        frag: frag,
        responseType: 'arraybuffer'
      }; // maxRetry is 0 so that instead of retrying the same key on the same variant multiple times,
      // key-loader will trigger an error and rely on stream-controller to handle retry logic.
      // this will also align retry logic with fragment-loader

      var loaderConfig = {
        timeout: config.fragLoadingTimeOut,
        maxRetry: 0,
        retryDelay: config.fragLoadingRetryDelay,
        maxRetryDelay: config.fragLoadingMaxRetryTimeout
      };
      var loaderCallbacks = {
        onSuccess: this.loadsuccess.bind(this),
        onError: this.loaderror.bind(this),
        onTimeout: this.loadtimeout.bind(this)
      };
      frag.loader.load(loaderContext, loaderConfig, loaderCallbacks);
    } else if (this.decryptkey) {
      // Return the key if it's already been loaded
      frag.decryptdata.key = this.decryptkey;
      this.hls.trigger(events.KEY_LOADED, {
        frag: frag
      });
    }
  };

  // Success callback: caches the key bytes, attaches them to the fragment,
  // detaches the loader and emits KEY_LOADED.
  _proto.loadsuccess = function loadsuccess(response, stats, context) {
    var frag = context.frag;

    if (!frag.decryptdata) {
      logger.error('after key load, decryptdata unset');
      return;
    }

    this.decryptkey = frag.decryptdata.key = new Uint8Array(response.data); // detach fragment loader on load success

    frag.loader = undefined;
    delete this.loaders[frag.type];
    this.hls.trigger(events.KEY_LOADED, {
      frag: frag
    });
  };

  // Error callback: aborts the loader and emits a non-fatal KEY_LOAD_ERROR.
  _proto.loaderror = function loaderror(response, context) {
    var frag = context.frag;
    var loader = frag.loader;

    if (loader) {
      loader.abort();
    }

    delete this.loaders[frag.type];
    this.hls.trigger(events.ERROR, {
      type: ErrorTypes.NETWORK_ERROR,
      details: ErrorDetails.KEY_LOAD_ERROR,
      fatal: false,
      frag: frag,
      response: response
    });
  };

  // Timeout callback: aborts the loader and emits a non-fatal KEY_LOAD_TIMEOUT.
  _proto.loadtimeout = function loadtimeout(stats, context) {
    var frag = context.frag;
    var loader = frag.loader;

    if (loader) {
      loader.abort();
    }

    delete this.loaders[frag.type];
    this.hls.trigger(events.ERROR, {
      type: ErrorTypes.NETWORK_ERROR,
      details: ErrorDetails.KEY_LOAD_TIMEOUT,
      fatal: false,
      frag: frag
    });
  };

  return KeyLoader;
}(event_handler);
+
+/* harmony default export */ var key_loader = (key_loader_KeyLoader);
+// CONCATENATED MODULE: ./src/controller/fragment-tracker.js
+
+
/**
 * Babel "loose" inheritance helper: wires subClass to extend superClass.
 * Instance methods are inherited via a prototype created off
 * superClass.prototype; static members are inherited by re-pointing the
 * constructor's own prototype link. Uses Object.setPrototypeOf instead of
 * assigning the deprecated `__proto__` accessor.
 * @param {Function} subClass - constructor to turn into a derived class
 * @param {Function} superClass - constructor to derive from
 */
function fragment_tracker_inheritsLoose(subClass, superClass) {
  subClass.prototype = Object.create(superClass.prototype);
  subClass.prototype.constructor = subClass;
  Object.setPrototypeOf(subClass, superClass);
}
+
+
+
// Lifecycle states reported by FragmentTracker.getState():
//   NOT_LOADED - never loaded, or evicted from the buffer
//   APPENDING  - loaded but not yet confirmed buffered
//   PARTIAL    - buffered, but only partially present in the media buffer
//   OK         - fully buffered
var FragmentState = {
  NOT_LOADED: 'NOT_LOADED',
  APPENDING: 'APPENDING',
  PARTIAL: 'PARTIAL',
  OK: 'OK'
};
/**
 * FragmentTracker: keeps a registry of loaded fragments and their actual
 * buffered time ranges, so partially buffered or evicted fragments can be
 * detected and reloaded. Listens to BUFFER_APPENDED, FRAG_BUFFERED and
 * FRAG_LOADED events.
 */
var fragment_tracker_FragmentTracker =
/*#__PURE__*/
function (_EventHandler) {
  fragment_tracker_inheritsLoose(FragmentTracker, _EventHandler);

  function FragmentTracker(hls) {
    var _this;

    _this = _EventHandler.call(this, hls, events.BUFFER_APPENDED, events.FRAG_BUFFERED, events.FRAG_LOADED) || this;
    // tolerance (seconds) applied around buffered ranges when matching fragments
    _this.bufferPadding = 0.2;
    // prototype-less maps: fragment key -> entity, elementary stream -> TimeRanges
    _this.fragments = Object.create(null);
    _this.timeRanges = Object.create(null);
    _this.config = hls.config;
    return _this;
  }

  var _proto = FragmentTracker.prototype;

  // Clears all tracked state and detaches from the event bus.
  // Fix: the superclass destroy was previously invoked twice
  // (once via `event_handler.prototype.destroy` and again via
  // `_EventHandler.prototype.destroy`, which are the same function);
  // it is now called exactly once.
  _proto.destroy = function destroy() {
    this.fragments = Object.create(null);
    this.timeRanges = Object.create(null);
    this.config = null;

    _EventHandler.prototype.destroy.call(this);
  };

  /**
   * Return a Fragment that match the position and levelType.
   * If not found any Fragment, return null
   * @param {number} position
   * @param {LevelType} levelType
   * @returns {Fragment|null}
   */
  _proto.getBufferedFrag = function getBufferedFrag(position, levelType) {
    var fragments = this.fragments;
    var bufferedFrags = Object.keys(fragments).filter(function (key) {
      var fragmentEntity = fragments[key];

      if (fragmentEntity.body.type !== levelType) {
        return false;
      }

      if (!fragmentEntity.buffered) {
        return false;
      }

      var frag = fragmentEntity.body;
      return frag.startPTS <= position && position <= frag.endPTS;
    });

    if (bufferedFrags.length === 0) {
      return null;
    } else {
      // https://github.com/video-dev/hls.js/pull/1545#discussion_r166229566
      var bufferedFragKey = bufferedFrags.pop();
      return fragments[bufferedFragKey].body;
    }
  };

  /**
   * Partial fragments effected by coded frame eviction will be removed
   * The browser will unload parts of the buffer to free up memory for new buffer data
   * Fragments will need to be reloaded when the buffer is freed up, removing partial fragments will allow them to reload(since there might be parts that are still playable)
   * @param {String} elementaryStream The elementaryStream of media this is (eg. video/audio)
   * @param {TimeRanges} timeRange TimeRange object from a sourceBuffer
   */
  _proto.detectEvictedFragments = function detectEvictedFragments(elementaryStream, timeRange) {
    var _this2 = this;

    var fragmentTimes, time; // Check if any flagged fragments have been unloaded

    Object.keys(this.fragments).forEach(function (key) {
      var fragmentEntity = _this2.fragments[key];

      if (fragmentEntity.buffered === true) {
        var esData = fragmentEntity.range[elementaryStream];

        if (esData) {
          fragmentTimes = esData.time;

          for (var i = 0; i < fragmentTimes.length; i++) {
            time = fragmentTimes[i];

            if (_this2.isTimeBuffered(time.startPTS, time.endPTS, timeRange) === false) {
              // Unregister partial fragment as it needs to load again to be reused
              _this2.removeFragment(fragmentEntity.body);

              break;
            }
          }
        }
      }
    });
  };

  /**
   * Checks if the fragment passed in is loaded in the buffer properly
   * Partially loaded fragments will be registered as a partial fragment
   * @param {Object} fragment Check the fragment against all sourceBuffers loaded
   */
  _proto.detectPartialFragments = function detectPartialFragments(fragment) {
    var _this3 = this;

    var fragKey = this.getFragmentKey(fragment);
    var fragmentEntity = this.fragments[fragKey];

    if (fragmentEntity) {
      fragmentEntity.buffered = true;
      Object.keys(this.timeRanges).forEach(function (elementaryStream) {
        if (fragment.hasElementaryStream(elementaryStream)) {
          var timeRange = _this3.timeRanges[elementaryStream]; // Check for malformed fragments
          // Gaps need to be calculated for each elementaryStream

          fragmentEntity.range[elementaryStream] = _this3.getBufferedTimes(fragment.startPTS, fragment.endPTS, timeRange);
        }
      });
    }
  };

  /**
   * Computes which parts of [startPTS, endPTS] are covered by the given
   * TimeRanges (with bufferPadding tolerance).
   * @returns {{time: Array, partial: boolean}} the covered sub-ranges and
   * whether the fragment is only partially buffered
   */
  _proto.getBufferedTimes = function getBufferedTimes(startPTS, endPTS, timeRange) {
    var fragmentTimes = [];
    var startTime, endTime;
    var fragmentPartial = false;

    for (var i = 0; i < timeRange.length; i++) {
      startTime = timeRange.start(i) - this.bufferPadding;
      endTime = timeRange.end(i) + this.bufferPadding;

      if (startPTS >= startTime && endPTS <= endTime) {
        // Fragment is entirely contained in buffer
        // No need to check the other timeRange times since it's completely playable
        fragmentTimes.push({
          startPTS: Math.max(startPTS, timeRange.start(i)),
          endPTS: Math.min(endPTS, timeRange.end(i))
        });
        break;
      } else if (startPTS < endTime && endPTS > startTime) {
        // Check for intersection with buffer
        // Get playable sections of the fragment
        fragmentTimes.push({
          startPTS: Math.max(startPTS, timeRange.start(i)),
          endPTS: Math.min(endPTS, timeRange.end(i))
        });
        fragmentPartial = true;
      } else if (endPTS <= startTime) {
        // No need to check the rest of the timeRange as it is in order
        break;
      }
    }

    return {
      time: fragmentTimes,
      partial: fragmentPartial
    };
  };

  // Builds the registry key for a fragment: type, level, urlId and sequence number.
  _proto.getFragmentKey = function getFragmentKey(fragment) {
    return fragment.type + "_" + fragment.level + "_" + fragment.urlId + "_" + fragment.sn;
  };

  /**
   * Gets the partial fragment for a certain time
   * @param {Number} time
   * @returns {Object} fragment Returns a partial fragment at a time or null if there is no partial fragment
   */
  _proto.getPartialFragment = function getPartialFragment(time) {
    var _this4 = this;

    var timePadding, startTime, endTime;
    var bestFragment = null;
    var bestOverlap = 0;
    Object.keys(this.fragments).forEach(function (key) {
      var fragmentEntity = _this4.fragments[key];

      if (_this4.isPartial(fragmentEntity)) {
        startTime = fragmentEntity.body.startPTS - _this4.bufferPadding;
        endTime = fragmentEntity.body.endPTS + _this4.bufferPadding;

        if (time >= startTime && time <= endTime) {
          // Use the fragment that has the most padding from start and end time
          timePadding = Math.min(time - startTime, endTime - time);

          if (bestOverlap <= timePadding) {
            bestFragment = fragmentEntity.body;
            bestOverlap = timePadding;
          }
        }
      }
    });
    return bestFragment;
  };

  /**
   * @param {Object} fragment The fragment to check
   * @returns {String} Returns the fragment state when a fragment never loaded or if it partially loaded
   */
  _proto.getState = function getState(fragment) {
    var fragKey = this.getFragmentKey(fragment);
    var fragmentEntity = this.fragments[fragKey];
    var state = FragmentState.NOT_LOADED;

    if (fragmentEntity !== undefined) {
      if (!fragmentEntity.buffered) {
        state = FragmentState.APPENDING;
      } else if (this.isPartial(fragmentEntity) === true) {
        state = FragmentState.PARTIAL;
      } else {
        state = FragmentState.OK;
      }
    }

    return state;
  };

  // True when the entity is buffered but its video or audio range is flagged partial.
  _proto.isPartial = function isPartial(fragmentEntity) {
    return fragmentEntity.buffered === true && (fragmentEntity.range.video !== undefined && fragmentEntity.range.video.partial === true || fragmentEntity.range.audio !== undefined && fragmentEntity.range.audio.partial === true);
  };

  // True when [startPTS, endPTS] is fully contained in one buffered range
  // (with bufferPadding tolerance).
  _proto.isTimeBuffered = function isTimeBuffered(startPTS, endPTS, timeRange) {
    var startTime, endTime;

    for (var i = 0; i < timeRange.length; i++) {
      startTime = timeRange.start(i) - this.bufferPadding;
      endTime = timeRange.end(i) + this.bufferPadding;

      if (startPTS >= startTime && endPTS <= endTime) {
        return true;
      }

      if (endPTS <= startTime) {
        // No need to check the rest of the timeRange as it is in order
        return false;
      }
    }

    return false;
  };

  /**
   * Fires when a fragment loading is completed
   */
  _proto.onFragLoaded = function onFragLoaded(e) {
    var fragment = e.frag; // don't track initsegment (for which sn is not a number)
    // don't track frags used for bitrateTest, they're irrelevant.

    if (!isFiniteNumber(fragment.sn) || fragment.bitrateTest) {
      return;
    }

    this.fragments[this.getFragmentKey(fragment)] = {
      body: fragment,
      range: Object.create(null),
      buffered: false
    };
  };

  /**
   * Fires when the buffer is updated
   */
  _proto.onBufferAppended = function onBufferAppended(e) {
    var _this5 = this;

    // Store the latest timeRanges loaded in the buffer
    this.timeRanges = e.timeRanges;
    Object.keys(this.timeRanges).forEach(function (elementaryStream) {
      var timeRange = _this5.timeRanges[elementaryStream];

      _this5.detectEvictedFragments(elementaryStream, timeRange);
    });
  };

  /**
   * Fires after a fragment has been loaded into the source buffer
   */
  _proto.onFragBuffered = function onFragBuffered(e) {
    this.detectPartialFragments(e.frag);
  };

  /**
   * Return true if fragment tracker has the fragment.
   * @param {Object} fragment
   * @returns {boolean}
   */
  _proto.hasFragment = function hasFragment(fragment) {
    var fragKey = this.getFragmentKey(fragment);
    return this.fragments[fragKey] !== undefined;
  };

  /**
   * Remove a fragment from fragment tracker until it is loaded again
   * @param {Object} fragment The fragment to remove
   */
  _proto.removeFragment = function removeFragment(fragment) {
    var fragKey = this.getFragmentKey(fragment);
    delete this.fragments[fragKey];
  };

  /**
   * Remove all fragments from fragment tracker.
   */
  _proto.removeAllFragments = function removeAllFragments() {
    this.fragments = Object.create(null);
  };

  return FragmentTracker;
}(event_handler);
+// EXTERNAL MODULE: ./src/controller/stream-controller.js
+var stream_controller = __webpack_require__("./src/controller/stream-controller.js");
+var stream_controller_default = /*#__PURE__*/__webpack_require__.n(stream_controller);
+
+// CONCATENATED MODULE: ./src/controller/level-helper.js
+
+
+
+
+
+/**
+ * @module LevelHelper
+ *
+ * Providing methods dealing with playlist sliding and drift
+ *
+ * TODO: Create an actual `Level` class/model that deals with all this logic in an object-oriented-manner.
+ *
+ * */
+
/**
 * Appends a rendition group id to a level, creating the id bucket on first
 * use. Only 'audio' and 'text' group types are tracked; other types are
 * ignored.
 * @param {Object} level - level object to annotate
 * @param {string} type - rendition type ('audio' or 'text')
 * @param {string} id - group id to record
 */
function addGroupId(level, type, id) {
  if (type === 'audio') {
    (level.audioGroupIds = level.audioGroupIds || []).push(id);
  } else if (type === 'text') {
    (level.textGroupIds = level.textGroupIds || []).push(id);
  }
}
/**
 * Propagates PTS information between two adjacent fragments.
 * When the destination fragment's startPTS is known, the source fragment's
 * duration is corrected from it; otherwise the destination's start time is
 * derived from the source's start and duration.
 * @param {Array} fragments - fragment list of the playlist
 * @param {number} fromIdx - index of the fragment with trusted timing
 * @param {number} toIdx - index of the adjacent fragment to reconcile (fromIdx +/- 1)
 */
function updatePTS(fragments, fromIdx, toIdx) {
  var fragFrom = fragments[fromIdx],
      fragTo = fragments[toIdx],
      fragToPTS = fragTo.startPTS; // if we know startPTS[toIdx]

  if (isFiniteNumber(fragToPTS)) {
    // update fragment duration.
    // it helps to fix drifts between playlist reported duration and fragment real duration
    if (toIdx > fromIdx) {
      fragFrom.duration = fragToPTS - fragFrom.start;

      if (fragFrom.duration < 0) {
        logger.warn("negative duration computed for frag " + fragFrom.sn + ",level " + fragFrom.level + ", there should be some duration drift between playlist and fragment!");
      }
    } else {
      fragTo.duration = fragFrom.start - fragToPTS;

      if (fragTo.duration < 0) {
        logger.warn("negative duration computed for frag " + fragTo.sn + ",level " + fragTo.level + ", there should be some duration drift between playlist and fragment!");
      }
    }
  } else {
    // we dont know startPTS[toIdx]
    if (toIdx > fromIdx) {
      // forward direction: start right after the trusted fragment
      fragTo.start = fragFrom.start + fragFrom.duration;
    } else {
      // backward direction: start before the trusted fragment, clamped at 0
      fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0);
    }
  }
}
/**
 * Updates a fragment's PTS/DTS timing with freshly demuxed values, merges
 * them with any previously known timing, re-registers the fragment in the
 * playlist details and propagates the corrected timing to all neighboring
 * fragments in both directions.
 * @param {Object} details - level details owning the fragment (may be null/stale)
 * @param {Object} frag - the fragment being updated
 * @param {number} startPTS / endPTS / startDTS / endDTS - demuxed timestamps
 * @returns {number} the sliding drift between demuxed and playlist start time
 * (0 when the fragment's sn is outside the playlist range)
 */
function updateFragPTSDTS(details, frag, startPTS, endPTS, startDTS, endDTS) {
  // update frag PTS/DTS
  var maxStartPTS = startPTS;

  if (isFiniteNumber(frag.startPTS)) {
    // delta PTS between audio and video
    var deltaPTS = Math.abs(frag.startPTS - startPTS);

    if (!isFiniteNumber(frag.deltaPTS)) {
      frag.deltaPTS = deltaPTS;
    } else {
      frag.deltaPTS = Math.max(deltaPTS, frag.deltaPTS);
    }

    // widen the known PTS/DTS window with the previously stored values
    maxStartPTS = Math.max(startPTS, frag.startPTS);
    startPTS = Math.min(startPTS, frag.startPTS);
    endPTS = Math.max(endPTS, frag.endPTS);
    startDTS = Math.min(startDTS, frag.startDTS);
    endDTS = Math.max(endDTS, frag.endDTS);
  }

  var drift = startPTS - frag.start;
  frag.start = frag.startPTS = startPTS;
  frag.maxStartPTS = maxStartPTS;
  frag.endPTS = endPTS;
  frag.startDTS = startDTS;
  frag.endDTS = endDTS;
  frag.duration = endPTS - startPTS;
  var sn = frag.sn; // exit if sn out of range

  if (!details || sn < details.startSN || sn > details.endSN) {
    return 0;
  }

  var fragIdx, fragments, i;
  fragIdx = sn - details.startSN;
  fragments = details.fragments; // update frag reference in fragments array
  // rationale is that fragments array might not contain this frag object.
  // this will happen if playlist has been refreshed between frag loading and call to updateFragPTSDTS()
  // if we don't update frag, we won't be able to propagate PTS info on the playlist
  // resulting in invalid sliding computation

  fragments[fragIdx] = frag; // adjust fragment PTS/duration from seqnum-1 to frag 0

  for (i = fragIdx; i > 0; i--) {
    updatePTS(fragments, i, i - 1);
  } // adjust fragment PTS/duration from seqnum to last frag


  for (i = fragIdx; i < fragments.length - 1; i++) {
    updatePTS(fragments, i, i + 1);
  }

  details.PTSKnown = true;
  return drift;
}
/**
 * Merges timing knowledge from a previous playlist refresh into a newly
 * parsed one: reuses the cached init segment, copies known PTS/duration
 * onto overlapping fragments, applies any discontinuity-counter offset,
 * then recomputes sliding either from a PTS-carrying fragment or by
 * adjusting against the old playlist.
 * @param {Object} oldDetails - previously parsed level details
 * @param {Object} newDetails - freshly parsed level details (mutated in place)
 */
function mergeDetails(oldDetails, newDetails) {
  // potentially retrieve cached initsegment
  if (newDetails.initSegment && oldDetails.initSegment) {
    newDetails.initSegment = oldDetails.initSegment;
  } // check if old/new playlists have fragments in common
  // loop through overlapping SN and update startPTS , cc, and duration if any found


  var ccOffset = 0;
  var PTSFrag;
  mapFragmentIntersection(oldDetails, newDetails, function (oldFrag, newFrag) {
    ccOffset = oldFrag.cc - newFrag.cc;

    if (isFiniteNumber(oldFrag.startPTS)) {
      newFrag.start = newFrag.startPTS = oldFrag.startPTS;
      newFrag.endPTS = oldFrag.endPTS;
      newFrag.duration = oldFrag.duration;
      newFrag.backtracked = oldFrag.backtracked;
      newFrag.dropped = oldFrag.dropped;
      PTSFrag = newFrag;
    } // PTS is known when there are overlapping segments


    newDetails.PTSKnown = true;
  });

  // no overlap with the old playlist: nothing to merge
  if (!newDetails.PTSKnown) {
    return;
  }

  if (ccOffset) {
    logger.log('discontinuity sliding from playlist, take drift into account');
    var newFragments = newDetails.fragments;

    for (var i = 0; i < newFragments.length; i++) {
      newFragments[i].cc += ccOffset;
    }
  } // if at least one fragment contains PTS info, recompute PTS information for all fragments


  if (PTSFrag) {
    updateFragPTSDTS(newDetails, PTSFrag, PTSFrag.startPTS, PTSFrag.endPTS, PTSFrag.startDTS, PTSFrag.endDTS);
  } else {
    // ensure that delta is within oldFragments range
    // also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61])
    // in that case we also need to adjust start offset of all fragments
    adjustSliding(oldDetails, newDetails);
  } // if we are here, it means we have fragments overlapping between
  // old and new level. reliable PTS info is thus relying on old level


  newDetails.PTSKnown = oldDetails.PTSKnown;
}
/**
 * Aligns a refreshed subtitle playlist with its predecessor: shared fragments
 * keep their old start times, trailing new fragments are extrapolated, and a
 * playlist with no overlap is simply offset by `referenceStart`.
 *
 * @param {Object} oldPlaylist - previously loaded subtitle playlist
 * @param {Object} newPlaylist - freshly loaded subtitle playlist (mutated)
 * @param {number} [referenceStart=0] - fallback start offset when playlists do not overlap
 */
function mergeSubtitlePlaylists(oldPlaylist, newPlaylist, referenceStart) {
  if (referenceStart === undefined) {
    referenceStart = 0;
  }

  // Carry over start times for fragments present in both playlists.
  var lastMatchedIndex = -1;
  mapFragmentIntersection(oldPlaylist, newPlaylist, function (oldFrag, newFrag, index) {
    newFrag.start = oldFrag.start;
    lastMatchedIndex = index;
  });
  var newFragments = newPlaylist.fragments;

  // No overlap: offset every fragment by the reference start instead.
  if (lastMatchedIndex === -1) {
    newFragments.forEach(function (frag) {
      frag.start += referenceStart;
    });
    return;
  }

  // Extrapolate start times for fragments following the last shared one.
  for (var i = lastMatchedIndex + 1; i < newFragments.length; i++) {
    newFragments[i].start = newFragments[i - 1].start + newFragments[i - 1].duration;
  }
}
/**
 * Invokes `intersectionFn(oldFrag, newFrag, index)` for every fragment whose
 * media sequence number exists in both playlists. `index` is the fragment's
 * position within `newPlaylist.fragments`. Stops early if either fragment
 * list is shorter than the SN range implies.
 *
 * @param {Object} oldPlaylist
 * @param {Object} newPlaylist
 * @param {Function} intersectionFn
 */
function mapFragmentIntersection(oldPlaylist, newPlaylist, intersectionFn) {
  if (!oldPlaylist || !newPlaylist) {
    return;
  }

  // First/last indices (within newPlaylist.fragments) of the SN overlap.
  var firstIdx = Math.max(oldPlaylist.startSN, newPlaylist.startSN) - newPlaylist.startSN;
  var lastIdx = Math.min(oldPlaylist.endSN, newPlaylist.endSN) - newPlaylist.startSN;
  // Offset converting a newPlaylist index into the matching oldPlaylist index.
  var snOffset = newPlaylist.startSN - oldPlaylist.startSN;

  for (var idx = firstIdx; idx <= lastIdx; idx++) {
    var oldFrag = oldPlaylist.fragments[snOffset + idx];
    var newFrag = newPlaylist.fragments[idx];

    if (!oldFrag || !newFrag) {
      return;
    }

    intersectionFn(oldFrag, newFrag, idx);
  }
}
/**
 * Applies the old playlist's known sliding to a refreshed playlist that has no
 * trusted PTS info: every new fragment start is shifted by the start time of
 * the old fragment aligned with the new playlist's first fragment.
 *
 * @param {Object} oldPlaylist - previously loaded playlist
 * @param {Object} newPlaylist - freshly loaded playlist (mutated in place)
 */
function adjustSliding(oldPlaylist, newPlaylist) {
  // Offset of the new playlist's first fragment within the old fragment list.
  var delta = newPlaylist.startSN - oldPlaylist.startSN;
  var oldFragments = oldPlaylist.fragments;
  var newFragments = newPlaylist.fragments;

  // No usable overlap. Note `>=`: the original `delta > oldFragments.length`
  // let delta === length through and then read oldFragments[delta] past the
  // end of the array (TypeError on .start of undefined).
  if (delta < 0 || delta >= oldFragments.length) {
    return;
  }

  // Shift every new fragment by the aligned old fragment's start time.
  var sliding = oldFragments[delta].start;

  for (var i = 0; i < newFragments.length; i++) {
    newFragments[i].start += sliding;
  }
}
/**
 * Computes how long to wait (ms) before reloading a live playlist.
 *
 * @param {Object} currentPlaylist - playlist from the previous load (may be null)
 * @param {Object} newPlaylist - playlist from the latest load
 * @param {number} lastRequestTime - performance.now() timestamp of the request (falsy to ignore)
 * @returns {number} rounded reload interval in milliseconds
 */
function computeReloadInterval(currentPlaylist, newPlaylist, lastRequestTime) {
  // Base interval: the average target duration when the playlist advertises
  // one, otherwise the target duration (seconds -> milliseconds).
  var baseDuration = newPlaylist.averagetargetduration || newPlaylist.targetduration;
  var reloadInterval = 1000 * baseDuration;
  var minReloadInterval = reloadInterval / 2;

  // HLS spec: an unchanged playlist MUST wait one-half the target duration
  // before retrying.
  if (currentPlaylist && newPlaylist.endSN === currentPlaylist.endSN) {
    reloadInterval = minReloadInterval;
  }

  // Deduct the time already elapsed since the request, but never reload more
  // often than half of the target duration.
  if (lastRequestTime) {
    reloadInterval = Math.max(minReloadInterval, reloadInterval - (window.performance.now() - lastRequestTime));
  }

  return Math.round(reloadInterval);
}
+// CONCATENATED MODULE: ./src/controller/level-controller.js
/**
 * Babel helper: installs a list of property descriptors on `target`,
 * defaulting `enumerable` to false, forcing `configurable`, and making data
 * descriptors writable.
 * @param {Object} target - object (usually a prototype) to define properties on
 * @param {Array<Object>} props - descriptor objects, each carrying a `key`
 */
function level_controller_defineProperties(target, props) {
  var i = 0;
  while (i < props.length) {
    var descriptor = props[i];
    descriptor.enumerable = descriptor.enumerable || false;
    descriptor.configurable = true;
    if ("value" in descriptor) {
      descriptor.writable = true;
    }
    Object.defineProperty(target, descriptor.key, descriptor);
    i += 1;
  }
}
+
/**
 * Babel helper: attaches prototype and static property descriptors to a
 * constructor and returns it.
 * @param {Function} Constructor
 * @param {Array<Object>} [protoProps] - descriptors for Constructor.prototype
 * @param {Array<Object>} [staticProps] - descriptors for Constructor itself
 * @returns {Function} the same Constructor
 */
function level_controller_createClass(Constructor, protoProps, staticProps) {
  if (protoProps) {
    level_controller_defineProperties(Constructor.prototype, protoProps);
  }

  if (staticProps) {
    level_controller_defineProperties(Constructor, staticProps);
  }

  return Constructor;
}
+
/**
 * Babel helper: wires loose-mode inheritance — instances inherit through
 * `superClass.prototype`, statics through the constructor's `__proto__`.
 * @param {Function} subClass
 * @param {Function} superClass
 */
function level_controller_inheritsLoose(subClass, superClass) {
  var proto = Object.create(superClass.prototype);
  proto.constructor = subClass;
  subClass.prototype = proto;
  subClass.__proto__ = superClass;
}
+
+/*
+ * Level Controller
+*/
+
+
+
+
+
+
// Module-local aliases (kept so the minifier can shorten references).
var level_controller_window = window,
    level_controller_performance = level_controller_window.performance;
// Set in the LevelController constructor from navigator.userAgent; used in
// onManifestLoaded to decide whether to strip mp4a.40.34 audio codec info.
var chromeOrFirefox;
+
var level_controller_LevelController =
/*#__PURE__*/
function (_EventHandler) {
  level_controller_inheritsLoose(LevelController, _EventHandler);

  /**
   * @param {Object} hls - hls.js instance; the controller registers for
   * manifest/level/audio-track/fragment/error events.
   */
  function LevelController(hls) {
    var _this;

    _this = _EventHandler.call(this, hls, events.MANIFEST_LOADED, events.LEVEL_LOADED, events.AUDIO_TRACK_SWITCHED, events.FRAG_LOADED, events.ERROR) || this;
    _this.canload = false; // loading gated by startLoad()/stopLoad()
    _this.currentLevelIndex = null; // active quality level index
    _this.manualLevelIndex = -1; // -1 means automatic (ABR) level selection
    _this.timer = null; // live playlist reload timeout handle
    chromeOrFirefox = /chrome|firefox/.test(navigator.userAgent.toLowerCase());
    return _this;
  }

  var _proto = LevelController.prototype;
  // Teardown hook: stop any pending live reload and drop the manual override.
  _proto.onHandlerDestroying = function onHandlerDestroying() {
    this.clearTimer();
    this.manualLevelIndex = -1;
  };
+
+ _proto.clearTimer = function clearTimer() {
+ if (this.timer !== null) {
+ clearTimeout(this.timer);
+ this.timer = null;
+ }
+ };
+
+ _proto.startLoad = function startLoad() {
+ var levels = this._levels;
+ this.canload = true;
+ this.levelRetryCount = 0; // clean up live level details to force reload them, and reset load errors
+
+ if (levels) {
+ levels.forEach(function (level) {
+ level.loadError = 0;
+ var levelDetails = level.details;
+
+ if (levelDetails && levelDetails.live) {
+ level.details = undefined;
+ }
+ });
+ } // speed up live playlist refresh if timer exists
+
+
+ if (this.timer !== null) {
+ this.loadLevel();
+ }
+ };
+
  // Gates future playlist loads off; loadLevel() checks this flag.
  _proto.stopLoad = function stopLoad() {
    this.canload = false;
  };
+
  /**
   * MANIFEST_LOADED handler: groups redundant levels (same bitrate) together,
   * filters out levels with unsupported codecs, re-indexes audio tracks, then
   * fires MANIFEST_PARSED — or a fatal MANIFEST_INCOMPATIBLE_CODECS_ERROR when
   * no playable level remains.
   * @param {Object} data - manifest payload ({levels, audioTracks, stats, ...})
   */
  _proto.onManifestLoaded = function onManifestLoaded(data) {
    var levels = [];
    var audioTracks = [];
    var bitrateStart;
    var levelSet = {};
    var levelFromSet = null;
    var videoCodecFound = false;
    var audioCodecFound = false; // regroup redundant levels together

    data.levels.forEach(function (level) {
      var attributes = level.attrs;
      level.loadError = 0;
      level.fragmentError = false;
      videoCodecFound = videoCodecFound || !!level.videoCodec;
      audioCodecFound = audioCodecFound || !!level.audioCodec; // erase audio codec info if browser does not support mp4a.40.34.
      // demuxer will autodetect codec and fallback to mpeg/audio

      if (chromeOrFirefox && level.audioCodec && level.audioCodec.indexOf('mp4a.40.34') !== -1) {
        level.audioCodec = undefined;
      }

      levelFromSet = levelSet[level.bitrate]; // FIXME: we would also have to match the resolution here

      if (!levelFromSet) {
        // first level seen at this bitrate: its url becomes an array so
        // redundant URLs can be appended later
        level.url = [level.url];
        level.urlId = 0;
        levelSet[level.bitrate] = level;
        levels.push(level);
      } else {
        // same bitrate already seen: record this URL as a redundant fallback
        levelFromSet.url.push(level.url);
      }

      if (attributes) {
        if (attributes.AUDIO) {
          audioCodecFound = true;
          addGroupId(levelFromSet || level, 'audio', attributes.AUDIO);
        }

        if (attributes.SUBTITLES) {
          addGroupId(levelFromSet || level, 'text', attributes.SUBTITLES);
        }
      }
    }); // remove audio-only level if we also have levels with audio+video codecs signalled

    if (videoCodecFound && audioCodecFound) {
      levels = levels.filter(function (_ref) {
        var videoCodec = _ref.videoCodec;
        return !!videoCodec;
      });
    } // only keep levels with supported audio/video codecs


    levels = levels.filter(function (_ref2) {
      var audioCodec = _ref2.audioCodec,
          videoCodec = _ref2.videoCodec;
      return (!audioCodec || isCodecSupportedInMp4(audioCodec, 'audio')) && (!videoCodec || isCodecSupportedInMp4(videoCodec, 'video'));
    });

    if (data.audioTracks) {
      audioTracks = data.audioTracks.filter(function (track) {
        return !track.audioCodec || isCodecSupportedInMp4(track.audioCodec, 'audio');
      }); // Reassign id's after filtering since they're used as array indices

      audioTracks.forEach(function (track, index) {
        track.id = index;
      });
    }

    if (levels.length > 0) {
      // start bitrate is the first bitrate of the manifest
      bitrateStart = levels[0].bitrate; // sort level on bitrate

      levels.sort(function (a, b) {
        return a.bitrate - b.bitrate;
      });
      this._levels = levels; // find index of first level in sorted levels

      for (var i = 0; i < levels.length; i++) {
        if (levels[i].bitrate === bitrateStart) {
          this._firstLevel = i;
          logger.log("manifest loaded," + levels.length + " level(s) found, first bitrate:" + bitrateStart);
          break;
        }
      } // Audio is only alternate if manifest include a URI along with the audio group tag


      this.hls.trigger(events.MANIFEST_PARSED, {
        levels: levels,
        audioTracks: audioTracks,
        firstLevel: this._firstLevel,
        stats: data.stats,
        audio: audioCodecFound,
        video: videoCodecFound,
        altAudio: audioTracks.some(function (t) {
          return !!t.url;
        })
      });
    } else {
      this.hls.trigger(events.ERROR, {
        type: ErrorTypes.MEDIA_ERROR,
        details: ErrorDetails.MANIFEST_INCOMPATIBLE_CODECS_ERROR,
        fatal: true,
        url: this.hls.url,
        reason: 'no level with compatible codecs found in manifest'
      });
    }
  };
+
  /**
   * Makes `newLevel` the active level: fires LEVEL_SWITCHING when the index
   * changes and LEVEL_LOADING when the level's playlist must be (re)fetched.
   * An out-of-range index triggers a non-fatal LEVEL_SWITCH_ERROR instead.
   * @param {number} newLevel - index into this._levels
   */
  _proto.setLevelInternal = function setLevelInternal(newLevel) {
    var levels = this._levels;
    var hls = this.hls; // check if level idx is valid

    if (newLevel >= 0 && newLevel < levels.length) {
      // stopping live reloading timer if any
      this.clearTimer();

      if (this.currentLevelIndex !== newLevel) {
        logger.log("switching to level " + newLevel);
        this.currentLevelIndex = newLevel;
        var levelProperties = levels[newLevel];
        levelProperties.level = newLevel;
        hls.trigger(events.LEVEL_SWITCHING, levelProperties);
      }

      var level = levels[newLevel];
      var levelDetails = level.details; // check if we need to load playlist for this level

      if (!levelDetails || levelDetails.live) {
        // level not retrieved yet, or live playlist we need to (re)load it
        var urlId = level.urlId;
        hls.trigger(events.LEVEL_LOADING, {
          url: level.url[urlId],
          level: newLevel,
          id: urlId
        });
      }
    } else {
      // invalid level id given, trigger error
      hls.trigger(events.ERROR, {
        type: ErrorTypes.OTHER_ERROR,
        details: ErrorDetails.LEVEL_SWITCH_ERROR,
        level: newLevel,
        fatal: false,
        reason: 'invalid level idx'
      });
    }
  };
+
  /**
   * ERROR handler. A fatal network error only stops the reload timer;
   * recoverable fragment/level errors are mapped to a level index and handed
   * to recoverLevel() for retry / redundant-stream switching.
   * @param {Object} data - error event payload
   */
  _proto.onError = function onError(data) {
    if (data.fatal) {
      if (data.type === ErrorTypes.NETWORK_ERROR) {
        this.clearTimer();
      }

      return;
    }

    var levelError = false,
        fragmentError = false;
    var levelIndex; // try to recover not fatal errors

    switch (data.details) {
      case ErrorDetails.FRAG_LOAD_ERROR:
      case ErrorDetails.FRAG_LOAD_TIMEOUT:
      case ErrorDetails.KEY_LOAD_ERROR:
      case ErrorDetails.KEY_LOAD_TIMEOUT:
        levelIndex = data.frag.level;
        fragmentError = true;
        break;

      case ErrorDetails.LEVEL_LOAD_ERROR:
      case ErrorDetails.LEVEL_LOAD_TIMEOUT:
        levelIndex = data.context.level;
        levelError = true;
        break;

      case ErrorDetails.REMUX_ALLOC_ERROR:
        levelIndex = data.level;
        levelError = true;
        break;
    }

    // Any other error detail leaves levelIndex undefined and is ignored here.
    if (levelIndex !== undefined) {
      this.recoverLevel(data, levelIndex, levelError, fragmentError);
    }
  }
  /**
   * Switch to a redundant stream if any available.
   * If redundant stream is not available, emergency switch down if ABR mode is enabled.
   *
   * @param {Object} errorEvent
   * @param {Number} levelIndex current level index
   * @param {Boolean} levelError
   * @param {Boolean} fragmentError
   */
  // FIXME Find a better abstraction where fragment/level retry management is well decoupled
  ;
+
  _proto.recoverLevel = function recoverLevel(errorEvent, levelIndex, levelError, fragmentError) {
    var _this2 = this;

    var config = this.hls.config;
    var errorDetails = errorEvent.details;
    var level = this._levels[levelIndex];
    var redundantLevels, delay, nextLevel;
    level.loadError++;
    level.fragmentError = fragmentError;

    if (levelError) {
      if (this.levelRetryCount + 1 <= config.levelLoadingMaxRetry) {
        // exponential backoff capped to max retry timeout
        delay = Math.min(Math.pow(2, this.levelRetryCount) * config.levelLoadingRetryDelay, config.levelLoadingMaxRetryTimeout); // Schedule level reload

        this.timer = setTimeout(function () {
          return _this2.loadLevel();
        }, delay); // boolean used to inform stream controller not to switch back to IDLE on non fatal error

        errorEvent.levelRetry = true;
        this.levelRetryCount++;
        logger.warn("level controller, " + errorDetails + ", retry in " + delay + " ms, current retry count is " + this.levelRetryCount);
      } else {
        // retries exhausted: escalate to a fatal error
        logger.error("level controller, cannot recover from " + errorDetails + " error");
        this.currentLevelIndex = null; // stopping live reloading timer if any

        this.clearTimer(); // switch error to fatal

        errorEvent.fatal = true;
        return;
      }
    } // Try any redundant streams if available for both errors: level and fragment
    // If level.loadError reaches redundantLevels it means that we tried them all, no hope => let's switch down


    if (levelError || fragmentError) {
      redundantLevels = level.url.length;

      if (redundantLevels > 1 && level.loadError < redundantLevels) {
        // rotate to the next redundant URL and force a playlist refetch
        level.urlId = (level.urlId + 1) % redundantLevels;
        level.details = undefined;
        logger.warn("level controller, " + errorDetails + " for level " + levelIndex + ": switching to redundant URL-id " + level.urlId); // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
        // console.log('New video quality level audio group id:', level.attrs.AUDIO);
      } else {
        // Search for available level
        if (this.manualLevelIndex === -1) {
          // When lowest level has been reached, let's start hunt from the top
          nextLevel = levelIndex === 0 ? this._levels.length - 1 : levelIndex - 1;
          logger.warn("level controller, " + errorDetails + ": switch to " + nextLevel);
          this.hls.nextAutoLevel = this.currentLevelIndex = nextLevel;
        } else if (fragmentError) {
          // Allow fragment retry as long as configuration allows.
          // reset this._level so that another call to set level() will trigger again a frag load
          logger.warn("level controller, " + errorDetails + ": reload a fragment");
          this.currentLevelIndex = null;
        }
      }
    }
  } // reset errors on the successful load of a fragment
  ;
+
+ _proto.onFragLoaded = function onFragLoaded(_ref3) {
+ var frag = _ref3.frag;
+
+ if (frag !== undefined && frag.type === 'main') {
+ var level = this._levels[frag.level];
+
+ if (level !== undefined) {
+ level.fragmentError = false;
+ level.loadError = 0;
+ this.levelRetryCount = 0;
+ }
+ }
+ };
+
  /**
   * LEVEL_LOADED handler: resets error counters on success and, for live
   * playlists, arms a timer to reload via computeReloadInterval().
   * @param {Object} data - level-loaded payload ({level, details, stats, ...})
   */
  _proto.onLevelLoaded = function onLevelLoaded(data) {
    var _this3 = this;

    var level = data.level,
        details = data.details; // only process level loaded events matching with expected level

    if (level !== this.currentLevelIndex) {
      return;
    }

    var curLevel = this._levels[level]; // reset level load error counter on successful level loaded only if there is no issues with fragments

    if (!curLevel.fragmentError) {
      curLevel.loadError = 0;
      this.levelRetryCount = 0;
    } // if current playlist is a live playlist, arm a timer to reload it


    if (details.live) {
      var reloadInterval = computeReloadInterval(curLevel.details, details, data.stats.trequest);
      logger.log("live playlist, reload in " + Math.round(reloadInterval) + " ms");
      this.timer = setTimeout(function () {
        return _this3.loadLevel();
      }, reloadInterval);
    } else {
      // VOD playlist: no periodic reloading needed
      this.clearTimer();
    }
  };
+
+ _proto.onAudioTrackSwitched = function onAudioTrackSwitched(data) {
+ var audioGroupId = this.hls.audioTracks[data.id].groupId;
+ var currentLevel = this.hls.levels[this.currentLevelIndex];
+
+ if (!currentLevel) {
+ return;
+ }
+
+ if (currentLevel.audioGroupIds) {
+ var urlId = -1;
+
+ for (var i = 0; i < currentLevel.audioGroupIds.length; i++) {
+ if (currentLevel.audioGroupIds[i] === audioGroupId) {
+ urlId = i;
+ break;
+ }
+ }
+
+ if (urlId !== currentLevel.urlId) {
+ currentLevel.urlId = urlId;
+ this.startLoad();
+ }
+ }
+ };
+
+ _proto.loadLevel = function loadLevel() {
+ logger.debug('call to loadLevel');
+
+ if (this.currentLevelIndex !== null && this.canload) {
+ var levelObject = this._levels[this.currentLevelIndex];
+
+ if (typeof levelObject === 'object' && levelObject.url.length > 0) {
+ var level = this.currentLevelIndex;
+ var id = levelObject.urlId;
+ var url = levelObject.url[id];
+ logger.log("Attempt loading level index " + level + " with URL-id " + id); // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
+ // console.log('New video quality level audio group id:', levelObject.attrs.AUDIO, level);
+
+ this.hls.trigger(events.LEVEL_LOADING, {
+ url: url,
+ level: level,
+ id: id
+ });
+ }
+ }
+ };
+
  // Accessor properties exposed on LevelController instances.
  level_controller_createClass(LevelController, [{
    key: "levels",
    // all retained quality levels, sorted by bitrate
    get: function get() {
      return this._levels;
    }
  }, {
    key: "level",
    // currently selected level index (null when none selected yet)
    get: function get() {
      return this.currentLevelIndex;
    },
    set: function set(newLevel) {
      var levels = this._levels;

      if (levels) {
        // clamp to the highest available level
        newLevel = Math.min(newLevel, levels.length - 1);

        if (this.currentLevelIndex !== newLevel || !levels[newLevel].details) {
          this.setLevelInternal(newLevel);
        }
      }
    }
  }, {
    key: "manualLevel",
    // manual override index (-1 = automatic ABR selection)
    get: function get() {
      return this.manualLevelIndex;
    },
    set: function set(newLevel) {
      this.manualLevelIndex = newLevel;

      if (this._startLevel === undefined) {
        this._startLevel = newLevel;
      }

      if (newLevel !== -1) {
        this.level = newLevel;
      }
    }
  }, {
    key: "firstLevel",
    get: function get() {
      return this._firstLevel;
    },
    set: function set(newLevel) {
      this._firstLevel = newLevel;
    }
  }, {
    key: "startLevel",
    get: function get() {
      // hls.startLevel takes precedence over config.startLevel
      // if none of these values are defined, fallback on this._firstLevel (first quality level appearing in variant manifest)
      if (this._startLevel === undefined) {
        var configStartLevel = this.hls.config.startLevel;

        if (configStartLevel !== undefined) {
          return configStartLevel;
        } else {
          return this._firstLevel;
        }
      } else {
        return this._startLevel;
      }
    },
    set: function set(newLevel) {
      this._startLevel = newLevel;
    }
  }, {
    key: "nextLoadLevel",
    // level the next fragment load should use (manual override wins)
    get: function get() {
      if (this.manualLevelIndex !== -1) {
        return this.manualLevelIndex;
      } else {
        return this.hls.nextAutoLevel;
      }
    },
    set: function set(nextLevel) {
      this.level = nextLevel;

      if (this.manualLevelIndex === -1) {
        this.hls.nextAutoLevel = nextLevel;
      }
    }
  }]);

  return LevelController;
}(event_handler);
+
+
+// CONCATENATED MODULE: ./src/demux/id3.js
+
+/**
+ * ID3 parser
+ */
+
var ID3 =
/*#__PURE__*/
function () {
  function ID3() {}

  /**
   * Returns true if an ID3 header can be found at offset in data
   * @param {Uint8Array} data - The data to search in
   * @param {number} offset - The offset at which to start searching
   * @return {boolean} - True if an ID3 header is found
   */
  ID3.isHeader = function isHeader(data, offset) {
    /*
     * http://id3.org/id3v2.3.0
     * [0] = 'I'
     * [1] = 'D'
     * [2] = '3'
     * [3,4] = {Version}
     * [5] = {Flags}
     * [6-9] = {ID3 Size}
     *
     * An ID3v2 tag can be detected with the following pattern:
     * $49 44 33 yy yy xx zz zz zz zz
     * Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
     */
    if (offset + 10 <= data.length) {
      // look for 'ID3' identifier
      if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
        // check version is within range
        if (data[offset + 3] < 0xFF && data[offset + 4] < 0xFF) {
          // check size is within range (sync-safe: high bit of each byte clear)
          if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
            return true;
          }
        }
      }
    }

    return false;
  }
  /**
   * Returns true if an ID3 footer can be found at offset in data
   * @param {Uint8Array} data - The data to search in
   * @param {number} offset - The offset at which to start searching
   * @return {boolean} - True if an ID3 footer is found
   */
  ;
+
  ID3.isFooter = function isFooter(data, offset) {
    /*
     * The footer is a copy of the header, but with a different identifier
     */
    if (offset + 10 <= data.length) {
      // look for '3DI' identifier
      if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
        // check version is within range
        if (data[offset + 3] < 0xFF && data[offset + 4] < 0xFF) {
          // check size is within range (sync-safe: high bit of each byte clear)
          if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
            return true;
          }
        }
      }
    }

    return false;
  }
  /**
   * Returns any adjacent ID3 tags found in data starting at offset, as one block of data
   * @param {Uint8Array} data - The data to search in
   * @param {number} offset - The offset at which to start searching
   * @return {Uint8Array} - The block of data containing any ID3 tags found
   */
  ;
+
+ ID3.getID3Data = function getID3Data(data, offset) {
+ var front = offset;
+ var length = 0;
+
+ while (ID3.isHeader(data, offset)) {
+ // ID3 header is 10 bytes
+ length += 10;
+
+ var size = ID3._readSize(data, offset + 6);
+
+ length += size;
+
+ if (ID3.isFooter(data, offset + 10)) {
+ // ID3 footer is 10 bytes
+ length += 10;
+ }
+
+ offset += length;
+ }
+
+ if (length > 0) {
+ return data.subarray(front, front + length);
+ }
+
+ return undefined;
+ };
+
  // Reads an ID3 "sync-safe" size: four bytes, only the low 7 bits of each
  // byte are significant (28 usable bits total).
  ID3._readSize = function _readSize(data, offset) {
    var size = 0;
    size = (data[offset] & 0x7f) << 21;
    size |= (data[offset + 1] & 0x7f) << 14;
    size |= (data[offset + 2] & 0x7f) << 7;
    size |= data[offset + 3] & 0x7f;
    return size;
  }
  /**
   * Searches for the Elementary Stream timestamp found in the ID3 data chunk
   * @param {Uint8Array} data - Block of data containing one or more ID3 tags
   * @return {number} - The timestamp
   */
  ;

  ID3.getTimeStamp = function getTimeStamp(data) {
    var frames = ID3.getID3Frames(data);

    // Return the first Apple transport-stream timestamp frame found, if any.
    for (var i = 0; i < frames.length; i++) {
      var frame = frames[i];

      if (ID3.isTimeStampFrame(frame)) {
        return ID3._readTimeStamp(frame);
      }
    }

    return undefined;
  }
  /**
   * Returns true if the ID3 frame is an Elementary Stream timestamp frame
   * @param {ID3 frame} frame
   */
  ;

  ID3.isTimeStampFrame = function isTimeStampFrame(frame) {
    return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
  };
+
  // Splits a raw frame (header + payload) into its type, payload size, and
  // payload bytes.
  ID3._getFrameData = function _getFrameData(data) {
    /*
    Frame ID $xx xx xx xx (four characters)
    Size $xx xx xx xx
    Flags $xx xx
    */
    var type = String.fromCharCode(data[0], data[1], data[2], data[3]);

    var size = ID3._readSize(data, 4); // skip frame id, size, and flags


    var offset = 10;
    return {
      type: type,
      size: size,
      data: data.subarray(offset, offset + size)
    };
  }
  /**
   * Returns an array of ID3 frames found in all the ID3 tags in the id3Data
   * @param {Uint8Array} id3Data - The ID3 data containing one or more ID3 tags
   * @return {ID3 frame[]} - Array of ID3 frame objects
   */
  ;

  ID3.getID3Frames = function getID3Frames(id3Data) {
    var offset = 0;
    var frames = [];

    while (ID3.isHeader(id3Data, offset)) {
      var size = ID3._readSize(id3Data, offset + 6); // skip past ID3 header


      offset += 10;
      var end = offset + size; // loop through frames in the ID3 tag

      while (offset + 8 < end) {
        var frameData = ID3._getFrameData(id3Data.subarray(offset));

        var frame = ID3._decodeFrame(frameData);

        if (frame) {
          frames.push(frame);
        } // skip frame header and frame data


        offset += frameData.size + 10;
      }

      if (ID3.isFooter(id3Data, offset)) {
        offset += 10;
      }
    }

    return frames;
  };

  // Dispatches a raw frame to the matching decoder by frame-id prefix;
  // unrecognised frame types are dropped (undefined).
  ID3._decodeFrame = function _decodeFrame(frame) {
    if (frame.type === 'PRIV') {
      return ID3._decodePrivFrame(frame);
    } else if (frame.type[0] === 'T') {
      return ID3._decodeTextFrame(frame);
    } else if (frame.type[0] === 'W') {
      return ID3._decodeURLFrame(frame);
    }

    return undefined;
  };
+
  // Decodes the Apple transport-stream timestamp PRIV payload (8 bytes).
  ID3._readTimeStamp = function _readTimeStamp(timeStampFrame) {
    if (timeStampFrame.data.byteLength === 8) {
      var data = new Uint8Array(timeStampFrame.data); // timestamp is 33 bit expressed as a big-endian eight-octet number,
      // with the upper 31 bits set to zero.

      var pts33Bit = data[3] & 0x1;
      var timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
      timestamp /= 45;

      if (pts33Bit) {
        // add the contribution of the 33rd PTS bit
        timestamp += 47721858.84;
      } // 2^32 / 90


      return Math.round(timestamp);
    }

    return undefined;
  };

  ID3._decodePrivFrame = function _decodePrivFrame(frame) {
    /*
    Format: \0
    */
    if (frame.size < 2) {
      return undefined;
    }

    // owner is the NUL-terminated identifier; the rest is opaque binary data
    var owner = ID3._utf8ArrayToStr(frame.data, true);

    var privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
    return {
      key: frame.type,
      info: owner,
      data: privateData.buffer
    };
  };
+
  // Decodes T*** text information frames; TXXX carries an extra description.
  ID3._decodeTextFrame = function _decodeTextFrame(frame) {
    if (frame.size < 2) {
      return undefined;
    }

    if (frame.type === 'TXXX') {
      /*
      Format:
      [0] = {Text Encoding}
      [1-?] = {Description}\0{Value}
      */
      var index = 1;

      var description = ID3._utf8ArrayToStr(frame.data.subarray(index), true);

      index += description.length + 1;

      var value = ID3._utf8ArrayToStr(frame.data.subarray(index));

      return {
        key: frame.type,
        info: description,
        data: value
      };
    } else {
      /*
      Format:
      [0] = {Text Encoding}
      [1-?] = {Value}
      */
      var text = ID3._utf8ArrayToStr(frame.data.subarray(1));

      return {
        key: frame.type,
        data: text
      };
    }
  };
+
  // Decodes W*** URL link frames; WXXX carries an extra description.
  ID3._decodeURLFrame = function _decodeURLFrame(frame) {
    if (frame.type === 'WXXX') {
      /*
      Format:
      [0] = {Text Encoding}
      [1-?] = {Description}\0{URL}
      */
      if (frame.size < 2) {
        return undefined;
      }

      var index = 1;

      var description = ID3._utf8ArrayToStr(frame.data.subarray(index));

      index += description.length + 1;

      var value = ID3._utf8ArrayToStr(frame.data.subarray(index));

      return {
        key: frame.type,
        info: description,
        data: value
      };
    } else {
      /*
      Format:
      [0-?] = {URL}
      */
      var url = ID3._utf8ArrayToStr(frame.data);

      return {
        key: frame.type,
        data: url
      };
    }
  } // http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
  // http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt

  /* utf.js - UTF-8 <=> UTF-16 conversion
   *
   * Copyright (C) 1999 Masanao Izumo
   * Version: 1.0
   * LastModified: Dec 25 1999
   * This library is free. You can redistribute it and/or modify it.
   */
  ;
+
  /**
   * Decodes a UTF-8 byte array into a JS string. Uses the platform
   * TextDecoder when available, otherwise falls back to a manual decoder
   * handling 1- to 3-byte sequences.
   * @param {Uint8Array} array - UTF-8 bytes
   * @param {boolean} [exitOnNull=false] - stop at the first NUL byte instead
   * of stripping NUL characters from the whole buffer
   * @return {string}
   */
  ID3._utf8ArrayToStr = function _utf8ArrayToStr(array, exitOnNull) {
    if (exitOnNull === void 0) {
      exitOnNull = false;
    }

    var decoder = getTextDecoder();

    if (decoder) {
      var decoded = decoder.decode(array);

      if (exitOnNull) {
        // grab up to the first null
        var idx = decoded.indexOf('\0');
        return idx !== -1 ? decoded.substring(0, idx) : decoded;
      } // remove any null characters


      return decoded.replace(/\0/g, '');
    }

    // Manual UTF-8 decoding fallback (no TextDecoder available).
    var len = array.length;
    var c;
    var char2;
    var char3;
    var out = '';
    var i = 0;

    while (i < len) {
      c = array[i++];

      if (c === 0x00 && exitOnNull) {
        return out;
      } else if (c === 0x00 || c === 0x03) {
        // If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
        continue;
      }

      switch (c >> 4) {
        case 0:
        case 1:
        case 2:
        case 3:
        case 4:
        case 5:
        case 6:
        case 7:
          // 0xxxxxxx
          out += String.fromCharCode(c);
          break;

        case 12:
        case 13:
          // 110x xxxx 10xx xxxx
          char2 = array[i++];
          out += String.fromCharCode((c & 0x1F) << 6 | char2 & 0x3F);
          break;

        case 14:
          // 1110 xxxx 10xx xxxx 10xx xxxx
          char2 = array[i++];
          char3 = array[i++];
          out += String.fromCharCode((c & 0x0F) << 12 | (char2 & 0x3F) << 6 | (char3 & 0x3F) << 0);
          break;

        default:
      }
    }

    return out;
  };

  return ID3;
}();
+
// Cached TextDecoder instance shared by all getTextDecoder() calls.
var decoder;

/**
 * Lazily creates (and caches) a UTF-8 TextDecoder when the running scope
 * provides one; returns undefined otherwise so callers can fall back to the
 * manual decoding path in ID3._utf8ArrayToStr.
 */
function getTextDecoder() {
  var global = getSelfScope(); // safeguard for code that might run both on worker and main thread

  if (!decoder && typeof global.TextDecoder !== 'undefined') {
    decoder = new global.TextDecoder('utf-8');
  }

  return decoder;
}

// Re-exported for direct use without going through the ID3 class.
var utf8ArrayToStr = ID3._utf8ArrayToStr;
/* harmony default export */ var id3 = (ID3);
+
+// CONCATENATED MODULE: ./src/utils/texttrack-utils.ts
/**
 * Dispatches an 'addtrack' event carrying `track` on the given media element.
 * @param {TextTrack} track - track to attach to the event
 * @param {HTMLMediaElement} videoEl - element to dispatch on
 */
function sendAddTrackEvent(track, videoEl) {
  var trackEvent;

  try {
    trackEvent = new Event('addtrack');
  } catch (err) {
    // IE11 has no Event constructor; use the legacy factory instead.
    trackEvent = document.createEvent('Event');
    trackEvent.initEvent('addtrack', false, false);
  }

  trackEvent.track = track;
  videoEl.dispatchEvent(trackEvent);
}
/**
 * Removes every cue from the given text track. Safe to call with a null
 * track or a track without a cue list.
 * @param {TextTrack} track
 */
function clearCurrentCues(track) {
  if (!track || !track.cues) {
    return;
  }

  // removeCue shrinks track.cues, so always pop the head until empty.
  while (track.cues.length) {
    track.removeCue(track.cues[0]);
  }
}
+/**
+
+ * Given a list of Cues, finds the closest cue matching the given time.
 * Modified version of binary search O(log(n)).
+ *
+ * @export
+ * @param {(TextTrackCueList | TextTrackCue[])} cues - List of cues.
+ * @param {number} time - Target time, to find closest cue to.
+ * @returns {TextTrackCue}
+ */
+
+function getClosestCue(cues, time) {
+ // If the offset is less than the first element, the first element is the closest.
+ if (time < cues[0].endTime) {
+ return cues[0];
+ } // If the offset is greater than the last cue, the last is the closest.
+
+
+ if (time > cues[cues.length - 1].endTime) {
+ return cues[cues.length - 1];
+ }
+
+ var left = 0;
+ var right = cues.length - 1;
+
+ while (left <= right) {
+ var mid = Math.floor((right + left) / 2);
+
+ if (time < cues[mid].endTime) {
+ right = mid - 1;
+ } else if (time > cues[mid].endTime) {
+ left = mid + 1;
+ } else {
+ // If it's not lower or higher, it must be equal.
+ return cues[mid];
+ }
+ } // At this point, left and right have swapped.
+ // No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
+
+
+ return cues[left].endTime - time < time - cues[right].endTime ? cues[left] : cues[right];
+}
+// CONCATENATED MODULE: ./src/controller/id3-track-controller.js
/**
 * Babel helper: loose-mode inheritance. Wires the instance prototype chain,
 * restores the constructor back-reference, then links statics through the
 * constructor's own prototype pointer.
 * @param {Function} subClass
 * @param {Function} superClass
 */
function id3_track_controller_inheritsLoose(subClass, superClass) {
  var basePrototype = superClass.prototype;
  subClass.prototype = Object.create(basePrototype);
  subClass.prototype.constructor = subClass;
  subClass.__proto__ = superClass;
}
+
+/*
+ * id3 metadata track controller
+*/
+
+
+
+
+
+
+var id3_track_controller_ID3TrackController =
+/*#__PURE__*/
+function (_EventHandler) {
+ id3_track_controller_inheritsLoose(ID3TrackController, _EventHandler);
+
+ function ID3TrackController(hls) {
+ var _this;
+
+ _this = _EventHandler.call(this, hls, events.MEDIA_ATTACHED, events.MEDIA_DETACHING, events.FRAG_PARSING_METADATA, events.LIVE_BACK_BUFFER_REACHED) || this;
+ _this.id3Track = undefined;
+ _this.media = undefined;
+ return _this;
+ }
+
+ var _proto = ID3TrackController.prototype;
+
+ _proto.destroy = function destroy() {
+ event_handler.prototype.destroy.call(this);
+ } // Add ID3 metatadata text track.
+ ;
+
+ _proto.onMediaAttached = function onMediaAttached(data) {
+ this.media = data.media;
+
+ if (!this.media) {}
+ };
+
  // MEDIA_DETACHING handler: drop all cues first, then release the track and
  // media references.
  _proto.onMediaDetaching = function onMediaDetaching() {
    clearCurrentCues(this.id3Track);
    this.id3Track = undefined;
    this.media = undefined;
  };
+
+ _proto.getID3Track = function getID3Track(textTracks) {
+ for (var i = 0; i < textTracks.length; i++) {
+ var textTrack = textTracks[i];
+
+ if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
+ // send 'addtrack' when reusing the textTrack for metadata,
+ // same as what we do for captions
+ sendAddTrackEvent(textTrack, this.media);
+ return textTrack;
+ }
+ }
+
+ return this.media.addTextTrack('metadata', 'id3');
+ };
+
+ _proto.onLiveBackBufferReached = function onLiveBackBufferReached(_ref) {
+ var bufferEnd = _ref.bufferEnd;
+
+ if (!this.id3Track || !this.id3Track.cues || !this.id3Track.cues.length) {
+ return;
+ }
+
+ var foundCue = getClosestCue(this.id3Track.cues, bufferEnd);
+
+ if (!foundCue) {
+ return;
+ }
+
+ var removeCues = true;
+
+ while (removeCues) {
+ var cue = this.id3Track.cues[0];
+
+ if (!this.id3Track.cues.length || cue.id === foundCue.id) {
+ removeCues = false;
+ return;
+ }
+
+ this.id3Track.removeCue(cue);
+ }
+ };
+
+ _proto.onFragParsingMetadata = function onFragParsingMetadata(data) {
+ var fragment = data.frag;
+ var samples = data.samples; // create track dynamically
+
+ if (!this.id3Track) {
+ this.id3Track = this.getID3Track(this.media.textTracks);
+ this.id3Track.mode = 'hidden';
+ } // Attempt to recreate Safari functionality by creating
+ // WebKitDataCue objects when available and store the decoded
+ // ID3 data in the value property of the cue
+
+
+ var Cue = window.WebKitDataCue || window.VTTCue || window.TextTrackCue;
+
+ for (var i = 0; i < samples.length; i++) {
+ var frames = id3.getID3Frames(samples[i].data);
+
+ if (frames) {
+ var startTime = samples[i].pts;
+ var endTime = i < samples.length - 1 ? samples[i + 1].pts : fragment.endPTS;
+
+ if (startTime === endTime) {
+ // Give a slight bump to the endTime if it's equal to startTime to avoid a SyntaxError in IE
+ endTime += 0.0001;
+ } else if (startTime > endTime) {
+ logger.warn('detected an id3 sample with endTime < startTime, adjusting endTime to (startTime + 0.25)');
+ endTime = startTime + 0.25;
+ }
+
+ for (var j = 0; j < frames.length; j++) {
+ var frame = frames[j]; // Safari doesn't put the timestamp frame in the TextTrack
+
+ if (!id3.isTimeStampFrame(frame)) {
+ var cue = new Cue(startTime, endTime, '');
+ cue.value = frame;
+ this.id3Track.addCue(cue);
+ }
+ }
+ }
+ }
+ };
+
+ _proto.onLiveBackBufferReached = function onLiveBackBufferReached(_ref2) {
+ var bufferEnd = _ref2.bufferEnd;
+ var id3Track = this.id3Track;
+
+ if (!id3Track || !id3Track.cues || !id3Track.cues.length) {
+ return;
+ }
+
+ var foundCue = getClosestCue(id3Track.cues, bufferEnd);
+
+ if (!foundCue) {
+ return;
+ }
+
+ while (id3Track.cues[0] !== foundCue) {
+ id3Track.removeCue(id3Track.cues[0]);
+ }
+ };
+
+ return ID3TrackController;
+}(event_handler);
+
+/* harmony default export */ var id3_track_controller = (id3_track_controller_ID3TrackController);
+// CONCATENATED MODULE: ./src/utils/mediasource-helper.ts
+/**
+ * MediaSource helper
+ */
/**
 * Return the available MediaSource constructor: the standard one when
 * present, otherwise the WebKit-prefixed implementation.
 * @returns {typeof MediaSource | undefined}
 */
function getMediaSource() {
  var standard = window.MediaSource;

  if (standard) {
    return standard;
  }

  return window.WebKitMediaSource;
}
+// CONCATENATED MODULE: ./src/is-supported.ts
+
/**
 * Feature-detect MSE playback support: a MediaSource implementation must
 * exist, it must accept the baseline AVC+AAC mp4 codec pair, and - when a
 * global SourceBuffer constructor is exposed - its prototype must provide
 * appendBuffer/remove.
 * @returns {boolean}
 */
function is_supported_isSupported() {
  var mediaSource = getMediaSource();

  if (!mediaSource) {
    return false;
  }

  var typeSupported = typeof mediaSource.isTypeSupported === 'function' && mediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"');

  if (!typeSupported) {
    return false;
  }

  // safari and old version of Chrome doe not expose SourceBuffer globally so checking SourceBuffer.prototype is impossible
  var sourceBuffer = self.SourceBuffer || self.WebKitSourceBuffer;

  if (!sourceBuffer) {
    // No global constructor to validate - accept based on type support alone.
    return true;
  }

  // if SourceBuffer is exposed ensure its API is valid
  var proto = sourceBuffer.prototype;
  return !!(proto && typeof proto.appendBuffer === 'function' && typeof proto.remove === 'function');
}
+// CONCATENATED MODULE: ./src/utils/buffer-helper.ts
+/**
+ * @module BufferHelper
+ *
+ * Providing methods dealing with buffer length retrieval for example.
+ *
+ * In general, a helper around HTML5 MediaElement TimeRanges gathered from `buffered` property.
+ *
+ * Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered
+*/
/**
 * @module BufferHelper
 *
 * Providing methods dealing with buffer length retrieval for example.
 *
 * In general, a helper around HTML5 MediaElement TimeRanges gathered from `buffered` property.
 *
 * Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered
*/
var BufferHelper =
/*#__PURE__*/
function () {
  function BufferHelper() {}

  /**
   * Return true if `media`'s buffered include `position`
   * @param {Bufferable} media
   * @param {number} position
   * @returns {boolean}
   */
  BufferHelper.isBuffered = function isBuffered(media, position) {
    try {
      if (media) {
        var ranges = media.buffered;

        for (var idx = 0; idx < ranges.length; idx++) {
          if (position >= ranges.start(idx) && position <= ranges.end(idx)) {
            return true;
          }
        }
      }
    } catch (error) {
      // this is to catch
      // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
      // This SourceBuffer has been removed from the parent media source
    }

    return false;
  };

  /**
   * Snapshot media.buffered into plain {start, end} objects and delegate to
   * bufferedInfo(). Falls back to an empty-buffer result if reading
   * `buffered` throws (detached SourceBuffer).
   */
  BufferHelper.bufferInfo = function bufferInfo(media, pos, maxHoleDuration) {
    try {
      if (media) {
        var timeRanges = media.buffered;
        var asObjects = [];

        for (var idx = 0; idx < timeRanges.length; idx++) {
          asObjects.push({
            start: timeRanges.start(idx),
            end: timeRanges.end(idx)
          });
        }

        return this.bufferedInfo(asObjects, pos, maxHoleDuration);
      }
    } catch (error) {
      // InvalidStateError: SourceBuffer removed from the parent media source
    }

    return {
      len: 0,
      start: pos,
      end: pos,
      nextStart: undefined
    };
  };

  /**
   * Compute buffer info around `pos`: length of contiguous buffer ahead of
   * pos, the surrounding range boundaries, and the start of the next range
   * (if any). Holes shorter than maxHoleDuration are bridged.
   * NOTE: sorts `buffered` in place (IE does not always return sorted ranges).
   */
  BufferHelper.bufferedInfo = function bufferedInfo(buffered, pos, maxHoleDuration) {
    // sort on range start, then on larger end, matching the original
    // two-step comparator
    buffered.sort(function (a, b) {
      return a.start - b.start || b.end - a.end;
    });

    var merged = [];

    if (maxHoleDuration) {
      // Bridge holes smaller than maxHoleDuration into one merged range.
      for (var i = 0; i < buffered.length; i++) {
        var range = buffered[i];
        var mergedCount = merged.length;

        if (mergedCount === 0) {
          // first range seeds the merged list
          merged.push(range);
          continue;
        }

        var lastEnd = merged[mergedCount - 1].end;

        if (range.start - lastEnd < maxHoleDuration) {
          // small hole (or overlap, negative gap): extend the previous range
          // only when this one actually reaches further
          // e.g. [1,15] + [2,8]  => [1,15]
          //      [1,8]  + [2,15] => [1,15]
          if (range.end > lastEnd) {
            merged[mergedCount - 1].end = range.end;
          }
        } else {
          // big hole: keep as a separate range
          merged.push(range);
        }
      }
    } else {
      merged = buffered;
    }

    var bufferLen = 0;
    var bufferStartNext; // start of the range after pos, if pos is not inside one
    var bufferStart = pos;
    var bufferEnd = pos;

    for (var j = 0; j < merged.length; j++) {
      var start = merged[j].start;
      var end = merged[j].end;

      if (pos + maxHoleDuration >= start && pos < end) {
        // pos falls inside this (hole-tolerant) range
        bufferStart = start;
        bufferEnd = end;
        bufferLen = bufferEnd - pos;
      } else if (pos + maxHoleDuration < start) {
        bufferStartNext = start;
        break;
      }
    }

    return {
      len: bufferLen,
      start: bufferStart,
      end: bufferEnd,
      nextStart: bufferStartNext
    };
  };

  return BufferHelper;
}();
+// CONCATENATED MODULE: ./src/utils/ewma.ts
+/*
+ * compute an Exponential Weighted moving average
+ * - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
+ * - heavily inspired from shaka-player
+ */
/*
 * compute an Exponential Weighted moving average
 * - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
 * - heavily inspired from shaka-player
 */
var EWMA =
/*#__PURE__*/
function () {
  // About half of the estimated value will be from the last |halfLife| samples by weight.
  function EWMA(halfLife) {
    // Larger values of alpha expire historical data more slowly; a falsy
    // halfLife disables decay entirely (alpha = 0).
    this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0;
    this.estimate_ = 0;
    this.totalWeight_ = 0;
  }

  var proto = EWMA.prototype;

  // Fold one weighted sample into the running estimate.
  proto.sample = function sample(weight, value) {
    var decay = Math.pow(this.alpha_, weight);
    this.estimate_ = (1 - decay) * value + decay * this.estimate_;
    this.totalWeight_ += weight;
  };

  proto.getTotalWeight = function getTotalWeight() {
    return this.totalWeight_;
  };

  // Zero-bias-corrected estimate (raw estimate when decay is disabled).
  proto.getEstimate = function getEstimate() {
    if (!this.alpha_) {
      return this.estimate_;
    }

    var zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_);
    return this.estimate_ / zeroFactor;
  };

  return EWMA;
}();
+
+/* harmony default export */ var ewma = (EWMA);
+// CONCATENATED MODULE: ./src/utils/ewma-bandwidth-estimator.ts
+/*
+ * EWMA Bandwidth Estimator
+ * - heavily inspired from shaka-player
+ * Tracks bandwidth samples and estimates available bandwidth.
+ * Based on the minimum of two exponentially-weighted moving averages with
+ * different half-lives.
+ */
+
+
/*
 * EWMA Bandwidth Estimator
 * - heavily inspired from shaka-player
 * Tracks bandwidth samples and estimates available bandwidth.
 * Based on the minimum of two exponentially-weighted moving averages with
 * different half-lives.
 */
var ewma_bandwidth_estimator_EwmaBandWidthEstimator =
/*#__PURE__*/
function () {
  /**
   * @param {any} hls - owning Hls instance (kept as a reference only)
   * @param {number} slow - half-life of the slow-moving average
   * @param {number} fast - half-life of the fast-moving average
   * @param {number} defaultEstimate - bits/s reported before enough samples exist
   */
  function EwmaBandWidthEstimator(hls, slow, fast, defaultEstimate) {
    this.hls = hls;
    this.defaultEstimate_ = defaultEstimate;
    this.minWeight_ = 0.001; // minimum accumulated weight before estimates are trusted
    this.minDelayMs_ = 50; // clamp: durations below 50ms are treated as 50ms
    this.slow_ = new ewma(slow);
    this.fast_ = new ewma(fast);
  }

  var proto = EwmaBandWidthEstimator.prototype;

  // Record one load: duration in ms, size in bytes.
  proto.sample = function sample(durationMs, numBytes) {
    var clampedMs = Math.max(durationMs, this.minDelayMs_);
    // weight is duration in seconds; value is bandwidth in bits/s
    var durationS = clampedMs / 1000;
    var bandwidthInBps = 8 * numBytes / durationS;
    this.fast_.sample(durationS, bandwidthInBps);
    this.slow_.sample(durationS, bandwidthInBps);
  };

  proto.canEstimate = function canEstimate() {
    var fast = this.fast_;
    return fast && fast.getTotalWeight() >= this.minWeight_;
  };

  proto.getEstimate = function getEstimate() {
    if (!this.canEstimate()) {
      return this.defaultEstimate_;
    }

    // Take the minimum of the two averages: adapts down quickly, up slowly.
    return Math.min(this.fast_.getEstimate(), this.slow_.getEstimate());
  };

  proto.destroy = function destroy() {};

  return EwmaBandWidthEstimator;
}();
+
+/* harmony default export */ var ewma_bandwidth_estimator = (ewma_bandwidth_estimator_EwmaBandWidthEstimator);
+// CONCATENATED MODULE: ./src/controller/abr-controller.js
+
+
+
function abr_controller_defineProperties(target, props) {
  // Install each descriptor on the target, normalizing flags the way Babel's
  // _createClass helper does: non-enumerable by default, always configurable,
  // and writable whenever the descriptor is a data property.
  props.forEach(function (descriptor) {
    descriptor.enumerable = descriptor.enumerable || false;
    descriptor.configurable = true;

    if ("value" in descriptor) {
      descriptor.writable = true;
    }

    Object.defineProperty(target, descriptor.key, descriptor);
  });
}
+
function abr_controller_createClass(Constructor, protoProps, staticProps) {
  // Attach instance members to the prototype and static members to the
  // constructor itself, then hand the constructor back (Babel _createClass).
  if (protoProps) {
    abr_controller_defineProperties(Constructor.prototype, protoProps);
  }

  if (staticProps) {
    abr_controller_defineProperties(Constructor, staticProps);
  }

  return Constructor;
}
+
function _assertThisInitialized(self) {
  // Babel guard for transpiled subclasses: `this` must exist once super()
  // has run; anything other than undefined passes through untouched.
  if (self === void 0) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }

  return self;
}
+
function abr_controller_inheritsLoose(subClass, superClass) {
  // Loose-mode inheritance: delegate the child's prototype to the parent's
  // and expose parent statics via the constructor's __proto__.
  var derivedProto = Object.create(superClass.prototype);
  derivedProto.constructor = subClass;
  subClass.prototype = derivedProto;
  subClass.__proto__ = superClass;
}
+
+/*
+ * simple ABR Controller
+ * - compute next level based on last fragment bw heuristics
+ * - implement an abandon rules triggered if we have less than 2 frag buffered and if computed bw shows that we risk buffer stalling
+ */
+
+
+
+
+
+
+var abr_controller_window = window,
+ abr_controller_performance = abr_controller_window.performance;
+
// ABR (adaptive bitrate) controller: selects the next quality level from
// bandwidth heuristics and aborts fragment loads that risk buffer starvation.
//
// NOTE(review): this generated bundle is CORRUPTED inside _abandonRulesCheck —
// see the note further down. Regenerate dist from source rather than patching
// this file by hand.
var abr_controller_AbrController =
/*#__PURE__*/
function (_EventHandler) {
  abr_controller_inheritsLoose(AbrController, _EventHandler);

  function AbrController(hls) {
    var _this;

    _this = _EventHandler.call(this, hls, events.FRAG_LOADING, events.FRAG_LOADED, events.FRAG_BUFFERED, events.ERROR) || this;
    _this.lastLoadedFragLevel = 0; // level of the last loaded main fragment
    _this._nextAutoLevel = -1; // -1 means "no forced level" (see nextAutoLevel getter)
    _this.hls = hls;
    _this.timer = null; // interval handle driving _abandonRulesCheck
    _this._bwEstimator = null; // lazily created in onFragLoading
    _this.onCheck = _this._abandonRulesCheck.bind(_assertThisInitialized(_this));
    return _this;
  }

  var _proto = AbrController.prototype;

  _proto.destroy = function destroy() {
    this.clearTimer();
    event_handler.prototype.destroy.call(this);
  };

  // Start the 100ms abandon-rules timer for main fragments and lazily build
  // the bandwidth estimator once the playlist type (live/VoD) is known.
  _proto.onFragLoading = function onFragLoading(data) {
    var frag = data.frag;

    if (frag.type === 'main') {
      if (!this.timer) {
        this.fragCurrent = frag;
        this.timer = setInterval(this.onCheck, 100);
      } // lazy init of BwEstimator, rationale is that we use different params for Live/VoD
      // so we need to wait for stream manifest / playlist type to instantiate it.


      if (!this._bwEstimator) {
        var hls = this.hls;
        var config = hls.config;
        var level = frag.level;
        var isLive = hls.levels[level].details.live;
        var ewmaFast;
        var ewmaSlow;

        if (isLive) {
          ewmaFast = config.abrEwmaFastLive;
          ewmaSlow = config.abrEwmaSlowLive;
        } else {
          ewmaFast = config.abrEwmaFastVoD;
          ewmaSlow = config.abrEwmaSlowVoD;
        }

        this._bwEstimator = new ewma_bandwidth_estimator(hls, ewmaSlow, ewmaFast, config.abrEwmaDefaultEstimate);
      }
    }
  };

  _proto._abandonRulesCheck = function _abandonRulesCheck() {
    /*
    monitor fragment retrieval time...
    we compute expected time of arrival of the complete fragment.
    we compare it to expected time of buffer starvation
    */
    var hls = this.hls;
    var video = hls.media;
    var frag = this.fragCurrent;

    if (!frag) {
      return;
    }

    var loader = frag.loader;
    var minAutoLevel = hls.minAutoLevel; // if loader has been destroyed or loading has been aborted, stop timer and return

    if (!loader || loader.stats && loader.stats.aborted) {
      logger.warn('frag loader destroy or aborted, disarm abandonRules');
      this.clearTimer(); // reset forced auto level value so that next level will be selected

      this._nextAutoLevel = -1;
      return;
    }

    var stats = loader.stats;
    /* only monitor frag retrieval time if
    (video not paused OR first fragment being loaded(ready state === HAVE_NOTHING = 0)) AND autoswitching enabled AND not lowest level (=> means that we have several levels) */

    if (video && stats && (!video.paused && video.playbackRate !== 0 || !video.readyState) && frag.autoLevel && frag.level) {
      var requestDelay = abr_controller_performance.now() - stats.trequest;
      var playbackRate = Math.abs(video.playbackRate); // monitor fragment load progress after half of expected fragment duration,to stabilize bitrate

      if (requestDelay > 500 * frag.duration / playbackRate) {
        var levels = hls.levels;
        var loadRate = Math.max(1, stats.bw ? stats.bw / 8 : stats.loaded * 1000 / requestDelay); // byte/s; at least 1 byte/s to avoid division by zero
        // compute expected fragment length using frag duration and level bitrate. also ensure that expected len is gte than already loaded size

        var level = levels[frag.level];
        var levelBitrate = level.realBitrate ? Math.max(level.realBitrate, level.bitrate) : level.bitrate;
        var expectedLen = stats.total ? stats.total : Math.max(stats.loaded, Math.round(frag.duration * levelBitrate / 8));
        var pos = video.currentTime;
        var fragLoadedDelay = (expectedLen - stats.loaded) / loadRate;
        var bufferStarvationDelay = (BufferHelper.bufferInfo(video, pos, hls.config.maxBufferHole).end - pos) / playbackRate; // consider emergency switch down only if we have less than 2 frag buffered AND
        // time to finish loading current fragment is bigger than buffer starvation delay
        // ie if we risk buffer starvation if bw does not increase quickly

        if (bufferStarvationDelay < 2 * frag.duration / playbackRate && fragLoadedDelay > bufferStarvationDelay) {
          var fragLevelNextLoadedDelay;
          var nextLoadLevel; // lets iterate through lower level and try to find the biggest one that could avoid rebuffering
          // we start from current level - 1 and we step down , until we find a matching level

          for (nextLoadLevel = frag.level - 1; nextLoadLevel > minAutoLevel; nextLoadLevel--) {
            // compute time to load next fragment at lower level
            // 0.8 : consider only 80% of current bw to be conservative
            // 8 = bits per byte (bps/Bps)
            var levelNextBitrate = levels[nextLoadLevel].realBitrate ? Math.max(levels[nextLoadLevel].realBitrate, levels[nextLoadLevel].bitrate) : levels[nextLoadLevel].bitrate;

            var _fragLevelNextLoadedDelay = frag.duration * levelNextBitrate / (8 * 0.8 * loadRate);

            if (_fragLevelNextLoadedDelay < bufferStarvationDelay) {
              // we found a lower level that be rebuffering free with current estimated bw !
              break;
            }
          } // only emergency switch down if it takes less time to load new fragment at lowest level instead
          // of finishing loading current one ...


          if (fragLevelNextLoadedDelay < fragLoadedDelay) {
            // NOTE(review): the statement below is CORRUPTED generated output.
            // The logger.warn string literal is truncated and fused with what
            // looks like the loop header of the `_findBestLevel` method
            // (`for (var i = maxAutoLevel; i >= minAutoLevel; i--) {`). The
            // intervening code — the rest of the emergency abort (presumably
            // setting hls.nextLoadLevel, sampling the bw estimator, aborting
            // the loader, clearing the timer), plus the onFragLoaded/
            // onFragBuffered/onError/clearTimer methods and the
            // `_findBestLevel` signature — is missing from this bundle.
            // `fragLevelNextLoadedDelay` above is also read but never
            // assigned (only `_fragLevelNextLoadedDelay` is), which is
            // further evidence of the corruption. TODO: regenerate this dist
            // file from source; do not attempt to reconstruct it here.
            logger.warn("loading too slow, abort fragment loading and switch to level " + nextLoadLevel + ":fragLoadedDelay[" + nextLoadLevel + "]= minAutoLevel; i--) {
            var levelInfo = levels[i];

            if (!levelInfo) {
              continue;
            }

            var levelDetails = levelInfo.details;
            var avgDuration = levelDetails ? levelDetails.totalduration / levelDetails.fragments.length : currentFragDuration;
            var live = levelDetails ? levelDetails.live : false;
            var adjustedbw = void 0; // follow algorithm captured from stagefright :
            // https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
            // Pick the highest bandwidth stream below or equal to estimated bandwidth.
            // consider only 80% of the available bandwidth, but if we are switching up,
            // be even more conservative (70%) to avoid overestimating and immediately
            // switching back.

            if (i <= currentLevel) {
              adjustedbw = bwFactor * currentBw;
            } else {
              adjustedbw = bwUpFactor * currentBw;
            }

            var bitrate = levels[i].realBitrate ? Math.max(levels[i].realBitrate, levels[i].bitrate) : levels[i].bitrate;
            var fetchDuration = bitrate * avgDuration / adjustedbw;
            logger.trace("level/adjustedbw/bitrate/avgDuration/maxFetchDuration/fetchDuration: " + i + "/" + Math.round(adjustedbw) + "/" + bitrate + "/" + avgDuration + "/" + maxFetchDuration + "/" + fetchDuration); // if adjusted bw is greater than level bitrate AND

            if (adjustedbw > bitrate && ( // fragment fetchDuration unknown OR live stream OR fragment fetchDuration less than max allowed fetch duration, then this level matches
            // we don't account for max Fetch Duration for live streams, this is to avoid switching down when near the edge of live sliding window ...
            // special case to support startLevel = -1 (bitrateTest) on live streams : in that case we should not exit loop so that _findBestLevel will return -1
            !fetchDuration || live && !this.bitrateTestDelay || fetchDuration < maxFetchDuration)) {
              // as we are looping from highest to lowest, this will return the best achievable quality level
              return i;
            }
          } // not enough time budget even with quality level 0 ... rebuffering might happen


    return -1;
  };

  abr_controller_createClass(AbrController, [{
    key: "nextAutoLevel",
    // Effective next auto level: the forced value when bw is unknown,
    // otherwise the ABR-computed level capped by any forced value.
    get: function get() {
      var forcedAutoLevel = this._nextAutoLevel;
      var bwEstimator = this._bwEstimator; // in case next auto level has been forced, and bw not available or not reliable, return forced value

      if (forcedAutoLevel !== -1 && (!bwEstimator || !bwEstimator.canEstimate())) {
        return forcedAutoLevel;
      } // compute next level using ABR logic


      var nextABRAutoLevel = this._nextABRAutoLevel; // if forced auto level has been defined, use it to cap ABR computed quality level

      if (forcedAutoLevel !== -1) {
        nextABRAutoLevel = Math.min(forcedAutoLevel, nextABRAutoLevel);
      }

      return nextABRAutoLevel;
    },
    set: function set(nextLevel) {
      this._nextAutoLevel = nextLevel;
    }
  }, {
    key: "_nextABRAutoLevel",
    // ABR-computed level: first try for a level with no expected rebuffering;
    // failing that, accept up to maxStarvationDelay of rebuffering.
    get: function get() {
      var hls = this.hls;
      var maxAutoLevel = hls.maxAutoLevel,
          levels = hls.levels,
          config = hls.config,
          minAutoLevel = hls.minAutoLevel;
      var video = hls.media;
      var currentLevel = this.lastLoadedFragLevel;
      var currentFragDuration = this.fragCurrent ? this.fragCurrent.duration : 0;
      var pos = video ? video.currentTime : 0; // playbackRate is the absolute value of the playback rate; if video.playbackRate is 0, we use 1 to load as
      // if we're playing back at the normal rate.

      var playbackRate = video && video.playbackRate !== 0 ? Math.abs(video.playbackRate) : 1.0;
      var avgbw = this._bwEstimator ? this._bwEstimator.getEstimate() : config.abrEwmaDefaultEstimate; // bufferStarvationDelay is the wall-clock time left until the playback buffer is exhausted.

      var bufferStarvationDelay = (BufferHelper.bufferInfo(video, pos, config.maxBufferHole).end - pos) / playbackRate; // First, look to see if we can find a level matching with our avg bandwidth AND that could also guarantee no rebuffering at all

      var bestLevel = this._findBestLevel(currentLevel, currentFragDuration, avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, config.abrBandWidthFactor, config.abrBandWidthUpFactor, levels);

      if (bestLevel >= 0) {
        return bestLevel;
      } else {
        logger.trace('rebuffering expected to happen, lets try to find a quality level minimizing the rebuffering'); // not possible to get rid of rebuffering ... let's try to find level that will guarantee less than maxStarvationDelay of rebuffering
        // if no matching level found, logic will return 0

        var maxStarvationDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxStarvationDelay) : config.maxStarvationDelay;
        var bwFactor = config.abrBandWidthFactor;
        var bwUpFactor = config.abrBandWidthUpFactor;

        if (bufferStarvationDelay === 0) {
          // in case buffer is empty, let's check if previous fragment was loaded to perform a bitrate test
          var bitrateTestDelay = this.bitrateTestDelay;

          if (bitrateTestDelay) {
            // if it is the case, then we need to adjust our max starvation delay using maxLoadingDelay config value
            // max video loading delay used in automatic start level selection :
            // in that mode ABR controller will ensure that video loading time (ie the time to fetch the first fragment at lowest quality level +
            // the time to fetch the fragment at the appropriate quality level is less than ```maxLoadingDelay``` )
            // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
            var maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
            maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
            logger.trace("bitrate test took " + Math.round(1000 * bitrateTestDelay) + "ms, set first fragment max fetchDuration to " + Math.round(1000 * maxStarvationDelay) + " ms"); // don't use conservative factor on bitrate test

            bwFactor = bwUpFactor = 1;
          }
        }

        bestLevel = this._findBestLevel(currentLevel, currentFragDuration, avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay + maxStarvationDelay, bwFactor, bwUpFactor, levels);
        return Math.max(bestLevel, 0);
      }
    }
  }]);

  return AbrController;
}(event_handler);
+
+/* harmony default export */ var abr_controller = (abr_controller_AbrController);
+// CONCATENATED MODULE: ./src/controller/buffer-controller.ts
+
+
function buffer_controller_inheritsLoose(subClass, superClass) {
  // Loose-mode inheritance helper: the child's prototype delegates to the
  // parent's, and parent statics are reachable via the constructor chain.
  var linkedProto = Object.create(superClass.prototype);
  linkedProto.constructor = subClass;
  subClass.prototype = linkedProto;
  subClass.__proto__ = superClass;
}
+
+/*
+ * Buffer Controller
+ */
+
+
+
+
+
+var buffer_controller_MediaSource = getMediaSource();
+
+var buffer_controller_BufferController =
+/*#__PURE__*/
+function (_EventHandler) {
+ buffer_controller_inheritsLoose(BufferController, _EventHandler);
+
+ // the value that we have set mediasource.duration to
+ // (the actual duration may be tweaked slighly by the browser)
+ // the value that we want to set mediaSource.duration to
+ // the target duration of the current media playlist
+ // current stream state: true - for live broadcast, false - for VoD content
+ // cache the self generated object url to detect hijack of video tag
+ // signals that the sourceBuffers need to be flushed
+ // signals that mediaSource should have endOfStream called
+ // this is optional because this property is removed from the class sometimes
+ // The number of BUFFER_CODEC events received before any sourceBuffers are created
+ // The total number of BUFFER_CODEC events received
+ // A reference to the attached media element
+ // A reference to the active media source
+ // List of pending segments to be appended to source buffer
+ // A guard to see if we are currently appending to the source buffer
+ // counters
+ function BufferController(hls) {
+ var _this;
+
+ _this = _EventHandler.call(this, hls, events.MEDIA_ATTACHING, events.MEDIA_DETACHING, events.MANIFEST_PARSED, events.BUFFER_RESET, events.BUFFER_APPENDING, events.BUFFER_CODECS, events.BUFFER_EOS, events.BUFFER_FLUSHING, events.LEVEL_PTS_UPDATED, events.LEVEL_UPDATED) || this;
+ _this._msDuration = null;
+ _this._levelDuration = null;
+ _this._levelTargetDuration = 10;
+ _this._live = null;
+ _this._objectUrl = null;
+ _this._needsFlush = false;
+ _this._needsEos = false;
+ _this.config = void 0;
+ _this.audioTimestampOffset = void 0;
+ _this.bufferCodecEventsExpected = 0;
+ _this._bufferCodecEventsTotal = 0;
+ _this.media = null;
+ _this.mediaSource = null;
+ _this.segments = [];
+ _this.parent = void 0;
+ _this.appending = false;
+ _this.appended = 0;
+ _this.appendError = 0;
+ _this.flushBufferCounter = 0;
+ _this.tracks = {};
+ _this.pendingTracks = {};
+ _this.sourceBuffer = {};
+ _this.flushRange = [];
+
+ _this._onMediaSourceOpen = function () {
+ logger.log('media source opened');
+
+ _this.hls.trigger(events.MEDIA_ATTACHED, {
+ media: _this.media
+ });
+
+ var mediaSource = _this.mediaSource;
+
+ if (mediaSource) {
+ // once received, don't listen anymore to sourceopen event
+ mediaSource.removeEventListener('sourceopen', _this._onMediaSourceOpen);
+ }
+
+ _this.checkPendingTracks();
+ };
+
+ _this._onMediaSourceClose = function () {
+ logger.log('media source closed');
+ };
+
+ _this._onMediaSourceEnded = function () {
+ logger.log('media source ended');
+ };
+
+ _this._onSBUpdateEnd = function () {
+ // update timestampOffset
+ if (_this.audioTimestampOffset && _this.sourceBuffer.audio) {
+ var audioBuffer = _this.sourceBuffer.audio;
+ logger.warn("change mpeg audio timestamp offset from " + audioBuffer.timestampOffset + " to " + _this.audioTimestampOffset);
+ audioBuffer.timestampOffset = _this.audioTimestampOffset;
+ delete _this.audioTimestampOffset;
+ }
+
+ if (_this._needsFlush) {
+ _this.doFlush();
+ }
+
+ if (_this._needsEos) {
+ _this.checkEos();
+ }
+
+ _this.appending = false;
+ var parent = _this.parent; // count nb of pending segments waiting for appending on this sourcebuffer
+
+ var pending = _this.segments.reduce(function (counter, segment) {
+ return segment.parent === parent ? counter + 1 : counter;
+ }, 0); // this.sourceBuffer is better to use than media.buffered as it is closer to the PTS data from the fragments
+
+
+ var timeRanges = {};
+ var sbSet = _this.sourceBuffer;
+
+ for (var streamType in sbSet) {
+ var sb = sbSet[streamType];
+
+ if (!sb) {
+ throw Error("handling source buffer update end error: source buffer for " + streamType + " uninitilized and unable to update buffered TimeRanges.");
+ }
+
+ timeRanges[streamType] = sb.buffered;
+ }
+
+ _this.hls.trigger(events.BUFFER_APPENDED, {
+ parent: parent,
+ pending: pending,
+ timeRanges: timeRanges
+ }); // don't append in flushing mode
+
+
+ if (!_this._needsFlush) {
+ _this.doAppending();
+ }
+
+ _this.updateMediaElementDuration(); // appending goes first
+
+
+ if (pending === 0) {
+ _this.flushLiveBackBuffer();
+ }
+ };
+
+ _this._onSBUpdateError = function (event) {
+ logger.error('sourceBuffer error:', event); // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
+ // this error might not always be fatal (it is fatal if decode error is set, in that case
+ // it will be followed by a mediaElement error ...)
+
+ _this.hls.trigger(events.ERROR, {
+ type: ErrorTypes.MEDIA_ERROR,
+ details: ErrorDetails.BUFFER_APPENDING_ERROR,
+ fatal: false
+ }); // we don't need to do more than that, as accordin to the spec, updateend will be fired just after
+
+ };
+
+ _this.config = hls.config;
+ return _this;
+ }
+
+ var _proto = BufferController.prototype;
+
+ _proto.destroy = function destroy() {
+ event_handler.prototype.destroy.call(this);
+ };
+
+ _proto.onLevelPtsUpdated = function onLevelPtsUpdated(data) {
+ var type = data.type;
+ var audioTrack = this.tracks.audio; // Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
+ // in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset`
+ // is greater than 100ms (this is enough to handle seek for VOD or level change for LIVE videos). At the time of change we issue
+ // `SourceBuffer.abort()` and adjusting `SourceBuffer.timestampOffset` if `SourceBuffer.updating` is false or awaiting `updateend`
+ // event if SB is in updating state.
+ // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
+
+ if (type === 'audio' && audioTrack && audioTrack.container === 'audio/mpeg') {
+ // Chrome audio mp3 track
+ var audioBuffer = this.sourceBuffer.audio;
+
+ if (!audioBuffer) {
+ throw Error('Level PTS Updated and source buffer for audio uninitalized');
+ }
+
+ var delta = Math.abs(audioBuffer.timestampOffset - data.start); // adjust timestamp offset if time delta is greater than 100ms
+
+ if (delta > 0.1) {
+ var updating = audioBuffer.updating;
+
+ try {
+ audioBuffer.abort();
+ } catch (err) {
+ logger.warn('can not abort audio buffer: ' + err);
+ }
+
+ if (!updating) {
+ logger.warn('change mpeg audio timestamp offset from ' + audioBuffer.timestampOffset + ' to ' + data.start);
+ audioBuffer.timestampOffset = data.start;
+ } else {
+ this.audioTimestampOffset = data.start;
+ }
+ }
+ }
+ };
+
+ _proto.onManifestParsed = function onManifestParsed(data) {
+ // in case of alt audio 2 BUFFER_CODECS events will be triggered, one per stream controller
+ // sourcebuffers will be created all at once when the expected nb of tracks will be reached
+ // in case alt audio is not used, only one BUFFER_CODEC event will be fired from main stream controller
+ // it will contain the expected nb of source buffers, no need to compute it
+ this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = data.altAudio ? 2 : 1;
+ logger.log(this.bufferCodecEventsExpected + " bufferCodec event(s) expected");
+ };
+
+ // MEDIA_ATTACHING handler: creates a MediaSource, wires its lifecycle
+ // listeners and points the media element at it via an object URL, which is
+ // cached so onMediaDetaching can later verify/revoke it.
+ _proto.onMediaAttaching = function onMediaAttaching(data) {
+ var media = this.media = data.media;
+
+ if (media && buffer_controller_MediaSource) {
+ // setup the media source
+ var ms = this.mediaSource = new buffer_controller_MediaSource(); // Media Source listeners
+
+ ms.addEventListener('sourceopen', this._onMediaSourceOpen);
+ ms.addEventListener('sourceended', this._onMediaSourceEnded);
+ ms.addEventListener('sourceclose', this._onMediaSourceClose); // link video and media Source
+
+ media.src = window.URL.createObjectURL(ms); // cache the locally generated object url
+
+ this._objectUrl = media.src;
+ }
+ };
+
+ // MEDIA_DETACHING handler: gracefully ends the MediaSource, unhooks its
+ // listeners, revokes the object URL, resets all per-attachment buffering
+ // state, then emits MEDIA_DETACHED.
+ _proto.onMediaDetaching = function onMediaDetaching() {
+ logger.log('media source detaching');
+ var ms = this.mediaSource;
+
+ if (ms) {
+ if (ms.readyState === 'open') {
+ try {
+ // endOfStream could trigger exception if any sourcebuffer is in updating state
+ // we don't really care about checking sourcebuffer state here,
+ // as we are anyway detaching the MediaSource
+ // let's just avoid this exception to propagate
+ ms.endOfStream();
+ } catch (err) {
+ logger.warn("onMediaDetaching:" + err.message + " while calling endOfStream");
+ }
+ }
+
+ ms.removeEventListener('sourceopen', this._onMediaSourceOpen);
+ ms.removeEventListener('sourceended', this._onMediaSourceEnded);
+ ms.removeEventListener('sourceclose', this._onMediaSourceClose); // Detach properly the MediaSource from the HTMLMediaElement as
+ // suggested in https://github.com/w3c/media-source/issues/53.
+
+ if (this.media) {
+ if (this._objectUrl) {
+ window.URL.revokeObjectURL(this._objectUrl);
+ } // clean up video tag src only if it's our own url. some external libraries might
+ // hijack the video tag and change its 'src' without destroying the Hls instance first
+
+
+ if (this.media.src === this._objectUrl) {
+ this.media.removeAttribute('src');
+ this.media.load();
+ } else {
+ logger.warn('media.src was changed by a third party - skip cleanup');
+ }
+ }
+
+ // reset all buffering state so a subsequent attach starts clean
+ this.mediaSource = null;
+ this.media = null;
+ this._objectUrl = null;
+ this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
+ this.pendingTracks = {};
+ this.tracks = {};
+ this.sourceBuffer = {};
+ this.flushRange = [];
+ this.segments = [];
+ this.appended = 0;
+ }
+
+ this.hls.trigger(events.MEDIA_DETACHED);
+ };
+
+ // Creates all SourceBuffers in one shot once every expected BUFFER_CODECS
+ // event has arrived (or the 2-track maximum is reached), then flushes any
+ // segments queued while the buffers did not exist yet.
+ _proto.checkPendingTracks = function checkPendingTracks() {
+ var bufferCodecEventsExpected = this.bufferCodecEventsExpected,
+ pendingTracks = this.pendingTracks; // Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
+ // This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
+ // data has been appended to existing ones.
+ // 2 tracks is the max (one for audio, one for video). If we've reach this max go ahead and create the buffers.
+
+ var pendingTracksCount = Object.keys(pendingTracks).length;
+
+ if (pendingTracksCount && !bufferCodecEventsExpected || pendingTracksCount === 2) {
+ // ok, let's create them now !
+ this.createSourceBuffers(pendingTracks);
+ this.pendingTracks = {}; // append any pending segments now !
+
+ this.doAppending();
+ }
+ };
+
+ // BUFFER_RESET handler: detaches every SourceBuffer from the MediaSource,
+ // unhooks its listeners, and clears all append/flush bookkeeping.
+ _proto.onBufferReset = function onBufferReset() {
+ var sourceBuffer = this.sourceBuffer;
+
+ for (var type in sourceBuffer) {
+ var sb = sourceBuffer[type];
+
+ // best-effort teardown: removeSourceBuffer may throw if the SB was
+ // already removed or the MediaSource is closed — intentionally ignored
+ try {
+ if (sb) {
+ if (this.mediaSource) {
+ this.mediaSource.removeSourceBuffer(sb);
+ }
+
+ sb.removeEventListener('updateend', this._onSBUpdateEnd);
+ sb.removeEventListener('error', this._onSBUpdateError);
+ }
+ } catch (err) {}
+ }
+
+ this.sourceBuffer = {};
+ this.flushRange = [];
+ this.segments = [];
+ this.appended = 0;
+ };
+
+ // BUFFER_CODECS handler: stashes announced tracks until the MediaSource is
+ // open and all expected codec events have arrived; no-op once SourceBuffers
+ // already exist (MSE forbids adding buffers after appends have started).
+ _proto.onBufferCodecs = function onBufferCodecs(tracks) {
+ var _this2 = this;
+
+ // if source buffer(s) not created yet, appended buffer tracks in this.pendingTracks
+ // if sourcebuffers already created, do nothing ...
+ if (Object.keys(this.sourceBuffer).length) {
+ return;
+ }
+
+ Object.keys(tracks).forEach(function (trackName) {
+ _this2.pendingTracks[trackName] = tracks[trackName];
+ });
+ this.bufferCodecEventsExpected = Math.max(this.bufferCodecEventsExpected - 1, 0);
+
+ if (this.mediaSource && this.mediaSource.readyState === 'open') {
+ this.checkPendingTracks();
+ }
+ };
+
+ // Creates one SourceBuffer per pending track (mime built from container +
+ // level/track codec), wires updateend/error listeners, records track metadata
+ // and emits BUFFER_CREATED. A failing addSourceBuffer raises a non-fatal
+ // BUFFER_ADD_CODEC_ERROR instead of aborting the remaining tracks.
+ _proto.createSourceBuffers = function createSourceBuffers(tracks) {
+ var sourceBuffer = this.sourceBuffer,
+ mediaSource = this.mediaSource;
+
+ if (!mediaSource) {
+ throw Error('createSourceBuffers called when mediaSource was null');
+ }
+
+ for (var trackName in tracks) {
+ if (!sourceBuffer[trackName]) {
+ var track = tracks[trackName];
+
+ if (!track) {
+ throw Error("source buffer exists for track " + trackName + ", however track does not");
+ } // use levelCodec as first priority
+
+
+ var codec = track.levelCodec || track.codec;
+ var mimeType = track.container + ";codecs=" + codec;
+ logger.log("creating sourceBuffer(" + mimeType + ")");
+
+ try {
+ var sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
+ sb.addEventListener('updateend', this._onSBUpdateEnd);
+ sb.addEventListener('error', this._onSBUpdateError);
+ this.tracks[trackName] = {
+ buffer: sb,
+ codec: codec,
+ id: track.id,
+ container: track.container,
+ levelCodec: track.levelCodec
+ };
+ } catch (err) {
+ logger.error("error while trying to add sourceBuffer:" + err.message);
+ this.hls.trigger(events.ERROR, {
+ type: ErrorTypes.MEDIA_ERROR,
+ details: ErrorDetails.BUFFER_ADD_CODEC_ERROR,
+ fatal: false,
+ err: err,
+ mimeType: mimeType
+ });
+ }
+ }
+ }
+
+ this.hls.trigger(events.BUFFER_CREATED, {
+ tracks: this.tracks
+ });
+ };
+
+ // BUFFER_APPENDING handler: queues the segment and kicks the append pump,
+ // unless a flush is pending (flush completion re-triggers appending).
+ _proto.onBufferAppending = function onBufferAppending(data) {
+ if (!this._needsFlush) {
+ if (!this.segments) {
+ this.segments = [data];
+ } else {
+ this.segments.push(data);
+ }
+
+ this.doAppending();
+ }
+ } // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
+ // an undefined data.type will mark all buffers as EOS.
+ ;
+
+ // BUFFER_EOS handler: flags the matching SourceBuffer(s) (all of them when
+ // data.type is undefined) as ended, then lets checkEos decide whether the
+ // MediaSource can be closed.
+ _proto.onBufferEos = function onBufferEos(data) {
+ for (var type in this.sourceBuffer) {
+ if (!data.type || data.type === type) {
+ var sb = this.sourceBuffer[type];
+
+ if (sb && !sb.ended) {
+ sb.ended = true;
+ logger.log(type + " sourceBuffer now EOS");
+ }
+ }
+ }
+
+ this.checkEos();
+ } // if all source buffers are marked as ended, signal endOfStream() to MediaSource.
+ ;
+
+ // Calls mediaSource.endOfStream() once every SourceBuffer is both flagged
+ // `ended` and idle. If any buffer is still updating, defers via _needsEos
+ // (re-checked on the next updateend).
+ _proto.checkEos = function checkEos() {
+ var sourceBuffer = this.sourceBuffer,
+ mediaSource = this.mediaSource;
+
+ if (!mediaSource || mediaSource.readyState !== 'open') {
+ this._needsEos = false;
+ return;
+ }
+
+ for (var type in sourceBuffer) {
+ var sb = sourceBuffer[type];
+ if (!sb) continue;
+
+ if (!sb.ended) {
+ return;
+ }
+
+ if (sb.updating) {
+ this._needsEos = true;
+ return;
+ }
+ }
+
+ logger.log('all media data are available, signal endOfStream() to MediaSource and stop loading fragment'); // Notify the media element that it now has all of the media data
+
+ try {
+ mediaSource.endOfStream();
+ } catch (e) {
+ // endOfStream throws InvalidStateError if an SB became updating meanwhile
+ logger.warn('exception while calling mediaSource.endOfStream()');
+ }
+
+ this._needsEos = false;
+ };
+
+ // BUFFER_FLUSHING handler: queues a flush range for the given buffer type,
+ // or for both audio and video when no type is specified, then attempts the
+ // flush immediately.
+ _proto.onBufferFlushing = function onBufferFlushing(data) {
+ if (data.type) {
+ this.flushRange.push({
+ start: data.startOffset,
+ end: data.endOffset,
+ type: data.type
+ });
+ } else {
+ this.flushRange.push({
+ start: data.startOffset,
+ end: data.endOffset,
+ type: 'video'
+ });
+ this.flushRange.push({
+ start: data.startOffset,
+ end: data.endOffset,
+ type: 'audio'
+ });
+ } // attempt flush immediately
+
+
+ this.flushBufferCounter = 0;
+ this.doFlush();
+ };
+
+ // For live streams only: removes buffered media older than
+ // currentTime - max(liveBackBufferLength, level target duration) from every
+ // SourceBuffer, emitting LIVE_BACK_BUFFER_REACHED when a removal is issued.
+ _proto.flushLiveBackBuffer = function flushLiveBackBuffer() {
+ // clear back buffer for live only
+ if (!this._live) {
+ return;
+ }
+
+ var liveBackBufferLength = this.config.liveBackBufferLength;
+
+ if (!isFinite(liveBackBufferLength) || liveBackBufferLength < 0) {
+ return;
+ }
+
+ if (!this.media) {
+ logger.error('flushLiveBackBuffer called without attaching media');
+ return;
+ }
+
+ var currentTime = this.media.currentTime;
+ var sourceBuffer = this.sourceBuffer;
+ var bufferTypes = Object.keys(sourceBuffer);
+ var targetBackBufferPosition = currentTime - Math.max(liveBackBufferLength, this._levelTargetDuration);
+
+ for (var index = bufferTypes.length - 1; index >= 0; index--) {
+ var bufferType = bufferTypes[index];
+ var sb = sourceBuffer[bufferType];
+
+ if (sb) {
+ var buffered = sb.buffered; // when target buffer start exceeds actual buffer start
+
+ if (buffered.length > 0 && targetBackBufferPosition > buffered.start(0)) {
+ // remove buffer up until current time minus minimum back buffer length (removing buffer too close to current
+ // time will lead to playback freezing)
+ // credits for level target duration - https://github.com/videojs/http-streaming/blob/3132933b6aa99ddefab29c10447624efd6fd6e52/src/segment-loader.js#L91
+ if (this.removeBufferRange(bufferType, sb, 0, targetBackBufferPosition)) {
+ this.hls.trigger(events.LIVE_BACK_BUFFER_REACHED, {
+ bufferEnd: targetBackBufferPosition
+ });
+ }
+ }
+ }
+ }
+ };
+
+ // LEVEL_UPDATED handler: caches level duration / target duration / liveness
+ // from the parsed playlist details, then syncs the MediaSource duration.
+ _proto.onLevelUpdated = function onLevelUpdated(_ref) {
+ var details = _ref.details;
+
+ if (details.fragments.length > 0) {
+ this._levelDuration = details.totalduration + details.fragments[0].start;
+ this._levelTargetDuration = details.averagetargetduration || details.targetduration || 10;
+ this._live = details.live;
+ this.updateMediaElementDuration();
+ }
+ }
+ /**
+ * Update Media Source duration to current level duration or override to Infinity if configuration parameter
+ * 'liveDurationInfinity` is set to `true`
+ * More details: https://github.com/video-dev/hls.js/issues/355
+ */
+ ;
+
+ // Applies the level duration (or Infinity for live + liveDurationInfinity)
+ // to mediaSource.duration. Bails out while any SourceBuffer is updating,
+ // since MSE forbids setting duration then; only ever grows the duration to
+ // avoid flushing buffered media on quality switches.
+ _proto.updateMediaElementDuration = function updateMediaElementDuration() {
+ var config = this.config;
+ var duration;
+
+ if (this._levelDuration === null || !this.media || !this.mediaSource || !this.sourceBuffer || this.media.readyState === 0 || this.mediaSource.readyState !== 'open') {
+ return;
+ }
+
+ for (var type in this.sourceBuffer) {
+ var sb = this.sourceBuffer[type];
+
+ if (sb && sb.updating === true) {
+ // can't set duration whilst a buffer is updating
+ return;
+ }
+ }
+
+ duration = this.media.duration; // initialise to the value that the media source is reporting
+
+ if (this._msDuration === null) {
+ this._msDuration = this.mediaSource.duration;
+ }
+
+ if (this._live === true && config.liveDurationInfinity === true) {
+ // Override duration to Infinity
+ logger.log('Media Source duration is set to Infinity');
+ this._msDuration = this.mediaSource.duration = Infinity;
+ } else if (this._levelDuration > this._msDuration && this._levelDuration > duration || !isFiniteNumber(duration)) {
+ // levelDuration was the last value we set.
+ // not using mediaSource.duration as the browser may tweak this value
+ // only update Media Source duration if its value increase, this is to avoid
+ // flushing already buffered portion when switching between quality level
+ logger.log("Updating Media Source duration to " + this._levelDuration.toFixed(3));
+ this._msDuration = this.mediaSource.duration = this._levelDuration;
+ }
+ };
+
+ // Drains the flushRange queue. Each range is flushed via flushBuffer; when a
+ // range cannot complete (SB busy), _needsFlush defers the retry to the next
+ // updateend. Once empty, recomputes `appended` and emits BUFFER_FLUSHED.
+ _proto.doFlush = function doFlush() {
+ // loop through all buffer ranges to flush
+ while (this.flushRange.length) {
+ var range = this.flushRange[0]; // flushBuffer will abort any buffer append in progress and flush Audio/Video Buffer
+
+ if (this.flushBuffer(range.start, range.end, range.type)) {
+ // range flushed, remove from flush array
+ this.flushRange.shift();
+ this.flushBufferCounter = 0;
+ } else {
+ this._needsFlush = true; // avoid looping, wait for SB update end to retrigger a flush
+
+ return;
+ }
+ }
+
+ if (this.flushRange.length === 0) {
+ // everything flushed
+ this._needsFlush = false; // let's recompute this.appended, which is used to avoid flush looping
+
+ var appended = 0;
+ var sourceBuffer = this.sourceBuffer;
+
+ try {
+ for (var type in sourceBuffer) {
+ var sb = sourceBuffer[type];
+
+ if (sb) {
+ appended += sb.buffered.length;
+ }
+ }
+ } catch (error) {
+ // error could be thrown while accessing buffered, in case sourcebuffer has already been removed from MediaSource
+ // this is harmless at this stage, catch this to avoid reporting an internal exception
+ logger.error('error while accessing sourceBuffer.buffered');
+ }
+
+ this.appended = appended;
+ this.hls.trigger(events.BUFFER_FLUSHED);
+ }
+ };
+
+ // Append pump: pops one queued segment and appends it to its SourceBuffer.
+ // Re-queues the segment if the SB is busy. On append failure, raises a
+ // MEDIA_ERROR — BUFFER_FULL_ERROR for QuotaExceededError (code 22), else
+ // BUFFER_APPEND_ERROR, becoming fatal after appendErrorMaxRetry attempts.
+ _proto.doAppending = function doAppending() {
+ var config = this.config,
+ hls = this.hls,
+ segments = this.segments,
+ sourceBuffer = this.sourceBuffer;
+
+ if (!Object.keys(sourceBuffer).length) {
+ // early exit if no source buffers have been initialized yet
+ return;
+ }
+
+ if (!this.media || this.media.error) {
+ this.segments = [];
+ logger.error('trying to append although a media error occured, flush segment and abort');
+ return;
+ }
+
+ if (this.appending) {
+ // logger.log(`sb appending in progress`);
+ return;
+ }
+
+ var segment = segments.shift();
+
+ if (!segment) {
+ // handle undefined shift
+ return;
+ }
+
+ try {
+ var sb = sourceBuffer[segment.type];
+
+ if (!sb) {
+ // in case we don't have any source buffer matching with this segment type,
+ // it means that Mediasource fails to create sourcebuffer
+ // discard this segment, and trigger update end
+ this._onSBUpdateEnd();
+
+ return;
+ }
+
+ if (sb.updating) {
+ // if we are still updating the source buffer from the last segment, place this back at the front of the queue
+ segments.unshift(segment);
+ return;
+ } // reset sourceBuffer ended flag before appending segment
+
+
+ sb.ended = false; // logger.log(`appending ${segment.content} ${type} SB, size:${segment.data.length}, ${segment.parent}`);
+
+ this.parent = segment.parent;
+ sb.appendBuffer(segment.data);
+ this.appendError = 0;
+ this.appended++;
+ this.appending = true;
+ } catch (err) {
+ // in case any error occurred while appending, put back segment in segments table
+ logger.error("error while trying to append buffer:" + err.message);
+ segments.unshift(segment);
+ var event = {
+ type: ErrorTypes.MEDIA_ERROR,
+ parent: segment.parent,
+ details: '',
+ fatal: false
+ };
+
+ if (err.code === 22) {
+ // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
+ // let's stop appending any segments, and report BUFFER_FULL_ERROR error
+ this.segments = [];
+ event.details = ErrorDetails.BUFFER_FULL_ERROR;
+ } else {
+ this.appendError++;
+ event.details = ErrorDetails.BUFFER_APPEND_ERROR;
+ /* with UHD content, we could get loop of quota exceeded error until
+ browser is able to evict some data from sourcebuffer. retrying help recovering this
+ */
+
+ if (this.appendError > config.appendErrorMaxRetry) {
+ logger.log("fail " + config.appendErrorMaxRetry + " times to append segment in sourceBuffer");
+ this.segments = [];
+ event.fatal = true;
+ }
+ }
+
+ hls.trigger(events.ERROR, event);
+ }
+ }
+ /*
+ flush specified buffered range,
+ return true once range has been flushed.
+ as sourceBuffer.remove() is asynchronous, flushBuffer will be retriggered on sourceBuffer update end
+ */
+ ;
+
+ // Attempts to flush [startOffset, endOffset] from the SourceBuffer of type
+ // sbType. Returns true when nothing remains to flush; returns false when a
+ // remove() was issued or the SB is busy (caller retries on updateend).
+ // flushBufferCounter vs. appended guards against infinite flush loops.
+ _proto.flushBuffer = function flushBuffer(startOffset, endOffset, sbType) {
+ var sourceBuffer = this.sourceBuffer; // exit if no sourceBuffers are initialized
+
+ if (!Object.keys(sourceBuffer).length) {
+ return true;
+ }
+
+ var currentTime = 'null';
+
+ if (this.media) {
+ currentTime = this.media.currentTime.toFixed(3);
+ }
+
+ logger.log("flushBuffer,pos/start/end: " + currentTime + "/" + startOffset + "/" + endOffset); // safeguard to avoid infinite looping : don't try to flush more than the nb of appended segments
+
+ if (this.flushBufferCounter >= this.appended) {
+ logger.warn('abort flushing too many retries');
+ return true;
+ }
+
+ var sb = sourceBuffer[sbType]; // we are going to flush buffer, mark source buffer as 'not ended'
+
+ if (sb) {
+ sb.ended = false;
+
+ if (!sb.updating) {
+ if (this.removeBufferRange(sbType, sb, startOffset, endOffset)) {
+ this.flushBufferCounter++;
+ return false;
+ }
+ } else {
+ logger.warn('cannot flush, sb updating in progress');
+ return false;
+ }
+ }
+
+ logger.log('buffer flushed'); // everything flushed !
+
+ return true;
+ }
+ /**
+ * Removes first buffered range from provided source buffer that lies within given start and end offsets.
+ *
+ * @param {string} type Type of the source buffer, logging purposes only.
+ * @param {SourceBuffer} sb Target SourceBuffer instance.
+ * @param {number} startOffset
+ * @param {number} endOffset
+ *
+ * @returns {boolean} True when source buffer remove requested.
+ */
+ ;
+
+ // Issues sb.remove() for the first buffered range overlapping
+ // [startOffset, endOffset] by more than 500ms; returns true when a remove
+ // was requested. Errors from buffered/remove are logged, not rethrown.
+ _proto.removeBufferRange = function removeBufferRange(type, sb, startOffset, endOffset) {
+ try {
+ for (var i = 0; i < sb.buffered.length; i++) {
+ var bufStart = sb.buffered.start(i);
+ var bufEnd = sb.buffered.end(i);
+ var removeStart = Math.max(bufStart, startOffset);
+ var removeEnd = Math.min(bufEnd, endOffset);
+ /* sometimes sourcebuffer.remove() does not flush
+ the exact expected time range.
+ to avoid rounding issues/infinite loop,
+ only flush buffer range of length greater than 500ms.
+ */
+
+ if (Math.min(removeEnd, bufEnd) - removeStart > 0.5) {
+ var currentTime = 'null';
+
+ if (this.media) {
+ currentTime = this.media.currentTime.toString();
+ }
+
+ logger.log("sb remove " + type + " [" + removeStart + "," + removeEnd + "], of [" + bufStart + "," + bufEnd + "], pos:" + currentTime);
+ sb.remove(removeStart, removeEnd);
+ return true;
+ }
+ }
+ } catch (error) {
+ logger.warn('removeBufferRange failed', error);
+ }
+
+ return false;
+ };
+
+ return BufferController;
+}(event_handler);
+
+/* harmony default export */ var buffer_controller = (buffer_controller_BufferController);
+// CONCATENATED MODULE: ./src/controller/cap-level-controller.js
+function cap_level_controller_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function cap_level_controller_createClass(Constructor, protoProps, staticProps) { if (protoProps) cap_level_controller_defineProperties(Constructor.prototype, protoProps); if (staticProps) cap_level_controller_defineProperties(Constructor, staticProps); return Constructor; }
+
+function cap_level_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+
+/*
+ * cap stream level to media size dimension controller
+*/
+
+
+
+// CapLevelController: caps the auto quality level to the media element's
+// rendered size (and to levels not restricted by FPS-drop capping). Polls the
+// player size once per second while capping is active.
+var cap_level_controller_CapLevelController =
+/*#__PURE__*/
+function (_EventHandler) {
+ cap_level_controller_inheritsLoose(CapLevelController, _EventHandler);
+
+ function CapLevelController(hls) {
+ var _this;
+
+ _this = _EventHandler.call(this, hls, events.FPS_DROP_LEVEL_CAPPING, events.MEDIA_ATTACHING, events.MANIFEST_PARSED, events.BUFFER_CODECS, events.MEDIA_DETACHING) || this;
+ _this.autoLevelCapping = Number.POSITIVE_INFINITY;
+ _this.firstLevel = null;
+ _this.levels = [];
+ _this.media = null;
+ _this.restrictedLevels = [];
+ _this.timer = null;
+ return _this;
+ }
+
+ var _proto = CapLevelController.prototype;
+
+ _proto.destroy = function destroy() {
+ if (this.hls.config.capLevelToPlayerSize) {
+ this.media = null;
+ this.stopCapping();
+ }
+ };
+
+ _proto.onFpsDropLevelCapping = function onFpsDropLevelCapping(data) {
+ // Don't add a restricted level more than once
+ if (CapLevelController.isLevelAllowed(data.droppedLevel, this.restrictedLevels)) {
+ this.restrictedLevels.push(data.droppedLevel);
+ }
+ };
+
+ _proto.onMediaAttaching = function onMediaAttaching(data) {
+ // only track real <video> elements; audio-only media yields null
+ this.media = data.media instanceof window.HTMLVideoElement ? data.media : null;
+ };
+
+ _proto.onManifestParsed = function onManifestParsed(data) {
+ var hls = this.hls;
+ this.restrictedLevels = [];
+ this.levels = data.levels;
+ this.firstLevel = data.firstLevel;
+
+ if (hls.config.capLevelToPlayerSize && data.video) {
+ // Start capping immediately if the manifest has signaled video codecs
+ this.startCapping();
+ }
+ } // Only activate capping when playing a video stream; otherwise, multi-bitrate audio-only streams will be restricted
+ // to the first level
+ ;
+
+ _proto.onBufferCodecs = function onBufferCodecs(data) {
+ var hls = this.hls;
+
+ if (hls.config.capLevelToPlayerSize && data.video) {
+ // If the manifest did not signal a video codec capping has been deferred until we're certain video is present
+ this.startCapping();
+ }
+ };
+
+ _proto.onLevelsUpdated = function onLevelsUpdated(data) {
+ this.levels = data.levels;
+ };
+
+ _proto.onMediaDetaching = function onMediaDetaching() {
+ this.stopCapping();
+ };
+
+ // Poll body: recomputes hls.autoLevelCapping from the current player size;
+ // when the cap rises (e.g. fullscreen) triggers an immediate level switch.
+ _proto.detectPlayerSize = function detectPlayerSize() {
+ if (this.media) {
+ var levelsLength = this.levels ? this.levels.length : 0;
+
+ if (levelsLength) {
+ var hls = this.hls;
+ hls.autoLevelCapping = this.getMaxLevel(levelsLength - 1);
+
+ if (hls.autoLevelCapping > this.autoLevelCapping) {
+ // if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
+ // usually happen when the user go to the fullscreen mode.
+ hls.streamController.nextLevelSwitch();
+ }
+
+ this.autoLevelCapping = hls.autoLevelCapping;
+ }
+ }
+ }
+ /*
+ * returns level should be the one with the dimensions equal or greater than the media (player) dimensions (so the video will be downscaled)
+ */
+ ;
+
+ _proto.getMaxLevel = function getMaxLevel(capLevelIndex) {
+ var _this2 = this;
+
+ if (!this.levels) {
+ return -1;
+ }
+
+ var validLevels = this.levels.filter(function (level, index) {
+ return CapLevelController.isLevelAllowed(index, _this2.restrictedLevels) && index <= capLevelIndex;
+ });
+ return CapLevelController.getMaxLevelByMediaSize(validLevels, this.mediaWidth, this.mediaHeight);
+ };
+
+ _proto.startCapping = function startCapping() {
+ if (this.timer) {
+ // Don't reset capping if started twice; this can happen if the manifest signals a video codec
+ return;
+ }
+
+ this.autoLevelCapping = Number.POSITIVE_INFINITY;
+ this.hls.firstLevel = this.getMaxLevel(this.firstLevel);
+ clearInterval(this.timer);
+ this.timer = setInterval(this.detectPlayerSize.bind(this), 1000);
+ this.detectPlayerSize();
+ };
+
+ _proto.stopCapping = function stopCapping() {
+ this.restrictedLevels = [];
+ this.firstLevel = null;
+ this.autoLevelCapping = Number.POSITIVE_INFINITY;
+
+ if (this.timer) {
+ this.timer = clearInterval(this.timer);
+ this.timer = null;
+ }
+ };
+
+ // static: a level index is allowed unless it appears in restrictedLevels
+ CapLevelController.isLevelAllowed = function isLevelAllowed(level, restrictedLevels) {
+ if (restrictedLevels === void 0) {
+ restrictedLevels = [];
+ }
+
+ return restrictedLevels.indexOf(level) === -1;
+ };
+
+ // static: picks the smallest level whose dimensions cover width x height,
+ // preferring the greatest bandwidth among same-dimension levels.
+ // (identifier "atGreatestBandiwdth" typo originates in the generated source)
+ CapLevelController.getMaxLevelByMediaSize = function getMaxLevelByMediaSize(levels, width, height) {
+ if (!levels || levels && !levels.length) {
+ return -1;
+ } // Levels can have the same dimensions but differing bandwidths - since levels are ordered, we can look to the next
+ // to determine whether we've chosen the greatest bandwidth for the media's dimensions
+
+
+ var atGreatestBandiwdth = function atGreatestBandiwdth(curLevel, nextLevel) {
+ if (!nextLevel) {
+ return true;
+ }
+
+ return curLevel.width !== nextLevel.width || curLevel.height !== nextLevel.height;
+ }; // If we run through the loop without breaking, the media's dimensions are greater than every level, so default to
+ // the max level
+
+
+ var maxLevelIndex = levels.length - 1;
+
+ for (var i = 0; i < levels.length; i += 1) {
+ var level = levels[i];
+
+ if ((level.width >= width || level.height >= height) && atGreatestBandiwdth(level, levels[i + 1])) {
+ maxLevelIndex = i;
+ break;
+ }
+ }
+
+ return maxLevelIndex;
+ };
+
+ cap_level_controller_createClass(CapLevelController, [{
+ key: "mediaWidth",
+ get: function get() {
+ var width;
+ var media = this.media;
+
+ if (media) {
+ width = media.width || media.clientWidth || media.offsetWidth;
+ width *= CapLevelController.contentScaleFactor;
+ }
+
+ return width;
+ }
+ }, {
+ key: "mediaHeight",
+ get: function get() {
+ var height;
+ var media = this.media;
+
+ if (media) {
+ height = media.height || media.clientHeight || media.offsetHeight;
+ height *= CapLevelController.contentScaleFactor;
+ }
+
+ return height;
+ }
+ }], [{
+ key: "contentScaleFactor",
+ get: function get() {
+ // devicePixelRatio access can throw in some embedded contexts; default to 1
+ var pixelRatio = 1;
+
+ try {
+ pixelRatio = window.devicePixelRatio;
+ } catch (e) {}
+
+ return pixelRatio;
+ }
+ }]);
+
+ return CapLevelController;
+}(event_handler);
+
+/* harmony default export */ var cap_level_controller = (cap_level_controller_CapLevelController);
+// CONCATENATED MODULE: ./src/controller/fps-controller.js
+function fps_controller_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+
+/*
+ * FPS Controller
+*/
+
+
+
+// Cache window.performance for high-resolution timestamps in checkFPS.
+var fps_controller_window = window,
+ fps_controller_performance = fps_controller_window.performance;
+
+// FPSController: when capLevelOnFPSDrop is enabled, periodically samples
+// decoded/dropped frame counts and emits FPS_DROP; if the drop ratio exceeds
+// fpsDroppedMonitoringThreshold it caps the auto level one step down and
+// emits FPS_DROP_LEVEL_CAPPING.
+var fps_controller_FPSController =
+/*#__PURE__*/
+function (_EventHandler) {
+ fps_controller_inheritsLoose(FPSController, _EventHandler);
+
+ function FPSController(hls) {
+ return _EventHandler.call(this, hls, events.MEDIA_ATTACHING) || this;
+ }
+
+ var _proto = FPSController.prototype;
+
+ _proto.destroy = function destroy() {
+ if (this.timer) {
+ clearInterval(this.timer);
+ }
+
+ this.isVideoPlaybackQualityAvailable = false;
+ };
+
+ _proto.onMediaAttaching = function onMediaAttaching(data) {
+ var config = this.hls.config;
+
+ if (config.capLevelOnFPSDrop) {
+ var video = this.video = data.media instanceof window.HTMLVideoElement ? data.media : null;
+
+ // NOTE(review): `video` is null when data.media is not an HTMLVideoElement,
+ // and the typeof access below would then throw — confirm upstream only
+ // attaches video elements when capLevelOnFPSDrop is enabled.
+ if (typeof video.getVideoPlaybackQuality === 'function') {
+ this.isVideoPlaybackQualityAvailable = true;
+ }
+
+ clearInterval(this.timer);
+ this.timer = setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
+ }
+ };
+
+ // Compares the current frame counters against the previous sample; emits
+ // FPS_DROP and, on a severe drop, caps the level and switches immediately.
+ _proto.checkFPS = function checkFPS(video, decodedFrames, droppedFrames) {
+ var currentTime = fps_controller_performance.now();
+
+ if (decodedFrames) {
+ if (this.lastTime) {
+ var currentPeriod = currentTime - this.lastTime,
+ currentDropped = droppedFrames - this.lastDroppedFrames,
+ currentDecoded = decodedFrames - this.lastDecodedFrames,
+ droppedFPS = 1000 * currentDropped / currentPeriod,
+ hls = this.hls;
+ hls.trigger(events.FPS_DROP, {
+ currentDropped: currentDropped,
+ currentDecoded: currentDecoded,
+ totalDroppedFrames: droppedFrames
+ });
+
+ if (droppedFPS > 0) {
+ // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
+ if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
+ var currentLevel = hls.currentLevel;
+ logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
+
+ if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
+ currentLevel = currentLevel - 1;
+ hls.trigger(events.FPS_DROP_LEVEL_CAPPING, {
+ level: currentLevel,
+ droppedLevel: hls.currentLevel
+ });
+ hls.autoLevelCapping = currentLevel;
+ hls.streamController.nextLevelSwitch();
+ }
+ }
+ }
+ }
+
+ this.lastTime = currentTime;
+ this.lastDroppedFrames = droppedFrames;
+ this.lastDecodedFrames = decodedFrames;
+ }
+ };
+
+ // Timer body: prefers the standard getVideoPlaybackQuality API, falling back
+ // to the prefixed webkit frame counters.
+ _proto.checkFPSInterval = function checkFPSInterval() {
+ var video = this.video;
+
+ if (video) {
+ if (this.isVideoPlaybackQualityAvailable) {
+ var videoPlaybackQuality = video.getVideoPlaybackQuality();
+ this.checkFPS(video, videoPlaybackQuality.totalVideoFrames, videoPlaybackQuality.droppedVideoFrames);
+ } else {
+ this.checkFPS(video, video.webkitDecodedFrameCount, video.webkitDroppedFrameCount);
+ }
+ }
+ };
+
+ return FPSController;
+}(event_handler);
+
+/* harmony default export */ var fps_controller = (fps_controller_FPSController);
+// CONCATENATED MODULE: ./src/utils/xhr-loader.js
+/**
+ * XHR based loader
+*/
+
+// Cache performance (for load timing stats) and XMLHttpRequest from window.
+var xhr_loader_window = window,
+ xhr_loader_performance = xhr_loader_window.performance,
+ XMLHttpRequest = xhr_loader_window.XMLHttpRequest;
+
+// XhrLoader: XMLHttpRequest-based playlist/fragment loader with timing stats,
+// request timeout rearming, and exponential-backoff retry on retriable HTTP
+// errors. Cancellation is signalled through stats.aborted.
+var xhr_loader_XhrLoader =
+/*#__PURE__*/
+function () {
+ function XhrLoader(config) {
+ // optional user hook to customize the XHR (e.g. headers, withCredentials)
+ if (config && config.xhrSetup) {
+ this.xhrSetup = config.xhrSetup;
+ }
+ }
+
+ var _proto = XhrLoader.prototype;
+
+ _proto.destroy = function destroy() {
+ this.abort();
+ this.loader = null;
+ };
+
+ // Aborts any in-flight request and clears both pending timers.
+ _proto.abort = function abort() {
+ var loader = this.loader;
+
+ if (loader && loader.readyState !== 4) {
+ this.stats.aborted = true;
+ loader.abort();
+ }
+
+ window.clearTimeout(this.requestTimeout);
+ this.requestTimeout = null;
+ window.clearTimeout(this.retryTimeout);
+ this.retryTimeout = null;
+ };
+
+ // Entry point: records request start time and kicks off the first attempt.
+ _proto.load = function load(context, config, callbacks) {
+ this.context = context;
+ this.config = config;
+ this.callbacks = callbacks;
+ this.stats = {
+ trequest: xhr_loader_performance.now(),
+ retry: 0
+ };
+ this.retryDelay = config.retryDelay;
+ this.loadInternal();
+ };
+
+ // One request attempt; also the retry target scheduled by readystatechange.
+ _proto.loadInternal = function loadInternal() {
+ var xhr,
+ context = this.context;
+ xhr = this.loader = new XMLHttpRequest();
+ var stats = this.stats;
+ stats.tfirst = 0;
+ stats.loaded = 0;
+ var xhrSetup = this.xhrSetup;
+
+ try {
+ if (xhrSetup) {
+ try {
+ xhrSetup(xhr, context.url);
+ } catch (e) {
+ // fix xhrSetup: (xhr, url) => {xhr.setRequestHeader("Content-Language", "test");}
+ // not working, as xhr.setRequestHeader expects xhr.readyState === OPEN
+ xhr.open('GET', context.url, true);
+ xhrSetup(xhr, context.url);
+ }
+ }
+
+ if (!xhr.readyState) {
+ xhr.open('GET', context.url, true);
+ }
+ } catch (e) {
+ // IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
+ this.callbacks.onError({
+ code: xhr.status,
+ text: e.message
+ }, context, xhr);
+ return;
+ }
+
+ // byte-range request (end is exclusive, hence the -1)
+ if (context.rangeEnd) {
+ xhr.setRequestHeader('Range', 'bytes=' + context.rangeStart + '-' + (context.rangeEnd - 1));
+ }
+
+ xhr.onreadystatechange = this.readystatechange.bind(this);
+ xhr.onprogress = this.loadprogress.bind(this);
+ xhr.responseType = context.responseType; // setup timeout before we perform request
+
+ this.requestTimeout = window.setTimeout(this.loadtimeout.bind(this), this.config.timeout);
+ xhr.send();
+ };
+
+ // Dispatches success / retry / error once the request completes, and rearms
+ // the timeout while the request is still in flight.
+ _proto.readystatechange = function readystatechange(event) {
+ var xhr = event.currentTarget,
+ readyState = xhr.readyState,
+ stats = this.stats,
+ context = this.context,
+ config = this.config; // don't proceed if xhr has been aborted
+
+ if (stats.aborted) {
+ return;
+ } // >= HEADERS_RECEIVED
+
+
+ if (readyState >= 2) {
+ // clear xhr timeout and rearm it if readyState less than 4
+ window.clearTimeout(this.requestTimeout);
+
+ if (stats.tfirst === 0) {
+ stats.tfirst = Math.max(xhr_loader_performance.now(), stats.trequest);
+ }
+
+ if (readyState === 4) {
+ var status = xhr.status; // http status between 200 to 299 are all successful
+
+ if (status >= 200 && status < 300) {
+ stats.tload = Math.max(stats.tfirst, xhr_loader_performance.now());
+ var data, len;
+
+ if (context.responseType === 'arraybuffer') {
+ data = xhr.response;
+ len = data.byteLength;
+ } else {
+ data = xhr.responseText;
+ len = data.length;
+ }
+
+ stats.loaded = stats.total = len;
+ var response = {
+ url: xhr.responseURL,
+ data: data
+ };
+ this.callbacks.onSuccess(response, stats, context, xhr);
+ } else {
+ // if max nb of retries reached or if http status between 400 and 499 (such error cannot be recovered, retrying is useless), return error
+ if (stats.retry >= config.maxRetry) {
+ logger.error(status + " while loading " + context.url);
+ this.callbacks.onError({
+ code: status,
+ text: xhr.statusText
+ }, context, xhr);
+ } else {
+ // retry
+ logger.warn(status + " while loading " + context.url + ", retrying in " + this.retryDelay + "..."); // aborts and resets internal state
+
+ this.destroy(); // schedule retry
+
+ this.retryTimeout = window.setTimeout(this.loadInternal.bind(this), this.retryDelay); // set exponential backoff
+
+ this.retryDelay = Math.min(2 * this.retryDelay, config.maxRetryDelay);
+ stats.retry++;
+ }
+ }
+ } else {
+ // readyState >= 2 AND readyState !==4 (readyState = HEADERS_RECEIVED || LOADING) rearm timeout as xhr not finished yet
+ this.requestTimeout = window.setTimeout(this.loadtimeout.bind(this), config.timeout);
+ }
+ }
+ };
+
+ _proto.loadtimeout = function loadtimeout() {
+ logger.warn("timeout while loading " + this.context.url);
+ this.callbacks.onTimeout(this.stats, this.context, null);
+ };
+
+ // Progress events update the stats and forward to the optional onProgress callback.
+ _proto.loadprogress = function loadprogress(event) {
+ var xhr = event.currentTarget,
+ stats = this.stats;
+ stats.loaded = event.loaded;
+
+ if (event.lengthComputable) {
+ stats.total = event.total;
+ }
+
+ var onProgress = this.callbacks.onProgress;
+
+ if (onProgress) {
+ // third arg is to provide on progress data
+ onProgress(stats, this.context, null, xhr);
+ }
+ };
+
+ return XhrLoader;
+}();
+
+/* harmony default export */ var xhr_loader = (xhr_loader_XhrLoader);
+// EXTERNAL MODULE: ./src/empty.js
+var empty = __webpack_require__("./src/empty.js");
+
+// CONCATENATED MODULE: ./src/utils/mediakeys-helper.ts
+/**
+ * @see https://developer.mozilla.org/en-US/docs/Web/API/Navigator/requestMediaKeySystemAccess
+ */
+var KeySystems;
+
+(function (KeySystems) {
+ KeySystems["WIDEVINE"] = "com.widevine.alpha";
+ KeySystems["PLAYREADY"] = "com.microsoft.playready";
+})(KeySystems || (KeySystems = {}));
+
+var requestMediaKeySystemAccess = function () {
+ if (typeof window !== 'undefined' && window.navigator && window.navigator.requestMediaKeySystemAccess) {
+ return window.navigator.requestMediaKeySystemAccess.bind(window.navigator);
+ } else {
+ return null;
+ }
+}();
+
+
+// CONCATENATED MODULE: ./src/config.ts
+function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; var ownKeys = Object.keys(source); if (typeof Object.getOwnPropertySymbols === 'function') { ownKeys = ownKeys.concat(Object.getOwnPropertySymbols(source).filter(function (sym) { return Object.getOwnPropertyDescriptor(source, sym).enumerable; })); } ownKeys.forEach(function (key) { _defineProperty(target, key, source[key]); }); } return target; }
+
+function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+
+/**
+ * HLS config
+ */
+
+
+
+
+ // import FetchLoader from './utils/fetch-loader';
+
+
+
+
+
+
+
+
+
+// If possible, keep hlsDefaultConfig shallow
+// It is cloned whenever a new Hls instance is created, by keeping the config
+var hlsDefaultConfig = _objectSpread({
+ autoStartLoad: true,
+ // used by stream-controller
+ startPosition: -1,
+ // used by stream-controller
+ defaultAudioCodec: void 0,
+ // used by stream-controller
+ debug: false,
+ // used by logger
+ capLevelOnFPSDrop: false,
+ // used by fps-controller
+ capLevelToPlayerSize: false,
+ // used by cap-level-controller
+ initialLiveManifestSize: 1,
+ // used by stream-controller
+ maxBufferLength: 30,
+ // used by stream-controller
+ maxBufferSize: 60 * 1000 * 1000,
+ // used by stream-controller
+ maxBufferHole: 0.5,
+ // used by stream-controller
+ lowBufferWatchdogPeriod: 0.5,
+ // used by stream-controller
+ highBufferWatchdogPeriod: 3,
+ // used by stream-controller
+ nudgeOffset: 0.1,
+ // used by stream-controller
+ nudgeMaxRetry: 3,
+ // used by stream-controller
+ maxFragLookUpTolerance: 0.25,
+ // used by stream-controller
+ liveSyncDurationCount: 3,
+ // used by stream-controller
+ liveMaxLatencyDurationCount: Infinity,
+ // used by stream-controller
+ liveSyncDuration: void 0,
+ // used by stream-controller
+ liveMaxLatencyDuration: void 0,
+ // used by stream-controller
+ liveDurationInfinity: false,
+ // used by buffer-controller
+ liveBackBufferLength: Infinity,
+ // used by buffer-controller
+ maxMaxBufferLength: 600,
+ // used by stream-controller
+ enableWorker: true,
+ // used by demuxer
+ enableSoftwareAES: true,
+ // used by decrypter
+ manifestLoadingTimeOut: 10000,
+ // used by playlist-loader
+ manifestLoadingMaxRetry: 1,
+ // used by playlist-loader
+ manifestLoadingRetryDelay: 1000,
+ // used by playlist-loader
+ manifestLoadingMaxRetryTimeout: 64000,
+ // used by playlist-loader
+ startLevel: void 0,
+ // used by level-controller
+ levelLoadingTimeOut: 10000,
+ // used by playlist-loader
+ levelLoadingMaxRetry: 4,
+ // used by playlist-loader
+ levelLoadingRetryDelay: 1000,
+ // used by playlist-loader
+ levelLoadingMaxRetryTimeout: 64000,
+ // used by playlist-loader
+ fragLoadingTimeOut: 20000,
+ // used by fragment-loader
+ fragLoadingMaxRetry: 6,
+ // used by fragment-loader
+ fragLoadingRetryDelay: 1000,
+ // used by fragment-loader
+ fragLoadingMaxRetryTimeout: 64000,
+ // used by fragment-loader
+ startFragPrefetch: false,
+ // used by stream-controller
+ fpsDroppedMonitoringPeriod: 5000,
+ // used by fps-controller
+ fpsDroppedMonitoringThreshold: 0.2,
+ // used by fps-controller
+ appendErrorMaxRetry: 3,
+ // used by buffer-controller
+ loader: xhr_loader,
+ // loader: FetchLoader,
+ fLoader: void 0,
+ // used by fragment-loader
+ pLoader: void 0,
+ // used by playlist-loader
+ xhrSetup: void 0,
+ // used by xhr-loader
+ licenseXhrSetup: void 0,
+ // used by eme-controller
+ // fetchSetup: void 0,
+ abrController: abr_controller,
+ bufferController: buffer_controller,
+ capLevelController: cap_level_controller,
+ fpsController: fps_controller,
+ stretchShortVideoTrack: false,
+ // used by mp4-remuxer
+ maxAudioFramesDrift: 1,
+ // used by mp4-remuxer
+ forceKeyFrameOnDiscontinuity: true,
+ // used by ts-demuxer
+ abrEwmaFastLive: 3,
+ // used by abr-controller
+ abrEwmaSlowLive: 9,
+ // used by abr-controller
+ abrEwmaFastVoD: 3,
+ // used by abr-controller
+ abrEwmaSlowVoD: 9,
+ // used by abr-controller
+ abrEwmaDefaultEstimate: 5e5,
+ // 500 kbps // used by abr-controller
+ abrBandWidthFactor: 0.95,
+ // used by abr-controller
+ abrBandWidthUpFactor: 0.7,
+ // used by abr-controller
+ abrMaxWithRealBitrate: false,
+ // used by abr-controller
+ maxStarvationDelay: 4,
+ // used by abr-controller
+ maxLoadingDelay: 4,
+ // used by abr-controller
+ minAutoBitrate: 0,
+ // used by hls
+ emeEnabled: false,
+ // used by eme-controller
+ widevineLicenseUrl: void 0,
+ // used by eme-controller
+ requestMediaKeySystemAccessFunc: requestMediaKeySystemAccess
+}, timelineConfig(), {
+ subtitleStreamController: false ? undefined : void 0,
+ subtitleTrackController: false ? undefined : void 0,
+ timelineController: false ? undefined : void 0,
+ audioStreamController: false ? undefined : void 0,
+ audioTrackController: false ? undefined : void 0,
+ emeController: false ? undefined : void 0
+});
+
+function timelineConfig() {
+ if (true) {
+ // intentionally doing this over returning Partial above
+ // this has the added nice property of still requiring the object below to completely define all props.
+ return {};
+ }
+
+ return {
+ cueHandler: empty,
+ // used by timeline-controller
+ enableCEA708Captions: true,
+ // used by timeline-controller
+ enableWebVTT: true,
+ // used by timeline-controller
+ captionsTextTrack1Label: 'English',
+ // used by timeline-controller
+ captionsTextTrack1LanguageCode: 'en',
+ // used by timeline-controller
+ captionsTextTrack2Label: 'Spanish',
+ // used by timeline-controller
+ captionsTextTrack2LanguageCode: 'es' // used by timeline-controller
+
+ };
+}
+// EXTERNAL MODULE: ./node_modules/eventemitter3/index.js
+var eventemitter3 = __webpack_require__("./node_modules/eventemitter3/index.js");
+
+// CONCATENATED MODULE: ./src/observer.ts
+function observer_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+
+
+/**
+ * Simple adapter sub-class of Nodejs-like EventEmitter.
+ */
+
+var Observer =
+/*#__PURE__*/
+function (_EventEmitter) {
+ observer_inheritsLoose(Observer, _EventEmitter);
+
+ function Observer() {
+ return _EventEmitter.apply(this, arguments) || this;
+ }
+
+ var _proto = Observer.prototype;
+
+ /**
+ * We simply want to pass along the event-name itself
+ * in every call to a handler, which is the purpose of our `trigger` method
+ * extending the standard API.
+ */
+ _proto.trigger = function trigger(event) {
+ for (var _len = arguments.length, data = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
+ data[_key - 1] = arguments[_key];
+ }
+
+ this.emit.apply(this, [event, event].concat(data));
+ };
+
+ return Observer;
+}(eventemitter3["EventEmitter"]);
+// CONCATENATED MODULE: ./src/hls.ts
+/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return hls_Hls; });
+function hls_objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; var ownKeys = Object.keys(source); if (typeof Object.getOwnPropertySymbols === 'function') { ownKeys = ownKeys.concat(Object.getOwnPropertySymbols(source).filter(function (sym) { return Object.getOwnPropertyDescriptor(source, sym).enumerable; })); } ownKeys.forEach(function (key) { hls_defineProperty(target, key, source[key]); }); } return target; }
+
+function hls_defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+
+function hls_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function hls_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function hls_createClass(Constructor, protoProps, staticProps) { if (protoProps) hls_defineProperties(Constructor.prototype, protoProps); if (staticProps) hls_defineProperties(Constructor, staticProps); return Constructor; }
+
+function hls_inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+/**
+ * @module Hls
+ * @class
+ * @constructor
+ */
+
+var hls_Hls =
+/*#__PURE__*/
+function (_Observer) {
+ hls_inheritsLoose(Hls, _Observer);
+
+ /**
+ * @type {boolean}
+ */
+ Hls.isSupported = function isSupported() {
+ return is_supported_isSupported();
+ }
+ /**
+ * @type {HlsEvents}
+ */
+ ;
+
+ hls_createClass(Hls, null, [{
+ key: "version",
+
+ /**
+ * @type {string}
+ */
+ get: function get() {
+ return undefined;
+ }
+ }, {
+ key: "Events",
+ get: function get() {
+ return events;
+ }
+ /**
+ * @type {HlsErrorTypes}
+ */
+
+ }, {
+ key: "ErrorTypes",
+ get: function get() {
+ return ErrorTypes;
+ }
+ /**
+ * @type {HlsErrorDetails}
+ */
+
+ }, {
+ key: "ErrorDetails",
+ get: function get() {
+ return ErrorDetails;
+ }
+ /**
+ * @type {HlsConfig}
+ */
+
+ }, {
+ key: "DefaultConfig",
+ get: function get() {
+ if (!Hls.defaultConfig) {
+ return hlsDefaultConfig;
+ }
+
+ return Hls.defaultConfig;
+ }
+ /**
+ * @type {HlsConfig}
+ */
+ ,
+ set: function set(defaultConfig) {
+ Hls.defaultConfig = defaultConfig;
+ }
+ /**
+ * Creates an instance of an HLS client that can attach to exactly one `HTMLMediaElement`.
+ *
+ * @constructs Hls
+ * @param {HlsConfig} config
+ */
+
+ }]);
+
+ function Hls(userConfig) {
+ var _this;
+
+ if (userConfig === void 0) {
+ userConfig = {};
+ }
+
+ _this = _Observer.call(this) || this;
+ _this.config = void 0;
+ _this._autoLevelCapping = void 0;
+ _this.abrController = void 0;
+ _this.capLevelController = void 0;
+ _this.levelController = void 0;
+ _this.streamController = void 0;
+ _this.networkControllers = void 0;
+ _this.audioTrackController = void 0;
+ _this.subtitleTrackController = void 0;
+ _this.emeController = void 0;
+ _this.coreComponents = void 0;
+ _this.media = null;
+ _this.url = null;
+ var defaultConfig = Hls.DefaultConfig;
+
+ if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
+ throw new Error('Illegal hls.js config: don\'t mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration');
+ } // Shallow clone
+
+
+ _this.config = hls_objectSpread({}, defaultConfig, userConfig);
+
+ var _assertThisInitialize = hls_assertThisInitialized(_this),
+ config = _assertThisInitialize.config;
+
+ if (config.liveMaxLatencyDurationCount !== void 0 && config.liveMaxLatencyDurationCount <= config.liveSyncDurationCount) {
+ throw new Error('Illegal hls.js config: "liveMaxLatencyDurationCount" must be gt "liveSyncDurationCount"');
+ }
+
+ if (config.liveMaxLatencyDuration !== void 0 && (config.liveSyncDuration === void 0 || config.liveMaxLatencyDuration <= config.liveSyncDuration)) {
+ throw new Error('Illegal hls.js config: "liveMaxLatencyDuration" must be gt "liveSyncDuration"');
+ }
+
+ enableLogs(config.debug);
+ _this._autoLevelCapping = -1; // core controllers and network loaders
+
+ /**
+ * @member {AbrController} abrController
+ */
+
+ var abrController = _this.abrController = new config.abrController(hls_assertThisInitialized(_this)); // eslint-disable-line new-cap
+
+ var bufferController = new config.bufferController(hls_assertThisInitialized(_this)); // eslint-disable-line new-cap
+
+ var capLevelController = _this.capLevelController = new config.capLevelController(hls_assertThisInitialized(_this)); // eslint-disable-line new-cap
+
+ var fpsController = new config.fpsController(hls_assertThisInitialized(_this)); // eslint-disable-line new-cap
+
+ var playListLoader = new playlist_loader(hls_assertThisInitialized(_this));
+ var fragmentLoader = new fragment_loader(hls_assertThisInitialized(_this));
+ var keyLoader = new key_loader(hls_assertThisInitialized(_this));
+ var id3TrackController = new id3_track_controller(hls_assertThisInitialized(_this)); // network controllers
+
+ /**
+ * @member {LevelController} levelController
+ */
+
+ var levelController = _this.levelController = new level_controller_LevelController(hls_assertThisInitialized(_this)); // FIXME: FragmentTracker must be defined before StreamController because the order of event handling is important
+
+ var fragmentTracker = new fragment_tracker_FragmentTracker(hls_assertThisInitialized(_this));
+ /**
+ * @member {StreamController} streamController
+ */
+
+ var streamController = _this.streamController = new stream_controller_default.a(hls_assertThisInitialized(_this), fragmentTracker);
+ var networkControllers = [levelController, streamController]; // optional audio stream controller
+
+ /**
+ * @var {ICoreComponent | Controller}
+ */
+
+ var Controller = config.audioStreamController;
+
+ if (Controller) {
+ networkControllers.push(new Controller(hls_assertThisInitialized(_this), fragmentTracker));
+ }
+ /**
+ * @member {INetworkController[]} networkControllers
+ */
+
+
+ _this.networkControllers = networkControllers;
+ /**
+ * @var {ICoreComponent[]}
+ */
+
+ var coreComponents = [playListLoader, fragmentLoader, keyLoader, abrController, bufferController, capLevelController, fpsController, id3TrackController, fragmentTracker]; // optional audio track and subtitle controller
+
+ Controller = config.audioTrackController;
+
+ if (Controller) {
+ var audioTrackController = new Controller(hls_assertThisInitialized(_this));
+ /**
+ * @member {AudioTrackController} audioTrackController
+ */
+
+ _this.audioTrackController = audioTrackController;
+ coreComponents.push(audioTrackController);
+ }
+
+ Controller = config.subtitleTrackController;
+
+ if (Controller) {
+ var subtitleTrackController = new Controller(hls_assertThisInitialized(_this));
+ /**
+ * @member {SubtitleTrackController} subtitleTrackController
+ */
+
+ _this.subtitleTrackController = subtitleTrackController;
+ networkControllers.push(subtitleTrackController);
+ }
+
+ Controller = config.emeController;
+
+ if (Controller) {
+ var emeController = new Controller(hls_assertThisInitialized(_this));
+ /**
+ * @member {EMEController} emeController
+ */
+
+ _this.emeController = emeController;
+ coreComponents.push(emeController);
+ } // optional subtitle controllers
+
+
+ Controller = config.subtitleStreamController;
+
+ if (Controller) {
+ networkControllers.push(new Controller(hls_assertThisInitialized(_this), fragmentTracker));
+ }
+
+ Controller = config.timelineController;
+
+ if (Controller) {
+ coreComponents.push(new Controller(hls_assertThisInitialized(_this)));
+ }
+ /**
+ * @member {ICoreComponent[]}
+ */
+
+
+ _this.coreComponents = coreComponents;
+ return _this;
+ }
+ /**
+ * Dispose of the instance
+ */
+
+
+ var _proto = Hls.prototype;
+
+ _proto.destroy = function destroy() {
+ logger.log('destroy');
+ this.trigger(events.DESTROYING);
+ this.detachMedia();
+ this.coreComponents.concat(this.networkControllers).forEach(function (component) {
+ component.destroy();
+ });
+ this.url = null;
+ this.removeAllListeners();
+ this._autoLevelCapping = -1;
+ }
+ /**
+ * Attach a media element
+ * @param {HTMLMediaElement} media
+ */
+ ;
+
+ _proto.attachMedia = function attachMedia(media) {
+ logger.log('attachMedia');
+ this.media = media;
+ this.trigger(events.MEDIA_ATTACHING, {
+ media: media
+ });
+ }
+ /**
+ * Detach from the media
+ */
+ ;
+
+ _proto.detachMedia = function detachMedia() {
+ logger.log('detachMedia');
+ this.trigger(events.MEDIA_DETACHING);
+ this.media = null;
+ }
+ /**
+ * Set the source URL. Can be relative or absolute.
+ * @param {string} url
+ */
+ ;
+
+ _proto.loadSource = function loadSource(url) {
+ url = url_toolkit["buildAbsoluteURL"](window.location.href, url, {
+ alwaysNormalize: true
+ });
+ logger.log("loadSource:" + url);
+ this.url = url; // when attaching to a source URL, trigger a playlist load
+
+ this.trigger(events.MANIFEST_LOADING, {
+ url: url
+ });
+ }
+ /**
+ * Start loading data from the stream source.
+ * Depending on default config, client starts loading automatically when a source is set.
+ *
+ * @param {number} startPosition Set the start position to stream from
+ * @default -1 None (from earliest point)
+ */
+ ;
+
+ _proto.startLoad = function startLoad(startPosition) {
+ if (startPosition === void 0) {
+ startPosition = -1;
+ }
+
+ logger.log("startLoad(" + startPosition + ")");
+ this.networkControllers.forEach(function (controller) {
+ controller.startLoad(startPosition);
+ });
+ }
+ /**
+ * Stop loading of any stream data.
+ */
+ ;
+
+ _proto.stopLoad = function stopLoad() {
+ logger.log('stopLoad');
+ this.networkControllers.forEach(function (controller) {
+ controller.stopLoad();
+ });
+ }
+ /**
+ * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
+ */
+ ;
+
+ _proto.swapAudioCodec = function swapAudioCodec() {
+ logger.log('swapAudioCodec');
+ this.streamController.swapAudioCodec();
+ }
+ /**
+ * When the media-element fails, this allows to detach and then re-attach it
+ * as one call (convenience method).
+ *
+ * Automatic recovery of media-errors by this process is configurable.
+ */
+ ;
+
+ _proto.recoverMediaError = function recoverMediaError() {
+ logger.log('recoverMediaError');
+ var media = this.media;
+ this.detachMedia();
+
+ if (media) {
+ this.attachMedia(media);
+ }
+ }
+ /**
+ * @type {QualityLevel[]}
+ */
+ // todo(typescript-levelController)
+ ;
+
+ hls_createClass(Hls, [{
+ key: "levels",
+ get: function get() {
+ return this.levelController.levels;
+ }
+ /**
+ * Index of quality level currently played
+ * @type {number}
+ */
+
+ }, {
+ key: "currentLevel",
+ get: function get() {
+ return this.streamController.currentLevel;
+ }
+ /**
+ * Set quality level index immediately .
+ * This will flush the current buffer to replace the quality asap.
+ * That means playback will interrupt at least shortly to re-buffer and re-sync eventually.
+ * @type {number} -1 for automatic level selection
+ */
+ ,
+ set: function set(newLevel) {
+ logger.log("set currentLevel:" + newLevel);
+ this.loadLevel = newLevel;
+ this.streamController.immediateLevelSwitch();
+ }
+ /**
+ * Index of next quality level loaded as scheduled by stream controller.
+ * @type {number}
+ */
+
+ }, {
+ key: "nextLevel",
+ get: function get() {
+ return this.streamController.nextLevel;
+ }
+ /**
+ * Set quality level index for next loaded data.
+ * This will switch the video quality asap, without interrupting playback.
+ * May abort current loading of data, and flush parts of buffer (outside currently played fragment region).
+ * @type {number} -1 for automatic level selection
+ */
+ ,
+ set: function set(newLevel) {
+ logger.log("set nextLevel:" + newLevel);
+ this.levelController.manualLevel = newLevel;
+ this.streamController.nextLevelSwitch();
+ }
+ /**
+ * Return the quality level of the currently or last (of none is loaded currently) segment
+ * @type {number}
+ */
+
+ }, {
+ key: "loadLevel",
+ get: function get() {
+ return this.levelController.level;
+ }
+ /**
+ * Set quality level index for next loaded data in a conservative way.
+ * This will switch the quality without flushing, but interrupt current loading.
+ * Thus the moment when the quality switch will appear in effect will only be after the already existing buffer.
+ * @type {number} newLevel -1 for automatic level selection
+ */
+ ,
+ set: function set(newLevel) {
+ logger.log("set loadLevel:" + newLevel);
+ this.levelController.manualLevel = newLevel;
+ }
+ /**
+ * get next quality level loaded
+ * @type {number}
+ */
+
+ }, {
+ key: "nextLoadLevel",
+ get: function get() {
+ return this.levelController.nextLoadLevel;
+ }
+ /**
+ * Set quality level of next loaded segment in a fully "non-destructive" way.
+ * Same as `loadLevel` but will wait for next switch (until current loading is done).
+ * @type {number} level
+ */
+ ,
+ set: function set(level) {
+ this.levelController.nextLoadLevel = level;
+ }
+ /**
+ * Return "first level": like a default level, if not set,
+ * falls back to index of first level referenced in manifest
+ * @type {number}
+ */
+
+ }, {
+ key: "firstLevel",
+ get: function get() {
+ return Math.max(this.levelController.firstLevel, this.minAutoLevel);
+ }
+ /**
+ * Sets "first-level", see getter.
+ * @type {number}
+ */
+ ,
+ set: function set(newLevel) {
+ logger.log("set firstLevel:" + newLevel);
+ this.levelController.firstLevel = newLevel;
+ }
+ /**
+ * Return start level (level of first fragment that will be played back)
+ * if not overrided by user, first level appearing in manifest will be used as start level
+ * if -1 : automatic start level selection, playback will start from level matching download bandwidth
+ * (determined from download of first segment)
+ * @type {number}
+ */
+
+ }, {
+ key: "startLevel",
+ get: function get() {
+ return this.levelController.startLevel;
+ }
+ /**
+ * set start level (level of first fragment that will be played back)
+ * if not overrided by user, first level appearing in manifest will be used as start level
+ * if -1 : automatic start level selection, playback will start from level matching download bandwidth
+ * (determined from download of first segment)
+ * @type {number} newLevel
+ */
+ ,
+ set: function set(newLevel) {
+ logger.log("set startLevel:" + newLevel); // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
+
+ if (newLevel !== -1) {
+ newLevel = Math.max(newLevel, this.minAutoLevel);
+ }
+
+ this.levelController.startLevel = newLevel;
+ }
+ /**
+ * set dynamically set capLevelToPlayerSize against (`CapLevelController`)
+ *
+ * @type {boolean}
+ */
+
+ }, {
+ key: "capLevelToPlayerSize",
+ set: function set(shouldStartCapping) {
+ var newCapLevelToPlayerSize = !!shouldStartCapping;
+
+ if (newCapLevelToPlayerSize !== this.config.capLevelToPlayerSize) {
+ if (newCapLevelToPlayerSize) {
+ this.capLevelController.startCapping(); // If capping occurs, nextLevelSwitch will happen based on size.
+ } else {
+ this.capLevelController.stopCapping();
+ this.autoLevelCapping = -1;
+ this.streamController.nextLevelSwitch(); // Now we're uncapped, get the next level asap.
+ }
+
+ this.config.capLevelToPlayerSize = newCapLevelToPlayerSize;
+ }
+ }
+ /**
+ * Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
+ * @type {number}
+ */
+
+ }, {
+ key: "autoLevelCapping",
+ get: function get() {
+ return this._autoLevelCapping;
+ }
+ /**
+ * get bandwidth estimate
+ * @type {number}
+ */
+ ,
+
+ /**
+ * Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
+ * @type {number}
+ */
+ set: function set(newLevel) {
+ logger.log("set autoLevelCapping:" + newLevel);
+ this._autoLevelCapping = newLevel;
+ }
+ /**
+ * True when automatic level selection enabled
+ * @type {boolean}
+ */
+
+ }, {
+ key: "bandwidthEstimate",
+ get: function get() {
+ var bwEstimator = this.abrController._bwEstimator;
+ return bwEstimator ? bwEstimator.getEstimate() : NaN;
+ }
+ }, {
+ key: "autoLevelEnabled",
+ get: function get() {
+ return this.levelController.manualLevel === -1;
+ }
+ /**
+ * Level set manually (if any)
+ * @type {number}
+ */
+
+ }, {
+ key: "manualLevel",
+ get: function get() {
+ return this.levelController.manualLevel;
+ }
+ /**
+ * min level selectable in auto mode according to config.minAutoBitrate
+ * @type {number}
+ */
+
+ }, {
+ key: "minAutoLevel",
+ get: function get() {
+ var levels = this.levels,
+ minAutoBitrate = this.config.minAutoBitrate;
+ var len = levels ? levels.length : 0;
+
+ for (var i = 0; i < len; i++) {
+ var levelNextBitrate = levels[i].realBitrate ? Math.max(levels[i].realBitrate, levels[i].bitrate) : levels[i].bitrate;
+
+ if (levelNextBitrate > minAutoBitrate) {
+ return i;
+ }
+ }
+
+ return 0;
+ }
+ /**
+ * max level selectable in auto mode according to autoLevelCapping
+ * @type {number}
+ */
+
+ }, {
+ key: "maxAutoLevel",
+ get: function get() {
+ var levels = this.levels,
+ autoLevelCapping = this.autoLevelCapping;
+ var maxAutoLevel;
+
+ if (autoLevelCapping === -1 && levels && levels.length) {
+ maxAutoLevel = levels.length - 1;
+ } else {
+ maxAutoLevel = autoLevelCapping;
+ }
+
+ return maxAutoLevel;
+ }
+ /**
+ * next automatically selected quality level
+ * @type {number}
+ */
+
+ }, {
+ key: "nextAutoLevel",
+ get: function get() {
+ // ensure next auto level is between min and max auto level
+ return Math.min(Math.max(this.abrController.nextAutoLevel, this.minAutoLevel), this.maxAutoLevel);
+ }
+ /**
+ * this setter is used to force next auto level.
+ * this is useful to force a switch down in auto mode:
+ * in case of load error on level N, hls.js can set nextAutoLevel to N-1 for example)
+ * forced value is valid for one fragment. upon succesful frag loading at forced level,
+ * this value will be resetted to -1 by ABR controller.
+ * @type {number}
+ */
+ ,
+ set: function set(nextLevel) {
+ this.abrController.nextAutoLevel = Math.max(this.minAutoLevel, nextLevel);
+ }
+ /**
+ * @type {AudioTrack[]}
+ */
+ // todo(typescript-audioTrackController)
+
+ }, {
+ key: "audioTracks",
+ get: function get() {
+ var audioTrackController = this.audioTrackController;
+ return audioTrackController ? audioTrackController.audioTracks : [];
+ }
+ /**
+ * index of the selected audio track (index in audio track lists)
+ * @type {number}
+ */
+
+ }, {
+ key: "audioTrack",
+ get: function get() {
+ var audioTrackController = this.audioTrackController;
+ return audioTrackController ? audioTrackController.audioTrack : -1;
+ }
+ /**
+ * selects an audio track, based on its index in audio track lists
+ * @type {number}
+ */
+ ,
+ set: function set(audioTrackId) {
+ var audioTrackController = this.audioTrackController;
+
+ if (audioTrackController) {
+ audioTrackController.audioTrack = audioTrackId;
+ }
+ }
+ /**
+ * @type {Seconds}
+ */
+
+ }, {
+ key: "liveSyncPosition",
+ get: function get() {
+ return this.streamController.liveSyncPosition;
+ }
+ /**
+ * get alternate subtitle tracks list from playlist
+ * @type {SubtitleTrack[]}
+ */
+ // todo(typescript-subtitleTrackController)
+
+ }, {
+ key: "subtitleTracks",
+ get: function get() {
+ var subtitleTrackController = this.subtitleTrackController;
+ return subtitleTrackController ? subtitleTrackController.subtitleTracks : [];
+ }
+ /**
+ * index of the selected subtitle track (index in subtitle track lists)
+ * @type {number}
+ */
+
+ }, {
+ key: "subtitleTrack",
+ get: function get() {
+ var subtitleTrackController = this.subtitleTrackController;
+ return subtitleTrackController ? subtitleTrackController.subtitleTrack : -1;
+ }
+ /**
+ * select an subtitle track, based on its index in subtitle track lists
+ * @type {number}
+ */
+ ,
+ set: function set(subtitleTrackId) {
+ var subtitleTrackController = this.subtitleTrackController;
+
+ if (subtitleTrackController) {
+ subtitleTrackController.subtitleTrack = subtitleTrackId;
+ }
+ }
+ /**
+ * @type {boolean}
+ */
+
+ }, {
+ key: "subtitleDisplay",
+ get: function get() {
+ var subtitleTrackController = this.subtitleTrackController;
+ return subtitleTrackController ? subtitleTrackController.subtitleDisplay : false;
+ }
+ /**
+ * Enable/disable subtitle display rendering
+ * @type {boolean}
+ */
+ ,
+ set: function set(value) {
+ var subtitleTrackController = this.subtitleTrackController;
+
+ if (subtitleTrackController) {
+ subtitleTrackController.subtitleDisplay = value;
+ }
+ }
+ }]);
+
+ return Hls;
+}(Observer);
+
+hls_Hls.defaultConfig = void 0;
+
+
+/***/ })
+
+/******/ })["default"];
+});
+//# sourceMappingURL=hls.light.js.map
\ No newline at end of file
diff --git a/src/controller/gap-controller.js b/src/controller/gap-controller.js
index 5f4aaa05b08..e9573b21433 100644
--- a/src/controller/gap-controller.js
+++ b/src/controller/gap-controller.js
@@ -87,7 +87,14 @@ export default class GapController {
if (!this.moved && this.stalled) {
// Jump start gaps within jump threshold
const startJump = Math.max(nextStart, bufferInfo.start || 0) - currentTime;
- if (startJump > 0 && startJump <= MAX_START_GAP_JUMP) {
+
+ // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
+ // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
+ // that begins over 1 target duration after the video start position.
+ const level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
+ const isLive = level?.details?.live;
+ const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
+ if (startJump > 0 && startJump <= maxStartGapJump) {
this._trySkipBufferHole(null);
return;
}
diff --git a/src/controller/stream-controller.js b/src/controller/stream-controller.js
index d0869e372d0..3c0b26fb552 100644
--- a/src/controller/stream-controller.js
+++ b/src/controller/stream-controller.js
@@ -99,9 +99,9 @@ class StreamController extends BaseStreamController {
this._doTickIdle();
break;
case State.WAITING_LEVEL:
- var level = this.levels[this.level];
- // check if playlist is already loaded
- if (level && level.details) {
+ var details = this.levels[this.level]?.details;
+ // check if playlist is already loaded (must be current level for live)
+ if (details && (!details.live || this.levelLastLoaded === this.level)) {
this.state = State.IDLE;
}