/*
* XmlDigitalTeaching v0.0.1
* Copyright © Mon Jul 08 2024 20:52:49 GMT+0800 (China Standard Time) smile
* Released under the ISC License.
*/
import vue from 'vue';
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
function unwrapExports (x) {
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}
function createCommonjsModule(fn, module) {
return module = { exports: {} }, fn(module, module.exports), module.exports;
}
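// Illustrative sketch of how the CommonJS interop helpers above are driven by the bundler
// (hypothetical module, shown only for clarity):
//   var mod = createCommonjsModule(function (module, exports) {
//     module.exports = { greet: function () { return 'hi'; } };
//   });
//   mod.greet();        // => 'hi'
//   unwrapExports(mod); // returns mod itself here; for transpiled ES modules it returns mod['default']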
function getCjsExportFromNamespace (n) {
return n && n['default'] || n;
}
function getDefaultExportFromCjs(x){return x&&x.__esModule&&Object.prototype.hasOwnProperty.call(x,'default')?x['default']:x;}var urlToolkit={exports:{}};(function(module,exports){// see https://tools.ietf.org/html/rfc1808
(function(root){var URL_REGEX=/^(?=((?:[a-zA-Z0-9+\-.]+:)?))\1(?=((?:\/\/[^\/?#]*)?))\2(?=((?:(?:[^?#\/]*\/)*[^;?#\/]*)?))\3((?:;[^?#]*)?)(\?[^#]*)?(#[^]*)?$/;var FIRST_SEGMENT_REGEX=/^(?=([^\/?#]*))\1([^]*)$/;var SLASH_DOT_REGEX=/(?:\/|^)\.(?=\/)/g;var SLASH_DOT_DOT_REGEX=/(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;var URLToolkit={// If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
// E.g
// With opts.alwaysNormalize = false (default, spec compliant)
// http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
// With opts.alwaysNormalize = true (not spec compliant)
// http://a.com/b/cd + /e/f/../g => http://a.com/e/g
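// Additional worked examples (illustrative only; the URLs are hypothetical):
// http://a.com/b/cd + e/f => http://a.com/b/e/f (relative path replaces the last base segment)
// http://a.com/b/cd + //x.com/e => http://x.com/e (protocol-relative URL keeps only the base scheme)
// http://a.com/b/cd?q=1 + ?r=2 => http://a.com/b/cd?r=2 (empty path inherits the base path, query is replaced)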
buildAbsoluteURL:function(baseURL,relativeURL,opts){opts=opts||{};// remove any remaining space and CRLF
baseURL=baseURL.trim();relativeURL=relativeURL.trim();if(!relativeURL){// 2a) If the embedded URL is entirely empty, it inherits the
// entire base URL (i.e., is set equal to the base URL)
// and we are done.
if(!opts.alwaysNormalize){return baseURL;}var basePartsForNormalise=URLToolkit.parseURL(baseURL);if(!basePartsForNormalise){throw new Error('Error trying to parse base URL.');}basePartsForNormalise.path=URLToolkit.normalizePath(basePartsForNormalise.path);return URLToolkit.buildURLFromParts(basePartsForNormalise);}var relativeParts=URLToolkit.parseURL(relativeURL);if(!relativeParts){throw new Error('Error trying to parse relative URL.');}if(relativeParts.scheme){// 2b) If the embedded URL starts with a scheme name, it is
// interpreted as an absolute URL and we are done.
if(!opts.alwaysNormalize){return relativeURL;}relativeParts.path=URLToolkit.normalizePath(relativeParts.path);return URLToolkit.buildURLFromParts(relativeParts);}var baseParts=URLToolkit.parseURL(baseURL);if(!baseParts){throw new Error('Error trying to parse base URL.');}if(!baseParts.netLoc&&baseParts.path&&baseParts.path[0]!=='/'){// If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc
// This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
var pathParts=FIRST_SEGMENT_REGEX.exec(baseParts.path);baseParts.netLoc=pathParts[1];baseParts.path=pathParts[2];}if(baseParts.netLoc&&!baseParts.path){baseParts.path='/';}var builtParts={// 2c) Otherwise, the embedded URL inherits the scheme of
// the base URL.
scheme:baseParts.scheme,netLoc:relativeParts.netLoc,path:null,params:relativeParts.params,query:relativeParts.query,fragment:relativeParts.fragment};if(!relativeParts.netLoc){// 3) If the embedded URL's <net_loc> is non-empty, we skip to
// Step 7. Otherwise, the embedded URL inherits the <net_loc>
// (if any) of the base URL.
builtParts.netLoc=baseParts.netLoc;// 4) If the embedded URL path is preceded by a slash "/", the
// path is not relative and we skip to Step 7.
if(relativeParts.path[0]!=='/'){if(!relativeParts.path){// 5) If the embedded URL path is empty (and not preceded by a
// slash), then the embedded URL inherits the base URL path
builtParts.path=baseParts.path;// 5a) if the embedded URL's <params> is non-empty, we skip to
// step 7; otherwise, it inherits the <params> of the base
// URL (if any) and
if(!relativeParts.params){builtParts.params=baseParts.params;// 5b) if the embedded URL's <query> is non-empty, we skip to
// step 7; otherwise, it inherits the <query> of the base
// URL (if any) and we skip to step 7.
if(!relativeParts.query){builtParts.query=baseParts.query;}}}else {// 6) The last segment of the base URL's path (anything
// following the rightmost slash "/", or the entire path if no
// slash is present) is removed and the embedded URL's path is
// appended in its place.
var baseURLPath=baseParts.path;var newPath=baseURLPath.substring(0,baseURLPath.lastIndexOf('/')+1)+relativeParts.path;builtParts.path=URLToolkit.normalizePath(newPath);}}}if(builtParts.path===null){builtParts.path=opts.alwaysNormalize?URLToolkit.normalizePath(relativeParts.path):relativeParts.path;}return URLToolkit.buildURLFromParts(builtParts);},parseURL:function(url){var parts=URL_REGEX.exec(url);if(!parts){return null;}return {scheme:parts[1]||'',netLoc:parts[2]||'',path:parts[3]||'',params:parts[4]||'',query:parts[5]||'',fragment:parts[6]||''};},normalizePath:function(path){// The following operations are
// then applied, in order, to the new path:
// 6a) All occurrences of "./", where "." is a complete path
// segment, are removed.
// 6b) If the path ends with "." as a complete path segment,
// that "." is removed.
path=path.split('').reverse().join('').replace(SLASH_DOT_REGEX,'');// 6c) All occurrences of "<segment>/../", where <segment> is a
// complete path segment not equal to "..", are removed.
// Removal of these path segments is performed iteratively,
// removing the leftmost matching pattern on each iteration,
// until no matching pattern remains.
// 6d) If the path ends with "<segment>/..", where <segment> is a
// complete path segment not equal to "..", that
// "<segment>/.." is removed.
while(path.length!==(path=path.replace(SLASH_DOT_DOT_REGEX,'')).length){}return path.split('').reverse().join('');},buildURLFromParts:function(parts){return parts.scheme+parts.netLoc+parts.path+parts.params+parts.query+parts.fragment;}};module.exports=URLToolkit;})();})(urlToolkit);var urlToolkitExports=urlToolkit.exports;function ownKeys(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);r&&(o=o.filter(function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable;})),t.push.apply(t,o);}return t;}function _objectSpread2(e){for(var r=1;r<arguments.length;r++){var t=null!=arguments[r]?arguments[r]:{};r%2?ownKeys(Object(t),!0).forEach(function(r){_defineProperty(e,r,t[r]);}):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):ownKeys(Object(t)).forEach(function(r){Object.defineProperty(e,r,Object.getOwnPropertyDescriptor(t,r));});}return e;}function _toPrimitive(t,r){if("object"!=typeof t||!t)return t;var e=t[Symbol.toPrimitive];if(void 0!==e){var i=e.call(t,r||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.");}return ("string"===r?String:Number)(t);}function _toPropertyKey(t){var i=_toPrimitive(t,"string");return "symbol"==typeof i?i:String(i);}function _defineProperty(obj,key,value){key=_toPropertyKey(key);if(key in obj){Object.defineProperty(obj,key,{value:value,enumerable:true,configurable:true,writable:true});}else {obj[key]=value;}return obj;}function _extends(){_extends=Object.assign?Object.assign.bind():function(target){for(var i=1;i<arguments.length;i++){var source=arguments[i];for(var key in source){if(Object.prototype.hasOwnProperty.call(source,key)){target[key]=source[key];}}}return target;};return _extends.apply(this,arguments);}// https://caniuse.com/mdn-javascript_builtins_number_isfinite
const isFiniteNumber=Number.isFinite||function(value){return typeof value==='number'&&isFinite(value);};// https://caniuse.com/mdn-javascript_builtins_number_issafeinteger
const isSafeInteger=Number.isSafeInteger||function(value){return typeof value==='number'&&Math.abs(value)<=MAX_SAFE_INTEGER;};const MAX_SAFE_INTEGER=Number.MAX_SAFE_INTEGER||9007199254740991;let Events=/*#__PURE__*/function(Events){Events["MEDIA_ATTACHING"]="hlsMediaAttaching";Events["MEDIA_ATTACHED"]="hlsMediaAttached";Events["MEDIA_DETACHING"]="hlsMediaDetaching";Events["MEDIA_DETACHED"]="hlsMediaDetached";Events["BUFFER_RESET"]="hlsBufferReset";Events["BUFFER_CODECS"]="hlsBufferCodecs";Events["BUFFER_CREATED"]="hlsBufferCreated";Events["BUFFER_APPENDING"]="hlsBufferAppending";Events["BUFFER_APPENDED"]="hlsBufferAppended";Events["BUFFER_EOS"]="hlsBufferEos";Events["BUFFER_FLUSHING"]="hlsBufferFlushing";Events["BUFFER_FLUSHED"]="hlsBufferFlushed";Events["MANIFEST_LOADING"]="hlsManifestLoading";Events["MANIFEST_LOADED"]="hlsManifestLoaded";Events["MANIFEST_PARSED"]="hlsManifestParsed";Events["LEVEL_SWITCHING"]="hlsLevelSwitching";Events["LEVEL_SWITCHED"]="hlsLevelSwitched";Events["LEVEL_LOADING"]="hlsLevelLoading";Events["LEVEL_LOADED"]="hlsLevelLoaded";Events["LEVEL_UPDATED"]="hlsLevelUpdated";Events["LEVEL_PTS_UPDATED"]="hlsLevelPtsUpdated";Events["LEVELS_UPDATED"]="hlsLevelsUpdated";Events["AUDIO_TRACKS_UPDATED"]="hlsAudioTracksUpdated";Events["AUDIO_TRACK_SWITCHING"]="hlsAudioTrackSwitching";Events["AUDIO_TRACK_SWITCHED"]="hlsAudioTrackSwitched";Events["AUDIO_TRACK_LOADING"]="hlsAudioTrackLoading";Events["AUDIO_TRACK_LOADED"]="hlsAudioTrackLoaded";Events["SUBTITLE_TRACKS_UPDATED"]="hlsSubtitleTracksUpdated";Events["SUBTITLE_TRACKS_CLEARED"]="hlsSubtitleTracksCleared";Events["SUBTITLE_TRACK_SWITCH"]="hlsSubtitleTrackSwitch";Events["SUBTITLE_TRACK_LOADING"]="hlsSubtitleTrackLoading";Events["SUBTITLE_TRACK_LOADED"]="hlsSubtitleTrackLoaded";Events["SUBTITLE_FRAG_PROCESSED"]="hlsSubtitleFragProcessed";Events["CUES_PARSED"]="hlsCuesParsed";Events["NON_NATIVE_TEXT_TRACKS_FOUND"]="hlsNonNativeTextTracksFound";Events["INIT_PTS_FOUND"]="hlsInitPtsFound";Events["FRAG_LOADING"]="hlsFragLoading";Events["FRAG_LOAD_EMERGENCY_ABORTED"]="hlsFragLoadEmergencyAborted";Events["FRAG_LOADED"]="hlsFragLoaded";Events["FRAG_DECRYPTED"]="hlsFragDecrypted";Events["FRAG_PARSING_INIT_SEGMENT"]="hlsFragParsingInitSegment";Events["FRAG_PARSING_USERDATA"]="hlsFragParsingUserdata";Events["FRAG_PARSING_METADATA"]="hlsFragParsingMetadata";Events["FRAG_PARSED"]="hlsFragParsed";Events["FRAG_BUFFERED"]="hlsFragBuffered";Events["FRAG_CHANGED"]="hlsFragChanged";Events["FPS_DROP"]="hlsFpsDrop";Events["FPS_DROP_LEVEL_CAPPING"]="hlsFpsDropLevelCapping";Events["MAX_AUTO_LEVEL_UPDATED"]="hlsMaxAutoLevelUpdated";Events["ERROR"]="hlsError";Events["DESTROYING"]="hlsDestroying";Events["KEY_LOADING"]="hlsKeyLoading";Events["KEY_LOADED"]="hlsKeyLoaded";Events["LIVE_BACK_BUFFER_REACHED"]="hlsLiveBackBufferReached";Events["BACK_BUFFER_REACHED"]="hlsBackBufferReached";Events["STEERING_MANIFEST_LOADED"]="hlsSteeringManifestLoaded";return Events;}({});/**
* Defines each Event type and payload by Event name. Used in {@link hls.js#HlsEventEmitter} to strongly type the event listener API.
*/let ErrorTypes=/*#__PURE__*/function(ErrorTypes){ErrorTypes["NETWORK_ERROR"]="networkError";ErrorTypes["MEDIA_ERROR"]="mediaError";ErrorTypes["KEY_SYSTEM_ERROR"]="keySystemError";ErrorTypes["MUX_ERROR"]="muxError";ErrorTypes["OTHER_ERROR"]="otherError";return ErrorTypes;}({});let ErrorDetails=/*#__PURE__*/function(ErrorDetails){ErrorDetails["KEY_SYSTEM_NO_KEYS"]="keySystemNoKeys";ErrorDetails["KEY_SYSTEM_NO_ACCESS"]="keySystemNoAccess";ErrorDetails["KEY_SYSTEM_NO_SESSION"]="keySystemNoSession";ErrorDetails["KEY_SYSTEM_NO_CONFIGURED_LICENSE"]="keySystemNoConfiguredLicense";ErrorDetails["KEY_SYSTEM_LICENSE_REQUEST_FAILED"]="keySystemLicenseRequestFailed";ErrorDetails["KEY_SYSTEM_SERVER_CERTIFICATE_REQUEST_FAILED"]="keySystemServerCertificateRequestFailed";ErrorDetails["KEY_SYSTEM_SERVER_CERTIFICATE_UPDATE_FAILED"]="keySystemServerCertificateUpdateFailed";ErrorDetails["KEY_SYSTEM_SESSION_UPDATE_FAILED"]="keySystemSessionUpdateFailed";ErrorDetails["KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED"]="keySystemStatusOutputRestricted";ErrorDetails["KEY_SYSTEM_STATUS_INTERNAL_ERROR"]="keySystemStatusInternalError";ErrorDetails["MANIFEST_LOAD_ERROR"]="manifestLoadError";ErrorDetails["MANIFEST_LOAD_TIMEOUT"]="manifestLoadTimeOut";ErrorDetails["MANIFEST_PARSING_ERROR"]="manifestParsingError";ErrorDetails["MANIFEST_INCOMPATIBLE_CODECS_ERROR"]="manifestIncompatibleCodecsError";ErrorDetails["LEVEL_EMPTY_ERROR"]="levelEmptyError";ErrorDetails["LEVEL_LOAD_ERROR"]="levelLoadError";ErrorDetails["LEVEL_LOAD_TIMEOUT"]="levelLoadTimeOut";ErrorDetails["LEVEL_PARSING_ERROR"]="levelParsingError";ErrorDetails["LEVEL_SWITCH_ERROR"]="levelSwitchError";ErrorDetails["AUDIO_TRACK_LOAD_ERROR"]="audioTrackLoadError";ErrorDetails["AUDIO_TRACK_LOAD_TIMEOUT"]="audioTrackLoadTimeOut";ErrorDetails["SUBTITLE_LOAD_ERROR"]="subtitleTrackLoadError";ErrorDetails["SUBTITLE_TRACK_LOAD_TIMEOUT"]="subtitleTrackLoadTimeOut";ErrorDetails["FRAG_LOAD_ERROR"]="fragLoadError";ErrorDetails["FRAG_LOAD_TIMEOUT"]="fragLoadTimeOut";ErrorDetails["FRAG_DECRYPT_ERROR"]="fragDecryptError";ErrorDetails["FRAG_PARSING_ERROR"]="fragParsingError";ErrorDetails["FRAG_GAP"]="fragGap";ErrorDetails["REMUX_ALLOC_ERROR"]="remuxAllocError";ErrorDetails["KEY_LOAD_ERROR"]="keyLoadError";ErrorDetails["KEY_LOAD_TIMEOUT"]="keyLoadTimeOut";ErrorDetails["BUFFER_ADD_CODEC_ERROR"]="bufferAddCodecError";ErrorDetails["BUFFER_INCOMPATIBLE_CODECS_ERROR"]="bufferIncompatibleCodecsError";ErrorDetails["BUFFER_APPEND_ERROR"]="bufferAppendError";ErrorDetails["BUFFER_APPENDING_ERROR"]="bufferAppendingError";ErrorDetails["BUFFER_STALLED_ERROR"]="bufferStalledError";ErrorDetails["BUFFER_FULL_ERROR"]="bufferFullError";ErrorDetails["BUFFER_SEEK_OVER_HOLE"]="bufferSeekOverHole";ErrorDetails["BUFFER_NUDGE_ON_STALL"]="bufferNudgeOnStall";ErrorDetails["INTERNAL_EXCEPTION"]="internalException";ErrorDetails["INTERNAL_ABORTED"]="aborted";ErrorDetails["UNKNOWN"]="unknown";return ErrorDetails;}({});const noop=function noop(){};const fakeLogger={trace:noop,debug:noop,log:noop,warn:noop,info:noop,error:noop};let exportedLogger=fakeLogger;// let lastCallTime;
// function formatMsgWithTimeInfo(type, msg) {
// const now = Date.now();
// const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
// lastCallTime = now;
// msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
// return msg;
// }
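// Illustrative sketch of the two debug-config shapes accepted by enableLogs() below
// (assumed usage; `myWarn`/`myError` are hypothetical callbacks):
//   enableLogs(true, 'main');                             // route every level to self.console with a "[level] >" prefix
//   enableLogs({ warn: myWarn, error: myError }, 'main'); // levels present on the object use the custom
//                                                         // functions, the remaining levels fall back to console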
function consolePrintFn(type){const func=self.console[type];if(func){return func.bind(self.console,`[${type}] >`);}return noop;}function exportLoggerFunctions(debugConfig,...functions){functions.forEach(function(type){exportedLogger[type]=debugConfig[type]?debugConfig[type].bind(debugConfig):consolePrintFn(type);});}function enableLogs(debugConfig,id){// check that console is available
if(typeof console==='object'&&debugConfig===true||typeof debugConfig==='object'){exportLoggerFunctions(debugConfig,// Remove out from list here to hard-disable a log-level
// 'trace',
'debug','log','info','warn','error');// Some browsers don't allow using bind on the console object anyway
// fall back to the default if needed
try{exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.13"}`);}catch(e){exportedLogger=fakeLogger;}}else {exportedLogger=fakeLogger;}}const logger=exportedLogger;const DECIMAL_RESOLUTION_REGEX=/^(\d+)x(\d+)$/;const ATTR_LIST_REGEX=/(.+?)=(".*?"|.*?)(?:,|$)/g;// adapted from https://github.com/kanongil/node-m3u8parse/blob/master/attrlist.js
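// Illustrative sketch of the attribute-list parsing implemented by the class below
// (the attribute string is a hypothetical #EXT-X-STREAM-INF value):
//   const attrs = new AttrList('BANDWIDTH=1280000,RESOLUTION=1920x1080,CODECS="avc1.640028,mp4a.40.2"');
//   attrs.decimalInteger('BANDWIDTH');     // => 1280000
//   attrs.decimalResolution('RESOLUTION'); // => { width: 1920, height: 1080 }
//   attrs.enumeratedString('CODECS');      // => 'avc1.640028,mp4a.40.2' (surrounding quotes stripped)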
class AttrList{constructor(attrs){if(typeof attrs==='string'){attrs=AttrList.parseAttrList(attrs);}_extends(this,attrs);}get clientAttrs(){return Object.keys(this).filter(attr=>attr.substring(0,2)==='X-');}decimalInteger(attrName){const intValue=parseInt(this[attrName],10);if(intValue>Number.MAX_SAFE_INTEGER){return Infinity;}return intValue;}hexadecimalInteger(attrName){if(this[attrName]){let stringValue=(this[attrName]||'0x').slice(2);stringValue=(stringValue.length&1?'0':'')+stringValue;const value=new Uint8Array(stringValue.length/2);for(let i=0;i<stringValue.length/2;i++){value[i]=parseInt(stringValue.slice(i*2,i*2+2),16);}return value;}else {return null;}}hexadecimalIntegerAsNumber(attrName){const intValue=parseInt(this[attrName],16);if(intValue>Number.MAX_SAFE_INTEGER){return Infinity;}return intValue;}decimalFloatingPoint(attrName){return parseFloat(this[attrName]);}optionalFloat(attrName,defaultValue){const value=this[attrName];return value?parseFloat(value):defaultValue;}enumeratedString(attrName){return this[attrName];}bool(attrName){return this[attrName]==='YES';}decimalResolution(attrName){const res=DECIMAL_RESOLUTION_REGEX.exec(this[attrName]);if(res===null){return undefined;}return {width:parseInt(res[1],10),height:parseInt(res[2],10)};}static parseAttrList(input){let match;const attrs={};const quote='"';ATTR_LIST_REGEX.lastIndex=0;while((match=ATTR_LIST_REGEX.exec(input))!==null){let value=match[2];if(value.indexOf(quote)===0&&value.lastIndexOf(quote)===value.length-1){value=value.slice(1,-1);}const name=match[1].trim();attrs[name]=value;}return attrs;}}// Avoid exporting const enum so that these values can be inlined
function isDateRangeCueAttribute(attrName){return attrName!=="ID"&&attrName!=="CLASS"&&attrName!=="START-DATE"&&attrName!=="DURATION"&&attrName!=="END-DATE"&&attrName!=="END-ON-NEXT";}function isSCTE35Attribute(attrName){return attrName==="SCTE35-OUT"||attrName==="SCTE35-IN";}class DateRange{constructor(dateRangeAttr,dateRangeWithSameId){this.attr=void 0;this._startDate=void 0;this._endDate=void 0;this._badValueForSameId=void 0;if(dateRangeWithSameId){const previousAttr=dateRangeWithSameId.attr;for(const key in previousAttr){if(Object.prototype.hasOwnProperty.call(dateRangeAttr,key)&&dateRangeAttr[key]!==previousAttr[key]){logger.warn(`DATERANGE tag attribute: "${key}" does not match for tags with ID: "${dateRangeAttr.ID}"`);this._badValueForSameId=key;break;}}// Merge DateRange tags with the same ID
dateRangeAttr=_extends(new AttrList({}),previousAttr,dateRangeAttr);}this.attr=dateRangeAttr;this._startDate=new Date(dateRangeAttr["START-DATE"]);if("END-DATE"in this.attr){const endDate=new Date(this.attr["END-DATE"]);if(isFiniteNumber(endDate.getTime())){this._endDate=endDate;}}}get id(){return this.attr.ID;}get class(){return this.attr.CLASS;}get startDate(){return this._startDate;}get endDate(){if(this._endDate){return this._endDate;}const duration=this.duration;if(duration!==null){return new Date(this._startDate.getTime()+duration*1000);}return null;}get duration(){if("DURATION"in this.attr){const duration=this.attr.decimalFloatingPoint("DURATION");if(isFiniteNumber(duration)){return duration;}}else if(this._endDate){return (this._endDate.getTime()-this._startDate.getTime())/1000;}return null;}get plannedDuration(){if("PLANNED-DURATION"in this.attr){return this.attr.decimalFloatingPoint("PLANNED-DURATION");}return null;}get endOnNext(){return this.attr.bool("END-ON-NEXT");}get isValid(){return !!this.id&&!this._badValueForSameId&&isFiniteNumber(this.startDate.getTime())&&(this.duration===null||this.duration>=0)&&(!this.endOnNext||!!this.class);}}class LoadStats{constructor(){this.aborted=false;this.loaded=0;this.retry=0;this.total=0;this.chunkCount=0;this.bwEstimate=0;this.loading={start:0,first:0,end:0};this.parsing={start:0,end:0};this.buffering={start:0,first:0,end:0};}}var ElementaryStreamTypes={AUDIO:"audio",VIDEO:"video",AUDIOVIDEO:"audiovideo"};class BaseSegment{constructor(baseurl){this._byteRange=null;this._url=null;// baseurl is the URL to the playlist
this.baseurl=void 0;// relurl is the portion of the URL that comes from inside the playlist.
this.relurl=void 0;// Holds the types of data this fragment supports
this.elementaryStreams={[ElementaryStreamTypes.AUDIO]:null,[ElementaryStreamTypes.VIDEO]:null,[ElementaryStreamTypes.AUDIOVIDEO]:null};this.baseurl=baseurl;}// setByteRange converts a EXT-X-BYTERANGE attribute into a two element array
setByteRange(value,previous){const params=value.split('@',2);let start;if(params.length===1){start=(previous==null?void 0:previous.byteRangeEndOffset)||0;}else {start=parseInt(params[1]);}this._byteRange=[start,parseInt(params[0])+start];}get byteRange(){if(!this._byteRange){return [];}return this._byteRange;}get byteRangeStartOffset(){return this.byteRange[0];}get byteRangeEndOffset(){return this.byteRange[1];}get url(){if(!this._url&&this.baseurl&&this.relurl){this._url=urlToolkitExports.buildAbsoluteURL(this.baseurl,this.relurl,{alwaysNormalize:true});}return this._url||'';}set url(value){this._url=value;}}/**
* Object representing parsed data from an HLS Segment. Found in {@link hls.js#LevelDetails.fragments}.
*/class Fragment extends BaseSegment{constructor(type,baseurl){super(baseurl);this._decryptdata=null;this.rawProgramDateTime=null;this.programDateTime=null;this.tagList=[];// EXTINF has to be present for a m3u8 to be considered valid
this.duration=0;// sn notates the sequence number for a segment, and if set to a string can be 'initSegment'
this.sn=0;// levelkeys are the EXT-X-KEY tags that apply to this segment for decryption
// core difference from the private field _decryptdata is the lack of the initialized IV
// _decryptdata will set the IV for this segment based on the segment number in the fragment
this.levelkeys=void 0;// A string representing the fragment type
this.type=void 0;// A reference to the loader. Set while the fragment is loading, and removed afterwards. Used to abort fragment loading
this.loader=null;// A reference to the key loader. Set while the key is loading, and removed afterwards. Used to abort key loading
this.keyLoader=null;// The level/track index to which the fragment belongs
this.level=-1;// The continuity counter of the fragment
this.cc=0;// The starting Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
this.startPTS=void 0;// The ending Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
this.endPTS=void 0;// The starting Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
this.startDTS=void 0;// The ending Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
this.endDTS=void 0;// The start time of the fragment, as listed in the manifest. Updated after transmux complete.
this.start=0;// Set by `updateFragPTSDTS` in level-helper
this.deltaPTS=void 0;// The maximum starting Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
this.maxStartPTS=void 0;// The minimum ending Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
this.minEndPTS=void 0;// Load/parse timing information
this.stats=new LoadStats();// Init Segment bytes (unset for media segments)
this.data=void 0;// A flag indicating whether the segment was downloaded in order to test bitrate, and was not buffered
this.bitrateTest=false;// #EXTINF segment title
this.title=null;// The Media Initialization Section for this segment
this.initSegment=null;// Fragment is the last fragment in the media playlist
this.endList=void 0;// Fragment is marked by an EXT-X-GAP tag indicating that it does not contain media data and should not be loaded
this.gap=void 0;// Deprecated
this.urlId=0;this.type=type;}get decryptdata(){const{levelkeys}=this;if(!levelkeys&&!this._decryptdata){return null;}if(!this._decryptdata&&this.levelkeys&&!this.levelkeys.NONE){const key=this.levelkeys.identity;if(key){this._decryptdata=key.getDecryptData(this.sn);}else {const keyFormats=Object.keys(this.levelkeys);if(keyFormats.length===1){return this._decryptdata=this.levelkeys[keyFormats[0]].getDecryptData(this.sn);}}}return this._decryptdata;}get end(){return this.start+this.duration;}get endProgramDateTime(){if(this.programDateTime===null){return null;}if(!isFiniteNumber(this.programDateTime)){return null;}const duration=!isFiniteNumber(this.duration)?0:this.duration;return this.programDateTime+duration*1000;}get encrypted(){var _this$_decryptdata;// At the m3u8-parser level we need to add support for manifest signalled keyformats
// when we want the fragment to start reporting that it is encrypted.
// Currently, keyFormat will only be set for identity keys
if((_this$_decryptdata=this._decryptdata)!=null&&_this$_decryptdata.encrypted){return true;}else if(this.levelkeys){const keyFormats=Object.keys(this.levelkeys);const len=keyFormats.length;if(len>1||len===1&&this.levelkeys[keyFormats[0]].encrypted){return true;}}return false;}setKeyFormat(keyFormat){if(this.levelkeys){const key=this.levelkeys[keyFormat];if(key&&!this._decryptdata){this._decryptdata=key.getDecryptData(this.sn);}}}abortRequests(){var _this$loader,_this$keyLoader;(_this$loader=this.loader)==null?void 0:_this$loader.abort();(_this$keyLoader=this.keyLoader)==null?void 0:_this$keyLoader.abort();}setElementaryStreamInfo(type,startPTS,endPTS,startDTS,endDTS,partial=false){const{elementaryStreams}=this;const info=elementaryStreams[type];if(!info){elementaryStreams[type]={startPTS,endPTS,startDTS,endDTS,partial};return;}info.startPTS=Math.min(info.startPTS,startPTS);info.endPTS=Math.max(info.endPTS,endPTS);info.startDTS=Math.min(info.startDTS,startDTS);info.endDTS=Math.max(info.endDTS,endDTS);}clearElementaryStreamInfo(){const{elementaryStreams}=this;elementaryStreams[ElementaryStreamTypes.AUDIO]=null;elementaryStreams[ElementaryStreamTypes.VIDEO]=null;elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO]=null;}}/**
* Object representing parsed data from an HLS Partial Segment. Found in {@link hls.js#LevelDetails.partList}.
*/class Part extends BaseSegment{constructor(partAttrs,frag,baseurl,index,previous){super(baseurl);this.fragOffset=0;this.duration=0;this.gap=false;this.independent=false;this.relurl=void 0;this.fragment=void 0;this.index=void 0;this.stats=new LoadStats();this.duration=partAttrs.decimalFloatingPoint('DURATION');this.gap=partAttrs.bool('GAP');this.independent=partAttrs.bool('INDEPENDENT');this.relurl=partAttrs.enumeratedString('URI');this.fragment=frag;this.index=index;const byteRange=partAttrs.enumeratedString('BYTERANGE');if(byteRange){this.setByteRange(byteRange,previous);}if(previous){this.fragOffset=previous.fragOffset+previous.duration;}}get start(){return this.fragment.start+this.fragOffset;}get end(){return this.start+this.duration;}get loaded(){const{elementaryStreams}=this;return !!(elementaryStreams.audio||elementaryStreams.video||elementaryStreams.audiovideo);}}const DEFAULT_TARGET_DURATION=10;/**
* Object representing parsed data from an HLS Media Playlist. Found in {@link hls.js#Level.details}.
*/class LevelDetails{constructor(baseUrl){this.PTSKnown=false;this.alignedSliding=false;this.averagetargetduration=void 0;this.endCC=0;this.endSN=0;this.fragments=void 0;this.fragmentHint=void 0;this.partList=null;this.dateRanges=void 0;this.live=true;this.ageHeader=0;this.advancedDateTime=void 0;this.updated=true;this.advanced=true;this.availabilityDelay=void 0;// Manifest reload synchronization
this.misses=0;this.startCC=0;this.startSN=0;this.startTimeOffset=null;this.targetduration=0;this.totalduration=0;this.type=null;this.url=void 0;this.m3u8='';this.version=null;this.canBlockReload=false;this.canSkipUntil=0;this.canSkipDateRanges=false;this.skippedSegments=0;this.recentlyRemovedDateranges=void 0;this.partHoldBack=0;this.holdBack=0;this.partTarget=0;this.preloadHint=void 0;this.renditionReports=void 0;this.tuneInGoal=0;this.deltaUpdateFailed=void 0;this.driftStartTime=0;this.driftEndTime=0;this.driftStart=0;this.driftEnd=0;this.encryptedFragments=void 0;this.playlistParsingError=null;this.variableList=null;this.hasVariableRefs=false;this.fragments=[];this.encryptedFragments=[];this.dateRanges={};this.url=baseUrl;}reloaded(previous){if(!previous){this.advanced=true;this.updated=true;return;}const partSnDiff=this.lastPartSn-previous.lastPartSn;const partIndexDiff=this.lastPartIndex-previous.lastPartIndex;this.updated=this.endSN!==previous.endSN||!!partIndexDiff||!!partSnDiff||!this.live;this.advanced=this.endSN>previous.endSN||partSnDiff>0||partSnDiff===0&&partIndexDiff>0;if(this.updated||this.advanced){this.misses=Math.floor(previous.misses*0.6);}else {this.misses=previous.misses+1;}this.availabilityDelay=previous.availabilityDelay;}get hasProgramDateTime(){if(this.fragments.length){return isFiniteNumber(this.fragments[this.fragments.length-1].programDateTime);}return false;}get levelTargetDuration(){return this.averagetargetduration||this.targetduration||DEFAULT_TARGET_DURATION;}get drift(){const runTime=this.driftEndTime-this.driftStartTime;if(runTime>0){const runDuration=this.driftEnd-this.driftStart;return runDuration*1000/runTime;}return 1;}get edge(){return this.partEnd||this.fragmentEnd;}get partEnd(){var _this$partList;if((_this$partList=this.partList)!=null&&_this$partList.length){return this.partList[this.partList.length-1].end;}return this.fragmentEnd;}get fragmentEnd(){var _this$fragments;if((_this$fragments=this.fragments)!=null&&_this$fragments.length){return this.fragments[this.fragments.length-1].end;}return 0;}get age(){if(this.advancedDateTime){return Math.max(Date.now()-this.advancedDateTime,0)/1000;}return 0;}get lastPartIndex(){var _this$partList2;if((_this$partList2=this.partList)!=null&&_this$partList2.length){return this.partList[this.partList.length-1].index;}return -1;}get lastPartSn(){var _this$partList3;if((_this$partList3=this.partList)!=null&&_this$partList3.length){return this.partList[this.partList.length-1].fragment.sn;}return this.endSN;}}function base64Decode(base64encodedStr){return Uint8Array.from(atob(base64encodedStr),c=>c.charCodeAt(0));}function getKeyIdBytes(str){const keyIdbytes=strToUtf8array(str).subarray(0,16);const paddedkeyIdbytes=new Uint8Array(16);paddedkeyIdbytes.set(keyIdbytes,16-keyIdbytes.length);return paddedkeyIdbytes;}function changeEndianness(keyId){const swap=function swap(array,from,to){const cur=array[from];array[from]=array[to];array[to]=cur;};swap(keyId,0,3);swap(keyId,1,2);swap(keyId,4,5);swap(keyId,6,7);}function convertDataUriToArrayBytes(uri){// data:[<media type][;attribute=value][;base64],<data>
const colonsplit=uri.split(':');let keydata=null;if(colonsplit[0]==='data'&&colonsplit.length===2){const semicolonsplit=colonsplit[1].split(';');const commasplit=semicolonsplit[semicolonsplit.length-1].split(',');if(commasplit.length===2){const isbase64=commasplit[0]==='base64';const data=commasplit[1];if(isbase64){semicolonsplit.splice(-1,1);// remove from processing
keydata=base64Decode(data);}else {keydata=getKeyIdBytes(data);}}}return keydata;}function strToUtf8array(str){return Uint8Array.from(unescape(encodeURIComponent(str)),c=>c.charCodeAt(0));}/** returns `undefined` is `self` is missing, e.g. in node */const optionalSelf=typeof self!=='undefined'?self:undefined;/**
* @see https://developer.mozilla.org/en-US/docs/Web/API/Navigator/requestMediaKeySystemAccess
*/var KeySystems={CLEARKEY:"org.w3.clearkey",FAIRPLAY:"com.apple.fps",PLAYREADY:"com.microsoft.playready",WIDEVINE:"com.widevine.alpha"};// Playlist #EXT-X-KEY KEYFORMAT values
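// Illustrative note: keySystemFormatToKeySystemDomain() below maps these KEYFORMAT strings to the
// key-system names above, e.g. the Widevine KEYFORMAT "urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed"
// resolves to "com.widevine.alpha".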
var KeySystemFormats={CLEARKEY:"org.w3.clearkey",FAIRPLAY:"com.apple.streamingkeydelivery",PLAYREADY:"com.microsoft.playready",WIDEVINE:"urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed"};function keySystemFormatToKeySystemDomain(format){switch(format){case KeySystemFormats.FAIRPLAY:return KeySystems.FAIRPLAY;case KeySystemFormats.PLAYREADY:return KeySystems.PLAYREADY;case KeySystemFormats.WIDEVINE:return KeySystems.WIDEVINE;case KeySystemFormats.CLEARKEY:return KeySystems.CLEARKEY;}}// System IDs for which we can extract a key ID from "encrypted" event PSSH
var KeySystemIds={WIDEVINE:"edef8ba979d64acea3c827dcd51d21ed"};function keySystemIdToKeySystemDomain(systemId){if(systemId===KeySystemIds.WIDEVINE){return KeySystems.WIDEVINE;// } else if (systemId === KeySystemIds.PLAYREADY) {
// return KeySystems.PLAYREADY;
// } else if (systemId === KeySystemIds.CENC || systemId === KeySystemIds.CLEARKEY) {
// return KeySystems.CLEARKEY;
}}function keySystemDomainToKeySystemFormat(keySystem){switch(keySystem){case KeySystems.FAIRPLAY:return KeySystemFormats.FAIRPLAY;case KeySystems.PLAYREADY:return KeySystemFormats.PLAYREADY;case KeySystems.WIDEVINE:return KeySystemFormats.WIDEVINE;case KeySystems.CLEARKEY:return KeySystemFormats.CLEARKEY;}}function getKeySystemsForConfig(config){const{drmSystems,widevineLicenseUrl}=config;const keySystemsToAttempt=drmSystems?[KeySystems.FAIRPLAY,KeySystems.WIDEVINE,KeySystems.PLAYREADY,KeySystems.CLEARKEY].filter(keySystem=>!!drmSystems[keySystem]):[];if(!keySystemsToAttempt[KeySystems.WIDEVINE]&&widevineLicenseUrl){keySystemsToAttempt.push(KeySystems.WIDEVINE);}return keySystemsToAttempt;}const requestMediaKeySystemAccess=function(_optionalSelf$navigat){if(optionalSelf!=null&&(_optionalSelf$navigat=optionalSelf.navigator)!=null&&_optionalSelf$navigat.requestMediaKeySystemAccess){return self.navigator.requestMediaKeySystemAccess.bind(self.navigator);}else {return null;}}();/**
* @see https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemConfiguration
*/function getSupportedMediaKeySystemConfigurations(keySystem,audioCodecs,videoCodecs,drmSystemOptions){let initDataTypes;switch(keySystem){case KeySystems.FAIRPLAY:initDataTypes=['cenc','sinf'];break;case KeySystems.WIDEVINE:case KeySystems.PLAYREADY:initDataTypes=['cenc'];break;case KeySystems.CLEARKEY:initDataTypes=['cenc','keyids'];break;default:throw new Error(`Unknown key-system: ${keySystem}`);}return createMediaKeySystemConfigurations(initDataTypes,audioCodecs,videoCodecs,drmSystemOptions);}function createMediaKeySystemConfigurations(initDataTypes,audioCodecs,videoCodecs,drmSystemOptions){const baseConfig={initDataTypes:initDataTypes,persistentState:drmSystemOptions.persistentState||'optional',distinctiveIdentifier:drmSystemOptions.distinctiveIdentifier||'optional',sessionTypes:drmSystemOptions.sessionTypes||[drmSystemOptions.sessionType||'temporary'],audioCapabilities:audioCodecs.map(codec=>({contentType:`audio/mp4; codecs="${codec}"`,robustness:drmSystemOptions.audioRobustness||'',encryptionScheme:drmSystemOptions.audioEncryptionScheme||null})),videoCapabilities:videoCodecs.map(codec=>({contentType:`video/mp4; codecs="${codec}"`,robustness:drmSystemOptions.videoRobustness||'',encryptionScheme:drmSystemOptions.videoEncryptionScheme||null}))};return [baseConfig];}function sliceUint8(array,start,end){// @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
// It always exists in the TypeScript definition so fails, but it fails at runtime on IE11.
return Uint8Array.prototype.slice?array.slice(start,end):new Uint8Array(Array.prototype.slice.call(array,start,end));}// breaking up those two types in order to clarify what is happening in the decoding path.
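// Worked example for the ID3 helpers below (bytes are illustrative): a minimal, empty ID3v2.4 tag is
// the 10-byte header 0x49 0x44 0x33 ('ID3'), 0x04 0x00 (version), 0x00 (flags), 0x00 0x00 0x00 0x00
// (synchsafe size), so isHeader$2(tag, 0) returns true, readSize(tag, 6) returns 0, and
// getID3Data(tag, 0) returns just those 10 header bytes.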
/**
* Returns true if an ID3 header can be found at offset in data
* @param data - The data to search
* @param offset - The offset at which to start searching
*/const isHeader$2=(data,offset)=>{/*
* http://id3.org/id3v2.3.0
* [0] = 'I'
* [1] = 'D'
* [2] = '3'
* [3,4] = {Version}
* [5] = {Flags}
* [6-9] = {ID3 Size}
*
* An ID3v2 tag can be detected with the following pattern:
* $49 44 33 yy yy xx zz zz zz zz
* Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
*/if(offset+10<=data.length){// look for 'ID3' identifier
if(data[offset]===0x49&&data[offset+1]===0x44&&data[offset+2]===0x33){// check version is within range
if(data[offset+3]<0xff&&data[offset+4]<0xff){// check size is within range
if(data[offset+6]<0x80&&data[offset+7]<0x80&&data[offset+8]<0x80&&data[offset+9]<0x80){return true;}}}}return false;};/**
* Returns true if an ID3 footer can be found at offset in data
* @param data - The data to search
* @param offset - The offset at which to start searching
*/const isFooter=(data,offset)=>{/*
* The footer is a copy of the header, but with a different identifier
*/if(offset+10<=data.length){// look for '3DI' identifier
if(data[offset]===0x33&&data[offset+1]===0x44&&data[offset+2]===0x49){// check version is within range
if(data[offset+3]<0xff&&data[offset+4]<0xff){// check size is within range
if(data[offset+6]<0x80&&data[offset+7]<0x80&&data[offset+8]<0x80&&data[offset+9]<0x80){return true;}}}}return false;};/**
* Returns any adjacent ID3 tags found in data starting at offset, as one block of data
* @param data - The data to search in
* @param offset - The offset at which to start searching
* @returns the block of data containing any ID3 tags found
* or *undefined* if no header is found at the starting offset
*/const getID3Data=(data,offset)=>{const front=offset;let length=0;while(isHeader$2(data,offset)){// ID3 header is 10 bytes
length+=10;const size=readSize(data,offset+6);length+=size;if(isFooter(data,offset+10)){// ID3 footer is 10 bytes
length+=10;}offset+=length;}if(length>0){return data.subarray(front,front+length);}return undefined;};const readSize=(data,offset)=>{let size=0;size=(data[offset]&0x7f)<<21;size|=(data[offset+1]&0x7f)<<14;size|=(data[offset+2]&0x7f)<<7;size|=data[offset+3]&0x7f;return size;};const canParse$2=(data,offset)=>{return isHeader$2(data,offset)&&readSize(data,offset+6)+10<=data.length-offset;};/**
* Searches for the Elementary Stream timestamp found in the ID3 data chunk
* @param data - Block of data containing one or more ID3 tags
*/const getTimeStamp=data=>{const frames=getID3Frames(data);for(let i=0;i<frames.length;i++){const frame=frames[i];if(isTimeStampFrame(frame)){return readTimeStamp(frame);}}return undefined;};/**
* Returns true if the ID3 frame is an Elementary Stream timestamp frame
*/const isTimeStampFrame=frame=>{return frame&&frame.key==='PRIV'&&frame.info==='com.apple.streaming.transportStreamTimestamp';};const getFrameData=data=>{/*
Frame ID $xx xx xx xx (four characters)
Size $xx xx xx xx
Flags $xx xx
*/const type=String.fromCharCode(data[0],data[1],data[2],data[3]);const size=readSize(data,4);// skip frame id, size, and flags
const offset=10;return {type,size,data:data.subarray(offset,offset+size)};};/**
* Returns an array of ID3 frames found in all the ID3 tags in the id3Data
* @param id3Data - The ID3 data containing one or more ID3 tags
*/const getID3Frames=id3Data=>{let offset=0;const frames=[];while(isHeader$2(id3Data,offset)){const size=readSize(id3Data,offset+6);// skip past ID3 header
offset+=10;const end=offset+size;// loop through frames in the ID3 tag
while(offset+8<end){const frameData=getFrameData(id3Data.subarray(offset));const frame=decodeFrame(frameData);if(frame){frames.push(frame);}// skip frame header and frame data
offset+=frameData.size+10;}if(isFooter(id3Data,offset)){offset+=10;}}return frames;};const decodeFrame=frame=>{if(frame.type==='PRIV'){return decodePrivFrame(frame);}else if(frame.type[0]==='W'){return decodeURLFrame(frame);}return decodeTextFrame(frame);};const decodePrivFrame=frame=>{/*
Format: <text string>\0<binary data>
*/if(frame.size<2){return undefined;}const owner=utf8ArrayToStr(frame.data,true);const privateData=new Uint8Array(frame.data.subarray(owner.length+1));return {key:frame.type,info:owner,data:privateData.buffer};};const decodeTextFrame=frame=>{if(frame.size<2){return undefined;}if(frame.type==='TXXX'){/*
Format:
[0] = {Text Encoding}
[1-?] = {Description}\0{Value}
*/let index=1;const description=utf8ArrayToStr(frame.data.subarray(index),true);index+=description.length+1;const value=utf8ArrayToStr(frame.data.subarray(index));return {key:frame.type,info:description,data:value};}/*
Format:
[0] = {Text Encoding}
[1-?] = {Value}
*/const text=utf8ArrayToStr(frame.data.subarray(1));return {key:frame.type,data:text};};const decodeURLFrame=frame=>{if(frame.type==='WXXX'){/*
Format:
[0] = {Text Encoding}
[1-?] = {Description}\0{URL}
*/if(frame.size<2){return undefined;}let index=1;const description=utf8ArrayToStr(frame.data.subarray(index),true);index+=description.length+1;const value=utf8ArrayToStr(frame.data.subarray(index));return {key:frame.type,info:description,data:value};}/*
Format:
[0-?] = {URL}
*/const url=utf8ArrayToStr(frame.data);return {key:frame.type,data:url};};const readTimeStamp=timeStampFrame=>{if(timeStampFrame.data.byteLength===8){const data=new Uint8Array(timeStampFrame.data);// timestamp is 33 bit expressed as a big-endian eight-octet number,
// with the upper 31 bits set to zero.
const pts33Bit=data[3]&0x1;let timestamp=(data[4]<<23)+(data[5]<<15)+(data[6]<<7)+data[7];timestamp/=45;if(pts33Bit){timestamp+=47721858.84;}// 2^32 / 90
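// Note on the constants above (interpretation): the shifts assemble roughly half of the lower 32 bits of
// the 33-bit, 90 kHz PES timestamp, so dividing by 45 rather than 90 converts ticks to milliseconds;
// 47721858.84 ~= 2^32 / 90 restores the contribution of the dropped 33rd bit.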
return Math.round(timestamp);}return undefined;};// http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
/* utf.js - UTF-8 <=> UTF-16 conversion
*
* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
* Version: 1.0
* LastModified: Dec 25 1999
* This library is free. You can redistribute it and/or modify it.
*/const utf8ArrayToStr=(array,exitOnNull=false)=>{const decoder=getTextDecoder();if(decoder){const decoded=decoder.decode(array);if(exitOnNull){// grab up to the first null
const idx=decoded.indexOf('\0');return idx!==-1?decoded.substring(0,idx):decoded;}// remove any null characters
return decoded.replace(/\0/g,'');}const len=array.length;let c;let char2;let char3;let out='';let i=0;while(i<len){c=array[i++];if(c===0x00&&exitOnNull){return out;}else if(c===0x00||c===0x03){// If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
continue;}switch(c>>4){case 0:case 1:case 2:case 3:case 4:case 5:case 6:case 7:// 0xxxxxxx
out+=String.fromCharCode(c);break;case 12:case 13:// 110x xxxx 10xx xxxx
char2=array[i++];out+=String.fromCharCode((c&0x1f)<<6|char2&0x3f);break;case 14:// 1110 xxxx 10xx xxxx 10xx xxxx
char2=array[i++];char3=array[i++];out+=String.fromCharCode((c&0x0f)<<12|(char2&0x3f)<<6|(char3&0x3f)<<0);break;}}return out;};let decoder;function getTextDecoder(){// On Play Station 4, TextDecoder is defined but partially implemented.
// Manual decoding option is preferable
if(navigator.userAgent.includes('PlayStation 4')){return;}if(!decoder&&typeof self.TextDecoder!=='undefined'){decoder=new self.TextDecoder('utf-8');}return decoder;}/**
* hex dump helper class
*/const Hex={hexDump:function(array){let str='';for(let i=0;i<array.length;i++){let h=array[i].toString(16);if(h.length<2){h='0'+h;}str+=h;}return str;}};const UINT32_MAX$1=Math.pow(2,32)-1;const push=[].push;// We are using fixed track IDs for driving the MP4 remuxer
// instead of following the TS PIDs.
// There is no reason not to do this and some browsers/SourceBuffer-demuxers
// may not like if there are TrackID "switches"
// See https://github.com/video-dev/hls.js/issues/1331
// Here we are mapping our internal track types to constant MP4 track IDs
// With MSE currently one can only have one track of each, and we are muxing
// whatever video/audio rendition in them.
const RemuxerTrackIdConfig={video:1,audio:2,id3:3,text:4};function bin2str(data){return String.fromCharCode.apply(null,data);}function readUint16(buffer,offset){const val=buffer[offset]<<8|buffer[offset+1];return val<0?65536+val:val;}function readUint32(buffer,offset){const val=readSint32(buffer,offset);return val<0?4294967296+val:val;}function readUint64(buffer,offset){let result=readUint32(buffer,offset);result*=Math.pow(2,32);result+=readUint32(buffer,offset+4);return result;}function readSint32(buffer,offset){return buffer[offset]<<24|buffer[offset+1]<<16|buffer[offset+2]<<8|buffer[offset+3];}function writeUint32(buffer,offset,value){buffer[offset]=value>>24;buffer[offset+1]=value>>16&0xff;buffer[offset+2]=value>>8&0xff;buffer[offset+3]=value&0xff;}// Find "moof" box
function hasMoofData(data){const end=data.byteLength;for(let i=0;i<end;){const size=readUint32(data,i);if(size>8&&data[i+4]===0x6d&&data[i+5]===0x6f&&data[i+6]===0x6f&&data[i+7]===0x66){return true;}i=size>1?i+size:end;}return false;}// Find the data for a box specified by its path
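// Illustrative usage (a sketch; `initSegment` and `trak` stand for Uint8Arrays of fMP4 data):
//   findBox(initSegment, ['moov', 'trak'])  // => payload of every moov>trak box found
//   findBox(trak, ['mdia', 'mdhd'])[0]      // => payload of the first mdia>mdhd box, or undefined
// Each returned subarray excludes the 8-byte size/type header, which is how parseInitSegment() below consumes it.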
function findBox(data,path){const results=[];if(!path.length){// short-circuit the search for empty paths
return results;}const end=data.byteLength;for(let i=0;i<end;){const size=readUint32(data,i);const type=bin2str(data.subarray(i+4,i+8));const endbox=size>1?i+size:end;if(type===path[0]){if(path.length===1){// this is the end of the path and we've found the box we were
// looking for
results.push(data.subarray(i+8,endbox));}else {// recursively search for the next box along the path
const subresults=findBox(data.subarray(i+8,endbox),path.slice(1));if(subresults.length){push.apply(results,subresults);}}}i=endbox;}// we've finished searching all of data
return results;}function parseSegmentIndex(sidx){const references=[];const version=sidx[0];// set initial offset, we skip the reference ID (not needed)
let index=8;const timescale=readUint32(sidx,index);index+=4;let earliestPresentationTime=0;let firstOffset=0;if(version===0){earliestPresentationTime=readUint32(sidx,index);firstOffset=readUint32(sidx,index+4);index+=8;}else {earliestPresentationTime=readUint64(sidx,index);firstOffset=readUint64(sidx,index+8);index+=16;}// skip reserved
index+=2;let startByte=sidx.length+firstOffset;const referencesCount=readUint16(sidx,index);index+=2;for(let i=0;i<referencesCount;i++){let referenceIndex=index;const referenceInfo=readUint32(sidx,referenceIndex);referenceIndex+=4;const referenceSize=referenceInfo&0x7fffffff;const referenceType=(referenceInfo&0x80000000)>>>31;if(referenceType===1){logger.warn('SIDX has hierarchical references (not supported)');return null;}const subsegmentDuration=readUint32(sidx,referenceIndex);referenceIndex+=4;references.push({referenceSize,subsegmentDuration,// unscaled
info:{duration:subsegmentDuration/timescale,start:startByte,end:startByte+referenceSize-1}});startByte+=referenceSize;// Skipping 1 bit for |startsWithSap|, 3 bits for |sapType|, and 28 bits
// for |sapDelta|.
referenceIndex+=4;// skip to next ref
index=referenceIndex;}return {earliestPresentationTime,timescale,version,referencesCount,references};}/**
* Parses an MP4 initialization segment and extracts stream type and
* timescale values for any declared tracks. Timescale values indicate the
* number of clock ticks per second to assume for time-based values
* elsewhere in the MP4.
*
* To determine the start time of an MP4, you need two pieces of
* information: the timescale unit and the earliest base media decode
* time. Multiple timescales can be specified within an MP4 but the
* base media decode time is always expressed in the timescale from
* the media header box for the track:
* ```
* moov > trak > mdia > mdhd.timescale
* moov > trak > mdia > hdlr
* ```
* @param initSegment the bytes of the init segment
* @returns a hash of track type to timescale values or null if
* the init segment is malformed.
*/function parseInitSegment(initSegment){const result=[];const traks=findBox(initSegment,['moov','trak']);for(let i=0;i<traks.length;i++){const trak=traks[i];const tkhd=findBox(trak,['tkhd'])[0];if(tkhd){let version=tkhd[0];const trackId=readUint32(tkhd,version===0?12:20);const mdhd=findBox(trak,['mdia','mdhd'])[0];if(mdhd){version=mdhd[0];const timescale=readUint32(mdhd,version===0?12:20);const hdlr=findBox(trak,['mdia','hdlr'])[0];if(hdlr){const hdlrType=bin2str(hdlr.subarray(8,12));const type={soun:ElementaryStreamTypes.AUDIO,vide:ElementaryStreamTypes.VIDEO}[hdlrType];if(type){// Parse codec details
const stsd=findBox(trak,['mdia','minf','stbl','stsd'])[0];const stsdData=parseStsd(stsd);result[trackId]={timescale,type};result[type]=_objectSpread2({timescale,id:trackId},stsdData);}}}}}const trex=findBox(initSegment,['moov','mvex','trex']);trex.forEach(trex=>{const trackId=readUint32(trex,4);const track=result[trackId];if(track){track.default={duration:readUint32(trex,12),flags:readUint32(trex,20)};}});return result;}function parseStsd(stsd){const sampleEntries=stsd.subarray(8);const sampleEntriesEnd=sampleEntries.subarray(8+78);const fourCC=bin2str(sampleEntries.subarray(4,8));let codec=fourCC;const encrypted=fourCC==='enca'||fourCC==='encv';if(encrypted){const encBox=findBox(sampleEntries,[fourCC])[0];const encBoxChildren=encBox.subarray(fourCC==='enca'?28:78);const sinfs=findBox(encBoxChildren,['sinf']);sinfs.forEach(sinf=>{const schm=findBox(sinf,['schm'])[0];if(schm){const scheme=bin2str(schm.subarray(4,8));if(scheme==='cbcs'||scheme==='cenc'){const frma=findBox(sinf,['frma'])[0];if(frma){// for encrypted content codec fourCC will be in frma
codec=bin2str(frma);}}}});}switch(codec){case'avc1':case'avc2':case'avc3':case'avc4':{// extract profile + compatibility + level out of avcC box
const avcCBox=findBox(sampleEntriesEnd,['avcC'])[0];codec+='.'+toHex(avcCBox[1])+toHex(avcCBox[2])+toHex(avcCBox[3]);break;}case'mp4a':{const codecBox=findBox(sampleEntries,[fourCC])[0];const esdsBox=findBox(codecBox.subarray(28),['esds'])[0];if(esdsBox&&esdsBox.length>12){let i=4;// ES Descriptor tag
if(esdsBox[i++]!==0x03){break;}i=skipBERInteger(esdsBox,i);i+=2;// skip es_id;
const flags=esdsBox[i++];if(flags&0x80){i+=2;// skip dependency es_id
}if(flags&0x40){i+=esdsBox[i++];// skip URL
}// Decoder config descriptor
if(esdsBox[i++]!==0x04){break;}i=skipBERInteger(esdsBox,i);const objectType=esdsBox[i++];if(objectType===0x40){codec+='.'+toHex(objectType);}else {break;}i+=12;// Decoder specific info
if(esdsBox[i++]!==0x05){break;}i=skipBERInteger(esdsBox,i);const firstByte=esdsBox[i++];let audioObjectType=(firstByte&0xf8)>>3;if(audioObjectType===31){audioObjectType+=1+((firstByte&0x7)<<3)+((esdsBox[i]&0xe0)>>5);}codec+='.'+audioObjectType;}break;}case'hvc1':case'hev1':{const hvcCBox=findBox(sampleEntriesEnd,['hvcC'])[0];const profileByte=hvcCBox[1];const profileSpace=['','A','B','C'][profileByte>>6];const generalProfileIdc=profileByte&0x1f;const profileCompat=readUint32(hvcCBox,2);const tierFlag=(profileByte&0x20)>>5?'H':'L';const levelIDC=hvcCBox[12];const constraintIndicator=hvcCBox.subarray(6,12);codec+='.'+profileSpace+generalProfileIdc;codec+='.'+profileCompat.toString(16).toUpperCase();codec+='.'+tierFlag+levelIDC;let constraintString='';for(let i=constraintIndicator.length;i--;){const byte=constraintIndicator[i];if(byte||constraintString){const encodedByte=byte.toString(16).toUpperCase();constraintString='.'+encodedByte+constraintString;}}codec+=constraintString;break;}case'dvh1':case'dvhe':{const dvcCBox=findBox(sampleEntriesEnd,['dvcC'])[0];const profile=dvcCBox[2]>>1&0x7f;const level=dvcCBox[2]<<5&0x20|dvcCBox[3]>>3&0x1f;codec+='.'+addLeadingZero(profile)+'.'+addLeadingZero(level);break;}case'vp09':{const vpcCBox=findBox(sampleEntriesEnd,['vpcC'])[0];const profile=vpcCBox[4];const level=vpcCBox[5];const bitDepth=vpcCBox[6]>>4&0x0f;codec+='.'+addLeadingZero(profile)+'.'+addLeadingZero(level)+'.'+addLeadingZero(bitDepth);break;}case'av01':{const av1CBox=findBox(sampleEntriesEnd,['av1C'])[0];const profile=av1CBox[1]>>>5;const level=av1CBox[1]&0x1f;const tierFlag=av1CBox[2]>>>7?'H':'M';const highBitDepth=(av1CBox[2]&0x40)>>6;const twelveBit=(av1CBox[2]&0x20)>>5;const bitDepth=profile===2&&highBitDepth?twelveBit?12:10:highBitDepth?10:8;const monochrome=(av1CBox[2]&0x10)>>4;const chromaSubsamplingX=(av1CBox[2]&0x08)>>3;const chromaSubsamplingY=(av1CBox[2]&0x04)>>2;const chromaSamplePosition=av1CBox[2]&0x03;// TODO: parse color_description_present_flag
// default it to BT.709/limited range for now
// more info https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-syntax
const colorPrimaries=1;const transferCharacteristics=1;const matrixCoefficients=1;const videoFullRangeFlag=0;codec+='.'+profile+'.'+addLeadingZero(level)+tierFlag+'.'+addLeadingZero(bitDepth)+'.'+monochrome+'.'+chromaSubsamplingX+chromaSubsamplingY+chromaSamplePosition+'.'+addLeadingZero(colorPrimaries)+'.'+addLeadingZero(transferCharacteristics)+'.'+addLeadingZero(matrixCoefficients)+'.'+videoFullRangeFlag;break;}}return {codec,encrypted};}function skipBERInteger(bytes,i){const limit=i+5;while(bytes[i++]&0x80&&i<limit){}return i;}function toHex(x){return ('0'+x.toString(16).toUpperCase()).slice(-2);}function addLeadingZero(num){return (num<10?'0':'')+num;}function patchEncyptionData(initSegment,decryptdata){if(!initSegment||!decryptdata){return initSegment;}const keyId=decryptdata.keyId;if(keyId&&decryptdata.isCommonEncryption){const traks=findBox(initSegment,['moov','trak']);traks.forEach(trak=>{const stsd=findBox(trak,['mdia','minf','stbl','stsd'])[0];// skip the sample entry count
const sampleEntries=stsd.subarray(8);let encBoxes=findBox(sampleEntries,['enca']);const isAudio=encBoxes.length>0;if(!isAudio){encBoxes=findBox(sampleEntries,['encv']);}encBoxes.forEach(enc=>{const encBoxChildren=isAudio?enc.subarray(28):enc.subarray(78);const sinfBoxes=findBox(encBoxChildren,['sinf']);sinfBoxes.forEach(sinf=>{const tenc=parseSinf(sinf);if(tenc){// Look for default key id (keyID offset is always 8 within the tenc box):
const tencKeyId=tenc.subarray(8,24);if(!tencKeyId.some(b=>b!==0)){logger.log(`[eme] Patching keyId in 'enc${isAudio?'a':'v'}>sinf>>tenc' box: ${Hex.hexDump(tencKeyId)} -> ${Hex.hexDump(keyId)}`);tenc.set(keyId,8);}}});});});}return initSegment;}function parseSinf(sinf){const schm=findBox(sinf,['schm'])[0];if(schm){const scheme=bin2str(schm.subarray(4,8));if(scheme==='cbcs'||scheme==='cenc'){return findBox(sinf,['schi','tenc'])[0];}}logger.error(`[eme] missing 'schm' box`);return null;}/**
* Determine the base media decode start time, in seconds, for an MP4
* fragment. If multiple fragments are specified, the earliest time is
* returned.
*
* The base media decode time can be parsed from track fragment
* metadata:
* ```
* moof > traf > tfdt.baseMediaDecodeTime
* ```
* It requires the timescale value from the mdhd to interpret.
*
* @param initData - a hash of track type to timescale values
* @param fmp4 - the bytes of the mp4 fragment
* @returns the earliest base media decode start time for the
* fragment, in seconds
*/function getStartDTS(initData,fmp4){// we need info from two children of each track fragment box
return findBox(fmp4,['moof','traf']).reduce((result,traf)=>{const tfdt=findBox(traf,['tfdt'])[0];const version=tfdt[0];const start=findBox(traf,['tfhd']).reduce((result,tfhd)=>{// get the track id from the tfhd
const id=readUint32(tfhd,4);const track=initData[id];if(track){let baseTime=readUint32(tfdt,4);if(version===1){// If value is too large, assume signed 64-bit. Negative track fragment decode times are invalid, but they exist in the wild.
// This prevents large values from being used for initPTS, which can cause playlist sync issues.
// https://github.com/video-dev/hls.js/issues/5303
if(baseTime===UINT32_MAX$1){logger.warn(`[mp4-demuxer]: Ignoring assumed invalid signed 64-bit track fragment decode time`);return result;}baseTime*=UINT32_MAX$1+1;baseTime+=readUint32(tfdt,8);}// assume a 90kHz clock if no timescale was specified
const scale=track.timescale||90e3;// convert base time to seconds
const startTime=baseTime/scale;if(isFiniteNumber(startTime)&&(result===null||startTime<result)){return startTime;}}return result;},null);if(start!==null&&isFiniteNumber(start)&&(result===null||start<result)){return start;}return result;},null);}/*
For Reference:
aligned(8) class TrackFragmentHeaderBox
extends FullBox(‘tfhd’, 0, tf_flags){
unsigned int(32) track_ID;
|
||
// all the following are optional fields
|
||
unsigned int(64) base_data_offset;
|
||
unsigned int(32) sample_description_index;
|
||
unsigned int(32) default_sample_duration;
|
||
unsigned int(32) default_sample_size;
|
||
unsigned int(32) default_sample_flags
|
||
}
|
||
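Illustrative note (not part of the spec excerpt above): getDuration() below locates the optional
fields by testing tf_flags bits, e.g.
   tf_flags & 0x000002 -> sample_description_index is present
   tf_flags & 0x000008 -> default_sample_duration is present; it is read at byte offset 12 when
                          0x000002 is also set, otherwise at byte offset 8
(track_ID always sits at byte offset 4, right after the FullBox version/flags word.)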
*/function getDuration(data,initData){let rawDuration=0;let videoDuration=0;let audioDuration=0;const trafs=findBox(data,['moof','traf']);for(let i=0;i<trafs.length;i++){const traf=trafs[i];// There is only one tfhd & trun per traf
// This is true for CMAF style content, and we should perhaps check the ftyp
// and only look for a single trun then, but for ISOBMFF we should check
// for multiple track runs.
const tfhd=findBox(traf,['tfhd'])[0];// get the track id from the tfhd
const id=readUint32(tfhd,4);const track=initData[id];if(!track){continue;}const trackDefault=track.default;const tfhdFlags=readUint32(tfhd,0)|(trackDefault==null?void 0:trackDefault.flags);let sampleDuration=trackDefault==null?void 0:trackDefault.duration;if(tfhdFlags&0x000008){// 0x000008 indicates the presence of the default_sample_duration field
if(tfhdFlags&0x000002){// 0x000002 indicates the presence of the sample_description_index field, which precedes default_sample_duration
// If present, the default_sample_duration exists at byte offset 12
sampleDuration=readUint32(tfhd,12);}else {// Otherwise, the duration is at byte offset 8
sampleDuration=readUint32(tfhd,8);}}// assume a 90kHz clock if no timescale was specified
const timescale=track.timescale||90e3;const truns=findBox(traf,['trun']);for(let j=0;j<truns.length;j++){rawDuration=computeRawDurationFromSamples(truns[j]);if(!rawDuration&&sampleDuration){const sampleCount=readUint32(truns[j],4);rawDuration=sampleDuration*sampleCount;}if(track.type===ElementaryStreamTypes.VIDEO){videoDuration+=rawDuration/timescale;}else if(track.type===ElementaryStreamTypes.AUDIO){audioDuration+=rawDuration/timescale;}}}if(videoDuration===0&&audioDuration===0){// If duration samples are not available in the traf use sidx subsegment_duration
let sidxMinStart=Infinity;let sidxMaxEnd=0;let sidxDuration=0;const sidxs=findBox(data,['sidx']);for(let i=0;i<sidxs.length;i++){const sidx=parseSegmentIndex(sidxs[i]);if(sidx!=null&&sidx.references){sidxMinStart=Math.min(sidxMinStart,sidx.earliestPresentationTime/sidx.timescale);const subSegmentDuration=sidx.references.reduce((dur,ref)=>dur+ref.info.duration||0,0);sidxMaxEnd=Math.max(sidxMaxEnd,subSegmentDuration+sidx.earliestPresentationTime/sidx.timescale);sidxDuration=sidxMaxEnd-sidxMinStart;}}if(sidxDuration&&isFiniteNumber(sidxDuration)){return sidxDuration;}}if(videoDuration){return videoDuration;}return audioDuration;}/*
For Reference:
aligned(8) class TrackRunBox
extends FullBox(‘trun’, version, tr_flags) {
unsigned int(32) sample_count;
// the following are optional fields
signed int(32) data_offset;
unsigned int(32) first_sample_flags;
// all fields in the following array are optional
{
unsigned int(32) sample_duration;
unsigned int(32) sample_size;
unsigned int(32) sample_flags
if (version == 0)
{ unsigned int(32) sample_composition_time_offset; }
else
{ signed int(32) sample_composition_time_offset; }
}[ sample_count ]
}
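Illustrative note (not part of the spec excerpt above): computeRawDurationFromSamples() below
walks this layout using tr_flags, advancing 4 bytes per field that is flagged present. For a
hypothetical tr_flags of 0x000301 (data_offset + sample_duration + sample_size present):
   fixed part: flags (offset 0), sample_count (offset 4), data_offset (offset 8)
   per sample: sample_duration (+4), then sample_size (+4)
and the per-sample durations are summed over sample_count entries.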
*/function computeRawDurationFromSamples(trun){const flags=readUint32(trun,0);// Flags are at offset 0, non-optional sample_count is at offset 4. Therefore we start 8 bytes in.
// Each field is an int32, which is 4 bytes
let offset=8;// data-offset-present flag
if(flags&0x000001){offset+=4;}// first-sample-flags-present flag
if(flags&0x000004){offset+=4;}let duration=0;const sampleCount=readUint32(trun,4);for(let i=0;i<sampleCount;i++){// sample-duration-present flag
if(flags&0x000100){const sampleDuration=readUint32(trun,offset);duration+=sampleDuration;offset+=4;}// sample-size-present flag
if(flags&0x000200){offset+=4;}// sample-flags-present flag
if(flags&0x000400){offset+=4;}// sample-composition-time-offsets-present flag
if(flags&0x000800){offset+=4;}}return duration;}function offsetStartDTS(initData,fmp4,timeOffset){findBox(fmp4,['moof','traf']).forEach(traf=>{findBox(traf,['tfhd']).forEach(tfhd=>{// get the track id from the tfhd
const id=readUint32(tfhd,4);const track=initData[id];if(!track){return;}// assume a 90kHz clock if no timescale was specified
const timescale=track.timescale||90e3;// get the base media decode time from the tfdt
findBox(traf,['tfdt']).forEach(tfdt=>{const version=tfdt[0];const offset=timeOffset*timescale;if(offset){let baseMediaDecodeTime=readUint32(tfdt,4);if(version===0){baseMediaDecodeTime-=offset;baseMediaDecodeTime=Math.max(baseMediaDecodeTime,0);writeUint32(tfdt,4,baseMediaDecodeTime);}else {baseMediaDecodeTime*=Math.pow(2,32);baseMediaDecodeTime+=readUint32(tfdt,8);baseMediaDecodeTime-=offset;baseMediaDecodeTime=Math.max(baseMediaDecodeTime,0);const upper=Math.floor(baseMediaDecodeTime/(UINT32_MAX$1+1));const lower=Math.floor(baseMediaDecodeTime%(UINT32_MAX$1+1));writeUint32(tfdt,4,upper);writeUint32(tfdt,8,lower);}}});});});}// TODO: Check if the last moof+mdat pair is part of the valid range
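/* Illustrative sketch (not part of the library; `appendedBytes`, `transmux` and `carryOver` are
   hypothetical names): segmentValidRange() below splits a progressively appended buffer at the
   start of its last moof, so complete moof+mdat pairs can be processed now while the tail waits
   for more data.
   //   const { valid, remainder } = segmentValidRange(appendedBytes);
   //   if (valid) transmux(valid);   // complete moof+mdat pairs
   //   carryOver = remainder;        // incomplete trailing data, joined to the next chunk
   //                                 // with appendUint8Array() defined right after */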
function segmentValidRange(data){const segmentedRange={valid:null,remainder:null};const moofs=findBox(data,['moof']);if(moofs.length<2){segmentedRange.remainder=data;return segmentedRange;}const last=moofs[moofs.length-1];// Offset by 8 bytes; findBox offsets the start by as much
segmentedRange.valid=sliceUint8(data,0,last.byteOffset-8);segmentedRange.remainder=sliceUint8(data,last.byteOffset-8);return segmentedRange;}function appendUint8Array(data1,data2){const temp=new Uint8Array(data1.length+data2.length);temp.set(data1);temp.set(data2,data1.length);return temp;}function parseSamples(timeOffset,track){const seiSamples=[];const videoData=track.samples;const timescale=track.timescale;const trackId=track.id;let isHEVCFlavor=false;const moofs=findBox(videoData,['moof']);moofs.map(moof=>{const moofOffset=moof.byteOffset-8;const trafs=findBox(moof,['traf']);trafs.map(traf=>{// get the base media decode time from the tfdt
const baseTime=findBox(traf,['tfdt']).map(tfdt=>{const version=tfdt[0];let result=readUint32(tfdt,4);if(version===1){result*=Math.pow(2,32);result+=readUint32(tfdt,8);}return result/timescale;})[0];if(baseTime!==undefined){timeOffset=baseTime;}return findBox(traf,['tfhd']).map(tfhd=>{const id=readUint32(tfhd,4);const tfhdFlags=readUint32(tfhd,0)&0xffffff;const baseDataOffsetPresent=(tfhdFlags&0x000001)!==0;const sampleDescriptionIndexPresent=(tfhdFlags&0x000002)!==0;const defaultSampleDurationPresent=(tfhdFlags&0x000008)!==0;let defaultSampleDuration=0;const defaultSampleSizePresent=(tfhdFlags&0x000010)!==0;let defaultSampleSize=0;const defaultSampleFlagsPresent=(tfhdFlags&0x000020)!==0;let tfhdOffset=8;if(id===trackId){if(baseDataOffsetPresent){tfhdOffset+=8;}if(sampleDescriptionIndexPresent){tfhdOffset+=4;}if(defaultSampleDurationPresent){defaultSampleDuration=readUint32(tfhd,tfhdOffset);tfhdOffset+=4;}if(defaultSampleSizePresent){defaultSampleSize=readUint32(tfhd,tfhdOffset);tfhdOffset+=4;}if(defaultSampleFlagsPresent){tfhdOffset+=4;}if(track.type==='video'){isHEVCFlavor=isHEVC(track.codec);}findBox(traf,['trun']).map(trun=>{const version=trun[0];const flags=readUint32(trun,0)&0xffffff;const dataOffsetPresent=(flags&0x000001)!==0;let dataOffset=0;const firstSampleFlagsPresent=(flags&0x000004)!==0;const sampleDurationPresent=(flags&0x000100)!==0;let sampleDuration=0;const sampleSizePresent=(flags&0x000200)!==0;let sampleSize=0;const sampleFlagsPresent=(flags&0x000400)!==0;const sampleCompositionOffsetsPresent=(flags&0x000800)!==0;let compositionOffset=0;const sampleCount=readUint32(trun,4);let trunOffset=8;// past version, flags, and sample count
if(dataOffsetPresent){dataOffset=readUint32(trun,trunOffset);trunOffset+=4;}if(firstSampleFlagsPresent){trunOffset+=4;}let sampleOffset=dataOffset+moofOffset;for(let ix=0;ix<sampleCount;ix++){if(sampleDurationPresent){sampleDuration=readUint32(trun,trunOffset);trunOffset+=4;}else {sampleDuration=defaultSampleDuration;}if(sampleSizePresent){sampleSize=readUint32(trun,trunOffset);trunOffset+=4;}else {sampleSize=defaultSampleSize;}if(sampleFlagsPresent){trunOffset+=4;}if(sampleCompositionOffsetsPresent){if(version===0){compositionOffset=readUint32(trun,trunOffset);}else {compositionOffset=readSint32(trun,trunOffset);}trunOffset+=4;}if(track.type===ElementaryStreamTypes.VIDEO){let naluTotalSize=0;while(naluTotalSize<sampleSize){const naluSize=readUint32(videoData,sampleOffset);sampleOffset+=4;if(isSEIMessage(isHEVCFlavor,videoData[sampleOffset])){const data=videoData.subarray(sampleOffset,sampleOffset+naluSize);parseSEIMessageFromNALu(data,isHEVCFlavor?2:1,timeOffset+compositionOffset/timescale,seiSamples);}sampleOffset+=naluSize;naluTotalSize+=naluSize+4;}}timeOffset+=sampleDuration/timescale;}});}});});});return seiSamples;}function isHEVC(codec){if(!codec){return false;}const delimit=codec.indexOf('.');const baseCodec=delimit<0?codec:codec.substring(0,delimit);return baseCodec==='hvc1'||baseCodec==='hev1'||// Dolby Vision
baseCodec==='dvh1'||baseCodec==='dvhe';}function isSEIMessage(isHEVCFlavor,naluHeader){if(isHEVCFlavor){const naluType=naluHeader>>1&0x3f;return naluType===39||naluType===40;}else {const naluType=naluHeader&0x1f;return naluType===6;}}function parseSEIMessageFromNALu(unescapedData,headerSize,pts,samples){const data=discardEPB(unescapedData);let seiPtr=0;// skip nal header
seiPtr+=headerSize;let payloadType=0;let payloadSize=0;let b=0;while(seiPtr<data.length){payloadType=0;do{if(seiPtr>=data.length){break;}b=data[seiPtr++];payloadType+=b;}while(b===0xff);// Parse payload size.
payloadSize=0;do{if(seiPtr>=data.length){break;}b=data[seiPtr++];payloadSize+=b;}while(b===0xff);const leftOver=data.length-seiPtr;// Create a variable to process the payload
let payPtr=seiPtr;// Increment the seiPtr to the end of the payload
if(payloadSize<leftOver){seiPtr+=payloadSize;}else if(payloadSize>leftOver){// Some type of corruption has happened?
logger.error(`Malformed SEI payload. ${payloadSize} is too small, only ${leftOver} bytes left to parse.`);// We might be able to parse some data, but let's be safe and ignore it.
break;}if(payloadType===4){const countryCode=data[payPtr++];if(countryCode===181){const providerCode=readUint16(data,payPtr);payPtr+=2;if(providerCode===49){const userStructure=readUint32(data,payPtr);payPtr+=4;if(userStructure===0x47413934){const userDataType=data[payPtr++];// Raw CEA-608 bytes wrapped in CEA-708 packet
if(userDataType===3){const firstByte=data[payPtr++];const totalCCs=0x1f&firstByte;const enabled=0x40&firstByte;const totalBytes=enabled?2+totalCCs*3:0;const byteArray=new Uint8Array(totalBytes);if(enabled){byteArray[0]=firstByte;for(let i=1;i<totalBytes;i++){byteArray[i]=data[payPtr++];}}samples.push({type:userDataType,payloadType,pts,bytes:byteArray});}}}}}else if(payloadType===5){if(payloadSize>16){const uuidStrArray=[];for(let i=0;i<16;i++){const _b=data[payPtr++].toString(16);uuidStrArray.push(_b.length==1?'0'+_b:_b);if(i===3||i===5||i===7||i===9){uuidStrArray.push('-');}}const length=payloadSize-16;const userDataBytes=new Uint8Array(length);for(let i=0;i<length;i++){userDataBytes[i]=data[payPtr++];}samples.push({payloadType,pts,uuid:uuidStrArray.join(''),userData:utf8ArrayToStr(userDataBytes),userDataBytes});}}}}/**
* remove Emulation Prevention bytes from a RBSP
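* Illustrative example: within the payload a run such as
*   ... 00 00 03 01 ...   becomes   ... 00 00 01 ...
* i.e. the 0x03 emulation prevention byte inserted after two zero bytes is dropped and all
* other bytes are copied unchanged.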
*/function discardEPB(data){const length=data.byteLength;const EPBPositions=[];let i=1;// Find all `Emulation Prevention Bytes`
while(i<length-2){if(data[i]===0&&data[i+1]===0&&data[i+2]===0x03){EPBPositions.push(i+2);i+=2;}else {i++;}}// If no Emulation Prevention Bytes were found just return the original
// array
if(EPBPositions.length===0){return data;}// Create a new array to hold the NAL unit data
const newLength=length-EPBPositions.length;const newData=new Uint8Array(newLength);let sourceIndex=0;for(i=0;i<newLength;sourceIndex++,i++){if(sourceIndex===EPBPositions[0]){// Skip this byte
sourceIndex++;// Remove this position index
EPBPositions.shift();}newData[i]=data[sourceIndex];}return newData;}function parseEmsg(data){const version=data[0];let schemeIdUri='';let value='';let timeScale=0;let presentationTimeDelta=0;let presentationTime=0;let eventDuration=0;let id=0;let offset=0;if(version===0){while(bin2str(data.subarray(offset,offset+1))!=='\0'){schemeIdUri+=bin2str(data.subarray(offset,offset+1));offset+=1;}schemeIdUri+=bin2str(data.subarray(offset,offset+1));offset+=1;while(bin2str(data.subarray(offset,offset+1))!=='\0'){value+=bin2str(data.subarray(offset,offset+1));offset+=1;}value+=bin2str(data.subarray(offset,offset+1));offset+=1;timeScale=readUint32(data,12);presentationTimeDelta=readUint32(data,16);eventDuration=readUint32(data,20);id=readUint32(data,24);offset=28;}else if(version===1){offset+=4;timeScale=readUint32(data,offset);offset+=4;const leftPresentationTime=readUint32(data,offset);offset+=4;const rightPresentationTime=readUint32(data,offset);offset+=4;presentationTime=2**32*leftPresentationTime+rightPresentationTime;if(!isSafeInteger(presentationTime)){presentationTime=Number.MAX_SAFE_INTEGER;logger.warn('Presentation time exceeds safe integer limit and wrapped to max safe integer in parsing emsg box');}eventDuration=readUint32(data,offset);offset+=4;id=readUint32(data,offset);offset+=4;while(bin2str(data.subarray(offset,offset+1))!=='\0'){schemeIdUri+=bin2str(data.subarray(offset,offset+1));offset+=1;}schemeIdUri+=bin2str(data.subarray(offset,offset+1));offset+=1;while(bin2str(data.subarray(offset,offset+1))!=='\0'){value+=bin2str(data.subarray(offset,offset+1));offset+=1;}value+=bin2str(data.subarray(offset,offset+1));offset+=1;}const payload=data.subarray(offset,data.byteLength);return {schemeIdUri,value,timeScale,presentationTime,presentationTimeDelta,eventDuration,id,payload};}function mp4Box(type,...payload){const len=payload.length;let size=8;let i=len;while(i--){size+=payload[i].byteLength;}const result=new Uint8Array(size);result[0]=size>>24&0xff;result[1]=size>>16&0xff;result[2]=size>>8&0xff;result[3]=size&0xff;result.set(type,4);for(i=0,size=8;i<len;i++){result.set(payload[i],size);size+=payload[i].byteLength;}return result;}function mp4pssh(systemId,keyids,data){if(systemId.byteLength!==16){throw new RangeError('Invalid system id');}let version;let kids;if(keyids){version=1;kids=new Uint8Array(keyids.length*16);for(let ix=0;ix<keyids.length;ix++){const k=keyids[ix];// uint8array
if(k.byteLength!==16){throw new RangeError('Invalid key');}kids.set(k,ix*16);}}else {version=0;kids=new Uint8Array();}let kidCount;if(version>0){kidCount=new Uint8Array(4);if(keyids.length>0){new DataView(kidCount.buffer).setUint32(0,keyids.length,false);}}else {kidCount=new Uint8Array();}const dataSize=new Uint8Array(4);if(data&&data.byteLength>0){new DataView(dataSize.buffer).setUint32(0,data.byteLength,false);}return mp4Box([112,115,115,104],new Uint8Array([version,0x00,0x00,0x00// Flags
]),systemId,// 16 bytes
kidCount,kids,dataSize,data||new Uint8Array());}function parsePssh(initData){if(!(initData instanceof ArrayBuffer)||initData.byteLength<32){return null;}const result={version:0,systemId:'',kids:null,data:null};const view=new DataView(initData);const boxSize=view.getUint32(0);if(initData.byteLength!==boxSize&&boxSize>44){return null;}const type=view.getUint32(4);if(type!==0x70737368){return null;}result.version=view.getUint32(8)>>>24;if(result.version>1){return null;}result.systemId=Hex.hexDump(new Uint8Array(initData,12,16));const dataSizeOrKidCount=view.getUint32(28);if(result.version===0){if(boxSize-32<dataSizeOrKidCount){return null;}result.data=new Uint8Array(initData,32,dataSizeOrKidCount);}else if(result.version===1){result.kids=[];for(let i=0;i<dataSizeOrKidCount;i++){result.kids.push(new Uint8Array(initData,32+i*16,16));}}return result;}let keyUriToKeyIdMap={};class LevelKey{static clearKeyUriToKeyIdMap(){keyUriToKeyIdMap={};}constructor(method,uri,format,formatversions=[1],iv=null){this.uri=void 0;this.method=void 0;this.keyFormat=void 0;this.keyFormatVersions=void 0;this.encrypted=void 0;this.isCommonEncryption=void 0;this.iv=null;this.key=null;this.keyId=null;this.pssh=null;this.method=method;this.uri=uri;this.keyFormat=format;this.keyFormatVersions=formatversions;this.iv=iv;this.encrypted=method?method!=='NONE':false;this.isCommonEncryption=this.encrypted&&method!=='AES-128';}isSupported(){// If it's Segment encryption or No encryption, just select that key system
if(this.method){if(this.method==='AES-128'||this.method==='NONE'){return true;}if(this.keyFormat==='identity'){// Maintain support for clear SAMPLE-AES with MPEG-3 TS
return this.method==='SAMPLE-AES';}else {switch(this.keyFormat){case KeySystemFormats.FAIRPLAY:case KeySystemFormats.WIDEVINE:case KeySystemFormats.PLAYREADY:case KeySystemFormats.CLEARKEY:return ['ISO-23001-7','SAMPLE-AES','SAMPLE-AES-CENC','SAMPLE-AES-CTR'].indexOf(this.method)!==-1;}}}return false;}getDecryptData(sn){if(!this.encrypted||!this.uri){return null;}if(this.method==='AES-128'&&this.uri&&!this.iv){if(typeof sn!=='number'){// We are fetching decryption data for a initialization segment
// If the segment was encrypted with AES-128
// It must have an IV defined. We cannot substitute the Segment Number in.
if(this.method==='AES-128'&&!this.iv){logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);}// Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
sn=0;}const iv=createInitializationVector(sn);const decryptdata=new LevelKey(this.method,this.uri,'identity',this.keyFormatVersions,iv);return decryptdata;}// Initialize keyId if possible
const keyBytes=convertDataUriToArrayBytes(this.uri);if(keyBytes){switch(this.keyFormat){case KeySystemFormats.WIDEVINE:this.pssh=keyBytes;// In case of widevine keyID is embedded in PSSH box. Read Key ID.
if(keyBytes.length>=22){this.keyId=keyBytes.subarray(keyBytes.length-22,keyBytes.length-6);}break;case KeySystemFormats.PLAYREADY:{const PlayReadyKeySystemUUID=new Uint8Array([0x9a,0x04,0xf0,0x79,0x98,0x40,0x42,0x86,0xab,0x92,0xe6,0x5b,0xe0,0x88,0x5f,0x95]);this.pssh=mp4pssh(PlayReadyKeySystemUUID,null,keyBytes);const keyBytesUtf16=new Uint16Array(keyBytes.buffer,keyBytes.byteOffset,keyBytes.byteLength/2);const keyByteStr=String.fromCharCode.apply(null,Array.from(keyBytesUtf16));// Parse Playready WRMHeader XML
const xmlKeyBytes=keyByteStr.substring(keyByteStr.indexOf('<'),keyByteStr.length);const parser=new DOMParser();const xmlDoc=parser.parseFromString(xmlKeyBytes,'text/xml');const keyData=xmlDoc.getElementsByTagName('KID')[0];if(keyData){const keyId=keyData.childNodes[0]?keyData.childNodes[0].nodeValue:keyData.getAttribute('VALUE');if(keyId){const keyIdArray=base64Decode(keyId).subarray(0,16);// KID value in PRO is a base64-encoded little endian GUID interpretation of UUID
// KID value in ‘tenc’ is a big endian UUID GUID interpretation of UUID
changeEndianness(keyIdArray);this.keyId=keyIdArray;}}break;}default:{let keydata=keyBytes.subarray(0,16);if(keydata.length!==16){const padded=new Uint8Array(16);padded.set(keydata,16-keydata.length);keydata=padded;}this.keyId=keydata;break;}}}// Default behavior: assign a new keyId for each uri
if(!this.keyId||this.keyId.byteLength!==16){let keyId=keyUriToKeyIdMap[this.uri];if(!keyId){const val=Object.keys(keyUriToKeyIdMap).length%Number.MAX_SAFE_INTEGER;keyId=new Uint8Array(16);const dv=new DataView(keyId.buffer,12,4);// Just set the last 4 bytes
dv.setUint32(0,val);keyUriToKeyIdMap[this.uri]=keyId;}this.keyId=keyId;}return this;}}function createInitializationVector(segmentNumber){const uint8View=new Uint8Array(16);for(let i=12;i<16;i++){uint8View[i]=segmentNumber>>8*(15-i)&0xff;}return uint8View;}const VARIABLE_REPLACEMENT_REGEX=/\{\$([a-zA-Z0-9-_]+)\}/g;function hasVariableReferences(str){return VARIABLE_REPLACEMENT_REGEX.test(str);}function substituteVariablesInAttributes(parsed,attr,attributeNames){if(parsed.variableList!==null||parsed.hasVariableRefs){for(let i=attributeNames.length;i--;){const name=attributeNames[i];const value=attr[name];if(value){attr[name]=substituteVariables(parsed,value);}}}}function substituteVariables(parsed,value){if(parsed.variableList!==null||parsed.hasVariableRefs){const variableList=parsed.variableList;return value.replace(VARIABLE_REPLACEMENT_REGEX,variableReference=>{const variableName=variableReference.substring(2,variableReference.length-1);const variableValue=variableList==null?void 0:variableList[variableName];if(variableValue===undefined){parsed.playlistParsingError||(parsed.playlistParsingError=new Error(`Missing preceding EXT-X-DEFINE tag for Variable Reference: "${variableName}"`));return variableReference;}return variableValue;});}return value;}function addVariableDefinition(parsed,attr,parentUrl){let variableList=parsed.variableList;if(!variableList){parsed.variableList=variableList={};}let NAME;let VALUE;if('QUERYPARAM'in attr){NAME=attr.QUERYPARAM;try{const searchParams=new self.URL(parentUrl).searchParams;if(searchParams.has(NAME)){VALUE=searchParams.get(NAME);}else {throw new Error(`"${NAME}" does not match any query parameter in URI: "${parentUrl}"`);}}catch(error){parsed.playlistParsingError||(parsed.playlistParsingError=new Error(`EXT-X-DEFINE QUERYPARAM: ${error.message}`));}}else {NAME=attr.NAME;VALUE=attr.VALUE;}if(NAME in variableList){parsed.playlistParsingError||(parsed.playlistParsingError=new Error(`EXT-X-DEFINE duplicate Variable Name declarations: "${NAME}"`));}else {variableList[NAME]=VALUE||'';}}function importVariableDefinition(parsed,attr,sourceVariableList){const IMPORT=attr.IMPORT;if(sourceVariableList&&IMPORT in sourceVariableList){let variableList=parsed.variableList;if(!variableList){parsed.variableList=variableList={};}variableList[IMPORT]=sourceVariableList[IMPORT];}else {parsed.playlistParsingError||(parsed.playlistParsingError=new Error(`EXT-X-DEFINE IMPORT attribute not found in Multivariant Playlist: "${IMPORT}"`));}}/**
* MediaSource helper
*/function getMediaSource(preferManagedMediaSource=true){if(typeof self==='undefined')return undefined;const mms=(preferManagedMediaSource||!self.MediaSource)&&self.ManagedMediaSource;return mms||self.MediaSource||self.WebKitMediaSource;}function isManagedMediaSource(source){return typeof self!=='undefined'&&source===self.ManagedMediaSource;}// from http://mp4ra.org/codecs.html
// values indicate codec selection preference (lower is higher priority)
const sampleEntryCodesISO={audio:{a3ds:1,'ac-3':0.95,'ac-4':1,alac:0.9,alaw:1,dra1:1,'dts+':1,'dts-':1,dtsc:1,dtse:1,dtsh:1,'ec-3':0.9,enca:1,fLaC:0.9,// MP4-RA listed codec entry for FLAC
flac:0.9,// legacy browser codec name for FLAC
FLAC:0.9,// some manifests may list "FLAC" with Apple's tools
g719:1,g726:1,m4ae:1,mha1:1,mha2:1,mhm1:1,mhm2:1,mlpa:1,mp4a:1,'raw ':1,Opus:1,opus:1,// browsers expect this to be lowercase despite MP4RA says 'Opus'
samr:1,sawb:1,sawp:1,sevc:1,sqcp:1,ssmv:1,twos:1,ulaw:1},video:{avc1:1,avc2:1,avc3:1,avc4:1,avcp:1,av01:0.8,drac:1,dva1:1,dvav:1,dvh1:0.7,dvhe:0.7,encv:1,hev1:0.75,hvc1:0.75,mjp2:1,mp4v:1,mvc1:1,mvc2:1,mvc3:1,mvc4:1,resv:1,rv60:1,s263:1,svc1:1,svc2:1,'vc-1':1,vp08:1,vp09:0.9},text:{stpp:1,wvtt:1}};function isCodecType(codec,type){const typeCodes=sampleEntryCodesISO[type];return !!typeCodes&&!!typeCodes[codec.slice(0,4)];}function areCodecsMediaSourceSupported(codecs,type,preferManagedMediaSource=true){return !codecs.split(',').some(codec=>!isCodecMediaSourceSupported(codec,type,preferManagedMediaSource));}function isCodecMediaSourceSupported(codec,type,preferManagedMediaSource=true){var _MediaSource$isTypeSu;const MediaSource=getMediaSource(preferManagedMediaSource);return (_MediaSource$isTypeSu=MediaSource==null?void 0:MediaSource.isTypeSupported(mimeTypeForCodec(codec,type)))!=null?_MediaSource$isTypeSu:false;}function mimeTypeForCodec(codec,type){return `${type}/mp4;codecs="${codec}"`;}function videoCodecPreferenceValue(videoCodec){if(videoCodec){const fourCC=videoCodec.substring(0,4);return sampleEntryCodesISO.video[fourCC];}return 2;}function codecsSetSelectionPreferenceValue(codecSet){return codecSet.split(',').reduce((num,fourCC)=>{const preferenceValue=sampleEntryCodesISO.video[fourCC];if(preferenceValue){return (preferenceValue*2+num)/(num?3:2);}return (sampleEntryCodesISO.audio[fourCC]+num)/(num?2:1);},0);}const CODEC_COMPATIBLE_NAMES={};function getCodecCompatibleNameLower(lowerCaseCodec,preferManagedMediaSource=true){if(CODEC_COMPATIBLE_NAMES[lowerCaseCodec]){return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];}// Idealy fLaC and Opus would be first (spec-compliant) but
// some browsers will report that fLaC is supported then fail.
// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
const codecsToCheck={flac:['flac','fLaC','FLAC'],opus:['opus','Opus']}[lowerCaseCodec];for(let i=0;i<codecsToCheck.length;i++){if(isCodecMediaSourceSupported(codecsToCheck[i],'audio',preferManagedMediaSource)){CODEC_COMPATIBLE_NAMES[lowerCaseCodec]=codecsToCheck[i];return codecsToCheck[i];}}return lowerCaseCodec;}const AUDIO_CODEC_REGEXP=/flac|opus/i;function getCodecCompatibleName(codec,preferManagedMediaSource=true){return codec.replace(AUDIO_CODEC_REGEXP,m=>getCodecCompatibleNameLower(m.toLowerCase(),preferManagedMediaSource));}function pickMostCompleteCodecName(parsedCodec,levelCodec){// Parsing of mp4a codecs strings in mp4-tools from media is incomplete as of d8c6c7a
// so use level codec if parsed codec is unavailable or incomplete
if(parsedCodec&&parsedCodec!=='mp4a'){return parsedCodec;}return levelCodec?levelCodec.split(',')[0]:levelCodec;}function convertAVC1ToAVCOTI(codec){// Convert avc1 codec string from RFC-4281 to RFC-6381 for MediaSource.isTypeSupported
// Examples: avc1.66.30 to avc1.42001e and avc1.77.30,avc1.66.30 to avc1.4d001e,avc1.42001e.
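/* Illustrative sketch (not part of the library; hypothetical inputs/outputs):
   //   convertAVC1ToAVCOTI('avc1.66.30')        -> 'avc1.42001e'   (66 -> 0x42, 30 -> 0x1e)
   //   convertAVC1ToAVCOTI('avc1.77.30')        -> 'avc1.4d001e'
   //   mimeTypeForCodec('avc1.42001e', 'video') -> 'video/mp4;codecs="avc1.42001e"'
   The RFC 6381 form is what ends up being passed to MediaSource.isTypeSupported(). */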
const codecs=codec.split(',');for(let i=0;i<codecs.length;i++){const avcdata=codecs[i].split('.');if(avcdata.length>2){let result=avcdata.shift()+'.';result+=parseInt(avcdata.shift()).toString(16);result+=('000'+parseInt(avcdata.shift()).toString(16)).slice(-4);codecs[i]=result;}}return codecs.join(',');}const MASTER_PLAYLIST_REGEX=/#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;const MASTER_PLAYLIST_MEDIA_REGEX=/#EXT-X-MEDIA:(.*)/g;const IS_MEDIA_PLAYLIST=/^#EXT(?:INF|-X-TARGETDURATION):/m;// Handle empty Media Playlist (first EXTINF not signaled, but TARGETDURATION present)
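/* Illustrative sketch (not part of the library; hypothetical playlists): IS_MEDIA_PLAYLIST keys
   on either #EXTINF or #EXT-X-TARGETDURATION, so an empty live media playlist is still
   classified correctly.
   //   M3U8Parser.isMediaPlaylist('#EXTM3U\n#EXT-X-TARGETDURATION:6\n')                    -> true
   //   M3U8Parser.isMediaPlaylist('#EXTM3U\n#EXT-X-STREAM-INF:BANDWIDTH=500000\nlow.m3u8') -> false
   Multivariant playlists fall through to parseMasterPlaylist() instead. */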
const LEVEL_PLAYLIST_REGEX_FAST=new RegExp([/#EXTINF:\s*(\d*(?:\.\d+)?)(?:,(.*)\s+)?/.source,// duration (#EXTINF:<duration>,<title>), group 1 => duration, group 2 => title
/(?!#) *(\S[^\r\n]*)/.source,// segment URI, group 3 => the URI (note newline is not eaten)
/#EXT-X-BYTERANGE:*(.+)/.source,// next segment's byterange, group 4 => range spec (x@y)
/#EXT-X-PROGRAM-DATE-TIME:(.+)/.source,// next segment's program date/time group 5 => the datetime spec
/#.*/.source// All other non-segment oriented tags will match with all groups empty
].join('|'),'g');const LEVEL_PLAYLIST_REGEX_SLOW=new RegExp([/#(EXTM3U)/.source,/#EXT-X-(DATERANGE|DEFINE|KEY|MAP|PART|PART-INF|PLAYLIST-TYPE|PRELOAD-HINT|RENDITION-REPORT|SERVER-CONTROL|SKIP|START):(.+)/.source,/#EXT-X-(BITRATE|DISCONTINUITY-SEQUENCE|MEDIA-SEQUENCE|TARGETDURATION|VERSION): *(\d+)/.source,/#EXT-X-(DISCONTINUITY|ENDLIST|GAP|INDEPENDENT-SEGMENTS)/.source,/(#)([^:]*):(.*)/.source,/(#)(.*)(?:.*)\r?\n?/.source].join('|'));class M3U8Parser{static findGroup(groups,mediaGroupId){for(let i=0;i<groups.length;i++){const group=groups[i];if(group.id===mediaGroupId){return group;}}}static resolve(url,baseUrl){return urlToolkitExports.buildAbsoluteURL(baseUrl,url,{alwaysNormalize:true});}static isMediaPlaylist(str){return IS_MEDIA_PLAYLIST.test(str);}static parseMasterPlaylist(string,baseurl){const hasVariableRefs=hasVariableReferences(string);const parsed={contentSteering:null,levels:[],playlistParsingError:null,sessionData:null,sessionKeys:null,startTimeOffset:null,variableList:null,hasVariableRefs};const levelsWithKnownCodecs=[];MASTER_PLAYLIST_REGEX.lastIndex=0;let result;while((result=MASTER_PLAYLIST_REGEX.exec(string))!=null){if(result[1]){var _level$unknownCodecs;// '#EXT-X-STREAM-INF' is found, parse level tag in group 1
const attrs=new AttrList(result[1]);{substituteVariablesInAttributes(parsed,attrs,['CODECS','SUPPLEMENTAL-CODECS','ALLOWED-CPC','PATHWAY-ID','STABLE-VARIANT-ID','AUDIO','VIDEO','SUBTITLES','CLOSED-CAPTIONS','NAME']);}const uri=substituteVariables(parsed,result[2]);const level={attrs,bitrate:attrs.decimalInteger('BANDWIDTH')||attrs.decimalInteger('AVERAGE-BANDWIDTH'),name:attrs.NAME,url:M3U8Parser.resolve(uri,baseurl)};const resolution=attrs.decimalResolution('RESOLUTION');if(resolution){level.width=resolution.width;level.height=resolution.height;}setCodecs(attrs.CODECS,level);if(!((_level$unknownCodecs=level.unknownCodecs)!=null&&_level$unknownCodecs.length)){levelsWithKnownCodecs.push(level);}parsed.levels.push(level);}else if(result[3]){const tag=result[3];const attributes=result[4];switch(tag){case'SESSION-DATA':{// #EXT-X-SESSION-DATA
const sessionAttrs=new AttrList(attributes);{substituteVariablesInAttributes(parsed,sessionAttrs,['DATA-ID','LANGUAGE','VALUE','URI']);}const dataId=sessionAttrs['DATA-ID'];if(dataId){if(parsed.sessionData===null){parsed.sessionData={};}parsed.sessionData[dataId]=sessionAttrs;}break;}case'SESSION-KEY':{// #EXT-X-SESSION-KEY
const sessionKey=parseKey(attributes,baseurl,parsed);if(sessionKey.encrypted&&sessionKey.isSupported()){if(parsed.sessionKeys===null){parsed.sessionKeys=[];}parsed.sessionKeys.push(sessionKey);}else {logger.warn(`[Keys] Ignoring invalid EXT-X-SESSION-KEY tag: "${attributes}"`);}break;}case'DEFINE':{// #EXT-X-DEFINE
{const variableAttributes=new AttrList(attributes);substituteVariablesInAttributes(parsed,variableAttributes,['NAME','VALUE','QUERYPARAM']);addVariableDefinition(parsed,variableAttributes,baseurl);}break;}case'CONTENT-STEERING':{// #EXT-X-CONTENT-STEERING
const contentSteeringAttributes=new AttrList(attributes);{substituteVariablesInAttributes(parsed,contentSteeringAttributes,['SERVER-URI','PATHWAY-ID']);}parsed.contentSteering={uri:M3U8Parser.resolve(contentSteeringAttributes['SERVER-URI'],baseurl),pathwayId:contentSteeringAttributes['PATHWAY-ID']||'.'};break;}case'START':{// #EXT-X-START
parsed.startTimeOffset=parseStartTimeOffset(attributes);break;}}}}// Filter out levels with unknown codecs if it does not remove all levels
const stripUnknownCodecLevels=levelsWithKnownCodecs.length>0&&levelsWithKnownCodecs.length<parsed.levels.length;parsed.levels=stripUnknownCodecLevels?levelsWithKnownCodecs:parsed.levels;if(parsed.levels.length===0){parsed.playlistParsingError=new Error('no levels found in manifest');}return parsed;}static parseMasterPlaylistMedia(string,baseurl,parsed){let result;const results={};const levels=parsed.levels;const groupsByType={AUDIO:levels.map(level=>({id:level.attrs.AUDIO,audioCodec:level.audioCodec})),SUBTITLES:levels.map(level=>({id:level.attrs.SUBTITLES,textCodec:level.textCodec})),'CLOSED-CAPTIONS':[]};let id=0;MASTER_PLAYLIST_MEDIA_REGEX.lastIndex=0;while((result=MASTER_PLAYLIST_MEDIA_REGEX.exec(string))!==null){const attrs=new AttrList(result[1]);const type=attrs.TYPE;if(type){const groups=groupsByType[type];const medias=results[type]||[];results[type]=medias;{substituteVariablesInAttributes(parsed,attrs,['URI','GROUP-ID','LANGUAGE','ASSOC-LANGUAGE','STABLE-RENDITION-ID','NAME','INSTREAM-ID','CHARACTERISTICS','CHANNELS']);}const lang=attrs.LANGUAGE;const assocLang=attrs['ASSOC-LANGUAGE'];const channels=attrs.CHANNELS;const characteristics=attrs.CHARACTERISTICS;const instreamId=attrs['INSTREAM-ID'];const media={attrs,bitrate:0,id:id++,groupId:attrs['GROUP-ID']||'',name:attrs.NAME||lang||'',type,default:attrs.bool('DEFAULT'),autoselect:attrs.bool('AUTOSELECT'),forced:attrs.bool('FORCED'),lang,url:attrs.URI?M3U8Parser.resolve(attrs.URI,baseurl):''};if(assocLang){media.assocLang=assocLang;}if(channels){media.channels=channels;}if(characteristics){media.characteristics=characteristics;}if(instreamId){media.instreamId=instreamId;}if(groups!=null&&groups.length){// If there are audio or text groups signalled in the manifest, let's look for a matching codec string for this track
// If we don't find the track signalled, let's use the first audio group's codec we have
// Acting as a best guess
const groupCodec=M3U8Parser.findGroup(groups,media.groupId)||groups[0];assignCodec(media,groupCodec,'audioCodec');assignCodec(media,groupCodec,'textCodec');}medias.push(media);}}return results;}static parseLevelPlaylist(string,baseurl,id,type,levelUrlId,multivariantVariableList){const level=new LevelDetails(baseurl);const fragments=level.fragments;// The most recent init segment seen (applies to all subsequent segments)
let currentInitSegment=null;let currentSN=0;let currentPart=0;let totalduration=0;let discontinuityCounter=0;let prevFrag=null;let frag=new Fragment(type,baseurl);let result;let i;let levelkeys;let firstPdtIndex=-1;let createNextFrag=false;let nextByteRange=null;LEVEL_PLAYLIST_REGEX_FAST.lastIndex=0;level.m3u8=string;level.hasVariableRefs=hasVariableReferences(string);while((result=LEVEL_PLAYLIST_REGEX_FAST.exec(string))!==null){if(createNextFrag){createNextFrag=false;frag=new Fragment(type,baseurl);// setup the next fragment for part loading
frag.start=totalduration;frag.sn=currentSN;frag.cc=discontinuityCounter;frag.level=id;if(currentInitSegment){frag.initSegment=currentInitSegment;frag.rawProgramDateTime=currentInitSegment.rawProgramDateTime;currentInitSegment.rawProgramDateTime=null;if(nextByteRange){frag.setByteRange(nextByteRange);nextByteRange=null;}}}const duration=result[1];if(duration){// INF
frag.duration=parseFloat(duration);// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
const title=(' '+result[2]).slice(1);frag.title=title||null;frag.tagList.push(title?['INF',duration,title]:['INF',duration]);}else if(result[3]){// url
if(isFiniteNumber(frag.duration)){frag.start=totalduration;if(levelkeys){setFragLevelKeys(frag,levelkeys,level);}frag.sn=currentSN;frag.level=id;frag.cc=discontinuityCounter;fragments.push(frag);// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
const uri=(' '+result[3]).slice(1);frag.relurl=substituteVariables(level,uri);assignProgramDateTime(frag,prevFrag);prevFrag=frag;totalduration+=frag.duration;currentSN++;currentPart=0;createNextFrag=true;}}else if(result[4]){// X-BYTERANGE
const data=(' '+result[4]).slice(1);if(prevFrag){frag.setByteRange(data,prevFrag);}else {frag.setByteRange(data);}}else if(result[5]){// PROGRAM-DATE-TIME
// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
frag.rawProgramDateTime=(' '+result[5]).slice(1);frag.tagList.push(['PROGRAM-DATE-TIME',frag.rawProgramDateTime]);if(firstPdtIndex===-1){firstPdtIndex=fragments.length;}}else {result=result[0].match(LEVEL_PLAYLIST_REGEX_SLOW);if(!result){logger.warn('No matches on slow regex match for level playlist!');continue;}for(i=1;i<result.length;i++){if(typeof result[i]!=='undefined'){break;}}// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
const tag=(' '+result[i]).slice(1);const value1=(' '+result[i+1]).slice(1);const value2=result[i+2]?(' '+result[i+2]).slice(1):'';switch(tag){case'PLAYLIST-TYPE':level.type=value1.toUpperCase();break;case'MEDIA-SEQUENCE':currentSN=level.startSN=parseInt(value1);break;case'SKIP':{const skipAttrs=new AttrList(value1);{substituteVariablesInAttributes(level,skipAttrs,['RECENTLY-REMOVED-DATERANGES']);}const skippedSegments=skipAttrs.decimalInteger('SKIPPED-SEGMENTS');if(isFiniteNumber(skippedSegments)){level.skippedSegments=skippedSegments;// This will result in fragments[] containing undefined values, which we will fill in with `mergeDetails`
for(let _i=skippedSegments;_i--;){fragments.unshift(null);}currentSN+=skippedSegments;}const recentlyRemovedDateranges=skipAttrs.enumeratedString('RECENTLY-REMOVED-DATERANGES');if(recentlyRemovedDateranges){level.recentlyRemovedDateranges=recentlyRemovedDateranges.split('\t');}break;}case'TARGETDURATION':level.targetduration=Math.max(parseInt(value1),1);break;case'VERSION':level.version=parseInt(value1);break;case'INDEPENDENT-SEGMENTS':case'EXTM3U':break;case'ENDLIST':level.live=false;break;case'#':if(value1||value2){frag.tagList.push(value2?[value1,value2]:[value1]);}break;case'DISCONTINUITY':discontinuityCounter++;frag.tagList.push(['DIS']);break;case'GAP':frag.gap=true;frag.tagList.push([tag]);break;case'BITRATE':frag.tagList.push([tag,value1]);break;case'DATERANGE':{const dateRangeAttr=new AttrList(value1);{substituteVariablesInAttributes(level,dateRangeAttr,['ID','CLASS','START-DATE','END-DATE','SCTE35-CMD','SCTE35-OUT','SCTE35-IN']);substituteVariablesInAttributes(level,dateRangeAttr,dateRangeAttr.clientAttrs);}const dateRange=new DateRange(dateRangeAttr,level.dateRanges[dateRangeAttr.ID]);if(dateRange.isValid||level.skippedSegments){level.dateRanges[dateRange.id]=dateRange;}else {logger.warn(`Ignoring invalid DATERANGE tag: "${value1}"`);}// Add to fragment tag list for backwards compatibility (< v1.2.0)
frag.tagList.push(['EXT-X-DATERANGE',value1]);break;}case'DEFINE':{{const variableAttributes=new AttrList(value1);substituteVariablesInAttributes(level,variableAttributes,['NAME','VALUE','IMPORT','QUERYPARAM']);if('IMPORT'in variableAttributes){importVariableDefinition(level,variableAttributes,multivariantVariableList);}else {addVariableDefinition(level,variableAttributes,baseurl);}}break;}case'DISCONTINUITY-SEQUENCE':discontinuityCounter=parseInt(value1);break;case'KEY':{const levelKey=parseKey(value1,baseurl,level);if(levelKey.isSupported()){if(levelKey.method==='NONE'){levelkeys=undefined;break;}if(!levelkeys){levelkeys={};}if(levelkeys[levelKey.keyFormat]){levelkeys=_extends({},levelkeys);}levelkeys[levelKey.keyFormat]=levelKey;}else {logger.warn(`[Keys] Ignoring invalid EXT-X-KEY tag: "${value1}"`);}break;}case'START':level.startTimeOffset=parseStartTimeOffset(value1);break;case'MAP':{const mapAttrs=new AttrList(value1);{substituteVariablesInAttributes(level,mapAttrs,['BYTERANGE','URI']);}if(frag.duration){// Initial segment tag is after segment duration tag.
// #EXTINF: 6.0
// #EXT-X-MAP:URI="init.mp4"
const init=new Fragment(type,baseurl);setInitSegment(init,mapAttrs,id,levelkeys);currentInitSegment=init;frag.initSegment=currentInitSegment;if(currentInitSegment.rawProgramDateTime&&!frag.rawProgramDateTime){frag.rawProgramDateTime=currentInitSegment.rawProgramDateTime;}}else {// Initial segment tag is before segment duration tag
// Handle case where EXT-X-MAP is declared after EXT-X-BYTERANGE
const end=frag.byteRangeEndOffset;if(end){const start=frag.byteRangeStartOffset;nextByteRange=`${end-start}@${start}`;}else {nextByteRange=null;}setInitSegment(frag,mapAttrs,id,levelkeys);currentInitSegment=frag;createNextFrag=true;}break;}case'SERVER-CONTROL':{const serverControlAttrs=new AttrList(value1);level.canBlockReload=serverControlAttrs.bool('CAN-BLOCK-RELOAD');level.canSkipUntil=serverControlAttrs.optionalFloat('CAN-SKIP-UNTIL',0);level.canSkipDateRanges=level.canSkipUntil>0&&serverControlAttrs.bool('CAN-SKIP-DATERANGES');level.partHoldBack=serverControlAttrs.optionalFloat('PART-HOLD-BACK',0);level.holdBack=serverControlAttrs.optionalFloat('HOLD-BACK',0);break;}case'PART-INF':{const partInfAttrs=new AttrList(value1);level.partTarget=partInfAttrs.decimalFloatingPoint('PART-TARGET');break;}case'PART':{let partList=level.partList;if(!partList){partList=level.partList=[];}const previousFragmentPart=currentPart>0?partList[partList.length-1]:undefined;const index=currentPart++;const partAttrs=new AttrList(value1);{substituteVariablesInAttributes(level,partAttrs,['BYTERANGE','URI']);}const part=new Part(partAttrs,frag,baseurl,index,previousFragmentPart);partList.push(part);frag.duration+=part.duration;break;}case'PRELOAD-HINT':{const preloadHintAttrs=new AttrList(value1);{substituteVariablesInAttributes(level,preloadHintAttrs,['URI']);}level.preloadHint=preloadHintAttrs;break;}case'RENDITION-REPORT':{const renditionReportAttrs=new AttrList(value1);{substituteVariablesInAttributes(level,renditionReportAttrs,['URI']);}level.renditionReports=level.renditionReports||[];level.renditionReports.push(renditionReportAttrs);break;}default:logger.warn(`line parsed but not handled: ${result}`);break;}}}if(prevFrag&&!prevFrag.relurl){fragments.pop();totalduration-=prevFrag.duration;if(level.partList){level.fragmentHint=prevFrag;}}else if(level.partList){assignProgramDateTime(frag,prevFrag);frag.cc=discontinuityCounter;level.fragmentHint=frag;if(levelkeys){setFragLevelKeys(frag,levelkeys,level);}}const fragmentLength=fragments.length;const firstFragment=fragments[0];const lastFragment=fragments[fragmentLength-1];totalduration+=level.skippedSegments*level.targetduration;if(totalduration>0&&fragmentLength&&lastFragment){level.averagetargetduration=totalduration/fragmentLength;const lastSn=lastFragment.sn;level.endSN=lastSn!=='initSegment'?lastSn:0;if(!level.live){lastFragment.endList=true;}if(firstFragment){level.startCC=firstFragment.cc;}}else {level.endSN=0;level.startCC=0;}if(level.fragmentHint){totalduration+=level.fragmentHint.duration;}level.totalduration=totalduration;level.endCC=discontinuityCounter;/**
* Backfill any missing PDT values
* "If the first EXT-X-PROGRAM-DATE-TIME tag in a Playlist appears after
* one or more Media Segment URIs, the client SHOULD extrapolate
* backward from that tag (using EXTINF durations and/or media
* timestamps) to associate dates with those segments."
* We have already extrapolated forward, but all fragments up to the first instance of PDT do not have their PDTs
* computed.
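* Illustrative example (hypothetical values): if the first PDT tag appears on fragment #2 with
* 2024-01-01T10:00:10.000Z and fragments #0/#1 each have an EXTINF duration of 5 seconds,
* backfillProgramDateTimes() assigns #1 = 10:00:05.000 and #0 = 10:00:00.000 by repeatedly
* subtracting duration * 1000 ms while walking backward from the tagged fragment.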
*/if(firstPdtIndex>0){backfillProgramDateTimes(fragments,firstPdtIndex);}return level;}}function parseKey(keyTagAttributes,baseurl,parsed){var _keyAttrs$METHOD,_keyAttrs$KEYFORMAT;// https://tools.ietf.org/html/rfc8216#section-4.3.2.4
const keyAttrs=new AttrList(keyTagAttributes);{substituteVariablesInAttributes(parsed,keyAttrs,['KEYFORMAT','KEYFORMATVERSIONS','URI','IV','URI']);}const decryptmethod=(_keyAttrs$METHOD=keyAttrs.METHOD)!=null?_keyAttrs$METHOD:'';const decrypturi=keyAttrs.URI;const decryptiv=keyAttrs.hexadecimalInteger('IV');const decryptkeyformatversions=keyAttrs.KEYFORMATVERSIONS;// From RFC: This attribute is OPTIONAL; its absence indicates an implicit value of "identity".
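/* Illustrative sketch (not part of the library; the URI is hypothetical): a key tag such as
   //   #EXT-X-KEY:METHOD=AES-128,URI="https://example.com/key.bin",IV=0x00000000000000000000000000000001
   // yields method 'AES-128', a uri resolved against the playlist URL, the iv parsed from the hex
   // attribute, and (per the RFC note above) keyFormat defaulting to 'identity' when KEYFORMAT is
   // absent. */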
const decryptkeyformat=(_keyAttrs$KEYFORMAT=keyAttrs.KEYFORMAT)!=null?_keyAttrs$KEYFORMAT:'identity';if(decrypturi&&keyAttrs.IV&&!decryptiv){logger.error(`Invalid IV: ${keyAttrs.IV}`);}// If decrypturi is a URI with a scheme, then baseurl will be ignored
// No uri is allowed when METHOD is NONE
const resolvedUri=decrypturi?M3U8Parser.resolve(decrypturi,baseurl):'';const keyFormatVersions=(decryptkeyformatversions?decryptkeyformatversions:'1').split('/').map(Number).filter(Number.isFinite);return new LevelKey(decryptmethod,resolvedUri,decryptkeyformat,keyFormatVersions,decryptiv);}function parseStartTimeOffset(startAttributes){const startAttrs=new AttrList(startAttributes);const startTimeOffset=startAttrs.decimalFloatingPoint('TIME-OFFSET');if(isFiniteNumber(startTimeOffset)){return startTimeOffset;}return null;}function setCodecs(codecsAttributeValue,level){let codecs=(codecsAttributeValue||'').split(/[ ,]+/).filter(c=>c);['video','audio','text'].forEach(type=>{const filtered=codecs.filter(codec=>isCodecType(codec,type));if(filtered.length){// Comma separated list of all codecs for type
level[`${type}Codec`]=filtered.join(',');// Remove known codecs so that only unknownCodecs are left after iterating through each type
codecs=codecs.filter(codec=>filtered.indexOf(codec)===-1);}});level.unknownCodecs=codecs;}function assignCodec(media,groupItem,codecProperty){const codecValue=groupItem[codecProperty];if(codecValue){media[codecProperty]=codecValue;}}function backfillProgramDateTimes(fragments,firstPdtIndex){let fragPrev=fragments[firstPdtIndex];for(let i=firstPdtIndex;i--;){const frag=fragments[i];// Exit on delta-playlist skipped segments
if(!frag){return;}frag.programDateTime=fragPrev.programDateTime-frag.duration*1000;fragPrev=frag;}}function assignProgramDateTime(frag,prevFrag){if(frag.rawProgramDateTime){frag.programDateTime=Date.parse(frag.rawProgramDateTime);}else if(prevFrag!=null&&prevFrag.programDateTime){frag.programDateTime=prevFrag.endProgramDateTime;}if(!isFiniteNumber(frag.programDateTime)){frag.programDateTime=null;frag.rawProgramDateTime=null;}}function setInitSegment(frag,mapAttrs,id,levelkeys){frag.relurl=mapAttrs.URI;if(mapAttrs.BYTERANGE){frag.setByteRange(mapAttrs.BYTERANGE);}frag.level=id;frag.sn='initSegment';if(levelkeys){frag.levelkeys=levelkeys;}frag.initSegment=null;}function setFragLevelKeys(frag,levelkeys,level){frag.levelkeys=levelkeys;const{encryptedFragments}=level;if((!encryptedFragments.length||encryptedFragments[encryptedFragments.length-1].levelkeys!==levelkeys)&&Object.keys(levelkeys).some(format=>levelkeys[format].isCommonEncryption)){encryptedFragments.push(frag);}}var PlaylistContextType={MANIFEST:"manifest",LEVEL:"level",AUDIO_TRACK:"audioTrack",SUBTITLE_TRACK:"subtitleTrack"};var PlaylistLevelType={MAIN:"main",AUDIO:"audio",SUBTITLE:"subtitle"};function mapContextToLevelType(context){const{type}=context;switch(type){case PlaylistContextType.AUDIO_TRACK:return PlaylistLevelType.AUDIO;case PlaylistContextType.SUBTITLE_TRACK:return PlaylistLevelType.SUBTITLE;default:return PlaylistLevelType.MAIN;}}function getResponseUrl(response,context){let url=response.url;// responseURL not supported on some browsers (it is used to detect URL redirection)
// data-uri mode also not supported (but no need to detect redirection)
if(url===undefined||url.indexOf('data:')===0){// fallback to initial URL
url=context.url;}return url;}class PlaylistLoader{constructor(hls){this.hls=void 0;this.loaders=Object.create(null);this.variableList=null;this.hls=hls;this.registerListeners();}startLoad(startPosition){}stopLoad(){this.destroyInternalLoaders();}registerListeners(){const{hls}=this;hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.LEVEL_LOADING,this.onLevelLoading,this);hls.on(Events.AUDIO_TRACK_LOADING,this.onAudioTrackLoading,this);hls.on(Events.SUBTITLE_TRACK_LOADING,this.onSubtitleTrackLoading,this);}unregisterListeners(){const{hls}=this;hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.LEVEL_LOADING,this.onLevelLoading,this);hls.off(Events.AUDIO_TRACK_LOADING,this.onAudioTrackLoading,this);hls.off(Events.SUBTITLE_TRACK_LOADING,this.onSubtitleTrackLoading,this);}/**
* Returns defaults or configured loader-type overloads (pLoader and loader config params)
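* Illustrative example (hypothetical class name): a player can override playlist loading only,
* e.g. `new Hls({ pLoader: MyPlaylistLoaderClass })`; when `pLoader` is not configured, the
* generic `loader` from the config is used instead.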
*/createInternalLoader(context){const config=this.hls.config;const PLoader=config.pLoader;const Loader=config.loader;const InternalLoader=PLoader||Loader;const loader=new InternalLoader(config);this.loaders[context.type]=loader;return loader;}getInternalLoader(context){return this.loaders[context.type];}resetInternalLoader(contextType){if(this.loaders[contextType]){delete this.loaders[contextType];}}/**
* Call `destroy` on all internal loader instances mapped (one per context type)
*/destroyInternalLoaders(){for(const contextType in this.loaders){const loader=this.loaders[contextType];if(loader){loader.destroy();}this.resetInternalLoader(contextType);}}destroy(){this.variableList=null;this.unregisterListeners();this.destroyInternalLoaders();}onManifestLoading(event,data){const{url}=data;this.variableList=null;this.load({id:null,level:0,responseType:'text',type:PlaylistContextType.MANIFEST,url,deliveryDirectives:null});}onLevelLoading(event,data){const{id,level,pathwayId,url,deliveryDirectives}=data;this.load({id,level,pathwayId,responseType:'text',type:PlaylistContextType.LEVEL,url,deliveryDirectives});}onAudioTrackLoading(event,data){const{id,groupId,url,deliveryDirectives}=data;this.load({id,groupId,level:null,responseType:'text',type:PlaylistContextType.AUDIO_TRACK,url,deliveryDirectives});}onSubtitleTrackLoading(event,data){const{id,groupId,url,deliveryDirectives}=data;this.load({id,groupId,level:null,responseType:'text',type:PlaylistContextType.SUBTITLE_TRACK,url,deliveryDirectives});}load(context){var _context$deliveryDire;const config=this.hls.config;// logger.debug(`[playlist-loader]: Loading playlist of type ${context.type}, level: ${context.level}, id: ${context.id}`);
// Check if a loader for this context already exists
let loader=this.getInternalLoader(context);if(loader){const loaderContext=loader.context;if(loaderContext&&loaderContext.url===context.url&&loaderContext.level===context.level){// same URL can't overlap
logger.trace('[playlist-loader]: playlist request ongoing');return;}logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);loader.abort();}// apply different configs for retries depending on
// context (manifest, level, audio/subs playlist)
let loadPolicy;if(context.type===PlaylistContextType.MANIFEST){loadPolicy=config.manifestLoadPolicy.default;}else {loadPolicy=_extends({},config.playlistLoadPolicy.default,{timeoutRetry:null,errorRetry:null});}loader=this.createInternalLoader(context);// Override level/track timeout for LL-HLS requests
// (the default of 10000ms is counter productive to blocking playlist reload requests)
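/* Illustrative example (hypothetical numbers): with PART-TARGET = 1s and TARGETDURATION = 6s the
   cap below is Math.max(1 * 3, 6 * 0.8) * 1000 = 4800 ms, which is then Math.min'ed against the
   policy's maxTimeToFirstByteMs / maxLoadTimeMs so a blocking playlist reload cannot wait for the
   full default timeout. */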
if(isFiniteNumber((_context$deliveryDire=context.deliveryDirectives)==null?void 0:_context$deliveryDire.part)){let levelDetails;if(context.type===PlaylistContextType.LEVEL&&context.level!==null){levelDetails=this.hls.levels[context.level].details;}else if(context.type===PlaylistContextType.AUDIO_TRACK&&context.id!==null){levelDetails=this.hls.audioTracks[context.id].details;}else if(context.type===PlaylistContextType.SUBTITLE_TRACK&&context.id!==null){levelDetails=this.hls.subtitleTracks[context.id].details;}if(levelDetails){const partTarget=levelDetails.partTarget;const targetDuration=levelDetails.targetduration;if(partTarget&&targetDuration){const maxLowLatencyPlaylistRefresh=Math.max(partTarget*3,targetDuration*0.8)*1000;loadPolicy=_extends({},loadPolicy,{maxTimeToFirstByteMs:Math.min(maxLowLatencyPlaylistRefresh,loadPolicy.maxTimeToFirstByteMs),maxLoadTimeMs:Math.min(maxLowLatencyPlaylistRefresh,loadPolicy.maxTimeToFirstByteMs)});}}}const legacyRetryCompatibility=loadPolicy.errorRetry||loadPolicy.timeoutRetry||{};const loaderConfig={loadPolicy,timeout:loadPolicy.maxLoadTimeMs,maxRetry:legacyRetryCompatibility.maxNumRetry||0,retryDelay:legacyRetryCompatibility.retryDelayMs||0,maxRetryDelay:legacyRetryCompatibility.maxRetryDelayMs||0};const loaderCallbacks={onSuccess:(response,stats,context,networkDetails)=>{const loader=this.getInternalLoader(context);this.resetInternalLoader(context.type);const string=response.data;// Validate if it is an M3U8 at all
if(string.indexOf('#EXTM3U')!==0){this.handleManifestParsingError(response,context,new Error('no EXTM3U delimiter'),networkDetails||null,stats);return;}stats.parsing.start=performance.now();if(M3U8Parser.isMediaPlaylist(string)){this.handleTrackOrLevelPlaylist(response,stats,context,networkDetails||null,loader);}else {this.handleMasterPlaylist(response,stats,context,networkDetails);}},onError:(response,context,networkDetails,stats)=>{this.handleNetworkError(context,networkDetails,false,response,stats);},onTimeout:(stats,context,networkDetails)=>{this.handleNetworkError(context,networkDetails,true,undefined,stats);}};// logger.debug(`[playlist-loader]: Calling internal loader delegate for URL: ${context.url}`);
loader.load(context,loaderConfig,loaderCallbacks);}handleMasterPlaylist(response,stats,context,networkDetails){const hls=this.hls;const string=response.data;const url=getResponseUrl(response,context);const parsedResult=M3U8Parser.parseMasterPlaylist(string,url);if(parsedResult.playlistParsingError){this.handleManifestParsingError(response,context,parsedResult.playlistParsingError,networkDetails,stats);return;}const{contentSteering,levels,sessionData,sessionKeys,startTimeOffset,variableList}=parsedResult;this.variableList=variableList;const{AUDIO:audioTracks=[],SUBTITLES:subtitles,'CLOSED-CAPTIONS':captions}=M3U8Parser.parseMasterPlaylistMedia(string,url,parsedResult);if(audioTracks.length){// check if we have found an audio track embedded in main playlist (audio track without URI attribute)
const embeddedAudioFound=audioTracks.some(audioTrack=>!audioTrack.url);// if no embedded audio track defined, but audio codec signaled in quality level,
// we need to signal this main audio track. This could happen with playlists that have an
// alt audio rendition in which the quality levels (main)
// contain both audio+video, but the mixed audio track is not signaled
if(!embeddedAudioFound&&levels[0].audioCodec&&!levels[0].attrs.AUDIO){logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');audioTracks.unshift({type:'main',name:'main',groupId:'main',default:false,autoselect:false,forced:false,id:-1,attrs:new AttrList({}),bitrate:0,url:''});}}hls.trigger(Events.MANIFEST_LOADED,{levels,audioTracks,subtitles,captions,contentSteering,url,stats,networkDetails,sessionData,sessionKeys,startTimeOffset,variableList});}handleTrackOrLevelPlaylist(response,stats,context,networkDetails,loader){const hls=this.hls;const{id,level,type}=context;const url=getResponseUrl(response,context);const levelUrlId=0;const levelId=isFiniteNumber(level)?level:isFiniteNumber(id)?id:0;const levelType=mapContextToLevelType(context);const levelDetails=M3U8Parser.parseLevelPlaylist(response.data,url,levelId,levelType,levelUrlId,this.variableList);// We have done our first request (Manifest-type) and receive
// not a master playlist but a chunk-list (track/level)
// We fire the manifest-loaded event anyway with the parsed level-details
// by creating a single-level structure for it.
if(type===PlaylistContextType.MANIFEST){const singleLevel={attrs:new AttrList({}),bitrate:0,details:levelDetails,name:'',url};hls.trigger(Events.MANIFEST_LOADED,{levels:[singleLevel],audioTracks:[],url,stats,networkDetails,sessionData:null,sessionKeys:null,contentSteering:null,startTimeOffset:null,variableList:null});}// save parsing time
stats.parsing.end=performance.now();// extend the context with the new levelDetails property
context.levelDetails=levelDetails;this.handlePlaylistLoaded(levelDetails,response,stats,context,networkDetails,loader);}handleManifestParsingError(response,context,error,networkDetails,stats){this.hls.trigger(Events.ERROR,{type:ErrorTypes.NETWORK_ERROR,details:ErrorDetails.MANIFEST_PARSING_ERROR,fatal:context.type===PlaylistContextType.MANIFEST,url:response.url,err:error,error,reason:error.message,response,context,networkDetails,stats});}handleNetworkError(context,networkDetails,timeout=false,response,stats){let message=`A network ${timeout?'timeout':'error'+(response?' (status '+response.code+')':'')} occurred while loading ${context.type}`;if(context.type===PlaylistContextType.LEVEL){message+=`: ${context.level} id: ${context.id}`;}else if(context.type===PlaylistContextType.AUDIO_TRACK||context.type===PlaylistContextType.SUBTITLE_TRACK){message+=` id: ${context.id} group-id: "${context.groupId}"`;}const error=new Error(message);logger.warn(`[playlist-loader]: ${message}`);let details=ErrorDetails.UNKNOWN;let fatal=false;const loader=this.getInternalLoader(context);switch(context.type){case PlaylistContextType.MANIFEST:details=timeout?ErrorDetails.MANIFEST_LOAD_TIMEOUT:ErrorDetails.MANIFEST_LOAD_ERROR;fatal=true;break;case PlaylistContextType.LEVEL:details=timeout?ErrorDetails.LEVEL_LOAD_TIMEOUT:ErrorDetails.LEVEL_LOAD_ERROR;fatal=false;break;case PlaylistContextType.AUDIO_TRACK:details=timeout?ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:ErrorDetails.AUDIO_TRACK_LOAD_ERROR;fatal=false;break;case PlaylistContextType.SUBTITLE_TRACK:details=timeout?ErrorDetails.SUBTITLE_TRACK_LOAD_TIMEOUT:ErrorDetails.SUBTITLE_LOAD_ERROR;fatal=false;break;}if(loader){this.resetInternalLoader(context.type);}const errorData={type:ErrorTypes.NETWORK_ERROR,details,fatal,url:context.url,loader,context,error,networkDetails,stats};if(response){const url=(networkDetails==null?void 0:networkDetails.url)||context.url;errorData.response=_objectSpread2({url,data:undefined},response);}this.hls.trigger(Events.ERROR,errorData);}handlePlaylistLoaded(levelDetails,response,stats,context,networkDetails,loader){const hls=this.hls;const{type,level,id,groupId,deliveryDirectives}=context;const url=getResponseUrl(response,context);const parent=mapContextToLevelType(context);const levelIndex=typeof context.level==='number'&&parent===PlaylistLevelType.MAIN?level:undefined;if(!levelDetails.fragments.length){const _error=new Error('No Segments found in Playlist');hls.trigger(Events.ERROR,{type:ErrorTypes.NETWORK_ERROR,details:ErrorDetails.LEVEL_EMPTY_ERROR,fatal:false,url,error:_error,reason:_error.message,response,context,level:levelIndex,parent,networkDetails,stats});return;}if(!levelDetails.targetduration){levelDetails.playlistParsingError=new Error('Missing Target Duration');}const error=levelDetails.playlistParsingError;if(error){hls.trigger(Events.ERROR,{type:ErrorTypes.NETWORK_ERROR,details:ErrorDetails.LEVEL_PARSING_ERROR,fatal:false,url,error,reason:error.message,response,context,level:levelIndex,parent,networkDetails,stats});return;}if(levelDetails.live&&loader){if(loader.getCacheAge){levelDetails.ageHeader=loader.getCacheAge()||0;}if(!loader.getCacheAge||isNaN(levelDetails.ageHeader)){levelDetails.ageHeader=0;}}switch(type){case PlaylistContextType.MANIFEST:case PlaylistContextType.LEVEL:hls.trigger(Events.LEVEL_LOADED,{details:levelDetails,level:levelIndex||0,id:id||0,stats,networkDetails,deliveryDirectives});break;case 
PlaylistContextType.AUDIO_TRACK:hls.trigger(Events.AUDIO_TRACK_LOADED,{details:levelDetails,id:id||0,groupId:groupId||'',stats,networkDetails,deliveryDirectives});break;case PlaylistContextType.SUBTITLE_TRACK:hls.trigger(Events.SUBTITLE_TRACK_LOADED,{details:levelDetails,id:id||0,groupId:groupId||'',stats,networkDetails,deliveryDirectives});break;}}}function sendAddTrackEvent(track,videoEl){let event;try{event=new Event('addtrack');}catch(err){// for IE11
event=document.createEvent('Event');event.initEvent('addtrack',false,false);}event.track=track;videoEl.dispatchEvent(event);}function addCueToTrack(track,cue){// Sometimes there are cue overlaps on segmented vtts so the same
// cue can appear more than once in different vtt files.
// This avoids showing duplicated cues with the same timecode and text.
const mode=track.mode;if(mode==='disabled'){track.mode='hidden';}if(track.cues&&!track.cues.getCueById(cue.id)){try{track.addCue(cue);if(!track.cues.getCueById(cue.id)){throw new Error(`addCue is failed for: ${cue}`);}}catch(err){logger.debug(`[texttrack-utils]: ${err}`);try{const textTrackCue=new self.TextTrackCue(cue.startTime,cue.endTime,cue.text);textTrackCue.id=cue.id;track.addCue(textTrackCue);}catch(err2){logger.debug(`[texttrack-utils]: Legacy TextTrackCue fallback failed: ${err2}`);}}}if(mode==='disabled'){track.mode=mode;}}function clearCurrentCues(track){// When track.mode is disabled, track.cues will be null.
// To guarantee the removal of cues, we need to temporarily
// change the mode to hidden
const mode=track.mode;if(mode==='disabled'){track.mode='hidden';}if(track.cues){for(let i=track.cues.length;i--;){track.removeCue(track.cues[i]);}}if(mode==='disabled'){track.mode=mode;}}function removeCuesInRange(track,start,end,predicate){const mode=track.mode;if(mode==='disabled'){track.mode='hidden';}if(track.cues&&track.cues.length>0){const cues=getCuesInRange(track.cues,start,end);for(let i=0;i<cues.length;i++){if(!predicate||predicate(cues[i])){track.removeCue(cues[i]);}}}if(mode==='disabled'){track.mode=mode;}}// Find first cue starting after given time.
// Modified version of binary search O(log(n)).
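// Illustrative behaviour (hypothetical cue list): for cues starting at [0, 5, 10], a time of 7 resolves to
// index 1 (the start closest to 7); a time earlier than every cue returns 0, and a time past the last
// cue's end returns -1.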
function getFirstCueIndexAfterTime(cues,time){// If first cue starts after time, start there
if(time<cues[0].startTime){return 0;}// If the last cue ends before time there is no overlap
const len=cues.length-1;if(time>cues[len].endTime){return -1;}let left=0;let right=len;while(left<=right){const mid=Math.floor((right+left)/2);if(time<cues[mid].startTime){right=mid-1;}else if(time>cues[mid].startTime&&left<len){left=mid+1;}else {// If it's not lower or higher, it must be equal.
return mid;}}// At this point, left and right have swapped.
// No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
return cues[left].startTime-time<time-cues[right].startTime?left:right;}function getCuesInRange(cues,start,end){const cuesFound=[];const firstCueInRange=getFirstCueIndexAfterTime(cues,start);if(firstCueInRange>-1){for(let i=firstCueInRange,len=cues.length;i<len;i++){const cue=cues[i];if(cue.startTime>=start&&cue.endTime<=end){cuesFound.push(cue);}else if(cue.startTime>end){return cuesFound;}}}return cuesFound;}function filterSubtitleTracks(textTrackList){const tracks=[];for(let i=0;i<textTrackList.length;i++){const track=textTrackList[i];// Edge adds a track without a label; we don't want to use it
if((track.kind==='subtitles'||track.kind==='captions')&&track.label){tracks.push(textTrackList[i]);}}return tracks;}var MetadataSchema={audioId3:"org.id3",dateRange:"com.apple.quicktime.HLS",emsg:"https://aomedia.org/emsg/ID3"};const MIN_CUE_DURATION=0.25;function getCueClass(){if(typeof self==='undefined')return undefined;return self.VTTCue||self.TextTrackCue;}function createCueWithDataFields(Cue,startTime,endTime,data,type){let cue=new Cue(startTime,endTime,'');try{cue.value=data;if(type){cue.type=type;}}catch(e){cue=new Cue(startTime,endTime,JSON.stringify(type?_objectSpread2({type},data):data));}return cue;}// VTTCue latest draft allows an infinite duration, fallback
// to MAX_VALUE if necessary
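// i.e. on runtimes where new VTTCue(0, Infinity, '') throws, Number.MAX_VALUE is used below as the
// open-ended cue end time instead.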
const MAX_CUE_ENDTIME=(()=>{const Cue=getCueClass();try{Cue&&new Cue(0,Number.POSITIVE_INFINITY,'');}catch(e){return Number.MAX_VALUE;}return Number.POSITIVE_INFINITY;})();function dateRangeDateToTimelineSeconds(date,offset){return date.getTime()/1000-offset;}function hexToArrayBuffer(str){return Uint8Array.from(str.replace(/^0x/,'').replace(/([\da-fA-F]{2}) ?/g,'0x$1 ').replace(/ +$/,'').split(' ')).buffer;}class ID3TrackController{constructor(hls){this.hls=void 0;this.id3Track=null;this.media=null;this.dateRangeCuesAppended={};this.hls=hls;this._registerListeners();}destroy(){this._unregisterListeners();this.id3Track=null;this.media=null;this.dateRangeCuesAppended={};// @ts-ignore
this.hls=null;}_registerListeners(){const{hls}=this;hls.on(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.on(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.FRAG_PARSING_METADATA,this.onFragParsingMetadata,this);hls.on(Events.BUFFER_FLUSHING,this.onBufferFlushing,this);hls.on(Events.LEVEL_UPDATED,this.onLevelUpdated,this);}_unregisterListeners(){const{hls}=this;hls.off(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.off(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.FRAG_PARSING_METADATA,this.onFragParsingMetadata,this);hls.off(Events.BUFFER_FLUSHING,this.onBufferFlushing,this);hls.off(Events.LEVEL_UPDATED,this.onLevelUpdated,this);}// Add ID3 metatadata text track.
onMediaAttached(event,data){this.media=data.media;}onMediaDetaching(){if(!this.id3Track){return;}clearCurrentCues(this.id3Track);this.id3Track=null;this.media=null;this.dateRangeCuesAppended={};}onManifestLoading(){this.dateRangeCuesAppended={};}createTrack(media){const track=this.getID3Track(media.textTracks);track.mode='hidden';return track;}getID3Track(textTracks){if(!this.media){return;}for(let i=0;i<textTracks.length;i++){const textTrack=textTracks[i];if(textTrack.kind==='metadata'&&textTrack.label==='id3'){// send 'addtrack' when reusing the textTrack for metadata,
// same as what we do for captions
sendAddTrackEvent(textTrack,this.media);return textTrack;}}return this.media.addTextTrack('metadata','id3');}onFragParsingMetadata(event,data){if(!this.media){return;}const{hls:{config:{enableEmsgMetadataCues,enableID3MetadataCues}}}=this;if(!enableEmsgMetadataCues&&!enableID3MetadataCues){return;}const{samples}=data;// create track dynamically
if(!this.id3Track){this.id3Track=this.createTrack(this.media);}const Cue=getCueClass();if(!Cue){return;}for(let i=0;i<samples.length;i++){const type=samples[i].type;if(type===MetadataSchema.emsg&&!enableEmsgMetadataCues||!enableID3MetadataCues){continue;}const frames=getID3Frames(samples[i].data);if(frames){const startTime=samples[i].pts;let endTime=startTime+samples[i].duration;if(endTime>MAX_CUE_ENDTIME){endTime=MAX_CUE_ENDTIME;}const timeDiff=endTime-startTime;if(timeDiff<=0){endTime=startTime+MIN_CUE_DURATION;}for(let j=0;j<frames.length;j++){const frame=frames[j];// Safari doesn't put the timestamp frame in the TextTrack
if(!isTimeStampFrame(frame)){// add a bounds to any unbounded cues
this.updateId3CueEnds(startTime,type);const cue=createCueWithDataFields(Cue,startTime,endTime,frame,type);if(cue){this.id3Track.addCue(cue);}}}}}}updateId3CueEnds(startTime,type){var _this$id3Track;const cues=(_this$id3Track=this.id3Track)==null?void 0:_this$id3Track.cues;if(cues){for(let i=cues.length;i--;){const cue=cues[i];if(cue.type===type&&cue.startTime<startTime&&cue.endTime===MAX_CUE_ENDTIME){cue.endTime=startTime;}}}}onBufferFlushing(event,{startOffset,endOffset,type}){const{id3Track,hls}=this;if(!hls){return;}const{config:{enableEmsgMetadataCues,enableID3MetadataCues}}=hls;if(id3Track&&(enableEmsgMetadataCues||enableID3MetadataCues)){let predicate;if(type==='audio'){predicate=cue=>cue.type===MetadataSchema.audioId3&&enableID3MetadataCues;}else if(type==='video'){predicate=cue=>cue.type===MetadataSchema.emsg&&enableEmsgMetadataCues;}else {predicate=cue=>cue.type===MetadataSchema.audioId3&&enableID3MetadataCues||cue.type===MetadataSchema.emsg&&enableEmsgMetadataCues;}removeCuesInRange(id3Track,startOffset,endOffset,predicate);}}onLevelUpdated(event,{details}){if(!this.media||!details.hasProgramDateTime||!this.hls.config.enableDateRangeMetadataCues){return;}const{dateRangeCuesAppended,id3Track}=this;const{dateRanges}=details;const ids=Object.keys(dateRanges);// Remove cues from track not found in details.dateRanges
if(id3Track){const idsToRemove=Object.keys(dateRangeCuesAppended).filter(id=>!ids.includes(id));for(let i=idsToRemove.length;i--;){const id=idsToRemove[i];Object.keys(dateRangeCuesAppended[id].cues).forEach(key=>{id3Track.removeCue(dateRangeCuesAppended[id].cues[key]);});delete dateRangeCuesAppended[id];}}// Exit if the playlist does not have Date Ranges or does not have Program Date Time
const lastFragment=details.fragments[details.fragments.length-1];if(ids.length===0||!isFiniteNumber(lastFragment==null?void 0:lastFragment.programDateTime)){return;}if(!this.id3Track){this.id3Track=this.createTrack(this.media);}const dateTimeOffset=lastFragment.programDateTime/1000-lastFragment.start;const Cue=getCueClass();for(let i=0;i<ids.length;i++){const id=ids[i];const dateRange=dateRanges[id];const startTime=dateRangeDateToTimelineSeconds(dateRange.startDate,dateTimeOffset);// Process DateRanges to determine end-time (known DURATION, END-DATE, or END-ON-NEXT)
const appendedDateRangeCues=dateRangeCuesAppended[id];const cues=(appendedDateRangeCues==null?void 0:appendedDateRangeCues.cues)||{};let durationKnown=(appendedDateRangeCues==null?void 0:appendedDateRangeCues.durationKnown)||false;let endTime=MAX_CUE_ENDTIME;const endDate=dateRange.endDate;if(endDate){endTime=dateRangeDateToTimelineSeconds(endDate,dateTimeOffset);durationKnown=true;}else if(dateRange.endOnNext&&!durationKnown){const nextDateRangeWithSameClass=ids.reduce((candidateDateRange,id)=>{if(id!==dateRange.id){const otherDateRange=dateRanges[id];if(otherDateRange.class===dateRange.class&&otherDateRange.startDate>dateRange.startDate&&(!candidateDateRange||dateRange.startDate<candidateDateRange.startDate)){return otherDateRange;}}return candidateDateRange;},null);if(nextDateRangeWithSameClass){endTime=dateRangeDateToTimelineSeconds(nextDateRangeWithSameClass.startDate,dateTimeOffset);durationKnown=true;}}// Create TextTrack Cues for each MetadataGroup Item (select DateRange attribute)
// This is to emulate Safari HLS playback handling of DateRange tags
const attributes=Object.keys(dateRange.attr);for(let j=0;j<attributes.length;j++){const key=attributes[j];if(!isDateRangeCueAttribute(key)){continue;}const cue=cues[key];if(cue){if(durationKnown&&!appendedDateRangeCues.durationKnown){cue.endTime=endTime;}}else if(Cue){let data=dateRange.attr[key];if(isSCTE35Attribute(key)){data=hexToArrayBuffer(data);}const _cue=createCueWithDataFields(Cue,startTime,endTime,{key,data},MetadataSchema.dateRange);if(_cue){_cue.id=id;this.id3Track.addCue(_cue);cues[key]=_cue;}}}// Keep track of processed DateRanges by ID for updating cues with new DateRange tag attributes
dateRangeCuesAppended[id]={cues,dateRange,durationKnown};}}}class LatencyController{constructor(hls){this.hls=void 0;this.config=void 0;this.media=null;this.levelDetails=null;this.currentTime=0;this.stallCount=0;this._latency=null;this.timeupdateHandler=()=>this.timeupdate();this.hls=hls;this.config=hls.config;this.registerListeners();}get latency(){return this._latency||0;}get maxLatency(){const{config,levelDetails}=this;if(config.liveMaxLatencyDuration!==undefined){return config.liveMaxLatencyDuration;}return levelDetails?config.liveMaxLatencyDurationCount*levelDetails.targetduration:0;}get targetLatency(){const{levelDetails}=this;if(levelDetails===null){return null;}const{holdBack,partHoldBack,targetduration}=levelDetails;const{liveSyncDuration,liveSyncDurationCount,lowLatencyMode}=this.config;const userConfig=this.hls.userConfig;let targetLatency=lowLatencyMode?partHoldBack||holdBack:holdBack;if(userConfig.liveSyncDuration||userConfig.liveSyncDurationCount||targetLatency===0){targetLatency=liveSyncDuration!==undefined?liveSyncDuration:liveSyncDurationCount*targetduration;}const maxLiveSyncOnStallIncrease=targetduration;const liveSyncOnStallIncrease=1.0;return targetLatency+Math.min(this.stallCount*liveSyncOnStallIncrease,maxLiveSyncOnStallIncrease);}get liveSyncPosition(){const liveEdge=this.estimateLiveEdge();const targetLatency=this.targetLatency;const levelDetails=this.levelDetails;if(liveEdge===null||targetLatency===null||levelDetails===null){return null;}const edge=levelDetails.edge;const syncPosition=liveEdge-targetLatency-this.edgeStalled;const min=edge-levelDetails.totalduration;const max=edge-(this.config.lowLatencyMode&&levelDetails.partTarget||levelDetails.targetduration);return Math.min(Math.max(min,syncPosition),max);}get drift(){const{levelDetails}=this;if(levelDetails===null){return 1;}return levelDetails.drift;}get edgeStalled(){const{levelDetails}=this;if(levelDetails===null){return 0;}const maxLevelUpdateAge=(this.config.lowLatencyMode&&levelDetails.partTarget||levelDetails.targetduration)*3;return Math.max(levelDetails.age-maxLevelUpdateAge,0);}get forwardBufferLength(){const{media,levelDetails}=this;if(!media||!levelDetails){return 0;}const bufferedRanges=media.buffered.length;return (bufferedRanges?media.buffered.end(bufferedRanges-1):levelDetails.edge)-this.currentTime;}destroy(){this.unregisterListeners();this.onMediaDetaching();this.levelDetails=null;// @ts-ignore
this.hls=this.timeupdateHandler=null;}registerListeners(){this.hls.on(Events.MEDIA_ATTACHED,this.onMediaAttached,this);this.hls.on(Events.MEDIA_DETACHING,this.onMediaDetaching,this);this.hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);this.hls.on(Events.LEVEL_UPDATED,this.onLevelUpdated,this);this.hls.on(Events.ERROR,this.onError,this);}unregisterListeners(){this.hls.off(Events.MEDIA_ATTACHED,this.onMediaAttached,this);this.hls.off(Events.MEDIA_DETACHING,this.onMediaDetaching,this);this.hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);this.hls.off(Events.LEVEL_UPDATED,this.onLevelUpdated,this);this.hls.off(Events.ERROR,this.onError,this);}onMediaAttached(event,data){this.media=data.media;this.media.addEventListener('timeupdate',this.timeupdateHandler);}onMediaDetaching(){if(this.media){this.media.removeEventListener('timeupdate',this.timeupdateHandler);this.media=null;}}onManifestLoading(){this.levelDetails=null;this._latency=null;this.stallCount=0;}onLevelUpdated(event,{details}){this.levelDetails=details;if(details.advanced){this.timeupdate();}if(!details.live&&this.media){this.media.removeEventListener('timeupdate',this.timeupdateHandler);}}onError(event,data){var _this$levelDetails;if(data.details!==ErrorDetails.BUFFER_STALLED_ERROR){return;}this.stallCount++;if((_this$levelDetails=this.levelDetails)!=null&&_this$levelDetails.live){logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency');}}timeupdate(){const{media,levelDetails}=this;if(!media||!levelDetails){return;}this.currentTime=media.currentTime;const latency=this.computeLatency();if(latency===null){return;}this._latency=latency;// Adapt playbackRate to meet target latency in low-latency mode
const{lowLatencyMode,maxLiveSyncPlaybackRate}=this.config;if(!lowLatencyMode||maxLiveSyncPlaybackRate===1||!levelDetails.live){return;}const targetLatency=this.targetLatency;if(targetLatency===null){return;}const distanceFromTarget=latency-targetLatency;// Only adjust playbackRate when within one target duration of targetLatency
// and more than one second from under-buffering.
// Playback further than one target duration from target can be considered DVR playback.
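// Illustrative: the logistic expression below yields a rate of ~1.0 when latency is on target and
// approaches 2.0 as the overshoot grows, before being clamped between 1 and maxLiveSyncPlaybackRate
// (which is itself capped at 2).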
const liveMinLatencyDuration=Math.min(this.maxLatency,targetLatency+levelDetails.targetduration);const inLiveRange=distanceFromTarget<liveMinLatencyDuration;if(inLiveRange&&distanceFromTarget>0.05&&this.forwardBufferLength>1){const max=Math.min(2,Math.max(1.0,maxLiveSyncPlaybackRate));const rate=Math.round(2/(1+Math.exp(-0.75*distanceFromTarget-this.edgeStalled))*20)/20;media.playbackRate=Math.min(max,Math.max(1,rate));}else if(media.playbackRate!==1&&media.playbackRate!==0){media.playbackRate=1;}}estimateLiveEdge(){const{levelDetails}=this;if(levelDetails===null){return null;}return levelDetails.edge+levelDetails.age;}computeLatency(){const liveEdge=this.estimateLiveEdge();if(liveEdge===null){return null;}return liveEdge-this.currentTime;}}const HdcpLevels=['NONE','TYPE-0','TYPE-1',null];function isHdcpLevel(value){return HdcpLevels.indexOf(value)>-1;}const VideoRangeValues=['SDR','PQ','HLG'];function isVideoRange(value){return !!value&&VideoRangeValues.indexOf(value)>-1;}var HlsSkip={No:"",Yes:"YES",v2:"v2"};function getSkipValue(details){const{canSkipUntil,canSkipDateRanges,age}=details;// A Client SHOULD NOT request a Playlist Delta Update unless it already
// has a version of the Playlist that is no older than one-half of the Skip Boundary.
// @see: https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis#section-6.3.7
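// Illustrative example (hypothetical values): with CAN-SKIP-UNTIL=36, a playlist aged 10s is recent
// enough (10 < 36/2), so _HLS_skip=YES is requested (or v2 when CAN-SKIP-DATERANGES is advertised);
// at an age of 20s no skip directive is sent.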
const playlistRecentEnough=age<canSkipUntil/2;if(canSkipUntil&&playlistRecentEnough){if(canSkipDateRanges){return HlsSkip.v2;}return HlsSkip.Yes;}return HlsSkip.No;}class HlsUrlParameters{constructor(msn,part,skip){this.msn=void 0;this.part=void 0;this.skip=void 0;this.msn=msn;this.part=part;this.skip=skip;}addDirectives(uri){const url=new self.URL(uri);if(this.msn!==undefined){url.searchParams.set('_HLS_msn',this.msn.toString());}if(this.part!==undefined){url.searchParams.set('_HLS_part',this.part.toString());}if(this.skip){url.searchParams.set('_HLS_skip',this.skip);}return url.href;}}class Level{constructor(data){this._attrs=void 0;this.audioCodec=void 0;this.bitrate=void 0;this.codecSet=void 0;this.url=void 0;this.frameRate=void 0;this.height=void 0;this.id=void 0;this.name=void 0;this.videoCodec=void 0;this.width=void 0;this.details=void 0;this.fragmentError=0;this.loadError=0;this.loaded=void 0;this.realBitrate=0;this.supportedPromise=void 0;this.supportedResult=void 0;this._avgBitrate=0;this._audioGroups=void 0;this._subtitleGroups=void 0;// Deprecated (retained for backwards compatibility)
this._urlId=0;this.url=[data.url];this._attrs=[data.attrs];this.bitrate=data.bitrate;if(data.details){this.details=data.details;}this.id=data.id||0;this.name=data.name;this.width=data.width||0;this.height=data.height||0;this.frameRate=data.attrs.optionalFloat('FRAME-RATE',0);this._avgBitrate=data.attrs.decimalInteger('AVERAGE-BANDWIDTH');this.audioCodec=data.audioCodec;this.videoCodec=data.videoCodec;this.codecSet=[data.videoCodec,data.audioCodec].filter(c=>!!c).map(s=>s.substring(0,4)).join(',');this.addGroupId('audio',data.attrs.AUDIO);this.addGroupId('text',data.attrs.SUBTITLES);}get maxBitrate(){return Math.max(this.realBitrate,this.bitrate);}get averageBitrate(){return this._avgBitrate||this.realBitrate||this.bitrate;}get attrs(){return this._attrs[0];}get codecs(){return this.attrs.CODECS||'';}get pathwayId(){return this.attrs['PATHWAY-ID']||'.';}get videoRange(){return this.attrs['VIDEO-RANGE']||'SDR';}get score(){return this.attrs.optionalFloat('SCORE',0);}get uri(){return this.url[0]||'';}hasAudioGroup(groupId){return hasGroup(this._audioGroups,groupId);}hasSubtitleGroup(groupId){return hasGroup(this._subtitleGroups,groupId);}get audioGroups(){return this._audioGroups;}get subtitleGroups(){return this._subtitleGroups;}addGroupId(type,groupId){if(!groupId){return;}if(type==='audio'){let audioGroups=this._audioGroups;if(!audioGroups){audioGroups=this._audioGroups=[];}if(audioGroups.indexOf(groupId)===-1){audioGroups.push(groupId);}}else if(type==='text'){let subtitleGroups=this._subtitleGroups;if(!subtitleGroups){subtitleGroups=this._subtitleGroups=[];}if(subtitleGroups.indexOf(groupId)===-1){subtitleGroups.push(groupId);}}}// Deprecated methods (retained for backwards compatibility)
get urlId(){return 0;}set urlId(value){}get audioGroupIds(){return this.audioGroups?[this.audioGroupId]:undefined;}get textGroupIds(){return this.subtitleGroups?[this.textGroupId]:undefined;}get audioGroupId(){var _this$audioGroups;return (_this$audioGroups=this.audioGroups)==null?void 0:_this$audioGroups[0];}get textGroupId(){var _this$subtitleGroups;return (_this$subtitleGroups=this.subtitleGroups)==null?void 0:_this$subtitleGroups[0];}addFallback(){}}function hasGroup(groups,groupId){if(!groupId||!groups){return false;}return groups.indexOf(groupId)!==-1;}function updateFromToPTS(fragFrom,fragTo){const fragToPTS=fragTo.startPTS;// if we know startPTS[toIdx]
if(isFiniteNumber(fragToPTS)){// update fragment duration.
// it helps to fix drift between the playlist-reported duration and the fragment's real duration
let duration=0;let frag;if(fragTo.sn>fragFrom.sn){duration=fragToPTS-fragFrom.start;frag=fragFrom;}else {duration=fragFrom.start-fragToPTS;frag=fragTo;}if(frag.duration!==duration){frag.duration=duration;}// we dont know startPTS[toIdx]
}else if(fragTo.sn>fragFrom.sn){const contiguous=fragFrom.cc===fragTo.cc;// TODO: With part-loading end/durations we need to confirm the whole fragment is loaded before using (or setting) minEndPTS
if(contiguous&&fragFrom.minEndPTS){fragTo.start=fragFrom.start+(fragFrom.minEndPTS-fragFrom.start);}else {fragTo.start=fragFrom.start+fragFrom.duration;}}else {fragTo.start=Math.max(fragFrom.start-fragTo.duration,0);}}function updateFragPTSDTS(details,frag,startPTS,endPTS,startDTS,endDTS){const parsedMediaDuration=endPTS-startPTS;if(parsedMediaDuration<=0){logger.warn('Fragment should have a positive duration',frag);endPTS=startPTS+frag.duration;endDTS=startDTS+frag.duration;}let maxStartPTS=startPTS;let minEndPTS=endPTS;const fragStartPts=frag.startPTS;const fragEndPts=frag.endPTS;if(isFiniteNumber(fragStartPts)){// delta PTS between audio and video
const deltaPTS=Math.abs(fragStartPts-startPTS);if(!isFiniteNumber(frag.deltaPTS)){frag.deltaPTS=deltaPTS;}else {frag.deltaPTS=Math.max(deltaPTS,frag.deltaPTS);}maxStartPTS=Math.max(startPTS,fragStartPts);startPTS=Math.min(startPTS,fragStartPts);startDTS=Math.min(startDTS,frag.startDTS);minEndPTS=Math.min(endPTS,fragEndPts);endPTS=Math.max(endPTS,fragEndPts);endDTS=Math.max(endDTS,frag.endDTS);}const drift=startPTS-frag.start;if(frag.start!==0){frag.start=startPTS;}frag.duration=endPTS-frag.start;frag.startPTS=startPTS;frag.maxStartPTS=maxStartPTS;frag.startDTS=startDTS;frag.endPTS=endPTS;frag.minEndPTS=minEndPTS;frag.endDTS=endDTS;const sn=frag.sn;// 'initSegment'
// exit if sn out of range
if(!details||sn<details.startSN||sn>details.endSN){return 0;}let i;const fragIdx=sn-details.startSN;const fragments=details.fragments;// update frag reference in fragments array
// rationale is that fragments array might not contain this frag object.
// this will happen if playlist has been refreshed between frag loading and call to updateFragPTSDTS()
// if we don't update frag, we won't be able to propagate PTS info on the playlist
// resulting in invalid sliding computation
fragments[fragIdx]=frag;// adjust fragment PTS/duration from seqnum-1 to frag 0
for(i=fragIdx;i>0;i--){updateFromToPTS(fragments[i],fragments[i-1]);}// adjust fragment PTS/duration from seqnum to last frag
for(i=fragIdx;i<fragments.length-1;i++){updateFromToPTS(fragments[i],fragments[i+1]);}if(details.fragmentHint){updateFromToPTS(fragments[fragments.length-1],details.fragmentHint);}details.PTSKnown=details.alignedSliding=true;return drift;}function mergeDetails(oldDetails,newDetails){// Track the last initSegment processed. Initialize it to the last one on the timeline.
let currentInitSegment=null;const oldFragments=oldDetails.fragments;for(let i=oldFragments.length-1;i>=0;i--){const oldInit=oldFragments[i].initSegment;if(oldInit){currentInitSegment=oldInit;break;}}if(oldDetails.fragmentHint){// prevent PTS and duration from being adjusted on the next hint
delete oldDetails.fragmentHint.endPTS;}// check if old/new playlists have fragments in common
// loop through overlapping SN and update startPTS , cc, and duration if any found
let ccOffset=0;let PTSFrag;mapFragmentIntersection(oldDetails,newDetails,(oldFrag,newFrag)=>{if(oldFrag.relurl){// Do not compare CC if the old fragment has no url. This is a level.fragmentHint used by LL-HLS parts.
// It may be off by 1 if it was created before any parts or discontinuity tags were appended to the end
// of the playlist.
ccOffset=oldFrag.cc-newFrag.cc;}if(isFiniteNumber(oldFrag.startPTS)&&isFiniteNumber(oldFrag.endPTS)){newFrag.start=newFrag.startPTS=oldFrag.startPTS;newFrag.startDTS=oldFrag.startDTS;newFrag.maxStartPTS=oldFrag.maxStartPTS;newFrag.endPTS=oldFrag.endPTS;newFrag.endDTS=oldFrag.endDTS;newFrag.minEndPTS=oldFrag.minEndPTS;newFrag.duration=oldFrag.endPTS-oldFrag.startPTS;if(newFrag.duration){PTSFrag=newFrag;}// PTS is known when any segment has startPTS and endPTS
newDetails.PTSKnown=newDetails.alignedSliding=true;}newFrag.elementaryStreams=oldFrag.elementaryStreams;newFrag.loader=oldFrag.loader;newFrag.stats=oldFrag.stats;if(oldFrag.initSegment){newFrag.initSegment=oldFrag.initSegment;currentInitSegment=oldFrag.initSegment;}});if(currentInitSegment){const fragmentsToCheck=newDetails.fragmentHint?newDetails.fragments.concat(newDetails.fragmentHint):newDetails.fragments;fragmentsToCheck.forEach(frag=>{var _currentInitSegment;if(frag&&(!frag.initSegment||frag.initSegment.relurl===((_currentInitSegment=currentInitSegment)==null?void 0:_currentInitSegment.relurl))){frag.initSegment=currentInitSegment;}});}if(newDetails.skippedSegments){newDetails.deltaUpdateFailed=newDetails.fragments.some(frag=>!frag);if(newDetails.deltaUpdateFailed){logger.warn('[level-helper] Previous playlist missing segments skipped in delta playlist');for(let i=newDetails.skippedSegments;i--;){newDetails.fragments.shift();}newDetails.startSN=newDetails.fragments[0].sn;newDetails.startCC=newDetails.fragments[0].cc;}else if(newDetails.canSkipDateRanges){newDetails.dateRanges=mergeDateRanges(oldDetails.dateRanges,newDetails.dateRanges,newDetails.recentlyRemovedDateranges);}}const newFragments=newDetails.fragments;if(ccOffset){logger.warn('discontinuity sliding from playlist, take drift into account');for(let i=0;i<newFragments.length;i++){newFragments[i].cc+=ccOffset;}}if(newDetails.skippedSegments){newDetails.startCC=newDetails.fragments[0].cc;}// Merge parts
mapPartIntersection(oldDetails.partList,newDetails.partList,(oldPart,newPart)=>{newPart.elementaryStreams=oldPart.elementaryStreams;newPart.stats=oldPart.stats;});// if at least one fragment contains PTS info, recompute PTS information for all fragments
if(PTSFrag){updateFragPTSDTS(newDetails,PTSFrag,PTSFrag.startPTS,PTSFrag.endPTS,PTSFrag.startDTS,PTSFrag.endDTS);}else {// ensure that delta is within oldFragments range
// also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61])
// in that case we also need to adjust start offset of all fragments
adjustSliding(oldDetails,newDetails);}if(newFragments.length){newDetails.totalduration=newDetails.edge-newFragments[0].start;}newDetails.driftStartTime=oldDetails.driftStartTime;newDetails.driftStart=oldDetails.driftStart;const advancedDateTime=newDetails.advancedDateTime;if(newDetails.advanced&&advancedDateTime){const edge=newDetails.edge;if(!newDetails.driftStart){newDetails.driftStartTime=advancedDateTime;newDetails.driftStart=edge;}newDetails.driftEndTime=advancedDateTime;newDetails.driftEnd=edge;}else {newDetails.driftEndTime=oldDetails.driftEndTime;newDetails.driftEnd=oldDetails.driftEnd;newDetails.advancedDateTime=oldDetails.advancedDateTime;}}function mergeDateRanges(oldDateRanges,deltaDateRanges,recentlyRemovedDateranges){const dateRanges=_extends({},oldDateRanges);if(recentlyRemovedDateranges){recentlyRemovedDateranges.forEach(id=>{delete dateRanges[id];});}Object.keys(deltaDateRanges).forEach(id=>{const dateRange=new DateRange(deltaDateRanges[id].attr,dateRanges[id]);if(dateRange.isValid){dateRanges[id]=dateRange;}else {logger.warn(`Ignoring invalid Playlist Delta Update DATERANGE tag: "${JSON.stringify(deltaDateRanges[id].attr)}"`);}});return dateRanges;}function mapPartIntersection(oldParts,newParts,intersectionFn){if(oldParts&&newParts){let delta=0;for(let i=0,len=oldParts.length;i<=len;i++){const oldPart=oldParts[i];const newPart=newParts[i+delta];if(oldPart&&newPart&&oldPart.index===newPart.index&&oldPart.fragment.sn===newPart.fragment.sn){intersectionFn(oldPart,newPart);}else {delta--;}}}}function mapFragmentIntersection(oldDetails,newDetails,intersectionFn){const skippedSegments=newDetails.skippedSegments;const start=Math.max(oldDetails.startSN,newDetails.startSN)-newDetails.startSN;const end=(oldDetails.fragmentHint?1:0)+(skippedSegments?newDetails.endSN:Math.min(oldDetails.endSN,newDetails.endSN))-newDetails.startSN;const delta=newDetails.startSN-oldDetails.startSN;const newFrags=newDetails.fragmentHint?newDetails.fragments.concat(newDetails.fragmentHint):newDetails.fragments;const oldFrags=oldDetails.fragmentHint?oldDetails.fragments.concat(oldDetails.fragmentHint):oldDetails.fragments;for(let i=start;i<=end;i++){const oldFrag=oldFrags[delta+i];let newFrag=newFrags[i];if(skippedSegments&&!newFrag&&i<skippedSegments){// Fill in skipped segments in delta playlist
newFrag=newDetails.fragments[i]=oldFrag;}if(oldFrag&&newFrag){intersectionFn(oldFrag,newFrag);}}}function adjustSliding(oldDetails,newDetails){const delta=newDetails.startSN+newDetails.skippedSegments-oldDetails.startSN;const oldFragments=oldDetails.fragments;if(delta<0||delta>=oldFragments.length){return;}addSliding(newDetails,oldFragments[delta].start);}function addSliding(details,start){if(start){const fragments=details.fragments;for(let i=details.skippedSegments;i<fragments.length;i++){fragments[i].start+=start;}if(details.fragmentHint){details.fragmentHint.start+=start;}}}function computeReloadInterval(newDetails,distanceToLiveEdgeMs=Infinity){let reloadInterval=1000*newDetails.targetduration;if(newDetails.updated){// Use last segment duration when shorter than target duration and near live edge
const fragments=newDetails.fragments;const liveEdgeMaxTargetDurations=4;if(fragments.length&&reloadInterval*liveEdgeMaxTargetDurations>distanceToLiveEdgeMs){const lastSegmentDuration=fragments[fragments.length-1].duration*1000;if(lastSegmentDuration<reloadInterval){reloadInterval=lastSegmentDuration;}}}else {// estimate = 'miss half average';
// follow HLS Spec, If the client reloads a Playlist file and finds that it has not
// changed then it MUST wait for a period of one-half the target
// duration before retrying.
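// e.g. (hypothetical) with a targetduration of 6s, an unchanged playlist is re-requested after ~3000 ms.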
reloadInterval/=2;}return Math.round(reloadInterval);}function getFragmentWithSN(level,sn,fragCurrent){if(!(level!=null&&level.details)){return null;}const levelDetails=level.details;let fragment=levelDetails.fragments[sn-levelDetails.startSN];if(fragment){return fragment;}fragment=levelDetails.fragmentHint;if(fragment&&fragment.sn===sn){return fragment;}if(sn<levelDetails.startSN&&fragCurrent&&fragCurrent.sn===sn){return fragCurrent;}return null;}function getPartWith(level,sn,partIndex){var _level$details;if(!(level!=null&&level.details)){return null;}return findPart((_level$details=level.details)==null?void 0:_level$details.partList,sn,partIndex);}function findPart(partList,sn,partIndex){if(partList){for(let i=partList.length;i--;){const part=partList[i];if(part.index===partIndex&&part.fragment.sn===sn){return part;}}}return null;}function reassignFragmentLevelIndexes(levels){levels.forEach((level,index)=>{const{details}=level;if(details!=null&&details.fragments){details.fragments.forEach(fragment=>{fragment.level=index;});}});}function isTimeoutError(error){switch(error.details){case ErrorDetails.FRAG_LOAD_TIMEOUT:case ErrorDetails.KEY_LOAD_TIMEOUT:case ErrorDetails.LEVEL_LOAD_TIMEOUT:case ErrorDetails.MANIFEST_LOAD_TIMEOUT:return true;}return false;}function getRetryConfig(loadPolicy,error){const isTimeout=isTimeoutError(error);return loadPolicy.default[`${isTimeout?'timeout':'error'}Retry`];}function getRetryDelay(retryConfig,retryCount){// exponential backoff capped to max retry delay
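// Illustrative (hypothetical retry policy): retryDelayMs=1000 and maxRetryDelayMs=8000 give delays of
// 1s, 2s, 4s, 8s, 8s, ... on successive retries; backoff:'linear' keeps a constant retryDelayMs instead.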
const backoffFactor=retryConfig.backoff==='linear'?1:Math.pow(2,retryCount);return Math.min(backoffFactor*retryConfig.retryDelayMs,retryConfig.maxRetryDelayMs);}function getLoaderConfigWithoutReties(loderConfig){return _objectSpread2(_objectSpread2({},loderConfig),{errorRetry:null,timeoutRetry:null});}function shouldRetry(retryConfig,retryCount,isTimeout,loaderResponse){if(!retryConfig){return false;}const httpStatus=loaderResponse==null?void 0:loaderResponse.code;const retry=retryCount<retryConfig.maxNumRetry&&(retryForHttpStatus(httpStatus)||!!isTimeout);return retryConfig.shouldRetry?retryConfig.shouldRetry(retryConfig,retryCount,isTimeout,loaderResponse,retry):retry;}function retryForHttpStatus(httpStatus){// Do not retry on status 4xx, status 0 (CORS error), or undefined (decrypt/gap/parse error)
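// e.g. a 503 response is retried, a 404 is not, and status 0 is retried only while navigator.onLine === false.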
return httpStatus===0&&navigator.onLine===false||!!httpStatus&&(httpStatus<400||httpStatus>499);}const BinarySearch={/**
* Searches for an item in an array which matches a certain condition.
* This requires the condition to only match one item in the array,
* and for the array to be ordered.
*
* @param list The array to search.
* @param comparisonFn
* Called and provided a candidate item as the first argument.
* Should return:
* > -1 if the item should be located at a lower index than the provided item.
* > 1 if the item should be located at a higher index than the provided item.
* > 0 if the item is the item you're looking for.
*
* @returns the object if found, otherwise returns null
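 *
 * Illustrative usage (names are hypothetical, comparator shape mirrors callers in this file):
 *   BinarySearch.search(fragments, frag => bufferEnd < frag.start ? -1 : (bufferEnd >= frag.start + frag.duration ? 1 : 0));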
*/search:function(list,comparisonFn){let minIndex=0;let maxIndex=list.length-1;let currentIndex=null;let currentElement=null;while(minIndex<=maxIndex){currentIndex=(minIndex+maxIndex)/2|0;currentElement=list[currentIndex];const comparisonResult=comparisonFn(currentElement);if(comparisonResult>0){minIndex=currentIndex+1;}else if(comparisonResult<0){maxIndex=currentIndex-1;}else {return currentElement;}}return null;}};/**
* Returns first fragment whose endPdt value exceeds the given PDT, or null.
* @param fragments - The array of candidate fragments
* @param PDTValue - The PDT value which must be exceeded
* @param maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous
*/function findFragmentByPDT(fragments,PDTValue,maxFragLookUpTolerance){if(PDTValue===null||!Array.isArray(fragments)||!fragments.length||!isFiniteNumber(PDTValue)){return null;}// if less than start
const startPDT=fragments[0].programDateTime;if(PDTValue<(startPDT||0)){return null;}const endPDT=fragments[fragments.length-1].endProgramDateTime;if(PDTValue>=(endPDT||0)){return null;}maxFragLookUpTolerance=maxFragLookUpTolerance||0;for(let seg=0;seg<fragments.length;++seg){const frag=fragments[seg];if(pdtWithinToleranceTest(PDTValue,maxFragLookUpTolerance,frag)){return frag;}}return null;}/**
* Finds a fragment based on the SN of the previous fragment; or based on the needs of the current buffer.
* This method compensates for small buffer gaps by applying a tolerance to the start of any candidate fragment, thus
* breaking any traps which would cause the same fragment to be continuously selected within a small range.
* @param fragPrevious - The last frag successfully appended
* @param fragments - The array of candidate fragments
* @param bufferEnd - The end of the contiguous buffered range the playhead is currently within
* @param maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous
* @returns a matching fragment or null
*/function findFragmentByPTS(fragPrevious,fragments,bufferEnd=0,maxFragLookUpTolerance=0,nextFragLookupTolerance=0.005){let fragNext=null;if(fragPrevious){fragNext=fragments[fragPrevious.sn-fragments[0].sn+1]||null;// check for buffer-end rounding error
const bufferEdgeError=fragPrevious.endDTS-bufferEnd;if(bufferEdgeError>0&&bufferEdgeError<0.0000015){bufferEnd+=0.0000015;}}else if(bufferEnd===0&&fragments[0].start===0){fragNext=fragments[0];}// Prefer the next fragment if it's within tolerance
if(fragNext&&((!fragPrevious||fragPrevious.level===fragNext.level)&&fragmentWithinToleranceTest(bufferEnd,maxFragLookUpTolerance,fragNext)===0||fragmentWithinFastStartSwitch(fragNext,fragPrevious,Math.min(nextFragLookupTolerance,maxFragLookUpTolerance)))){return fragNext;}// We might be seeking past the tolerance so find the best match
const foundFragment=BinarySearch.search(fragments,fragmentWithinToleranceTest.bind(null,bufferEnd,maxFragLookUpTolerance));if(foundFragment&&(foundFragment!==fragPrevious||!fragNext)){return foundFragment;}// If no match was found return the next fragment after fragPrevious, or null
return fragNext;}function fragmentWithinFastStartSwitch(fragNext,fragPrevious,nextFragLookupTolerance){if(fragPrevious&&fragPrevious.start===0&&fragPrevious.level<fragNext.level&&(fragPrevious.endPTS||0)>0){const firstDuration=fragPrevious.tagList.reduce((duration,tag)=>{if(tag[0]==='INF'){duration+=parseFloat(tag[1]);}return duration;},nextFragLookupTolerance);return fragNext.start<=firstDuration;}return false;}/**
* The test function used by the findFragmentBySn's BinarySearch to look for the best match to the current buffer conditions.
* @param candidate - The fragment to test
* @param bufferEnd - The end of the current buffered range the playhead is currently within
* @param maxFragLookUpTolerance - The amount of time that a fragment's start can be within in order to be considered contiguous
* @returns 0 if it matches, 1 if too low, -1 if too high
*/function fragmentWithinToleranceTest(bufferEnd=0,maxFragLookUpTolerance=0,candidate){// eagerly accept an accurate match (no tolerance)
if(candidate.start<=bufferEnd&&candidate.start+candidate.duration>bufferEnd){return 0;}// offset should be within fragment boundary - config.maxFragLookUpTolerance
// this is to cope with situations like
// bufferEnd = 9.991
// frag[0] : [0,10]
// frag[1] : [10,20]
// bufferEnd is within frag[0] range ... although what we are expecting is to return frag[1] here
// frag start frag start+duration
// |-----------------------------|
// <---> <--->
// ...--------><-----------------------------><---------....
// previous frag matching fragment next frag
// return -1 return 0 return 1
// logger.log(`level/sn/start/end/bufEnd:${level}/${candidate.sn}/${candidate.start}/${(candidate.start+candidate.duration)}/${bufferEnd}`);
// Set the lookup tolerance to be small enough to detect the current segment - ensures we don't skip over very small segments
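// e.g. with bufferEnd = 9.991 and maxFragLookUpTolerance = 0.25 (hypothetical values), frag[0] = [0,10]
// returns 1 (9.991 >= 10 - 0.25) while frag[1] = [10,20] returns 0, so frag[1] is selected as intended above.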
const candidateLookupTolerance=Math.min(maxFragLookUpTolerance,candidate.duration+(candidate.deltaPTS?candidate.deltaPTS:0));if(candidate.start+candidate.duration-candidateLookupTolerance<=bufferEnd){return 1;}else if(candidate.start-candidateLookupTolerance>bufferEnd&&candidate.start){// if maxFragLookUpTolerance will have negative value then don't return -1 for first element
return -1;}return 0;}/**
* The test function used by the findFragmentByPdt's BinarySearch to look for the best match to the current buffer conditions.
* This function tests the candidate's program date time values, as represented in Unix time
* @param candidate - The fragment to test
* @param pdtBufferEnd - The Unix time representing the end of the current buffered range
* @param maxFragLookUpTolerance - The amount of time that a fragment's start can be within in order to be considered contiguous
* @returns true if contiguous, false otherwise
*/function pdtWithinToleranceTest(pdtBufferEnd,maxFragLookUpTolerance,candidate){const candidateLookupTolerance=Math.min(maxFragLookUpTolerance,candidate.duration+(candidate.deltaPTS?candidate.deltaPTS:0))*1000;// endProgramDateTime can be null, default to zero
const endProgramDateTime=candidate.endProgramDateTime||0;return endProgramDateTime-candidateLookupTolerance>pdtBufferEnd;}function findFragWithCC(fragments,cc){return BinarySearch.search(fragments,candidate=>{if(candidate.cc<cc){return 1;}else if(candidate.cc>cc){return -1;}else {return 0;}});}var NetworkErrorAction={DoNothing:0,SendEndCallback:1,SendAlternateToPenaltyBox:2,RemoveAlternatePermanently:3,InsertDiscontinuity:4,RetryRequest:5};var ErrorActionFlags={None:0,MoveAllAlternatesMatchingHost:1,MoveAllAlternatesMatchingHDCP:2,SwitchToSDR:4};// Reserved for future use
class ErrorController{constructor(hls){this.hls=void 0;this.playlistError=0;this.penalizedRenditions={};this.log=void 0;this.warn=void 0;this.error=void 0;this.hls=hls;this.log=logger.log.bind(logger,`[info]:`);this.warn=logger.warn.bind(logger,`[warning]:`);this.error=logger.error.bind(logger,`[error]:`);this.registerListeners();}registerListeners(){const hls=this.hls;hls.on(Events.ERROR,this.onError,this);hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.LEVEL_UPDATED,this.onLevelUpdated,this);}unregisterListeners(){const hls=this.hls;if(!hls){return;}hls.off(Events.ERROR,this.onError,this);hls.off(Events.ERROR,this.onErrorOut,this);hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.LEVEL_UPDATED,this.onLevelUpdated,this);}destroy(){this.unregisterListeners();// @ts-ignore
this.hls=null;this.penalizedRenditions={};}startLoad(startPosition){}stopLoad(){this.playlistError=0;}getVariantLevelIndex(frag){return (frag==null?void 0:frag.type)===PlaylistLevelType.MAIN?frag.level:this.hls.loadLevel;}onManifestLoading(){this.playlistError=0;this.penalizedRenditions={};}onLevelUpdated(){this.playlistError=0;}onError(event,data){var _data$frag,_data$level;if(data.fatal){return;}const hls=this.hls;const context=data.context;switch(data.details){case ErrorDetails.FRAG_LOAD_ERROR:case ErrorDetails.FRAG_LOAD_TIMEOUT:case ErrorDetails.KEY_LOAD_ERROR:case ErrorDetails.KEY_LOAD_TIMEOUT:data.errorAction=this.getFragRetryOrSwitchAction(data);return;case ErrorDetails.FRAG_PARSING_ERROR:// ignore empty segment errors marked as gap
if((_data$frag=data.frag)!=null&&_data$frag.gap){data.errorAction={action:NetworkErrorAction.DoNothing,flags:ErrorActionFlags.None};return;}// falls through
case ErrorDetails.FRAG_GAP:case ErrorDetails.FRAG_DECRYPT_ERROR:{// Switch level if possible, otherwise allow retry count to reach max error retries
data.errorAction=this.getFragRetryOrSwitchAction(data);data.errorAction.action=NetworkErrorAction.SendAlternateToPenaltyBox;return;}case ErrorDetails.LEVEL_EMPTY_ERROR:case ErrorDetails.LEVEL_PARSING_ERROR:{var _data$context,_data$context$levelDe;// Only retry when empty and live
const levelIndex=data.parent===PlaylistLevelType.MAIN?data.level:hls.loadLevel;if(data.details===ErrorDetails.LEVEL_EMPTY_ERROR&&!!((_data$context=data.context)!=null&&(_data$context$levelDe=_data$context.levelDetails)!=null&&_data$context$levelDe.live)){data.errorAction=this.getPlaylistRetryOrSwitchAction(data,levelIndex);}else {// Escalate to fatal if not retrying or switching
data.levelRetry=false;data.errorAction=this.getLevelSwitchAction(data,levelIndex);}}return;case ErrorDetails.LEVEL_LOAD_ERROR:case ErrorDetails.LEVEL_LOAD_TIMEOUT:if(typeof(context==null?void 0:context.level)==='number'){data.errorAction=this.getPlaylistRetryOrSwitchAction(data,context.level);}return;case ErrorDetails.AUDIO_TRACK_LOAD_ERROR:case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:case ErrorDetails.SUBTITLE_LOAD_ERROR:case ErrorDetails.SUBTITLE_TRACK_LOAD_TIMEOUT:if(context){const level=hls.levels[hls.loadLevel];if(level&&(context.type===PlaylistContextType.AUDIO_TRACK&&level.hasAudioGroup(context.groupId)||context.type===PlaylistContextType.SUBTITLE_TRACK&&level.hasSubtitleGroup(context.groupId))){// Perform Pathway switch or Redundant failover if possible for fastest recovery
// otherwise allow playlist retry count to reach max error retries
data.errorAction=this.getPlaylistRetryOrSwitchAction(data,hls.loadLevel);data.errorAction.action=NetworkErrorAction.SendAlternateToPenaltyBox;data.errorAction.flags=ErrorActionFlags.MoveAllAlternatesMatchingHost;return;}}return;case ErrorDetails.KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED:{const level=hls.levels[hls.loadLevel];const restrictedHdcpLevel=level==null?void 0:level.attrs['HDCP-LEVEL'];if(restrictedHdcpLevel){data.errorAction={action:NetworkErrorAction.SendAlternateToPenaltyBox,flags:ErrorActionFlags.MoveAllAlternatesMatchingHDCP,hdcpLevel:restrictedHdcpLevel};}else {this.keySystemError(data);}}return;case ErrorDetails.BUFFER_ADD_CODEC_ERROR:case ErrorDetails.REMUX_ALLOC_ERROR:case ErrorDetails.BUFFER_APPEND_ERROR:data.errorAction=this.getLevelSwitchAction(data,(_data$level=data.level)!=null?_data$level:hls.loadLevel);return;case ErrorDetails.INTERNAL_EXCEPTION:case ErrorDetails.BUFFER_APPENDING_ERROR:case ErrorDetails.BUFFER_FULL_ERROR:case ErrorDetails.LEVEL_SWITCH_ERROR:case ErrorDetails.BUFFER_STALLED_ERROR:case ErrorDetails.BUFFER_SEEK_OVER_HOLE:case ErrorDetails.BUFFER_NUDGE_ON_STALL:data.errorAction={action:NetworkErrorAction.DoNothing,flags:ErrorActionFlags.None};return;}if(data.type===ErrorTypes.KEY_SYSTEM_ERROR){this.keySystemError(data);}}keySystemError(data){const levelIndex=this.getVariantLevelIndex(data.frag);// Do not retry level. Escalate to fatal if switching levels fails.
data.levelRetry=false;data.errorAction=this.getLevelSwitchAction(data,levelIndex);}getPlaylistRetryOrSwitchAction(data,levelIndex){const hls=this.hls;const retryConfig=getRetryConfig(hls.config.playlistLoadPolicy,data);const retryCount=this.playlistError++;const retry=shouldRetry(retryConfig,retryCount,isTimeoutError(data),data.response);if(retry){return {action:NetworkErrorAction.RetryRequest,flags:ErrorActionFlags.None,retryConfig,retryCount};}const errorAction=this.getLevelSwitchAction(data,levelIndex);if(retryConfig){errorAction.retryConfig=retryConfig;errorAction.retryCount=retryCount;}return errorAction;}getFragRetryOrSwitchAction(data){const hls=this.hls;// Share fragment error count accross media options (main, audio, subs)
// This allows for level based rendition switching when media option assets fail
const variantLevelIndex=this.getVariantLevelIndex(data.frag);const level=hls.levels[variantLevelIndex];const{fragLoadPolicy,keyLoadPolicy}=hls.config;const retryConfig=getRetryConfig(data.details.startsWith('key')?keyLoadPolicy:fragLoadPolicy,data);const fragmentErrors=hls.levels.reduce((acc,level)=>acc+level.fragmentError,0);// Switch levels when out of retried or level index out of bounds
if(level){if(data.details!==ErrorDetails.FRAG_GAP){level.fragmentError++;}const retry=shouldRetry(retryConfig,fragmentErrors,isTimeoutError(data),data.response);if(retry){return {action:NetworkErrorAction.RetryRequest,flags:ErrorActionFlags.None,retryConfig,retryCount:fragmentErrors};}}// Reach max retry count, or Missing level reference
// Switch to valid index
const errorAction=this.getLevelSwitchAction(data,variantLevelIndex);// Add retry details to allow skipping of FRAG_PARSING_ERROR
if(retryConfig){errorAction.retryConfig=retryConfig;errorAction.retryCount=fragmentErrors;}return errorAction;}getLevelSwitchAction(data,levelIndex){const hls=this.hls;if(levelIndex===null||levelIndex===undefined){levelIndex=hls.loadLevel;}const level=this.hls.levels[levelIndex];if(level){var _data$frag2,_data$context2;const errorDetails=data.details;level.loadError++;if(errorDetails===ErrorDetails.BUFFER_APPEND_ERROR){level.fragmentError++;}// Search for next level to retry
let nextLevel=-1;const{levels,loadLevel,minAutoLevel,maxAutoLevel}=hls;if(!hls.autoLevelEnabled){hls.loadLevel=-1;}const fragErrorType=(_data$frag2=data.frag)==null?void 0:_data$frag2.type;// Find alternate audio codec if available on audio codec error
const isAudioCodecError=fragErrorType===PlaylistLevelType.AUDIO&&errorDetails===ErrorDetails.FRAG_PARSING_ERROR||data.sourceBufferName==='audio'&&(errorDetails===ErrorDetails.BUFFER_ADD_CODEC_ERROR||errorDetails===ErrorDetails.BUFFER_APPEND_ERROR);const findAudioCodecAlternate=isAudioCodecError&&levels.some(({audioCodec})=>level.audioCodec!==audioCodec);// Find alternate video codec if available on video codec error
const isVideoCodecError=data.sourceBufferName==='video'&&(errorDetails===ErrorDetails.BUFFER_ADD_CODEC_ERROR||errorDetails===ErrorDetails.BUFFER_APPEND_ERROR);const findVideoCodecAlternate=isVideoCodecError&&levels.some(({codecSet,audioCodec})=>level.codecSet!==codecSet&&level.audioCodec===audioCodec);const{type:playlistErrorType,groupId:playlistErrorGroupId}=(_data$context2=data.context)!=null?_data$context2:{};for(let i=levels.length;i--;){const candidate=(i+loadLevel)%levels.length;if(candidate!==loadLevel&&candidate>=minAutoLevel&&candidate<=maxAutoLevel&&levels[candidate].loadError===0){var _level$audioGroups,_level$subtitleGroups;const levelCandidate=levels[candidate];// Skip level switch if GAP tag is found in next level at same position
if(errorDetails===ErrorDetails.FRAG_GAP&&fragErrorType===PlaylistLevelType.MAIN&&data.frag){const levelDetails=levels[candidate].details;if(levelDetails){const fragCandidate=findFragmentByPTS(data.frag,levelDetails.fragments,data.frag.start);if(fragCandidate!=null&&fragCandidate.gap){continue;}}}else if(playlistErrorType===PlaylistContextType.AUDIO_TRACK&&levelCandidate.hasAudioGroup(playlistErrorGroupId)||playlistErrorType===PlaylistContextType.SUBTITLE_TRACK&&levelCandidate.hasSubtitleGroup(playlistErrorGroupId)){// For audio/subs playlist errors find another group ID or fallthrough to redundant fail-over
continue;}else if(fragErrorType===PlaylistLevelType.AUDIO&&(_level$audioGroups=level.audioGroups)!=null&&_level$audioGroups.some(groupId=>levelCandidate.hasAudioGroup(groupId))||fragErrorType===PlaylistLevelType.SUBTITLE&&(_level$subtitleGroups=level.subtitleGroups)!=null&&_level$subtitleGroups.some(groupId=>levelCandidate.hasSubtitleGroup(groupId))||findAudioCodecAlternate&&level.audioCodec===levelCandidate.audioCodec||!findAudioCodecAlternate&&level.audioCodec!==levelCandidate.audioCodec||findVideoCodecAlternate&&level.codecSet===levelCandidate.codecSet){// For video/audio/subs frag errors find another group ID or fallthrough to redundant fail-over
continue;}nextLevel=candidate;break;}}if(nextLevel>-1&&hls.loadLevel!==nextLevel){data.levelRetry=true;this.playlistError=0;return {action:NetworkErrorAction.SendAlternateToPenaltyBox,flags:ErrorActionFlags.None,nextAutoLevel:nextLevel};}}// No levels to switch / Manual level selection / Level not found
// Resolve with Pathway switch, Redundant fail-over, or stay on lowest Level
return {action:NetworkErrorAction.SendAlternateToPenaltyBox,flags:ErrorActionFlags.MoveAllAlternatesMatchingHost};}onErrorOut(event,data){var _data$errorAction;switch((_data$errorAction=data.errorAction)==null?void 0:_data$errorAction.action){case NetworkErrorAction.DoNothing:break;case NetworkErrorAction.SendAlternateToPenaltyBox:this.sendAlternateToPenaltyBox(data);if(!data.errorAction.resolved&&data.details!==ErrorDetails.FRAG_GAP){data.fatal=true;}else if(/MediaSource readyState: ended/.test(data.error.message)){this.warn(`MediaSource ended after "${data.sourceBufferName}" sourceBuffer append error. Attempting to recover from media error.`);this.hls.recoverMediaError();}break;case NetworkErrorAction.RetryRequest:// handled by stream and playlist/level controllers
break;}if(data.fatal){this.hls.stopLoad();return;}}sendAlternateToPenaltyBox(data){const hls=this.hls;const errorAction=data.errorAction;if(!errorAction){return;}const{flags,hdcpLevel,nextAutoLevel}=errorAction;switch(flags){case ErrorActionFlags.None:this.switchLevel(data,nextAutoLevel);break;case ErrorActionFlags.MoveAllAlternatesMatchingHDCP:if(hdcpLevel){hls.maxHdcpLevel=HdcpLevels[HdcpLevels.indexOf(hdcpLevel)-1];errorAction.resolved=true;}this.warn(`Restricting playback to HDCP-LEVEL of "${hls.maxHdcpLevel}" or lower`);break;}// If not resolved by previous actions try to switch to next level
if(!errorAction.resolved){this.switchLevel(data,nextAutoLevel);}}switchLevel(data,levelIndex){if(levelIndex!==undefined&&data.errorAction){this.warn(`switching to level ${levelIndex} after ${data.details}`);this.hls.nextAutoLevel=levelIndex;data.errorAction.resolved=true;// Stream controller is responsible for this but won't switch on false start
this.hls.nextLoadLevel=this.hls.nextAutoLevel;}}}class BasePlaylistController{constructor(hls,logPrefix){this.hls=void 0;this.timer=-1;this.requestScheduled=-1;this.canLoad=false;this.log=void 0;this.warn=void 0;this.log=logger.log.bind(logger,`${logPrefix}:`);this.warn=logger.warn.bind(logger,`${logPrefix}:`);this.hls=hls;}destroy(){this.clearTimer();// @ts-ignore
this.hls=this.log=this.warn=null;}clearTimer(){if(this.timer!==-1){self.clearTimeout(this.timer);this.timer=-1;}}startLoad(){this.canLoad=true;this.requestScheduled=-1;this.loadPlaylist();}stopLoad(){this.canLoad=false;this.clearTimer();}switchParams(playlistUri,previous,current){const renditionReports=previous==null?void 0:previous.renditionReports;if(renditionReports){let foundIndex=-1;for(let i=0;i<renditionReports.length;i++){const attr=renditionReports[i];let uri;try{uri=new self.URL(attr.URI,previous.url).href;}catch(error){logger.warn(`Could not construct new URL for Rendition Report: ${error}`);uri=attr.URI||'';}// Use exact match. Otherwise, the last partial match, if any, will be used
// (Playlist URI includes a query string that the Rendition Report does not)
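// e.g. (hypothetical URIs) a report URI resolving to ".../lo.m3u8" still matches a playlist URI of
// ".../lo.m3u8?token=abc" via the prefix comparison below.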
if(uri===playlistUri){foundIndex=i;break;}else if(uri===playlistUri.substring(0,uri.length)){foundIndex=i;}}if(foundIndex!==-1){const attr=renditionReports[foundIndex];const msn=parseInt(attr['LAST-MSN'])||(previous==null?void 0:previous.lastPartSn);let part=parseInt(attr['LAST-PART'])||(previous==null?void 0:previous.lastPartIndex);if(this.hls.config.lowLatencyMode){const currentGoal=Math.min(previous.age-previous.partTarget,previous.targetduration);if(part>=0&¤tGoal>previous.partTarget){part+=1;}}const skip=current&&getSkipValue(current);return new HlsUrlParameters(msn,part>=0?part:undefined,skip);}}}loadPlaylist(hlsUrlParameters){if(this.requestScheduled===-1){this.requestScheduled=self.performance.now();}// Loading is handled by the subclasses
}shouldLoadPlaylist(playlist){return this.canLoad&&!!playlist&&!!playlist.url&&(!playlist.details||playlist.details.live);}shouldReloadPlaylist(playlist){return this.timer===-1&&this.requestScheduled===-1&&this.shouldLoadPlaylist(playlist);}playlistLoaded(index,data,previousDetails){const{details,stats}=data;// Set last updated date-time
const now=self.performance.now();const elapsed=stats.loading.first?Math.max(0,now-stats.loading.first):0;details.advancedDateTime=Date.now()-elapsed;// if current playlist is a live playlist, arm a timer to reload it
if(details.live||previousDetails!=null&&previousDetails.live){details.reloaded(previousDetails);if(previousDetails){this.log(`live playlist ${index} ${details.advanced?'REFRESHED '+details.lastPartSn+'-'+details.lastPartIndex:details.updated?'UPDATED':'MISSED'}`);}// Merge live playlists to adjust fragment starts and fill in delta playlist skipped segments
if(previousDetails&&details.fragments.length>0){mergeDetails(previousDetails,details);}if(!this.canLoad||!details.live){return;}let deliveryDirectives;let msn=undefined;let part=undefined;if(details.canBlockReload&&details.endSN&&details.advanced){// Load level with LL-HLS delivery directives
const lowLatencyMode=this.hls.config.lowLatencyMode;const lastPartSn=details.lastPartSn;const endSn=details.endSN;const lastPartIndex=details.lastPartIndex;const hasParts=lastPartIndex!==-1;const lastPart=lastPartSn===endSn;// When low latency mode is disabled, we'll skip part requests once the last part index is found
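// Illustrative (hypothetical values): with endSN=100, lastPartSn=100 and lastPartIndex=2, the next
// blocking request asks for _HLS_msn=101 and, in low-latency mode, _HLS_part=0.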
const nextSnStartIndex=lowLatencyMode?0:lastPartIndex;if(hasParts){msn=lastPart?endSn+1:lastPartSn;part=lastPart?nextSnStartIndex:lastPartIndex+1;}else {msn=endSn+1;}// Low-Latency CDN Tune-in: "age" header and time since load indicates we're behind by more than one part
// Update directives to obtain the Playlist that has the estimated additional duration of media
const lastAdvanced=details.age;const cdnAge=lastAdvanced+details.ageHeader;let currentGoal=Math.min(cdnAge-details.partTarget,details.targetduration*1.5);if(currentGoal>0){if(previousDetails&¤tGoal>previousDetails.tuneInGoal){// If we attempted to get the next or latest playlist update, but currentGoal increased,
// then we either can't catch up, or the "age" header cannot be trusted.
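// Worked example of the tune-in math below (illustrative numbers): with targetduration=4s,
// partTarget=1s, age=2s and an Age header of 5s, currentGoal = min(2+5-1, 4*1.5) = 6s,
// so the request skips floor(6/4)=1 whole segment and round((6%4)/1)=2 additional parts
// beyond the last reported MSN/part.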
this.warn(`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`);currentGoal=0;}else {const segments=Math.floor(currentGoal/details.targetduration);msn+=segments;if(part!==undefined){const parts=Math.round(currentGoal%details.targetduration/details.partTarget);part+=parts;}this.log(`CDN Tune-in age: ${details.ageHeader}s last advanced ${lastAdvanced.toFixed(2)}s goal: ${currentGoal} skip sn ${segments} to part ${part}`);}details.tuneInGoal=currentGoal;}deliveryDirectives=this.getDeliveryDirectives(details,data.deliveryDirectives,msn,part);if(lowLatencyMode||!lastPart){this.loadPlaylist(deliveryDirectives);return;}}else if(details.canBlockReload||details.canSkipUntil){deliveryDirectives=this.getDeliveryDirectives(details,data.deliveryDirectives,msn,part);}const bufferInfo=this.hls.mainForwardBufferInfo;const position=bufferInfo?bufferInfo.end-bufferInfo.len:0;const distanceToLiveEdgeMs=(details.edge-position)*1000;const reloadInterval=computeReloadInterval(details,distanceToLiveEdgeMs);if(details.updated&&now>this.requestScheduled+reloadInterval){this.requestScheduled=stats.loading.start;}if(msn!==undefined&&details.canBlockReload){this.requestScheduled=stats.loading.first+reloadInterval-(details.partTarget*1000||1000);}else if(this.requestScheduled===-1||this.requestScheduled+reloadInterval<now){this.requestScheduled=now;}else if(this.requestScheduled-now<=0){this.requestScheduled+=reloadInterval;}let estimatedTimeUntilUpdate=this.requestScheduled-now;estimatedTimeUntilUpdate=Math.max(0,estimatedTimeUntilUpdate);this.log(`reload live playlist ${index} in ${Math.round(estimatedTimeUntilUpdate)} ms`);// this.log(
// `live reload ${details.updated ? 'REFRESHED' : 'MISSED'}
// reload in ${estimatedTimeUntilUpdate / 1000}
// round trip ${(stats.loading.end - stats.loading.start) / 1000}
// diff ${
// (reloadInterval -
// (estimatedTimeUntilUpdate +
// stats.loading.end -
// stats.loading.start)) /
// 1000
// }
// reload interval ${reloadInterval / 1000}
// target duration ${details.targetduration}
// distance to edge ${distanceToLiveEdgeMs / 1000}`
// );
this.timer=self.setTimeout(()=>this.loadPlaylist(deliveryDirectives),estimatedTimeUntilUpdate);}else {this.clearTimer();}}getDeliveryDirectives(details,previousDeliveryDirectives,msn,part){let skip=getSkipValue(details);if(previousDeliveryDirectives!=null&&previousDeliveryDirectives.skip&&details.deltaUpdateFailed){msn=previousDeliveryDirectives.msn;part=previousDeliveryDirectives.part;skip=HlsSkip.No;}return new HlsUrlParameters(msn,part,skip);}checkRetry(errorEvent){const errorDetails=errorEvent.details;const isTimeout=isTimeoutError(errorEvent);const errorAction=errorEvent.errorAction;const{action,retryCount=0,retryConfig}=errorAction||{};const retry=!!errorAction&&!!retryConfig&&(action===NetworkErrorAction.RetryRequest||!errorAction.resolved&&action===NetworkErrorAction.SendAlternateToPenaltyBox);if(retry){var _errorEvent$context;this.requestScheduled=-1;if(retryCount>=retryConfig.maxNumRetry){return false;}if(isTimeout&&(_errorEvent$context=errorEvent.context)!=null&&_errorEvent$context.deliveryDirectives){// The LL-HLS request already timed out so retry immediately
this.warn(`Retrying playlist loading ${retryCount+1}/${retryConfig.maxNumRetry} after "${errorDetails}" without delivery-directives`);this.loadPlaylist();}else {const delay=getRetryDelay(retryConfig,retryCount);// Schedule level/track reload
this.timer=self.setTimeout(()=>this.loadPlaylist(),delay);this.warn(`Retrying playlist loading ${retryCount+1}/${retryConfig.maxNumRetry} after "${errorDetails}" in ${delay}ms`);}// `levelRetry = true` used to inform other controllers that a retry is happening
errorEvent.levelRetry=true;errorAction.resolved=true;}return retry;}}/*
* compute an Exponential Weighted moving average
* - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
* - heavily inspired from shaka-player
*/class EWMA{// About half of the estimated value will be from the last |halfLife| samples by weight.
constructor(halfLife,estimate=0,weight=0){this.halfLife=void 0;this.alpha_=void 0;this.estimate_=void 0;this.totalWeight_=void 0;this.halfLife=halfLife;// Larger values of alpha expire historical data more slowly.
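// In other words alpha = 2^(-1/halfLife): a sample of weight w contributes (1 - alpha^w)
// to the updated estimate. Illustrative numbers: halfLife=4 gives alpha ≈ 0.8409, so a
// 1-second sample carries ~15.9% of the new estimate, and getEstimate() divides by
// (1 - alpha^totalWeight) to correct the bias from starting the estimate at zero.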
this.alpha_=halfLife?Math.exp(Math.log(0.5)/halfLife):0;this.estimate_=estimate;this.totalWeight_=weight;}sample(weight,value){const adjAlpha=Math.pow(this.alpha_,weight);this.estimate_=value*(1-adjAlpha)+adjAlpha*this.estimate_;this.totalWeight_+=weight;}getTotalWeight(){return this.totalWeight_;}getEstimate(){if(this.alpha_){const zeroFactor=1-Math.pow(this.alpha_,this.totalWeight_);if(zeroFactor){return this.estimate_/zeroFactor;}}return this.estimate_;}}/*
* EWMA Bandwidth Estimator
* - heavily inspired from shaka-player
* Tracks bandwidth samples and estimates available bandwidth.
* Based on the minimum of two exponentially-weighted moving averages with
* different half-lives.
*/class EwmaBandWidthEstimator{constructor(slow,fast,defaultEstimate,defaultTTFB=100){this.defaultEstimate_=void 0;this.minWeight_=void 0;this.minDelayMs_=void 0;this.slow_=void 0;this.fast_=void 0;this.defaultTTFB_=void 0;this.ttfb_=void 0;this.defaultEstimate_=defaultEstimate;this.minWeight_=0.001;this.minDelayMs_=50;this.slow_=new EWMA(slow);this.fast_=new EWMA(fast);this.defaultTTFB_=defaultTTFB;this.ttfb_=new EWMA(slow);}update(slow,fast){const{slow_,fast_,ttfb_}=this;if(slow_.halfLife!==slow){this.slow_=new EWMA(slow,slow_.getEstimate(),slow_.getTotalWeight());}if(fast_.halfLife!==fast){this.fast_=new EWMA(fast,fast_.getEstimate(),fast_.getTotalWeight());}if(ttfb_.halfLife!==slow){this.ttfb_=new EWMA(slow,ttfb_.getEstimate(),ttfb_.getTotalWeight());}}sample(durationMs,numBytes){durationMs=Math.max(durationMs,this.minDelayMs_);const numBits=8*numBytes;// weight is duration in seconds
const durationS=durationMs/1000;// value is bandwidth in bits/s
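// e.g. (illustrative) a 500 kB fragment loaded in 1 s samples 8*500000/1 = 4 Mbps into
// both EWMAs, weighted by that 1 s duration; durations below minDelayMs_ (50 ms) are
// clamped so tiny fragments cannot produce wildly inflated rate samples.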
const bandwidthInBps=numBits/durationS;this.fast_.sample(durationS,bandwidthInBps);this.slow_.sample(durationS,bandwidthInBps);}sampleTTFB(ttfb){// weight is frequency curve applied to TTFB in seconds
// (longer times have less weight with expected input under 1 second)
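// The weight below follows a Gaussian-shaped curve, weight = sqrt(2) * e^(-t^2/2):
// roughly 1.41 at t=0s, 0.86 at t=1s and 0.19 at t=2s, so slow first bytes still register
// but barely move the TTFB average (the sampled value itself is floored at 5 ms).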
const seconds=ttfb/1000;const weight=Math.sqrt(2)*Math.exp(-Math.pow(seconds,2)/2);this.ttfb_.sample(weight,Math.max(ttfb,5));}canEstimate(){return this.fast_.getTotalWeight()>=this.minWeight_;}getEstimate(){if(this.canEstimate()){// console.log('slow estimate:'+ Math.round(this.slow_.getEstimate()));
// console.log('fast estimate:'+ Math.round(this.fast_.getEstimate()));
// Take the minimum of these two estimates. This should have the effect of
// adapting down quickly, but up more slowly.
return Math.min(this.fast_.getEstimate(),this.slow_.getEstimate());}else {return this.defaultEstimate_;}}getEstimateTTFB(){if(this.ttfb_.getTotalWeight()>=this.minWeight_){return this.ttfb_.getEstimate();}else {return this.defaultTTFB_;}}destroy(){}}const SUPPORTED_INFO_DEFAULT={supported:true,configurations:[],decodingInfoResults:[{supported:true,powerEfficient:true,smooth:true}]};const SUPPORTED_INFO_CACHE={};function requiresMediaCapabilitiesDecodingInfo(level,audioTracksByGroup,currentVideoRange,currentFrameRate,currentBw,audioPreference){// Only test support when the configuration exceeds minimum options
const audioGroups=level.audioCodec?level.audioGroups:null;const audioCodecPreference=audioPreference==null?void 0:audioPreference.audioCodec;const channelsPreference=audioPreference==null?void 0:audioPreference.channels;const maxChannels=channelsPreference?parseInt(channelsPreference):audioCodecPreference?Infinity:2;let audioChannels=null;if(audioGroups!=null&&audioGroups.length){try{if(audioGroups.length===1&&audioGroups[0]){audioChannels=audioTracksByGroup.groups[audioGroups[0]].channels;}else {audioChannels=audioGroups.reduce((acc,groupId)=>{if(groupId){const audioTrackGroup=audioTracksByGroup.groups[groupId];if(!audioTrackGroup){throw new Error(`Audio track group ${groupId} not found`);}// Sum all channel key values
Object.keys(audioTrackGroup.channels).forEach(key=>{acc[key]=(acc[key]||0)+audioTrackGroup.channels[key];});}return acc;},{2:0});}}catch(error){return true;}}return level.videoCodec!==undefined&&(level.width>1920&&level.height>1088||level.height>1920&&level.width>1088||level.frameRate>Math.max(currentFrameRate,30)||level.videoRange!=='SDR'&&level.videoRange!==currentVideoRange||level.bitrate>Math.max(currentBw,8e6))||!!audioChannels&&isFiniteNumber(maxChannels)&&Object.keys(audioChannels).some(channels=>parseInt(channels)>maxChannels);}function getMediaDecodingInfoPromise(level,audioTracksByGroup,mediaCapabilities){const videoCodecs=level.videoCodec;const audioCodecs=level.audioCodec;if(!videoCodecs||!audioCodecs||!mediaCapabilities){return Promise.resolve(SUPPORTED_INFO_DEFAULT);}const baseVideoConfiguration={width:level.width,height:level.height,bitrate:Math.ceil(Math.max(level.bitrate*0.9,level.averageBitrate)),// Assume a framerate of 30fps since MediaCapabilities will not accept Level default of 0.
framerate:level.frameRate||30};const videoRange=level.videoRange;if(videoRange!=='SDR'){baseVideoConfiguration.transferFunction=videoRange.toLowerCase();}const configurations=videoCodecs.split(',').map(videoCodec=>({type:'media-source',video:_objectSpread2(_objectSpread2({},baseVideoConfiguration),{},{contentType:mimeTypeForCodec(videoCodec,'video')})}));if(audioCodecs&&level.audioGroups){level.audioGroups.forEach(audioGroupId=>{var _audioTracksByGroup$g;if(!audioGroupId){return;}(_audioTracksByGroup$g=audioTracksByGroup.groups[audioGroupId])==null?void 0:_audioTracksByGroup$g.tracks.forEach(audioTrack=>{if(audioTrack.groupId===audioGroupId){const channels=audioTrack.channels||'';const channelsNumber=parseFloat(channels);if(isFiniteNumber(channelsNumber)&&channelsNumber>2){configurations.push.apply(configurations,audioCodecs.split(',').map(audioCodec=>({type:'media-source',audio:{contentType:mimeTypeForCodec(audioCodec,'audio'),channels:''+channelsNumber// spatialRendering:
// audioCodec === 'ec-3' && channels.indexOf('JOC'),
}})));}}});});}return Promise.all(configurations.map(configuration=>{// Cache MediaCapabilities promises
const decodingInfoKey=getMediaDecodingInfoKey(configuration);return SUPPORTED_INFO_CACHE[decodingInfoKey]||(SUPPORTED_INFO_CACHE[decodingInfoKey]=mediaCapabilities.decodingInfo(configuration));})).then(decodingInfoResults=>({supported:!decodingInfoResults.some(info=>!info.supported),configurations,decodingInfoResults})).catch(error=>({supported:false,configurations,decodingInfoResults:[],error}));}function getMediaDecodingInfoKey(config){const{audio,video}=config;const mediaConfig=video||audio;if(mediaConfig){const codec=mediaConfig.contentType.split('"')[1];if(video){return `r${video.height}x${video.width}f${Math.ceil(video.framerate)}${video.transferFunction||'sd'}_${codec}_${Math.ceil(video.bitrate/1e5)}`;}if(audio){return `c${audio.channels}${audio.spatialRendering?'s':'n'}_${codec}`;}}return '';}/**
* @returns Whether we can detect and validate HDR capability within the window context
*/function isHdrSupported(){if(typeof matchMedia==='function'){const mediaQueryList=matchMedia('(dynamic-range: high)');const badQuery=matchMedia('bad query');if(mediaQueryList.media!==badQuery.media){return mediaQueryList.matches===true;}}return false;}/**
* Sanitizes inputs to return the active video selection options for HDR/SDR.
* When both inputs are null:
*
* `{ preferHDR: false, allowedVideoRanges: [] }`
*
* When `currentVideoRange` non-null, maintain the active range:
*
* `{ preferHDR: currentVideoRange !== 'SDR', allowedVideoRanges: [currentVideoRange] }`
*
* When VideoSelectionOption non-null:
*
* - Allow all video ranges if `allowedVideoRanges` unspecified.
* - If `preferHDR` is non-null use the value to filter `allowedVideoRanges`.
* - Else check window for HDR support and set `preferHDR` to the result.
*
* @param currentVideoRange
* @param videoPreference
*/function getVideoSelectionOptions(currentVideoRange,videoPreference){let preferHDR=false;let allowedVideoRanges=[];if(currentVideoRange){preferHDR=currentVideoRange!=='SDR';allowedVideoRanges=[currentVideoRange];}if(videoPreference){allowedVideoRanges=videoPreference.allowedVideoRanges||VideoRangeValues.slice(0);preferHDR=videoPreference.preferHDR!==undefined?videoPreference.preferHDR:isHdrSupported();if(preferHDR){allowedVideoRanges=allowedVideoRanges.filter(range=>range!=='SDR');}else {allowedVideoRanges=['SDR'];}}return {preferHDR,allowedVideoRanges};}function getStartCodecTier(codecTiers,currentVideoRange,currentBw,audioPreference,videoPreference){const codecSets=Object.keys(codecTiers);const channelsPreference=audioPreference==null?void 0:audioPreference.channels;const audioCodecPreference=audioPreference==null?void 0:audioPreference.audioCodec;const preferStereo=channelsPreference&&parseInt(channelsPreference)===2;// Use first level set to determine stereo, and minimum resolution and framerate
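// Rough summary of the reduce below: codec tiers whose minimum bitrate exceeds the current
// estimate, that lack default/auto-select audio, miss the preferred audio codec or channel
// count, exceed max(1080p, lowest tier height) or max(30fps, lowest tier framerate), or have
// no variant in the allowed VIDEO-RANGEs are skipped; among the rest, the tier with the
// highest max score is kept, preferring the more favoured codec family and fewer fragment errors.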
let hasStereo=true;let hasCurrentVideoRange=false;let minHeight=Infinity;let minFramerate=Infinity;let minBitrate=Infinity;let selectedScore=0;let videoRanges=[];const{preferHDR,allowedVideoRanges}=getVideoSelectionOptions(currentVideoRange,videoPreference);for(let i=codecSets.length;i--;){const tier=codecTiers[codecSets[i]];hasStereo=tier.channels[2]>0;minHeight=Math.min(minHeight,tier.minHeight);minFramerate=Math.min(minFramerate,tier.minFramerate);minBitrate=Math.min(minBitrate,tier.minBitrate);const matchingVideoRanges=allowedVideoRanges.filter(range=>tier.videoRanges[range]>0);if(matchingVideoRanges.length>0){hasCurrentVideoRange=true;videoRanges=matchingVideoRanges;}}minHeight=isFiniteNumber(minHeight)?minHeight:0;minFramerate=isFiniteNumber(minFramerate)?minFramerate:0;const maxHeight=Math.max(1080,minHeight);const maxFramerate=Math.max(30,minFramerate);minBitrate=isFiniteNumber(minBitrate)?minBitrate:currentBw;currentBw=Math.max(minBitrate,currentBw);// If there are no variants with matching preference, set currentVideoRange to undefined
if(!hasCurrentVideoRange){currentVideoRange=undefined;videoRanges=[];}const codecSet=codecSets.reduce((selected,candidate)=>{// Remove candidates which do not meet bitrate, default audio, stereo or channels preference, 1080p or lower, 30fps or lower, or SDR/HDR selection if present
const candidateTier=codecTiers[candidate];if(candidate===selected){return selected;}if(candidateTier.minBitrate>currentBw){logStartCodecCandidateIgnored(candidate,`min bitrate of ${candidateTier.minBitrate} > current estimate of ${currentBw}`);return selected;}if(!candidateTier.hasDefaultAudio){logStartCodecCandidateIgnored(candidate,`no renditions with default or auto-select sound found`);return selected;}if(audioCodecPreference&&candidate.indexOf(audioCodecPreference.substring(0,4))%5!==0){logStartCodecCandidateIgnored(candidate,`audio codec preference "${audioCodecPreference}" not found`);return selected;}if(channelsPreference&&!preferStereo){if(!candidateTier.channels[channelsPreference]){logStartCodecCandidateIgnored(candidate,`no renditions with ${channelsPreference} channel sound found (channels options: ${Object.keys(candidateTier.channels)})`);return selected;}}else if((!audioCodecPreference||preferStereo)&&hasStereo&&candidateTier.channels['2']===0){logStartCodecCandidateIgnored(candidate,`no renditions with stereo sound found`);return selected;}if(candidateTier.minHeight>maxHeight){logStartCodecCandidateIgnored(candidate,`min resolution of ${candidateTier.minHeight} > maximum of ${maxHeight}`);return selected;}if(candidateTier.minFramerate>maxFramerate){logStartCodecCandidateIgnored(candidate,`min framerate of ${candidateTier.minFramerate} > maximum of ${maxFramerate}`);return selected;}if(!videoRanges.some(range=>candidateTier.videoRanges[range]>0)){logStartCodecCandidateIgnored(candidate,`no variants with VIDEO-RANGE of ${JSON.stringify(videoRanges)} found`);return selected;}if(candidateTier.maxScore<selectedScore){logStartCodecCandidateIgnored(candidate,`max score of ${candidateTier.maxScore} < selected max of ${selectedScore}`);return selected;}// Remove candiates with less preferred codecs or more errors
if(selected&&(codecsSetSelectionPreferenceValue(candidate)>=codecsSetSelectionPreferenceValue(selected)||candidateTier.fragmentError>codecTiers[selected].fragmentError)){return selected;}selectedScore=candidateTier.maxScore;return candidate;},undefined);return {codecSet,videoRanges,preferHDR,minFramerate,minBitrate};}function logStartCodecCandidateIgnored(codeSet,reason){logger.log(`[abr] start candidates with "${codeSet}" ignored because ${reason}`);}function getAudioTracksByGroup(allAudioTracks){return allAudioTracks.reduce((audioTracksByGroup,track)=>{let trackGroup=audioTracksByGroup.groups[track.groupId];if(!trackGroup){trackGroup=audioTracksByGroup.groups[track.groupId]={tracks:[],channels:{2:0},hasDefault:false,hasAutoSelect:false};}trackGroup.tracks.push(track);const channelsKey=track.channels||'2';trackGroup.channels[channelsKey]=(trackGroup.channels[channelsKey]||0)+1;trackGroup.hasDefault=trackGroup.hasDefault||track.default;trackGroup.hasAutoSelect=trackGroup.hasAutoSelect||track.autoselect;if(trackGroup.hasDefault){audioTracksByGroup.hasDefaultAudio=true;}if(trackGroup.hasAutoSelect){audioTracksByGroup.hasAutoSelectAudio=true;}return audioTracksByGroup;},{hasDefaultAudio:false,hasAutoSelectAudio:false,groups:{}});}function getCodecTiers(levels,audioTracksByGroup,minAutoLevel,maxAutoLevel){return levels.slice(minAutoLevel,maxAutoLevel+1).reduce((tiers,level)=>{if(!level.codecSet){return tiers;}const audioGroups=level.audioGroups;let tier=tiers[level.codecSet];if(!tier){tiers[level.codecSet]=tier={minBitrate:Infinity,minHeight:Infinity,minFramerate:Infinity,maxScore:0,videoRanges:{SDR:0},channels:{'2':0},hasDefaultAudio:!audioGroups,fragmentError:0};}tier.minBitrate=Math.min(tier.minBitrate,level.bitrate);const lesserWidthOrHeight=Math.min(level.height,level.width);tier.minHeight=Math.min(tier.minHeight,lesserWidthOrHeight);tier.minFramerate=Math.min(tier.minFramerate,level.frameRate);tier.maxScore=Math.max(tier.maxScore,level.score);tier.fragmentError+=level.fragmentError;tier.videoRanges[level.videoRange]=(tier.videoRanges[level.videoRange]||0)+1;if(audioGroups){audioGroups.forEach(audioGroupId=>{if(!audioGroupId){return;}const audioGroup=audioTracksByGroup.groups[audioGroupId];if(!audioGroup){return;}// Default audio is any group with DEFAULT=YES, or if missing then any group with AUTOSELECT=YES, or all variants
tier.hasDefaultAudio=tier.hasDefaultAudio||audioTracksByGroup.hasDefaultAudio?audioGroup.hasDefault:audioGroup.hasAutoSelect||!audioTracksByGroup.hasDefaultAudio&&!audioTracksByGroup.hasAutoSelectAudio;Object.keys(audioGroup.channels).forEach(channels=>{tier.channels[channels]=(tier.channels[channels]||0)+audioGroup.channels[channels];});});}return tiers;},{});}function findMatchingOption(option,tracks,matchPredicate){if('attrs'in option){const index=tracks.indexOf(option);if(index!==-1){return index;}}for(let i=0;i<tracks.length;i++){const track=tracks[i];if(matchesOption(option,track,matchPredicate)){return i;}}return -1;}function matchesOption(option,track,matchPredicate){const{groupId,name,lang,assocLang,characteristics,default:isDefault}=option;const forced=option.forced;return (groupId===undefined||track.groupId===groupId)&&(name===undefined||track.name===name)&&(lang===undefined||track.lang===lang)&&(lang===undefined||track.assocLang===assocLang)&&(isDefault===undefined||track.default===isDefault)&&(forced===undefined||track.forced===forced)&&(characteristics===undefined||characteristicsMatch(characteristics,track.characteristics))&&(matchPredicate===undefined||matchPredicate(option,track));}function characteristicsMatch(characteristicsA,characteristicsB=''){const arrA=characteristicsA.split(',');const arrB=characteristicsB.split(',');// Expects each item to be unique:
return arrA.length===arrB.length&&!arrA.some(el=>arrB.indexOf(el)===-1);}function audioMatchPredicate(option,track){const{audioCodec,channels}=option;return (audioCodec===undefined||(track.audioCodec||'').substring(0,4)===audioCodec.substring(0,4))&&(channels===undefined||channels===(track.channels||'2'));}function findClosestLevelWithAudioGroup(option,levels,allAudioTracks,searchIndex,matchPredicate){const currentLevel=levels[searchIndex];// Are there variants with same URI as current level?
// If so, find a match that does not require any level URI change
const variants=levels.reduce((variantMap,level,index)=>{const uri=level.uri;const renditions=variantMap[uri]||(variantMap[uri]=[]);renditions.push(index);return variantMap;},{});const renditions=variants[currentLevel.uri];if(renditions.length>1){searchIndex=Math.max.apply(Math,renditions);}// Find best match
const currentVideoRange=currentLevel.videoRange;const currentFrameRate=currentLevel.frameRate;const currentVideoCodec=currentLevel.codecSet.substring(0,4);const matchingVideo=searchDownAndUpList(levels,searchIndex,level=>{if(level.videoRange!==currentVideoRange||level.frameRate!==currentFrameRate||level.codecSet.substring(0,4)!==currentVideoCodec){return false;}const audioGroups=level.audioGroups;const tracks=allAudioTracks.filter(track=>!audioGroups||audioGroups.indexOf(track.groupId)!==-1);return findMatchingOption(option,tracks,matchPredicate)>-1;});if(matchingVideo>-1){return matchingVideo;}return searchDownAndUpList(levels,searchIndex,level=>{const audioGroups=level.audioGroups;const tracks=allAudioTracks.filter(track=>!audioGroups||audioGroups.indexOf(track.groupId)!==-1);return findMatchingOption(option,tracks,matchPredicate)>-1;});}function searchDownAndUpList(arr,searchIndex,predicate){for(let i=searchIndex;i;i--){if(predicate(arr[i])){return i;}}for(let i=searchIndex+1;i<arr.length;i++){if(predicate(arr[i])){return i;}}return -1;}class AbrController{constructor(_hls){this.hls=void 0;this.lastLevelLoadSec=0;this.lastLoadedFragLevel=-1;this.firstSelection=-1;this._nextAutoLevel=-1;this.nextAutoLevelKey='';this.audioTracksByGroup=null;this.codecTiers=null;this.timer=-1;this.fragCurrent=null;this.partCurrent=null;this.bitrateTestDelay=0;this.bwEstimator=void 0;/*
This method monitors the download rate of the current fragment, and will downswitch if that fragment will not load
quickly enough to prevent underbuffering
*/this._abandonRulesCheck=()=>{const{fragCurrent:frag,partCurrent:part,hls}=this;const{autoLevelEnabled,media}=hls;if(!frag||!media){return;}const now=performance.now();const stats=part?part.stats:frag.stats;const duration=part?part.duration:frag.duration;const timeLoading=now-stats.loading.start;const minAutoLevel=hls.minAutoLevel;// If frag loading is aborted, complete, or from lowest level, stop timer and return
if(stats.aborted||stats.loaded&&stats.loaded===stats.total||frag.level<=minAutoLevel){this.clearTimer();// reset forced auto level value so that next level will be selected
this._nextAutoLevel=-1;return;}// This check only runs if we're in ABR mode and actually playing
if(!autoLevelEnabled||media.paused||!media.playbackRate||!media.readyState){return;}const bufferInfo=hls.mainForwardBufferInfo;if(bufferInfo===null){return;}const ttfbEstimate=this.bwEstimator.getEstimateTTFB();const playbackRate=Math.abs(media.playbackRate);// To maintain stable adaptive playback, only begin monitoring frag loading after half or more of its playback duration has passed
if(timeLoading<=Math.max(ttfbEstimate,1000*(duration/(playbackRate*2)))){return;}// bufferStarvationDelay is an estimate of the amount time (in seconds) it will take to exhaust the buffer
const bufferStarvationDelay=bufferInfo.len/playbackRate;const ttfb=stats.loading.first?stats.loading.first-stats.loading.start:-1;const loadedFirstByte=stats.loaded&&ttfb>-1;const bwEstimate=this.getBwEstimate();const levels=hls.levels;const level=levels[frag.level];const expectedLen=stats.total||Math.max(stats.loaded,Math.round(duration*level.averageBitrate/8));let timeStreaming=loadedFirstByte?timeLoading-ttfb:timeLoading;if(timeStreaming<1&&loadedFirstByte){timeStreaming=Math.min(timeLoading,stats.loaded*8/bwEstimate);}const loadRate=loadedFirstByte?stats.loaded*1000/timeStreaming:0;// fragLoadDelay is an estimate of the time (in seconds) it will take to buffer the remainder of the fragment
const fragLoadedDelay=loadRate?(expectedLen-stats.loaded)/loadRate:expectedLen*8/bwEstimate+ttfbEstimate/1000;// Only downswitch if the time to finish loading the current fragment is greater than the amount of buffer left
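// e.g. (illustrative) with 1 MB of the fragment still outstanding at a measured 500 kB/s
// load rate, fragLoadedDelay ≈ 2 s; if the forward buffer only covers 1.5 s of playback
// (bufferStarvationDelay), the emergency down-switch logic below looks for a lower level
// whose fragment could still be fetched inside that window.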
if(fragLoadedDelay<=bufferStarvationDelay){return;}const bwe=loadRate?loadRate*8:bwEstimate;let fragLevelNextLoadedDelay=Number.POSITIVE_INFINITY;let nextLoadLevel;// Iterate through lower level and try to find the largest one that avoids rebuffering
for(nextLoadLevel=frag.level-1;nextLoadLevel>minAutoLevel;nextLoadLevel--){// compute time to load next fragment at lower level
// 8 = bits per byte (bps/Bps)
const levelNextBitrate=levels[nextLoadLevel].maxBitrate;fragLevelNextLoadedDelay=this.getTimeToLoadFrag(ttfbEstimate/1000,bwe,duration*levelNextBitrate,!levels[nextLoadLevel].details);if(fragLevelNextLoadedDelay<bufferStarvationDelay){break;}}// Only emergency switch down if it takes less time to load a new fragment at lowest level instead of continuing
// to load the current one
if(fragLevelNextLoadedDelay>=fragLoadedDelay){return;}// if estimated load time of new segment is completely unreasonable, ignore and do not emergency switch down
if(fragLevelNextLoadedDelay>duration*10){return;}hls.nextLoadLevel=hls.nextAutoLevel=nextLoadLevel;if(loadedFirstByte){// If there has been loading progress, sample bandwidth using loading time offset by minimum TTFB time
this.bwEstimator.sample(timeLoading-Math.min(ttfbEstimate,ttfb),stats.loaded);}else {// If there has been no loading progress, sample TTFB
this.bwEstimator.sampleTTFB(timeLoading);}const nextLoadLevelBitrate=levels[nextLoadLevel].maxBitrate;if(this.getBwEstimate()*this.hls.config.abrBandWidthUpFactor>nextLoadLevelBitrate){this.resetEstimator(nextLoadLevelBitrate);}this.clearTimer();logger.warn(`[abr] Fragment ${frag.sn}${part?' part '+part.index:''} of level ${frag.level} is loading too slowly;
Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
TTFB estimate: ${ttfb|0} ms
Current BW estimate: ${isFiniteNumber(bwEstimate)?bwEstimate|0:'Unknown'} bps
New BW estimate: ${this.getBwEstimate()|0} bps
Switching to level ${nextLoadLevel} @ ${nextLoadLevelBitrate|0} bps`);hls.trigger(Events.FRAG_LOAD_EMERGENCY_ABORTED,{frag,part,stats});};this.hls=_hls;this.bwEstimator=this.initEstimator();this.registerListeners();}resetEstimator(abrEwmaDefaultEstimate){if(abrEwmaDefaultEstimate){logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);this.hls.config.abrEwmaDefaultEstimate=abrEwmaDefaultEstimate;}this.firstSelection=-1;this.bwEstimator=this.initEstimator();}initEstimator(){const config=this.hls.config;return new EwmaBandWidthEstimator(config.abrEwmaSlowVoD,config.abrEwmaFastVoD,config.abrEwmaDefaultEstimate);}registerListeners(){const{hls}=this;hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.FRAG_LOADING,this.onFragLoading,this);hls.on(Events.FRAG_LOADED,this.onFragLoaded,this);hls.on(Events.FRAG_BUFFERED,this.onFragBuffered,this);hls.on(Events.LEVEL_SWITCHING,this.onLevelSwitching,this);hls.on(Events.LEVEL_LOADED,this.onLevelLoaded,this);hls.on(Events.LEVELS_UPDATED,this.onLevelsUpdated,this);hls.on(Events.MAX_AUTO_LEVEL_UPDATED,this.onMaxAutoLevelUpdated,this);hls.on(Events.ERROR,this.onError,this);}unregisterListeners(){const{hls}=this;if(!hls){return;}hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.FRAG_LOADING,this.onFragLoading,this);hls.off(Events.FRAG_LOADED,this.onFragLoaded,this);hls.off(Events.FRAG_BUFFERED,this.onFragBuffered,this);hls.off(Events.LEVEL_SWITCHING,this.onLevelSwitching,this);hls.off(Events.LEVEL_LOADED,this.onLevelLoaded,this);hls.off(Events.LEVELS_UPDATED,this.onLevelsUpdated,this);hls.off(Events.MAX_AUTO_LEVEL_UPDATED,this.onMaxAutoLevelUpdated,this);hls.off(Events.ERROR,this.onError,this);}destroy(){this.unregisterListeners();this.clearTimer();// @ts-ignore
this.hls=this._abandonRulesCheck=null;this.fragCurrent=this.partCurrent=null;}onManifestLoading(event,data){this.lastLoadedFragLevel=-1;this.firstSelection=-1;this.lastLevelLoadSec=0;this.fragCurrent=this.partCurrent=null;this.onLevelsUpdated();this.clearTimer();}onLevelsUpdated(){if(this.lastLoadedFragLevel>-1&&this.fragCurrent){this.lastLoadedFragLevel=this.fragCurrent.level;}this._nextAutoLevel=-1;this.onMaxAutoLevelUpdated();this.codecTiers=null;this.audioTracksByGroup=null;}onMaxAutoLevelUpdated(){this.firstSelection=-1;this.nextAutoLevelKey='';}onFragLoading(event,data){const frag=data.frag;if(this.ignoreFragment(frag)){return;}if(!frag.bitrateTest){var _data$part;this.fragCurrent=frag;this.partCurrent=(_data$part=data.part)!=null?_data$part:null;}this.clearTimer();this.timer=self.setInterval(this._abandonRulesCheck,100);}onLevelSwitching(event,data){this.clearTimer();}onError(event,data){if(data.fatal){return;}switch(data.details){case ErrorDetails.BUFFER_ADD_CODEC_ERROR:case ErrorDetails.BUFFER_APPEND_ERROR:// Reset last loaded level so that a new selection can be made after calling recoverMediaError
this.lastLoadedFragLevel=-1;this.firstSelection=-1;break;case ErrorDetails.FRAG_LOAD_TIMEOUT:{const frag=data.frag;const{fragCurrent,partCurrent:part}=this;if(frag&&fragCurrent&&frag.sn===fragCurrent.sn&&frag.level===fragCurrent.level){const now=performance.now();const stats=part?part.stats:frag.stats;const timeLoading=now-stats.loading.start;const ttfb=stats.loading.first?stats.loading.first-stats.loading.start:-1;const loadedFirstByte=stats.loaded&&ttfb>-1;if(loadedFirstByte){const ttfbEstimate=this.bwEstimator.getEstimateTTFB();this.bwEstimator.sample(timeLoading-Math.min(ttfbEstimate,ttfb),stats.loaded);}else {this.bwEstimator.sampleTTFB(timeLoading);}}break;}}}getTimeToLoadFrag(timeToFirstByteSec,bandwidth,fragSizeBits,isSwitch){const fragLoadSec=timeToFirstByteSec+fragSizeBits/bandwidth;const playlistLoadSec=isSwitch?this.lastLevelLoadSec:0;return fragLoadSec+playlistLoadSec;}onLevelLoaded(event,data){const config=this.hls.config;const{loading}=data.stats;const timeLoadingMs=loading.end-loading.start;if(isFiniteNumber(timeLoadingMs)){this.lastLevelLoadSec=timeLoadingMs/1000;}if(data.details.live){this.bwEstimator.update(config.abrEwmaSlowLive,config.abrEwmaFastLive);}else {this.bwEstimator.update(config.abrEwmaSlowVoD,config.abrEwmaFastVoD);}}onFragLoaded(event,{frag,part}){const stats=part?part.stats:frag.stats;if(frag.type===PlaylistLevelType.MAIN){this.bwEstimator.sampleTTFB(stats.loading.first-stats.loading.start);}if(this.ignoreFragment(frag)){return;}// stop monitoring bw once frag loaded
this.clearTimer();// reset forced auto level value so that next level will be selected
if(frag.level===this._nextAutoLevel){this._nextAutoLevel=-1;}this.firstSelection=-1;// compute level average bitrate
if(this.hls.config.abrMaxWithRealBitrate){const duration=part?part.duration:frag.duration;const level=this.hls.levels[frag.level];const loadedBytes=(level.loaded?level.loaded.bytes:0)+stats.loaded;const loadedDuration=(level.loaded?level.loaded.duration:0)+duration;level.loaded={bytes:loadedBytes,duration:loadedDuration};level.realBitrate=Math.round(8*loadedBytes/loadedDuration);}if(frag.bitrateTest){const fragBufferedData={stats,frag,part,id:frag.type};this.onFragBuffered(Events.FRAG_BUFFERED,fragBufferedData);frag.bitrateTest=false;}else {// store level id after successful fragment load for playback
this.lastLoadedFragLevel=frag.level;}}onFragBuffered(event,data){const{frag,part}=data;const stats=part!=null&&part.stats.loaded?part.stats:frag.stats;if(stats.aborted){return;}if(this.ignoreFragment(frag)){return;}// Use the difference between parsing and request instead of buffering and request to compute fragLoadingProcessing;
// rationale is that buffer appending only happens once media is attached. This can happen when config.startFragPrefetch
// is used. If we used buffering in that case, our BW estimate sample will be very large.
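// In effect the sample below covers request-to-parse time minus (at most) the estimated TTFB:
// e.g. (illustrative) request at t=0, first byte at 200 ms, parsing finished at 1200 ms with a
// 150 ms TTFB estimate gives processingMs = 1200 - min(200, 150) = 1050 ms for stats.loaded bytes.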
const processingMs=stats.parsing.end-stats.loading.start-Math.min(stats.loading.first-stats.loading.start,this.bwEstimator.getEstimateTTFB());this.bwEstimator.sample(processingMs,stats.loaded);stats.bwEstimate=this.getBwEstimate();if(frag.bitrateTest){this.bitrateTestDelay=processingMs/1000;}else {this.bitrateTestDelay=0;}}ignoreFragment(frag){// Only count non-alt-audio frags which were actually buffered in our BW calculations
return frag.type!==PlaylistLevelType.MAIN||frag.sn==='initSegment';}clearTimer(){if(this.timer>-1){self.clearInterval(this.timer);this.timer=-1;}}get firstAutoLevel(){const{maxAutoLevel,minAutoLevel}=this.hls;const bwEstimate=this.getBwEstimate();const maxStartDelay=this.hls.config.maxStarvationDelay;const abrAutoLevel=this.findBestLevel(bwEstimate,minAutoLevel,maxAutoLevel,0,maxStartDelay,1,1);if(abrAutoLevel>-1){return abrAutoLevel;}const firstLevel=this.hls.firstLevel;const clamped=Math.min(Math.max(firstLevel,minAutoLevel),maxAutoLevel);logger.warn(`[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);return clamped;}get forcedAutoLevel(){if(this.nextAutoLevelKey){return -1;}return this._nextAutoLevel;}// return next auto level
get nextAutoLevel(){const forcedAutoLevel=this.forcedAutoLevel;const bwEstimator=this.bwEstimator;const useEstimate=bwEstimator.canEstimate();const loadedFirstFrag=this.lastLoadedFragLevel>-1;// in case next auto level has been forced, and bw not available or not reliable, return forced value
if(forcedAutoLevel!==-1&&(!useEstimate||!loadedFirstFrag||this.nextAutoLevelKey===this.getAutoLevelKey())){return forcedAutoLevel;}// compute next level using ABR logic
const nextABRAutoLevel=useEstimate&&loadedFirstFrag?this.getNextABRAutoLevel():this.firstAutoLevel;// use forced auto level while it hasn't errored more than ABR selection
if(forcedAutoLevel!==-1){const levels=this.hls.levels;if(levels.length>Math.max(forcedAutoLevel,nextABRAutoLevel)&&levels[forcedAutoLevel].loadError<=levels[nextABRAutoLevel].loadError){return forcedAutoLevel;}}// save result until state has changed
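// "state" here is captured by getAutoLevelKey(): the current bandwidth estimate plus the
// starvation delay (forward buffer length / playback rate, 2 decimals), so the cached
// choice is only reused while neither of those has changed.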
this._nextAutoLevel=nextABRAutoLevel;this.nextAutoLevelKey=this.getAutoLevelKey();return nextABRAutoLevel;}getAutoLevelKey(){return `${this.getBwEstimate()}_${this.getStarvationDelay().toFixed(2)}`;}getNextABRAutoLevel(){const{fragCurrent,partCurrent,hls}=this;const{maxAutoLevel,config,minAutoLevel}=hls;const currentFragDuration=partCurrent?partCurrent.duration:fragCurrent?fragCurrent.duration:0;const avgbw=this.getBwEstimate();// bufferStarvationDelay is the wall-clock time left until the playback buffer is exhausted.
const bufferStarvationDelay=this.getStarvationDelay();let bwFactor=config.abrBandWidthFactor;let bwUpFactor=config.abrBandWidthUpFactor;// First, look to see if we can find a level matching with our avg bandwidth AND that could also guarantee no rebuffering at all
if(bufferStarvationDelay){const _bestLevel=this.findBestLevel(avgbw,minAutoLevel,maxAutoLevel,bufferStarvationDelay,0,bwFactor,bwUpFactor);if(_bestLevel>=0){return _bestLevel;}}// not possible to get rid of rebuffering... try to find level that will guarantee less than maxStarvationDelay of rebuffering
let maxStarvationDelay=currentFragDuration?Math.min(currentFragDuration,config.maxStarvationDelay):config.maxStarvationDelay;if(!bufferStarvationDelay){// in case buffer is empty, let's check if previous fragment was loaded to perform a bitrate test
const bitrateTestDelay=this.bitrateTestDelay;if(bitrateTestDelay){// if it is the case, then we need to adjust our max starvation delay using maxLoadingDelay config value
// max video loading delay used in automatic start level selection :
// in that mode the ABR controller will ensure that video loading time (i.e. the time to fetch the first fragment at lowest quality level +
// the time to fetch the fragment at the appropriate quality level is less than ```maxLoadingDelay``` )
// cap maxLoadingDelay and ensure it is not bigger than the 'bitrate test' frag duration
const maxLoadingDelay=currentFragDuration?Math.min(currentFragDuration,config.maxLoadingDelay):config.maxLoadingDelay;maxStarvationDelay=maxLoadingDelay-bitrateTestDelay;logger.info(`[abr] bitrate test took ${Math.round(1000*bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000*maxStarvationDelay)} ms`);// don't use conservative factor on bitrate test
bwFactor=bwUpFactor=1;}}const bestLevel=this.findBestLevel(avgbw,minAutoLevel,maxAutoLevel,bufferStarvationDelay,maxStarvationDelay,bwFactor,bwUpFactor);logger.info(`[abr] ${bufferStarvationDelay?'rebuffering expected':'buffer is empty'}, optimal quality level ${bestLevel}`);if(bestLevel>-1){return bestLevel;}// If no matching level found, see if min auto level would be a better option
const minLevel=hls.levels[minAutoLevel];const autoLevel=hls.levels[hls.loadLevel];if((minLevel==null?void 0:minLevel.bitrate)<(autoLevel==null?void 0:autoLevel.bitrate)){return minAutoLevel;}// or if bitrate is not lower, continue to use loadLevel
return hls.loadLevel;}getStarvationDelay(){const hls=this.hls;const media=hls.media;if(!media){return Infinity;}// playbackRate is the absolute value of the playback rate; if media.playbackRate is 0, we use 1 to load as
// if we're playing back at the normal rate.
const playbackRate=media&&media.playbackRate!==0?Math.abs(media.playbackRate):1.0;const bufferInfo=hls.mainForwardBufferInfo;return (bufferInfo?bufferInfo.len:0)/playbackRate;}getBwEstimate(){return this.bwEstimator.canEstimate()?this.bwEstimator.getEstimate():this.hls.config.abrEwmaDefaultEstimate;}findBestLevel(currentBw,minAutoLevel,maxAutoLevel,bufferStarvationDelay,maxStarvationDelay,bwFactor,bwUpFactor){var _level$details;const maxFetchDuration=bufferStarvationDelay+maxStarvationDelay;const lastLoadedFragLevel=this.lastLoadedFragLevel;const selectionBaseLevel=lastLoadedFragLevel===-1?this.hls.firstLevel:lastLoadedFragLevel;const{fragCurrent,partCurrent}=this;const{levels,allAudioTracks,loadLevel,config}=this.hls;if(levels.length===1){return 0;}const level=levels[selectionBaseLevel];const live=!!(level!=null&&(_level$details=level.details)!=null&&_level$details.live);const firstSelection=loadLevel===-1||lastLoadedFragLevel===-1;let currentCodecSet;let currentVideoRange='SDR';let currentFrameRate=(level==null?void 0:level.frameRate)||0;const{audioPreference,videoPreference}=config;const audioTracksByGroup=this.audioTracksByGroup||(this.audioTracksByGroup=getAudioTracksByGroup(allAudioTracks));if(firstSelection){if(this.firstSelection!==-1){return this.firstSelection;}const codecTiers=this.codecTiers||(this.codecTiers=getCodecTiers(levels,audioTracksByGroup,minAutoLevel,maxAutoLevel));const startTier=getStartCodecTier(codecTiers,currentVideoRange,currentBw,audioPreference,videoPreference);const{codecSet,videoRanges,minFramerate,minBitrate,preferHDR}=startTier;currentCodecSet=codecSet;currentVideoRange=preferHDR?videoRanges[videoRanges.length-1]:videoRanges[0];currentFrameRate=minFramerate;currentBw=Math.max(currentBw,minBitrate);logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);}else {currentCodecSet=level==null?void 0:level.codecSet;currentVideoRange=level==null?void 0:level.videoRange;}const currentFragDuration=partCurrent?partCurrent.duration:fragCurrent?fragCurrent.duration:0;const ttfbEstimateSec=this.bwEstimator.getEstimateTTFB()/1000;const levelsSkipped=[];for(let i=maxAutoLevel;i>=minAutoLevel;i--){var _levelInfo$supportedR;const levelInfo=levels[i];const upSwitch=i>selectionBaseLevel;if(!levelInfo){continue;}if(config.useMediaCapabilities&&!levelInfo.supportedResult&&!levelInfo.supportedPromise){const mediaCapabilities=navigator.mediaCapabilities;if(typeof(mediaCapabilities==null?void 0:mediaCapabilities.decodingInfo)==='function'&&requiresMediaCapabilitiesDecodingInfo(levelInfo,audioTracksByGroup,currentVideoRange,currentFrameRate,currentBw,audioPreference)){levelInfo.supportedPromise=getMediaDecodingInfoPromise(levelInfo,audioTracksByGroup,mediaCapabilities);levelInfo.supportedPromise.then(decodingInfo=>{if(!this.hls){return;}levelInfo.supportedResult=decodingInfo;const levels=this.hls.levels;const index=levels.indexOf(levelInfo);if(decodingInfo.error){logger.warn(`[abr] MediaCapabilities decodingInfo error: "${decodingInfo.error}" for level ${index} ${JSON.stringify(decodingInfo)}`);}else if(!decodingInfo.supported){logger.warn(`[abr] Unsupported MediaCapabilities decodingInfo result for level ${index} ${JSON.stringify(decodingInfo)}`);if(index>-1&&levels.length>1){logger.log(`[abr] Removing unsupported level ${index}`);this.hls.removeLevel(index);}}});}else {levelInfo.supportedResult=SUPPORTED_INFO_DEFAULT;}}// skip candidates which change codec-family or video-range,
// and which decrease or increase frame-rate for up and down-switch respectively
if(currentCodecSet&&levelInfo.codecSet!==currentCodecSet||currentVideoRange&&levelInfo.videoRange!==currentVideoRange||upSwitch&¤tFrameRate>levelInfo.frameRate||!upSwitch&¤tFrameRate>0&¤tFrameRate<levelInfo.frameRate||levelInfo.supportedResult&&!((_levelInfo$supportedR=levelInfo.supportedResult.decodingInfoResults)!=null&&_levelInfo$supportedR[0].smooth)){levelsSkipped.push(i);continue;}const levelDetails=levelInfo.details;const avgDuration=(partCurrent?levelDetails==null?void 0:levelDetails.partTarget:levelDetails==null?void 0:levelDetails.averagetargetduration)||currentFragDuration;let adjustedbw;// follow algorithm captured from stagefright :
// https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
// Pick the highest bandwidth stream below or equal to estimated bandwidth.
// consider only 80% of the available bandwidth, but if we are switching up,
// be even more conservative (70%) to avoid overestimating and immediately
// switching back.
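// e.g. (illustrative numbers) with a 5 Mbps estimate, bwFactor=0.8 and bwUpFactor=0.7, a
// level is eligible up to 4 Mbps when staying at or below the current level but only up to
// 3.5 Mbps when switching up, and then only if its estimated fetchDuration also fits the
// buffer/starvation budget checked below.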
if(!upSwitch){adjustedbw=bwFactor*currentBw;}else {adjustedbw=bwUpFactor*currentBw;}// Use average bitrate when starvation delay (buffer length) is gt or eq two segment durations and rebuffering is not expected (maxStarvationDelay > 0)
const bitrate=currentFragDuration&&bufferStarvationDelay>=currentFragDuration*2&&maxStarvationDelay===0?levels[i].averageBitrate:levels[i].maxBitrate;const fetchDuration=this.getTimeToLoadFrag(ttfbEstimateSec,adjustedbw,bitrate*avgDuration,levelDetails===undefined);const canSwitchWithinTolerance=// if adjusted bw is greater than level bitrate AND
adjustedbw>=bitrate&&(// no level change, or new level has no error history
i===lastLoadedFragLevel||levelInfo.loadError===0&&levelInfo.fragmentError===0)&&(// fragment fetchDuration unknown OR live stream OR fragment fetchDuration less than max allowed fetch duration, then this level matches
// we don't account for max Fetch Duration for live streams, this is to avoid switching down when near the edge of live sliding window ...
// special case to support startLevel = -1 (bitrateTest) on live streams : in that case we should not exit loop so that findBestLevel will return -1
fetchDuration<=ttfbEstimateSec||!isFiniteNumber(fetchDuration)||live&&!this.bitrateTestDelay||fetchDuration<maxFetchDuration);if(canSwitchWithinTolerance){const forcedAutoLevel=this.forcedAutoLevel;if(i!==loadLevel&&(forcedAutoLevel===-1||forcedAutoLevel!==loadLevel)){if(levelsSkipped.length){logger.trace(`[abr] Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);}logger.info(`[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw-bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);}if(firstSelection){this.firstSelection=i;}// as we are looping from highest to lowest, this will return the best achievable quality level
return i;}}// not enough time budget even with quality level 0 ... rebuffering might happen
return -1;}set nextAutoLevel(nextLevel){const{maxAutoLevel,minAutoLevel}=this.hls;const value=Math.min(Math.max(nextLevel,minAutoLevel),maxAutoLevel);if(this._nextAutoLevel!==value){this.nextAutoLevelKey='';this._nextAutoLevel=value;}}}/**
* @ignore
* Sub-class specialization of EventHandler base class.
*
* TaskLoop allows scheduling a task function to be called (optionally repeatedly) on the main loop,
* scheduled asynchronously, avoiding recursive calls in the same tick.
*
* The task itself is implemented in `doTick`. It can be requested and called for single execution
* using the `tick` method.
*
* It will be assured that the task execution method (`tick`) only gets called once per main loop "tick",
* no matter how often it gets requested for execution. Execution in further ticks will be scheduled accordingly.
*
* If further execution requests have already been scheduled on the next tick, it can be checked with `hasNextTick`,
* and cancelled with `clearNextTick`.
*
* The task can be scheduled as an interval repeatedly with a period as parameter (see `setInterval`, `clearInterval`).
*
* Sub-classes need to implement the `doTick` method which will effectively have the task execution routine.
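*
* A minimal subclass sketch (hypothetical names):
*
*   class PollingController extends TaskLoop {
*     doTick() { this.pollOnce(); }  // pollOnce() is a hypothetical unit of work
*   }
*   // new PollingController().setInterval(100) would then call doTick() every 100 ms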
*
* Further explanations:
*
* The base class has a `tick` method that will schedule the doTick call. It may be called synchronously
* only for a stack-depth of one. On re-entrant calls, subsequent calls are scheduled for the next main loop tick.
*
* When the task execution (`tick` method) is called in a re-entrant way, this is detected and
* we limit the task execution per call stack to exactly one, scheduling/postponing further
* task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
*/class TaskLoop{constructor(){this._boundTick=void 0;this._tickTimer=null;this._tickInterval=null;this._tickCallCount=0;this._boundTick=this.tick.bind(this);}destroy(){this.onHandlerDestroying();this.onHandlerDestroyed();}onHandlerDestroying(){// clear all timers before unregistering from event bus
this.clearNextTick();this.clearInterval();}onHandlerDestroyed(){}hasInterval(){return !!this._tickInterval;}hasNextTick(){return !!this._tickTimer;}/**
* @param millis - Interval time (ms)
* @returns True when interval has been scheduled, false when already scheduled (no effect)
*/setInterval(millis){if(!this._tickInterval){this._tickCallCount=0;this._tickInterval=self.setInterval(this._boundTick,millis);return true;}return false;}/**
* @returns True when interval was cleared, false when none was set (no effect)
*/clearInterval(){if(this._tickInterval){self.clearInterval(this._tickInterval);this._tickInterval=null;return true;}return false;}/**
* @returns True when timeout was cleared, false when none was set (no effect)
*/clearNextTick(){if(this._tickTimer){self.clearTimeout(this._tickTimer);this._tickTimer=null;return true;}return false;}/**
* Will call the subclass doTick implementation in this main loop tick
* or in the next one (via setTimeout(,0)) in case it has already been called
* in this tick (in case this is a re-entrant call).
*/tick(){this._tickCallCount++;if(this._tickCallCount===1){this.doTick();// re-entrant call to tick from previous doTick call stack
// -> schedule a call on the next main loop iteration to process this task processing request
if(this._tickCallCount>1){// make sure only one timer exists at any time at max
this.tickImmediate();}this._tickCallCount=0;}}tickImmediate(){this.clearNextTick();this._tickTimer=self.setTimeout(this._boundTick,0);}/**
* For subclass to implement task logic
* @abstract
*/doTick(){}}var FragmentState={NOT_LOADED:"NOT_LOADED",APPENDING:"APPENDING",PARTIAL:"PARTIAL",OK:"OK"};class FragmentTracker{constructor(hls){this.activePartLists=Object.create(null);this.endListFragments=Object.create(null);this.fragments=Object.create(null);this.timeRanges=Object.create(null);this.bufferPadding=0.2;this.hls=void 0;this.hasGaps=false;this.hls=hls;this._registerListeners();}_registerListeners(){const{hls}=this;hls.on(Events.BUFFER_APPENDED,this.onBufferAppended,this);hls.on(Events.FRAG_BUFFERED,this.onFragBuffered,this);hls.on(Events.FRAG_LOADED,this.onFragLoaded,this);}_unregisterListeners(){const{hls}=this;hls.off(Events.BUFFER_APPENDED,this.onBufferAppended,this);hls.off(Events.FRAG_BUFFERED,this.onFragBuffered,this);hls.off(Events.FRAG_LOADED,this.onFragLoaded,this);}destroy(){this._unregisterListeners();// @ts-ignore
this.fragments=// @ts-ignore
this.activePartLists=// @ts-ignore
this.endListFragments=this.timeRanges=null;}/**
* Return a Fragment or Part with an appended range that matches the position and levelType
* Otherwise, return null
*/getAppendedFrag(position,levelType){const activeParts=this.activePartLists[levelType];if(activeParts){for(let i=activeParts.length;i--;){const activePart=activeParts[i];if(!activePart){break;}const appendedPTS=activePart.end;if(activePart.start<=position&&appendedPTS!==null&&position<=appendedPTS){return activePart;}}}return this.getBufferedFrag(position,levelType);}/**
* Return a buffered Fragment that matches the position and levelType.
* A buffered Fragment is one whose loading, parsing and appending are done (completed or "partial", meaning aborted).
* If no matching Fragment is found, return null
*/getBufferedFrag(position,levelType){const{fragments}=this;const keys=Object.keys(fragments);for(let i=keys.length;i--;){const fragmentEntity=fragments[keys[i]];if((fragmentEntity==null?void 0:fragmentEntity.body.type)===levelType&&fragmentEntity.buffered){const frag=fragmentEntity.body;if(frag.start<=position&&position<=frag.end){return frag;}}}return null;}/**
* Partial fragments affected by coded frame eviction will be removed
* The browser will unload parts of the buffer to free up memory for new buffer data
* Fragments will need to be reloaded when the buffer is freed up; removing partial fragments allows them to be reloaded (since there might be parts that are still playable)
*/detectEvictedFragments(elementaryStream,timeRange,playlistType,appendedPart){if(this.timeRanges){this.timeRanges[elementaryStream]=timeRange;}// Check if any flagged fragments have been unloaded
// excluding anything newer than appendedPartSn
const appendedPartSn=(appendedPart==null?void 0:appendedPart.fragment.sn)||-1;Object.keys(this.fragments).forEach(key=>{const fragmentEntity=this.fragments[key];if(!fragmentEntity){return;}if(appendedPartSn>=fragmentEntity.body.sn){return;}if(!fragmentEntity.buffered&&!fragmentEntity.loaded){if(fragmentEntity.body.type===playlistType){this.removeFragment(fragmentEntity.body);}return;}const esData=fragmentEntity.range[elementaryStream];if(!esData){return;}esData.time.some(time=>{const isNotBuffered=!this.isTimeBuffered(time.startPTS,time.endPTS,timeRange);if(isNotBuffered){// Unregister partial fragment as it needs to load again to be reused
this.removeFragment(fragmentEntity.body);}return isNotBuffered;});});}/**
* Checks if the fragment passed in is loaded in the buffer properly
* Partially loaded fragments will be registered as a partial fragment
*/detectPartialFragments(data){const timeRanges=this.timeRanges;const{frag,part}=data;if(!timeRanges||frag.sn==='initSegment'){return;}const fragKey=getFragmentKey(frag);const fragmentEntity=this.fragments[fragKey];if(!fragmentEntity||fragmentEntity.buffered&&frag.gap){return;}const isFragHint=!frag.relurl;Object.keys(timeRanges).forEach(elementaryStream=>{const streamInfo=frag.elementaryStreams[elementaryStream];if(!streamInfo){return;}const timeRange=timeRanges[elementaryStream];const partial=isFragHint||streamInfo.partial===true;fragmentEntity.range[elementaryStream]=this.getBufferedTimes(frag,part,partial,timeRange);});fragmentEntity.loaded=null;if(Object.keys(fragmentEntity.range).length){fragmentEntity.buffered=true;const endList=fragmentEntity.body.endList=frag.endList||fragmentEntity.body.endList;if(endList){this.endListFragments[fragmentEntity.body.type]=fragmentEntity;}if(!isPartial(fragmentEntity)){// Remove older fragment parts from lookup after frag is tracked as buffered
this.removeParts(frag.sn-1,frag.type);}}else {// remove fragment if nothing was appended
this.removeFragment(fragmentEntity.body);}}removeParts(snToKeep,levelType){const activeParts=this.activePartLists[levelType];if(!activeParts){return;}this.activePartLists[levelType]=activeParts.filter(part=>part.fragment.sn>=snToKeep);}fragBuffered(frag,force){const fragKey=getFragmentKey(frag);let fragmentEntity=this.fragments[fragKey];if(!fragmentEntity&&force){fragmentEntity=this.fragments[fragKey]={body:frag,appendedPTS:null,loaded:null,buffered:false,range:Object.create(null)};if(frag.gap){this.hasGaps=true;}}if(fragmentEntity){fragmentEntity.loaded=null;fragmentEntity.buffered=true;}}getBufferedTimes(fragment,part,partial,timeRange){const buffered={time:[],partial};const startPTS=fragment.start;const endPTS=fragment.end;const minEndPTS=fragment.minEndPTS||endPTS;const maxStartPTS=fragment.maxStartPTS||startPTS;for(let i=0;i<timeRange.length;i++){const startTime=timeRange.start(i)-this.bufferPadding;const endTime=timeRange.end(i)+this.bufferPadding;if(maxStartPTS>=startTime&&minEndPTS<=endTime){// Fragment is entirely contained in buffer
// No need to check the other timeRange times since it's completely playable
buffered.time.push({startPTS:Math.max(startPTS,timeRange.start(i)),endPTS:Math.min(endPTS,timeRange.end(i))});break;}else if(startPTS<endTime&&endPTS>startTime){const start=Math.max(startPTS,timeRange.start(i));const end=Math.min(endPTS,timeRange.end(i));if(end>start){buffered.partial=true;// Check for intersection with buffer
// Get playable sections of the fragment
buffered.time.push({startPTS:start,endPTS:end});}}else if(endPTS<=startTime){// No need to check the rest of the timeRange as it is in order
break;}}return buffered;}/**
* Gets the partial fragment for a certain time
*/getPartialFragment(time){let bestFragment=null;let timePadding;let startTime;let endTime;let bestOverlap=0;const{bufferPadding,fragments}=this;Object.keys(fragments).forEach(key=>{const fragmentEntity=fragments[key];if(!fragmentEntity){return;}if(isPartial(fragmentEntity)){startTime=fragmentEntity.body.start-bufferPadding;endTime=fragmentEntity.body.end+bufferPadding;if(time>=startTime&&time<=endTime){// Use the fragment that has the most padding from start and end time
timePadding=Math.min(time-startTime,endTime-time);if(bestOverlap<=timePadding){bestFragment=fragmentEntity.body;bestOverlap=timePadding;}}}});return bestFragment;}isEndListAppended(type){const lastFragmentEntity=this.endListFragments[type];return lastFragmentEntity!==undefined&&(lastFragmentEntity.buffered||isPartial(lastFragmentEntity));}getState(fragment){const fragKey=getFragmentKey(fragment);const fragmentEntity=this.fragments[fragKey];if(fragmentEntity){if(!fragmentEntity.buffered){return FragmentState.APPENDING;}else if(isPartial(fragmentEntity)){return FragmentState.PARTIAL;}else {return FragmentState.OK;}}return FragmentState.NOT_LOADED;}isTimeBuffered(startPTS,endPTS,timeRange){let startTime;let endTime;for(let i=0;i<timeRange.length;i++){startTime=timeRange.start(i)-this.bufferPadding;endTime=timeRange.end(i)+this.bufferPadding;if(startPTS>=startTime&&endPTS<=endTime){return true;}if(endPTS<=startTime){// No need to check the rest of the timeRange as it is in order
return false;}}return false;}onFragLoaded(event,data){const{frag,part}=data;// don't track initsegment (for which sn is not a number)
// don't track frags used for bitrateTest, they're irrelevant.
if(frag.sn==='initSegment'||frag.bitrateTest){return;}// Fragment entity `loaded` FragLoadedData is null when loading parts
const loaded=part?null:data;const fragKey=getFragmentKey(frag);this.fragments[fragKey]={body:frag,appendedPTS:null,loaded,buffered:false,range:Object.create(null)};}onBufferAppended(event,data){const{frag,part,timeRanges}=data;if(frag.sn==='initSegment'){return;}const playlistType=frag.type;if(part){let activeParts=this.activePartLists[playlistType];if(!activeParts){this.activePartLists[playlistType]=activeParts=[];}activeParts.push(part);}// Store the latest timeRanges loaded in the buffer
this.timeRanges=timeRanges;Object.keys(timeRanges).forEach(elementaryStream=>{const timeRange=timeRanges[elementaryStream];this.detectEvictedFragments(elementaryStream,timeRange,playlistType,part);});}onFragBuffered(event,data){this.detectPartialFragments(data);}hasFragment(fragment){const fragKey=getFragmentKey(fragment);return !!this.fragments[fragKey];}hasParts(type){var _this$activePartLists;return !!((_this$activePartLists=this.activePartLists[type])!=null&&_this$activePartLists.length);}removeFragmentsInRange(start,end,playlistType,withGapOnly,unbufferedOnly){if(withGapOnly&&!this.hasGaps){return;}Object.keys(this.fragments).forEach(key=>{const fragmentEntity=this.fragments[key];if(!fragmentEntity){return;}const frag=fragmentEntity.body;if(frag.type!==playlistType||withGapOnly&&!frag.gap){return;}if(frag.start<end&&frag.end>start&&(fragmentEntity.buffered||unbufferedOnly)){this.removeFragment(frag);}});}removeFragment(fragment){const fragKey=getFragmentKey(fragment);fragment.stats.loaded=0;fragment.clearElementaryStreamInfo();const activeParts=this.activePartLists[fragment.type];if(activeParts){const snToRemove=fragment.sn;this.activePartLists[fragment.type]=activeParts.filter(part=>part.fragment.sn!==snToRemove);}delete this.fragments[fragKey];if(fragment.endList){delete this.endListFragments[fragment.type];}}removeAllFragments(){this.fragments=Object.create(null);this.endListFragments=Object.create(null);this.activePartLists=Object.create(null);this.hasGaps=false;}}function isPartial(fragmentEntity){var _fragmentEntity$range,_fragmentEntity$range2,_fragmentEntity$range3;return fragmentEntity.buffered&&(fragmentEntity.body.gap||((_fragmentEntity$range=fragmentEntity.range.video)==null?void 0:_fragmentEntity$range.partial)||((_fragmentEntity$range2=fragmentEntity.range.audio)==null?void 0:_fragmentEntity$range2.partial)||((_fragmentEntity$range3=fragmentEntity.range.audiovideo)==null?void 0:_fragmentEntity$range3.partial));}function getFragmentKey(fragment){return `${fragment.type}_${fragment.level}_${fragment.sn}`;}/**
* Provides methods dealing with buffer length retrieval, for example.
*
* In general, a helper around HTML5 MediaElement TimeRanges gathered from the `buffered` property.
*
* Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered
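*
* bufferInfo(media, pos, maxHoleDuration) returns an object of the shape { len, start, end, nextStart }
* describing the buffered range around `pos`, ignoring holes shorter than maxHoleDuration.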
*/const noopBuffered={length:0,start:()=>0,end:()=>0};class BufferHelper{/**
* Return true if `media`'s buffered ranges include `position`
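* e.g. with buffered ranges [0,10] and [20,30], isBuffered(media, 5) returns true while isBuffered(media, 15) returns false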
*/static isBuffered(media,position){try{if(media){const buffered=BufferHelper.getBuffered(media);for(let i=0;i<buffered.length;i++){if(position>=buffered.start(i)&&position<=buffered.end(i)){return true;}}}}catch(error){// this is to catch
// InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
// This SourceBuffer has been removed from the parent media source
}return false;}static bufferInfo(media,pos,maxHoleDuration){try{if(media){const vbuffered=BufferHelper.getBuffered(media);const buffered=[];let i;for(i=0;i<vbuffered.length;i++){buffered.push({start:vbuffered.start(i),end:vbuffered.end(i)});}return this.bufferedInfo(buffered,pos,maxHoleDuration);}}catch(error){// this is to catch
// InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
// This SourceBuffer has been removed from the parent media source
}return {len:0,start:pos,end:pos,nextStart:undefined};}static bufferedInfo(buffered,pos,maxHoleDuration){pos=Math.max(0,pos);// sort on buffer.start/smaller end (IE does not always return sorted buffered range)
buffered.sort(function(a,b){const diff=a.start-b.start;if(diff){return diff;}else {return b.end-a.end;}});let buffered2=[];if(maxHoleDuration){// there might be some small holes between buffer time range
// consider that holes smaller than maxHoleDuration are irrelevant and build another
// buffer time range representation that discards those holes
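// e.g. with maxHoleDuration = 0.5, the ranges [0,10] and [10.2,20] are merged into a single [0,20] range,
// since the 0.2s hole between them is below the threshold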
for(let i=0;i<buffered.length;i++){const buf2len=buffered2.length;if(buf2len){const buf2end=buffered2[buf2len-1].end;// if small hole (value between 0 and maxHoleDuration) or overlapping (negative)
if(buffered[i].start-buf2end<maxHoleDuration){// merge overlapping time ranges
// update lastRange.end only if smaller than item.end
// e.g. [ 1, 15] with [ 2,8] => [ 1,15] (no need to modify lastRange.end)
// whereas [ 1, 8] with [ 2,15] => [ 1,15] ( lastRange should switch from [1,8] to [1,15])
if(buffered[i].end>buf2end){buffered2[buf2len-1].end=buffered[i].end;}}else {// big hole
buffered2.push(buffered[i]);}}else {// first value
buffered2.push(buffered[i]);}}}else {buffered2=buffered;}let bufferLen=0;// bufferStartNext can possibly be undefined based on the conditional logic below
let bufferStartNext;// bufferStart and bufferEnd are buffer boundaries around current video position
let bufferStart=pos;let bufferEnd=pos;for(let i=0;i<buffered2.length;i++){const start=buffered2[i].start;const end=buffered2[i].end;// logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
if(pos+maxHoleDuration>=start&&pos<end){// play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
bufferStart=start;bufferEnd=end;bufferLen=bufferEnd-pos;}else if(pos+maxHoleDuration<start){bufferStartNext=start;break;}}return {len:bufferLen,start:bufferStart||0,end:bufferEnd||0,nextStart:bufferStartNext};}/**
* Safe method to get buffered property.
* SourceBuffer.buffered may throw if the SourceBuffer is removed from its MediaSource
*/static getBuffered(media){try{return media.buffered;}catch(e){logger.log('failed to get media.buffered',e);return noopBuffered;}}}class ChunkMetadata{constructor(level,sn,id,size=0,part=-1,partial=false){this.level=void 0;this.sn=void 0;this.part=void 0;this.id=void 0;this.size=void 0;this.partial=void 0;this.transmuxing=getNewPerformanceTiming();this.buffering={audio:getNewPerformanceTiming(),video:getNewPerformanceTiming(),audiovideo:getNewPerformanceTiming()};this.level=level;this.sn=sn;this.id=id;this.size=size;this.part=part;this.partial=partial;}}function getNewPerformanceTiming(){return {start:0,executeStart:0,executeEnd:0,end:0};}function findFirstFragWithCC(fragments,cc){for(let i=0,len=fragments.length;i<len;i++){var _fragments$i;if(((_fragments$i=fragments[i])==null?void 0:_fragments$i.cc)===cc){return fragments[i];}}return null;}function shouldAlignOnDiscontinuities(lastFrag,switchDetails,details){if(switchDetails){if(details.endCC>details.startCC||lastFrag&&lastFrag.cc<details.startCC){return true;}}return false;}// Find the first frag in the previous level which matches the CC of the first frag of the new level
function findDiscontinuousReferenceFrag(prevDetails,curDetails){const prevFrags=prevDetails.fragments;const curFrags=curDetails.fragments;if(!curFrags.length||!prevFrags.length){logger.log('No fragments to align');return;}const prevStartFrag=findFirstFragWithCC(prevFrags,curFrags[0].cc);if(!prevStartFrag||prevStartFrag&&!prevStartFrag.startPTS){logger.log('No frag in previous level to align on');return;}return prevStartFrag;}function adjustFragmentStart(frag,sliding){if(frag){const start=frag.start+sliding;frag.start=frag.startPTS=start;frag.endPTS=start+frag.duration;}}function adjustSlidingStart(sliding,details){// Update segments
const fragments=details.fragments;for(let i=0,len=fragments.length;i<len;i++){adjustFragmentStart(fragments[i],sliding);}// Update LL-HLS parts at the end of the playlist
if(details.fragmentHint){adjustFragmentStart(details.fragmentHint,sliding);}details.alignedSliding=true;}/**
* Using the parameters of the last level, this function computes PTS' of the new fragments so that they form a
* contiguous stream with the last fragments.
* The PTS of a fragment lets Hls.js know where it fits into a stream - by knowing every PTS, we know which fragment to
* download at any given time. PTS is normally computed when the fragment is demuxed, so taking this step saves us time
* and an extra download.
* @param lastFrag
* @param lastLevel
* @param details
*/function alignStream(lastFrag,switchDetails,details){if(!switchDetails){return;}alignDiscontinuities(lastFrag,details,switchDetails);if(!details.alignedSliding&&switchDetails){// If the PTS wasn't figured out via discontinuity sequence that means there was no CC increase within the level.
// Aligning via Program Date Time should therefore be reliable, since PDT should be the same within the same
// discontinuity sequence.
alignMediaPlaylistByPDT(details,switchDetails);}if(!details.alignedSliding&&switchDetails&&!details.skippedSegments){// Try to align on sn so that we pick a better start fragment.
// Do not perform this on playlists with delta updates as this is only to align levels on switch
// and adjustSliding only adjusts fragments after skippedSegments.
adjustSliding(switchDetails,details);}}/**
* Computes the PTS of a new level's fragments using the PTS of a fragment in the last level which shares the same
* discontinuity sequence.
* @param lastFrag - The last Fragment which shares the same discontinuity sequence
* @param lastLevel - The details of the last loaded level
* @param details - The details of the new level
*/function alignDiscontinuities(lastFrag,details,switchDetails){if(shouldAlignOnDiscontinuities(lastFrag,switchDetails,details)){const referenceFrag=findDiscontinuousReferenceFrag(switchDetails,details);if(referenceFrag&&isFiniteNumber(referenceFrag.start)){logger.log(`Adjusting PTS using last level due to CC increase within current level ${details.url}`);adjustSlidingStart(referenceFrag.start,details);}}}/**
* Ensures appropriate time-alignment between renditions based on PDT.
* This function assumes the timelines represented in `refDetails` are accurate, including the PDTs
* for the last discontinuity sequence number shared by both playlists when present,
* and uses the "wallclock"/PDT timeline as a cross-reference to `details`, adjusting the presentation
* times/timelines of `details` accordingly.
* Given the asynchronous nature of fetches and initial loads of live `main` and audio/subtitle tracks,
* the primary purpose of this function is to ensure the "local timelines" of audio/subtitle tracks
* are aligned to the main/video timeline, using PDT as the cross-reference/"anchor" that should
* be consistent across playlists, per the HLS spec.
* @param details - The details of the rendition you'd like to time-align (e.g. an audio rendition).
* @param refDetails - The details of the reference rendition with start and PDT times for alignment.
*/function alignMediaPlaylistByPDT(details,refDetails){if(!details.hasProgramDateTime||!refDetails.hasProgramDateTime){return;}const fragments=details.fragments;const refFragments=refDetails.fragments;if(!fragments.length||!refFragments.length){return;}// Calculate a delta to apply to all fragments according to the delta in PDT times and start times
// of a fragment in the reference details, and a fragment in the target details of the same discontinuity.
// If a fragment of the same discontinuity was not found use the middle fragment of both.
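// Note: programDateTime is in milliseconds while fragment start times are in seconds, hence the /1000 below.
// e.g. if the target fragment's PDT is 4000ms later than the reference fragment's but its local start is only
// 1s later, delta = 4 - 1 = 3s is added to every fragment start in `details` via adjustSlidingStart().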
let refFrag;let frag;const targetCC=Math.min(refDetails.endCC,details.endCC);if(refDetails.startCC<targetCC&&details.startCC<targetCC){refFrag=findFirstFragWithCC(refFragments,targetCC);frag=findFirstFragWithCC(fragments,targetCC);}if(!refFrag||!frag){refFrag=refFragments[Math.floor(refFragments.length/2)];frag=findFirstFragWithCC(fragments,refFrag.cc)||fragments[Math.floor(fragments.length/2)];}const refPDT=refFrag.programDateTime;const targetPDT=frag.programDateTime;if(!refPDT||!targetPDT){return;}const delta=(targetPDT-refPDT)/1000-(frag.start-refFrag.start);adjustSlidingStart(delta,details);}const MIN_CHUNK_SIZE=Math.pow(2,17);// 128kb
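// 2^17 bytes = 131072 = 128 KiB. Used below as the progressive-loading highWaterMark for media segments
// (init segments use Infinity, effectively disabling progressive chunking for them).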
class FragmentLoader{constructor(config){this.config=void 0;this.loader=null;this.partLoadTimeout=-1;this.config=config;}destroy(){if(this.loader){this.loader.destroy();this.loader=null;}}abort(){if(this.loader){// Abort the loader for current fragment. Only one may load at any given time
this.loader.abort();}}load(frag,onProgress){const url=frag.url;if(!url){return Promise.reject(new LoadError({type:ErrorTypes.NETWORK_ERROR,details:ErrorDetails.FRAG_LOAD_ERROR,fatal:false,frag,error:new Error(`Fragment does not have a ${url?'part list':'url'}`),networkDetails:null}));}this.abort();const config=this.config;const FragmentILoader=config.fLoader;const DefaultILoader=config.loader;return new Promise((resolve,reject)=>{if(this.loader){this.loader.destroy();}if(frag.gap){if(frag.tagList.some(tags=>tags[0]==='GAP')){reject(createGapLoadError(frag));return;}else {// Reset temporary treatment as GAP tag
frag.gap=false;}}const loader=this.loader=frag.loader=FragmentILoader?new FragmentILoader(config):new DefaultILoader(config);const loaderContext=createLoaderContext(frag);const loadPolicy=getLoaderConfigWithoutReties(config.fragLoadPolicy.default);const loaderConfig={loadPolicy,timeout:loadPolicy.maxLoadTimeMs,maxRetry:0,retryDelay:0,maxRetryDelay:0,highWaterMark:frag.sn==='initSegment'?Infinity:MIN_CHUNK_SIZE};// Assign frag stats to the loader's stats reference
frag.stats=loader.stats;loader.load(loaderContext,loaderConfig,{onSuccess:(response,stats,context,networkDetails)=>{this.resetLoader(frag,loader);let payload=response.data;if(context.resetIV&&frag.decryptdata){frag.decryptdata.iv=new Uint8Array(payload.slice(0,16));payload=payload.slice(16);}resolve({frag,part:null,payload,networkDetails});},onError:(response,context,networkDetails,stats)=>{this.resetLoader(frag,loader);reject(new LoadError({type:ErrorTypes.NETWORK_ERROR,details:ErrorDetails.FRAG_LOAD_ERROR,fatal:false,frag,response:_objectSpread2({url,data:undefined},response),error:new Error(`HTTP Error ${response.code} ${response.text}`),networkDetails,stats}));},onAbort:(stats,context,networkDetails)=>{this.resetLoader(frag,loader);reject(new LoadError({type:ErrorTypes.NETWORK_ERROR,details:ErrorDetails.INTERNAL_ABORTED,fatal:false,frag,error:new Error('Aborted'),networkDetails,stats}));},onTimeout:(stats,context,networkDetails)=>{this.resetLoader(frag,loader);reject(new LoadError({type:ErrorTypes.NETWORK_ERROR,details:ErrorDetails.FRAG_LOAD_TIMEOUT,fatal:false,frag,error:new Error(`Timeout after ${loaderConfig.timeout}ms`),networkDetails,stats}));},onProgress:(stats,context,data,networkDetails)=>{if(onProgress){onProgress({frag,part:null,payload:data,networkDetails});}}});});}loadPart(frag,part,onProgress){this.abort();const config=this.config;const FragmentILoader=config.fLoader;const DefaultILoader=config.loader;return new Promise((resolve,reject)=>{if(this.loader){this.loader.destroy();}if(frag.gap||part.gap){reject(createGapLoadError(frag,part));return;}const loader=this.loader=frag.loader=FragmentILoader?new FragmentILoader(config):new DefaultILoader(config);const loaderContext=createLoaderContext(frag,part);// Should we define another load policy for parts?
const loadPolicy=getLoaderConfigWithoutReties(config.fragLoadPolicy.default);const loaderConfig={loadPolicy,timeout:loadPolicy.maxLoadTimeMs,maxRetry:0,retryDelay:0,maxRetryDelay:0,highWaterMark:MIN_CHUNK_SIZE};// Assign part stats to the loader's stats reference
part.stats=loader.stats;loader.load(loaderContext,loaderConfig,{onSuccess:(response,stats,context,networkDetails)=>{this.resetLoader(frag,loader);this.updateStatsFromPart(frag,part);const partLoadedData={frag,part,payload:response.data,networkDetails};onProgress(partLoadedData);resolve(partLoadedData);},onError:(response,context,networkDetails,stats)=>{this.resetLoader(frag,loader);reject(new LoadError({type:ErrorTypes.NETWORK_ERROR,details:ErrorDetails.FRAG_LOAD_ERROR,fatal:false,frag,part,response:_objectSpread2({url:loaderContext.url,data:undefined},response),error:new Error(`HTTP Error ${response.code} ${response.text}`),networkDetails,stats}));},onAbort:(stats,context,networkDetails)=>{frag.stats.aborted=part.stats.aborted;this.resetLoader(frag,loader);reject(new LoadError({type:ErrorTypes.NETWORK_ERROR,details:ErrorDetails.INTERNAL_ABORTED,fatal:false,frag,part,error:new Error('Aborted'),networkDetails,stats}));},onTimeout:(stats,context,networkDetails)=>{this.resetLoader(frag,loader);reject(new LoadError({type:ErrorTypes.NETWORK_ERROR,details:ErrorDetails.FRAG_LOAD_TIMEOUT,fatal:false,frag,part,error:new Error(`Timeout after ${loaderConfig.timeout}ms`),networkDetails,stats}));}});});}updateStatsFromPart(frag,part){const fragStats=frag.stats;const partStats=part.stats;const partTotal=partStats.total;fragStats.loaded+=partStats.loaded;if(partTotal){const estTotalParts=Math.round(frag.duration/part.duration);const estLoadedParts=Math.min(Math.round(fragStats.loaded/partTotal),estTotalParts);const estRemainingParts=estTotalParts-estLoadedParts;const estRemainingBytes=estRemainingParts*Math.round(fragStats.loaded/estLoadedParts);fragStats.total=fragStats.loaded+estRemainingBytes;}else {fragStats.total=Math.max(fragStats.loaded,fragStats.total);}const fragLoading=fragStats.loading;const partLoading=partStats.loading;if(fragLoading.start){// add to fragment loader latency
fragLoading.first+=partLoading.first-partLoading.start;}else {fragLoading.start=partLoading.start;fragLoading.first=partLoading.first;}fragLoading.end=partLoading.end;}resetLoader(frag,loader){frag.loader=null;if(this.loader===loader){self.clearTimeout(this.partLoadTimeout);this.loader=null;}loader.destroy();}}function createLoaderContext(frag,part=null){const segment=part||frag;const loaderContext={frag,part,responseType:'arraybuffer',url:segment.url,headers:{},rangeStart:0,rangeEnd:0};const start=segment.byteRangeStartOffset;const end=segment.byteRangeEndOffset;if(isFiniteNumber(start)&&isFiniteNumber(end)){var _frag$decryptdata;let byteRangeStart=start;let byteRangeEnd=end;if(frag.sn==='initSegment'&&((_frag$decryptdata=frag.decryptdata)==null?void 0:_frag$decryptdata.method)==='AES-128'){// MAP segment encrypted with method 'AES-128', when served with HTTP Range,
// has the unencrypted size specified in the range.
// Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
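// e.g. a MAP byte range of 16-40 becomes a request for 0-48: the end is rounded up so the encrypted payload
// length is a multiple of 16, and since the range does not start at 0 the start moves back one 16-byte block
// so that the extra leading block can serve as the IV (see the resetIV handling in FragmentLoader's onSuccess).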
const fragmentLen=end-start;if(fragmentLen%16){byteRangeEnd=end+(16-fragmentLen%16);}if(start!==0){loaderContext.resetIV=true;byteRangeStart=start-16;}}loaderContext.rangeStart=byteRangeStart;loaderContext.rangeEnd=byteRangeEnd;}return loaderContext;}function createGapLoadError(frag,part){const error=new Error(`GAP ${frag.gap?'tag':'attribute'} found`);const errorData={type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.FRAG_GAP,fatal:false,frag,error,networkDetails:null};if(part){errorData.part=part;}(part?part:frag).stats.aborted=true;return new LoadError(errorData);}class LoadError extends Error{constructor(data){super(data.error.message);this.data=void 0;this.data=data;}}class AESCrypto{constructor(subtle,iv){this.subtle=void 0;this.aesIV=void 0;this.subtle=subtle;this.aesIV=iv;}decrypt(data,key){return this.subtle.decrypt({name:'AES-CBC',iv:this.aesIV},key,data);}}class FastAESKey{constructor(subtle,key){this.subtle=void 0;this.key=void 0;this.subtle=subtle;this.key=key;}expandKey(){return this.subtle.importKey('raw',this.key,{name:'AES-CBC'},false,['encrypt','decrypt']);}}// PKCS7
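// PKCS7 padding: the final byte encodes how many padding bytes (1-16) were appended, so removePadding()
// simply drops that many bytes from the end. Only the last byte is inspected; the padding bytes themselves
// are not validated here.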
function removePadding(array){const outputBytes=array.byteLength;const paddingBytes=outputBytes&&new DataView(array.buffer).getUint8(outputBytes-1);if(paddingBytes){return sliceUint8(array,0,outputBytes-paddingBytes);}return array;}class AESDecryptor{constructor(){this.rcon=[0x0,0x1,0x2,0x4,0x8,0x10,0x20,0x40,0x80,0x1b,0x36];this.subMix=[new Uint32Array(256),new Uint32Array(256),new Uint32Array(256),new Uint32Array(256)];this.invSubMix=[new Uint32Array(256),new Uint32Array(256),new Uint32Array(256),new Uint32Array(256)];this.sBox=new Uint32Array(256);this.invSBox=new Uint32Array(256);this.key=new Uint32Array(0);this.ksRows=0;this.keySize=0;this.keySchedule=void 0;this.invKeySchedule=void 0;this.initTable();}// Using view.getUint32() also swaps the byte order.
uint8ArrayToUint32Array_(arrayBuffer){const view=new DataView(arrayBuffer);const newArray=new Uint32Array(4);for(let i=0;i<4;i++){newArray[i]=view.getUint32(i*4);}return newArray;}initTable(){const sBox=this.sBox;const invSBox=this.invSBox;const subMix=this.subMix;const subMix0=subMix[0];const subMix1=subMix[1];const subMix2=subMix[2];const subMix3=subMix[3];const invSubMix=this.invSubMix;const invSubMix0=invSubMix[0];const invSubMix1=invSubMix[1];const invSubMix2=invSubMix[2];const invSubMix3=invSubMix[3];const d=new Uint32Array(256);let x=0;let xi=0;let i=0;for(i=0;i<256;i++){if(i<128){d[i]=i<<1;}else {d[i]=i<<1^0x11b;}}for(i=0;i<256;i++){let sx=xi^xi<<1^xi<<2^xi<<3^xi<<4;sx=sx>>>8^sx&0xff^0x63;sBox[x]=sx;invSBox[sx]=x;// Compute multiplication
const x2=d[x];const x4=d[x2];const x8=d[x4];// Compute sub/invSub bytes, mix columns tables
let t=d[sx]*0x101^sx*0x1010100;subMix0[x]=t<<24|t>>>8;subMix1[x]=t<<16|t>>>16;subMix2[x]=t<<8|t>>>24;subMix3[x]=t;// Compute inv sub bytes, inv mix columns tables
t=x8*0x1010101^x4*0x10001^x2*0x101^x*0x1010100;invSubMix0[sx]=t<<24|t>>>8;invSubMix1[sx]=t<<16|t>>>16;invSubMix2[sx]=t<<8|t>>>24;invSubMix3[sx]=t;// Compute next counter
if(!x){x=xi=1;}else {x=x2^d[d[d[x8^x2]]];xi^=d[d[xi]];}}}expandKey(keyBuffer){// convert keyBuffer to Uint32Array
const key=this.uint8ArrayToUint32Array_(keyBuffer);let sameKey=true;let offset=0;while(offset<key.length&&sameKey){sameKey=key[offset]===this.key[offset];offset++;}if(sameKey){return;}this.key=key;const keySize=this.keySize=key.length;if(keySize!==4&&keySize!==6&&keySize!==8){throw new Error('Invalid aes key size='+keySize);}const ksRows=this.ksRows=(keySize+6+1)*4;let ksRow;let invKsRow;const keySchedule=this.keySchedule=new Uint32Array(ksRows);const invKeySchedule=this.invKeySchedule=new Uint32Array(ksRows);const sbox=this.sBox;const rcon=this.rcon;const invSubMix=this.invSubMix;const invSubMix0=invSubMix[0];const invSubMix1=invSubMix[1];const invSubMix2=invSubMix[2];const invSubMix3=invSubMix[3];let prev;let t;for(ksRow=0;ksRow<ksRows;ksRow++){if(ksRow<keySize){prev=keySchedule[ksRow]=key[ksRow];continue;}t=prev;if(ksRow%keySize===0){// Rot word
t=t<<8|t>>>24;// Sub word
t=sbox[t>>>24]<<24|sbox[t>>>16&0xff]<<16|sbox[t>>>8&0xff]<<8|sbox[t&0xff];// Mix Rcon
t^=rcon[ksRow/keySize|0]<<24;}else if(keySize>6&&ksRow%keySize===4){// Sub word
t=sbox[t>>>24]<<24|sbox[t>>>16&0xff]<<16|sbox[t>>>8&0xff]<<8|sbox[t&0xff];}keySchedule[ksRow]=prev=(keySchedule[ksRow-keySize]^t)>>>0;}for(invKsRow=0;invKsRow<ksRows;invKsRow++){ksRow=ksRows-invKsRow;if(invKsRow&3){t=keySchedule[ksRow];}else {t=keySchedule[ksRow-4];}if(invKsRow<4||ksRow<=4){invKeySchedule[invKsRow]=t;}else {invKeySchedule[invKsRow]=invSubMix0[sbox[t>>>24]]^invSubMix1[sbox[t>>>16&0xff]]^invSubMix2[sbox[t>>>8&0xff]]^invSubMix3[sbox[t&0xff]];}invKeySchedule[invKsRow]=invKeySchedule[invKsRow]>>>0;}}// Adding this as a method greatly improves performance.
networkToHostOrderSwap(word){return word<<24|(word&0xff00)<<8|(word&0xff0000)>>8|word>>>24;}decrypt(inputArrayBuffer,offset,aesIV){const nRounds=this.keySize+6;const invKeySchedule=this.invKeySchedule;const invSBOX=this.invSBox;const invSubMix=this.invSubMix;const invSubMix0=invSubMix[0];const invSubMix1=invSubMix[1];const invSubMix2=invSubMix[2];const invSubMix3=invSubMix[3];const initVector=this.uint8ArrayToUint32Array_(aesIV);let initVector0=initVector[0];let initVector1=initVector[1];let initVector2=initVector[2];let initVector3=initVector[3];const inputInt32=new Int32Array(inputArrayBuffer);const outputInt32=new Int32Array(inputInt32.length);let t0,t1,t2,t3;let s0,s1,s2,s3;let inputWords0,inputWords1,inputWords2,inputWords3;let ksRow,i;const swapWord=this.networkToHostOrderSwap;while(offset<inputInt32.length){inputWords0=swapWord(inputInt32[offset]);inputWords1=swapWord(inputInt32[offset+1]);inputWords2=swapWord(inputInt32[offset+2]);inputWords3=swapWord(inputInt32[offset+3]);s0=inputWords0^invKeySchedule[0];s1=inputWords3^invKeySchedule[1];s2=inputWords2^invKeySchedule[2];s3=inputWords1^invKeySchedule[3];ksRow=4;// Iterate through the rounds of decryption
for(i=1;i<nRounds;i++){t0=invSubMix0[s0>>>24]^invSubMix1[s1>>16&0xff]^invSubMix2[s2>>8&0xff]^invSubMix3[s3&0xff]^invKeySchedule[ksRow];t1=invSubMix0[s1>>>24]^invSubMix1[s2>>16&0xff]^invSubMix2[s3>>8&0xff]^invSubMix3[s0&0xff]^invKeySchedule[ksRow+1];t2=invSubMix0[s2>>>24]^invSubMix1[s3>>16&0xff]^invSubMix2[s0>>8&0xff]^invSubMix3[s1&0xff]^invKeySchedule[ksRow+2];t3=invSubMix0[s3>>>24]^invSubMix1[s0>>16&0xff]^invSubMix2[s1>>8&0xff]^invSubMix3[s2&0xff]^invKeySchedule[ksRow+3];// Update state
s0=t0;s1=t1;s2=t2;s3=t3;ksRow=ksRow+4;}// Shift rows, sub bytes, add round key
t0=invSBOX[s0>>>24]<<24^invSBOX[s1>>16&0xff]<<16^invSBOX[s2>>8&0xff]<<8^invSBOX[s3&0xff]^invKeySchedule[ksRow];t1=invSBOX[s1>>>24]<<24^invSBOX[s2>>16&0xff]<<16^invSBOX[s3>>8&0xff]<<8^invSBOX[s0&0xff]^invKeySchedule[ksRow+1];t2=invSBOX[s2>>>24]<<24^invSBOX[s3>>16&0xff]<<16^invSBOX[s0>>8&0xff]<<8^invSBOX[s1&0xff]^invKeySchedule[ksRow+2];t3=invSBOX[s3>>>24]<<24^invSBOX[s0>>16&0xff]<<16^invSBOX[s1>>8&0xff]<<8^invSBOX[s2&0xff]^invKeySchedule[ksRow+3];// Write
outputInt32[offset]=swapWord(t0^initVector0);outputInt32[offset+1]=swapWord(t3^initVector1);outputInt32[offset+2]=swapWord(t2^initVector2);outputInt32[offset+3]=swapWord(t1^initVector3);// reset initVector to last 4 unsigned int
initVector0=inputWords0;initVector1=inputWords1;initVector2=inputWords2;initVector3=inputWords3;offset=offset+4;}return outputInt32.buffer;}}const CHUNK_SIZE=16;// 16 bytes, 128 bits
class Decrypter{constructor(config,{removePKCS7Padding=true}={}){this.logEnabled=true;this.removePKCS7Padding=void 0;this.subtle=null;this.softwareDecrypter=null;this.key=null;this.fastAesKey=null;this.remainderData=null;this.currentIV=null;this.currentResult=null;this.useSoftware=void 0;this.useSoftware=config.enableSoftwareAES;this.removePKCS7Padding=removePKCS7Padding;// built in decryptor expects PKCS7 padding
if(removePKCS7Padding){try{const browserCrypto=self.crypto;if(browserCrypto){this.subtle=browserCrypto.subtle||browserCrypto.webkitSubtle;}}catch(e){/* no-op */}}this.useSoftware=!this.subtle;}destroy(){this.subtle=null;this.softwareDecrypter=null;this.key=null;this.fastAesKey=null;this.remainderData=null;this.currentIV=null;this.currentResult=null;}isSync(){return this.useSoftware;}flush(){const{currentResult,remainderData}=this;if(!currentResult||remainderData){this.reset();return null;}const data=new Uint8Array(currentResult);this.reset();if(this.removePKCS7Padding){return removePadding(data);}return data;}reset(){this.currentResult=null;this.currentIV=null;this.remainderData=null;if(this.softwareDecrypter){this.softwareDecrypter=null;}}decrypt(data,key,iv){if(this.useSoftware){return new Promise((resolve,reject)=>{this.softwareDecrypt(new Uint8Array(data),key,iv);const decryptResult=this.flush();if(decryptResult){resolve(decryptResult.buffer);}else {reject(new Error('[softwareDecrypt] Failed to decrypt data'));}});}return this.webCryptoDecrypt(new Uint8Array(data),key,iv);}// Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
// data is handled in the flush() call
softwareDecrypt(data,key,iv){const{currentIV,currentResult,remainderData}=this;this.logOnce('JS AES decrypt');// The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
// This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
// the end on flush(), but by that time we have already received all bytes for the segment.
// Progressive decryption does not work with WebCrypto
if(remainderData){data=appendUint8Array(remainderData,data);this.remainderData=null;}// Byte length must be a multiple of 16 (AES-128 = 128 bit blocks = 16 bytes)
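// Any trailing remainder (< 16 bytes) is stashed in this.remainderData by getValidChunk() and prepended to
// the next call, so the software decryptor always operates on whole 16-byte blocks.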
const currentChunk=this.getValidChunk(data);if(!currentChunk.length){return null;}if(currentIV){iv=currentIV;}let softwareDecrypter=this.softwareDecrypter;if(!softwareDecrypter){softwareDecrypter=this.softwareDecrypter=new AESDecryptor();}softwareDecrypter.expandKey(key);const result=currentResult;this.currentResult=softwareDecrypter.decrypt(currentChunk.buffer,0,iv);this.currentIV=sliceUint8(currentChunk,-16).buffer;if(!result){return null;}return result;}webCryptoDecrypt(data,key,iv){if(this.key!==key||!this.fastAesKey){if(!this.subtle){return Promise.resolve(this.onWebCryptoError(data,key,iv));}this.key=key;this.fastAesKey=new FastAESKey(this.subtle,key);}return this.fastAesKey.expandKey().then(aesKey=>{// decrypt using web crypto
if(!this.subtle){return Promise.reject(new Error('web crypto not initialized'));}this.logOnce('WebCrypto AES decrypt');const crypto=new AESCrypto(this.subtle,new Uint8Array(iv));return crypto.decrypt(data.buffer,aesKey);}).catch(err=>{logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);return this.onWebCryptoError(data,key,iv);});}onWebCryptoError(data,key,iv){this.useSoftware=true;this.logEnabled=true;this.softwareDecrypt(data,key,iv);const decryptResult=this.flush();if(decryptResult){return decryptResult.buffer;}throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');}getValidChunk(data){let currentChunk=data;const splitPoint=data.length-data.length%CHUNK_SIZE;if(splitPoint!==data.length){currentChunk=sliceUint8(data,0,splitPoint);this.remainderData=sliceUint8(data,splitPoint);}return currentChunk;}logOnce(msg){if(!this.logEnabled){return;}logger.log(`[decrypter]: ${msg}`);this.logEnabled=false;}}/**
* TimeRanges to string helper
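* e.g. toString(media.buffered) produces a string like "[0.000-9.984][10.010-30.000]": three decimal places,
* one bracket pair per buffered range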
*/const TimeRanges={toString:function(r){let log='';const len=r.length;for(let i=0;i<len;i++){log+=`[${r.start(i).toFixed(3)}-${r.end(i).toFixed(3)}]`;}return log;}};const State={STOPPED:'STOPPED',IDLE:'IDLE',KEY_LOADING:'KEY_LOADING',FRAG_LOADING:'FRAG_LOADING',FRAG_LOADING_WAITING_RETRY:'FRAG_LOADING_WAITING_RETRY',WAITING_TRACK:'WAITING_TRACK',PARSING:'PARSING',PARSED:'PARSED',ENDED:'ENDED',ERROR:'ERROR',WAITING_INIT_PTS:'WAITING_INIT_PTS',WAITING_LEVEL:'WAITING_LEVEL'};class BaseStreamController extends TaskLoop{constructor(hls,fragmentTracker,keyLoader,logPrefix,playlistType){super();this.hls=void 0;this.fragPrevious=null;this.fragCurrent=null;this.fragmentTracker=void 0;this.transmuxer=null;this._state=State.STOPPED;this.playlistType=void 0;this.media=null;this.mediaBuffer=null;this.config=void 0;this.bitrateTest=false;this.lastCurrentTime=0;this.nextLoadPosition=0;this.startPosition=0;this.startTimeOffset=null;this.loadedmetadata=false;this.retryDate=0;this.levels=null;this.fragmentLoader=void 0;this.keyLoader=void 0;this.levelLastLoaded=null;this.startFragRequested=false;this.decrypter=void 0;this.initPTS=[];this.onvseeking=null;this.onvended=null;this.logPrefix='';this.log=void 0;this.warn=void 0;this.playlistType=playlistType;this.logPrefix=logPrefix;this.log=logger.log.bind(logger,`${logPrefix}:`);this.warn=logger.warn.bind(logger,`${logPrefix}:`);this.hls=hls;this.fragmentLoader=new FragmentLoader(hls.config);this.keyLoader=keyLoader;this.fragmentTracker=fragmentTracker;this.config=hls.config;this.decrypter=new Decrypter(hls.config);hls.on(Events.MANIFEST_LOADED,this.onManifestLoaded,this);}doTick(){this.onTickEnd();}onTickEnd(){}// eslint-disable-next-line @typescript-eslint/no-unused-vars
startLoad(startPosition){}stopLoad(){this.fragmentLoader.abort();this.keyLoader.abort(this.playlistType);const frag=this.fragCurrent;if(frag!=null&&frag.loader){frag.abortRequests();this.fragmentTracker.removeFragment(frag);}this.resetTransmuxer();this.fragCurrent=null;this.fragPrevious=null;this.clearInterval();this.clearNextTick();this.state=State.STOPPED;}_streamEnded(bufferInfo,levelDetails){// If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
// or nothing loading/loaded, return false
if(levelDetails.live||bufferInfo.nextStart||!bufferInfo.end||!this.media){return false;}const partList=levelDetails.partList;// Since the last part isn't guaranteed to correspond to the last playlist segment for Low-Latency HLS,
// check instead if the last part is buffered.
if(partList!=null&&partList.length){const lastPart=partList[partList.length-1];// Checking the midpoint of the part for potential margin of error and related issues.
// NOTE: Technically I believe parts could yield content that is < the computed duration (including potentially a duration of 0)
// and still be spec-compliant, so there may still be edge cases here. Likewise, there could be issues in end of stream
// part mismatches for independent audio and video playlists/segments.
const lastPartBuffered=BufferHelper.isBuffered(this.media,lastPart.start+lastPart.duration/2);return lastPartBuffered;}const playlistType=levelDetails.fragments[levelDetails.fragments.length-1].type;return this.fragmentTracker.isEndListAppended(playlistType);}getLevelDetails(){if(this.levels&&this.levelLastLoaded!==null){var _this$levelLastLoaded;return (_this$levelLastLoaded=this.levelLastLoaded)==null?void 0:_this$levelLastLoaded.details;}}onMediaAttached(event,data){const media=this.media=this.mediaBuffer=data.media;this.onvseeking=this.onMediaSeeking.bind(this);this.onvended=this.onMediaEnded.bind(this);media.addEventListener('seeking',this.onvseeking);media.addEventListener('ended',this.onvended);const config=this.config;if(this.levels&&config.autoStartLoad&&this.state===State.STOPPED){this.startLoad(config.startPosition);}}onMediaDetaching(){const media=this.media;if(media!=null&&media.ended){this.log('MSE detaching and video ended, reset startPosition');this.startPosition=this.lastCurrentTime=0;}// remove video listeners
if(media&&this.onvseeking&&this.onvended){media.removeEventListener('seeking',this.onvseeking);media.removeEventListener('ended',this.onvended);this.onvseeking=this.onvended=null;}if(this.keyLoader){this.keyLoader.detach();}this.media=this.mediaBuffer=null;this.loadedmetadata=false;this.fragmentTracker.removeAllFragments();this.stopLoad();}onMediaSeeking(){const{config,fragCurrent,media,mediaBuffer,state}=this;const currentTime=media?media.currentTime:0;const bufferInfo=BufferHelper.bufferInfo(mediaBuffer?mediaBuffer:media,currentTime,config.maxBufferHole);this.log(`media seeking to ${isFiniteNumber(currentTime)?currentTime.toFixed(3):currentTime}, state: ${state}`);if(this.state===State.ENDED){this.resetLoadingState();}else if(fragCurrent){// Seeking while frag load is in progress
const tolerance=config.maxFragLookUpTolerance;const fragStartOffset=fragCurrent.start-tolerance;const fragEndOffset=fragCurrent.start+fragCurrent.duration+tolerance;// if seeking out of buffered range or into new one
if(!bufferInfo.len||fragEndOffset<bufferInfo.start||fragStartOffset>bufferInfo.end){const pastFragment=currentTime>fragEndOffset;// if the seek position is outside the current fragment range
if(currentTime<fragStartOffset||pastFragment){if(pastFragment&&fragCurrent.loader){this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');fragCurrent.abortRequests();this.resetLoadingState();}this.fragPrevious=null;}}}if(media){// Remove gap fragments
this.fragmentTracker.removeFragmentsInRange(currentTime,Infinity,this.playlistType,true);this.lastCurrentTime=currentTime;}// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
if(!this.loadedmetadata&&!bufferInfo.len){this.nextLoadPosition=this.startPosition=currentTime;}// Async tick to speed up processing
this.tickImmediate();}onMediaEnded(){// reset startPosition and lastCurrentTime to restart playback @ stream beginning
this.startPosition=this.lastCurrentTime=0;}onManifestLoaded(event,data){this.startTimeOffset=data.startTimeOffset;this.initPTS=[];}onHandlerDestroying(){this.hls.off(Events.MANIFEST_LOADED,this.onManifestLoaded,this);this.stopLoad();super.onHandlerDestroying();// @ts-ignore
this.hls=null;}onHandlerDestroyed(){this.state=State.STOPPED;if(this.fragmentLoader){this.fragmentLoader.destroy();}if(this.keyLoader){this.keyLoader.destroy();}if(this.decrypter){this.decrypter.destroy();}this.hls=this.log=this.warn=this.decrypter=this.keyLoader=this.fragmentLoader=this.fragmentTracker=null;super.onHandlerDestroyed();}loadFragment(frag,level,targetBufferTime){this._loadFragForPlayback(frag,level,targetBufferTime);}_loadFragForPlayback(frag,level,targetBufferTime){const progressCallback=data=>{if(this.fragContextChanged(frag)){this.warn(`Fragment ${frag.sn}${data.part?' p: '+data.part.index:''} of level ${frag.level} was dropped during download.`);this.fragmentTracker.removeFragment(frag);return;}frag.stats.chunkCount++;this._handleFragmentLoadProgress(data);};this._doFragLoad(frag,level,targetBufferTime,progressCallback).then(data=>{if(!data){// if we're here we probably needed to backtrack or are waiting for more parts
return;}const state=this.state;if(this.fragContextChanged(frag)){if(state===State.FRAG_LOADING||!this.fragCurrent&&state===State.PARSING){this.fragmentTracker.removeFragment(frag);this.state=State.IDLE;}return;}if('payload'in data){this.log(`Loaded fragment ${frag.sn} of level ${frag.level}`);this.hls.trigger(Events.FRAG_LOADED,data);}// Pass through the whole payload; controllers not implementing progressive loading receive data from this callback
this._handleFragmentLoadComplete(data);}).catch(reason=>{if(this.state===State.STOPPED||this.state===State.ERROR){return;}this.warn(`Frag error: ${(reason==null?void 0:reason.message)||reason}`);this.resetFragmentLoading(frag);});}clearTrackerIfNeeded(frag){var _this$mediaBuffer;const{fragmentTracker}=this;const fragState=fragmentTracker.getState(frag);if(fragState===FragmentState.APPENDING){// Lower the max buffer length and try again
const playlistType=frag.type;const bufferedInfo=this.getFwdBufferInfo(this.mediaBuffer,playlistType);const minForwardBufferLength=Math.max(frag.duration,bufferedInfo?bufferedInfo.len:this.config.maxBufferLength);// If backtracking, always remove from the tracker without reducing max buffer length
const backtrackFragment=this.backtrackFragment;const backtracked=backtrackFragment?frag.sn-backtrackFragment.sn:0;if(backtracked===1||this.reduceMaxBufferLength(minForwardBufferLength,frag.duration)){fragmentTracker.removeFragment(frag);}}else if(((_this$mediaBuffer=this.mediaBuffer)==null?void 0:_this$mediaBuffer.buffered.length)===0){// Stop gap for bad tracker / buffer flush behavior
fragmentTracker.removeAllFragments();}else if(fragmentTracker.hasParts(frag.type)){// In low latency mode, remove fragments for which only some parts were buffered
fragmentTracker.detectPartialFragments({frag,part:null,stats:frag.stats,id:frag.type});if(fragmentTracker.getState(frag)===FragmentState.PARTIAL){fragmentTracker.removeFragment(frag);}}}checkLiveUpdate(details){if(details.updated&&!details.live){// Live stream ended, update fragment tracker
const lastFragment=details.fragments[details.fragments.length-1];this.fragmentTracker.detectPartialFragments({frag:lastFragment,part:null,stats:lastFragment.stats,id:lastFragment.type});}if(!details.fragments[0]){details.deltaUpdateFailed=true;}}flushMainBuffer(startOffset,endOffset,type=null){if(!(startOffset-endOffset)){return;}// When alternate audio is playing, the audio-stream-controller is responsible for the audio buffer. Otherwise,
// passing a null type flushes both buffers
const flushScope={startOffset,endOffset,type};this.hls.trigger(Events.BUFFER_FLUSHING,flushScope);}_loadInitSegment(frag,level){this._doFragLoad(frag,level).then(data=>{if(!data||this.fragContextChanged(frag)||!this.levels){throw new Error('init load aborted');}return data;}).then(data=>{const{hls}=this;const{payload}=data;const decryptData=frag.decryptdata;// check to see if the payload needs to be decrypted
if(payload&&payload.byteLength>0&&decryptData!=null&&decryptData.key&&decryptData.iv&&decryptData.method==='AES-128'){const startTime=self.performance.now();// decrypt init segment data
return this.decrypter.decrypt(new Uint8Array(payload),decryptData.key.buffer,decryptData.iv.buffer).catch(err=>{hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.FRAG_DECRYPT_ERROR,fatal:false,error:err,reason:err.message,frag});throw err;}).then(decryptedData=>{const endTime=self.performance.now();hls.trigger(Events.FRAG_DECRYPTED,{frag,payload:decryptedData,stats:{tstart:startTime,tdecrypt:endTime}});data.payload=decryptedData;return this.completeInitSegmentLoad(data);});}return this.completeInitSegmentLoad(data);}).catch(reason=>{if(this.state===State.STOPPED||this.state===State.ERROR){return;}this.warn(reason);this.resetFragmentLoading(frag);});}completeInitSegmentLoad(data){const{levels}=this;if(!levels){throw new Error('init load aborted, missing levels');}const stats=data.frag.stats;this.state=State.IDLE;data.frag.data=new Uint8Array(data.payload);stats.parsing.start=stats.buffering.start=self.performance.now();stats.parsing.end=stats.buffering.end=self.performance.now();this.tick();}fragContextChanged(frag){const{fragCurrent}=this;return !frag||!fragCurrent||frag.sn!==fragCurrent.sn||frag.level!==fragCurrent.level;}fragBufferedComplete(frag,part){var _frag$startPTS,_frag$endPTS,_this$fragCurrent,_this$fragPrevious;const media=this.mediaBuffer?this.mediaBuffer:this.media;this.log(`Buffered ${frag.type} sn: ${frag.sn}${part?' part: '+part.index:''} of ${this.playlistType===PlaylistLevelType.MAIN?'level':'track'} ${frag.level} (frag:[${((_frag$startPTS=frag.startPTS)!=null?_frag$startPTS:NaN).toFixed(3)}-${((_frag$endPTS=frag.endPTS)!=null?_frag$endPTS:NaN).toFixed(3)}] > buffer:${media?TimeRanges.toString(BufferHelper.getBuffered(media)):'(detached)'})`);if(frag.sn!=='initSegment'){var _this$levels;if(frag.type!==PlaylistLevelType.SUBTITLE){const el=frag.elementaryStreams;if(!Object.keys(el).some(type=>!!el[type])){// empty segment
this.state=State.IDLE;return;}}const level=(_this$levels=this.levels)==null?void 0:_this$levels[frag.level];if(level!=null&&level.fragmentError){this.log(`Resetting level fragment error count of ${level.fragmentError} on frag buffered`);level.fragmentError=0;}}this.state=State.IDLE;if(!media){return;}if(!this.loadedmetadata&&frag.type==PlaylistLevelType.MAIN&&media.buffered.length&&((_this$fragCurrent=this.fragCurrent)==null?void 0:_this$fragCurrent.sn)===((_this$fragPrevious=this.fragPrevious)==null?void 0:_this$fragPrevious.sn)){this.loadedmetadata=true;this.seekToStartPos();}this.tick();}seekToStartPos(){}_handleFragmentLoadComplete(fragLoadedEndData){const{transmuxer}=this;if(!transmuxer){return;}const{frag,part,partsLoaded}=fragLoadedEndData;// If we did not load parts, or loaded all parts, we have complete (not partial) fragment data
const complete=!partsLoaded||partsLoaded.length===0||partsLoaded.some(fragLoaded=>!fragLoaded);const chunkMeta=new ChunkMetadata(frag.level,frag.sn,frag.stats.chunkCount+1,0,part?part.index:-1,!complete);transmuxer.flush(chunkMeta);}// eslint-disable-next-line @typescript-eslint/no-unused-vars
_handleFragmentLoadProgress(frag){}_doFragLoad(frag,level,targetBufferTime=null,progressCallback){var _frag$decryptdata;const details=level==null?void 0:level.details;if(!this.levels||!details){throw new Error(`frag load aborted, missing level${details?'':' detail'}s`);}let keyLoadingPromise=null;if(frag.encrypted&&!((_frag$decryptdata=frag.decryptdata)!=null&&_frag$decryptdata.key)){this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix==='[stream-controller]'?'level':'track'} ${frag.level}`);this.state=State.KEY_LOADING;this.fragCurrent=frag;keyLoadingPromise=this.keyLoader.load(frag).then(keyLoadedData=>{if(!this.fragContextChanged(keyLoadedData.frag)){this.hls.trigger(Events.KEY_LOADED,keyLoadedData);if(this.state===State.KEY_LOADING){this.state=State.IDLE;}return keyLoadedData;}});this.hls.trigger(Events.KEY_LOADING,{frag});if(this.fragCurrent===null){keyLoadingPromise=Promise.reject(new Error(`frag load aborted, context changed in KEY_LOADING`));}}else if(!frag.encrypted&&details.encryptedFragments.length){this.keyLoader.loadClear(frag,details.encryptedFragments);}targetBufferTime=Math.max(frag.start,targetBufferTime||0);if(this.config.lowLatencyMode&&frag.sn!=='initSegment'){const partList=details.partList;if(partList&&progressCallback){if(targetBufferTime>frag.end&&details.fragmentHint){frag=details.fragmentHint;}const partIndex=this.getNextPart(partList,frag,targetBufferTime);if(partIndex>-1){const part=partList[partIndex];this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length-1}] ${this.logPrefix==='[stream-controller]'?'level':'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);this.nextLoadPosition=part.start+part.duration;this.state=State.FRAG_LOADING;let _result;if(keyLoadingPromise){_result=keyLoadingPromise.then(keyLoadedData=>{if(!keyLoadedData||this.fragContextChanged(keyLoadedData.frag)){return null;}return this.doFragPartsLoad(frag,part,level,progressCallback);}).catch(error=>this.handleFragLoadError(error));}else {_result=this.doFragPartsLoad(frag,part,level,progressCallback).catch(error=>this.handleFragLoadError(error));}this.hls.trigger(Events.FRAG_LOADING,{frag,part,targetBufferTime});if(this.fragCurrent===null){return Promise.reject(new Error(`frag load aborted, context changed in FRAG_LOADING parts`));}return _result;}else if(!frag.url||this.loadedEndOfParts(partList,targetBufferTime)){// Fragment hint has no parts
return Promise.resolve(null);}}}this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details?'of ['+details.startSN+'-'+details.endSN+'] ':''}${this.logPrefix==='[stream-controller]'?'level':'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);// Don't update nextLoadPosition for fragments which are not buffered
if(isFiniteNumber(frag.sn)&&!this.bitrateTest){this.nextLoadPosition=frag.start+frag.duration;}this.state=State.FRAG_LOADING;// Load key before streaming fragment data
const dataOnProgress=this.config.progressive;let result;if(dataOnProgress&&keyLoadingPromise){result=keyLoadingPromise.then(keyLoadedData=>{if(!keyLoadedData||this.fragContextChanged(keyLoadedData==null?void 0:keyLoadedData.frag)){return null;}return this.fragmentLoader.load(frag,progressCallback);}).catch(error=>this.handleFragLoadError(error));}else {// load unencrypted fragment data with progress event,
// or handle fragment result after key and fragment are finished loading
result=Promise.all([this.fragmentLoader.load(frag,dataOnProgress?progressCallback:undefined),keyLoadingPromise]).then(([fragLoadedData])=>{if(!dataOnProgress&&fragLoadedData&&progressCallback){progressCallback(fragLoadedData);}return fragLoadedData;}).catch(error=>this.handleFragLoadError(error));}this.hls.trigger(Events.FRAG_LOADING,{frag,targetBufferTime});if(this.fragCurrent===null){return Promise.reject(new Error(`frag load aborted, context changed in FRAG_LOADING`));}return result;}doFragPartsLoad(frag,fromPart,level,progressCallback){return new Promise((resolve,reject)=>{var _level$details;const partsLoaded=[];const initialPartList=(_level$details=level.details)==null?void 0:_level$details.partList;const loadPart=part=>{this.fragmentLoader.loadPart(frag,part,progressCallback).then(partLoadedData=>{partsLoaded[part.index]=partLoadedData;const loadedPart=partLoadedData.part;this.hls.trigger(Events.FRAG_LOADED,partLoadedData);const nextPart=getPartWith(level,frag.sn,part.index+1)||findPart(initialPartList,frag.sn,part.index+1);if(nextPart){loadPart(nextPart);}else {return resolve({frag,part:loadedPart,partsLoaded});}}).catch(reject);};loadPart(fromPart);});}handleFragLoadError(error){if('data'in error){const data=error.data;if(error.data&&data.details===ErrorDetails.INTERNAL_ABORTED){this.handleFragLoadAborted(data.frag,data.part);}else {this.hls.trigger(Events.ERROR,data);}}else {this.hls.trigger(Events.ERROR,{type:ErrorTypes.OTHER_ERROR,details:ErrorDetails.INTERNAL_EXCEPTION,err:error,error,fatal:true});}return null;}_handleTransmuxerFlush(chunkMeta){const context=this.getCurrentContext(chunkMeta);if(!context||this.state!==State.PARSING){if(!this.fragCurrent&&this.state!==State.STOPPED&&this.state!==State.ERROR){this.state=State.IDLE;}return;}const{frag,part,level}=context;const now=self.performance.now();frag.stats.parsing.end=now;if(part){part.stats.parsing.end=now;}this.updateLevelTiming(frag,part,level,chunkMeta.partial);}getCurrentContext(chunkMeta){const{levels,fragCurrent}=this;const{level:levelIndex,sn,part:partIndex}=chunkMeta;if(!(levels!=null&&levels[levelIndex])){this.warn(`Levels object was unset while buffering fragment ${sn} of level ${levelIndex}. The current chunk will not be buffered.`);return null;}const level=levels[levelIndex];const part=partIndex>-1?getPartWith(level,sn,partIndex):null;const frag=part?part.fragment:getFragmentWithSN(level,sn,fragCurrent);if(!frag){return null;}if(fragCurrent&&fragCurrent!==frag){frag.stats=fragCurrent.stats;}return {frag,part,level};}bufferFragmentData(data,frag,part,chunkMeta,noBacktracking){var _buffer;if(!data||this.state!==State.PARSING){return;}const{data1,data2}=data;let buffer=data1;if(data1&&data2){// Combine the moof + mdat so that we buffer with a single append
buffer=appendUint8Array(data1,data2);}if(!((_buffer=buffer)!=null&&_buffer.length)){return;}const segment={type:data.type,frag,part,chunkMeta,parent:frag.type,data:buffer};this.hls.trigger(Events.BUFFER_APPENDING,segment);if(data.dropped&&data.independent&&!part){if(noBacktracking){return;}// Clear buffer so that we reload previous segments sequentially if required
this.flushBufferGap(frag);}}flushBufferGap(frag){const media=this.media;if(!media){return;}// If currentTime is not buffered, clear the back buffer so that we can backtrack as much as needed
if(!BufferHelper.isBuffered(media,media.currentTime)){this.flushMainBuffer(0,frag.start);return;}// Remove back-buffer without interrupting playback to allow back tracking
const currentTime=media.currentTime;const bufferInfo=BufferHelper.bufferInfo(media,currentTime,0);const fragDuration=frag.duration;const segmentFraction=Math.min(this.config.maxFragLookUpTolerance*2,fragDuration*0.25);const start=Math.max(Math.min(frag.start-segmentFraction,bufferInfo.end-segmentFraction),currentTime+segmentFraction);if(frag.start-start>segmentFraction){this.flushMainBuffer(start,frag.start);}}getFwdBufferInfo(bufferable,type){const pos=this.getLoadPosition();if(!isFiniteNumber(pos)){return null;}return this.getFwdBufferInfoAtPos(bufferable,pos,type);}getFwdBufferInfoAtPos(bufferable,pos,type){const{config:{maxBufferHole}}=this;const bufferInfo=BufferHelper.bufferInfo(bufferable,pos,maxBufferHole);// Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
if(bufferInfo.len===0&&bufferInfo.nextStart!==undefined){const bufferedFragAtPos=this.fragmentTracker.getBufferedFrag(pos,type);if(bufferedFragAtPos&&bufferInfo.nextStart<bufferedFragAtPos.end){return BufferHelper.bufferInfo(bufferable,pos,Math.max(bufferInfo.nextStart,maxBufferHole));}}return bufferInfo;}getMaxBufferLength(levelBitrate){const{config}=this;let maxBufLen;if(levelBitrate){maxBufLen=Math.max(8*config.maxBufferSize/levelBitrate,config.maxBufferLength);}else {maxBufLen=config.maxBufferLength;}return Math.min(maxBufLen,config.maxMaxBufferLength);}reduceMaxBufferLength(threshold,fragDuration){const config=this.config;const minLength=Math.max(Math.min(threshold-fragDuration,config.maxBufferLength),fragDuration);const reducedLength=Math.max(threshold-fragDuration*3,config.maxMaxBufferLength/2,minLength);if(reducedLength>=minLength){// reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
config.maxMaxBufferLength=reducedLength;this.warn(`Reduce max buffer length to ${reducedLength}s`);return true;}return false;}getAppendedFrag(position,playlistType=PlaylistLevelType.MAIN){const fragOrPart=this.fragmentTracker.getAppendedFrag(position,PlaylistLevelType.MAIN);if(fragOrPart&&'fragment'in fragOrPart){return fragOrPart.fragment;}return fragOrPart;}getNextFragment(pos,levelDetails){const fragments=levelDetails.fragments;const fragLen=fragments.length;if(!fragLen){return null;}// find fragment index, contiguous with end of buffer position
const{config}=this;const start=fragments[0].start;let frag;if(levelDetails.live){const initialLiveManifestSize=config.initialLiveManifestSize;if(fragLen<initialLiveManifestSize){this.warn(`Not enough fragments to start playback (have: ${fragLen}, need: ${initialLiveManifestSize})`);return null;}// The real fragment start times for a live stream are only known after the PTS range for that level is known.
// In order to discover the range, we load the best matching fragment for that level and demux it.
// Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
// we get the fragment matching that start time
if(!levelDetails.PTSKnown&&!this.startFragRequested&&this.startPosition===-1||pos<start){frag=this.getInitialLiveFragment(levelDetails,fragments);this.startPosition=this.nextLoadPosition=frag?this.hls.liveSyncPosition||frag.start:pos;}}else if(pos<=start){// VoD playlist: if loadPosition before start of playlist, load first fragment
frag=fragments[0];}// If we haven't run into any special cases already, just load the fragment most closely matching the requested position
if(!frag){const end=config.lowLatencyMode?levelDetails.partEnd:levelDetails.fragmentEnd;frag=this.getFragmentAtPosition(pos,end,levelDetails);}return this.mapToInitFragWhenRequired(frag);}isLoopLoading(frag,targetBufferTime){const trackerState=this.fragmentTracker.getState(frag);return (trackerState===FragmentState.OK||trackerState===FragmentState.PARTIAL&&!!frag.gap)&&this.nextLoadPosition>targetBufferTime;}getNextFragmentLoopLoading(frag,levelDetails,bufferInfo,playlistType,maxBufLen){const gapStart=frag.gap;const nextFragment=this.getNextFragment(this.nextLoadPosition,levelDetails);if(nextFragment===null){return nextFragment;}frag=nextFragment;if(gapStart&&frag&&!frag.gap&&bufferInfo.nextStart){// Media buffered after GAP tags should not make the next buffer timerange exceed forward buffer length
const nextbufferInfo=this.getFwdBufferInfoAtPos(this.mediaBuffer?this.mediaBuffer:this.media,bufferInfo.nextStart,playlistType);if(nextbufferInfo!==null&&bufferInfo.len+nextbufferInfo.len>=maxBufLen){// Returning here might result in not finding an audio and video candidate to skip to
this.log(`buffer full after gaps in "${playlistType}" playlist starting at sn: ${frag.sn}`);return null;}}return frag;}mapToInitFragWhenRequired(frag){// If an initSegment is present, it must be buffered first
if(frag!=null&&frag.initSegment&&!(frag!=null&&frag.initSegment.data)&&!this.bitrateTest){return frag.initSegment;}return frag;}getNextPart(partList,frag,targetBufferTime){let nextPart=-1;let contiguous=false;let independentAttrOmitted=true;for(let i=0,len=partList.length;i<len;i++){const part=partList[i];independentAttrOmitted=independentAttrOmitted&&!part.independent;if(nextPart>-1&&targetBufferTime<part.start){break;}const loaded=part.loaded;if(loaded){nextPart=-1;}else if((contiguous||part.independent||independentAttrOmitted)&&part.fragment===frag){nextPart=i;}contiguous=loaded;}return nextPart;}loadedEndOfParts(partList,targetBufferTime){const lastPart=partList[partList.length-1];return lastPart&&targetBufferTime>lastPart.start&&lastPart.loaded;}/*
This method is used to find the best matching first fragment for a live playlist. This fragment is used to calculate the
"sliding" of the playlist, which is its offset from the start of playback. After sliding we can compute the real
start and end times for each fragment in the playlist (after which this method will not need to be called).
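  (Editorial worked example, not part of the original source: if the previously loaded fragment had sn 12 and the
  switched-to playlist starts at startSN 10, targetSN below becomes 13 and the candidate is fragments[13 - 10],
  which is only used when its discontinuity counter (cc) matches fragPrevious.cc.)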
*/getInitialLiveFragment(levelDetails,fragments){const fragPrevious=this.fragPrevious;let frag=null;if(fragPrevious){if(levelDetails.hasProgramDateTime){// Prefer using PDT, because it can be accurate enough to choose the correct fragment without knowing the level sliding
this.log(`Live playlist, switching playlist, load frag with same PDT: ${fragPrevious.programDateTime}`);frag=findFragmentByPDT(fragments,fragPrevious.endProgramDateTime,this.config.maxFragLookUpTolerance);}if(!frag){// SN does not need to be accurate between renditions, but depending on the packaging it may be so.
const targetSN=fragPrevious.sn+1;if(targetSN>=levelDetails.startSN&&targetSN<=levelDetails.endSN){const fragNext=fragments[targetSN-levelDetails.startSN];// Ensure that we're staying within the continuity range, since PTS resets upon a new range
if(fragPrevious.cc===fragNext.cc){frag=fragNext;this.log(`Live playlist, switching playlist, load frag with next SN: ${frag.sn}`);}}// It's important to stay within the continuity range if available; otherwise the fragments in the playlist
// will have the wrong start times
if(!frag){frag=findFragWithCC(fragments,fragPrevious.cc);if(frag){this.log(`Live playlist, switching playlist, load frag with same CC: ${frag.sn}`);}}}}else {// Find a new start fragment when fragPrevious is null
const liveStart=this.hls.liveSyncPosition;if(liveStart!==null){frag=this.getFragmentAtPosition(liveStart,this.bitrateTest?levelDetails.fragmentEnd:levelDetails.edge,levelDetails);}}return frag;}/*
This method finds the best matching fragment given the provided position.
*/getFragmentAtPosition(bufferEnd,end,levelDetails){const{config}=this;let{fragPrevious}=this;let{fragments,endSN}=levelDetails;const{fragmentHint}=levelDetails;const{maxFragLookUpTolerance}=config;const partList=levelDetails.partList;const loadingParts=!!(config.lowLatencyMode&&partList!=null&&partList.length&&fragmentHint);if(loadingParts&&fragmentHint&&!this.bitrateTest){// Include incomplete fragment with parts at end
fragments=fragments.concat(fragmentHint);endSN=fragmentHint.sn;}let frag;if(bufferEnd<end){const lookupTolerance=bufferEnd>end-maxFragLookUpTolerance?0:maxFragLookUpTolerance;// Remove the tolerance if it would put the bufferEnd past the actual end of stream
// Uses buffer and sequence number to calculate switch segment (required if using EXT-X-DISCONTINUITY-SEQUENCE)
frag=findFragmentByPTS(fragPrevious,fragments,bufferEnd,lookupTolerance);}else {// reach end of playlist
frag=fragments[fragments.length-1];}if(frag){const curSNIdx=frag.sn-levelDetails.startSN;// Move fragPrevious forward to support forcing the next fragment to load
// when the buffer catches up to a previously buffered range.
const fragState=this.fragmentTracker.getState(frag);if(fragState===FragmentState.OK||fragState===FragmentState.PARTIAL&&frag.gap){fragPrevious=frag;}if(fragPrevious&&frag.sn===fragPrevious.sn&&(!loadingParts||partList[0].fragment.sn>frag.sn)){// Force the next fragment to load if the previous one was already selected. This can occasionally happen with
// non-uniform fragment durations
const sameLevel=fragPrevious&&frag.level===fragPrevious.level;if(sameLevel){const nextFrag=fragments[curSNIdx+1];if(frag.sn<endSN&&this.fragmentTracker.getState(nextFrag)!==FragmentState.OK){frag=nextFrag;}else {frag=null;}}}}return frag;}synchronizeToLiveEdge(levelDetails){const{config,media}=this;if(!media){return;}const liveSyncPosition=this.hls.liveSyncPosition;const currentTime=media.currentTime;const start=levelDetails.fragments[0].start;const end=levelDetails.edge;const withinSlidingWindow=currentTime>=start-config.maxFragLookUpTolerance&¤tTime<=end;// Continue if we can seek forward to sync position or if current time is outside of sliding window
if(liveSyncPosition!==null&&media.duration>liveSyncPosition&&(currentTime<liveSyncPosition||!withinSlidingWindow)){// Continue if buffer is starving or if current time is behind max latency
const maxLatency=config.liveMaxLatencyDuration!==undefined?config.liveMaxLatencyDuration:config.liveMaxLatencyDurationCount*levelDetails.targetduration;if(!withinSlidingWindow&&media.readyState<4||currentTime<end-maxLatency){if(!this.loadedmetadata){this.nextLoadPosition=liveSyncPosition;}// Only seek if ready and there is not a significant forward buffer available for playback
if(media.readyState){this.warn(`Playback: ${currentTime.toFixed(3)} is located too far from the end of live sliding playlist: ${end}, reset currentTime to : ${liveSyncPosition.toFixed(3)}`);media.currentTime=liveSyncPosition;}}}}alignPlaylists(details,previousDetails,switchDetails){// FIXME: If not for `shouldAlignOnDiscontinuities` requiring fragPrevious.cc,
// this could all go in level-helper mergeDetails()
const length=details.fragments.length;if(!length){this.warn(`No fragments in live playlist`);return 0;}const slidingStart=details.fragments[0].start;const firstLevelLoad=!previousDetails;const aligned=details.alignedSliding&&isFiniteNumber(slidingStart);if(firstLevelLoad||!aligned&&!slidingStart){const{fragPrevious}=this;alignStream(fragPrevious,switchDetails,details);const alignedSlidingStart=details.fragments[0].start;this.log(`Live playlist sliding: ${alignedSlidingStart.toFixed(2)} start-sn: ${previousDetails?previousDetails.startSN:'na'}->${details.startSN} prev-sn: ${fragPrevious?fragPrevious.sn:'na'} fragments: ${length}`);return alignedSlidingStart;}return slidingStart;}waitForCdnTuneIn(details){// Wait for Low-Latency CDN Tune-in to get an updated playlist
const advancePartLimit=3;return details.live&&details.canBlockReload&&details.partTarget&&details.tuneInGoal>Math.max(details.partHoldBack,details.partTarget*advancePartLimit);}setStartPosition(details,sliding){// compute start position if set to -1. use it straight away if value is defined
let startPosition=this.startPosition;if(startPosition<sliding){startPosition=-1;}if(startPosition===-1||this.lastCurrentTime===-1){// Use Playlist EXT-X-START:TIME-OFFSET when set
// Prioritize Multivariant Playlist offset so that main, audio, and subtitle stream-controller start times match
const offsetInMultivariantPlaylist=this.startTimeOffset!==null;const startTimeOffset=offsetInMultivariantPlaylist?this.startTimeOffset:details.startTimeOffset;if(startTimeOffset!==null&&isFiniteNumber(startTimeOffset)){startPosition=sliding+startTimeOffset;if(startTimeOffset<0){startPosition+=details.totalduration;}startPosition=Math.min(Math.max(sliding,startPosition),sliding+details.totalduration);this.log(`Start time offset ${startTimeOffset} found in ${offsetInMultivariantPlaylist?'multivariant':'media'} playlist, adjust startPosition to ${startPosition}`);this.startPosition=startPosition;}else if(details.live){// Leave this.startPosition at -1, so that we can use `getInitialLiveFragment` logic when startPosition has
// not been specified via the config or as an argument to startLoad (#3736).
startPosition=this.hls.liveSyncPosition||sliding;}else {this.startPosition=startPosition=0;}this.lastCurrentTime=startPosition;}this.nextLoadPosition=startPosition;}getLoadPosition(){const{media}=this;// if we have not yet loaded any fragment, start loading from start position
let pos=0;if(this.loadedmetadata&&media){pos=media.currentTime;}else if(this.nextLoadPosition){pos=this.nextLoadPosition;}return pos;}handleFragLoadAborted(frag,part){if(this.transmuxer&&frag.sn!=='initSegment'&&frag.stats.aborted){this.warn(`Fragment ${frag.sn}${part?' part '+part.index:''} of level ${frag.level} was aborted`);this.resetFragmentLoading(frag);}}resetFragmentLoading(frag){if(!this.fragCurrent||!this.fragContextChanged(frag)&&this.state!==State.FRAG_LOADING_WAITING_RETRY){this.state=State.IDLE;}}onFragmentOrKeyLoadError(filterType,data){if(data.chunkMeta&&!data.frag){const context=this.getCurrentContext(data.chunkMeta);if(context){data.frag=context.frag;}}const frag=data.frag;// Handle frag error related to caller's filterType
if(!frag||frag.type!==filterType||!this.levels){return;}if(this.fragContextChanged(frag)){var _this$fragCurrent2;this.warn(`Frag load error must match current frag to retry ${frag.url} > ${(_this$fragCurrent2=this.fragCurrent)==null?void 0:_this$fragCurrent2.url}`);return;}const gapTagEncountered=data.details===ErrorDetails.FRAG_GAP;if(gapTagEncountered){this.fragmentTracker.fragBuffered(frag,true);}// keep retrying until the limit will be reached
const errorAction=data.errorAction;const{action,retryCount=0,retryConfig}=errorAction||{};if(errorAction&&action===NetworkErrorAction.RetryRequest&&retryConfig){this.resetStartWhenNotLoaded(this.levelLastLoaded);const delay=getRetryDelay(retryConfig,retryCount);this.warn(`Fragment ${frag.sn} of ${filterType} ${frag.level} errored with ${data.details}, retrying loading ${retryCount+1}/${retryConfig.maxNumRetry} in ${delay}ms`);errorAction.resolved=true;this.retryDate=self.performance.now()+delay;this.state=State.FRAG_LOADING_WAITING_RETRY;}else if(retryConfig&&errorAction){this.resetFragmentErrors(filterType);if(retryCount<retryConfig.maxNumRetry){// Network retry is skipped when level switch is preferred
if(!gapTagEncountered&&action!==NetworkErrorAction.RemoveAlternatePermanently){errorAction.resolved=true;}}else {logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`);return;}}else if((errorAction==null?void 0:errorAction.action)===NetworkErrorAction.SendAlternateToPenaltyBox){this.state=State.WAITING_LEVEL;}else {this.state=State.ERROR;}// Perform next async tick sooner to speed up error action resolution
this.tickImmediate();}reduceLengthAndFlushBuffer(data){// if in appending state
if(this.state===State.PARSING||this.state===State.PARSED){const frag=data.frag;const playlistType=data.parent;const bufferedInfo=this.getFwdBufferInfo(this.mediaBuffer,playlistType);// 0.5 : tolerance needed as some browsers stalls playback before reaching buffered end
// reduce max buf len if current position is buffered
const buffered=bufferedInfo&&bufferedInfo.len>0.5;if(buffered){this.reduceMaxBufferLength(bufferedInfo.len,(frag==null?void 0:frag.duration)||10);}const flushBuffer=!buffered;if(flushBuffer){// current position is not buffered, but browser is still complaining about buffer full error
// this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
// in that case flush the whole audio buffer to recover
this.warn(`Buffer full error while media.currentTime is not buffered, flush ${playlistType} buffer`);}if(frag){this.fragmentTracker.removeFragment(frag);this.nextLoadPosition=frag.start;}this.resetLoadingState();return flushBuffer;}return false;}resetFragmentErrors(filterType){if(filterType===PlaylistLevelType.AUDIO){// Reset current fragment since audio track audio is essential and may not have a fail-over track
this.fragCurrent=null;}// Fragment errors that result in a level switch or redundant fail-over
// should reset the stream controller state to idle
if(!this.loadedmetadata){this.startFragRequested=false;}if(this.state!==State.STOPPED){this.state=State.IDLE;}}afterBufferFlushed(media,bufferType,playlistType){if(!media){return;}// After successful buffer flushing, filter flushed fragments from bufferedFrags use mediaBuffered instead of media
// (so that we will check against video.buffered ranges in case of alt audio track)
const bufferedTimeRanges=BufferHelper.getBuffered(media);this.fragmentTracker.detectEvictedFragments(bufferType,bufferedTimeRanges,playlistType);if(this.state===State.ENDED){this.resetLoadingState();}}resetLoadingState(){this.log('Reset loading state');this.fragCurrent=null;this.fragPrevious=null;this.state=State.IDLE;}resetStartWhenNotLoaded(level){// if loadedmetadata is not set, it means that first frag request failed
// in that case, reset startFragRequested flag
if(!this.loadedmetadata){this.startFragRequested=false;const details=level?level.details:null;if(details!=null&&details.live){// Update the start position and return to IDLE to recover live start
this.startPosition=-1;this.setStartPosition(details,0);this.resetLoadingState();}else {this.nextLoadPosition=this.startPosition;}}}resetWhenMissingContext(chunkMeta){this.warn(`The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`);this.removeUnbufferedFrags();this.resetStartWhenNotLoaded(this.levelLastLoaded);this.resetLoadingState();}removeUnbufferedFrags(start=0){this.fragmentTracker.removeFragmentsInRange(start,Infinity,this.playlistType,false,true);}updateLevelTiming(frag,part,level,partial){var _this$transmuxer;const details=level.details;if(!details){this.warn('level.details undefined');return;}const parsed=Object.keys(frag.elementaryStreams).reduce((result,type)=>{const info=frag.elementaryStreams[type];if(info){const parsedDuration=info.endPTS-info.startPTS;if(parsedDuration<=0){// Destroy the transmuxer after it's next time offset failed to advance because duration was <= 0.
// The new transmuxer will be configured with a time offset matching the next fragment start,
// preventing the timeline from shifting.
this.warn(`Could not parse fragment ${frag.sn} ${type} duration reliably (${parsedDuration})`);return result||false;}const drift=partial?0:updateFragPTSDTS(details,frag,info.startPTS,info.endPTS,info.startDTS,info.endDTS);this.hls.trigger(Events.LEVEL_PTS_UPDATED,{details,level,drift,type,frag,start:info.startPTS,end:info.endPTS});return true;}return result;},false);if(!parsed&&((_this$transmuxer=this.transmuxer)==null?void 0:_this$transmuxer.error)===null){const error=new Error(`Found no media in fragment ${frag.sn} of level ${frag.level} resetting transmuxer to fallback to playlist timing`);if(level.fragmentError===0){// Mark and track the odd empty segment as a gap to avoid reloading
level.fragmentError++;frag.gap=true;this.fragmentTracker.removeFragment(frag);this.fragmentTracker.fragBuffered(frag,true);}this.warn(error.message);this.hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.FRAG_PARSING_ERROR,fatal:false,error,frag,reason:`Found no media in msn ${frag.sn} of level "${level.url}"`});if(!this.hls){return;}this.resetTransmuxer();// For this error fallthrough. Marking parsed will allow advancing to next fragment.
}this.state=State.PARSED;this.hls.trigger(Events.FRAG_PARSED,{frag,part});}resetTransmuxer(){if(this.transmuxer){this.transmuxer.destroy();this.transmuxer=null;}}recoverWorkerError(data){if(data.event==='demuxerWorker'){this.fragmentTracker.removeAllFragments();this.resetTransmuxer();this.resetStartWhenNotLoaded(this.levelLastLoaded);this.resetLoadingState();}}set state(nextState){const previousState=this._state;if(previousState!==nextState){this._state=nextState;this.log(`${previousState}->${nextState}`);}}get state(){return this._state;}}class ChunkCache{constructor(){this.chunks=[];this.dataLength=0;}push(chunk){this.chunks.push(chunk);this.dataLength+=chunk.length;}flush(){const{chunks,dataLength}=this;let result;if(!chunks.length){return new Uint8Array(0);}else if(chunks.length===1){result=chunks[0];}else {result=concatUint8Arrays(chunks,dataLength);}this.reset();return result;}reset(){this.chunks.length=0;this.dataLength=0;}}function concatUint8Arrays(chunks,dataLength){const result=new Uint8Array(dataLength);let offset=0;for(let i=0;i<chunks.length;i++){const chunk=chunks[i];result.set(chunk,offset);offset+=chunk.length;}return result;}// ensure the worker ends up in the bundle
// If the worker should not be included this gets aliased to empty.js
function hasUMDWorker(){return typeof __HLS_WORKER_BUNDLE__==='function';}function injectWorker(){const blob=new self.Blob([`var exports={};var module={exports:exports};function define(f){f()};define.amd=true;(${__HLS_WORKER_BUNDLE__.toString()})(true);`],{type:'text/javascript'});const objectURL=self.URL.createObjectURL(blob);const worker=new self.Worker(objectURL);return {worker,objectURL};}function loadWorker(path){const scriptURL=new self.URL(path,self.location.href).href;const worker=new self.Worker(scriptURL);return {worker,scriptURL};}function dummyTrack(type='',inputTimeScale=90000){return {type,id:-1,pid:-1,inputTimeScale,sequenceNumber:-1,samples:[],dropped:0};}class BaseAudioDemuxer{constructor(){this._audioTrack=void 0;this._id3Track=void 0;this.frameIndex=0;this.cachedData=null;this.basePTS=null;this.initPTS=null;this.lastPTS=null;}resetInitSegment(initSegment,audioCodec,videoCodec,trackDuration){this._id3Track={type:'id3',id:3,pid:-1,inputTimeScale:90000,sequenceNumber:0,samples:[],dropped:0};}resetTimeStamp(deaultTimestamp){this.initPTS=deaultTimestamp;this.resetContiguity();}resetContiguity(){this.basePTS=null;this.lastPTS=null;this.frameIndex=0;}canParse(data,offset){return false;}appendFrame(track,data,offset){}// feed incoming data to the front of the parsing pipeline
demux(data,timeOffset){if(this.cachedData){data=appendUint8Array(this.cachedData,data);this.cachedData=null;}let id3Data=getID3Data(data,0);let offset=id3Data?id3Data.length:0;let lastDataIndex;const track=this._audioTrack;const id3Track=this._id3Track;const timestamp=id3Data?getTimeStamp(id3Data):undefined;const length=data.length;if(this.basePTS===null||this.frameIndex===0&&isFiniteNumber(timestamp)){this.basePTS=initPTSFn(timestamp,timeOffset,this.initPTS);this.lastPTS=this.basePTS;}if(this.lastPTS===null){this.lastPTS=this.basePTS;}// more expressive than alternative: id3Data?.length
if(id3Data&&id3Data.length>0){id3Track.samples.push({pts:this.lastPTS,dts:this.lastPTS,data:id3Data,type:MetadataSchema.audioId3,duration:Number.POSITIVE_INFINITY});}while(offset<length){if(this.canParse(data,offset)){const frame=this.appendFrame(track,data,offset);if(frame){this.frameIndex++;this.lastPTS=frame.sample.pts;offset+=frame.length;lastDataIndex=offset;}else {offset=length;}}else if(canParse$2(data,offset)){// after a ID3.canParse, a call to ID3.getID3Data *should* always returns some data
id3Data=getID3Data(data,offset);id3Track.samples.push({pts:this.lastPTS,dts:this.lastPTS,data:id3Data,type:MetadataSchema.audioId3,duration:Number.POSITIVE_INFINITY});offset+=id3Data.length;lastDataIndex=offset;}else {offset++;}if(offset===length&&lastDataIndex!==length){const partialData=sliceUint8(data,lastDataIndex);if(this.cachedData){this.cachedData=appendUint8Array(this.cachedData,partialData);}else {this.cachedData=partialData;}}}return {audioTrack:track,videoTrack:dummyTrack(),id3Track,textTrack:dummyTrack()};}demuxSampleAes(data,keyData,timeOffset){return Promise.reject(new Error(`[${this}] This demuxer does not support Sample-AES decryption`));}flush(timeOffset){// Parse cache in case of remaining frames.
const cachedData=this.cachedData;if(cachedData){this.cachedData=null;this.demux(cachedData,0);}return {audioTrack:this._audioTrack,videoTrack:dummyTrack(),id3Track:this._id3Track,textTrack:dummyTrack()};}destroy(){}}/**
* Initialize PTS
* <p>
* use timestamp unless it is undefined, NaN or Infinity
* </p>
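 * (Editorial example, not part of the original source: when no usable ID3 timestamp is found,
 * a timeOffset of 10 s with no initPTS maps to 10 * 90000 = 900000 ticks on the 90 kHz MPEG clock.)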
*/const initPTSFn=(timestamp,timeOffset,initPTS)=>{if(isFiniteNumber(timestamp)){return timestamp*90;}const init90kHz=initPTS?initPTS.baseTime*90000/initPTS.timescale:0;return timeOffset*90000+init90kHz;};/**
* ADTS parser helper
* @link https://wiki.multimedia.cx/index.php?title=ADTS
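 *
 * (Editorial worked example, not part of the original source: for a hypothetical header starting
 * 0xff 0xf1 0x50 ..., byte 2 is 0x50, so the object type is ((0x50 & 0xc0) >>> 6) + 1 = 2 (AAC LC)
 * and the sampling index is (0x50 & 0x3c) >>> 2 = 4, i.e. 44100 Hz in adtsSamplingRates below.)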
*/function getAudioConfig(observer,data,offset,audioCodec){let adtsObjectType;let adtsExtensionSamplingIndex;let adtsChannelConfig;let config;const userAgent=navigator.userAgent.toLowerCase();const manifestCodec=audioCodec;const adtsSamplingRates=[96000,88200,64000,48000,44100,32000,24000,22050,16000,12000,11025,8000,7350];// byte 2
adtsObjectType=((data[offset+2]&0xc0)>>>6)+1;const adtsSamplingIndex=(data[offset+2]&0x3c)>>>2;if(adtsSamplingIndex>adtsSamplingRates.length-1){const error=new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);observer.emit(Events.ERROR,Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.FRAG_PARSING_ERROR,fatal:true,error,reason:error.message});return;}adtsChannelConfig=(data[offset+2]&0x01)<<2;// byte 3
adtsChannelConfig|=(data[offset+3]&0xc0)>>>6;logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
if(/firefox/i.test(userAgent)){if(adtsSamplingIndex>=6){adtsObjectType=5;config=new Array(4);// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to subtracting 3)
adtsExtensionSamplingIndex=adtsSamplingIndex-3;}else {adtsObjectType=2;config=new Array(2);adtsExtensionSamplingIndex=adtsSamplingIndex;}// Android : always use AAC
}else if(userAgent.indexOf('android')!==-1){adtsObjectType=2;config=new Array(2);adtsExtensionSamplingIndex=adtsSamplingIndex;}else {/* for other browsers (Chrome/Vivaldi/Opera ...)
always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
*/adtsObjectType=5;config=new Array(4);// if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz)
if(audioCodec&&(audioCodec.indexOf('mp4a.40.29')!==-1||audioCodec.indexOf('mp4a.40.5')!==-1)||!audioCodec&&adtsSamplingIndex>=6){// HE-AAC uses SBR (Spectral Band Replication) , high frequencies are constructed from low frequencies
// there is a factor 2 between frame sample rate and output sample rate
// multiply frequency by 2 (see table below, equivalent to subtracting 3)
adtsExtensionSamplingIndex=adtsSamplingIndex-3;}else {// if (manifest codec is AAC) AND (frequency less than 24kHz AND nb channel is 1) OR (manifest codec not specified and mono audio)
// Chrome fails to play back with low frequency AAC LC mono when initialized with HE-AAC. This is not a problem with stereo.
if(audioCodec&&audioCodec.indexOf('mp4a.40.2')!==-1&&(adtsSamplingIndex>=6&&adtsChannelConfig===1||/vivaldi/i.test(userAgent))||!audioCodec&&adtsChannelConfig===1){adtsObjectType=2;config=new Array(2);}adtsExtensionSamplingIndex=adtsSamplingIndex;}}/* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
ISO 14496-3 (AAC).pdf - Table 1.13 — Syntax of AudioSpecificConfig()
Audio Profile / Audio Object Type
0: Null
1: AAC Main
2: AAC LC (Low Complexity)
3: AAC SSR (Scalable Sample Rate)
4: AAC LTP (Long Term Prediction)
5: SBR (Spectral Band Replication)
6: AAC Scalable
sampling freq
0: 96000 Hz
1: 88200 Hz
2: 64000 Hz
3: 48000 Hz
4: 44100 Hz
5: 32000 Hz
6: 24000 Hz
7: 22050 Hz
8: 16000 Hz
9: 12000 Hz
10: 11025 Hz
11: 8000 Hz
12: 7350 Hz
13: Reserved
14: Reserved
15: frequency is written explicitly
Channel Configurations
These are the channel configurations:
0: Defined in AOT Specific Config
1: 1 channel: front-center
2: 2 channels: front-left, front-right
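 (Editorial worked example, not part of the original source: when adtsObjectType resolves to 2 (AAC LC)
 with sampling index 4 (44100 Hz) and channel config 2 (stereo), the code below yields
 config[0] = (2 << 3) | ((4 & 0x0e) >> 1) = 0x12 and config[1] = ((4 & 0x01) << 7) | (2 << 3) = 0x10,
 i.e. the common AudioSpecificConfig bytes 0x12 0x10.)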
*/ // audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
config[0]=adtsObjectType<<3;// samplingFrequencyIndex
config[0]|=(adtsSamplingIndex&0x0e)>>1;config[1]|=(adtsSamplingIndex&0x01)<<7;// channelConfiguration
config[1]|=adtsChannelConfig<<3;if(adtsObjectType===5){// adtsExtensionSamplingIndex
config[1]|=(adtsExtensionSamplingIndex&0x0e)>>1;config[2]=(adtsExtensionSamplingIndex&0x01)<<7;// adtsObjectType (force to 2, chrome is checking that object type is less than 5 ???
// https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
config[2]|=2<<2;config[3]=0;}return {config,samplerate:adtsSamplingRates[adtsSamplingIndex],channelCount:adtsChannelConfig,codec:'mp4a.40.'+adtsObjectType,manifestCodec};}function isHeaderPattern$1(data,offset){return data[offset]===0xff&&(data[offset+1]&0xf6)===0xf0;}function getHeaderLength(data,offset){return data[offset+1]&0x01?7:9;}function getFullFrameLength(data,offset){return (data[offset+3]&0x03)<<11|data[offset+4]<<3|(data[offset+5]&0xe0)>>>5;}function canGetFrameLength(data,offset){return offset+5<data.length;}function isHeader$1(data,offset){// Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
// Layer bits (position 14 and 15) in the header should always be 0 for ADTS
// More info https://wiki.multimedia.cx/index.php?title=ADTS
return offset+1<data.length&&isHeaderPattern$1(data,offset);}function canParse$1(data,offset){return canGetFrameLength(data,offset)&&isHeaderPattern$1(data,offset)&&getFullFrameLength(data,offset)<=data.length-offset;}function probe$1(data,offset){// same as isHeader but we also check that ADTS frame follows last ADTS frame
// or end of data is reached
if(isHeader$1(data,offset)){// ADTS header Length
const headerLength=getHeaderLength(data,offset);if(offset+headerLength>=data.length){return false;}// ADTS frame Length
const frameLength=getFullFrameLength(data,offset);if(frameLength<=headerLength){return false;}const newOffset=offset+frameLength;return newOffset===data.length||isHeader$1(data,newOffset);}return false;}function initTrackConfig(track,observer,data,offset,audioCodec){if(!track.samplerate){const config=getAudioConfig(observer,data,offset,audioCodec);if(!config){return;}track.config=config.config;track.samplerate=config.samplerate;track.channelCount=config.channelCount;track.codec=config.codec;track.manifestCodec=config.manifestCodec;logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);}}function getFrameDuration(samplerate){return 1024*90000/samplerate;}function parseFrameHeader(data,offset){// The protection skip bit tells us if we have 2 bytes of CRC data at the end of the ADTS header
const headerLength=getHeaderLength(data,offset);if(offset+headerLength<=data.length){// retrieve frame size
const frameLength=getFullFrameLength(data,offset)-headerLength;if(frameLength>0){// logger.log(`AAC frame, offset/length/total/pts:${offset+headerLength}/${frameLength}/${data.byteLength}`);
return {headerLength,frameLength};}}}function appendFrame$2(track,data,offset,pts,frameIndex){const frameDuration=getFrameDuration(track.samplerate);const stamp=pts+frameIndex*frameDuration;const header=parseFrameHeader(data,offset);let unit;if(header){const{frameLength,headerLength}=header;const _length=headerLength+frameLength;const missing=Math.max(0,offset+_length-data.length);// logger.log(`AAC frame ${frameIndex}, pts:${stamp} length@offset/total: ${frameLength}@${offset+headerLength}/${data.byteLength} missing: ${missing}`);
if(missing){unit=new Uint8Array(_length-headerLength);unit.set(data.subarray(offset+headerLength,data.length),0);}else {unit=data.subarray(offset+headerLength,offset+_length);}const _sample={unit,pts:stamp};if(!missing){track.samples.push(_sample);}return {sample:_sample,length:_length,missing};}// overflow incomplete header
const length=data.length-offset;unit=new Uint8Array(length);unit.set(data.subarray(offset,data.length),0);const sample={unit,pts:stamp};return {sample,length,missing:-1};}/**
* MPEG parser helper
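 *
 * (Editorial worked example, not part of the original source: parseHeader below computes
 * frameLength = floor(sampleCoefficient * bitRate / sampleRate + paddingBit) * bytesInSlot,
 * so an MPEG-1 Layer III frame at 128 kbps / 44100 Hz without padding is
 * floor(144 * 128000 / 44100) * 1 = 417 bytes, carrying samplesPerFrame = 144 * 8 * 1 = 1152 samples.)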
*/let chromeVersion$1=null;const BitratesMap=[32,64,96,128,160,192,224,256,288,320,352,384,416,448,32,48,56,64,80,96,112,128,160,192,224,256,320,384,32,40,48,56,64,80,96,112,128,160,192,224,256,320,32,48,56,64,80,96,112,128,144,160,176,192,224,256,8,16,24,32,40,48,56,64,80,96,112,128,144,160];const SamplingRateMap=[44100,48000,32000,22050,24000,16000,11025,12000,8000];const SamplesCoefficients=[// MPEG 2.5
[0,// Reserved
72,// Layer3
144,// Layer2
12// Layer1
],// Reserved
[0,// Reserved
0,// Layer3
0,// Layer2
0// Layer1
],// MPEG 2
[0,// Reserved
72,// Layer3
144,// Layer2
12// Layer1
],// MPEG 1
[0,// Reserved
144,// Layer3
144,// Layer2
12// Layer1
]];const BytesInSlot=[0,// Reserved
1,// Layer3
1,// Layer2
4// Layer1
];function appendFrame$1(track,data,offset,pts,frameIndex){// Using http://www.datavoyage.com/mpgscript/mpeghdr.htm as a reference
if(offset+24>data.length){return;}const header=parseHeader(data,offset);if(header&&offset+header.frameLength<=data.length){const frameDuration=header.samplesPerFrame*90000/header.sampleRate;const stamp=pts+frameIndex*frameDuration;const sample={unit:data.subarray(offset,offset+header.frameLength),pts:stamp,dts:stamp};track.config=[];track.channelCount=header.channelCount;track.samplerate=header.sampleRate;track.samples.push(sample);return {sample,length:header.frameLength,missing:0};}}function parseHeader(data,offset){const mpegVersion=data[offset+1]>>3&3;const mpegLayer=data[offset+1]>>1&3;const bitRateIndex=data[offset+2]>>4&15;const sampleRateIndex=data[offset+2]>>2&3;if(mpegVersion!==1&&bitRateIndex!==0&&bitRateIndex!==15&&sampleRateIndex!==3){const paddingBit=data[offset+2]>>1&1;const channelMode=data[offset+3]>>6;const columnInBitrates=mpegVersion===3?3-mpegLayer:mpegLayer===3?3:4;const bitRate=BitratesMap[columnInBitrates*14+bitRateIndex-1]*1000;const columnInSampleRates=mpegVersion===3?0:mpegVersion===2?1:2;const sampleRate=SamplingRateMap[columnInSampleRates*3+sampleRateIndex];const channelCount=channelMode===3?1:2;// If bits of channel mode are `11` then it is a single channel (Mono)
const sampleCoefficient=SamplesCoefficients[mpegVersion][mpegLayer];const bytesInSlot=BytesInSlot[mpegLayer];const samplesPerFrame=sampleCoefficient*8*bytesInSlot;const frameLength=Math.floor(sampleCoefficient*bitRate/sampleRate+paddingBit)*bytesInSlot;if(chromeVersion$1===null){const userAgent=navigator.userAgent||'';const result=userAgent.match(/Chrome\/(\d+)/i);chromeVersion$1=result?parseInt(result[1]):0;}const needChromeFix=!!chromeVersion$1&&chromeVersion$1<=87;if(needChromeFix&&mpegLayer===2&&bitRate>=224000&&channelMode===0){// Work around bug in Chromium by setting channelMode to dual-channel (01) instead of stereo (00)
data[offset+3]=data[offset+3]|0x80;}return {sampleRate,channelCount,frameLength,samplesPerFrame};}}function isHeaderPattern(data,offset){return data[offset]===0xff&&(data[offset+1]&0xe0)===0xe0&&(data[offset+1]&0x06)!==0x00;}function isHeader(data,offset){// Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
// Layer bits (position 14 and 15) in the header should always be non-zero (Layer I, Layer II or Layer III)
// More info http://www.mp3-tech.org/programmer/frame_header.html
return offset+1<data.length&&isHeaderPattern(data,offset);}function canParse(data,offset){const headerSize=4;return isHeaderPattern(data,offset)&&headerSize<=data.length-offset;}function probe(data,offset){// same as isHeader but we also check that MPEG frame follows last MPEG frame
// or end of data is reached
if(offset+1<data.length&&isHeaderPattern(data,offset)){// MPEG header Length
const headerLength=4;// MPEG frame Length
const header=parseHeader(data,offset);let frameLength=headerLength;if(header!=null&&header.frameLength){frameLength=header.frameLength;}const newOffset=offset+frameLength;return newOffset===data.length||isHeader(data,newOffset);}return false;}/**
* AAC demuxer
*/class AACDemuxer extends BaseAudioDemuxer{constructor(observer,config){super();this.observer=void 0;this.config=void 0;this.observer=observer;this.config=config;}resetInitSegment(initSegment,audioCodec,videoCodec,trackDuration){super.resetInitSegment(initSegment,audioCodec,videoCodec,trackDuration);this._audioTrack={container:'audio/adts',type:'audio',id:2,pid:-1,sequenceNumber:0,segmentCodec:'aac',samples:[],manifestCodec:audioCodec,duration:trackDuration,inputTimeScale:90000,dropped:0};}// Source for probe info - https://wiki.multimedia.cx/index.php?title=ADTS
static probe(data){if(!data){return false;}// Check for the ADTS sync word
// Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
// Layer bits (position 14 and 15) in the header should always be 0 for ADTS
// More info https://wiki.multimedia.cx/index.php?title=ADTS
const id3Data=getID3Data(data,0);let offset=(id3Data==null?void 0:id3Data.length)||0;if(probe(data,offset)){return false;}for(let length=data.length;offset<length;offset++){if(probe$1(data,offset)){logger.log('ADTS sync word found !');return true;}}return false;}canParse(data,offset){return canParse$1(data,offset);}appendFrame(track,data,offset){initTrackConfig(track,this.observer,data,offset,track.manifestCodec);const frame=appendFrame$2(track,data,offset,this.basePTS,this.frameIndex);if(frame&&frame.missing===0){return frame;}}}const emsgSchemePattern=/\/emsg[-/]ID3/i;class MP4Demuxer{constructor(observer,config){this.remainderData=null;this.timeOffset=0;this.config=void 0;this.videoTrack=void 0;this.audioTrack=void 0;this.id3Track=void 0;this.txtTrack=void 0;this.config=config;}resetTimeStamp(){}resetInitSegment(initSegment,audioCodec,videoCodec,trackDuration){const videoTrack=this.videoTrack=dummyTrack('video',1);const audioTrack=this.audioTrack=dummyTrack('audio',1);const captionTrack=this.txtTrack=dummyTrack('text',1);this.id3Track=dummyTrack('id3',1);this.timeOffset=0;if(!(initSegment!=null&&initSegment.byteLength)){return;}const initData=parseInitSegment(initSegment);if(initData.video){const{id,timescale,codec}=initData.video;videoTrack.id=id;videoTrack.timescale=captionTrack.timescale=timescale;videoTrack.codec=codec;}if(initData.audio){const{id,timescale,codec}=initData.audio;audioTrack.id=id;audioTrack.timescale=timescale;audioTrack.codec=codec;}captionTrack.id=RemuxerTrackIdConfig.text;videoTrack.sampleDuration=0;videoTrack.duration=audioTrack.duration=trackDuration;}resetContiguity(){this.remainderData=null;}static probe(data){return hasMoofData(data);}demux(data,timeOffset){this.timeOffset=timeOffset;// Load all data into the avc track. The CMAF remuxer will look for the data in the samples object; the rest of the fields do not matter
let videoSamples=data;const videoTrack=this.videoTrack;const textTrack=this.txtTrack;if(this.config.progressive){// Split the bytestream into two ranges: one encompassing all data up until the start of the last moof, and everything else.
// This is done to guarantee that we're sending valid data to MSE - when demuxing progressively, we have no guarantee
// that the fetch loader gives us flush moof+mdat pairs. If we push jagged data to MSE, it will throw an exception.
if(this.remainderData){videoSamples=appendUint8Array(this.remainderData,data);}const segmentedData=segmentValidRange(videoSamples);this.remainderData=segmentedData.remainder;videoTrack.samples=segmentedData.valid||new Uint8Array();}else {videoTrack.samples=videoSamples;}const id3Track=this.extractID3Track(videoTrack,timeOffset);textTrack.samples=parseSamples(timeOffset,videoTrack);return {videoTrack,audioTrack:this.audioTrack,id3Track,textTrack:this.txtTrack};}flush(){const timeOffset=this.timeOffset;const videoTrack=this.videoTrack;const textTrack=this.txtTrack;videoTrack.samples=this.remainderData||new Uint8Array();this.remainderData=null;const id3Track=this.extractID3Track(videoTrack,this.timeOffset);textTrack.samples=parseSamples(timeOffset,videoTrack);return {videoTrack,audioTrack:dummyTrack(),id3Track,textTrack:dummyTrack()};}extractID3Track(videoTrack,timeOffset){const id3Track=this.id3Track;if(videoTrack.samples.length){const emsgs=findBox(videoTrack.samples,['emsg']);if(emsgs){emsgs.forEach(data=>{const emsgInfo=parseEmsg(data);if(emsgSchemePattern.test(emsgInfo.schemeIdUri)){const pts=isFiniteNumber(emsgInfo.presentationTime)?emsgInfo.presentationTime/emsgInfo.timeScale:timeOffset+emsgInfo.presentationTimeDelta/emsgInfo.timeScale;let duration=emsgInfo.eventDuration===0xffffffff?Number.POSITIVE_INFINITY:emsgInfo.eventDuration/emsgInfo.timeScale;// Safari takes anything <= 0.001 seconds and maps it to Infinity
if(duration<=0.001){duration=Number.POSITIVE_INFINITY;}const payload=emsgInfo.payload;id3Track.samples.push({data:payload,len:payload.byteLength,dts:pts,pts:pts,type:MetadataSchema.emsg,duration:duration});}});}}return id3Track;}demuxSampleAes(data,keyData,timeOffset){return Promise.reject(new Error('The MP4 demuxer does not support SAMPLE-AES decryption'));}destroy(){}}const getAudioBSID=(data,offset)=>{// check the bsid to confirm ac-3 | ec-3
let bsid=0;let numBits=5;offset+=numBits;const temp=new Uint32Array(1);// unsigned 32 bit for temporary storage
const mask=new Uint32Array(1);// unsigned 32 bit mask value
const byte=new Uint8Array(1);// unsigned 8 bit for temporary storage
while(numBits>0){byte[0]=data[offset];// read remaining bits, up to 8 bits at a time
const bits=Math.min(numBits,8);const shift=8-bits;mask[0]=0xff000000>>>24+shift<<shift;temp[0]=(byte[0]&mask[0])>>shift;bsid=!bsid?temp[0]:bsid<<bits|temp[0];offset+=1;numBits-=bits;}return bsid;};class AC3Demuxer extends BaseAudioDemuxer{constructor(observer){super();this.observer=void 0;this.observer=observer;}resetInitSegment(initSegment,audioCodec,videoCodec,trackDuration){super.resetInitSegment(initSegment,audioCodec,videoCodec,trackDuration);this._audioTrack={container:'audio/ac-3',type:'audio',id:2,pid:-1,sequenceNumber:0,segmentCodec:'ac3',samples:[],manifestCodec:audioCodec,duration:trackDuration,inputTimeScale:90000,dropped:0};}canParse(data,offset){return offset+64<data.length;}appendFrame(track,data,offset){const frameLength=appendFrame(track,data,offset,this.basePTS,this.frameIndex);if(frameLength!==-1){const sample=track.samples[track.samples.length-1];return {sample,length:frameLength,missing:0};}}static probe(data){if(!data){return false;}const id3Data=getID3Data(data,0);if(!id3Data){return false;}// look for the ac-3 sync bytes
const offset=id3Data.length;if(data[offset]===0x0b&&data[offset+1]===0x77&&getTimeStamp(id3Data)!==undefined&&// check the bsid to confirm ac-3
getAudioBSID(data,offset)<16){return true;}return false;}}function appendFrame(track,data,start,pts,frameIndex){if(start+8>data.length){return -1;// not enough bytes left
}if(data[start]!==0x0b||data[start+1]!==0x77){return -1;// invalid magic
}// get sample rate
const samplingRateCode=data[start+4]>>6;if(samplingRateCode>=3){return -1;// invalid sampling rate
}const samplingRateMap=[48000,44100,32000];const sampleRate=samplingRateMap[samplingRateCode];// get frame size
const frameSizeCode=data[start+4]&0x3f;const frameSizeMap=[64,69,96,64,70,96,80,87,120,80,88,120,96,104,144,96,105,144,112,121,168,112,122,168,128,139,192,128,140,192,160,174,240,160,175,240,192,208,288,192,209,288,224,243,336,224,244,336,256,278,384,256,279,384,320,348,480,320,349,480,384,417,576,384,418,576,448,487,672,448,488,672,512,557,768,512,558,768,640,696,960,640,697,960,768,835,1152,768,836,1152,896,975,1344,896,976,1344,1024,1114,1536,1024,1115,1536,1152,1253,1728,1152,1254,1728,1280,1393,1920,1280,1394,1920];const frameLength=frameSizeMap[frameSizeCode*3+samplingRateCode]*2;if(start+frameLength>data.length){return -1;}// get channel count
const channelMode=data[start+6]>>5;let skipCount=0;if(channelMode===2){skipCount+=2;}else {if(channelMode&1&&channelMode!==1){skipCount+=2;}if(channelMode&4){skipCount+=2;}}const lfeon=(data[start+6]<<8|data[start+7])>>12-skipCount&1;const channelsMap=[2,1,2,3,3,4,4,5];const channelCount=channelsMap[channelMode]+lfeon;// build dac3 box
const bsid=data[start+5]>>3;const bsmod=data[start+5]&7;const config=new Uint8Array([samplingRateCode<<6|bsid<<1|bsmod>>2,(bsmod&3)<<6|channelMode<<3|lfeon<<2|frameSizeCode>>4,frameSizeCode<<4&0xe0]);const frameDuration=1536/sampleRate*90000;const stamp=pts+frameIndex*frameDuration;const unit=data.subarray(start,start+frameLength);track.config=config;track.channelCount=channelCount;track.samplerate=sampleRate;track.samples.push({unit,pts:stamp});return frameLength;}class BaseVideoParser{constructor(){this.VideoSample=null;}createVideoSample(key,pts,dts,debug){return {key,frame:false,pts,dts,units:[],debug,length:0};}getLastNalUnit(samples){var _VideoSample;let VideoSample=this.VideoSample;let lastUnit;// try to fallback to previous sample if current one is empty
if(!VideoSample||VideoSample.units.length===0){VideoSample=samples[samples.length-1];}if((_VideoSample=VideoSample)!=null&&_VideoSample.units){const units=VideoSample.units;lastUnit=units[units.length-1];}return lastUnit;}pushAccessUnit(VideoSample,videoTrack){if(VideoSample.units.length&&VideoSample.frame){// if sample does not have PTS/DTS, patch with last sample PTS/DTS
if(VideoSample.pts===undefined){const samples=videoTrack.samples;const nbSamples=samples.length;if(nbSamples){const lastSample=samples[nbSamples-1];VideoSample.pts=lastSample.pts;VideoSample.dts=lastSample.dts;}else {// dropping samples, no timestamp found
videoTrack.dropped++;return;}}videoTrack.samples.push(VideoSample);}if(VideoSample.debug.length){logger.log(VideoSample.pts+'/'+VideoSample.dts+':'+VideoSample.debug);}}}/**
* Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
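 *
 * (Editorial worked example, not part of the original source: readUEG() counts leading zero bits (n)
 * and returns readBits(n + 1) - 1, so the bit string 00101 decodes to 0b101 - 1 = 4 and a lone 1 bit
 * decodes to 0.)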
*/class ExpGolomb{constructor(data){this.data=void 0;this.bytesAvailable=void 0;this.word=void 0;this.bitsAvailable=void 0;this.data=data;// the number of bytes left to examine in this.data
this.bytesAvailable=data.byteLength;// the current word being examined
this.word=0;// :uint
// the number of bits left to examine in the current word
this.bitsAvailable=0;// :uint
}// ():void
loadWord(){const data=this.data;const bytesAvailable=this.bytesAvailable;const position=data.byteLength-bytesAvailable;const workingBytes=new Uint8Array(4);const availableBytes=Math.min(4,bytesAvailable);if(availableBytes===0){throw new Error('no bytes available');}workingBytes.set(data.subarray(position,position+availableBytes));this.word=new DataView(workingBytes.buffer).getUint32(0);// track the amount of this.data that has been processed
this.bitsAvailable=availableBytes*8;this.bytesAvailable-=availableBytes;}// (count:int):void
skipBits(count){let skipBytes;// :int
count=Math.min(count,this.bytesAvailable*8+this.bitsAvailable);if(this.bitsAvailable>count){this.word<<=count;this.bitsAvailable-=count;}else {count-=this.bitsAvailable;skipBytes=count>>3;count-=skipBytes<<3;this.bytesAvailable-=skipBytes;this.loadWord();this.word<<=count;this.bitsAvailable-=count;}}// (size:int):uint
readBits(size){let bits=Math.min(this.bitsAvailable,size);// :uint
const valu=this.word>>>32-bits;// :uint
if(size>32){logger.error('Cannot read more than 32 bits at a time');}this.bitsAvailable-=bits;if(this.bitsAvailable>0){this.word<<=bits;}else if(this.bytesAvailable>0){this.loadWord();}else {throw new Error('no bits available');}bits=size-bits;if(bits>0&&this.bitsAvailable){return valu<<bits|this.readBits(bits);}else {return valu;}}// ():uint
skipLZ(){let leadingZeroCount;// :uint
for(leadingZeroCount=0;leadingZeroCount<this.bitsAvailable;++leadingZeroCount){if((this.word&0x80000000>>>leadingZeroCount)!==0){// the first bit of working word is 1
this.word<<=leadingZeroCount;this.bitsAvailable-=leadingZeroCount;return leadingZeroCount;}}// we exhausted word and still have not found a 1
this.loadWord();return leadingZeroCount+this.skipLZ();}// ():void
skipUEG(){this.skipBits(1+this.skipLZ());}// ():void
skipEG(){this.skipBits(1+this.skipLZ());}// ():uint
readUEG(){const clz=this.skipLZ();// :uint
return this.readBits(clz+1)-1;}// ():int
readEG(){const valu=this.readUEG();// :int
if(0x01&valu){// the number is odd if the low order bit is set
return 1+valu>>>1;// add 1 to make it even, and divide by 2
}else {return -1*(valu>>>1);// divide by two then make it negative
}}// Some convenience functions
// :Boolean
readBoolean(){return this.readBits(1)===1;}// ():int
readUByte(){return this.readBits(8);}// ():int
readUShort(){return this.readBits(16);}// ():int
readUInt(){return this.readBits(32);}/**
* Advance the ExpGolomb decoder past a scaling list. The scaling
* list is optionally transmitted as part of a sequence parameter
* set and is not relevant to transmuxing.
* @param count the number of entries in this scaling list
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
*/skipScalingList(count){let lastScale=8;let nextScale=8;let deltaScale;for(let j=0;j<count;j++){if(nextScale!==0){deltaScale=this.readEG();nextScale=(lastScale+deltaScale+256)%256;}lastScale=nextScale===0?lastScale:nextScale;}}/**
* Read a sequence parameter set and return some interesting video
* properties. A sequence parameter set is the H264 metadata that
* describes the properties of upcoming video frames.
* @returns an object with configuration parsed from the
* sequence parameter set, including the dimensions of the
* associated video frames.
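 *
 * (Editorial worked example, not part of the original source: a typical 1080p SPS codes
 * pic_width_in_mbs_minus1 = 119, pic_height_in_map_units_minus1 = 67, frame_mbs_only_flag = 1 and
 * frame_crop_bottom_offset = 4, which the return value below resolves to 120 * 16 = 1920 by
 * 68 * 16 - 2 * 4 = 1080.)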
*/readSPS(){let frameCropLeftOffset=0;let frameCropRightOffset=0;let frameCropTopOffset=0;let frameCropBottomOffset=0;let numRefFramesInPicOrderCntCycle;let scalingListCount;let i;const readUByte=this.readUByte.bind(this);const readBits=this.readBits.bind(this);const readUEG=this.readUEG.bind(this);const readBoolean=this.readBoolean.bind(this);const skipBits=this.skipBits.bind(this);const skipEG=this.skipEG.bind(this);const skipUEG=this.skipUEG.bind(this);const skipScalingList=this.skipScalingList.bind(this);readUByte();const profileIdc=readUByte();// profile_idc
readBits(5);// profileCompat constraint_set[0-4]_flag, u(5)
skipBits(3);// reserved_zero_3bits u(3),
readUByte();// level_idc u(8)
skipUEG();// seq_parameter_set_id
// some profiles have more optional data we don't need
if(profileIdc===100||profileIdc===110||profileIdc===122||profileIdc===244||profileIdc===44||profileIdc===83||profileIdc===86||profileIdc===118||profileIdc===128){const chromaFormatIdc=readUEG();if(chromaFormatIdc===3){skipBits(1);}// separate_colour_plane_flag
skipUEG();// bit_depth_luma_minus8
skipUEG();// bit_depth_chroma_minus8
skipBits(1);// qpprime_y_zero_transform_bypass_flag
if(readBoolean()){// seq_scaling_matrix_present_flag
scalingListCount=chromaFormatIdc!==3?8:12;for(i=0;i<scalingListCount;i++){if(readBoolean()){// seq_scaling_list_present_flag[ i ]
if(i<6){skipScalingList(16);}else {skipScalingList(64);}}}}}skipUEG();// log2_max_frame_num_minus4
const picOrderCntType=readUEG();if(picOrderCntType===0){readUEG();// log2_max_pic_order_cnt_lsb_minus4
}else if(picOrderCntType===1){skipBits(1);// delta_pic_order_always_zero_flag
skipEG();// offset_for_non_ref_pic
skipEG();// offset_for_top_to_bottom_field
numRefFramesInPicOrderCntCycle=readUEG();for(i=0;i<numRefFramesInPicOrderCntCycle;i++){skipEG();}// offset_for_ref_frame[ i ]
}skipUEG();// max_num_ref_frames
skipBits(1);// gaps_in_frame_num_value_allowed_flag
const picWidthInMbsMinus1=readUEG();const picHeightInMapUnitsMinus1=readUEG();const frameMbsOnlyFlag=readBits(1);if(frameMbsOnlyFlag===0){skipBits(1);}// mb_adaptive_frame_field_flag
skipBits(1);// direct_8x8_inference_flag
if(readBoolean()){// frame_cropping_flag
frameCropLeftOffset=readUEG();frameCropRightOffset=readUEG();frameCropTopOffset=readUEG();frameCropBottomOffset=readUEG();}let pixelRatio=[1,1];if(readBoolean()){// vui_parameters_present_flag
if(readBoolean()){// aspect_ratio_info_present_flag
const aspectRatioIdc=readUByte();switch(aspectRatioIdc){case 1:pixelRatio=[1,1];break;case 2:pixelRatio=[12,11];break;case 3:pixelRatio=[10,11];break;case 4:pixelRatio=[16,11];break;case 5:pixelRatio=[40,33];break;case 6:pixelRatio=[24,11];break;case 7:pixelRatio=[20,11];break;case 8:pixelRatio=[32,11];break;case 9:pixelRatio=[80,33];break;case 10:pixelRatio=[18,11];break;case 11:pixelRatio=[15,11];break;case 12:pixelRatio=[64,33];break;case 13:pixelRatio=[160,99];break;case 14:pixelRatio=[4,3];break;case 15:pixelRatio=[3,2];break;case 16:pixelRatio=[2,1];break;case 255:{pixelRatio=[readUByte()<<8|readUByte(),readUByte()<<8|readUByte()];break;}}}}return {width:Math.ceil((picWidthInMbsMinus1+1)*16-frameCropLeftOffset*2-frameCropRightOffset*2),height:(2-frameMbsOnlyFlag)*(picHeightInMapUnitsMinus1+1)*16-(frameMbsOnlyFlag?2:4)*(frameCropTopOffset+frameCropBottomOffset),pixelRatio:pixelRatio};}readSliceType(){// skip NALu type
this.readUByte();// discard first_mb_in_slice
this.readUEG();// return slice_type
return this.readUEG();}}class AvcVideoParser extends BaseVideoParser{parseAVCPES(track,textTrack,pes,last,duration){const units=this.parseAVCNALu(track,pes.data);let VideoSample=this.VideoSample;let push;let spsfound=false;// free pes.data to save up some memory
pes.data=null;// if new NAL units found and last sample still there, let's push ...
// this helps parsing streams with missing AUD (only do this if AUD never found)
if(VideoSample&&units.length&&!track.audFound){this.pushAccessUnit(VideoSample,track);VideoSample=this.VideoSample=this.createVideoSample(false,pes.pts,pes.dts,'');}units.forEach(unit=>{var _VideoSample2;switch(unit.type){// NDR
case 1:{let iskey=false;push=true;const data=unit.data;// only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
if(spsfound&&data.length>4){// retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
const sliceType=new ExpGolomb(data).readSliceType();// 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
// SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
// An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
// I slice: A slice that is not an SI slice that is decoded using intra prediction only.
// if (sliceType === 2 || sliceType === 7) {
if(sliceType===2||sliceType===4||sliceType===7||sliceType===9){iskey=true;}}if(iskey){var _VideoSample;// if we have non-keyframe data already, that cannot belong to the same frame as a keyframe, so force a push
if((_VideoSample=VideoSample)!=null&&_VideoSample.frame&&!VideoSample.key){this.pushAccessUnit(VideoSample,track);VideoSample=this.VideoSample=null;}}if(!VideoSample){VideoSample=this.VideoSample=this.createVideoSample(true,pes.pts,pes.dts,'');}VideoSample.frame=true;VideoSample.key=iskey;break;// IDR
}case 5:push=true;// handle PES not starting with AUD
// if we have frame data already, that cannot belong to the same frame, so force a push
if((_VideoSample2=VideoSample)!=null&&_VideoSample2.frame&&!VideoSample.key){this.pushAccessUnit(VideoSample,track);VideoSample=this.VideoSample=null;}if(!VideoSample){VideoSample=this.VideoSample=this.createVideoSample(true,pes.pts,pes.dts,'');}VideoSample.key=true;VideoSample.frame=true;break;// SEI
case 6:{push=true;parseSEIMessageFromNALu(unit.data,1,pes.pts,textTrack.samples);break;// SPS
}case 7:{var _track$pixelRatio,_track$pixelRatio2;push=true;spsfound=true;const sps=unit.data;const expGolombDecoder=new ExpGolomb(sps);const config=expGolombDecoder.readSPS();if(!track.sps||track.width!==config.width||track.height!==config.height||((_track$pixelRatio=track.pixelRatio)==null?void 0:_track$pixelRatio[0])!==config.pixelRatio[0]||((_track$pixelRatio2=track.pixelRatio)==null?void 0:_track$pixelRatio2[1])!==config.pixelRatio[1]){track.width=config.width;track.height=config.height;track.pixelRatio=config.pixelRatio;track.sps=[sps];track.duration=duration;const codecarray=sps.subarray(1,4);let codecstring='avc1.';for(let i=0;i<3;i++){let h=codecarray[i].toString(16);if(h.length<2){h='0'+h;}codecstring+=h;}track.codec=codecstring;}break;}// PPS
case 8:push=true;track.pps=[unit.data];break;// AUD
case 9:push=true;track.audFound=true;if(VideoSample){this.pushAccessUnit(VideoSample,track);}VideoSample=this.VideoSample=this.createVideoSample(false,pes.pts,pes.dts,'');break;// Filler Data
case 12:push=true;break;default:push=false;if(VideoSample){VideoSample.debug+='unknown NAL '+unit.type+' ';}break;}if(VideoSample&&push){const units=VideoSample.units;units.push(unit);}});// if last PES packet, push samples
if(last&&VideoSample){this.pushAccessUnit(VideoSample,track);this.VideoSample=null;}}parseAVCNALu(track,array){const len=array.byteLength;let state=track.naluState||0;const lastState=state;const units=[];let i=0;let value;let overflow;let unitType;let lastUnitStart=-1;let lastUnitType=0;// logger.log('PES:' + Hex.hexDump(array));
if(state===-1){// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
lastUnitStart=0;// NALu type is value read from offset 0
lastUnitType=array[0]&0x1f;state=0;i=1;}while(i<len){value=array[i++];// optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
if(!state){state=value?0:1;continue;}if(state===1){state=value?0:2;continue;}// here we have state either equal to 2 or 3
if(!value){state=3;}else if(value===1){overflow=i-state-1;if(lastUnitStart>=0){const unit={data:array.subarray(lastUnitStart,overflow),type:lastUnitType};// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
units.push(unit);}else {// lastUnitStart is undefined => this is the first start code found in this PES packet
// first check if start code delimiter is overlapping between 2 PES packets,
// ie it started in last packet (lastState not zero)
// and ended at the beginning of this PES packet (i <= 4 - lastState)
const lastUnit=this.getLastNalUnit(track.samples);if(lastUnit){if(lastState&&i<=4-lastState){// start delimiter overlapping between PES packets
// strip start delimiter bytes from the end of last NAL unit
// check if lastUnit had a state different from zero
if(lastUnit.state){// strip last bytes
lastUnit.data=lastUnit.data.subarray(0,lastUnit.data.byteLength-lastState);}}// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
if(overflow>0){// logger.log('first NALU found with overflow:' + overflow);
lastUnit.data=appendUint8Array(lastUnit.data,array.subarray(0,overflow));lastUnit.state=0;}}}// check if we can read unit type
if(i<len){unitType=array[i]&0x1f;// logger.log('find NALU @ offset:' + i + ',type:' + unitType);
lastUnitStart=i;lastUnitType=unitType;state=0;}else {// not enough bytes to read unit type. let's read it on the next PES parsing
state=-1;}}else {state=0;}}if(lastUnitStart>=0&&state>=0){const unit={data:array.subarray(lastUnitStart,len),type:lastUnitType,state:state};units.push(unit);// logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
}// no NALu found
if(units.length===0){// append pes.data to previous NAL unit
const lastUnit=this.getLastNalUnit(track.samples);if(lastUnit){lastUnit.data=appendUint8Array(lastUnit.data,array);}}track.naluState=state;return units;}}/**
* SAMPLE-AES decrypter
*/class SampleAesDecrypter{constructor(observer,config,keyData){this.keyData=void 0;this.decrypter=void 0;this.keyData=keyData;this.decrypter=new Decrypter(config,{removePKCS7Padding:false});}decryptBuffer(encryptedData){return this.decrypter.decrypt(encryptedData,this.keyData.key.buffer,this.keyData.iv.buffer);}// AAC - encrypt all full 16 bytes blocks starting from offset 16
decryptAacSample(samples,sampleIndex,callback){const curUnit=samples[sampleIndex].unit;if(curUnit.length<=16){// No encrypted portion in this sample (first 16 bytes is not
// encrypted, see https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/HLS_Sample_Encryption/Encryption/Encryption.html),
return;}const encryptedData=curUnit.subarray(16,curUnit.length-curUnit.length%16);const encryptedBuffer=encryptedData.buffer.slice(encryptedData.byteOffset,encryptedData.byteOffset+encryptedData.length);this.decryptBuffer(encryptedBuffer).then(decryptedBuffer=>{const decryptedData=new Uint8Array(decryptedBuffer);curUnit.set(decryptedData,16);if(!this.decrypter.isSync()){this.decryptAacSamples(samples,sampleIndex+1,callback);}});}decryptAacSamples(samples,sampleIndex,callback){for(;;sampleIndex++){if(sampleIndex>=samples.length){callback();return;}if(samples[sampleIndex].unit.length<32){continue;}this.decryptAacSample(samples,sampleIndex,callback);if(!this.decrypter.isSync()){return;}}}// AVC - encrypt one 16 bytes block out of ten, starting from offset 32
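// For H.264 the SAMPLE-AES pattern is "1 block out of 10": getAvcEncryptedData gathers one 16-byte
// block every 160 bytes starting at offset 32 (after the clear lead-in), the gathered blocks are
// decrypted as one contiguous buffer, and getAvcDecryptedUnit writes the plaintext back to the same
// offsets in the emulation-prevention-stripped NAL unit.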
getAvcEncryptedData(decodedData){const encryptedDataLen=Math.floor((decodedData.length-48)/160)*16+16;const encryptedData=new Int8Array(encryptedDataLen);let outputPos=0;for(let inputPos=32;inputPos<decodedData.length-16;inputPos+=160,outputPos+=16){encryptedData.set(decodedData.subarray(inputPos,inputPos+16),outputPos);}return encryptedData;}getAvcDecryptedUnit(decodedData,decryptedData){const uint8DecryptedData=new Uint8Array(decryptedData);let inputPos=0;for(let outputPos=32;outputPos<decodedData.length-16;outputPos+=160,inputPos+=16){decodedData.set(uint8DecryptedData.subarray(inputPos,inputPos+16),outputPos);}return decodedData;}decryptAvcSample(samples,sampleIndex,unitIndex,callback,curUnit){const decodedData=discardEPB(curUnit.data);const encryptedData=this.getAvcEncryptedData(decodedData);this.decryptBuffer(encryptedData.buffer).then(decryptedBuffer=>{curUnit.data=this.getAvcDecryptedUnit(decodedData,decryptedBuffer);if(!this.decrypter.isSync()){this.decryptAvcSamples(samples,sampleIndex,unitIndex+1,callback);}});}decryptAvcSamples(samples,sampleIndex,unitIndex,callback){if(samples instanceof Uint8Array){throw new Error('Cannot decrypt samples of type Uint8Array');}for(;;sampleIndex++,unitIndex=0){if(sampleIndex>=samples.length){callback();return;}const curUnits=samples[sampleIndex].units;for(;;unitIndex++){if(unitIndex>=curUnits.length){break;}const curUnit=curUnits[unitIndex];if(curUnit.data.length<=48||curUnit.type!==1&&curUnit.type!==5){continue;}this.decryptAvcSample(samples,sampleIndex,unitIndex,callback,curUnit);if(!this.decrypter.isSync()){return;}}}}}const PACKET_LENGTH=188;class TSDemuxer{constructor(observer,config,typeSupported){this.observer=void 0;this.config=void 0;this.typeSupported=void 0;this.sampleAes=null;this.pmtParsed=false;this.audioCodec=void 0;this.videoCodec=void 0;this._duration=0;this._pmtId=-1;this._videoTrack=void 0;this._audioTrack=void 0;this._id3Track=void 0;this._txtTrack=void 0;this.aacOverFlow=null;this.remainderData=null;this.videoParser=void 0;this.observer=observer;this.config=config;this.typeSupported=typeSupported;this.videoParser=new AvcVideoParser();}static probe(data){const syncOffset=TSDemuxer.syncOffset(data);if(syncOffset>0){logger.warn(`MPEG2-TS detected but first sync word found @ offset ${syncOffset}`);}return syncOffset!==-1;}static syncOffset(data){const length=data.length;let scanwindow=Math.min(PACKET_LENGTH*5,length-PACKET_LENGTH)+1;let i=0;while(i<scanwindow){// a TS init segment should contain at least 2 TS packets: PAT and PMT, each starting with 0x47
let foundPat=false;let packetStart=-1;let tsPackets=0;for(let j=i;j<length;j+=PACKET_LENGTH){if(data[j]===0x47&&(length-j===PACKET_LENGTH||data[j+PACKET_LENGTH]===0x47)){tsPackets++;if(packetStart===-1){packetStart=j;// First sync word found at offset, increase scan length (#5251)
if(packetStart!==0){scanwindow=Math.min(packetStart+PACKET_LENGTH*99,data.length-PACKET_LENGTH)+1;}}if(!foundPat){foundPat=parsePID(data,j)===0;}// Sync word found at 0 with 3 packets, or found at an offset with at least 2 packets up to scanwindow (#5501)
if(foundPat&&tsPackets>1&&(packetStart===0&&tsPackets>2||j+PACKET_LENGTH>scanwindow)){return packetStart;}}else if(tsPackets){// Exit if sync word found, but does not contain contiguous packets
return -1;}else {break;}}i++;}return -1;}/**
* Creates a track model internal to demuxer used to drive remuxing input
*/static createTrack(type,duration){return {container:type==='video'||type==='audio'?'video/mp2t':undefined,type,id:RemuxerTrackIdConfig[type],pid:-1,inputTimeScale:90000,sequenceNumber:0,samples:[],dropped:0,duration:type==='audio'?duration:undefined};}/**
* Initializes a new init segment on the demuxer/remuxer interface. Needed for discontinuities/track-switches (or at stream start)
* Resets all internal track instances of the demuxer.
*/resetInitSegment(initSegment,audioCodec,videoCodec,trackDuration){this.pmtParsed=false;this._pmtId=-1;this._videoTrack=TSDemuxer.createTrack('video');this._audioTrack=TSDemuxer.createTrack('audio',trackDuration);this._id3Track=TSDemuxer.createTrack('id3');this._txtTrack=TSDemuxer.createTrack('text');this._audioTrack.segmentCodec='aac';// flush any partial content
this.aacOverFlow=null;this.remainderData=null;this.audioCodec=audioCodec;this.videoCodec=videoCodec;this._duration=trackDuration;}resetTimeStamp(){}resetContiguity(){const{_audioTrack,_videoTrack,_id3Track}=this;if(_audioTrack){_audioTrack.pesData=null;}if(_videoTrack){_videoTrack.pesData=null;}if(_id3Track){_id3Track.pesData=null;}this.aacOverFlow=null;this.remainderData=null;}demux(data,timeOffset,isSampleAes=false,flush=false){if(!isSampleAes){this.sampleAes=null;}let pes;const videoTrack=this._videoTrack;const audioTrack=this._audioTrack;const id3Track=this._id3Track;const textTrack=this._txtTrack;let videoPid=videoTrack.pid;let videoData=videoTrack.pesData;let audioPid=audioTrack.pid;let id3Pid=id3Track.pid;let audioData=audioTrack.pesData;let id3Data=id3Track.pesData;let unknownPID=null;let pmtParsed=this.pmtParsed;let pmtId=this._pmtId;let len=data.length;if(this.remainderData){data=appendUint8Array(this.remainderData,data);len=data.length;this.remainderData=null;}if(len<PACKET_LENGTH&&!flush){this.remainderData=data;return {audioTrack,videoTrack,id3Track,textTrack};}const syncOffset=Math.max(0,TSDemuxer.syncOffset(data));len-=(len-syncOffset)%PACKET_LENGTH;if(len<data.byteLength&&!flush){this.remainderData=new Uint8Array(data.buffer,len,data.buffer.byteLength-len);}// loop through TS packets
let tsPacketErrors=0;for(let start=syncOffset;start<len;start+=PACKET_LENGTH){if(data[start]===0x47){const stt=!!(data[start+1]&0x40);const pid=parsePID(data,start);const atf=(data[start+3]&0x30)>>4;// if an adaption field is present, its length is specified by the fifth byte of the TS packet header.
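// TS packet header layout used here: byte 0 is the 0x47 sync byte, bit 0x40 of byte 1 is the
// payload_unit_start_indicator (stt), bytes 1-2 hold the 13-bit PID, and bits 4-5 of byte 3 (atf)
// signal whether an adaptation field and/or a payload follows.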
let offset;if(atf>1){offset=start+5+data[start+4];// continue if there is only adaptation field
if(offset===start+PACKET_LENGTH){continue;}}else {offset=start+4;}switch(pid){case videoPid:if(stt){if(videoData&&(pes=parsePES(videoData))){this.videoParser.parseAVCPES(videoTrack,textTrack,pes,false,this._duration);}videoData={data:[],size:0};}if(videoData){videoData.data.push(data.subarray(offset,start+PACKET_LENGTH));videoData.size+=start+PACKET_LENGTH-offset;}break;case audioPid:if(stt){if(audioData&&(pes=parsePES(audioData))){switch(audioTrack.segmentCodec){case'aac':this.parseAACPES(audioTrack,pes);break;case'mp3':this.parseMPEGPES(audioTrack,pes);break;case'ac3':{this.parseAC3PES(audioTrack,pes);}break;}}audioData={data:[],size:0};}if(audioData){audioData.data.push(data.subarray(offset,start+PACKET_LENGTH));audioData.size+=start+PACKET_LENGTH-offset;}break;case id3Pid:if(stt){if(id3Data&&(pes=parsePES(id3Data))){this.parseID3PES(id3Track,pes);}id3Data={data:[],size:0};}if(id3Data){id3Data.data.push(data.subarray(offset,start+PACKET_LENGTH));id3Data.size+=start+PACKET_LENGTH-offset;}break;case 0:if(stt){offset+=data[offset]+1;}pmtId=this._pmtId=parsePAT(data,offset);// logger.log('PMT PID:' + this._pmtId);
break;case pmtId:{if(stt){offset+=data[offset]+1;}const parsedPIDs=parsePMT(data,offset,this.typeSupported,isSampleAes,this.observer);// only update track id if track PID found while parsing PMT
// this is to avoid resetting the PID to -1 in case
// track PID transiently disappears from the stream
// this could happen in case of transient missing audio samples for example
// NOTE this is only the PID of the track as found in TS,
// but we are not using this for MP4 track IDs.
videoPid=parsedPIDs.videoPid;if(videoPid>0){videoTrack.pid=videoPid;videoTrack.segmentCodec=parsedPIDs.segmentVideoCodec;}audioPid=parsedPIDs.audioPid;if(audioPid>0){audioTrack.pid=audioPid;audioTrack.segmentCodec=parsedPIDs.segmentAudioCodec;}id3Pid=parsedPIDs.id3Pid;if(id3Pid>0){id3Track.pid=id3Pid;}if(unknownPID!==null&&!pmtParsed){logger.warn(`MPEG-TS PMT found at ${start} after unknown PID '${unknownPID}'. Backtracking to sync byte @${syncOffset} to parse all TS packets.`);unknownPID=null;// we set it to -188, the += 188 in the for loop will reset start to 0
start=syncOffset-188;}pmtParsed=this.pmtParsed=true;break;}case 0x11:case 0x1fff:break;default:unknownPID=pid;break;}}else {tsPacketErrors++;}}if(tsPacketErrors>0){emitParsingError(this.observer,new Error(`Found ${tsPacketErrors} TS packet/s that do not start with 0x47`));}videoTrack.pesData=videoData;audioTrack.pesData=audioData;id3Track.pesData=id3Data;const demuxResult={audioTrack,videoTrack,id3Track,textTrack};if(flush){this.extractRemainingSamples(demuxResult);}return demuxResult;}flush(){const{remainderData}=this;this.remainderData=null;let result;if(remainderData){result=this.demux(remainderData,-1,false,true);}else {result={videoTrack:this._videoTrack,audioTrack:this._audioTrack,id3Track:this._id3Track,textTrack:this._txtTrack};}this.extractRemainingSamples(result);if(this.sampleAes){return this.decrypt(result,this.sampleAes);}return result;}extractRemainingSamples(demuxResult){const{audioTrack,videoTrack,id3Track,textTrack}=demuxResult;const videoData=videoTrack.pesData;const audioData=audioTrack.pesData;const id3Data=id3Track.pesData;// try to parse last PES packets
let pes;if(videoData&&(pes=parsePES(videoData))){this.videoParser.parseAVCPES(videoTrack,textTrack,pes,true,this._duration);videoTrack.pesData=null;}else {// either avcData null or PES truncated, keep it for next frag parsing
videoTrack.pesData=videoData;}if(audioData&&(pes=parsePES(audioData))){switch(audioTrack.segmentCodec){case'aac':this.parseAACPES(audioTrack,pes);break;case'mp3':this.parseMPEGPES(audioTrack,pes);break;case'ac3':{this.parseAC3PES(audioTrack,pes);}break;}audioTrack.pesData=null;}else {if(audioData!=null&&audioData.size){logger.log('last AAC PES packet truncated,might overlap between fragments');}// either audioData null or PES truncated, keep it for next frag parsing
audioTrack.pesData=audioData;}if(id3Data&&(pes=parsePES(id3Data))){this.parseID3PES(id3Track,pes);id3Track.pesData=null;}else {// either id3Data null or PES truncated, keep it for next frag parsing
id3Track.pesData=id3Data;}}demuxSampleAes(data,keyData,timeOffset){const demuxResult=this.demux(data,timeOffset,true,!this.config.progressive);const sampleAes=this.sampleAes=new SampleAesDecrypter(this.observer,this.config,keyData);return this.decrypt(demuxResult,sampleAes);}decrypt(demuxResult,sampleAes){return new Promise(resolve=>{const{audioTrack,videoTrack}=demuxResult;if(audioTrack.samples&&audioTrack.segmentCodec==='aac'){sampleAes.decryptAacSamples(audioTrack.samples,0,()=>{if(videoTrack.samples){sampleAes.decryptAvcSamples(videoTrack.samples,0,0,()=>{resolve(demuxResult);});}else {resolve(demuxResult);}});}else if(videoTrack.samples){sampleAes.decryptAvcSamples(videoTrack.samples,0,0,()=>{resolve(demuxResult);});}});}destroy(){this._duration=0;}parseAACPES(track,pes){let startOffset=0;const aacOverFlow=this.aacOverFlow;let data=pes.data;if(aacOverFlow){this.aacOverFlow=null;const frameMissingBytes=aacOverFlow.missing;const sampleLength=aacOverFlow.sample.unit.byteLength;// logger.log(`AAC: append overflowing ${sampleLength} bytes to beginning of new PES`);
if(frameMissingBytes===-1){data=appendUint8Array(aacOverFlow.sample.unit,data);}else {const frameOverflowBytes=sampleLength-frameMissingBytes;aacOverFlow.sample.unit.set(data.subarray(0,frameMissingBytes),frameOverflowBytes);track.samples.push(aacOverFlow.sample);startOffset=aacOverFlow.missing;}}// look for ADTS header (0xFFFx)
let offset;let len;for(offset=startOffset,len=data.length;offset<len-1;offset++){if(isHeader$1(data,offset)){break;}}// if ADTS header does not start straight from the beginning of the PES payload, raise an error
if(offset!==startOffset){let reason;const recoverable=offset<len-1;if(recoverable){reason=`AAC PES did not start with ADTS header,offset:${offset}`;}else {reason='No ADTS header found in AAC PES';}emitParsingError(this.observer,new Error(reason),recoverable);if(!recoverable){return;}}initTrackConfig(track,this.observer,data,offset,this.audioCodec);let pts;if(pes.pts!==undefined){pts=pes.pts;}else if(aacOverFlow){// if last AAC frame is overflowing, we should ensure timestamps are contiguous:
// first sample PTS should be equal to last sample PTS + frameDuration
const frameDuration=getFrameDuration(track.samplerate);pts=aacOverFlow.sample.pts+frameDuration;}else {logger.warn('[tsdemuxer]: AAC PES unknown PTS');return;}// scan for aac samples
let frameIndex=0;let frame;while(offset<len){frame=appendFrame$2(track,data,offset,pts,frameIndex);offset+=frame.length;if(!frame.missing){frameIndex++;for(;offset<len-1;offset++){if(isHeader$1(data,offset)){break;}}}else {this.aacOverFlow=frame;break;}}}parseMPEGPES(track,pes){const data=pes.data;const length=data.length;let frameIndex=0;let offset=0;const pts=pes.pts;if(pts===undefined){logger.warn('[tsdemuxer]: MPEG PES unknown PTS');return;}while(offset<length){if(isHeader(data,offset)){const frame=appendFrame$1(track,data,offset,pts,frameIndex);if(frame){offset+=frame.length;frameIndex++;}else {// logger.log('Unable to parse Mpeg audio frame');
break;}}else {// nothing found, keep looking
offset++;}}}parseAC3PES(track,pes){{const data=pes.data;const pts=pes.pts;if(pts===undefined){logger.warn('[tsdemuxer]: AC3 PES unknown PTS');return;}const length=data.length;let frameIndex=0;let offset=0;let parsed;while(offset<length&&(parsed=appendFrame(track,data,offset,pts,frameIndex++))>0){offset+=parsed;}}}parseID3PES(id3Track,pes){if(pes.pts===undefined){logger.warn('[tsdemuxer]: ID3 PES unknown PTS');return;}const id3Sample=_extends({},pes,{type:this._videoTrack?MetadataSchema.emsg:MetadataSchema.audioId3,duration:Number.POSITIVE_INFINITY});id3Track.samples.push(id3Sample);}}function parsePID(data,offset){// pid is a 13-bit field starting at the last bit of TS[1]
return ((data[offset+1]&0x1f)<<8)+data[offset+2];}function parsePAT(data,offset){// skip the PSI header and parse the first PMT entry
return (data[offset+10]&0x1f)<<8|data[offset+11];}function parsePMT(data,offset,typeSupported,isSampleAes,observer){const result={audioPid:-1,videoPid:-1,id3Pid:-1,segmentVideoCodec:'avc',segmentAudioCodec:'aac'};const sectionLength=(data[offset+1]&0x0f)<<8|data[offset+2];const tableEnd=offset+3+sectionLength-4;// to determine where the table is, we have to figure out how
// long the program info descriptors are
const programInfoLength=(data[offset+10]&0x0f)<<8|data[offset+11];// advance the offset to the first entry in the mapping table
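// The stream_type switch below maps PMT entries to tracks: 0x0f ADTS AAC, 0x1b H.264, 0x03/0x04
// MPEG audio, 0x15 ID3 metadata, 0x81 AC-3, 0x06 private data (AC-3 only via DVB descriptor 0x6a),
// plus the SAMPLE-AES variants 0xcf/0xdb/0xc1; EC-3 (0x87/0xc2) and HEVC (0x24) abort parsing as
// unsupported.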
offset+=12+programInfoLength;while(offset<tableEnd){const pid=parsePID(data,offset);const esInfoLength=(data[offset+3]&0x0f)<<8|data[offset+4];switch(data[offset]){case 0xcf:// SAMPLE-AES AAC
if(!isSampleAes){logEncryptedSamplesFoundInUnencryptedStream('ADTS AAC');break;}/* falls through */case 0x0f:// ISO/IEC 13818-7 ADTS AAC (MPEG-2 lower bit-rate audio)
// logger.log('AAC PID:' + pid);
if(result.audioPid===-1){result.audioPid=pid;}break;// Packetized metadata (ID3)
case 0x15:// logger.log('ID3 PID:' + pid);
if(result.id3Pid===-1){result.id3Pid=pid;}break;case 0xdb:// SAMPLE-AES AVC
if(!isSampleAes){logEncryptedSamplesFoundInUnencryptedStream('H.264');break;}/* falls through */case 0x1b:// ITU-T Rec. H.264 and ISO/IEC 14496-10 (lower bit-rate video)
// logger.log('AVC PID:' + pid);
if(result.videoPid===-1){result.videoPid=pid;result.segmentVideoCodec='avc';}break;// ISO/IEC 11172-3 (MPEG-1 audio)
// or ISO/IEC 13818-3 (MPEG-2 halved sample rate audio)
case 0x03:case 0x04:// logger.log('MPEG PID:' + pid);
if(!typeSupported.mpeg&&!typeSupported.mp3){logger.log('MPEG audio found, not supported in this browser');}else if(result.audioPid===-1){result.audioPid=pid;result.segmentAudioCodec='mp3';}break;case 0xc1:// SAMPLE-AES AC3
if(!isSampleAes){logEncryptedSamplesFoundInUnencryptedStream('AC-3');break;}/* falls through */case 0x81:{if(!typeSupported.ac3){logger.log('AC-3 audio found, not supported in this browser');}else if(result.audioPid===-1){result.audioPid=pid;result.segmentAudioCodec='ac3';}}break;case 0x06:// stream_type 6 can mean a lot of different things in case of DVB.
// We need to look at the descriptors. Right now, we're only interested
// in AC-3 audio, so we do the descriptor parsing only when we don't have
// an audio PID yet.
if(result.audioPid===-1&&esInfoLength>0){let parsePos=offset+5;let remaining=esInfoLength;while(remaining>2){const descriptorId=data[parsePos];switch(descriptorId){case 0x6a:// DVB Descriptor for AC-3
{if(typeSupported.ac3!==true){logger.log('AC-3 audio found, not supported in this browser for now');}else {result.audioPid=pid;result.segmentAudioCodec='ac3';}}break;}const descriptorLen=data[parsePos+1]+2;parsePos+=descriptorLen;remaining-=descriptorLen;}}break;case 0xc2:// SAMPLE-AES EC3
/* falls through */case 0x87:emitParsingError(observer,new Error('Unsupported EC-3 in M2TS found'));return result;case 0x24:emitParsingError(observer,new Error('Unsupported HEVC in M2TS found'));return result;}// move to the next table entry
// skip past the elementary stream descriptors, if present
offset+=esInfoLength+5;}return result;}function emitParsingError(observer,error,levelRetry){logger.warn(`parsing error: ${error.message}`);observer.emit(Events.ERROR,Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.FRAG_PARSING_ERROR,fatal:false,levelRetry,error,reason:error.message});}function logEncryptedSamplesFoundInUnencryptedStream(type){logger.log(`${type} with AES-128-CBC encryption found in unencrypted stream`);}function parsePES(stream){let i=0;let frag;let pesLen;let pesHdrLen;let pesPts;let pesDts;const data=stream.data;// safety check
if(!stream||stream.size===0){return null;}// we might need up to 19 bytes to read PES header
// if first chunk of data is less than 19 bytes, let's merge it with following ones until we get 19 bytes
// usually only one merge is needed (and this is rare ...)
while(data[0].length<19&&data.length>1){data[0]=appendUint8Array(data[0],data[1]);data.splice(1,1);}// retrieve PTS/DTS from first fragment
frag=data[0];const pesPrefix=(frag[0]<<16)+(frag[1]<<8)+frag[2];if(pesPrefix===1){pesLen=(frag[4]<<8)+frag[5];// if PES parsed length is not zero and greater than total received length, stop parsing. PES might be truncated
// minus 6 : PES header size
if(pesLen&&pesLen>stream.size-6){return null;}const pesFlags=frag[7];if(pesFlags&0xc0){/* PES header described here : http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
as PTS / DTS is 33 bit we cannot use bitwise operator in JS,
as Bitwise operators treat their operands as a sequence of 32 bits */pesPts=(frag[9]&0x0e)*536870912+// 1 << 29
(frag[10]&0xff)*4194304+// 1 << 22
(frag[11]&0xfe)*16384+// 1 << 14
(frag[12]&0xff)*128+// 1 << 7
(frag[13]&0xfe)/2;if(pesFlags&0x40){pesDts=(frag[14]&0x0e)*536870912+// 1 << 29
(frag[15]&0xff)*4194304+// 1 << 22
(frag[16]&0xfe)*16384+// 1 << 14
(frag[17]&0xff)*128+// 1 << 7
(frag[18]&0xfe)/2;if(pesPts-pesDts>60*90000){logger.warn(`${Math.round((pesPts-pesDts)/90000)}s delta between PTS and DTS, align them`);pesPts=pesDts;}}else {pesDts=pesPts;}}pesHdrLen=frag[8];// 9 bytes : 6 bytes for PES header + 3 bytes for PES extension
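// The constants above are powers of two (536870912 = 2^29, 4194304 = 2^22, 16384 = 2^14, 128 = 2^7):
// they reassemble the 33-bit PTS/DTS from the three split fields of the PES header (3 + 15 + 15 bits)
// with multiplication, because JavaScript bitwise operators would truncate the value to 32 bits.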
let payloadStartOffset=pesHdrLen+9;if(stream.size<=payloadStartOffset){return null;}stream.size-=payloadStartOffset;// reassemble PES packet
const pesData=new Uint8Array(stream.size);for(let j=0,dataLen=data.length;j<dataLen;j++){frag=data[j];let len=frag.byteLength;if(payloadStartOffset){if(payloadStartOffset>len){// trim full frag if PES header bigger than frag
payloadStartOffset-=len;continue;}else {// trim partial frag if PES header smaller than frag
frag=frag.subarray(payloadStartOffset);len-=payloadStartOffset;payloadStartOffset=0;}}pesData.set(frag,i);i+=len;}if(pesLen){// payload size : remove PES header + PES extension
pesLen-=pesHdrLen+3;}return {data:pesData,pts:pesPts,dts:pesDts,len:pesLen};}return null;}/**
* MP3 demuxer
*/class MP3Demuxer extends BaseAudioDemuxer{resetInitSegment(initSegment,audioCodec,videoCodec,trackDuration){super.resetInitSegment(initSegment,audioCodec,videoCodec,trackDuration);this._audioTrack={container:'audio/mpeg',type:'audio',id:2,pid:-1,sequenceNumber:0,segmentCodec:'mp3',samples:[],manifestCodec:audioCodec,duration:trackDuration,inputTimeScale:90000,dropped:0};}static probe(data){if(!data){return false;}// check if data contains ID3 timestamp and MPEG sync word
// Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
// Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
// More info http://www.mp3-tech.org/programmer/frame_header.html
const id3Data=getID3Data(data,0);let offset=(id3Data==null?void 0:id3Data.length)||0;// Check for ac-3|ec-3 sync bytes and return false if present
if(id3Data&&data[offset]===0x0b&&data[offset+1]===0x77&&getTimeStamp(id3Data)!==undefined&&// check the bsid to confirm ac-3 or ec-3 (not mp3)
getAudioBSID(data,offset)<=16){return false;}for(let length=data.length;offset<length;offset++){if(probe(data,offset)){logger.log('MPEG Audio sync word found !');return true;}}return false;}canParse(data,offset){return canParse(data,offset);}appendFrame(track,data,offset){if(this.basePTS===null){return;}return appendFrame$1(track,data,offset,this.basePTS,this.frameIndex);}}/**
* AAC helper
*/class AAC{static getSilentFrame(codec,channelCount){switch(codec){case'mp4a.40.2':if(channelCount===1){return new Uint8Array([0x00,0xc8,0x00,0x80,0x23,0x80]);}else if(channelCount===2){return new Uint8Array([0x21,0x00,0x49,0x90,0x02,0x19,0x00,0x23,0x80]);}else if(channelCount===3){return new Uint8Array([0x00,0xc8,0x00,0x80,0x20,0x84,0x01,0x26,0x40,0x08,0x64,0x00,0x8e]);}else if(channelCount===4){return new Uint8Array([0x00,0xc8,0x00,0x80,0x20,0x84,0x01,0x26,0x40,0x08,0x64,0x00,0x80,0x2c,0x80,0x08,0x02,0x38]);}else if(channelCount===5){return new Uint8Array([0x00,0xc8,0x00,0x80,0x20,0x84,0x01,0x26,0x40,0x08,0x64,0x00,0x82,0x30,0x04,0x99,0x00,0x21,0x90,0x02,0x38]);}else if(channelCount===6){return new Uint8Array([0x00,0xc8,0x00,0x80,0x20,0x84,0x01,0x26,0x40,0x08,0x64,0x00,0x82,0x30,0x04,0x99,0x00,0x21,0x90,0x02,0x00,0xb2,0x00,0x20,0x08,0xe0]);}break;// handle HE-AAC below (mp4a.40.5 / mp4a.40.29)
default:if(channelCount===1){// ffmpeg -y -f lavfi -i "aevalsrc=0:d=0.05" -c:a libfdk_aac -profile:a aac_he -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
return new Uint8Array([0x1,0x40,0x22,0x80,0xa3,0x4e,0xe6,0x80,0xba,0x8,0x0,0x0,0x0,0x1c,0x6,0xf1,0xc1,0xa,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5e]);}else if(channelCount===2){// ffmpeg -y -f lavfi -i "aevalsrc=0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
return new Uint8Array([0x1,0x40,0x22,0x80,0xa3,0x5e,0xe6,0x80,0xba,0x8,0x0,0x0,0x0,0x0,0x95,0x0,0x6,0xf1,0xa1,0xa,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5e]);}else if(channelCount===3){// ffmpeg -y -f lavfi -i "aevalsrc=0|0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
return new Uint8Array([0x1,0x40,0x22,0x80,0xa3,0x5e,0xe6,0x80,0xba,0x8,0x0,0x0,0x0,0x0,0x95,0x0,0x6,0xf1,0xa1,0xa,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5a,0x5e]);}break;}return undefined;}}/**
* Generate MP4 Box
*/const UINT32_MAX=Math.pow(2,32)-1;class MP4{static init(){MP4.types={avc1:[],// codingname
avcC:[],btrt:[],dinf:[],dref:[],esds:[],ftyp:[],hdlr:[],mdat:[],mdhd:[],mdia:[],mfhd:[],minf:[],moof:[],moov:[],mp4a:[],'.mp3':[],dac3:[],'ac-3':[],mvex:[],mvhd:[],pasp:[],sdtp:[],stbl:[],stco:[],stsc:[],stsd:[],stsz:[],stts:[],tfdt:[],tfhd:[],traf:[],trak:[],trun:[],trex:[],tkhd:[],vmhd:[],smhd:[]};let i;for(i in MP4.types){if(MP4.types.hasOwnProperty(i)){MP4.types[i]=[i.charCodeAt(0),i.charCodeAt(1),i.charCodeAt(2),i.charCodeAt(3)];}}const videoHdlr=new Uint8Array([0x00,// version 0
0x00,0x00,0x00,// flags
0x00,0x00,0x00,0x00,// pre_defined
0x76,0x69,0x64,0x65,// handler_type: 'vide'
0x00,0x00,0x00,0x00,// reserved
0x00,0x00,0x00,0x00,// reserved
0x00,0x00,0x00,0x00,// reserved
0x56,0x69,0x64,0x65,0x6f,0x48,0x61,0x6e,0x64,0x6c,0x65,0x72,0x00// name: 'VideoHandler'
]);const audioHdlr=new Uint8Array([0x00,// version 0
0x00,0x00,0x00,// flags
0x00,0x00,0x00,0x00,// pre_defined
0x73,0x6f,0x75,0x6e,// handler_type: 'soun'
0x00,0x00,0x00,0x00,// reserved
0x00,0x00,0x00,0x00,// reserved
0x00,0x00,0x00,0x00,// reserved
0x53,0x6f,0x75,0x6e,0x64,0x48,0x61,0x6e,0x64,0x6c,0x65,0x72,0x00// name: 'SoundHandler'
]);MP4.HDLR_TYPES={video:videoHdlr,audio:audioHdlr};const dref=new Uint8Array([0x00,// version 0
0x00,0x00,0x00,// flags
0x00,0x00,0x00,0x01,// entry_count
0x00,0x00,0x00,0x0c,// entry_size
0x75,0x72,0x6c,0x20,// 'url' type
0x00,// version 0
0x00,0x00,0x01// entry_flags
]);const stco=new Uint8Array([0x00,// version
0x00,0x00,0x00,// flags
0x00,0x00,0x00,0x00// entry_count
]);MP4.STTS=MP4.STSC=MP4.STCO=stco;MP4.STSZ=new Uint8Array([0x00,// version
0x00,0x00,0x00,// flags
0x00,0x00,0x00,0x00,// sample_size
0x00,0x00,0x00,0x00// sample_count
]);MP4.VMHD=new Uint8Array([0x00,// version
0x00,0x00,0x01,// flags
0x00,0x00,// graphicsmode
0x00,0x00,0x00,0x00,0x00,0x00// opcolor
]);MP4.SMHD=new Uint8Array([0x00,// version
0x00,0x00,0x00,// flags
0x00,0x00,// balance
0x00,0x00// reserved
]);MP4.STSD=new Uint8Array([0x00,// version 0
0x00,0x00,0x00,// flags
0x00,0x00,0x00,0x01]);// entry_count
const majorBrand=new Uint8Array([105,115,111,109]);// isom
const avc1Brand=new Uint8Array([97,118,99,49]);// avc1
const minorVersion=new Uint8Array([0,0,0,1]);MP4.FTYP=MP4.box(MP4.types.ftyp,majorBrand,minorVersion,majorBrand,avc1Brand);MP4.DINF=MP4.box(MP4.types.dinf,MP4.box(MP4.types.dref,dref));}static box(type,...payload){let size=8;let i=payload.length;const len=i;// calculate the total size we need to allocate
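// The buffer assembled here is a plain ISO BMFF box: a 4-byte big-endian size covering header and
// payload, the 4-byte type code, then every payload array copied back to back starting at offset 8.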
while(i--){size+=payload[i].byteLength;}const result=new Uint8Array(size);result[0]=size>>24&0xff;result[1]=size>>16&0xff;result[2]=size>>8&0xff;result[3]=size&0xff;result.set(type,4);// copy the payload into the result
for(i=0,size=8;i<len;i++){// copy payload[i] array @ offset size
result.set(payload[i],size);size+=payload[i].byteLength;}return result;}static hdlr(type){return MP4.box(MP4.types.hdlr,MP4.HDLR_TYPES[type]);}static mdat(data){return MP4.box(MP4.types.mdat,data);}static mdhd(timescale,duration){duration*=timescale;const upperWordDuration=Math.floor(duration/(UINT32_MAX+1));const lowerWordDuration=Math.floor(duration%(UINT32_MAX+1));return MP4.box(MP4.types.mdhd,new Uint8Array([0x01,// version 1
0x00,0x00,0x00,// flags
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x02,// creation_time
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,// modification_time
timescale>>24&0xff,timescale>>16&0xff,timescale>>8&0xff,timescale&0xff,// timescale
upperWordDuration>>24,upperWordDuration>>16&0xff,upperWordDuration>>8&0xff,upperWordDuration&0xff,lowerWordDuration>>24,lowerWordDuration>>16&0xff,lowerWordDuration>>8&0xff,lowerWordDuration&0xff,0x55,0xc4,// 'und' language (undetermined)
0x00,0x00]));}static mdia(track){return MP4.box(MP4.types.mdia,MP4.mdhd(track.timescale,track.duration),MP4.hdlr(track.type),MP4.minf(track));}static mfhd(sequenceNumber){return MP4.box(MP4.types.mfhd,new Uint8Array([0x00,0x00,0x00,0x00,// flags
sequenceNumber>>24,sequenceNumber>>16&0xff,sequenceNumber>>8&0xff,sequenceNumber&0xff// sequence_number
]));}static minf(track){if(track.type==='audio'){return MP4.box(MP4.types.minf,MP4.box(MP4.types.smhd,MP4.SMHD),MP4.DINF,MP4.stbl(track));}else {return MP4.box(MP4.types.minf,MP4.box(MP4.types.vmhd,MP4.VMHD),MP4.DINF,MP4.stbl(track));}}static moof(sn,baseMediaDecodeTime,track){return MP4.box(MP4.types.moof,MP4.mfhd(sn),MP4.traf(track,baseMediaDecodeTime));}static moov(tracks){let i=tracks.length;const boxes=[];while(i--){boxes[i]=MP4.trak(tracks[i]);}return MP4.box.apply(null,[MP4.types.moov,MP4.mvhd(tracks[0].timescale,tracks[0].duration)].concat(boxes).concat(MP4.mvex(tracks)));}static mvex(tracks){let i=tracks.length;const boxes=[];while(i--){boxes[i]=MP4.trex(tracks[i]);}return MP4.box.apply(null,[MP4.types.mvex,...boxes]);}static mvhd(timescale,duration){duration*=timescale;const upperWordDuration=Math.floor(duration/(UINT32_MAX+1));const lowerWordDuration=Math.floor(duration%(UINT32_MAX+1));const bytes=new Uint8Array([0x01,// version 1
0x00,0x00,0x00,// flags
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x02,// creation_time
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,// modification_time
timescale>>24&0xff,timescale>>16&0xff,timescale>>8&0xff,timescale&0xff,// timescale
upperWordDuration>>24,upperWordDuration>>16&0xff,upperWordDuration>>8&0xff,upperWordDuration&0xff,lowerWordDuration>>24,lowerWordDuration>>16&0xff,lowerWordDuration>>8&0xff,lowerWordDuration&0xff,0x00,0x01,0x00,0x00,// 1.0 rate
0x01,0x00,// 1.0 volume
0x00,0x00,// reserved
0x00,0x00,0x00,0x00,// reserved
0x00,0x00,0x00,0x00,// reserved
0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x00,0x00,0x00,// transformation: unity matrix
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,// pre_defined
0xff,0xff,0xff,0xff// next_track_ID
]);return MP4.box(MP4.types.mvhd,bytes);}static sdtp(track){const samples=track.samples||[];const bytes=new Uint8Array(4+samples.length);let i;let flags;// leave the full box header (4 bytes) all zero
// write the sample table
for(i=0;i<samples.length;i++){flags=samples[i].flags;bytes[i+4]=flags.dependsOn<<4|flags.isDependedOn<<2|flags.hasRedundancy;}return MP4.box(MP4.types.sdtp,bytes);}static stbl(track){return MP4.box(MP4.types.stbl,MP4.stsd(track),MP4.box(MP4.types.stts,MP4.STTS),MP4.box(MP4.types.stsc,MP4.STSC),MP4.box(MP4.types.stsz,MP4.STSZ),MP4.box(MP4.types.stco,MP4.STCO));}static avc1(track){let sps=[];let pps=[];let i;let data;let len;// assemble the SPSs
for(i=0;i<track.sps.length;i++){data=track.sps[i];len=data.byteLength;sps.push(len>>>8&0xff);sps.push(len&0xff);// SPS
sps=sps.concat(Array.prototype.slice.call(data));}// assemble the PPSs
for(i=0;i<track.pps.length;i++){data=track.pps[i];len=data.byteLength;pps.push(len>>>8&0xff);pps.push(len&0xff);pps=pps.concat(Array.prototype.slice.call(data));}const avcc=MP4.box(MP4.types.avcC,new Uint8Array([0x01,// version
sps[3],// profile
sps[4],// profile compat
sps[5],// level
0xfc|3,// lengthSizeMinusOne, hard-coded to 4 bytes
0xe0|track.sps.length// 3bit reserved (111) + numOfSequenceParameterSets
].concat(sps).concat([track.pps.length// numOfPictureParameterSets
]).concat(pps)));// "PPS"
const width=track.width;const height=track.height;const hSpacing=track.pixelRatio[0];const vSpacing=track.pixelRatio[1];return MP4.box(MP4.types.avc1,new Uint8Array([0x00,0x00,0x00,// reserved
0x00,0x00,0x00,// reserved
0x00,0x01,// data_reference_index
0x00,0x00,// pre_defined
0x00,0x00,// reserved
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,// pre_defined
width>>8&0xff,width&0xff,// width
height>>8&0xff,height&0xff,// height
0x00,0x48,0x00,0x00,// horizresolution
0x00,0x48,0x00,0x00,// vertresolution
0x00,0x00,0x00,0x00,// reserved
0x00,0x01,// frame_count
0x12,0x64,0x61,0x69,0x6c,// dailymotion/hls.js
0x79,0x6d,0x6f,0x74,0x69,0x6f,0x6e,0x2f,0x68,0x6c,0x73,0x2e,0x6a,0x73,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,// compressorname
0x00,0x18,// depth = 24
0x11,0x11]),// pre_defined = -1
avcc,MP4.box(MP4.types.btrt,new Uint8Array([0x00,0x1c,0x9c,0x80,// bufferSizeDB
0x00,0x2d,0xc6,0xc0,// maxBitrate
0x00,0x2d,0xc6,0xc0])),// avgBitrate
MP4.box(MP4.types.pasp,new Uint8Array([hSpacing>>24,// hSpacing
hSpacing>>16&0xff,hSpacing>>8&0xff,hSpacing&0xff,vSpacing>>24,// vSpacing
vSpacing>>16&0xff,vSpacing>>8&0xff,vSpacing&0xff])));}static esds(track){const configlen=track.config.length;return new Uint8Array([0x00,// version 0
0x00,0x00,0x00,// flags
0x03,// descriptor_type
0x17+configlen,// length
0x00,0x01,// es_id
0x00,// stream_priority
0x04,// descriptor_type
0x0f+configlen,// length
0x40,// codec : mpeg4_audio
0x15,// stream_type
0x00,0x00,0x00,// buffer_size
0x00,0x00,0x00,0x00,// maxBitrate
0x00,0x00,0x00,0x00,// avgBitrate
0x05// descriptor_type
].concat([configlen]).concat(track.config).concat([0x06,0x01,0x02]));// GASpecificConfig)); // length + audio config descriptor
}static audioStsd(track){const samplerate=track.samplerate;return new Uint8Array([0x00,0x00,0x00,// reserved
0x00,0x00,0x00,// reserved
0x00,0x01,// data_reference_index
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,// reserved
0x00,track.channelCount,// channelcount
0x00,0x10,// sampleSize:16bits
0x00,0x00,0x00,0x00,// reserved2
samplerate>>8&0xff,samplerate&0xff,//
0x00,0x00]);}static mp4a(track){return MP4.box(MP4.types.mp4a,MP4.audioStsd(track),MP4.box(MP4.types.esds,MP4.esds(track)));}static mp3(track){return MP4.box(MP4.types['.mp3'],MP4.audioStsd(track));}static ac3(track){return MP4.box(MP4.types['ac-3'],MP4.audioStsd(track),MP4.box(MP4.types.dac3,track.config));}static stsd(track){if(track.type==='audio'){if(track.segmentCodec==='mp3'&&track.codec==='mp3'){return MP4.box(MP4.types.stsd,MP4.STSD,MP4.mp3(track));}if(track.segmentCodec==='ac3'){return MP4.box(MP4.types.stsd,MP4.STSD,MP4.ac3(track));}return MP4.box(MP4.types.stsd,MP4.STSD,MP4.mp4a(track));}else {return MP4.box(MP4.types.stsd,MP4.STSD,MP4.avc1(track));}}static tkhd(track){const id=track.id;const duration=track.duration*track.timescale;const width=track.width;const height=track.height;const upperWordDuration=Math.floor(duration/(UINT32_MAX+1));const lowerWordDuration=Math.floor(duration%(UINT32_MAX+1));return MP4.box(MP4.types.tkhd,new Uint8Array([0x01,// version 1
0x00,0x00,0x07,// flags
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x02,// creation_time
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,// modification_time
id>>24&0xff,id>>16&0xff,id>>8&0xff,id&0xff,// track_ID
0x00,0x00,0x00,0x00,// reserved
upperWordDuration>>24,upperWordDuration>>16&0xff,upperWordDuration>>8&0xff,upperWordDuration&0xff,lowerWordDuration>>24,lowerWordDuration>>16&0xff,lowerWordDuration>>8&0xff,lowerWordDuration&0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,// reserved
0x00,0x00,// layer
0x00,0x00,// alternate_group
0x00,0x00,// non-audio track volume
0x00,0x00,// reserved
0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x00,0x00,0x00,// transformation: unity matrix
width>>8&0xff,width&0xff,0x00,0x00,// width
height>>8&0xff,height&0xff,0x00,0x00// height
]));}static traf(track,baseMediaDecodeTime){const sampleDependencyTable=MP4.sdtp(track);const id=track.id;const upperWordBaseMediaDecodeTime=Math.floor(baseMediaDecodeTime/(UINT32_MAX+1));const lowerWordBaseMediaDecodeTime=Math.floor(baseMediaDecodeTime%(UINT32_MAX+1));return MP4.box(MP4.types.traf,MP4.box(MP4.types.tfhd,new Uint8Array([0x00,// version 0
0x00,0x00,0x00,// flags
id>>24,id>>16&0xff,id>>8&0xff,id&0xff// track_ID
])),MP4.box(MP4.types.tfdt,new Uint8Array([0x01,// version 1
0x00,0x00,0x00,// flags
upperWordBaseMediaDecodeTime>>24,upperWordBaseMediaDecodeTime>>16&0xff,upperWordBaseMediaDecodeTime>>8&0xff,upperWordBaseMediaDecodeTime&0xff,lowerWordBaseMediaDecodeTime>>24,lowerWordBaseMediaDecodeTime>>16&0xff,lowerWordBaseMediaDecodeTime>>8&0xff,lowerWordBaseMediaDecodeTime&0xff])),MP4.trun(track,sampleDependencyTable.length+16+// tfhd
20+// tfdt
8+// traf header
16+// mfhd
8+// moof header
8),// mdat header
sampleDependencyTable);}/**
* Generate a track box.
* @param track a track definition
*/static trak(track){track.duration=track.duration||0xffffffff;return MP4.box(MP4.types.trak,MP4.tkhd(track),MP4.mdia(track));}static trex(track){const id=track.id;return MP4.box(MP4.types.trex,new Uint8Array([0x00,// version 0
0x00,0x00,0x00,// flags
id>>24,id>>16&0xff,id>>8&0xff,id&0xff,// track_ID
0x00,0x00,0x00,0x01,// default_sample_description_index
0x00,0x00,0x00,0x00,// default_sample_duration
0x00,0x00,0x00,0x00,// default_sample_size
0x00,0x01,0x00,0x01// default_sample_flags
]));}static trun(track,offset){const samples=track.samples||[];const len=samples.length;const arraylen=12+16*len;const array=new Uint8Array(arraylen);let i;let sample;let duration;let size;let flags;let cts;offset+=8+arraylen;array.set([track.type==='video'?0x01:0x00,// version 1 for video with signed-int sample_composition_time_offset
0x00,0x0f,0x01,// flags
len>>>24&0xff,len>>>16&0xff,len>>>8&0xff,len&0xff,// sample_count
offset>>>24&0xff,offset>>>16&0xff,offset>>>8&0xff,offset&0xff// data_offset
],0);for(i=0;i<len;i++){sample=samples[i];duration=sample.duration;size=sample.size;flags=sample.flags;cts=sample.cts;array.set([duration>>>24&0xff,duration>>>16&0xff,duration>>>8&0xff,duration&0xff,// sample_duration
size>>>24&0xff,size>>>16&0xff,size>>>8&0xff,size&0xff,// sample_size
flags.isLeading<<2|flags.dependsOn,flags.isDependedOn<<6|flags.hasRedundancy<<4|flags.paddingValue<<1|flags.isNonSync,flags.degradPrio&0xf0<<8,flags.degradPrio&0x0f,// sample_flags
cts>>>24&0xff,cts>>>16&0xff,cts>>>8&0xff,cts&0xff// sample_composition_time_offset
],12+16*i);}return MP4.box(MP4.types.trun,array);}static initSegment(tracks){if(!MP4.types){MP4.init();}const movie=MP4.moov(tracks);const result=appendUint8Array(MP4.FTYP,movie);return result;}}MP4.types=void 0;MP4.HDLR_TYPES=void 0;MP4.STTS=void 0;MP4.STSC=void 0;MP4.STCO=void 0;MP4.STSZ=void 0;MP4.VMHD=void 0;MP4.SMHD=void 0;MP4.STSD=void 0;MP4.FTYP=void 0;MP4.DINF=void 0;const MPEG_TS_CLOCK_FREQ_HZ=90000;function toTimescaleFromBase(baseTime,destScale,srcBase=1,round=false){const result=baseTime*destScale*srcBase;// equivalent to `(value * scale) / (1 / base)`
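// Worked example for the conversion helpers defined below: toMsFromMpegTsClock(x) equals
// x * 1000 / 90000, so a delta of 90000 ticks on the 90 kHz MPEG-TS clock maps to 1000 ms, and
// toMpegTsClockFromTimescale performs the inverse mapping back onto the 90 kHz clock.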
return round?Math.round(result):result;}function toTimescaleFromScale(baseTime,destScale,srcScale=1,round=false){return toTimescaleFromBase(baseTime,destScale,1/srcScale,round);}function toMsFromMpegTsClock(baseTime,round=false){return toTimescaleFromBase(baseTime,1000,1/MPEG_TS_CLOCK_FREQ_HZ,round);}function toMpegTsClockFromTimescale(baseTime,srcScale=1){return toTimescaleFromBase(baseTime,MPEG_TS_CLOCK_FREQ_HZ,1/srcScale);}const MAX_SILENT_FRAME_DURATION=10*1000;// 10 seconds
const AAC_SAMPLES_PER_FRAME=1024;const MPEG_AUDIO_SAMPLE_PER_FRAME=1152;const AC3_SAMPLES_PER_FRAME=1536;let chromeVersion=null;let safariWebkitVersion=null;class MP4Remuxer{constructor(observer,config,typeSupported,vendor=''){this.observer=void 0;this.config=void 0;this.typeSupported=void 0;this.ISGenerated=false;this._initPTS=null;this._initDTS=null;this.nextAvcDts=null;this.nextAudioPts=null;this.videoSampleDuration=null;this.isAudioContiguous=false;this.isVideoContiguous=false;this.videoTrackConfig=void 0;this.observer=observer;this.config=config;this.typeSupported=typeSupported;this.ISGenerated=false;if(chromeVersion===null){const userAgent=navigator.userAgent||'';const result=userAgent.match(/Chrome\/(\d+)/i);chromeVersion=result?parseInt(result[1]):0;}if(safariWebkitVersion===null){const result=navigator.userAgent.match(/Safari\/(\d+)/i);safariWebkitVersion=result?parseInt(result[1]):0;}}destroy(){// @ts-ignore
this.config=this.videoTrackConfig=this._initPTS=this._initDTS=null;}resetTimeStamp(defaultTimeStamp){logger.log('[mp4-remuxer]: initPTS & initDTS reset');this._initPTS=this._initDTS=defaultTimeStamp;}resetNextTimestamp(){logger.log('[mp4-remuxer]: reset next timestamp');this.isVideoContiguous=false;this.isAudioContiguous=false;}resetInitSegment(){logger.log('[mp4-remuxer]: ISGenerated flag reset');this.ISGenerated=false;this.videoTrackConfig=undefined;}getVideoStartPts(videoSamples){let rolloverDetected=false;const startPTS=videoSamples.reduce((minPTS,sample)=>{const delta=sample.pts-minPTS;if(delta<-4294967296){// 2^32, see PTSNormalize for reasoning, but we're hitting a rollover here, and we don't want that to impact the timeOffset calculation
rolloverDetected=true;return normalizePts(minPTS,sample.pts);}else if(delta>0){return minPTS;}else {return sample.pts;}},videoSamples[0].pts);if(rolloverDetected){logger.debug('PTS rollover detected');}return startPTS;}remux(audioTrack,videoTrack,id3Track,textTrack,timeOffset,accurateTimeOffset,flush,playlistType){let video;let audio;let initSegment;let text;let id3;let independent;let audioTimeOffset=timeOffset;let videoTimeOffset=timeOffset;// If we're remuxing audio and video progressively, wait until we've received enough samples for each track before proceeding.
// This is done to synchronize the audio and video streams. We know if the current segment will have samples if the "pid"
// parameter is greater than -1. The pid is set when the PMT is parsed, which contains the tracks list.
// However, if the initSegment has already been generated, or we've reached the end of a segment (flush),
// then we can remux one track without waiting for the other.
const hasAudio=audioTrack.pid>-1;const hasVideo=videoTrack.pid>-1;const length=videoTrack.samples.length;const enoughAudioSamples=audioTrack.samples.length>0;const enoughVideoSamples=flush&&length>0||length>1;const canRemuxAvc=(!hasAudio||enoughAudioSamples)&&(!hasVideo||enoughVideoSamples)||this.ISGenerated||flush;if(canRemuxAvc){if(this.ISGenerated){var _videoTrack$pixelRati,_config$pixelRatio,_videoTrack$pixelRati2,_config$pixelRatio2;const config=this.videoTrackConfig;if(config&&(videoTrack.width!==config.width||videoTrack.height!==config.height||((_videoTrack$pixelRati=videoTrack.pixelRatio)==null?void 0:_videoTrack$pixelRati[0])!==((_config$pixelRatio=config.pixelRatio)==null?void 0:_config$pixelRatio[0])||((_videoTrack$pixelRati2=videoTrack.pixelRatio)==null?void 0:_videoTrack$pixelRati2[1])!==((_config$pixelRatio2=config.pixelRatio)==null?void 0:_config$pixelRatio2[1]))){this.resetInitSegment();}}else {initSegment=this.generateIS(audioTrack,videoTrack,timeOffset,accurateTimeOffset);}const isVideoContiguous=this.isVideoContiguous;let firstKeyFrameIndex=-1;let firstKeyFramePTS;if(enoughVideoSamples){firstKeyFrameIndex=findKeyframeIndex(videoTrack.samples);if(!isVideoContiguous&&this.config.forceKeyFrameOnDiscontinuity){independent=true;if(firstKeyFrameIndex>0){logger.warn(`[mp4-remuxer]: Dropped ${firstKeyFrameIndex} out of ${length} video samples due to a missing keyframe`);const startPTS=this.getVideoStartPts(videoTrack.samples);videoTrack.samples=videoTrack.samples.slice(firstKeyFrameIndex);videoTrack.dropped+=firstKeyFrameIndex;videoTimeOffset+=(videoTrack.samples[0].pts-startPTS)/videoTrack.inputTimeScale;firstKeyFramePTS=videoTimeOffset;}else if(firstKeyFrameIndex===-1){logger.warn(`[mp4-remuxer]: No keyframe found out of ${length} video samples`);independent=false;}}}if(this.ISGenerated){if(enoughAudioSamples&&enoughVideoSamples){// timeOffset is expected to be the offset of the first timestamp of this fragment (first DTS)
// if first audio DTS is not aligned with first video DTS then we need to take that into account
// when providing timeOffset to remuxAudio / remuxVideo. if we don't do that, there might be a permanent / small
// drift between audio and video streams
const startPTS=this.getVideoStartPts(videoTrack.samples);const tsDelta=normalizePts(audioTrack.samples[0].pts,startPTS)-startPTS;const audiovideoTimestampDelta=tsDelta/videoTrack.inputTimeScale;audioTimeOffset+=Math.max(0,audiovideoTimestampDelta);videoTimeOffset+=Math.max(0,-audiovideoTimestampDelta);}// Purposefully remuxing audio before video, so that remuxVideo can use nextAudioPts, which is calculated in remuxAudio.
if(enoughAudioSamples){// if initSegment was generated without audio samples, regenerate it again
if(!audioTrack.samplerate){logger.warn('[mp4-remuxer]: regenerate InitSegment as audio detected');initSegment=this.generateIS(audioTrack,videoTrack,timeOffset,accurateTimeOffset);}audio=this.remuxAudio(audioTrack,audioTimeOffset,this.isAudioContiguous,accurateTimeOffset,hasVideo||enoughVideoSamples||playlistType===PlaylistLevelType.AUDIO?videoTimeOffset:undefined);if(enoughVideoSamples){const audioTrackLength=audio?audio.endPTS-audio.startPTS:0;// if initSegment was generated without video samples, regenerate it again
if(!videoTrack.inputTimeScale){logger.warn('[mp4-remuxer]: regenerate InitSegment as video detected');initSegment=this.generateIS(audioTrack,videoTrack,timeOffset,accurateTimeOffset);}video=this.remuxVideo(videoTrack,videoTimeOffset,isVideoContiguous,audioTrackLength);}}else if(enoughVideoSamples){video=this.remuxVideo(videoTrack,videoTimeOffset,isVideoContiguous,0);}if(video){video.firstKeyFrame=firstKeyFrameIndex;video.independent=firstKeyFrameIndex!==-1;video.firstKeyFramePTS=firstKeyFramePTS;}}}// Allow ID3 and text to remux, even if more audio/video samples are required
if(this.ISGenerated&&this._initPTS&&this._initDTS){if(id3Track.samples.length){id3=flushTextTrackMetadataCueSamples(id3Track,timeOffset,this._initPTS,this._initDTS);}if(textTrack.samples.length){text=flushTextTrackUserdataCueSamples(textTrack,timeOffset,this._initPTS);}}return {audio,video,initSegment,independent,text,id3};}generateIS(audioTrack,videoTrack,timeOffset,accurateTimeOffset){const audioSamples=audioTrack.samples;const videoSamples=videoTrack.samples;const typeSupported=this.typeSupported;const tracks={};const _initPTS=this._initPTS;let computePTSDTS=!_initPTS||accurateTimeOffset;let container='audio/mp4';let initPTS;let initDTS;let timescale;if(computePTSDTS){initPTS=initDTS=Infinity;}if(audioTrack.config&&audioSamples.length){// let's use audio sampling rate as MP4 time scale.
// rationale is that there is a integer nb of audio frames per audio sample (1024 for AAC)
// using audio sampling rate here helps having an integer MP4 frame duration
// this avoids potential rounding issue and AV sync issue
audioTrack.timescale=audioTrack.samplerate;switch(audioTrack.segmentCodec){case'mp3':if(typeSupported.mpeg){// Chrome and Safari
container='audio/mpeg';audioTrack.codec='';}else if(typeSupported.mp3){// Firefox
audioTrack.codec='mp3';}break;case'ac3':audioTrack.codec='ac-3';break;}tracks.audio={id:'audio',container:container,codec:audioTrack.codec,initSegment:audioTrack.segmentCodec==='mp3'&&typeSupported.mpeg?new Uint8Array(0):MP4.initSegment([audioTrack]),metadata:{channelCount:audioTrack.channelCount}};if(computePTSDTS){timescale=audioTrack.inputTimeScale;if(!_initPTS||timescale!==_initPTS.timescale){// remember first PTS of this demuxing context. for audio, PTS = DTS
initPTS=initDTS=audioSamples[0].pts-Math.round(timescale*timeOffset);}else {computePTSDTS=false;}}}if(videoTrack.sps&&videoTrack.pps&&videoSamples.length){// let's use input time scale as MP4 video timescale
// we use input time scale straight away to avoid rounding issues on frame duration / cts computation
videoTrack.timescale=videoTrack.inputTimeScale;tracks.video={id:'main',container:'video/mp4',codec:videoTrack.codec,initSegment:MP4.initSegment([videoTrack]),metadata:{width:videoTrack.width,height:videoTrack.height}};if(computePTSDTS){timescale=videoTrack.inputTimeScale;if(!_initPTS||timescale!==_initPTS.timescale){const startPTS=this.getVideoStartPts(videoSamples);const startOffset=Math.round(timescale*timeOffset);initDTS=Math.min(initDTS,normalizePts(videoSamples[0].dts,startPTS)-startOffset);initPTS=Math.min(initPTS,startPTS-startOffset);}else {computePTSDTS=false;}}this.videoTrackConfig={width:videoTrack.width,height:videoTrack.height,pixelRatio:videoTrack.pixelRatio};}if(Object.keys(tracks).length){this.ISGenerated=true;if(computePTSDTS){this._initPTS={baseTime:initPTS,timescale:timescale};this._initDTS={baseTime:initDTS,timescale:timescale};}else {initPTS=timescale=undefined;}return {tracks,initPTS,timescale};}}remuxVideo(track,timeOffset,contiguous,audioTrackLength){const timeScale=track.inputTimeScale;const inputSamples=track.samples;const outputSamples=[];const nbSamples=inputSamples.length;const initPTS=this._initPTS;let nextAvcDts=this.nextAvcDts;let offset=8;let mp4SampleDuration=this.videoSampleDuration;let firstDTS;let lastDTS;let minPTS=Number.POSITIVE_INFINITY;let maxPTS=Number.NEGATIVE_INFINITY;let sortSamples=false;// if parsed fragment is contiguous with last one, let's use last DTS value as reference
if(!contiguous||nextAvcDts===null){const pts=timeOffset*timeScale;const cts=inputSamples[0].pts-normalizePts(inputSamples[0].dts,inputSamples[0].pts);if(chromeVersion&&nextAvcDts!==null&&Math.abs(pts-cts-nextAvcDts)<15000){// treat as contiguous to adjust samples that would otherwise produce video buffer gaps in Chrome
contiguous=true;}else {// if not contiguous, let's use target timeOffset
nextAvcDts=pts-cts;}}// PTS is coded on 33bits, and can loop from -2^32 to 2^32
// PTSNormalize will make PTS/DTS value monotonic, we use last known DTS value as reference value
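// The 33-bit PTS wraps every 2^33 ticks, i.e. roughly every 26.5 hours at 90 kHz; normalizePts
// compensates by offsetting a sample's timestamp by 2^33 relative to the reference value (the last
// known DTS here), which keeps the remuxed timestamps monotonic across the wrap.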
const initTime=initPTS.baseTime*timeScale/initPTS.timescale;for(let i=0;i<nbSamples;i++){const sample=inputSamples[i];sample.pts=normalizePts(sample.pts-initTime,nextAvcDts);sample.dts=normalizePts(sample.dts-initTime,nextAvcDts);if(sample.dts<inputSamples[i>0?i-1:i].dts){sortSamples=true;}}// sort video samples by DTS then PTS then demux id order
if(sortSamples){inputSamples.sort(function(a,b){const deltadts=a.dts-b.dts;const deltapts=a.pts-b.pts;return deltadts||deltapts;});}// Get first/last DTS
firstDTS=inputSamples[0].dts;lastDTS=inputSamples[inputSamples.length-1].dts;// Sample duration (as expected by trun MP4 boxes), should be the delta between sample DTS
// set this constant duration as being the avg delta between consecutive DTS.
const inputDuration=lastDTS-firstDTS;const averageSampleDuration=inputDuration?Math.round(inputDuration/(nbSamples-1)):mp4SampleDuration||track.inputTimeScale/30;// if fragment are contiguous, detect hole/overlapping between fragments
if(contiguous){// check timestamp continuity across consecutive fragments (this is to remove inter-fragment gap/hole)
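// A positive delta larger than one average sample duration is treated as a hole, a delta below -1
// tick as an overlap; in both cases the condition is logged and, where safe, the first samples are
// shifted onto nextAvcDts so consecutive fragments stay contiguous in the output timeline.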
const delta=firstDTS-nextAvcDts;const foundHole=delta>averageSampleDuration;const foundOverlap=delta<-1;if(foundHole||foundOverlap){if(foundHole){logger.warn(`AVC: ${toMsFromMpegTsClock(delta,true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);}else {logger.warn(`AVC: ${toMsFromMpegTsClock(-delta,true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);}if(!foundOverlap||nextAvcDts>=inputSamples[0].pts||chromeVersion){firstDTS=nextAvcDts;const firstPTS=inputSamples[0].pts-delta;if(foundHole){inputSamples[0].dts=firstDTS;inputSamples[0].pts=firstPTS;}else {for(let i=0;i<inputSamples.length;i++){if(inputSamples[i].dts>firstPTS){break;}inputSamples[i].dts-=delta;inputSamples[i].pts-=delta;}}logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS,true)}/${toMsFromMpegTsClock(firstDTS,true)}, delta: ${toMsFromMpegTsClock(delta,true)} ms`);}}}firstDTS=Math.max(0,firstDTS);let nbNalu=0;let naluLen=0;let dtsStep=firstDTS;for(let i=0;i<nbSamples;i++){// compute total/avc sample length and nb of NAL units
const sample=inputSamples[i];const units=sample.units;const nbUnits=units.length;let sampleLen=0;for(let j=0;j<nbUnits;j++){sampleLen+=units[j].data.length;}naluLen+=sampleLen;nbNalu+=nbUnits;sample.length=sampleLen;// ensure sample monotonic DTS
if(sample.dts<dtsStep){sample.dts=dtsStep;dtsStep+=averageSampleDuration/4|0||1;}else {dtsStep=sample.dts;}minPTS=Math.min(sample.pts,minPTS);maxPTS=Math.max(sample.pts,maxPTS);}lastDTS=inputSamples[nbSamples-1].dts;/* concatenate the video data and construct the mdat in place
(need 8 more bytes to fill length and mpdat type) */const mdatSize=naluLen+4*nbNalu+8;let mdat;try{mdat=new Uint8Array(mdatSize);}catch(err){this.observer.emit(Events.ERROR,Events.ERROR,{type:ErrorTypes.MUX_ERROR,details:ErrorDetails.REMUX_ALLOC_ERROR,fatal:false,error:err,bytes:mdatSize,reason:`fail allocating video mdat ${mdatSize}`});return;}const view=new DataView(mdat.buffer);view.setUint32(0,mdatSize);mdat.set(MP4.types.mdat,4);let stretchedLastFrame=false;let minDtsDelta=Number.POSITIVE_INFINITY;let minPtsDelta=Number.POSITIVE_INFINITY;let maxDtsDelta=Number.NEGATIVE_INFINITY;let maxPtsDelta=Number.NEGATIVE_INFINITY;for(let i=0;i<nbSamples;i++){const VideoSample=inputSamples[i];const VideoSampleUnits=VideoSample.units;let mp4SampleLength=0;// convert NALU bitstream to MP4 format (prepend NALU with size field)
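// AVCC conversion: each NAL unit is written as a 4-byte big-endian length followed by its payload,
// which is why the mdat was sized above as naluLen + 4 * nbNalu + 8 bytes (8 for the box header).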
for(let j=0,nbUnits=VideoSampleUnits.length;j<nbUnits;j++){const unit=VideoSampleUnits[j];const unitData=unit.data;const unitDataLen=unit.data.byteLength;view.setUint32(offset,unitDataLen);offset+=4;mdat.set(unitData,offset);offset+=unitDataLen;mp4SampleLength+=4+unitDataLen;}// expected sample duration is the Decoding Timestamp diff of consecutive samples
let ptsDelta;if(i<nbSamples-1){mp4SampleDuration=inputSamples[i+1].dts-VideoSample.dts;ptsDelta=inputSamples[i+1].pts-VideoSample.pts;}else {const config=this.config;const lastFrameDuration=i>0?VideoSample.dts-inputSamples[i-1].dts:averageSampleDuration;ptsDelta=i>0?VideoSample.pts-inputSamples[i-1].pts:averageSampleDuration;if(config.stretchShortVideoTrack&&this.nextAudioPts!==null){// In some cases, a segment's audio track duration may exceed the video track duration.
// Since we've already remuxed audio, and we know how long the audio track is, we look to
// see if the delta to the next segment is longer than maxBufferHole.
// If so, playback would potentially get stuck, so we artificially inflate
// the duration of the last frame to minimize any potential gap between segments.
const gapTolerance=Math.floor(config.maxBufferHole*timeScale);const deltaToFrameEnd=(audioTrackLength?minPTS+audioTrackLength*timeScale:this.nextAudioPts)-VideoSample.pts;if(deltaToFrameEnd>gapTolerance){// We subtract lastFrameDuration from deltaToFrameEnd to try to prevent any video
// frame overlap. maxBufferHole should be >> lastFrameDuration anyway.
mp4SampleDuration=deltaToFrameEnd-lastFrameDuration;if(mp4SampleDuration<0){mp4SampleDuration=lastFrameDuration;}else {stretchedLastFrame=true;}logger.log(`[mp4-remuxer]: It is approximately ${deltaToFrameEnd/90} ms to the next segment; using duration ${mp4SampleDuration/90} ms for the last video frame.`);}else {mp4SampleDuration=lastFrameDuration;}}else {mp4SampleDuration=lastFrameDuration;}}const compositionTimeOffset=Math.round(VideoSample.pts-VideoSample.dts);minDtsDelta=Math.min(minDtsDelta,mp4SampleDuration);maxDtsDelta=Math.max(maxDtsDelta,mp4SampleDuration);minPtsDelta=Math.min(minPtsDelta,ptsDelta);maxPtsDelta=Math.max(maxPtsDelta,ptsDelta);outputSamples.push(new Mp4Sample(VideoSample.key,mp4SampleDuration,mp4SampleLength,compositionTimeOffset));}if(outputSamples.length){if(chromeVersion){if(chromeVersion<70){// Chrome workaround, mark first sample as being a Random Access Point (keyframe) to avoid sourcebuffer append issue
// https://code.google.com/p/chromium/issues/detail?id=229412
const flags=outputSamples[0].flags;flags.dependsOn=2;flags.isNonSync=0;}}else if(safariWebkitVersion){// Fix for "CNN special report, with CC" in test-streams (Safari browser only)
// Ignore DTS when frame durations are irregular. Safari MSE does not handle this leading to gaps.
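// Note: the check below treats durations as irregular only when the PTS deltas are tighter than the DTS
// deltas and the largest DTS step exceeds roughly 40x the average (averageSampleDuration / maxDtsDelta < 0.025);
// durations are then rebuilt from PTS and cts is zeroed so Safari's MSE sees a gap-free timeline.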
if(maxPtsDelta-minPtsDelta<maxDtsDelta-minDtsDelta&&averageSampleDuration/maxDtsDelta<0.025&&outputSamples[0].cts===0){logger.warn('Found irregular gaps in sample duration. Using PTS instead of DTS to determine MP4 sample duration.');let dts=firstDTS;for(let i=0,len=outputSamples.length;i<len;i++){const nextDts=dts+outputSamples[i].duration;const pts=dts+outputSamples[i].cts;if(i<len-1){const nextPts=nextDts+outputSamples[i+1].cts;outputSamples[i].duration=nextPts-pts;}else {outputSamples[i].duration=i?outputSamples[i-1].duration:averageSampleDuration;}outputSamples[i].cts=0;dts=nextDts;}}}}// next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
mp4SampleDuration=stretchedLastFrame||!mp4SampleDuration?averageSampleDuration:mp4SampleDuration;this.nextAvcDts=nextAvcDts=lastDTS+mp4SampleDuration;this.videoSampleDuration=mp4SampleDuration;this.isVideoContiguous=true;const moof=MP4.moof(track.sequenceNumber++,firstDTS,_extends({},track,{samples:outputSamples}));const type='video';const data={data1:moof,data2:mdat,startPTS:minPTS/timeScale,endPTS:(maxPTS+mp4SampleDuration)/timeScale,startDTS:firstDTS/timeScale,endDTS:nextAvcDts/timeScale,type,hasAudio:false,hasVideo:true,nb:outputSamples.length,dropped:track.dropped};track.samples=[];track.dropped=0;return data;}getSamplesPerFrame(track){switch(track.segmentCodec){case'mp3':return MPEG_AUDIO_SAMPLE_PER_FRAME;case'ac3':return AC3_SAMPLES_PER_FRAME;default:return AAC_SAMPLES_PER_FRAME;}}remuxAudio(track,timeOffset,contiguous,accurateTimeOffset,videoTimeOffset){const inputTimeScale=track.inputTimeScale;const mp4timeScale=track.samplerate?track.samplerate:inputTimeScale;const scaleFactor=inputTimeScale/mp4timeScale;const mp4SampleDuration=this.getSamplesPerFrame(track);const inputSampleDuration=mp4SampleDuration*scaleFactor;const initPTS=this._initPTS;const rawMPEG=track.segmentCodec==='mp3'&&this.typeSupported.mpeg;const outputSamples=[];const alignedWithVideo=videoTimeOffset!==undefined;let inputSamples=track.samples;let offset=rawMPEG?0:8;let nextAudioPts=this.nextAudioPts||-1;// window.audioSamples ? window.audioSamples.push(inputSamples.map(s => s.pts)) : (window.audioSamples = [inputSamples.map(s => s.pts)]);
// for audio samples, also consider consecutive fragments as being contiguous (even if a level switch occurs),
// for sake of clarity:
// consecutive fragments are frags with
// - less than 100ms gaps between new time offset (if accurate) and next expected PTS OR
// - less than 20 audio frames distance
// contiguous fragments are consecutive fragments from same quality level (same level, new SN = old SN + 1)
// this helps to ensure audio continuity
// and this also avoids audio glitches/cut when switching quality, or reporting wrong duration on first audio frame
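// Note: concretely, the test below accepts the fragment as contiguous when the accurate time offset is within
// 9000 ticks of nextAudioPts (100 ms at the 90 kHz MPEG-TS timescale) or when the first sample lands within
// 20 * inputSampleDuration of it (for example, roughly 464 ms for 44.1 kHz AAC with 1024 samples per frame).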
const timeOffsetMpegTS=timeOffset*inputTimeScale;const initTime=initPTS.baseTime*inputTimeScale/initPTS.timescale;this.isAudioContiguous=contiguous=contiguous||inputSamples.length&&nextAudioPts>0&&(accurateTimeOffset&&Math.abs(timeOffsetMpegTS-nextAudioPts)<9000||Math.abs(normalizePts(inputSamples[0].pts-initTime,timeOffsetMpegTS)-nextAudioPts)<20*inputSampleDuration);// compute normalized PTS
inputSamples.forEach(function(sample){sample.pts=normalizePts(sample.pts-initTime,timeOffsetMpegTS);});if(!contiguous||nextAudioPts<0){// filter out sample with negative PTS that are not playable anyway
// if we don't remove these negative samples, they will shift all audio samples forward.
// leading to audio overlap between current / next fragment
inputSamples=inputSamples.filter(sample=>sample.pts>=0);// in case all samples have negative PTS, and have been filtered out, return now
if(!inputSamples.length){return;}if(videoTimeOffset===0){// Set the start to 0 to match video so that start gaps larger than inputSampleDuration are filled with silence
nextAudioPts=0;}else if(accurateTimeOffset&&!alignedWithVideo){// When not seeking, not live, and LevelDetails.PTSKnown, use fragment start as predicted next audio PTS
nextAudioPts=Math.max(0,timeOffsetMpegTS);}else {// if frags are not contiguous and if we cant trust time offset, let's use first sample PTS as next audio PTS
nextAudioPts=inputSamples[0].pts;}}// If the audio track is missing samples, the frames seem to get "left-shifted" within the
// resulting mp4 segment, causing sync issues and leaving gaps at the end of the audio segment.
// In an effort to prevent this from happening, we inject frames here where there are gaps.
// When possible, we inject a silent frame; when that's not possible, we duplicate the last
// frame.
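// Note: the loop below does exactly that: AAC.getSilentFrame() supplies a codec/channel-specific silent frame,
// and when none exists for the codec an existing frame's data is duplicated to fill the gap, trading a short
// audible repeat for correct timing.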
if(track.segmentCodec==='aac'){const maxAudioFramesDrift=this.config.maxAudioFramesDrift;for(let i=0,nextPts=nextAudioPts;i<inputSamples.length;i++){// First, let's see how far off this frame is from where we expect it to be
const sample=inputSamples[i];const pts=sample.pts;const delta=pts-nextPts;const duration=Math.abs(1000*delta/inputTimeScale);// When remuxing with video, if we're overlapping by more than a duration, drop this sample to stay in sync
if(delta<=-maxAudioFramesDrift*inputSampleDuration&&alignedWithVideo){if(i===0){logger.warn(`Audio frame @ ${(pts/inputTimeScale).toFixed(3)}s overlaps nextAudioPts by ${Math.round(1000*delta/inputTimeScale)} ms.`);this.nextAudioPts=nextAudioPts=nextPts=pts;}}// eslint-disable-line brace-style
// Insert missing frames if:
// 1: We're more than maxAudioFramesDrift frame away
// 2: Not more than MAX_SILENT_FRAME_DURATION away
// 3: currentTime (aka nextPtsNorm) is not 0
// 4: remuxing with video (videoTimeOffset !== undefined)
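// Note: for example, a gap of about three frame durations yields
// missing = Math.round(delta / inputSampleDuration) = 3 injected frames, and nextPts is pulled back to
// pts - 3 * inputSampleDuration so later samples are not shifted when delta is not an exact multiple of
// the frame duration.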
else if(delta>=maxAudioFramesDrift*inputSampleDuration&&duration<MAX_SILENT_FRAME_DURATION&&alignedWithVideo){let missing=Math.round(delta/inputSampleDuration);// Adjust nextPts so that silent samples are aligned with media pts. This will prevent media samples from
// later being shifted if nextPts is based on timeOffset and delta is not a multiple of inputSampleDuration.
nextPts=pts-missing*inputSampleDuration;if(nextPts<0){missing--;nextPts+=inputSampleDuration;}if(i===0){this.nextAudioPts=nextAudioPts=nextPts;}logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts/inputTimeScale).toFixed(3)}s due to ${Math.round(1000*delta/inputTimeScale)} ms gap.`);for(let j=0;j<missing;j++){const newStamp=Math.max(nextPts,0);let fillFrame=AAC.getSilentFrame(track.manifestCodec||track.codec,track.channelCount);if(!fillFrame){logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');fillFrame=sample.unit.subarray();}inputSamples.splice(i,0,{unit:fillFrame,pts:newStamp});nextPts+=inputSampleDuration;i++;}}sample.pts=nextPts;nextPts+=inputSampleDuration;}}let firstPTS=null;let lastPTS=null;let mdat;let mdatSize=0;let sampleLength=inputSamples.length;while(sampleLength--){mdatSize+=inputSamples[sampleLength].unit.byteLength;}for(let j=0,_nbSamples=inputSamples.length;j<_nbSamples;j++){const audioSample=inputSamples[j];const unit=audioSample.unit;let pts=audioSample.pts;if(lastPTS!==null){// If we have more than one sample, set the duration of the sample to the "real" duration; the PTS diff with
// the previous sample
const prevSample=outputSamples[j-1];prevSample.duration=Math.round((pts-lastPTS)/scaleFactor);}else {if(contiguous&&track.segmentCodec==='aac'){// set PTS/DTS to expected PTS/DTS
pts=nextAudioPts;}// remember first PTS of our audioSamples
firstPTS=pts;if(mdatSize>0){/* concatenate the audio data and construct the mdat in place
(need 8 more bytes to fill length and mdat type) */mdatSize+=offset;try{mdat=new Uint8Array(mdatSize);}catch(err){this.observer.emit(Events.ERROR,Events.ERROR,{type:ErrorTypes.MUX_ERROR,details:ErrorDetails.REMUX_ALLOC_ERROR,fatal:false,error:err,bytes:mdatSize,reason:`fail allocating audio mdat ${mdatSize}`});return;}if(!rawMPEG){const view=new DataView(mdat.buffer);view.setUint32(0,mdatSize);mdat.set(MP4.types.mdat,4);}}else {// no audio samples
return;}}mdat.set(unit,offset);const unitLen=unit.byteLength;offset+=unitLen;// Default the sample's duration to the computed mp4SampleDuration, which will either be 1024 for AAC or 1152 for MPEG
// In the case that we have 1 sample, this will be the duration. If we have more than one sample, the duration
// becomes the PTS diff with the previous sample
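// Note: mp4SampleDuration comes from getSamplesPerFrame(): 1024 samples per frame for AAC, 1152 for MP3 and
// 1536 for AC-3 (about 23.2 ms per AAC frame at 44.1 kHz).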
outputSamples.push(new Mp4Sample(true,mp4SampleDuration,unitLen,0));lastPTS=pts;}// We could end up with no audio samples if all input samples were overlapping with the previously remuxed ones
const nbSamples=outputSamples.length;if(!nbSamples){return;}// The next audio sample PTS should be equal to last sample PTS + duration
const lastSample=outputSamples[outputSamples.length-1];this.nextAudioPts=nextAudioPts=lastPTS+scaleFactor*lastSample.duration;// Set the track samples from inputSamples to outputSamples before remuxing
const moof=rawMPEG?new Uint8Array(0):MP4.moof(track.sequenceNumber++,firstPTS/scaleFactor,_extends({},track,{samples:outputSamples}));// Clear the track samples. This also clears the samples array in the demuxer, since the reference is shared
track.samples=[];const start=firstPTS/inputTimeScale;const end=nextAudioPts/inputTimeScale;const type='audio';const audioData={data1:moof,data2:mdat,startPTS:start,endPTS:end,startDTS:start,endDTS:end,type,hasAudio:true,hasVideo:false,nb:nbSamples};this.isAudioContiguous=true;return audioData;}remuxEmptyAudio(track,timeOffset,contiguous,videoData){const inputTimeScale=track.inputTimeScale;const mp4timeScale=track.samplerate?track.samplerate:inputTimeScale;const scaleFactor=inputTimeScale/mp4timeScale;const nextAudioPts=this.nextAudioPts;// sync with video's timestamp
const initDTS=this._initDTS;const init90kHz=initDTS.baseTime*90000/initDTS.timescale;const startDTS=(nextAudioPts!==null?nextAudioPts:videoData.startDTS*inputTimeScale)+init90kHz;const endDTS=videoData.endDTS*inputTimeScale+init90kHz;// one sample's duration value
const frameDuration=scaleFactor*AAC_SAMPLES_PER_FRAME;// samples count of this segment's duration
const nbSamples=Math.ceil((endDTS-startDTS)/frameDuration);// silent frame
const silentFrame=AAC.getSilentFrame(track.manifestCodec||track.codec,track.channelCount);logger.warn('[mp4-remuxer]: remux empty Audio');// Can't remux if we can't generate a silent frame...
if(!silentFrame){logger.trace('[mp4-remuxer]: Unable to remuxEmptyAudio since we were unable to get a silent frame for given audio codec');return;}const samples=[];for(let i=0;i<nbSamples;i++){const stamp=startDTS+i*frameDuration;samples.push({unit:silentFrame,pts:stamp,dts:stamp});}track.samples=samples;return this.remuxAudio(track,timeOffset,contiguous,false);}}function normalizePts(value,reference){let offset;if(reference===null){return value;}if(reference<value){// - 2^33
offset=-8589934592;}else {// + 2^33
offset=8589934592;}/* PTS is 33bit (from 0 to 2^33 -1)
if diff between value and reference is bigger than half of the amplitude (2^32) then it means that
PTS looping occured. fill the gap */while(Math.abs(value-reference)>4294967296){value+=offset;}return value;}function findKeyframeIndex(samples){for(let i=0;i<samples.length;i++){if(samples[i].key){return i;}}return -1;}function flushTextTrackMetadataCueSamples(track,timeOffset,initPTS,initDTS){const length=track.samples.length;if(!length){return;}const inputTimeScale=track.inputTimeScale;for(let index=0;index<length;index++){const sample=track.samples[index];// setting id3 pts, dts to relative time
// using this._initPTS and this._initDTS to calculate relative time
sample.pts=normalizePts(sample.pts-initPTS.baseTime*inputTimeScale/initPTS.timescale,timeOffset*inputTimeScale)/inputTimeScale;sample.dts=normalizePts(sample.dts-initDTS.baseTime*inputTimeScale/initDTS.timescale,timeOffset*inputTimeScale)/inputTimeScale;}const samples=track.samples;track.samples=[];return {samples};}function flushTextTrackUserdataCueSamples(track,timeOffset,initPTS){const length=track.samples.length;if(!length){return;}const inputTimeScale=track.inputTimeScale;for(let index=0;index<length;index++){const sample=track.samples[index];// setting text pts, dts to relative time
// using this._initPTS and this._initDTS to calculate relative time
sample.pts=normalizePts(sample.pts-initPTS.baseTime*inputTimeScale/initPTS.timescale,timeOffset*inputTimeScale)/inputTimeScale;}track.samples.sort((a,b)=>a.pts-b.pts);const samples=track.samples;track.samples=[];return {samples};}class Mp4Sample{constructor(isKeyframe,duration,size,cts){this.size=void 0;this.duration=void 0;this.cts=void 0;this.flags=void 0;this.duration=duration;this.size=size;this.cts=cts;this.flags={isLeading:0,isDependedOn:0,hasRedundancy:0,degradPrio:0,dependsOn:isKeyframe?2:1,isNonSync:isKeyframe?0:1};}}class PassThroughRemuxer{constructor(){this.emitInitSegment=false;this.audioCodec=void 0;this.videoCodec=void 0;this.initData=void 0;this.initPTS=null;this.initTracks=void 0;this.lastEndTime=null;}destroy(){}resetTimeStamp(defaultInitPTS){this.initPTS=defaultInitPTS;this.lastEndTime=null;}resetNextTimestamp(){this.lastEndTime=null;}resetInitSegment(initSegment,audioCodec,videoCodec,decryptdata){this.audioCodec=audioCodec;this.videoCodec=videoCodec;this.generateInitSegment(patchEncyptionData(initSegment,decryptdata));this.emitInitSegment=true;}generateInitSegment(initSegment){let{audioCodec,videoCodec}=this;if(!(initSegment!=null&&initSegment.byteLength)){this.initTracks=undefined;this.initData=undefined;return;}const initData=this.initData=parseInitSegment(initSegment);// Get codec from initSegment or fallback to default
if(initData.audio){audioCodec=getParsedTrackCodec(initData.audio,ElementaryStreamTypes.AUDIO);}if(initData.video){videoCodec=getParsedTrackCodec(initData.video,ElementaryStreamTypes.VIDEO);}const tracks={};if(initData.audio&&initData.video){tracks.audiovideo={container:'video/mp4',codec:audioCodec+','+videoCodec,initSegment,id:'main'};}else if(initData.audio){tracks.audio={container:'audio/mp4',codec:audioCodec,initSegment,id:'audio'};}else if(initData.video){tracks.video={container:'video/mp4',codec:videoCodec,initSegment,id:'main'};}else {logger.warn('[passthrough-remuxer.ts]: initSegment does not contain moov or trak boxes.');}this.initTracks=tracks;}remux(audioTrack,videoTrack,id3Track,textTrack,timeOffset,accurateTimeOffset){var _initData,_initData2;let{initPTS,lastEndTime}=this;const result={audio:undefined,video:undefined,text:textTrack,id3:id3Track,initSegment:undefined};// If we haven't yet set a lastEndDTS, or it was reset, set it to the provided timeOffset. We want to use the
// lastEndDTS over timeOffset whenever possible; during progressive playback, the media source will not update
// the media duration (which is what timeOffset is provided as) before we need to process the next chunk.
if(!isFiniteNumber(lastEndTime)){lastEndTime=this.lastEndTime=timeOffset||0;}// The binary segment data is added to the videoTrack in the mp4demuxer. We don't check to see if the data is only
// audio or video (or both); adding it to video was an arbitrary choice.
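// Note: in passthrough mode the fMP4 payload is forwarded untouched as data1; only timing is adjusted here
// (initPTS derived from the segment's start DTS, lastEndTime carried forward), so no per-sample remuxing
// takes place in this class.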
const data=videoTrack.samples;if(!(data!=null&&data.length)){return result;}const initSegment={initPTS:undefined,timescale:1};let initData=this.initData;if(!((_initData=initData)!=null&&_initData.length)){this.generateInitSegment(data);initData=this.initData;}if(!((_initData2=initData)!=null&&_initData2.length)){// We can't remux if the initSegment could not be generated
logger.warn('[passthrough-remuxer.ts]: Failed to generate initSegment.');return result;}if(this.emitInitSegment){initSegment.tracks=this.initTracks;this.emitInitSegment=false;}const duration=getDuration(data,initData);const startDTS=getStartDTS(initData,data);const decodeTime=startDTS===null?timeOffset:startDTS;if(isInvalidInitPts(initPTS,decodeTime,timeOffset,duration)||initSegment.timescale!==initPTS.timescale&&accurateTimeOffset){initSegment.initPTS=decodeTime-timeOffset;if(initPTS&&initPTS.timescale===1){logger.warn(`Adjusting initPTS by ${initSegment.initPTS-initPTS.baseTime}`);}this.initPTS=initPTS={baseTime:initSegment.initPTS,timescale:1};}const startTime=audioTrack?decodeTime-initPTS.baseTime/initPTS.timescale:lastEndTime;const endTime=startTime+duration;offsetStartDTS(initData,data,initPTS.baseTime/initPTS.timescale);if(duration>0){this.lastEndTime=endTime;}else {logger.warn('Duration parsed from mp4 should be greater than zero');this.resetNextTimestamp();}const hasAudio=!!initData.audio;const hasVideo=!!initData.video;let type='';if(hasAudio){type+='audio';}if(hasVideo){type+='video';}const track={data1:data,startPTS:startTime,startDTS:startTime,endPTS:endTime,endDTS:endTime,type,hasAudio,hasVideo,nb:1,dropped:0};result.audio=track.type==='audio'?track:undefined;result.video=track.type!=='audio'?track:undefined;result.initSegment=initSegment;result.id3=flushTextTrackMetadataCueSamples(id3Track,timeOffset,initPTS,initPTS);if(textTrack.samples.length){result.text=flushTextTrackUserdataCueSamples(textTrack,timeOffset,initPTS);}return result;}}function isInvalidInitPts(initPTS,startDTS,timeOffset,duration){if(initPTS===null){return true;}// InitPTS is invalid when distance from program would be more than segment duration or a minimum of one second
const minDuration=Math.max(duration,1);const startTime=startDTS-initPTS.baseTime/initPTS.timescale;return Math.abs(startTime-timeOffset)>minDuration;}function getParsedTrackCodec(track,type){const parsedCodec=track==null?void 0:track.codec;if(parsedCodec&&parsedCodec.length>4){return parsedCodec;}if(type===ElementaryStreamTypes.AUDIO){if(parsedCodec==='ec-3'||parsedCodec==='ac-3'||parsedCodec==='alac'){return parsedCodec;}if(parsedCodec==='fLaC'||parsedCodec==='Opus'){// Opting not to get `preferManagedMediaSource` from player config for isSupported() check for simplicity
const preferManagedMediaSource=false;return getCodecCompatibleName(parsedCodec,preferManagedMediaSource);}const result='mp4a.40.5';logger.info(`Parsed audio codec "${parsedCodec}" or audio object type not handled. Using "${result}"`);return result;}// Provide defaults based on codec type
// This allows for some playback of some fmp4 playlists without CODECS defined in manifest
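// Note: the fallbacks below are broadly compatible defaults: 'hvc1.1.6.L120.90' (HEVC Main),
// 'av01.0.04M.08' (AV1 Main, 8-bit) and 'avc1.42e01e' (H.264 Constrained Baseline 3.0); unknown audio
// falls back to 'mp4a.40.5' (HE-AAC) above.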
logger.warn(`Unhandled video codec "${parsedCodec}"`);if(parsedCodec==='hvc1'||parsedCodec==='hev1'){return 'hvc1.1.6.L120.90';}if(parsedCodec==='av01'){return 'av01.0.04M.08';}return 'avc1.42e01e';}let now;// performance.now() not available on WebWorker, at least on Safari Desktop
try{now=self.performance.now.bind(self.performance);}catch(err){logger.debug('Unable to use Performance API on this environment');now=optionalSelf==null?void 0:optionalSelf.Date.now;}const muxConfig=[{demux:MP4Demuxer,remux:PassThroughRemuxer},{demux:TSDemuxer,remux:MP4Remuxer},{demux:AACDemuxer,remux:MP4Remuxer},{demux:MP3Demuxer,remux:MP4Remuxer}];{muxConfig.splice(2,0,{demux:AC3Demuxer,remux:MP4Remuxer});}class Transmuxer{constructor(observer,typeSupported,config,vendor,id){this.async=false;this.observer=void 0;this.typeSupported=void 0;this.config=void 0;this.vendor=void 0;this.id=void 0;this.demuxer=void 0;this.remuxer=void 0;this.decrypter=void 0;this.probe=void 0;this.decryptionPromise=null;this.transmuxConfig=void 0;this.currentTransmuxState=void 0;this.observer=observer;this.typeSupported=typeSupported;this.config=config;this.vendor=vendor;this.id=id;}configure(transmuxConfig){this.transmuxConfig=transmuxConfig;if(this.decrypter){this.decrypter.reset();}}push(data,decryptdata,chunkMeta,state){const stats=chunkMeta.transmuxing;stats.executeStart=now();let uintData=new Uint8Array(data);const{currentTransmuxState,transmuxConfig}=this;if(state){this.currentTransmuxState=state;}const{contiguous,discontinuity,trackSwitch,accurateTimeOffset,timeOffset,initSegmentChange}=state||currentTransmuxState;const{audioCodec,videoCodec,defaultInitPts,duration,initSegmentData}=transmuxConfig;const keyData=getEncryptionType(uintData,decryptdata);if(keyData&&keyData.method==='AES-128'){const decrypter=this.getDecrypter();// Software decryption is synchronous; webCrypto is not
if(decrypter.isSync()){// Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
// data is handled in the flush() call
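// Note: for Low-Latency HLS parts (chunkMeta.part > -1) the software decrypter is flushed on every push so
// the partial payload can be demuxed progressively; for whole segments any remaining ciphertext is decrypted
// later in flush().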
let decryptedData=decrypter.softwareDecrypt(uintData,keyData.key.buffer,keyData.iv.buffer);// For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
const loadingParts=chunkMeta.part>-1;if(loadingParts){decryptedData=decrypter.flush();}if(!decryptedData){stats.executeEnd=now();return emptyResult(chunkMeta);}uintData=new Uint8Array(decryptedData);}else {this.decryptionPromise=decrypter.webCryptoDecrypt(uintData,keyData.key.buffer,keyData.iv.buffer).then(decryptedData=>{// Calling push here is important; if flush() is called while this is still resolving, this ensures that
// the decrypted data has been transmuxed
const result=this.push(decryptedData,null,chunkMeta);this.decryptionPromise=null;return result;});return this.decryptionPromise;}}const resetMuxers=this.needsProbing(discontinuity,trackSwitch);if(resetMuxers){const error=this.configureTransmuxer(uintData);if(error){logger.warn(`[transmuxer] ${error.message}`);this.observer.emit(Events.ERROR,Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.FRAG_PARSING_ERROR,fatal:false,error,reason:error.message});stats.executeEnd=now();return emptyResult(chunkMeta);}}if(discontinuity||trackSwitch||initSegmentChange||resetMuxers){this.resetInitSegment(initSegmentData,audioCodec,videoCodec,duration,decryptdata);}if(discontinuity||initSegmentChange||resetMuxers){this.resetInitialTimestamp(defaultInitPts);}if(!contiguous){this.resetContiguity();}const result=this.transmux(uintData,keyData,timeOffset,accurateTimeOffset,chunkMeta);const currentState=this.currentTransmuxState;currentState.contiguous=true;currentState.discontinuity=false;currentState.trackSwitch=false;stats.executeEnd=now();return result;}// Due to data caching, flush calls can produce more than one TransmuxerResult (hence the Array type)
flush(chunkMeta){const stats=chunkMeta.transmuxing;stats.executeStart=now();const{decrypter,currentTransmuxState,decryptionPromise}=this;if(decryptionPromise){// Upon resolution, the decryption promise calls push() and returns its TransmuxerResult up the stack. Therefore
// only flushing is required for async decryption
return decryptionPromise.then(()=>{return this.flush(chunkMeta);});}const transmuxResults=[];const{timeOffset}=currentTransmuxState;if(decrypter){// The decrypter may have data cached, which needs to be demuxed. In this case we'll have two TransmuxResults
// This happens in the case that we receive only 1 push call for a segment (either for non-progressive downloads,
// or for progressive downloads with small segments)
const decryptedData=decrypter.flush();if(decryptedData){// Push always returns a TransmuxerResult if decryptdata is null
transmuxResults.push(this.push(decryptedData,null,chunkMeta));}}const{demuxer,remuxer}=this;if(!demuxer||!remuxer){// If probing failed, then Hls.js has been given content its not able to handle
stats.executeEnd=now();return [emptyResult(chunkMeta)];}const demuxResultOrPromise=demuxer.flush(timeOffset);if(isPromise(demuxResultOrPromise)){// Decrypt final SAMPLE-AES samples
return demuxResultOrPromise.then(demuxResult=>{this.flushRemux(transmuxResults,demuxResult,chunkMeta);return transmuxResults;});}this.flushRemux(transmuxResults,demuxResultOrPromise,chunkMeta);return transmuxResults;}flushRemux(transmuxResults,demuxResult,chunkMeta){const{audioTrack,videoTrack,id3Track,textTrack}=demuxResult;const{accurateTimeOffset,timeOffset}=this.currentTransmuxState;logger.log(`[transmuxer.ts]: Flushed fragment ${chunkMeta.sn}${chunkMeta.part>-1?' p: '+chunkMeta.part:''} of level ${chunkMeta.level}`);const remuxResult=this.remuxer.remux(audioTrack,videoTrack,id3Track,textTrack,timeOffset,accurateTimeOffset,true,this.id);transmuxResults.push({remuxResult,chunkMeta});chunkMeta.transmuxing.executeEnd=now();}resetInitialTimestamp(defaultInitPts){const{demuxer,remuxer}=this;if(!demuxer||!remuxer){return;}demuxer.resetTimeStamp(defaultInitPts);remuxer.resetTimeStamp(defaultInitPts);}resetContiguity(){const{demuxer,remuxer}=this;if(!demuxer||!remuxer){return;}demuxer.resetContiguity();remuxer.resetNextTimestamp();}resetInitSegment(initSegmentData,audioCodec,videoCodec,trackDuration,decryptdata){const{demuxer,remuxer}=this;if(!demuxer||!remuxer){return;}demuxer.resetInitSegment(initSegmentData,audioCodec,videoCodec,trackDuration);remuxer.resetInitSegment(initSegmentData,audioCodec,videoCodec,decryptdata);}destroy(){if(this.demuxer){this.demuxer.destroy();this.demuxer=undefined;}if(this.remuxer){this.remuxer.destroy();this.remuxer=undefined;}}transmux(data,keyData,timeOffset,accurateTimeOffset,chunkMeta){let result;if(keyData&&keyData.method==='SAMPLE-AES'){result=this.transmuxSampleAes(data,keyData,timeOffset,accurateTimeOffset,chunkMeta);}else {result=this.transmuxUnencrypted(data,timeOffset,accurateTimeOffset,chunkMeta);}return result;}transmuxUnencrypted(data,timeOffset,accurateTimeOffset,chunkMeta){const{audioTrack,videoTrack,id3Track,textTrack}=this.demuxer.demux(data,timeOffset,false,!this.config.progressive);const remuxResult=this.remuxer.remux(audioTrack,videoTrack,id3Track,textTrack,timeOffset,accurateTimeOffset,false,this.id);return {remuxResult,chunkMeta};}transmuxSampleAes(data,decryptData,timeOffset,accurateTimeOffset,chunkMeta){return this.demuxer.demuxSampleAes(data,decryptData,timeOffset).then(demuxResult=>{const remuxResult=this.remuxer.remux(demuxResult.audioTrack,demuxResult.videoTrack,demuxResult.id3Track,demuxResult.textTrack,timeOffset,accurateTimeOffset,false,this.id);return {remuxResult,chunkMeta};});}configureTransmuxer(data){const{config,observer,typeSupported,vendor}=this;// probe for content type
let mux;for(let i=0,len=muxConfig.length;i<len;i++){var _muxConfig$i$demux;if((_muxConfig$i$demux=muxConfig[i].demux)!=null&&_muxConfig$i$demux.probe(data)){mux=muxConfig[i];break;}}if(!mux){return new Error('Failed to find demuxer by probing fragment data');}// so let's check that current remuxer and demuxer are still valid
const demuxer=this.demuxer;const remuxer=this.remuxer;const Remuxer=mux.remux;const Demuxer=mux.demux;if(!remuxer||!(remuxer instanceof Remuxer)){this.remuxer=new Remuxer(observer,config,typeSupported,vendor);}if(!demuxer||!(demuxer instanceof Demuxer)){this.demuxer=new Demuxer(observer,config,typeSupported);this.probe=Demuxer.probe;}}needsProbing(discontinuity,trackSwitch){// in case of continuity change, or track switch
// we might switch from content type (AAC container to TS container, or TS to fmp4 for example)
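// Note: probing walks the muxConfig array above in order: fmp4 (MP4Demuxer + PassThroughRemuxer) first, then
// MPEG-TS, AC-3, AAC and MP3 elementary streams, each paired with MP4Remuxer; the first demuxer whose probe()
// accepts the data is used.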
return !this.demuxer||!this.remuxer||discontinuity||trackSwitch;}getDecrypter(){let decrypter=this.decrypter;if(!decrypter){decrypter=this.decrypter=new Decrypter(this.config);}return decrypter;}}function getEncryptionType(data,decryptData){let encryptionType=null;if(data.byteLength>0&&(decryptData==null?void 0:decryptData.key)!=null&&decryptData.iv!==null&&decryptData.method!=null){encryptionType=decryptData;}return encryptionType;}const emptyResult=chunkMeta=>({remuxResult:{},chunkMeta});function isPromise(p){return 'then'in p&&p.then instanceof Function;}class TransmuxConfig{constructor(audioCodec,videoCodec,initSegmentData,duration,defaultInitPts){this.audioCodec=void 0;this.videoCodec=void 0;this.initSegmentData=void 0;this.duration=void 0;this.defaultInitPts=void 0;this.audioCodec=audioCodec;this.videoCodec=videoCodec;this.initSegmentData=initSegmentData;this.duration=duration;this.defaultInitPts=defaultInitPts||null;}}class TransmuxState{constructor(discontinuity,contiguous,accurateTimeOffset,trackSwitch,timeOffset,initSegmentChange){this.discontinuity=void 0;this.contiguous=void 0;this.accurateTimeOffset=void 0;this.trackSwitch=void 0;this.timeOffset=void 0;this.initSegmentChange=void 0;this.discontinuity=discontinuity;this.contiguous=contiguous;this.accurateTimeOffset=accurateTimeOffset;this.trackSwitch=trackSwitch;this.timeOffset=timeOffset;this.initSegmentChange=initSegmentChange;}}var eventemitter3={exports:{}};(function(module){var has=Object.prototype.hasOwnProperty,prefix='~';/**
* Constructor to create a storage for our `EE` objects.
* An `Events` instance is a plain object whose properties are event names.
*
* @constructor
* @private
*/function Events(){}//
// We try to not inherit from `Object.prototype`. In some engines creating an
// instance in this way is faster than calling `Object.create(null)` directly.
// If `Object.create(null)` is not supported we prefix the event names with a
// character to make sure that the built-in object properties are not
// overridden or used as an attack vector.
//
if(Object.create){Events.prototype=Object.create(null);//
// This hack is needed because the `__proto__` property is still inherited in
// some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5.
//
if(!new Events().__proto__)prefix=false;}/**
* Representation of a single event listener.
*
* @param {Function} fn The listener function.
* @param {*} context The context to invoke the listener with.
* @param {Boolean} [once=false] Specify if the listener is a one-time listener.
* @constructor
* @private
*/function EE(fn,context,once){this.fn=fn;this.context=context;this.once=once||false;}/**
* Add a listener for a given event.
*
* @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
* @param {(String|Symbol)} event The event name.
* @param {Function} fn The listener function.
* @param {*} context The context to invoke the listener with.
* @param {Boolean} once Specify if the listener is a one-time listener.
* @returns {EventEmitter}
* @private
*/function addListener(emitter,event,fn,context,once){if(typeof fn!=='function'){throw new TypeError('The listener must be a function');}var listener=new EE(fn,context||emitter,once),evt=prefix?prefix+event:event;if(!emitter._events[evt])emitter._events[evt]=listener,emitter._eventsCount++;else if(!emitter._events[evt].fn)emitter._events[evt].push(listener);else emitter._events[evt]=[emitter._events[evt],listener];return emitter;}/**
* Clear event by name.
*
* @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
* @param {(String|Symbol)} evt The Event name.
* @private
*/function clearEvent(emitter,evt){if(--emitter._eventsCount===0)emitter._events=new Events();else delete emitter._events[evt];}/**
* Minimal `EventEmitter` interface that is molded against the Node.js
* `EventEmitter` interface.
*
* @constructor
* @public
*/function EventEmitter(){this._events=new Events();this._eventsCount=0;}/**
* Return an array listing the events for which the emitter has registered
* listeners.
*
* @returns {Array}
* @public
*/EventEmitter.prototype.eventNames=function eventNames(){var names=[],events,name;if(this._eventsCount===0)return names;for(name in events=this._events){if(has.call(events,name))names.push(prefix?name.slice(1):name);}if(Object.getOwnPropertySymbols){return names.concat(Object.getOwnPropertySymbols(events));}return names;};/**
* Return the listeners registered for a given event.
*
* @param {(String|Symbol)} event The event name.
* @returns {Array} The registered listeners.
* @public
*/EventEmitter.prototype.listeners=function listeners(event){var evt=prefix?prefix+event:event,handlers=this._events[evt];if(!handlers)return [];if(handlers.fn)return [handlers.fn];for(var i=0,l=handlers.length,ee=new Array(l);i<l;i++){ee[i]=handlers[i].fn;}return ee;};/**
* Return the number of listeners listening to a given event.
*
* @param {(String|Symbol)} event The event name.
* @returns {Number} The number of listeners.
* @public
*/EventEmitter.prototype.listenerCount=function listenerCount(event){var evt=prefix?prefix+event:event,listeners=this._events[evt];if(!listeners)return 0;if(listeners.fn)return 1;return listeners.length;};/**
* Calls each of the listeners registered for a given event.
*
* @param {(String|Symbol)} event The event name.
* @returns {Boolean} `true` if the event had listeners, else `false`.
* @public
*/EventEmitter.prototype.emit=function emit(event,a1,a2,a3,a4,a5){var evt=prefix?prefix+event:event;if(!this._events[evt])return false;var listeners=this._events[evt],len=arguments.length,args,i;if(listeners.fn){if(listeners.once)this.removeListener(event,listeners.fn,undefined,true);switch(len){case 1:return listeners.fn.call(listeners.context),true;case 2:return listeners.fn.call(listeners.context,a1),true;case 3:return listeners.fn.call(listeners.context,a1,a2),true;case 4:return listeners.fn.call(listeners.context,a1,a2,a3),true;case 5:return listeners.fn.call(listeners.context,a1,a2,a3,a4),true;case 6:return listeners.fn.call(listeners.context,a1,a2,a3,a4,a5),true;}for(i=1,args=new Array(len-1);i<len;i++){args[i-1]=arguments[i];}listeners.fn.apply(listeners.context,args);}else {var length=listeners.length,j;for(i=0;i<length;i++){if(listeners[i].once)this.removeListener(event,listeners[i].fn,undefined,true);switch(len){case 1:listeners[i].fn.call(listeners[i].context);break;case 2:listeners[i].fn.call(listeners[i].context,a1);break;case 3:listeners[i].fn.call(listeners[i].context,a1,a2);break;case 4:listeners[i].fn.call(listeners[i].context,a1,a2,a3);break;default:if(!args)for(j=1,args=new Array(len-1);j<len;j++){args[j-1]=arguments[j];}listeners[i].fn.apply(listeners[i].context,args);}}}return true;};/**
* Add a listener for a given event.
*
* @param {(String|Symbol)} event The event name.
* @param {Function} fn The listener function.
* @param {*} [context=this] The context to invoke the listener with.
* @returns {EventEmitter} `this`.
* @public
*/EventEmitter.prototype.on=function on(event,fn,context){return addListener(this,event,fn,context,false);};/**
* Add a one-time listener for a given event.
*
* @param {(String|Symbol)} event The event name.
* @param {Function} fn The listener function.
* @param {*} [context=this] The context to invoke the listener with.
* @returns {EventEmitter} `this`.
* @public
*/EventEmitter.prototype.once=function once(event,fn,context){return addListener(this,event,fn,context,true);};/**
* Remove the listeners of a given event.
*
* @param {(String|Symbol)} event The event name.
* @param {Function} fn Only remove the listeners that match this function.
* @param {*} context Only remove the listeners that have this context.
* @param {Boolean} once Only remove one-time listeners.
* @returns {EventEmitter} `this`.
* @public
*/EventEmitter.prototype.removeListener=function removeListener(event,fn,context,once){var evt=prefix?prefix+event:event;if(!this._events[evt])return this;if(!fn){clearEvent(this,evt);return this;}var listeners=this._events[evt];if(listeners.fn){if(listeners.fn===fn&&(!once||listeners.once)&&(!context||listeners.context===context)){clearEvent(this,evt);}}else {for(var i=0,events=[],length=listeners.length;i<length;i++){if(listeners[i].fn!==fn||once&&!listeners[i].once||context&&listeners[i].context!==context){events.push(listeners[i]);}}//
// Reset the array, or remove it completely if we have no more listeners.
//
if(events.length)this._events[evt]=events.length===1?events[0]:events;else clearEvent(this,evt);}return this;};/**
* Remove all listeners, or those of the specified event.
*
* @param {(String|Symbol)} [event] The event name.
* @returns {EventEmitter} `this`.
* @public
*/EventEmitter.prototype.removeAllListeners=function removeAllListeners(event){var evt;if(event){evt=prefix?prefix+event:event;if(this._events[evt])clearEvent(this,evt);}else {this._events=new Events();this._eventsCount=0;}return this;};//
// Alias methods names because people roll like that.
//
EventEmitter.prototype.off=EventEmitter.prototype.removeListener;EventEmitter.prototype.addListener=EventEmitter.prototype.on;//
// Expose the prefix.
//
EventEmitter.prefixed=prefix;//
// Allow `EventEmitter` to be imported as module namespace.
//
EventEmitter.EventEmitter=EventEmitter;//
// Expose the module.
//
{module.exports=EventEmitter;}})(eventemitter3);var eventemitter3Exports=eventemitter3.exports;var EventEmitter=/*@__PURE__*/getDefaultExportFromCjs(eventemitter3Exports);class TransmuxerInterface{constructor(hls,id,onTransmuxComplete,onFlush){this.error=null;this.hls=void 0;this.id=void 0;this.observer=void 0;this.frag=null;this.part=null;this.useWorker=void 0;this.workerContext=null;this.onwmsg=void 0;this.transmuxer=null;this.onTransmuxComplete=void 0;this.onFlush=void 0;const config=hls.config;this.hls=hls;this.id=id;this.useWorker=!!config.enableWorker;this.onTransmuxComplete=onTransmuxComplete;this.onFlush=onFlush;const forwardMessage=(ev,data)=>{data=data||{};data.frag=this.frag;data.id=this.id;if(ev===Events.ERROR){this.error=data.error;}this.hls.trigger(ev,data);};// forward events to main thread
this.observer=new EventEmitter();this.observer.on(Events.FRAG_DECRYPTED,forwardMessage);this.observer.on(Events.ERROR,forwardMessage);const MediaSource=getMediaSource(config.preferManagedMediaSource)||{isTypeSupported:()=>false};const m2tsTypeSupported={mpeg:MediaSource.isTypeSupported('audio/mpeg'),mp3:MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),ac3:MediaSource.isTypeSupported('audio/mp4; codecs="ac-3"')};if(this.useWorker&&typeof Worker!=='undefined'){const canCreateWorker=config.workerPath||hasUMDWorker();if(canCreateWorker){try{if(config.workerPath){logger.log(`loading Web Worker ${config.workerPath} for "${id}"`);this.workerContext=loadWorker(config.workerPath);}else {logger.log(`injecting Web Worker for "${id}"`);this.workerContext=injectWorker();}this.onwmsg=event=>this.onWorkerMessage(event);const{worker}=this.workerContext;worker.addEventListener('message',this.onwmsg);worker.onerror=event=>{const error=new Error(`${event.message} (${event.filename}:${event.lineno})`);config.enableWorker=false;logger.warn(`Error in "${id}" Web Worker, fallback to inline`);this.hls.trigger(Events.ERROR,{type:ErrorTypes.OTHER_ERROR,details:ErrorDetails.INTERNAL_EXCEPTION,fatal:false,event:'demuxerWorker',error});};worker.postMessage({cmd:'init',typeSupported:m2tsTypeSupported,vendor:'',id:id,config:JSON.stringify(config)});}catch(err){logger.warn(`Error setting up "${id}" Web Worker, fallback to inline`,err);this.resetWorker();this.error=null;this.transmuxer=new Transmuxer(this.observer,m2tsTypeSupported,config,'',id);}return;}}this.transmuxer=new Transmuxer(this.observer,m2tsTypeSupported,config,'',id);}resetWorker(){if(this.workerContext){const{worker,objectURL}=this.workerContext;if(objectURL){// revoke the Object URL that was used to create transmuxer worker, so as not to leak it
self.URL.revokeObjectURL(objectURL);}worker.removeEventListener('message',this.onwmsg);worker.onerror=null;worker.terminate();this.workerContext=null;}}destroy(){if(this.workerContext){this.resetWorker();this.onwmsg=undefined;}else {const transmuxer=this.transmuxer;if(transmuxer){transmuxer.destroy();this.transmuxer=null;}}const observer=this.observer;if(observer){observer.removeAllListeners();}this.frag=null;// @ts-ignore
this.observer=null;// @ts-ignore
this.hls=null;}push(data,initSegmentData,audioCodec,videoCodec,frag,part,duration,accurateTimeOffset,chunkMeta,defaultInitPTS){var _frag$initSegment,_lastFrag$initSegment;chunkMeta.transmuxing.start=self.performance.now();const{transmuxer}=this;const timeOffset=part?part.start:frag.start;// TODO: push "clear-lead" decrypt data for unencrypted fragments in streams with encrypted ones
const decryptdata=frag.decryptdata;const lastFrag=this.frag;const discontinuity=!(lastFrag&&frag.cc===lastFrag.cc);const trackSwitch=!(lastFrag&&chunkMeta.level===lastFrag.level);const snDiff=lastFrag?chunkMeta.sn-lastFrag.sn:-1;const partDiff=this.part?chunkMeta.part-this.part.index:-1;const progressive=snDiff===0&&chunkMeta.id>1&&chunkMeta.id===(lastFrag==null?void 0:lastFrag.stats.chunkCount);const contiguous=!trackSwitch&&(snDiff===1||snDiff===0&&(partDiff===1||progressive&&partDiff<=0));const now=self.performance.now();if(trackSwitch||snDiff||frag.stats.parsing.start===0){frag.stats.parsing.start=now;}if(part&&(partDiff||!contiguous)){part.stats.parsing.start=now;}const initSegmentChange=!(lastFrag&&((_frag$initSegment=frag.initSegment)==null?void 0:_frag$initSegment.url)===((_lastFrag$initSegment=lastFrag.initSegment)==null?void 0:_lastFrag$initSegment.url));const state=new TransmuxState(discontinuity,contiguous,accurateTimeOffset,trackSwitch,timeOffset,initSegmentChange);if(!contiguous||discontinuity||initSegmentChange){logger.log(`[transmuxer-interface, ${frag.type}]: Starting new transmux session for sn: ${chunkMeta.sn} p: ${chunkMeta.part} level: ${chunkMeta.level} id: ${chunkMeta.id}
discontinuity: ${discontinuity}
trackSwitch: ${trackSwitch}
contiguous: ${contiguous}
accurateTimeOffset: ${accurateTimeOffset}
timeOffset: ${timeOffset}
initSegmentChange: ${initSegmentChange}`);const config=new TransmuxConfig(audioCodec,videoCodec,initSegmentData,duration,defaultInitPTS);this.configureTransmuxer(config);}this.frag=frag;this.part=part;// Frags with sn of 'initSegment' are not transmuxed
if(this.workerContext){// post fragment payload as transferable objects for ArrayBuffer (no copy)
this.workerContext.worker.postMessage({cmd:'demux',data,decryptdata,chunkMeta,state},data instanceof ArrayBuffer?[data]:[]);}else if(transmuxer){const transmuxResult=transmuxer.push(data,decryptdata,chunkMeta,state);if(isPromise(transmuxResult)){transmuxer.async=true;transmuxResult.then(data=>{this.handleTransmuxComplete(data);}).catch(error=>{this.transmuxerError(error,chunkMeta,'transmuxer-interface push error');});}else {transmuxer.async=false;this.handleTransmuxComplete(transmuxResult);}}}flush(chunkMeta){chunkMeta.transmuxing.start=self.performance.now();const{transmuxer}=this;if(this.workerContext){this.workerContext.worker.postMessage({cmd:'flush',chunkMeta});}else if(transmuxer){let transmuxResult=transmuxer.flush(chunkMeta);const asyncFlush=isPromise(transmuxResult);if(asyncFlush||transmuxer.async){if(!isPromise(transmuxResult)){transmuxResult=Promise.resolve(transmuxResult);}transmuxResult.then(data=>{this.handleFlushResult(data,chunkMeta);}).catch(error=>{this.transmuxerError(error,chunkMeta,'transmuxer-interface flush error');});}else {this.handleFlushResult(transmuxResult,chunkMeta);}}}transmuxerError(error,chunkMeta,reason){if(!this.hls){return;}this.error=error;this.hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.FRAG_PARSING_ERROR,chunkMeta,frag:this.frag||undefined,fatal:false,error,err:error,reason});}handleFlushResult(results,chunkMeta){results.forEach(result=>{this.handleTransmuxComplete(result);});this.onFlush(chunkMeta);}onWorkerMessage(event){const data=event.data;if(!(data!=null&&data.event)){logger.warn(`worker message received with no ${data?'event name':'data'}`);return;}const hls=this.hls;if(!this.hls){return;}switch(data.event){case'init':{var _this$workerContext;const objectURL=(_this$workerContext=this.workerContext)==null?void 0:_this$workerContext.objectURL;if(objectURL){// revoke the Object URL that was used to create transmuxer worker, so as not to leak it
self.URL.revokeObjectURL(objectURL);}break;}case'transmuxComplete':{this.handleTransmuxComplete(data.data);break;}case'flush':{this.onFlush(data.data);break;}// pass logs from the worker thread to the main logger
case'workerLog':if(logger[data.data.logType]){logger[data.data.logType](data.data.message);}break;default:{data.data=data.data||{};data.data.frag=this.frag;data.data.id=this.id;hls.trigger(data.event,data.data);break;}}}configureTransmuxer(config){const{transmuxer}=this;if(this.workerContext){this.workerContext.worker.postMessage({cmd:'configure',config});}else if(transmuxer){transmuxer.configure(config);}}handleTransmuxComplete(result){result.chunkMeta.transmuxing.end=self.performance.now();this.onTransmuxComplete(result);}}function subtitleOptionsIdentical(trackList1,trackList2){if(trackList1.length!==trackList2.length){return false;}for(let i=0;i<trackList1.length;i++){if(!mediaAttributesIdentical(trackList1[i].attrs,trackList2[i].attrs)){return false;}}return true;}function mediaAttributesIdentical(attrs1,attrs2,customAttributes){// Media options with the same rendition ID must be bit identical
const stableRenditionId=attrs1['STABLE-RENDITION-ID'];if(stableRenditionId&&!customAttributes){return stableRenditionId===attrs2['STABLE-RENDITION-ID'];}// When rendition ID is not present, compare attributes
return !(customAttributes||['LANGUAGE','NAME','CHARACTERISTICS','AUTOSELECT','DEFAULT','FORCED','ASSOC-LANGUAGE']).some(subtitleAttribute=>attrs1[subtitleAttribute]!==attrs2[subtitleAttribute]);}function subtitleTrackMatchesTextTrack(subtitleTrack,textTrack){return textTrack.label.toLowerCase()===subtitleTrack.name.toLowerCase()&&(!textTrack.language||textTrack.language.toLowerCase()===(subtitleTrack.lang||'').toLowerCase());}const TICK_INTERVAL$2=100;// how often to tick in ms
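// Note: AudioStreamController below drives loading of alternate audio renditions. It ticks every
// TICK_INTERVAL$2 (100 ms) and parks in State.WAITING_INIT_PTS until the main stream reports INIT_PTS_FOUND
// for the fragment's discontinuity counter, keeping audio aligned with video before any audio fragment is
// transmuxed.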
class AudioStreamController extends BaseStreamController{constructor(hls,fragmentTracker,keyLoader){super(hls,fragmentTracker,keyLoader,'[audio-stream-controller]',PlaylistLevelType.AUDIO);this.videoBuffer=null;this.videoTrackCC=-1;this.waitingVideoCC=-1;this.bufferedTrack=null;this.switchingTrack=null;this.trackId=-1;this.waitingData=null;this.mainDetails=null;this.flushing=false;this.bufferFlushed=false;this.cachedTrackLoadedData=null;this._registerListeners();}onHandlerDestroying(){this._unregisterListeners();super.onHandlerDestroying();this.mainDetails=null;this.bufferedTrack=null;this.switchingTrack=null;}_registerListeners(){const{hls}=this;hls.on(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.on(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.LEVEL_LOADED,this.onLevelLoaded,this);hls.on(Events.AUDIO_TRACKS_UPDATED,this.onAudioTracksUpdated,this);hls.on(Events.AUDIO_TRACK_SWITCHING,this.onAudioTrackSwitching,this);hls.on(Events.AUDIO_TRACK_LOADED,this.onAudioTrackLoaded,this);hls.on(Events.ERROR,this.onError,this);hls.on(Events.BUFFER_RESET,this.onBufferReset,this);hls.on(Events.BUFFER_CREATED,this.onBufferCreated,this);hls.on(Events.BUFFER_FLUSHING,this.onBufferFlushing,this);hls.on(Events.BUFFER_FLUSHED,this.onBufferFlushed,this);hls.on(Events.INIT_PTS_FOUND,this.onInitPtsFound,this);hls.on(Events.FRAG_BUFFERED,this.onFragBuffered,this);}_unregisterListeners(){const{hls}=this;hls.off(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.off(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.LEVEL_LOADED,this.onLevelLoaded,this);hls.off(Events.AUDIO_TRACKS_UPDATED,this.onAudioTracksUpdated,this);hls.off(Events.AUDIO_TRACK_SWITCHING,this.onAudioTrackSwitching,this);hls.off(Events.AUDIO_TRACK_LOADED,this.onAudioTrackLoaded,this);hls.off(Events.ERROR,this.onError,this);hls.off(Events.BUFFER_RESET,this.onBufferReset,this);hls.off(Events.BUFFER_CREATED,this.onBufferCreated,this);hls.off(Events.BUFFER_FLUSHING,this.onBufferFlushing,this);hls.off(Events.BUFFER_FLUSHED,this.onBufferFlushed,this);hls.off(Events.INIT_PTS_FOUND,this.onInitPtsFound,this);hls.off(Events.FRAG_BUFFERED,this.onFragBuffered,this);}// INIT_PTS_FOUND is triggered when the video track parsed in the stream-controller has a new PTS value
onInitPtsFound(event,{frag,id,initPTS,timescale}){// Always update the new INIT PTS
// Can change due to level switch
if(id==='main'){const cc=frag.cc;this.initPTS[frag.cc]={baseTime:initPTS,timescale};this.log(`InitPTS for cc: ${cc} found from main: ${initPTS}`);this.videoTrackCC=cc;// If we are waiting, tick immediately to unblock audio fragment transmuxing
if(this.state===State.WAITING_INIT_PTS){this.tick();}}}startLoad(startPosition){if(!this.levels){this.startPosition=startPosition;this.state=State.STOPPED;return;}const lastCurrentTime=this.lastCurrentTime;this.stopLoad();this.setInterval(TICK_INTERVAL$2);if(lastCurrentTime>0&&startPosition===-1){this.log(`Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(3)}`);startPosition=lastCurrentTime;this.state=State.IDLE;}else {this.loadedmetadata=false;this.state=State.WAITING_TRACK;}this.nextLoadPosition=this.startPosition=this.lastCurrentTime=startPosition;this.tick();}doTick(){switch(this.state){case State.IDLE:this.doTickIdle();break;case State.WAITING_TRACK:{var _levels$trackId;const{levels,trackId}=this;const details=levels==null?void 0:(_levels$trackId=levels[trackId])==null?void 0:_levels$trackId.details;if(details){if(this.waitForCdnTuneIn(details)){break;}this.state=State.WAITING_INIT_PTS;}break;}case State.FRAG_LOADING_WAITING_RETRY:{var _this$media;const now=performance.now();const retryDate=this.retryDate;// if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
if(!retryDate||now>=retryDate||(_this$media=this.media)!=null&&_this$media.seeking){const{levels,trackId}=this;this.log('RetryDate reached, switch back to IDLE state');this.resetStartWhenNotLoaded((levels==null?void 0:levels[trackId])||null);this.state=State.IDLE;}break;}case State.WAITING_INIT_PTS:{// Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
const waitingData=this.waitingData;if(waitingData){const{frag,part,cache,complete}=waitingData;if(this.initPTS[frag.cc]!==undefined){this.waitingData=null;this.waitingVideoCC=-1;this.state=State.FRAG_LOADING;const payload=cache.flush();const data={frag,part,payload,networkDetails:null};this._handleFragmentLoadProgress(data);if(complete){super._handleFragmentLoadComplete(data);}}else if(this.videoTrackCC!==this.waitingVideoCC){// Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
this.log(`Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${this.videoTrackCC}`);this.clearWaitingFragment();}else {// Drop waiting fragment if an earlier fragment is needed
const pos=this.getLoadPosition();const bufferInfo=BufferHelper.bufferInfo(this.mediaBuffer,pos,this.config.maxBufferHole);const waitingFragmentAtPosition=fragmentWithinToleranceTest(bufferInfo.end,this.config.maxFragLookUpTolerance,frag);if(waitingFragmentAtPosition<0){this.log(`Waiting fragment cc (${frag.cc}) @ ${frag.start} cancelled because another fragment at ${bufferInfo.end} is needed`);this.clearWaitingFragment();}}}else {this.state=State.IDLE;}}}this.onTickEnd();}clearWaitingFragment(){const waitingData=this.waitingData;if(waitingData){this.fragmentTracker.removeFragment(waitingData.frag);this.waitingData=null;this.waitingVideoCC=-1;this.state=State.IDLE;}}resetLoadingState(){this.clearWaitingFragment();super.resetLoadingState();}onTickEnd(){const{media}=this;if(!(media!=null&&media.readyState)){// Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
return;}this.lastCurrentTime=media.currentTime;}doTickIdle(){const{hls,levels,media,trackId}=this;const config=hls.config;// 1. if video not attached AND
// start fragment already requested OR start frag prefetch not enabled
// 2. if tracks or track not loaded and selected
// then exit loop
// => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop
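// Note: expressed as code, the early return below is
// (!media && (startFragRequested || !config.startFragPrefetch)) || !levels?.[trackId],
// i.e. bail out unless start-fragment prefetch is allowed before media attach, and always bail while the
// selected audio track has no level entry yet.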
if(!media&&(this.startFragRequested||!config.startFragPrefetch)||!(levels!=null&&levels[trackId])){return;}const levelInfo=levels[trackId];const trackDetails=levelInfo.details;if(!trackDetails||trackDetails.live&&this.levelLastLoaded!==levelInfo||this.waitForCdnTuneIn(trackDetails)){this.state=State.WAITING_TRACK;return;}const bufferable=this.mediaBuffer?this.mediaBuffer:this.media;if(this.bufferFlushed&&bufferable){this.bufferFlushed=false;this.afterBufferFlushed(bufferable,ElementaryStreamTypes.AUDIO,PlaylistLevelType.AUDIO);}const bufferInfo=this.getFwdBufferInfo(bufferable,PlaylistLevelType.AUDIO);if(bufferInfo===null){return;}const{bufferedTrack,switchingTrack}=this;if(!switchingTrack&&this._streamEnded(bufferInfo,trackDetails)){hls.trigger(Events.BUFFER_EOS,{type:'audio'});this.state=State.ENDED;return;}const mainBufferInfo=this.getFwdBufferInfo(this.videoBuffer?this.videoBuffer:this.media,PlaylistLevelType.MAIN);const bufferLen=bufferInfo.len;const maxBufLen=this.getMaxBufferLength(mainBufferInfo==null?void 0:mainBufferInfo.len);const fragments=trackDetails.fragments;const start=fragments[0].start;let targetBufferTime=this.flushing?this.getLoadPosition():bufferInfo.end;if(switchingTrack&&media){const pos=this.getLoadPosition();// STABLE
if(bufferedTrack&&!mediaAttributesIdentical(switchingTrack.attrs,bufferedTrack.attrs)){targetBufferTime=pos;}// if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
if(trackDetails.PTSKnown&&pos<start){// if everything is buffered from pos to start or if audio buffer upfront, let's seek to start
if(bufferInfo.end>start||bufferInfo.nextStart){this.log('Alt audio track ahead of main track, seek to start of alt audio track');media.currentTime=start+0.05;}}}// if buffer length is less than maxBufLen, or near the end, find a fragment to load
if(bufferLen>=maxBufLen&&!switchingTrack&&targetBufferTime<fragments[fragments.length-1].start){return;}let frag=this.getNextFragment(targetBufferTime,trackDetails);let atGap=false;// Avoid loop loading by using nextLoadPosition set for backtracking and skipping consecutive GAP tags
if(frag&&this.isLoopLoading(frag,targetBufferTime)){atGap=!!frag.gap;frag=this.getNextFragmentLoopLoading(frag,trackDetails,bufferInfo,PlaylistLevelType.MAIN,maxBufLen);}if(!frag){this.bufferFlushed=true;return;}// Buffer audio up to one target duration ahead of main buffer
const atBufferSyncLimit=mainBufferInfo&&frag.start>mainBufferInfo.end+trackDetails.targetduration;if(atBufferSyncLimit||// Or wait for main buffer after buffing some audio
!(mainBufferInfo!=null&&mainBufferInfo.len)&&bufferInfo.len){// Check fragment-tracker for main fragments since GAP segments do not show up in bufferInfo
const mainFrag=this.getAppendedFrag(frag.start,PlaylistLevelType.MAIN);if(mainFrag===null){return;}// Bridge gaps in main buffer
atGap||(atGap=!!mainFrag.gap||!!atBufferSyncLimit&&mainBufferInfo.len===0);if(atBufferSyncLimit&&!atGap||atGap&&bufferInfo.nextStart&&bufferInfo.nextStart<mainFrag.end){return;}}this.loadFragment(frag,levelInfo,targetBufferTime);}getMaxBufferLength(mainBufferLength){const maxConfigBuffer=super.getMaxBufferLength();if(!mainBufferLength){return maxConfigBuffer;}return Math.min(Math.max(maxConfigBuffer,mainBufferLength),this.config.maxMaxBufferLength);}onMediaDetaching(){this.videoBuffer=null;this.bufferFlushed=this.flushing=false;super.onMediaDetaching();}onAudioTracksUpdated(event,{audioTracks}){// Reset tranxmuxer is essential for large context switches (Content Steering)
this.resetTransmuxer();this.levels=audioTracks.map(mediaPlaylist=>new Level(mediaPlaylist));}onAudioTrackSwitching(event,data){// if any URL found on new audio track, it is an alternate audio track
const altAudio=!!data.url;this.trackId=data.id;const{fragCurrent}=this;if(fragCurrent){fragCurrent.abortRequests();this.removeUnbufferedFrags(fragCurrent.start);}this.resetLoadingState();// destroy useless transmuxer when switching audio to main
if(!altAudio){this.resetTransmuxer();}else {// switching to audio track, start timer if not already started
this.setInterval(TICK_INTERVAL$2);}// should we switch tracks ?
if(altAudio){this.switchingTrack=data;// main audio tracks are handled by the stream-controller, only do something here when switching to an alt audio track
this.state=State.IDLE;this.flushAudioIfNeeded(data);}else {this.switchingTrack=null;this.bufferedTrack=data;this.state=State.STOPPED;}this.tick();}onManifestLoading(){this.fragmentTracker.removeAllFragments();this.startPosition=this.lastCurrentTime=0;this.bufferFlushed=this.flushing=false;this.levels=this.mainDetails=this.waitingData=this.bufferedTrack=this.cachedTrackLoadedData=this.switchingTrack=null;this.startFragRequested=false;this.trackId=this.videoTrackCC=this.waitingVideoCC=-1;}onLevelLoaded(event,data){this.mainDetails=data.details;if(this.cachedTrackLoadedData!==null){this.hls.trigger(Events.AUDIO_TRACK_LOADED,this.cachedTrackLoadedData);this.cachedTrackLoadedData=null;}}onAudioTrackLoaded(event,data){var _track$details;if(this.mainDetails==null){this.cachedTrackLoadedData=data;return;}const{levels}=this;const{details:newDetails,id:trackId}=data;if(!levels){this.warn(`Audio tracks were reset while loading level ${trackId}`);return;}this.log(`Audio track ${trackId} loaded [${newDetails.startSN},${newDetails.endSN}]${newDetails.lastPartSn?`[part-${newDetails.lastPartSn}-${newDetails.lastPartIndex}]`:''},duration:${newDetails.totalduration}`);const track=levels[trackId];let sliding=0;if(newDetails.live||(_track$details=track.details)!=null&&_track$details.live){this.checkLiveUpdate(newDetails);const mainDetails=this.mainDetails;if(newDetails.deltaUpdateFailed||!mainDetails){return;}if(!track.details&&newDetails.hasProgramDateTime&&mainDetails.hasProgramDateTime){// Make sure our audio rendition is aligned with the "main" rendition, using
// pdt as our reference times.
alignMediaPlaylistByPDT(newDetails,mainDetails);sliding=newDetails.fragments[0].start;}else {var _this$levelLastLoaded;sliding=this.alignPlaylists(newDetails,track.details,(_this$levelLastLoaded=this.levelLastLoaded)==null?void 0:_this$levelLastLoaded.details);}}track.details=newDetails;this.levelLastLoaded=track;// compute start position if we are aligned with the main playlist
if(!this.startFragRequested&&(this.mainDetails||!newDetails.live)){this.setStartPosition(this.mainDetails||newDetails,sliding);}// only switch back to IDLE state if we were waiting for track to start downloading a new fragment
if(this.state===State.WAITING_TRACK&&!this.waitForCdnTuneIn(newDetails)){this.state=State.IDLE;}// trigger handler right now
this.tick();}_handleFragmentLoadProgress(data){var _frag$initSegment;const{frag,part,payload}=data;const{config,trackId,levels}=this;if(!levels){this.warn(`Audio tracks were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`);return;}const track=levels[trackId];if(!track){this.warn('Audio track is undefined on fragment load progress');return;}const details=track.details;if(!details){this.warn('Audio track details undefined on fragment load progress');this.removeUnbufferedFrags(frag.start);return;}const audioCodec=config.defaultAudioCodec||track.audioCodec||'mp4a.40.2';let transmuxer=this.transmuxer;if(!transmuxer){transmuxer=this.transmuxer=new TransmuxerInterface(this.hls,PlaylistLevelType.AUDIO,this._handleTransmuxComplete.bind(this),this._handleTransmuxerFlush.bind(this));}// Check if we have video initPTS
// If not we need to wait for it
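// Note on the flow below: initPTS[frag.cc] is produced from the main (video) pipeline.
// Until it is known, the raw audio payload is parked in `waitingData` (backed by a
// ChunkCache) and the controller moves to State.WAITING_INIT_PTS, so audio is never
// demuxed against an unknown video timeline.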
const initPTS=this.initPTS[frag.cc];const initSegmentData=(_frag$initSegment=frag.initSegment)==null?void 0:_frag$initSegment.data;if(initPTS!==undefined){// this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
// time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
const accurateTimeOffset=false;// details.PTSKnown || !details.live;
const partIndex=part?part.index:-1;const partial=partIndex!==-1;const chunkMeta=new ChunkMetadata(frag.level,frag.sn,frag.stats.chunkCount,payload.byteLength,partIndex,partial);transmuxer.push(payload,initSegmentData,audioCodec,'',frag,part,details.totalduration,accurateTimeOffset,chunkMeta,initPTS);}else {this.log(`Unknown video PTS for cc ${frag.cc}, waiting for video PTS before demuxing audio frag ${frag.sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);const{cache}=this.waitingData=this.waitingData||{frag,part,cache:new ChunkCache(),complete:false};cache.push(new Uint8Array(payload));this.waitingVideoCC=this.videoTrackCC;this.state=State.WAITING_INIT_PTS;}}_handleFragmentLoadComplete(fragLoadedData){if(this.waitingData){this.waitingData.complete=true;return;}super._handleFragmentLoadComplete(fragLoadedData);}onBufferReset(/* event: Events.BUFFER_RESET */){// reset reference to sourcebuffers
this.mediaBuffer=this.videoBuffer=null;this.loadedmetadata=false;}onBufferCreated(event,data){const audioTrack=data.tracks.audio;if(audioTrack){this.mediaBuffer=audioTrack.buffer||null;}if(data.tracks.video){this.videoBuffer=data.tracks.video.buffer||null;}}onFragBuffered(event,data){const{frag,part}=data;if(frag.type!==PlaylistLevelType.AUDIO){if(!this.loadedmetadata&&frag.type===PlaylistLevelType.MAIN){const bufferable=this.videoBuffer||this.media;if(bufferable){const bufferedTimeRanges=BufferHelper.getBuffered(bufferable);if(bufferedTimeRanges.length){this.loadedmetadata=true;}}}return;}if(this.fragContextChanged(frag)){// If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
// Avoid setting state back to IDLE or concluding the audio switch; otherwise, the switched-to track will not buffer
this.warn(`Fragment ${frag.sn}${part?' p: '+part.index:''} of level ${frag.level} finished buffering, but was aborted. state: ${this.state}, audioSwitch: ${this.switchingTrack?this.switchingTrack.name:'false'}`);return;}if(frag.sn!=='initSegment'){this.fragPrevious=frag;const track=this.switchingTrack;if(track){this.bufferedTrack=track;this.switchingTrack=null;this.hls.trigger(Events.AUDIO_TRACK_SWITCHED,_objectSpread2({},track));}}this.fragBufferedComplete(frag,part);}onError(event,data){var _data$context;if(data.fatal){this.state=State.ERROR;return;}switch(data.details){case ErrorDetails.FRAG_GAP:case ErrorDetails.FRAG_PARSING_ERROR:case ErrorDetails.FRAG_DECRYPT_ERROR:case ErrorDetails.FRAG_LOAD_ERROR:case ErrorDetails.FRAG_LOAD_TIMEOUT:case ErrorDetails.KEY_LOAD_ERROR:case ErrorDetails.KEY_LOAD_TIMEOUT:this.onFragmentOrKeyLoadError(PlaylistLevelType.AUDIO,data);break;case ErrorDetails.AUDIO_TRACK_LOAD_ERROR:case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:case ErrorDetails.LEVEL_PARSING_ERROR:// in case of non fatal error while loading track, if not retrying to load track, switch back to IDLE
if(!data.levelRetry&&this.state===State.WAITING_TRACK&&((_data$context=data.context)==null?void 0:_data$context.type)===PlaylistContextType.AUDIO_TRACK){this.state=State.IDLE;}break;case ErrorDetails.BUFFER_APPEND_ERROR:case ErrorDetails.BUFFER_FULL_ERROR:if(!data.parent||data.parent!=='audio'){return;}if(data.details===ErrorDetails.BUFFER_APPEND_ERROR){this.resetLoadingState();return;}if(this.reduceLengthAndFlushBuffer(data)){this.bufferedTrack=null;super.flushMainBuffer(0,Number.POSITIVE_INFINITY,'audio');}break;case ErrorDetails.INTERNAL_EXCEPTION:this.recoverWorkerError(data);break;}}onBufferFlushing(event,{type}){if(type!==ElementaryStreamTypes.VIDEO){this.flushing=true;}}onBufferFlushed(event,{type}){if(type!==ElementaryStreamTypes.VIDEO){this.flushing=false;this.bufferFlushed=true;if(this.state===State.ENDED){this.state=State.IDLE;}const mediaBuffer=this.mediaBuffer||this.media;if(mediaBuffer){this.afterBufferFlushed(mediaBuffer,type,PlaylistLevelType.AUDIO);this.tick();}}}_handleTransmuxComplete(transmuxResult){var _id3$samples;const id='audio';const{hls}=this;const{remuxResult,chunkMeta}=transmuxResult;const context=this.getCurrentContext(chunkMeta);if(!context){this.resetWhenMissingContext(chunkMeta);return;}const{frag,part,level}=context;const{details}=level;const{audio,text,id3,initSegment}=remuxResult;// Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
// If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
if(this.fragContextChanged(frag)||!details){this.fragmentTracker.removeFragment(frag);return;}this.state=State.PARSING;if(this.switchingTrack&&audio){this.completeAudioSwitch(this.switchingTrack);}if(initSegment!=null&&initSegment.tracks){const mapFragment=frag.initSegment||frag;this._bufferInitSegment(level,initSegment.tracks,mapFragment,chunkMeta);hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT,{frag:mapFragment,id,tracks:initSegment.tracks});// Only flush audio from old audio tracks when PTS is known on new audio track
}if(audio){const{startPTS,endPTS,startDTS,endDTS}=audio;if(part){part.elementaryStreams[ElementaryStreamTypes.AUDIO]={startPTS,endPTS,startDTS,endDTS};}frag.setElementaryStreamInfo(ElementaryStreamTypes.AUDIO,startPTS,endPTS,startDTS,endDTS);this.bufferFragmentData(audio,frag,part,chunkMeta);}if(id3!=null&&(_id3$samples=id3.samples)!=null&&_id3$samples.length){const emittedID3=_extends({id,frag,details},id3);hls.trigger(Events.FRAG_PARSING_METADATA,emittedID3);}if(text){const emittedText=_extends({id,frag,details},text);hls.trigger(Events.FRAG_PARSING_USERDATA,emittedText);}}_bufferInitSegment(currentLevel,tracks,frag,chunkMeta){if(this.state!==State.PARSING){return;}// delete any video track found on audio transmuxer
if(tracks.video){delete tracks.video;}// include levelCodec in audio and video tracks
const track=tracks.audio;if(!track){return;}track.id='audio';const variantAudioCodecs=currentLevel.audioCodec;this.log(`Init audio buffer, container:${track.container}, codecs[level/parsed]=[${variantAudioCodecs}/${track.codec}]`);// SourceBuffer will use track.levelCodec if defined
if(variantAudioCodecs&&variantAudioCodecs.split(',').length===1){track.levelCodec=variantAudioCodecs;}this.hls.trigger(Events.BUFFER_CODECS,tracks);const initSegment=track.initSegment;if(initSegment!=null&&initSegment.byteLength){const segment={type:'audio',frag,part:null,chunkMeta,parent:frag.type,data:initSegment};this.hls.trigger(Events.BUFFER_APPENDING,segment);}// trigger handler right now
this.tickImmediate();}loadFragment(frag,track,targetBufferTime){// only load if fragment is not loaded or if in audio switch
const fragState=this.fragmentTracker.getState(frag);this.fragCurrent=frag;// we force a frag loading in audio switch as fragment tracker might not have evicted previous frags in case of quick audio switch
if(this.switchingTrack||fragState===FragmentState.NOT_LOADED||fragState===FragmentState.PARTIAL){var _track$details2;if(frag.sn==='initSegment'){this._loadInitSegment(frag,track);}else if((_track$details2=track.details)!=null&&_track$details2.live&&!this.initPTS[frag.cc]){this.log(`Waiting for video PTS in continuity counter ${frag.cc} of live stream before loading audio fragment ${frag.sn} of level ${this.trackId}`);this.state=State.WAITING_INIT_PTS;const mainDetails=this.mainDetails;if(mainDetails&&mainDetails.fragments[0].start!==track.details.fragments[0].start){alignMediaPlaylistByPDT(track.details,mainDetails);}}else {this.startFragRequested=true;super.loadFragment(frag,track,targetBufferTime);}}else {this.clearTrackerIfNeeded(frag);}}flushAudioIfNeeded(switchingTrack){const{media,bufferedTrack}=this;const bufferedAttributes=bufferedTrack==null?void 0:bufferedTrack.attrs;const switchAttributes=switchingTrack.attrs;if(media&&bufferedAttributes&&(bufferedAttributes.CHANNELS!==switchAttributes.CHANNELS||bufferedTrack.name!==switchingTrack.name||bufferedTrack.lang!==switchingTrack.lang)){this.log('Switching audio track : flushing all audio');super.flushMainBuffer(0,Number.POSITIVE_INFINITY,'audio');this.bufferedTrack=null;}}completeAudioSwitch(switchingTrack){const{hls}=this;this.flushAudioIfNeeded(switchingTrack);this.bufferedTrack=switchingTrack;this.switchingTrack=null;hls.trigger(Events.AUDIO_TRACK_SWITCHED,_objectSpread2({},switchingTrack));}}class AudioTrackController extends BasePlaylistController{constructor(hls){super(hls,'[audio-track-controller]');this.tracks=[];this.groupIds=null;this.tracksInGroup=[];this.trackId=-1;this.currentTrack=null;this.selectDefaultTrack=true;this.registerListeners();}registerListeners(){const{hls}=this;hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.on(Events.LEVEL_LOADING,this.onLevelLoading,this);hls.on(Events.LEVEL_SWITCHING,this.onLevelSwitching,this);hls.on(Events.AUDIO_TRACK_LOADED,this.onAudioTrackLoaded,this);hls.on(Events.ERROR,this.onError,this);}unregisterListeners(){const{hls}=this;hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.off(Events.LEVEL_LOADING,this.onLevelLoading,this);hls.off(Events.LEVEL_SWITCHING,this.onLevelSwitching,this);hls.off(Events.AUDIO_TRACK_LOADED,this.onAudioTrackLoaded,this);hls.off(Events.ERROR,this.onError,this);}destroy(){this.unregisterListeners();this.tracks.length=0;this.tracksInGroup.length=0;this.currentTrack=null;super.destroy();}onManifestLoading(){this.tracks=[];this.tracksInGroup=[];this.groupIds=null;this.currentTrack=null;this.trackId=-1;this.selectDefaultTrack=true;}onManifestParsed(event,data){this.tracks=data.audioTracks||[];}onAudioTrackLoaded(event,data){const{id,groupId,details}=data;const trackInActiveGroup=this.tracksInGroup[id];if(!trackInActiveGroup||trackInActiveGroup.groupId!==groupId){this.warn(`Audio track with id:${id} and group:${groupId} not found in active group ${trackInActiveGroup==null?void 0:trackInActiveGroup.groupId}`);return;}const curDetails=trackInActiveGroup.details;trackInActiveGroup.details=data.details;this.log(`Audio track ${id} "${trackInActiveGroup.name}" lang:${trackInActiveGroup.lang} group:${groupId} loaded 
[${details.startSN}-${details.endSN}]`);if(id===this.trackId){this.playlistLoaded(id,data,curDetails);}}onLevelLoading(event,data){this.switchLevel(data.level);}onLevelSwitching(event,data){this.switchLevel(data.level);}switchLevel(levelIndex){const levelInfo=this.hls.levels[levelIndex];if(!levelInfo){return;}const audioGroups=levelInfo.audioGroups||null;const currentGroups=this.groupIds;let currentTrack=this.currentTrack;if(!audioGroups||(currentGroups==null?void 0:currentGroups.length)!==(audioGroups==null?void 0:audioGroups.length)||audioGroups!=null&&audioGroups.some(groupId=>(currentGroups==null?void 0:currentGroups.indexOf(groupId))===-1)){this.groupIds=audioGroups;this.trackId=-1;this.currentTrack=null;const audioTracks=this.tracks.filter(track=>!audioGroups||audioGroups.indexOf(track.groupId)!==-1);if(audioTracks.length){// Disable selectDefaultTrack if there are no default tracks
if(this.selectDefaultTrack&&!audioTracks.some(track=>track.default)){this.selectDefaultTrack=false;}// track.id should match hls.audioTracks index
audioTracks.forEach((track,i)=>{track.id=i;});}else if(!currentTrack&&!this.tracksInGroup.length){// Do not dispatch AUDIO_TRACKS_UPDATED when there were and are no tracks
return;}this.tracksInGroup=audioTracks;// Find preferred track
const audioPreference=this.hls.config.audioPreference;if(!currentTrack&&audioPreference){const groupIndex=findMatchingOption(audioPreference,audioTracks,audioMatchPredicate);if(groupIndex>-1){currentTrack=audioTracks[groupIndex];}else {const allIndex=findMatchingOption(audioPreference,this.tracks);currentTrack=this.tracks[allIndex];}}// Select initial track
let trackId=this.findTrackId(currentTrack);if(trackId===-1&&currentTrack){trackId=this.findTrackId(null);}// Dispatch events and load track if needed
const audioTracksUpdated={audioTracks};this.log(`Updating audio tracks, ${audioTracks.length} track(s) found in group(s): ${audioGroups==null?void 0:audioGroups.join(',')}`);this.hls.trigger(Events.AUDIO_TRACKS_UPDATED,audioTracksUpdated);const selectedTrackId=this.trackId;if(trackId!==-1&&selectedTrackId===-1){this.setAudioTrack(trackId);}else if(audioTracks.length&&selectedTrackId===-1){var _this$groupIds;const error=new Error(`No audio track selected for current audio group-ID(s): ${(_this$groupIds=this.groupIds)==null?void 0:_this$groupIds.join(',')} track count: ${audioTracks.length}`);this.warn(error.message);this.hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.AUDIO_TRACK_LOAD_ERROR,fatal:true,error});}}else if(this.shouldReloadPlaylist(currentTrack)){// Retry playlist loading if no playlist is or has been loaded yet
this.setAudioTrack(this.trackId);}}onError(event,data){if(data.fatal||!data.context){return;}if(data.context.type===PlaylistContextType.AUDIO_TRACK&&data.context.id===this.trackId&&(!this.groupIds||this.groupIds.indexOf(data.context.groupId)!==-1)){this.requestScheduled=-1;this.checkRetry(data);}}get allAudioTracks(){return this.tracks;}get audioTracks(){return this.tracksInGroup;}get audioTrack(){return this.trackId;}set audioTrack(newId){// If audio track is selected from API then don't choose from the manifest default track
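// Illustrative usage (not executed): assuming the surrounding Hls facade re-exports these
// accessors as `hls.audioTracks` / `hls.audioTrack` (as upstream hls.js does), an
// application switches renditions by index, which routes through setAudioTrack() below:
//   const idx = hls.audioTracks.findIndex((t) => t.lang === 'en');
//   if (idx !== -1) { hls.audioTrack = idx; } // fires AUDIO_TRACK_SWITCHING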
this.selectDefaultTrack=false;this.setAudioTrack(newId);}setAudioOption(audioOption){const hls=this.hls;hls.config.audioPreference=audioOption;if(audioOption){const allAudioTracks=this.allAudioTracks;this.selectDefaultTrack=false;if(allAudioTracks.length){// First see if current option matches (no switch op)
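// Illustrative usage (not executed): setAudioOption() takes a preference object whose fields
// mirror the track attributes matched below (name, lang, assocLang, characteristics,
// audioCodec, channels). Assuming the Hls facade exposes it as `hls.setAudioOption`:
//   hls.setAudioOption({ lang: 'en', name: 'English' });
// The checks below try the current track first, then the active group, then all tracks, and
// return the matched track or null when nothing matches.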
const currentTrack=this.currentTrack;if(currentTrack&&matchesOption(audioOption,currentTrack,audioMatchPredicate)){return currentTrack;}// Find option in available tracks (tracksInGroup)
const groupIndex=findMatchingOption(audioOption,this.tracksInGroup,audioMatchPredicate);if(groupIndex>-1){const track=this.tracksInGroup[groupIndex];this.setAudioTrack(groupIndex);return track;}else if(currentTrack){// Find option in nearest level audio group
let searchIndex=hls.loadLevel;if(searchIndex===-1){searchIndex=hls.firstAutoLevel;}const switchIndex=findClosestLevelWithAudioGroup(audioOption,hls.levels,allAudioTracks,searchIndex,audioMatchPredicate);if(switchIndex===-1){// could not find matching variant
return null;}// and switch level to achieve the audio group switch
hls.nextLoadLevel=switchIndex;}if(audioOption.channels||audioOption.audioCodec){// Could not find a match with codec / channels predicate
// Find a match without channels or codec
const withoutCodecAndChannelsMatch=findMatchingOption(audioOption,allAudioTracks);if(withoutCodecAndChannelsMatch>-1){return allAudioTracks[withoutCodecAndChannelsMatch];}}}}return null;}setAudioTrack(newId){const tracks=this.tracksInGroup;// check if level idx is valid
if(newId<0||newId>=tracks.length){this.warn(`Invalid audio track id: ${newId}`);return;}// stopping live reloading timer if any
this.clearTimer();this.selectDefaultTrack=false;const lastTrack=this.currentTrack;const track=tracks[newId];const trackLoaded=track.details&&!track.details.live;if(newId===this.trackId&&track===lastTrack&&trackLoaded){return;}this.log(`Switching to audio-track ${newId} "${track.name}" lang:${track.lang} group:${track.groupId} channels:${track.channels}`);this.trackId=newId;this.currentTrack=track;this.hls.trigger(Events.AUDIO_TRACK_SWITCHING,_objectSpread2({},track));// Do not reload track unless live
if(trackLoaded){return;}const hlsUrlParameters=this.switchParams(track.url,lastTrack==null?void 0:lastTrack.details,track.details);this.loadPlaylist(hlsUrlParameters);}findTrackId(currentTrack){const audioTracks=this.tracksInGroup;for(let i=0;i<audioTracks.length;i++){const track=audioTracks[i];if(this.selectDefaultTrack&&!track.default){continue;}if(!currentTrack||matchesOption(currentTrack,track,audioMatchPredicate)){return i;}}if(currentTrack){const{name,lang,assocLang,characteristics,audioCodec,channels}=currentTrack;for(let i=0;i<audioTracks.length;i++){const track=audioTracks[i];if(matchesOption({name,lang,assocLang,characteristics,audioCodec,channels},track,audioMatchPredicate)){return i;}}for(let i=0;i<audioTracks.length;i++){const track=audioTracks[i];if(mediaAttributesIdentical(currentTrack.attrs,track.attrs,['LANGUAGE','ASSOC-LANGUAGE','CHARACTERISTICS'])){return i;}}for(let i=0;i<audioTracks.length;i++){const track=audioTracks[i];if(mediaAttributesIdentical(currentTrack.attrs,track.attrs,['LANGUAGE'])){return i;}}}return -1;}loadPlaylist(hlsUrlParameters){const audioTrack=this.currentTrack;if(this.shouldLoadPlaylist(audioTrack)&&audioTrack){super.loadPlaylist();const id=audioTrack.id;const groupId=audioTrack.groupId;let url=audioTrack.url;if(hlsUrlParameters){try{url=hlsUrlParameters.addDirectives(url);}catch(error){this.warn(`Could not construct new URL with HLS Delivery Directives: ${error}`);}}// track not retrieved yet, or live playlist we need to (re)load it
this.log(`loading audio-track playlist ${id} "${audioTrack.name}" lang:${audioTrack.lang} group:${groupId}`);this.clearTimer();this.hls.trigger(Events.AUDIO_TRACK_LOADING,{url,id,groupId,deliveryDirectives:hlsUrlParameters||null});}}}const TICK_INTERVAL$1=500;// how often to tick in ms
class SubtitleStreamController extends BaseStreamController{constructor(hls,fragmentTracker,keyLoader){super(hls,fragmentTracker,keyLoader,'[subtitle-stream-controller]',PlaylistLevelType.SUBTITLE);this.currentTrackId=-1;this.tracksBuffered=[];this.mainDetails=null;this._registerListeners();}onHandlerDestroying(){this._unregisterListeners();super.onHandlerDestroying();this.mainDetails=null;}_registerListeners(){const{hls}=this;hls.on(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.on(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.LEVEL_LOADED,this.onLevelLoaded,this);hls.on(Events.ERROR,this.onError,this);hls.on(Events.SUBTITLE_TRACKS_UPDATED,this.onSubtitleTracksUpdated,this);hls.on(Events.SUBTITLE_TRACK_SWITCH,this.onSubtitleTrackSwitch,this);hls.on(Events.SUBTITLE_TRACK_LOADED,this.onSubtitleTrackLoaded,this);hls.on(Events.SUBTITLE_FRAG_PROCESSED,this.onSubtitleFragProcessed,this);hls.on(Events.BUFFER_FLUSHING,this.onBufferFlushing,this);hls.on(Events.FRAG_BUFFERED,this.onFragBuffered,this);}_unregisterListeners(){const{hls}=this;hls.off(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.off(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.LEVEL_LOADED,this.onLevelLoaded,this);hls.off(Events.ERROR,this.onError,this);hls.off(Events.SUBTITLE_TRACKS_UPDATED,this.onSubtitleTracksUpdated,this);hls.off(Events.SUBTITLE_TRACK_SWITCH,this.onSubtitleTrackSwitch,this);hls.off(Events.SUBTITLE_TRACK_LOADED,this.onSubtitleTrackLoaded,this);hls.off(Events.SUBTITLE_FRAG_PROCESSED,this.onSubtitleFragProcessed,this);hls.off(Events.BUFFER_FLUSHING,this.onBufferFlushing,this);hls.off(Events.FRAG_BUFFERED,this.onFragBuffered,this);}startLoad(startPosition){this.stopLoad();this.state=State.IDLE;this.setInterval(TICK_INTERVAL$1);this.nextLoadPosition=this.startPosition=this.lastCurrentTime=startPosition;this.tick();}onManifestLoading(){this.mainDetails=null;this.fragmentTracker.removeAllFragments();}onMediaDetaching(){this.tracksBuffered=[];super.onMediaDetaching();}onLevelLoaded(event,data){this.mainDetails=data.details;}onSubtitleFragProcessed(event,data){const{frag,success}=data;this.fragPrevious=frag;this.state=State.IDLE;if(!success){return;}const buffered=this.tracksBuffered[this.currentTrackId];if(!buffered){return;}// Create/update a buffered array matching the interface used by BufferHelper.bufferedInfo
// so we can re-use the logic used to detect how much has been buffered
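// Illustrative shape (not executed): tracksBuffered[trackId] is a plain array of
// { start, end } ranges, e.g. [{ start: 0, end: 12.5 }, { start: 30, end: 41.8 }],
// which BufferableInstance below wraps so it can stand in for DOM TimeRanges.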
let timeRange;const fragStart=frag.start;for(let i=0;i<buffered.length;i++){if(fragStart>=buffered[i].start&&fragStart<=buffered[i].end){timeRange=buffered[i];break;}}const fragEnd=frag.start+frag.duration;if(timeRange){timeRange.end=fragEnd;}else {timeRange={start:fragStart,end:fragEnd};buffered.push(timeRange);}this.fragmentTracker.fragBuffered(frag);this.fragBufferedComplete(frag,null);}onBufferFlushing(event,data){const{startOffset,endOffset}=data;if(startOffset===0&&endOffset!==Number.POSITIVE_INFINITY){const endOffsetSubtitles=endOffset-1;if(endOffsetSubtitles<=0){return;}data.endOffsetSubtitles=Math.max(0,endOffsetSubtitles);this.tracksBuffered.forEach(buffered=>{for(let i=0;i<buffered.length;){if(buffered[i].end<=endOffsetSubtitles){buffered.shift();continue;}else if(buffered[i].start<endOffsetSubtitles){buffered[i].start=endOffsetSubtitles;}else {break;}i++;}});this.fragmentTracker.removeFragmentsInRange(startOffset,endOffsetSubtitles,PlaylistLevelType.SUBTITLE);}}onFragBuffered(event,data){if(!this.loadedmetadata&&data.frag.type===PlaylistLevelType.MAIN){var _this$media;if((_this$media=this.media)!=null&&_this$media.buffered.length){this.loadedmetadata=true;}}}// If something goes wrong, proceed to next frag, if we were processing one.
onError(event,data){const frag=data.frag;if((frag==null?void 0:frag.type)===PlaylistLevelType.SUBTITLE){if(data.details===ErrorDetails.FRAG_GAP){this.fragmentTracker.fragBuffered(frag,true);}if(this.fragCurrent){this.fragCurrent.abortRequests();}if(this.state!==State.STOPPED){this.state=State.IDLE;}}}// Got all new subtitle levels.
onSubtitleTracksUpdated(event,{subtitleTracks}){if(this.levels&&subtitleOptionsIdentical(this.levels,subtitleTracks)){this.levels=subtitleTracks.map(mediaPlaylist=>new Level(mediaPlaylist));return;}this.tracksBuffered=[];this.levels=subtitleTracks.map(mediaPlaylist=>{const level=new Level(mediaPlaylist);this.tracksBuffered[level.id]=[];return level;});this.fragmentTracker.removeFragmentsInRange(0,Number.POSITIVE_INFINITY,PlaylistLevelType.SUBTITLE);this.fragPrevious=null;this.mediaBuffer=null;}onSubtitleTrackSwitch(event,data){var _this$levels;this.currentTrackId=data.id;if(!((_this$levels=this.levels)!=null&&_this$levels.length)||this.currentTrackId===-1){this.clearInterval();return;}// Check if track has the necessary details to load fragments
const currentTrack=this.levels[this.currentTrackId];if(currentTrack!=null&&currentTrack.details){this.mediaBuffer=this.mediaBufferTimeRanges;}else {this.mediaBuffer=null;}if(currentTrack){this.setInterval(TICK_INTERVAL$1);}}// Got a new set of subtitle fragments.
onSubtitleTrackLoaded(event,data){var _track$details;const{currentTrackId,levels}=this;const{details:newDetails,id:trackId}=data;if(!levels){this.warn(`Subtitle tracks were reset while loading level ${trackId}`);return;}const track=levels[trackId];if(trackId>=levels.length||!track){return;}this.log(`Subtitle track ${trackId} loaded [${newDetails.startSN},${newDetails.endSN}]${newDetails.lastPartSn?`[part-${newDetails.lastPartSn}-${newDetails.lastPartIndex}]`:''},duration:${newDetails.totalduration}`);this.mediaBuffer=this.mediaBufferTimeRanges;let sliding=0;if(newDetails.live||(_track$details=track.details)!=null&&_track$details.live){const mainDetails=this.mainDetails;if(newDetails.deltaUpdateFailed||!mainDetails){return;}const mainSlidingStartFragment=mainDetails.fragments[0];if(!track.details){if(newDetails.hasProgramDateTime&&mainDetails.hasProgramDateTime){alignMediaPlaylistByPDT(newDetails,mainDetails);sliding=newDetails.fragments[0].start;}else if(mainSlidingStartFragment){// line up live playlist with main so that fragments in range are loaded
sliding=mainSlidingStartFragment.start;addSliding(newDetails,sliding);}}else {var _this$levelLastLoaded;sliding=this.alignPlaylists(newDetails,track.details,(_this$levelLastLoaded=this.levelLastLoaded)==null?void 0:_this$levelLastLoaded.details);if(sliding===0&&mainSlidingStartFragment){// realign with main when there is no overlap with last refresh
sliding=mainSlidingStartFragment.start;addSliding(newDetails,sliding);}}}track.details=newDetails;this.levelLastLoaded=track;if(trackId!==currentTrackId){return;}if(!this.startFragRequested&&(this.mainDetails||!newDetails.live)){this.setStartPosition(this.mainDetails||newDetails,sliding);}// trigger handler right now
this.tick();// If playlist is misaligned because of bad PDT or drift, delete details to resync with main on reload
if(newDetails.live&&!this.fragCurrent&&this.media&&this.state===State.IDLE){const foundFrag=findFragmentByPTS(null,newDetails.fragments,this.media.currentTime,0);if(!foundFrag){this.warn('Subtitle playlist not aligned with playback');track.details=undefined;}}}_handleFragmentLoadComplete(fragLoadedData){const{frag,payload}=fragLoadedData;const decryptData=frag.decryptdata;const hls=this.hls;if(this.fragContextChanged(frag)){return;}// check to see if the payload needs to be decrypted
if(payload&&payload.byteLength>0&&decryptData!=null&&decryptData.key&&decryptData.iv&&decryptData.method==='AES-128'){const startTime=performance.now();// decrypt the subtitles
this.decrypter.decrypt(new Uint8Array(payload),decryptData.key.buffer,decryptData.iv.buffer).catch(err=>{hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.FRAG_DECRYPT_ERROR,fatal:false,error:err,reason:err.message,frag});throw err;}).then(decryptedData=>{const endTime=performance.now();hls.trigger(Events.FRAG_DECRYPTED,{frag,payload:decryptedData,stats:{tstart:startTime,tdecrypt:endTime}});}).catch(err=>{this.warn(`${err.name}: ${err.message}`);this.state=State.IDLE;});}}doTick(){if(!this.media){this.state=State.IDLE;return;}if(this.state===State.IDLE){const{currentTrackId,levels}=this;const track=levels==null?void 0:levels[currentTrackId];if(!track||!levels.length||!track.details){return;}const{config}=this;const currentTime=this.getLoadPosition();const bufferedInfo=BufferHelper.bufferedInfo(this.tracksBuffered[this.currentTrackId]||[],currentTime,config.maxBufferHole);const{end:targetBufferTime,len:bufferLen}=bufferedInfo;const mainBufferInfo=this.getFwdBufferInfo(this.media,PlaylistLevelType.MAIN);const trackDetails=track.details;const maxBufLen=this.getMaxBufferLength(mainBufferInfo==null?void 0:mainBufferInfo.len)+trackDetails.levelTargetDuration;if(bufferLen>maxBufLen){return;}const fragments=trackDetails.fragments;const fragLen=fragments.length;const end=trackDetails.edge;let foundFrag=null;const fragPrevious=this.fragPrevious;if(targetBufferTime<end){const tolerance=config.maxFragLookUpTolerance;const lookupTolerance=targetBufferTime>end-tolerance?0:tolerance;foundFrag=findFragmentByPTS(fragPrevious,fragments,Math.max(fragments[0].start,targetBufferTime),lookupTolerance);if(!foundFrag&&fragPrevious&&fragPrevious.start<fragments[0].start){foundFrag=fragments[0];}}else {foundFrag=fragments[fragLen-1];}if(!foundFrag){return;}foundFrag=this.mapToInitFragWhenRequired(foundFrag);if(foundFrag.sn!=='initSegment'){// Load earlier fragment in same discontinuity to make up for misaligned playlists and cues that extend beyond end of segment
const curSNIdx=foundFrag.sn-trackDetails.startSN;const prevFrag=fragments[curSNIdx-1];if(prevFrag&&prevFrag.cc===foundFrag.cc&&this.fragmentTracker.getState(prevFrag)===FragmentState.NOT_LOADED){foundFrag=prevFrag;}}if(this.fragmentTracker.getState(foundFrag)===FragmentState.NOT_LOADED){// only load if fragment is not loaded
this.loadFragment(foundFrag,track,targetBufferTime);}}}getMaxBufferLength(mainBufferLength){const maxConfigBuffer=super.getMaxBufferLength();if(!mainBufferLength){return maxConfigBuffer;}return Math.max(maxConfigBuffer,mainBufferLength);}loadFragment(frag,level,targetBufferTime){this.fragCurrent=frag;if(frag.sn==='initSegment'){this._loadInitSegment(frag,level);}else {this.startFragRequested=true;super.loadFragment(frag,level,targetBufferTime);}}get mediaBufferTimeRanges(){return new BufferableInstance(this.tracksBuffered[this.currentTrackId]||[]);}}class BufferableInstance{constructor(timeranges){this.buffered=void 0;const getRange=(name,index,length)=>{index=index>>>0;if(index>length-1){throw new DOMException(`Failed to execute '${name}' on 'TimeRanges': The index provided (${index}) is greater than the maximum bound (${length})`);}return timeranges[index][name];};this.buffered={get length(){return timeranges.length;},end(index){return getRange('end',index,timeranges.length);},start(index){return getRange('start',index,timeranges.length);}};}}class SubtitleTrackController extends BasePlaylistController{constructor(hls){super(hls,'[subtitle-track-controller]');this.media=null;this.tracks=[];this.groupIds=null;this.tracksInGroup=[];this.trackId=-1;this.currentTrack=null;this.selectDefaultTrack=true;this.queuedDefaultTrack=-1;this.asyncPollTrackChange=()=>this.pollTrackChange(0);this.useTextTrackPolling=false;this.subtitlePollingInterval=-1;this._subtitleDisplay=true;this.onTextTracksChanged=()=>{if(!this.useTextTrackPolling){self.clearInterval(this.subtitlePollingInterval);}// Media is undefined when switching streams via loadSource()
if(!this.media||!this.hls.config.renderTextTracksNatively){return;}let textTrack=null;const tracks=filterSubtitleTracks(this.media.textTracks);for(let i=0;i<tracks.length;i++){if(tracks[i].mode==='hidden'){// Do not break in case there is a following track with showing.
textTrack=tracks[i];}else if(tracks[i].mode==='showing'){textTrack=tracks[i];break;}}// Find internal track index for TextTrack
const trackId=this.findTrackForTextTrack(textTrack);if(this.subtitleTrack!==trackId){this.setSubtitleTrack(trackId);}};this.registerListeners();}destroy(){this.unregisterListeners();this.tracks.length=0;this.tracksInGroup.length=0;this.currentTrack=null;this.onTextTracksChanged=this.asyncPollTrackChange=null;super.destroy();}get subtitleDisplay(){return this._subtitleDisplay;}set subtitleDisplay(value){this._subtitleDisplay=value;if(this.trackId>-1){this.toggleTrackModes();}}registerListeners(){const{hls}=this;hls.on(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.on(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.on(Events.LEVEL_LOADING,this.onLevelLoading,this);hls.on(Events.LEVEL_SWITCHING,this.onLevelSwitching,this);hls.on(Events.SUBTITLE_TRACK_LOADED,this.onSubtitleTrackLoaded,this);hls.on(Events.ERROR,this.onError,this);}unregisterListeners(){const{hls}=this;hls.off(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.off(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.off(Events.LEVEL_LOADING,this.onLevelLoading,this);hls.off(Events.LEVEL_SWITCHING,this.onLevelSwitching,this);hls.off(Events.SUBTITLE_TRACK_LOADED,this.onSubtitleTrackLoaded,this);hls.off(Events.ERROR,this.onError,this);}// Listen for subtitle track change, then extract the current track ID.
onMediaAttached(event,data){this.media=data.media;if(!this.media){return;}if(this.queuedDefaultTrack>-1){this.subtitleTrack=this.queuedDefaultTrack;this.queuedDefaultTrack=-1;}this.useTextTrackPolling=!(this.media.textTracks&&'onchange'in this.media.textTracks);if(this.useTextTrackPolling){this.pollTrackChange(500);}else {this.media.textTracks.addEventListener('change',this.asyncPollTrackChange);}}pollTrackChange(timeout){self.clearInterval(this.subtitlePollingInterval);this.subtitlePollingInterval=self.setInterval(this.onTextTracksChanged,timeout);}onMediaDetaching(){if(!this.media){return;}self.clearInterval(this.subtitlePollingInterval);if(!this.useTextTrackPolling){this.media.textTracks.removeEventListener('change',this.asyncPollTrackChange);}if(this.trackId>-1){this.queuedDefaultTrack=this.trackId;}const textTracks=filterSubtitleTracks(this.media.textTracks);// Clear loaded cues on media detachment from tracks
textTracks.forEach(track=>{clearCurrentCues(track);});// Disable all subtitle tracks before detachment so when reattached only tracks in that content are enabled.
this.subtitleTrack=-1;this.media=null;}onManifestLoading(){this.tracks=[];this.groupIds=null;this.tracksInGroup=[];this.trackId=-1;this.currentTrack=null;this.selectDefaultTrack=true;}// Fired whenever a new manifest is loaded.
onManifestParsed(event,data){this.tracks=data.subtitleTracks;}onSubtitleTrackLoaded(event,data){const{id,groupId,details}=data;const trackInActiveGroup=this.tracksInGroup[id];if(!trackInActiveGroup||trackInActiveGroup.groupId!==groupId){this.warn(`Subtitle track with id:${id} and group:${groupId} not found in active group ${trackInActiveGroup==null?void 0:trackInActiveGroup.groupId}`);return;}const curDetails=trackInActiveGroup.details;trackInActiveGroup.details=data.details;this.log(`Subtitle track ${id} "${trackInActiveGroup.name}" lang:${trackInActiveGroup.lang} group:${groupId} loaded [${details.startSN}-${details.endSN}]`);if(id===this.trackId){this.playlistLoaded(id,data,curDetails);}}onLevelLoading(event,data){this.switchLevel(data.level);}onLevelSwitching(event,data){this.switchLevel(data.level);}switchLevel(levelIndex){const levelInfo=this.hls.levels[levelIndex];if(!levelInfo){return;}const subtitleGroups=levelInfo.subtitleGroups||null;const currentGroups=this.groupIds;let currentTrack=this.currentTrack;if(!subtitleGroups||(currentGroups==null?void 0:currentGroups.length)!==(subtitleGroups==null?void 0:subtitleGroups.length)||subtitleGroups!=null&&subtitleGroups.some(groupId=>(currentGroups==null?void 0:currentGroups.indexOf(groupId))===-1)){this.groupIds=subtitleGroups;this.trackId=-1;this.currentTrack=null;const subtitleTracks=this.tracks.filter(track=>!subtitleGroups||subtitleGroups.indexOf(track.groupId)!==-1);if(subtitleTracks.length){// Disable selectDefaultTrack if there are no default tracks
if(this.selectDefaultTrack&&!subtitleTracks.some(track=>track.default)){this.selectDefaultTrack=false;}// track.id should match hls.subtitleTracks index
subtitleTracks.forEach((track,i)=>{track.id=i;});}else if(!currentTrack&&!this.tracksInGroup.length){// Do not dispatch SUBTITLE_TRACKS_UPDATED when there were and are no tracks
return;}this.tracksInGroup=subtitleTracks;// Find preferred track
const subtitlePreference=this.hls.config.subtitlePreference;if(!currentTrack&&subtitlePreference){this.selectDefaultTrack=false;const groupIndex=findMatchingOption(subtitlePreference,subtitleTracks);if(groupIndex>-1){currentTrack=subtitleTracks[groupIndex];}else {const allIndex=findMatchingOption(subtitlePreference,this.tracks);currentTrack=this.tracks[allIndex];}}// Select initial track
let trackId=this.findTrackId(currentTrack);if(trackId===-1&&currentTrack){trackId=this.findTrackId(null);}// Dispatch events and load track if needed
const subtitleTracksUpdated={subtitleTracks};this.log(`Updating subtitle tracks, ${subtitleTracks.length} track(s) found in "${subtitleGroups==null?void 0:subtitleGroups.join(',')}" group-id`);this.hls.trigger(Events.SUBTITLE_TRACKS_UPDATED,subtitleTracksUpdated);if(trackId!==-1&&this.trackId===-1){this.setSubtitleTrack(trackId);}}else if(this.shouldReloadPlaylist(currentTrack)){// Retry playlist loading if no playlist is or has been loaded yet
this.setSubtitleTrack(this.trackId);}}findTrackId(currentTrack){const tracks=this.tracksInGroup;const selectDefault=this.selectDefaultTrack;for(let i=0;i<tracks.length;i++){const track=tracks[i];if(selectDefault&&!track.default||!selectDefault&&!currentTrack){continue;}if(!currentTrack||matchesOption(track,currentTrack)){return i;}}if(currentTrack){for(let i=0;i<tracks.length;i++){const track=tracks[i];if(mediaAttributesIdentical(currentTrack.attrs,track.attrs,['LANGUAGE','ASSOC-LANGUAGE','CHARACTERISTICS'])){return i;}}for(let i=0;i<tracks.length;i++){const track=tracks[i];if(mediaAttributesIdentical(currentTrack.attrs,track.attrs,['LANGUAGE'])){return i;}}}return -1;}findTrackForTextTrack(textTrack){if(textTrack){const tracks=this.tracksInGroup;for(let i=0;i<tracks.length;i++){const track=tracks[i];if(subtitleTrackMatchesTextTrack(track,textTrack)){return i;}}}return -1;}onError(event,data){if(data.fatal||!data.context){return;}if(data.context.type===PlaylistContextType.SUBTITLE_TRACK&&data.context.id===this.trackId&&(!this.groupIds||this.groupIds.indexOf(data.context.groupId)!==-1)){this.checkRetry(data);}}get allSubtitleTracks(){return this.tracks;}/** get alternate subtitle tracks list from playlist **/get subtitleTracks(){return this.tracksInGroup;}/** get/set index of the selected subtitle track (based on index in subtitle track lists) **/get subtitleTrack(){return this.trackId;}set subtitleTrack(newId){this.selectDefaultTrack=false;this.setSubtitleTrack(newId);}setSubtitleOption(subtitleOption){this.hls.config.subtitlePreference=subtitleOption;if(subtitleOption){const allSubtitleTracks=this.allSubtitleTracks;this.selectDefaultTrack=false;if(allSubtitleTracks.length){// First see if current option matches (no switch op)
const currentTrack=this.currentTrack;if(currentTrack&&matchesOption(subtitleOption,currentTrack)){return currentTrack;}// Find option in current group
const groupIndex=findMatchingOption(subtitleOption,this.tracksInGroup);if(groupIndex>-1){const track=this.tracksInGroup[groupIndex];this.setSubtitleTrack(groupIndex);return track;}else if(currentTrack){// If this is not the initial selection return null
// option should have matched one in active group
return null;}else {// Find the option in all tracks for initial selection
const allIndex=findMatchingOption(subtitleOption,allSubtitleTracks);if(allIndex>-1){return allSubtitleTracks[allIndex];}}}return null;}loadPlaylist(hlsUrlParameters){super.loadPlaylist();const currentTrack=this.currentTrack;if(this.shouldLoadPlaylist(currentTrack)&&currentTrack){const id=currentTrack.id;const groupId=currentTrack.groupId;let url=currentTrack.url;if(hlsUrlParameters){try{url=hlsUrlParameters.addDirectives(url);}catch(error){this.warn(`Could not construct new URL with HLS Delivery Directives: ${error}`);}}this.log(`Loading subtitle playlist for id ${id}`);this.hls.trigger(Events.SUBTITLE_TRACK_LOADING,{url,id,groupId,deliveryDirectives:hlsUrlParameters||null});}}/**
* Disables the old subtitleTrack and sets current mode on the next subtitleTrack.
* This operates on the DOM textTracks.
* A value of -1 will disable all subtitle tracks.
*/toggleTrackModes(){const{media}=this;if(!media){return;}const textTracks=filterSubtitleTracks(media.textTracks);const currentTrack=this.currentTrack;let nextTrack;if(currentTrack){nextTrack=textTracks.filter(textTrack=>subtitleTrackMatchesTextTrack(currentTrack,textTrack))[0];if(!nextTrack){this.warn(`Unable to find subtitle TextTrack with name "${currentTrack.name}" and language "${currentTrack.lang}"`);}}[].slice.call(textTracks).forEach(track=>{if(track.mode!=='disabled'&&track!==nextTrack){track.mode='disabled';}});if(nextTrack){const mode=this.subtitleDisplay?'showing':'hidden';if(nextTrack.mode!==mode){nextTrack.mode=mode;}}}/**
* This method is responsible for validating the subtitle index and periodically reloading if live.
* Dispatches the SUBTITLE_TRACK_SWITCH event, which instructs the subtitle-stream-controller to load the selected track.
*/setSubtitleTrack(newId){const tracks=this.tracksInGroup;// setting this.subtitleTrack will trigger internal logic
// if media has not been attached yet, it will fail
// we keep a reference to the default track id
// and we'll set subtitleTrack when onMediaAttached is triggered
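// Illustrative usage (not executed): assuming the Hls facade re-exports this setter as
// `hls.subtitleTrack` (as upstream hls.js does), selecting and disabling subtitles looks like:
//   hls.subtitleTrack = 2;   // switch to the third rendition in hls.subtitleTracks
//   hls.subtitleTrack = -1;  // disable all subtitle TextTracks (see toggleTrackModes above)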
if(!this.media){this.queuedDefaultTrack=newId;return;}// exit if track id is already set or invalid
if(newId<-1||newId>=tracks.length||!isFiniteNumber(newId)){this.warn(`Invalid subtitle track id: ${newId}`);return;}// stopping live reloading timer if any
this.clearTimer();this.selectDefaultTrack=false;const lastTrack=this.currentTrack;const track=tracks[newId]||null;this.trackId=newId;this.currentTrack=track;this.toggleTrackModes();if(!track){// switch to -1
this.hls.trigger(Events.SUBTITLE_TRACK_SWITCH,{id:newId});return;}const trackLoaded=!!track.details&&!track.details.live;if(newId===this.trackId&&track===lastTrack&&trackLoaded){return;}this.log(`Switching to subtitle-track ${newId}`+(track?` "${track.name}" lang:${track.lang} group:${track.groupId}`:''));const{id,groupId='',name,type,url}=track;this.hls.trigger(Events.SUBTITLE_TRACK_SWITCH,{id,groupId,name,type,url});const hlsUrlParameters=this.switchParams(track.url,lastTrack==null?void 0:lastTrack.details,track.details);this.loadPlaylist(hlsUrlParameters);}}class BufferOperationQueue{constructor(sourceBufferReference){this.buffers=void 0;this.queues={video:[],audio:[],audiovideo:[]};this.buffers=sourceBufferReference;}append(operation,type,pending){const queue=this.queues[type];queue.push(operation);if(queue.length===1&&!pending){this.executeNext(type);}}insertAbort(operation,type){const queue=this.queues[type];queue.unshift(operation);this.executeNext(type);}appendBlocker(type){let execute;const promise=new Promise(resolve=>{execute=resolve;});const operation={execute,onStart:()=>{},onComplete:()=>{},onError:()=>{}};this.append(operation,type);return promise;}executeNext(type){const queue=this.queues[type];if(queue.length){const operation=queue[0];try{// Operations are expected to result in an 'updateend' event being fired. If not, the queue will lock. Operations
// which do not end with this event must call _onSBUpdateEnd manually
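// For reference, every queued entry follows the shape built elsewhere in this file
// (appendBlocker, appendChangeType, onBufferAppending). Illustrative only:
//   const operation = {
//     execute: () => sb.appendBuffer(data), // starts the async SourceBuffer work
//     onStart: () => {},
//     onComplete: () => {},                 // run when the matching 'updateend' is handled
//     onError: (error) => {},
//   };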
operation.execute();}catch(error){logger.warn(`[buffer-operation-queue]: Exception executing "${type}" SourceBuffer operation: ${error}`);operation.onError(error);// Only shift the current operation off, otherwise the updateend handler will do this for us
const sb=this.buffers[type];if(!(sb!=null&&sb.updating)){this.shiftAndExecuteNext(type);}}}}shiftAndExecuteNext(type){this.queues[type].shift();this.executeNext(type);}current(type){return this.queues[type][0];}}const VIDEO_CODEC_PROFILE_REPLACE=/(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;class BufferController{constructor(hls){// The level details used to determine duration, target-duration and live
this.details=null;// cache the self generated object url to detect hijack of video tag
this._objectUrl=null;// A queue of buffer operations which require the SourceBuffer to not be updating upon execution
this.operationQueue=void 0;// References to event listeners for each SourceBuffer, so that they can be referenced for event removal
this.listeners=void 0;this.hls=void 0;// The number of BUFFER_CODEC events received before any sourceBuffers are created
this.bufferCodecEventsExpected=0;// The total number of BUFFER_CODEC events received
this._bufferCodecEventsTotal=0;// A reference to the attached media element
this.media=null;// A reference to the active media source
this.mediaSource=null;// Last MP3 audio chunk appended
this.lastMpegAudioChunk=null;this.appendSource=void 0;// counters
this.appendErrors={audio:0,video:0,audiovideo:0};this.tracks={};this.pendingTracks={};this.sourceBuffer=void 0;this.log=void 0;this.warn=void 0;this.error=void 0;this._onEndStreaming=event=>{if(!this.hls){return;}this.hls.pauseBuffering();};this._onStartStreaming=event=>{if(!this.hls){return;}this.hls.resumeBuffering();};// Keep as arrow functions so that we can directly reference these functions directly as event listeners
this._onMediaSourceOpen=()=>{const{media,mediaSource}=this;this.log('Media source opened');if(media){media.removeEventListener('emptied',this._onMediaEmptied);this.updateMediaElementDuration();this.hls.trigger(Events.MEDIA_ATTACHED,{media,mediaSource:mediaSource});}if(mediaSource){// once received, don't listen anymore to sourceopen event
mediaSource.removeEventListener('sourceopen',this._onMediaSourceOpen);}this.checkPendingTracks();};this._onMediaSourceClose=()=>{this.log('Media source closed');};this._onMediaSourceEnded=()=>{this.log('Media source ended');};this._onMediaEmptied=()=>{const{mediaSrc,_objectUrl}=this;if(mediaSrc!==_objectUrl){logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);}};this.hls=hls;const logPrefix='[buffer-controller]';this.appendSource=isManagedMediaSource(getMediaSource(hls.config.preferManagedMediaSource));this.log=logger.log.bind(logger,logPrefix);this.warn=logger.warn.bind(logger,logPrefix);this.error=logger.error.bind(logger,logPrefix);this._initSourceBuffer();this.registerListeners();}hasSourceTypes(){return this.getSourceBufferTypes().length>0||Object.keys(this.pendingTracks).length>0;}destroy(){this.unregisterListeners();this.details=null;this.lastMpegAudioChunk=null;// @ts-ignore
this.hls=null;}registerListeners(){const{hls}=this;hls.on(Events.MEDIA_ATTACHING,this.onMediaAttaching,this);hls.on(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.on(Events.BUFFER_RESET,this.onBufferReset,this);hls.on(Events.BUFFER_APPENDING,this.onBufferAppending,this);hls.on(Events.BUFFER_CODECS,this.onBufferCodecs,this);hls.on(Events.BUFFER_EOS,this.onBufferEos,this);hls.on(Events.BUFFER_FLUSHING,this.onBufferFlushing,this);hls.on(Events.LEVEL_UPDATED,this.onLevelUpdated,this);hls.on(Events.FRAG_PARSED,this.onFragParsed,this);hls.on(Events.FRAG_CHANGED,this.onFragChanged,this);}unregisterListeners(){const{hls}=this;hls.off(Events.MEDIA_ATTACHING,this.onMediaAttaching,this);hls.off(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.off(Events.BUFFER_RESET,this.onBufferReset,this);hls.off(Events.BUFFER_APPENDING,this.onBufferAppending,this);hls.off(Events.BUFFER_CODECS,this.onBufferCodecs,this);hls.off(Events.BUFFER_EOS,this.onBufferEos,this);hls.off(Events.BUFFER_FLUSHING,this.onBufferFlushing,this);hls.off(Events.LEVEL_UPDATED,this.onLevelUpdated,this);hls.off(Events.FRAG_PARSED,this.onFragParsed,this);hls.off(Events.FRAG_CHANGED,this.onFragChanged,this);}_initSourceBuffer(){this.sourceBuffer={};this.operationQueue=new BufferOperationQueue(this.sourceBuffer);this.listeners={audio:[],video:[],audiovideo:[]};this.appendErrors={audio:0,video:0,audiovideo:0};this.lastMpegAudioChunk=null;}onManifestLoading(){this.bufferCodecEventsExpected=this._bufferCodecEventsTotal=0;this.details=null;}onManifestParsed(event,data){// in case of alt audio 2 BUFFER_CODECS events will be triggered, one per stream controller
// sourcebuffers will be created all at once when the expected nb of tracks will be reached
// in case alt audio is not used, only one BUFFER_CODEC event will be fired from main stream controller
// it will contain the expected nb of source buffers, no need to compute it
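// Illustrative outcomes of the computation below: a variant with separate alt-audio keeps
// codecEvents at 2 (one BUFFER_CODECS from the main stream-controller, one from the
// audio-stream-controller); audio-only manifests, or manifests without alternate audio,
// drop it to 1 so the SourceBuffers can be created after a single event.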
let codecEvents=2;if(data.audio&&!data.video||!data.altAudio||!true){codecEvents=1;}this.bufferCodecEventsExpected=this._bufferCodecEventsTotal=codecEvents;this.log(`${this.bufferCodecEventsExpected} bufferCodec event(s) expected`);}onMediaAttaching(event,data){const media=this.media=data.media;const MediaSource=getMediaSource(this.appendSource);if(media&&MediaSource){var _ms$constructor;const ms=this.mediaSource=new MediaSource();this.log(`created media source: ${(_ms$constructor=ms.constructor)==null?void 0:_ms$constructor.name}`);// MediaSource listeners are arrow functions with a lexical scope, and do not need to be bound
ms.addEventListener('sourceopen',this._onMediaSourceOpen);ms.addEventListener('sourceended',this._onMediaSourceEnded);ms.addEventListener('sourceclose',this._onMediaSourceClose);if(this.appendSource){ms.addEventListener('startstreaming',this._onStartStreaming);ms.addEventListener('endstreaming',this._onEndStreaming);}// cache the locally generated object url
const objectUrl=this._objectUrl=self.URL.createObjectURL(ms);// link video and media Source
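// Note on the attach path below: the MediaSource (or ManagedMediaSource) is exposed to the
// media element through this blob: object URL, either as a <source> child with remote
// playback disabled (ManagedMediaSource path) or by assigning media.src directly.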
if(this.appendSource){try{media.removeAttribute('src');// ManagedMediaSource will not open without disableRemotePlayback set to false or source alternatives
const MMS=self.ManagedMediaSource;media.disableRemotePlayback=media.disableRemotePlayback||MMS&&ms instanceof MMS;removeSourceChildren(media);addSource(media,objectUrl);media.load();}catch(error){media.src=objectUrl;}}else {media.src=objectUrl;}media.addEventListener('emptied',this._onMediaEmptied);}}onMediaDetaching(){const{media,mediaSource,_objectUrl}=this;if(mediaSource){this.log('media source detaching');if(mediaSource.readyState==='open'){try{// endOfStream could trigger exception if any sourcebuffer is in updating state
// we don't really care about checking sourcebuffer state here,
// as we are anyway detaching the MediaSource
// let's just avoid this exception to propagate
mediaSource.endOfStream();}catch(err){this.warn(`onMediaDetaching: ${err.message} while calling endOfStream`);}}// Clean up the SourceBuffers by invoking onBufferReset
this.onBufferReset();mediaSource.removeEventListener('sourceopen',this._onMediaSourceOpen);mediaSource.removeEventListener('sourceended',this._onMediaSourceEnded);mediaSource.removeEventListener('sourceclose',this._onMediaSourceClose);if(this.appendSource){mediaSource.removeEventListener('startstreaming',this._onStartStreaming);mediaSource.removeEventListener('endstreaming',this._onEndStreaming);}// Detach properly the MediaSource from the HTMLMediaElement as
// suggested in https://github.com/w3c/media-source/issues/53.
if(media){media.removeEventListener('emptied',this._onMediaEmptied);if(_objectUrl){self.URL.revokeObjectURL(_objectUrl);}// clean up video tag src only if it's our own url. some external libraries might
// hijack the video tag and change its 'src' without destroying the Hls instance first
if(this.mediaSrc===_objectUrl){media.removeAttribute('src');if(this.appendSource){removeSourceChildren(media);}media.load();}else {this.warn('media|source.src was changed by a third party - skip cleanup');}}this.mediaSource=null;this.media=null;this._objectUrl=null;this.bufferCodecEventsExpected=this._bufferCodecEventsTotal;this.pendingTracks={};this.tracks={};}this.hls.trigger(Events.MEDIA_DETACHED,undefined);}onBufferReset(){this.getSourceBufferTypes().forEach(type=>{this.resetBuffer(type);});this._initSourceBuffer();}resetBuffer(type){const sb=this.sourceBuffer[type];try{if(sb){var _this$mediaSource;this.removeBufferListeners(type);// Synchronously remove the SB from the map before the next call in order to prevent an async function from
// accessing it
this.sourceBuffer[type]=undefined;if((_this$mediaSource=this.mediaSource)!=null&&_this$mediaSource.sourceBuffers.length){this.mediaSource.removeSourceBuffer(sb);}}}catch(err){this.warn(`onBufferReset ${type}`,err);}}onBufferCodecs(event,data){const sourceBufferCount=this.getSourceBufferTypes().length;const trackNames=Object.keys(data);trackNames.forEach(trackName=>{if(sourceBufferCount){// check if SourceBuffer codec needs to change
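// Note on the comparison below: the regex strips codec profile/level (e.g. 'avc1.64001f'
// and 'avc1.640028' both reduce to 'avc1'); only when the base codec really changes is the
// SourceBuffer switched in place via changeType() with a MIME string built as
// `${container};codecs=${trackCodec}`, for example 'video/mp4;codecs=hvc1.1.6.L93.B0'
// (illustrative value).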
const track=this.tracks[trackName];if(track&&typeof track.buffer.changeType==='function'){var _trackCodec;const{id,codec,levelCodec,container,metadata}=data[trackName];const currentCodecFull=pickMostCompleteCodecName(track.codec,track.levelCodec);const currentCodec=currentCodecFull==null?void 0:currentCodecFull.replace(VIDEO_CODEC_PROFILE_REPLACE,'$1');let trackCodec=pickMostCompleteCodecName(codec,levelCodec);const nextCodec=(_trackCodec=trackCodec)==null?void 0:_trackCodec.replace(VIDEO_CODEC_PROFILE_REPLACE,'$1');if(trackCodec&¤tCodec!==nextCodec){if(trackName.slice(0,5)==='audio'){trackCodec=getCodecCompatibleName(trackCodec,this.appendSource);}const mimeType=`${container};codecs=${trackCodec}`;this.appendChangeType(trackName,mimeType);this.log(`switching codec ${currentCodecFull} to ${trackCodec}`);this.tracks[trackName]={buffer:track.buffer,codec,container,levelCodec,metadata,id};}}}else {// if source buffer(s) not created yet, appended buffer tracks in this.pendingTracks
this.pendingTracks[trackName]=data[trackName];}});// if sourcebuffers already created, do nothing ...
if(sourceBufferCount){return;}const bufferCodecEventsExpected=Math.max(this.bufferCodecEventsExpected-1,0);if(this.bufferCodecEventsExpected!==bufferCodecEventsExpected){this.log(`${bufferCodecEventsExpected} bufferCodec event(s) expected ${trackNames.join(',')}`);this.bufferCodecEventsExpected=bufferCodecEventsExpected;}if(this.mediaSource&&this.mediaSource.readyState==='open'){this.checkPendingTracks();}}appendChangeType(type,mimeType){const{operationQueue}=this;const operation={execute:()=>{const sb=this.sourceBuffer[type];if(sb){this.log(`changing ${type} sourceBuffer type to ${mimeType}`);sb.changeType(mimeType);}operationQueue.shiftAndExecuteNext(type);},onStart:()=>{},onComplete:()=>{},onError:error=>{this.warn(`Failed to change ${type} SourceBuffer type`,error);}};operationQueue.append(operation,type,!!this.pendingTracks[type]);}onBufferAppending(event,eventData){const{hls,operationQueue,tracks}=this;const{data,type,frag,part,chunkMeta}=eventData;const chunkStats=chunkMeta.buffering[type];const bufferAppendingStart=self.performance.now();chunkStats.start=bufferAppendingStart;const fragBuffering=frag.stats.buffering;const partBuffering=part?part.stats.buffering:null;if(fragBuffering.start===0){fragBuffering.start=bufferAppendingStart;}if(partBuffering&&partBuffering.start===0){partBuffering.start=bufferAppendingStart;}// TODO: Only update timestampOffset when audio/mpeg fragment or part is not contiguous with previously appended
// Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
// in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset`
// is greater than 100ms (this is enough to handle seek for VOD or level change for LIVE videos).
// More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
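/*
 * Readable sketch of the check implemented in the minified line below (names mirror
 * the surrounding code): only for 'audio' appends whose track container is
 * 'audio/mpeg', and only when the chunk is not contiguous with the previous one,
 * the SourceBuffer timestampOffset is realigned to the fragment start once the
 * drift reaches 100ms:
 *
 *   const delta = frag.start - sb.timestampOffset;
 *   if (Math.abs(delta) >= 0.1) {
 *     sb.timestampOffset = frag.start;
 *   }
 */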
const audioTrack=tracks.audio;let checkTimestampOffset=false;if(type==='audio'&&(audioTrack==null?void 0:audioTrack.container)==='audio/mpeg'){checkTimestampOffset=!this.lastMpegAudioChunk||chunkMeta.id===1||this.lastMpegAudioChunk.sn!==chunkMeta.sn;this.lastMpegAudioChunk=chunkMeta;}const fragStart=frag.start;const operation={execute:()=>{chunkStats.executeStart=self.performance.now();if(checkTimestampOffset){const sb=this.sourceBuffer[type];if(sb){const delta=fragStart-sb.timestampOffset;if(Math.abs(delta)>=0.1){this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn})`);sb.timestampOffset=fragStart;}}}this.appendExecutor(data,type);},onStart:()=>{// logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
},onComplete:()=>{// logger.debug(`[buffer-controller]: ${type} SourceBuffer updateend`);
const end=self.performance.now();chunkStats.executeEnd=chunkStats.end=end;if(fragBuffering.first===0){fragBuffering.first=end;}if(partBuffering&&partBuffering.first===0){partBuffering.first=end;}const{sourceBuffer}=this;const timeRanges={};for(const type in sourceBuffer){timeRanges[type]=BufferHelper.getBuffered(sourceBuffer[type]);}this.appendErrors[type]=0;if(type==='audio'||type==='video'){this.appendErrors.audiovideo=0;}else {this.appendErrors.audio=0;this.appendErrors.video=0;}this.hls.trigger(Events.BUFFER_APPENDED,{type,frag,part,chunkMeta,parent:frag.type,timeRanges});},onError:error=>{// in case any error occured while appending, put back segment in segments table
const event={type:ErrorTypes.MEDIA_ERROR,parent:frag.type,details:ErrorDetails.BUFFER_APPEND_ERROR,sourceBufferName:type,frag,part,chunkMeta,error,err:error,fatal:false};if(error.code===DOMException.QUOTA_EXCEEDED_ERR){// QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
// let's stop appending any segments, and report BUFFER_FULL_ERROR error
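/*
 * Summary of the branch below: a QuotaExceededError is re-labelled as
 * BUFFER_FULL_ERROR and left non-fatal (the buffer can be flushed and the append
 * retried), while any other append error increments this.appendErrors[type] and is
 * reported as BUFFER_APPEND_ERROR, becoming fatal once the count reaches
 * hls.config.appendErrorMaxRetry.
 */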
event.details=ErrorDetails.BUFFER_FULL_ERROR;}else {const appendErrorCount=++this.appendErrors[type];event.details=ErrorDetails.BUFFER_APPEND_ERROR;/* with UHD content, we could get loop of quota exceeded error until
browser is able to evict some data from sourcebuffer. Retrying can help recover.
*/this.warn(`Failed ${appendErrorCount}/${hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);if(appendErrorCount>=hls.config.appendErrorMaxRetry){event.fatal=true;}}hls.trigger(Events.ERROR,event);}};operationQueue.append(operation,type,!!this.pendingTracks[type]);}onBufferFlushing(event,data){const{operationQueue}=this;const flushOperation=type=>({execute:this.removeExecutor.bind(this,type,data.startOffset,data.endOffset),onStart:()=>{// logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
},onComplete:()=>{// logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
this.hls.trigger(Events.BUFFER_FLUSHED,{type});},onError:error=>{this.warn(`Failed to remove from ${type} SourceBuffer`,error);}});if(data.type){operationQueue.append(flushOperation(data.type),data.type);}else {this.getSourceBufferTypes().forEach(type=>{operationQueue.append(flushOperation(type),type);});}}onFragParsed(event,data){const{frag,part}=data;const buffersAppendedTo=[];const elementaryStreams=part?part.elementaryStreams:frag.elementaryStreams;if(elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO]){buffersAppendedTo.push('audiovideo');}else {if(elementaryStreams[ElementaryStreamTypes.AUDIO]){buffersAppendedTo.push('audio');}if(elementaryStreams[ElementaryStreamTypes.VIDEO]){buffersAppendedTo.push('video');}}const onUnblocked=()=>{const now=self.performance.now();frag.stats.buffering.end=now;if(part){part.stats.buffering.end=now;}const stats=part?part.stats:frag.stats;this.hls.trigger(Events.FRAG_BUFFERED,{frag,part,stats,id:frag.type});};if(buffersAppendedTo.length===0){this.warn(`Fragments must have at least one ElementaryStreamType set. type: ${frag.type} level: ${frag.level} sn: ${frag.sn}`);}this.blockBuffers(onUnblocked,buffersAppendedTo);}onFragChanged(event,data){this.trimBuffers();}// on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
// an undefined data.type will mark all buffers as EOS.
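/*
 * Rough flow of onBufferEos below: each matching SourceBuffer is flagged
 * `ending`/`ended`, and only when every tracked buffer reports `ended` is a blocking
 * operation queued, so that mediaSource.endOfStream() runs after all pending
 * appends/removes have settled and only while the MediaSource readyState is 'open'.
 */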
onBufferEos(event,data){const ended=this.getSourceBufferTypes().reduce((acc,type)=>{const sb=this.sourceBuffer[type];if(sb&&(!data.type||data.type===type)){sb.ending=true;if(!sb.ended){sb.ended=true;this.log(`${type} sourceBuffer now EOS`);}}return acc&&!!(!sb||sb.ended);},true);if(ended){this.log(`Queueing mediaSource.endOfStream()`);this.blockBuffers(()=>{this.getSourceBufferTypes().forEach(type=>{const sb=this.sourceBuffer[type];if(sb){sb.ending=false;}});const{mediaSource}=this;if(!mediaSource||mediaSource.readyState!=='open'){if(mediaSource){this.log(`Could not call mediaSource.endOfStream(). mediaSource.readyState: ${mediaSource.readyState}`);}return;}this.log(`Calling mediaSource.endOfStream()`);// Allow this to throw and be caught by the enqueueing function
mediaSource.endOfStream();});}}onLevelUpdated(event,{details}){if(!details.fragments.length){return;}this.details=details;if(this.getSourceBufferTypes().length){this.blockBuffers(this.updateMediaElementDuration.bind(this));}else {this.updateMediaElementDuration();}}trimBuffers(){const{hls,details,media}=this;if(!media||details===null){return;}const sourceBufferTypes=this.getSourceBufferTypes();if(!sourceBufferTypes.length){return;}const config=hls.config;const currentTime=media.currentTime;const targetDuration=details.levelTargetDuration;// Support for deprecated liveBackBufferLength
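/*
 * Back-buffer trimming below selects its window roughly like this (illustrative,
 * `config` is hls.config):
 *
 *   const backBufferLength = details.live && config.liveBackBufferLength !== null
 *     ? config.liveBackBufferLength   // deprecated option, still honoured for live
 *     : config.backBufferLength;
 *
 * The kept window is never smaller than one target duration; anything earlier is
 * flushed via a BUFFER_FLUSHING event.
 */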
const backBufferLength=details.live&&config.liveBackBufferLength!==null?config.liveBackBufferLength:config.backBufferLength;if(isFiniteNumber(backBufferLength)&&backBufferLength>0){const maxBackBufferLength=Math.max(backBufferLength,targetDuration);const targetBackBufferPosition=Math.floor(currentTime/targetDuration)*targetDuration-maxBackBufferLength;this.flushBackBuffer(currentTime,targetDuration,targetBackBufferPosition);}if(isFiniteNumber(config.frontBufferFlushThreshold)&&config.frontBufferFlushThreshold>0){const frontBufferLength=Math.max(config.maxBufferLength,config.frontBufferFlushThreshold);const maxFrontBufferLength=Math.max(frontBufferLength,targetDuration);const targetFrontBufferPosition=Math.floor(currentTime/targetDuration)*targetDuration+maxFrontBufferLength;this.flushFrontBuffer(currentTime,targetDuration,targetFrontBufferPosition);}}flushBackBuffer(currentTime,targetDuration,targetBackBufferPosition){const{details,sourceBuffer}=this;const sourceBufferTypes=this.getSourceBufferTypes();sourceBufferTypes.forEach(type=>{const sb=sourceBuffer[type];if(sb){const buffered=BufferHelper.getBuffered(sb);// when target buffer start exceeds actual buffer start
if(buffered.length>0&&targetBackBufferPosition>buffered.start(0)){this.hls.trigger(Events.BACK_BUFFER_REACHED,{bufferEnd:targetBackBufferPosition});// Support for deprecated event:
if(details!=null&&details.live){this.hls.trigger(Events.LIVE_BACK_BUFFER_REACHED,{bufferEnd:targetBackBufferPosition});}else if(sb.ended&&buffered.end(buffered.length-1)-currentTime<targetDuration*2){this.log(`Cannot flush ${type} back buffer while SourceBuffer is in ended state`);return;}this.hls.trigger(Events.BUFFER_FLUSHING,{startOffset:0,endOffset:targetBackBufferPosition,type});}}});}flushFrontBuffer(currentTime,targetDuration,targetFrontBufferPosition){const{sourceBuffer}=this;const sourceBufferTypes=this.getSourceBufferTypes();sourceBufferTypes.forEach(type=>{const sb=sourceBuffer[type];if(sb){const buffered=BufferHelper.getBuffered(sb);const numBufferedRanges=buffered.length;// The buffer is either empty or contiguous
if(numBufferedRanges<2){return;}const bufferStart=buffered.start(numBufferedRanges-1);const bufferEnd=buffered.end(numBufferedRanges-1);// No flush if we can tolerate the current buffer length or the current buffer range we would flush is contiguous with current position
if(targetFrontBufferPosition>bufferStart||currentTime>=bufferStart&¤tTime<=bufferEnd){return;}else if(sb.ended&¤tTime-bufferEnd<2*targetDuration){this.log(`Cannot flush ${type} front buffer while SourceBuffer is in ended state`);return;}this.hls.trigger(Events.BUFFER_FLUSHING,{startOffset:bufferStart,endOffset:Infinity,type});}});}/**
* Update Media Source duration to current level duration or override to Infinity if configuration parameter
* `liveDurationInfinity` is set to `true`
* More details: https://github.com/video-dev/hls.js/issues/355
*/updateMediaElementDuration(){if(!this.details||!this.media||!this.mediaSource||this.mediaSource.readyState!=='open'){return;}const{details,hls,media,mediaSource}=this;const levelDuration=details.fragments[0].start+details.totalduration;const mediaDuration=media.duration;const msDuration=isFiniteNumber(mediaSource.duration)?mediaSource.duration:0;if(details.live&&hls.config.liveDurationInfinity){// Override duration to Infinity
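/*
 * For live playlists with hls.config.liveDurationInfinity enabled, the duration is
 * pinned to Infinity and the seekable window is advertised through
 * mediaSource.setLiveSeekableRange(start, end) (see updateSeekableRange below).
 * Otherwise the Media Source duration is only ever increased, never reduced, so a
 * quality switch does not flush media that is already buffered.
 */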
mediaSource.duration=Infinity;this.updateSeekableRange(details);}else if(levelDuration>msDuration&&levelDuration>mediaDuration||!isFiniteNumber(mediaDuration)){// levelDuration was the last value we set.
// not using mediaSource.duration as the browser may tweak this value
// only update Media Source duration if its value increase, this is to avoid
// flushing already buffered portion when switching between quality level
this.log(`Updating Media Source duration to ${levelDuration.toFixed(3)}`);mediaSource.duration=levelDuration;}}updateSeekableRange(levelDetails){const mediaSource=this.mediaSource;const fragments=levelDetails.fragments;const len=fragments.length;if(len&&levelDetails.live&&mediaSource!=null&&mediaSource.setLiveSeekableRange){const start=Math.max(0,fragments[0].start);const end=Math.max(start,start+levelDetails.totalduration);this.log(`Media Source duration is set to ${mediaSource.duration}. Setting seekable range to ${start}-${end}.`);mediaSource.setLiveSeekableRange(start,end);}}checkPendingTracks(){const{bufferCodecEventsExpected,operationQueue,pendingTracks}=this;// Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
// This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
// data has been appended to existing ones.
// 2 tracks is the max (one for audio, one for video). If we've reached this max go ahead and create the buffers.
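/*
 * The condition used below, in readable form:
 *
 *   if (pendingTracksCount &&
 *       (!bufferCodecEventsExpected ||      // every expected BUFFER_CODECS event arrived
 *        pendingTracksCount === 2 ||        // both audio and video tracks are known
 *        'audiovideo' in pendingTracks)) {  // single muxed track
 *     this.createSourceBuffers(pendingTracks);
 *   }
 */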
const pendingTracksCount=Object.keys(pendingTracks).length;if(pendingTracksCount&&(!bufferCodecEventsExpected||pendingTracksCount===2||'audiovideo'in pendingTracks)){// ok, let's create them now !
this.createSourceBuffers(pendingTracks);this.pendingTracks={};// append any pending segments now !
const buffers=this.getSourceBufferTypes();if(buffers.length){this.hls.trigger(Events.BUFFER_CREATED,{tracks:this.tracks});buffers.forEach(type=>{operationQueue.executeNext(type);});}else {const error=new Error('could not create source buffer for media codec(s)');this.hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.BUFFER_INCOMPATIBLE_CODECS_ERROR,fatal:true,error,reason:error.message});}}}createSourceBuffers(tracks){const{sourceBuffer,mediaSource}=this;if(!mediaSource){throw Error('createSourceBuffers called when mediaSource was null');}for(const trackName in tracks){if(!sourceBuffer[trackName]){var _track$levelCodec;const track=tracks[trackName];if(!track){throw Error(`source buffer exists for track ${trackName}, however track does not`);}// use levelCodec as first priority unless it contains multiple comma-separated codec values
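/*
 * Codec selection performed below, in readable form (illustrative):
 *
 *   let codec = track.levelCodec?.indexOf(',') === -1 ? track.levelCodec : track.codec;
 *   if (trackName.slice(0, 5) === 'audio') codec = getCodecCompatibleName(codec, this.appendSource);
 *   const mimeType = `${track.container};codecs=${codec}`;
 *   sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
 */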
let codec=((_track$levelCodec=track.levelCodec)==null?void 0:_track$levelCodec.indexOf(','))===-1?track.levelCodec:track.codec;if(codec){if(trackName.slice(0,5)==='audio'){codec=getCodecCompatibleName(codec,this.appendSource);}}const mimeType=`${track.container};codecs=${codec}`;this.log(`creating sourceBuffer(${mimeType})`);try{const sb=sourceBuffer[trackName]=mediaSource.addSourceBuffer(mimeType);const sbName=trackName;this.addBufferListener(sbName,'updatestart',this._onSBUpdateStart);this.addBufferListener(sbName,'updateend',this._onSBUpdateEnd);this.addBufferListener(sbName,'error',this._onSBUpdateError);// ManagedSourceBuffer bufferedchange event
if(this.appendSource){this.addBufferListener(sbName,'bufferedchange',(type,event)=>{// If media was ejected check for a change. Added ranges are redundant with changes on 'updateend' event.
const removedRanges=event.removedRanges;if(removedRanges!=null&&removedRanges.length){this.hls.trigger(Events.BUFFER_FLUSHED,{type:trackName});}});}this.tracks[trackName]={buffer:sb,codec:codec,container:track.container,levelCodec:track.levelCodec,metadata:track.metadata,id:track.id};}catch(err){this.error(`error while trying to add sourceBuffer: ${err.message}`);this.hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.BUFFER_ADD_CODEC_ERROR,fatal:false,error:err,sourceBufferName:trackName,mimeType:mimeType});}}}}get mediaSrc(){var _this$media;const media=((_this$media=this.media)==null?void 0:_this$media.firstChild)||this.media;return media==null?void 0:media.src;}_onSBUpdateStart(type){const{operationQueue}=this;const operation=operationQueue.current(type);operation.onStart();}_onSBUpdateEnd(type){var _this$mediaSource2;if(((_this$mediaSource2=this.mediaSource)==null?void 0:_this$mediaSource2.readyState)==='closed'){this.resetBuffer(type);return;}const{operationQueue}=this;const operation=operationQueue.current(type);operation.onComplete();operationQueue.shiftAndExecuteNext(type);}_onSBUpdateError(type,event){var _this$mediaSource3;const error=new Error(`${type} SourceBuffer error. MediaSource readyState: ${(_this$mediaSource3=this.mediaSource)==null?void 0:_this$mediaSource3.readyState}`);this.error(`${error}`,event);// according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
// SourceBuffer errors are not necessarily fatal; if so, the HTMLMediaElement will fire an error event
this.hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.BUFFER_APPENDING_ERROR,sourceBufferName:type,error,fatal:false});// updateend is always fired after error, so we'll allow that to shift the current operation off of the queue
const operation=this.operationQueue.current(type);if(operation){operation.onError(error);}}// This method must result in an updateend event; if remove is not called, _onSBUpdateEnd must be called manually
removeExecutor(type,startOffset,endOffset){const{media,mediaSource,operationQueue,sourceBuffer}=this;const sb=sourceBuffer[type];if(!media||!mediaSource||!sb){this.warn(`Attempting to remove from the ${type} SourceBuffer, but it does not exist`);operationQueue.shiftAndExecuteNext(type);return;}const mediaDuration=isFiniteNumber(media.duration)?media.duration:Infinity;const msDuration=isFiniteNumber(mediaSource.duration)?mediaSource.duration:Infinity;const removeStart=Math.max(0,startOffset);const removeEnd=Math.min(endOffset,mediaDuration,msDuration);if(removeEnd>removeStart&&(!sb.ending||sb.ended)){sb.ended=false;this.log(`Removing [${removeStart},${removeEnd}] from the ${type} SourceBuffer`);sb.remove(removeStart,removeEnd);}else {// Cycle the queue
operationQueue.shiftAndExecuteNext(type);}}// This method must result in an updateend event; if append is not called, _onSBUpdateEnd must be called manually
appendExecutor(data,type){const sb=this.sourceBuffer[type];if(!sb){if(!this.pendingTracks[type]){throw new Error(`Attempting to append to the ${type} SourceBuffer, but it does not exist`);}return;}sb.ended=false;sb.appendBuffer(data);}// Enqueues an operation to each SourceBuffer queue which, upon execution, resolves a promise. When all promises
// resolve, the onUnblocked function is executed. Functions calling this method do not need to unblock the queue
// upon completion, since we already do it here
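/*
 * Rough shape of the blocking mechanism described above (sb is this.sourceBuffer[type]):
 *
 *   const blockers = buffers.map(type => operationQueue.appendBlocker(type));
 *   Promise.all(blockers).then(() => {
 *     onUnblocked();
 *     buffers.forEach(type => {
 *       if (!sb?.updating) operationQueue.shiftAndExecuteNext(type);
 *     });
 *   });
 */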
blockBuffers(onUnblocked,buffers=this.getSourceBufferTypes()){if(!buffers.length){this.log('Blocking operation requested, but no SourceBuffers exist');Promise.resolve().then(onUnblocked);return;}const{operationQueue}=this;// logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
const blockingOperations=buffers.map(type=>operationQueue.appendBlocker(type));Promise.all(blockingOperations).then(()=>{// logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
onUnblocked();buffers.forEach(type=>{const sb=this.sourceBuffer[type];// Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
// true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
// While this is a workaround, it's probably useful to have around
if(!(sb!=null&&sb.updating)){operationQueue.shiftAndExecuteNext(type);}});});}getSourceBufferTypes(){return Object.keys(this.sourceBuffer);}addBufferListener(type,event,fn){const buffer=this.sourceBuffer[type];if(!buffer){return;}const listener=fn.bind(this,type);this.listeners[type].push({event,listener});buffer.addEventListener(event,listener);}removeBufferListeners(type){const buffer=this.sourceBuffer[type];if(!buffer){return;}this.listeners[type].forEach(l=>{buffer.removeEventListener(l.event,l.listener);});}}function removeSourceChildren(node){const sourceChildren=node.querySelectorAll('source');[].slice.call(sourceChildren).forEach(source=>{node.removeChild(source);});}function addSource(media,url){const source=self.document.createElement('source');source.type='video/mp4';source.src=url;media.appendChild(source);}/**
*
* This code was ported from the dash.js project at:
* https://github.com/Dash-Industry-Forum/dash.js/blob/development/externals/cea608-parser.js
* https://github.com/Dash-Industry-Forum/dash.js/commit/8269b26a761e0853bb21d78780ed945144ecdd4d#diff-71bc295a2d6b6b7093a1d3290d53a4b2
*
* The original copyright appears below:
*
* The copyright in this software is being made available under the BSD License,
* included below. This software may be subject to other third party and contributor
* rights, including patent rights, and no such rights are granted under this license.
*
* Copyright (c) 2015-2016, DASH Industry Forum.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* 2. Neither the name of Dash Industry Forum nor the names of its
* contributors may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/ /**
* Exceptions from regular ASCII. CodePoints are mapped to UTF-16 codes
*/const specialCea608CharsCodes={0x2a:0xe1,// lowercase a, acute accent
0x5c:0xe9,// lowercase e, acute accent
0x5e:0xed,// lowercase i, acute accent
0x5f:0xf3,// lowercase o, acute accent
0x60:0xfa,// lowercase u, acute accent
0x7b:0xe7,// lowercase c with cedilla
0x7c:0xf7,// division symbol
0x7d:0xd1,// uppercase N tilde
0x7e:0xf1,// lowercase n tilde
0x7f:0x2588,// Full block
// THIS BLOCK INCLUDES THE 16 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS
// THAT COME FROM HI BYTE=0x11 AND LOW BETWEEN 0x30 AND 0x3F
// THIS MEANS THAT \x50 MUST BE ADDED TO THE VALUES
0x80:0xae,// Registered symbol (R)
0x81:0xb0,// degree sign
0x82:0xbd,// 1/2 symbol
0x83:0xbf,// Inverted (open) question mark
0x84:0x2122,// Trademark symbol (TM)
0x85:0xa2,// Cents symbol
0x86:0xa3,// Pounds sterling
0x87:0x266a,// Music 8'th note
0x88:0xe0,// lowercase a, grave accent
0x89:0x20,// transparent space (regular)
0x8a:0xe8,// lowercase e, grave accent
0x8b:0xe2,// lowercase a, circumflex accent
0x8c:0xea,// lowercase e, circumflex accent
0x8d:0xee,// lowercase i, circumflex accent
0x8e:0xf4,// lowercase o, circumflex accent
0x8f:0xfb,// lowercase u, circumflex accent
// THIS BLOCK INCLUDES THE 32 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS
// THAT COME FROM HI BYTE=0x12 AND LOW BETWEEN 0x20 AND 0x3F
0x90:0xc1,// capital letter A with acute
0x91:0xc9,// capital letter E with acute
0x92:0xd3,// capital letter O with acute
0x93:0xda,// capital letter U with acute
0x94:0xdc,// capital letter U with diaeresis
0x95:0xfc,// lowercase letter U with diaeresis
0x96:0x2018,// opening single quote
0x97:0xa1,// inverted exclamation mark
0x98:0x2a,// asterisk
0x99:0x2019,// closing single quote
0x9a:0x2501,// box drawings heavy horizontal
0x9b:0xa9,// copyright sign
0x9c:0x2120,// Service mark
0x9d:0x2022,// (round) bullet
0x9e:0x201c,// Left double quotation mark
0x9f:0x201d,// Right double quotation mark
0xa0:0xc0,// uppercase A, grave accent
0xa1:0xc2,// uppercase A, circumflex
0xa2:0xc7,// uppercase C with cedilla
0xa3:0xc8,// uppercase E, grave accent
0xa4:0xca,// uppercase E, circumflex
0xa5:0xcb,// capital letter E with diaeresis
0xa6:0xeb,// lowercase letter e with diaeresis
0xa7:0xce,// uppercase I, circumflex
0xa8:0xcf,// uppercase I, with diaeresis
0xa9:0xef,// lowercase i, with diaeresis
0xaa:0xd4,// uppercase O, circumflex
0xab:0xd9,// uppercase U, grave accent
0xac:0xf9,// lowercase u, grave accent
0xad:0xdb,// uppercase U, circumflex
0xae:0xab,// left-pointing double angle quotation mark
0xaf:0xbb,// right-pointing double angle quotation mark
// THIS BLOCK INCLUDES THE 32 EXTENDED (TWO-BYTE) LINE 21 CHARACTERS
// THAT COME FROM HI BYTE=0x13 AND LOW BETWEEN 0x20 AND 0x3F
0xb0:0xc3,// Uppercase A, tilde
0xb1:0xe3,// Lowercase a, tilde
0xb2:0xcd,// Uppercase I, acute accent
0xb3:0xcc,// Uppercase I, grave accent
0xb4:0xec,// Lowercase i, grave accent
0xb5:0xd2,// Uppercase O, grave accent
0xb6:0xf2,// Lowercase o, grave accent
0xb7:0xd5,// Uppercase O, tilde
0xb8:0xf5,// Lowercase o, tilde
0xb9:0x7b,// Open curly brace
0xba:0x7d,// Closing curly brace
0xbb:0x5c,// Backslash
0xbc:0x5e,// Caret
0xbd:0x5f,// Underscore
0xbe:0x7c,// Pipe (vertical line)
0xbf:0x223c,// Tilde operator
0xc0:0xc4,// Uppercase A, umlaut
0xc1:0xe4,// Lowercase A, umlaut
0xc2:0xd6,// Uppercase O, umlaut
0xc3:0xf6,// Lowercase o, umlaut
0xc4:0xdf,// Esszett (sharp S)
0xc5:0xa5,// Yen symbol
0xc6:0xa4,// Generic currency sign
0xc7:0x2503,// Box drawings heavy vertical
0xc8:0xc5,// Uppercase A, ring
0xc9:0xe5,// Lowercase A, ring
0xca:0xd8,// Uppercase O, stroke
0xcb:0xf8,// Lowercase o, stroke
0xcc:0x250f,// Box drawings heavy down and right
0xcd:0x2513,// Box drawings heavy down and left
0xce:0x2517,// Box drawings heavy up and right
0xcf:0x251b// Box drawings heavy up and left
};/**
* Utils
*/const getCharForByte=byte=>String.fromCharCode(specialCea608CharsCodes[byte]||byte);const NR_ROWS=15;const NR_COLS=100;// Tables to look up row from PAC data
const rowsLowCh1={0x11:1,0x12:3,0x15:5,0x16:7,0x17:9,0x10:11,0x13:12,0x14:14};const rowsHighCh1={0x11:2,0x12:4,0x15:6,0x16:8,0x17:10,0x13:13,0x14:15};const rowsLowCh2={0x19:1,0x1a:3,0x1d:5,0x1e:7,0x1f:9,0x18:11,0x1b:12,0x1c:14};const rowsHighCh2={0x19:2,0x1a:4,0x1d:6,0x1e:8,0x1f:10,0x1b:13,0x1c:15};const backgroundColors=['white','green','blue','cyan','red','yellow','magenta','black','transparent'];class CaptionsLogger{constructor(){this.time=null;this.verboseLevel=0;}log(severity,msg){if(this.verboseLevel>=severity){const m=typeof msg==='function'?msg():msg;logger.log(`${this.time} [${severity}] ${m}`);}}}const numArrayToHexArray=function numArrayToHexArray(numArray){const hexArray=[];for(let j=0;j<numArray.length;j++){hexArray.push(numArray[j].toString(16));}return hexArray;};class PenState{constructor(){this.foreground='white';this.underline=false;this.italics=false;this.background='black';this.flash=false;}reset(){this.foreground='white';this.underline=false;this.italics=false;this.background='black';this.flash=false;}setStyles(styles){const attribs=['foreground','underline','italics','background','flash'];for(let i=0;i<attribs.length;i++){const style=attribs[i];if(styles.hasOwnProperty(style)){this[style]=styles[style];}}}isDefault(){return this.foreground==='white'&&!this.underline&&!this.italics&&this.background==='black'&&!this.flash;}equals(other){return this.foreground===other.foreground&&this.underline===other.underline&&this.italics===other.italics&&this.background===other.background&&this.flash===other.flash;}copy(newPenState){this.foreground=newPenState.foreground;this.underline=newPenState.underline;this.italics=newPenState.italics;this.background=newPenState.background;this.flash=newPenState.flash;}toString(){return 'color='+this.foreground+', underline='+this.underline+', italics='+this.italics+', background='+this.background+', flash='+this.flash;}}/**
* Unicode character with styling and background.
* @constructor
*/class StyledUnicodeChar{constructor(){this.uchar=' ';this.penState=new PenState();}reset(){this.uchar=' ';this.penState.reset();}setChar(uchar,newPenState){this.uchar=uchar;this.penState.copy(newPenState);}setPenState(newPenState){this.penState.copy(newPenState);}equals(other){return this.uchar===other.uchar&&this.penState.equals(other.penState);}copy(newChar){this.uchar=newChar.uchar;this.penState.copy(newChar.penState);}isEmpty(){return this.uchar===' '&&this.penState.isDefault();}}/**
* CEA-608 row consisting of NR_COLS instances of StyledUnicodeChar.
* @constructor
*/class Row{constructor(logger){this.chars=[];this.pos=0;this.currPenState=new PenState();this.cueStartTime=null;this.logger=void 0;for(let i=0;i<NR_COLS;i++){this.chars.push(new StyledUnicodeChar());}this.logger=logger;}equals(other){for(let i=0;i<NR_COLS;i++){if(!this.chars[i].equals(other.chars[i])){return false;}}return true;}copy(other){for(let i=0;i<NR_COLS;i++){this.chars[i].copy(other.chars[i]);}}isEmpty(){let empty=true;for(let i=0;i<NR_COLS;i++){if(!this.chars[i].isEmpty()){empty=false;break;}}return empty;}/**
* Set the cursor to a valid column.
*/setCursor(absPos){if(this.pos!==absPos){this.pos=absPos;}if(this.pos<0){this.logger.log(3,'Negative cursor position '+this.pos);this.pos=0;}else if(this.pos>NR_COLS){this.logger.log(3,'Too large cursor position '+this.pos);this.pos=NR_COLS;}}/**
* Move the cursor relative to current position.
*/moveCursor(relPos){const newPos=this.pos+relPos;if(relPos>1){for(let i=this.pos+1;i<newPos+1;i++){this.chars[i].setPenState(this.currPenState);}}this.setCursor(newPos);}/**
* Backspace, move one step back and clear character.
*/backSpace(){this.moveCursor(-1);this.chars[this.pos].setChar(' ',this.currPenState);}insertChar(byte){if(byte>=0x90){// Extended char
this.backSpace();}const char=getCharForByte(byte);if(this.pos>=NR_COLS){this.logger.log(0,()=>'Cannot insert '+byte.toString(16)+' ('+char+') at position '+this.pos+'. Skipping it!');return;}this.chars[this.pos].setChar(char,this.currPenState);this.moveCursor(1);}clearFromPos(startPos){let i;for(i=startPos;i<NR_COLS;i++){this.chars[i].reset();}}clear(){this.clearFromPos(0);this.pos=0;this.currPenState.reset();}clearToEndOfRow(){this.clearFromPos(this.pos);}getTextString(){const chars=[];let empty=true;for(let i=0;i<NR_COLS;i++){const char=this.chars[i].uchar;if(char!==' '){empty=false;}chars.push(char);}if(empty){return '';}else {return chars.join('');}}setPenStyles(styles){this.currPenState.setStyles(styles);const currChar=this.chars[this.pos];currChar.setPenState(this.currPenState);}}/**
* Keep a CEA-608 screen of 32x15 styled characters
* @constructor
*/class CaptionScreen{constructor(logger){this.rows=[];this.currRow=NR_ROWS-1;this.nrRollUpRows=null;this.lastOutputScreen=null;this.logger=void 0;for(let i=0;i<NR_ROWS;i++){this.rows.push(new Row(logger));}this.logger=logger;}reset(){for(let i=0;i<NR_ROWS;i++){this.rows[i].clear();}this.currRow=NR_ROWS-1;}equals(other){let equal=true;for(let i=0;i<NR_ROWS;i++){if(!this.rows[i].equals(other.rows[i])){equal=false;break;}}return equal;}copy(other){for(let i=0;i<NR_ROWS;i++){this.rows[i].copy(other.rows[i]);}}isEmpty(){let empty=true;for(let i=0;i<NR_ROWS;i++){if(!this.rows[i].isEmpty()){empty=false;break;}}return empty;}backSpace(){const row=this.rows[this.currRow];row.backSpace();}clearToEndOfRow(){const row=this.rows[this.currRow];row.clearToEndOfRow();}/**
* Insert a character (without styling) in the current row.
*/insertChar(char){const row=this.rows[this.currRow];row.insertChar(char);}setPen(styles){const row=this.rows[this.currRow];row.setPenStyles(styles);}moveCursor(relPos){const row=this.rows[this.currRow];row.moveCursor(relPos);}setCursor(absPos){this.logger.log(2,'setCursor: '+absPos);const row=this.rows[this.currRow];row.setCursor(absPos);}setPAC(pacData){this.logger.log(2,()=>'pacData = '+JSON.stringify(pacData));let newRow=pacData.row-1;if(this.nrRollUpRows&&newRow<this.nrRollUpRows-1){newRow=this.nrRollUpRows-1;}// Make sure this only affects Roll-up Captions by checking this.nrRollUpRows
if(this.nrRollUpRows&&this.currRow!==newRow){// clear all rows first
for(let i=0;i<NR_ROWS;i++){this.rows[i].clear();}// Copy this.nrRollUpRows rows from lastOutputScreen and place it in the newRow location
// topRowIndex - the start of rows to copy (inclusive index)
const topRowIndex=this.currRow+1-this.nrRollUpRows;// We only copy if the last position was already shown.
// We use the cueStartTime value to check this.
const lastOutputScreen=this.lastOutputScreen;if(lastOutputScreen){const prevLineTime=lastOutputScreen.rows[topRowIndex].cueStartTime;const time=this.logger.time;if(prevLineTime!==null&&time!==null&&prevLineTime<time){for(let i=0;i<this.nrRollUpRows;i++){this.rows[newRow-this.nrRollUpRows+i+1].copy(lastOutputScreen.rows[topRowIndex+i]);}}}}this.currRow=newRow;const row=this.rows[this.currRow];if(pacData.indent!==null){const indent=pacData.indent;const prevPos=Math.max(indent-1,0);row.setCursor(pacData.indent);pacData.color=row.chars[prevPos].penState.foreground;}const styles={foreground:pacData.color,underline:pacData.underline,italics:pacData.italics,background:'black',flash:false};this.setPen(styles);}/**
* Set background/extra foreground, but first do back_space, and then insert space (backwards compatibility).
*/setBkgData(bkgData){this.logger.log(2,()=>'bkgData = '+JSON.stringify(bkgData));this.backSpace();this.setPen(bkgData);this.insertChar(0x20);// Space
}setRollUpRows(nrRows){this.nrRollUpRows=nrRows;}rollUp(){if(this.nrRollUpRows===null){this.logger.log(3,'roll_up but nrRollUpRows not set yet');return;// Not properly setup
}this.logger.log(1,()=>this.getDisplayText());const topRowIndex=this.currRow+1-this.nrRollUpRows;const topRow=this.rows.splice(topRowIndex,1)[0];topRow.clear();this.rows.splice(this.currRow,0,topRow);this.logger.log(2,'Rolling up');// this.logger.log(VerboseLevel.TEXT, this.get_display_text())
}/**
* Get all non-empty rows as unicode text.
*/getDisplayText(asOneRow){asOneRow=asOneRow||false;const displayText=[];let text='';let rowNr=-1;for(let i=0;i<NR_ROWS;i++){const rowText=this.rows[i].getTextString();if(rowText){rowNr=i+1;if(asOneRow){displayText.push('Row '+rowNr+": '"+rowText+"'");}else {displayText.push(rowText.trim());}}}if(displayText.length>0){if(asOneRow){text='['+displayText.join(' | ')+']';}else {text=displayText.join('\n');}}return text;}getTextAndFormat(){return this.rows;}}// var modes = ['MODE_ROLL-UP', 'MODE_POP-ON', 'MODE_PAINT-ON', 'MODE_TEXT'];
class Cea608Channel{constructor(channelNumber,outputFilter,logger){this.chNr=void 0;this.outputFilter=void 0;this.mode=void 0;this.verbose=void 0;this.displayedMemory=void 0;this.nonDisplayedMemory=void 0;this.lastOutputScreen=void 0;this.currRollUpRow=void 0;this.writeScreen=void 0;this.cueStartTime=void 0;this.logger=void 0;this.chNr=channelNumber;this.outputFilter=outputFilter;this.mode=null;this.verbose=0;this.displayedMemory=new CaptionScreen(logger);this.nonDisplayedMemory=new CaptionScreen(logger);this.lastOutputScreen=new CaptionScreen(logger);this.currRollUpRow=this.displayedMemory.rows[NR_ROWS-1];this.writeScreen=this.displayedMemory;this.mode=null;this.cueStartTime=null;// Keeps track of where a cue started.
this.logger=logger;}reset(){this.mode=null;this.displayedMemory.reset();this.nonDisplayedMemory.reset();this.lastOutputScreen.reset();this.outputFilter.reset();this.currRollUpRow=this.displayedMemory.rows[NR_ROWS-1];this.writeScreen=this.displayedMemory;this.mode=null;this.cueStartTime=null;}getHandler(){return this.outputFilter;}setHandler(newHandler){this.outputFilter=newHandler;}setPAC(pacData){this.writeScreen.setPAC(pacData);}setBkgData(bkgData){this.writeScreen.setBkgData(bkgData);}setMode(newMode){if(newMode===this.mode){return;}this.mode=newMode;this.logger.log(2,()=>'MODE='+newMode);if(this.mode==='MODE_POP-ON'){this.writeScreen=this.nonDisplayedMemory;}else {this.writeScreen=this.displayedMemory;this.writeScreen.reset();}if(this.mode!=='MODE_ROLL-UP'){this.displayedMemory.nrRollUpRows=null;this.nonDisplayedMemory.nrRollUpRows=null;}this.mode=newMode;}insertChars(chars){for(let i=0;i<chars.length;i++){this.writeScreen.insertChar(chars[i]);}const screen=this.writeScreen===this.displayedMemory?'DISP':'NON_DISP';this.logger.log(2,()=>screen+': '+this.writeScreen.getDisplayText(true));if(this.mode==='MODE_PAINT-ON'||this.mode==='MODE_ROLL-UP'){this.logger.log(1,()=>'DISPLAYED: '+this.displayedMemory.getDisplayText(true));this.outputDataUpdate();}}ccRCL(){// Resume Caption Loading (switch mode to Pop On)
this.logger.log(2,'RCL - Resume Caption Loading');this.setMode('MODE_POP-ON');}ccBS(){// BackSpace
this.logger.log(2,'BS - BackSpace');if(this.mode==='MODE_TEXT'){return;}this.writeScreen.backSpace();if(this.writeScreen===this.displayedMemory){this.outputDataUpdate();}}ccAOF(){// Reserved (formerly Alarm Off)
}ccAON(){// Reserved (formerly Alarm On)
}ccDER(){// Delete to End of Row
this.logger.log(2,'DER- Delete to End of Row');this.writeScreen.clearToEndOfRow();this.outputDataUpdate();}ccRU(nrRows){// Roll-Up Captions-2,3,or 4 Rows
this.logger.log(2,'RU('+nrRows+') - Roll Up');this.writeScreen=this.displayedMemory;this.setMode('MODE_ROLL-UP');this.writeScreen.setRollUpRows(nrRows);}ccFON(){// Flash On
this.logger.log(2,'FON - Flash On');this.writeScreen.setPen({flash:true});}ccRDC(){// Resume Direct Captioning (switch mode to PaintOn)
this.logger.log(2,'RDC - Resume Direct Captioning');this.setMode('MODE_PAINT-ON');}ccTR(){// Text Restart in text mode (not supported, however)
this.logger.log(2,'TR');this.setMode('MODE_TEXT');}ccRTD(){// Resume Text Display in Text mode (not supported, however)
this.logger.log(2,'RTD');this.setMode('MODE_TEXT');}ccEDM(){// Erase Displayed Memory
this.logger.log(2,'EDM - Erase Displayed Memory');this.displayedMemory.reset();this.outputDataUpdate(true);}ccCR(){// Carriage Return
this.logger.log(2,'CR - Carriage Return');this.writeScreen.rollUp();this.outputDataUpdate(true);}ccENM(){// Erase Non-Displayed Memory
this.logger.log(2,'ENM - Erase Non-displayed Memory');this.nonDisplayedMemory.reset();}ccEOC(){// End of Caption (Flip Memories)
this.logger.log(2,'EOC - End Of Caption');if(this.mode==='MODE_POP-ON'){const tmp=this.displayedMemory;this.displayedMemory=this.nonDisplayedMemory;this.nonDisplayedMemory=tmp;this.writeScreen=this.nonDisplayedMemory;this.logger.log(1,()=>'DISP: '+this.displayedMemory.getDisplayText());}this.outputDataUpdate(true);}ccTO(nrCols){// Tab Offset 1,2, or 3 columns
this.logger.log(2,'TO('+nrCols+') - Tab Offset');this.writeScreen.moveCursor(nrCols);}ccMIDROW(secondByte){// Parse MIDROW command
const styles={flash:false};styles.underline=secondByte%2===1;styles.italics=secondByte>=0x2e;if(!styles.italics){const colorIndex=Math.floor(secondByte/2)-0x10;const colors=['white','green','blue','cyan','red','yellow','magenta'];styles.foreground=colors[colorIndex];}else {styles.foreground='white';}this.logger.log(2,'MIDROW: '+JSON.stringify(styles));this.writeScreen.setPen(styles);}outputDataUpdate(dispatch=false){const time=this.logger.time;if(time===null){return;}if(this.outputFilter){if(this.cueStartTime===null&&!this.displayedMemory.isEmpty()){// Start of a new cue
this.cueStartTime=time;}else {if(!this.displayedMemory.equals(this.lastOutputScreen)){this.outputFilter.newCue(this.cueStartTime,time,this.lastOutputScreen);if(dispatch&&this.outputFilter.dispatchCue){this.outputFilter.dispatchCue();}this.cueStartTime=this.displayedMemory.isEmpty()?null:time;}}this.lastOutputScreen.copy(this.displayedMemory);}}cueSplitAtTime(t){if(this.outputFilter){if(!this.displayedMemory.isEmpty()){if(this.outputFilter.newCue){this.outputFilter.newCue(this.cueStartTime,t,this.displayedMemory);}this.cueStartTime=t;}}}}// Will be 1 or 2 when parsing captions
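/*
 * Typical driving pattern for the CEA-608 parser defined below (the two output
 * handlers are OutputFilter-like objects implementing newCue(start, end, screen)
 * and, optionally, dispatchCue(); ccBytes is a flat list of byte pairs extracted
 * from the caption data):
 *
 *   const parser = new Cea608Parser(1, out1, out2);
 *   parser.addData(pts, ccBytes);
 */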
class Cea608Parser{constructor(field,out1,out2){this.channels=void 0;this.currentChannel=0;this.cmdHistory=createCmdHistory();this.logger=void 0;const logger=this.logger=new CaptionsLogger();this.channels=[null,new Cea608Channel(field,out1,logger),new Cea608Channel(field+1,out2,logger)];}getHandler(channel){return this.channels[channel].getHandler();}setHandler(channel,newHandler){this.channels[channel].setHandler(newHandler);}/**
* Add data for time t in forms of list of bytes (unsigned ints). The bytes are treated as pairs.
*/addData(time,byteList){this.logger.time=time;for(let i=0;i<byteList.length;i+=2){const a=byteList[i]&0x7f;const b=byteList[i+1]&0x7f;let cmdFound=false;let charsFound=null;if(a===0&&b===0){continue;}else {this.logger.log(3,()=>'['+numArrayToHexArray([byteList[i],byteList[i+1]])+'] -> ('+numArrayToHexArray([a,b])+')');}const cmdHistory=this.cmdHistory;const isControlCode=a>=0x10&&a<=0x1f;if(isControlCode){// Skip redundant control codes
if(hasCmdRepeated(a,b,cmdHistory)){setLastCmd(null,null,cmdHistory);this.logger.log(3,()=>'Repeated command ('+numArrayToHexArray([a,b])+') is dropped');continue;}setLastCmd(a,b,this.cmdHistory);cmdFound=this.parseCmd(a,b);if(!cmdFound){cmdFound=this.parseMidrow(a,b);}if(!cmdFound){cmdFound=this.parsePAC(a,b);}if(!cmdFound){cmdFound=this.parseBackgroundAttributes(a,b);}}else {setLastCmd(null,null,cmdHistory);}if(!cmdFound){charsFound=this.parseChars(a,b);if(charsFound){const currChNr=this.currentChannel;if(currChNr&&currChNr>0){const channel=this.channels[currChNr];channel.insertChars(charsFound);}else {this.logger.log(2,'No channel found yet. TEXT-MODE?');}}}if(!cmdFound&&!charsFound){this.logger.log(2,()=>"Couldn't parse cleaned data "+numArrayToHexArray([a,b])+' orig: '+numArrayToHexArray([byteList[i],byteList[i+1]]));}}}/**
* Parse Command.
* @returns True if a command was found
*/parseCmd(a,b){const cond1=(a===0x14||a===0x1c||a===0x15||a===0x1d)&&b>=0x20&&b<=0x2f;const cond2=(a===0x17||a===0x1f)&&b>=0x21&&b<=0x23;if(!(cond1||cond2)){return false;}const chNr=a===0x14||a===0x15||a===0x17?1:2;const channel=this.channels[chNr];if(a===0x14||a===0x15||a===0x1c||a===0x1d){if(b===0x20){channel.ccRCL();}else if(b===0x21){channel.ccBS();}else if(b===0x22){channel.ccAOF();}else if(b===0x23){channel.ccAON();}else if(b===0x24){channel.ccDER();}else if(b===0x25){channel.ccRU(2);}else if(b===0x26){channel.ccRU(3);}else if(b===0x27){channel.ccRU(4);}else if(b===0x28){channel.ccFON();}else if(b===0x29){channel.ccRDC();}else if(b===0x2a){channel.ccTR();}else if(b===0x2b){channel.ccRTD();}else if(b===0x2c){channel.ccEDM();}else if(b===0x2d){channel.ccCR();}else if(b===0x2e){channel.ccENM();}else if(b===0x2f){channel.ccEOC();}}else {// a == 0x17 || a == 0x1F
channel.ccTO(b-0x20);}this.currentChannel=chNr;return true;}/**
* Parse midrow styling command
*/parseMidrow(a,b){let chNr=0;if((a===0x11||a===0x19)&&b>=0x20&&b<=0x2f){if(a===0x11){chNr=1;}else {chNr=2;}if(chNr!==this.currentChannel){this.logger.log(0,'Mismatch channel in midrow parsing');return false;}const channel=this.channels[chNr];if(!channel){return false;}channel.ccMIDROW(b);this.logger.log(3,()=>'MIDROW ('+numArrayToHexArray([a,b])+')');return true;}return false;}/**
* Parse Preamble Access Codes (Table 53).
* @returns {Boolean} Tells if PAC found
*/parsePAC(a,b){let row;const case1=(a>=0x11&&a<=0x17||a>=0x19&&a<=0x1f)&&b>=0x40&&b<=0x7f;const case2=(a===0x10||a===0x18)&&b>=0x40&&b<=0x5f;if(!(case1||case2)){return false;}const chNr=a<=0x17?1:2;if(b>=0x40&&b<=0x5f){row=chNr===1?rowsLowCh1[a]:rowsLowCh2[a];}else {// 0x60 <= b <= 0x7F
row=chNr===1?rowsHighCh1[a]:rowsHighCh2[a];}const channel=this.channels[chNr];if(!channel){return false;}channel.setPAC(this.interpretPAC(row,b));this.currentChannel=chNr;return true;}/**
* Interpret the second byte of the pac, and return the information.
* @returns pacData with style parameters
*/interpretPAC(row,byte){let pacIndex;const pacData={color:null,italics:false,indent:null,underline:false,row:row};if(byte>0x5f){pacIndex=byte-0x60;}else {pacIndex=byte-0x40;}pacData.underline=(pacIndex&1)===1;if(pacIndex<=0xd){pacData.color=['white','green','blue','cyan','red','yellow','magenta','white'][Math.floor(pacIndex/2)];}else if(pacIndex<=0xf){pacData.italics=true;pacData.color='white';}else {pacData.indent=Math.floor((pacIndex-0x10)/2)*4;}return pacData;// Note that row has zero offset. The spec uses 1.
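/*
 * Worked examples for the second PAC byte handled above:
 *   0x45 -> pacIndex 5  -> { color: 'blue', underline: true,  italics: false, indent: null }
 *   0x74 -> pacIndex 20 -> { color: null,   underline: false, italics: false, indent: 8 }
 */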
}/**
* Parse characters.
* @returns An array with 1 to 2 codes corresponding to chars, if found. null otherwise.
*/parseChars(a,b){let channelNr;let charCodes=null;let charCode1=null;if(a>=0x19){channelNr=2;charCode1=a-8;}else {channelNr=1;charCode1=a;}if(charCode1>=0x11&&charCode1<=0x13){// Special character
let oneCode;if(charCode1===0x11){oneCode=b+0x50;}else if(charCode1===0x12){oneCode=b+0x70;}else {oneCode=b+0x90;}this.logger.log(2,()=>"Special char '"+getCharForByte(oneCode)+"' in channel "+channelNr);charCodes=[oneCode];}else if(a>=0x20&&a<=0x7f){charCodes=b===0?[a]:[a,b];}if(charCodes){this.logger.log(3,()=>'Char codes = '+numArrayToHexArray(charCodes).join(','));}return charCodes;}/**
* Parse extended background attributes as well as new foreground color black.
* @returns True if background attributes are found
*/parseBackgroundAttributes(a,b){const case1=(a===0x10||a===0x18)&&b>=0x20&&b<=0x2f;const case2=(a===0x17||a===0x1f)&&b>=0x2d&&b<=0x2f;if(!(case1||case2)){return false;}let index;const bkgData={};if(a===0x10||a===0x18){index=Math.floor((b-0x20)/2);bkgData.background=backgroundColors[index];if(b%2===1){bkgData.background=bkgData.background+'_semi';}}else if(b===0x2d){bkgData.background='transparent';}else {bkgData.foreground='black';if(b===0x2f){bkgData.underline=true;}}const chNr=a<=0x17?1:2;const channel=this.channels[chNr];channel.setBkgData(bkgData);return true;}/**
* Reset state of parser and its channels.
*/reset(){for(let i=0;i<Object.keys(this.channels).length;i++){const channel=this.channels[i];if(channel){channel.reset();}}setLastCmd(null,null,this.cmdHistory);}/**
* Trigger the generation of a cue, and the start of a new one if displayScreens are not empty.
*/cueSplitAtTime(t){for(let i=0;i<this.channels.length;i++){const channel=this.channels[i];if(channel){channel.cueSplitAtTime(t);}}}}function setLastCmd(a,b,cmdHistory){cmdHistory.a=a;cmdHistory.b=b;}function hasCmdRepeated(a,b,cmdHistory){return cmdHistory.a===a&&cmdHistory.b===b;}function createCmdHistory(){return {a:null,b:null};}class OutputFilter{constructor(timelineController,trackName){this.timelineController=void 0;this.cueRanges=[];this.trackName=void 0;this.startTime=null;this.endTime=null;this.screen=null;this.timelineController=timelineController;this.trackName=trackName;}dispatchCue(){if(this.startTime===null){return;}this.timelineController.addCues(this.trackName,this.startTime,this.endTime,this.screen,this.cueRanges);this.startTime=null;}newCue(startTime,endTime,screen){if(this.startTime===null||this.startTime>startTime){this.startTime=startTime;}this.endTime=endTime;this.screen=screen;this.timelineController.createCaptionsTrack(this.trackName);}reset(){this.cueRanges=[];this.startTime=null;}}/**
* Copyright 2013 vtt.js Contributors
*
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/var VTTCue=function(){if(optionalSelf!=null&&optionalSelf.VTTCue){return self.VTTCue;}const AllowedDirections=['','lr','rl'];const AllowedAlignments=['start','middle','end','left','right'];function isAllowedValue(allowed,value){if(typeof value!=='string'){return false;}// necessary for assuring the generic conforms to the Array interface
if(!Array.isArray(allowed)){return false;}// reset the type so that the next narrowing works well
const lcValue=value.toLowerCase();// use the allow list to narrow the type to a specific subset of strings
if(~allowed.indexOf(lcValue)){return lcValue;}return false;}function findDirectionSetting(value){return isAllowedValue(AllowedDirections,value);}function findAlignSetting(value){return isAllowedValue(AllowedAlignments,value);}function extend(obj,...rest){let i=1;for(;i<arguments.length;i++){const cobj=arguments[i];for(const p in cobj){obj[p]=cobj[p];}}return obj;}function VTTCue(startTime,endTime,text){const cue=this;const baseObj={enumerable:true};/**
* Shim implementation specific properties. These properties are not in
* the spec.
*/ // Lets us know when the VTTCue's data has changed in such a way that we need
// to recompute its display state. This lets us compute its display state
// lazily.
cue.hasBeenReset=false;/**
* VTTCue and TextTrackCue properties
* http://dev.w3.org/html5/webvtt/#vttcue-interface
*/let _id='';let _pauseOnExit=false;let _startTime=startTime;let _endTime=endTime;let _text=text;let _region=null;let _vertical='';let _snapToLines=true;let _line='auto';let _lineAlign='start';let _position=50;let _positionAlign='middle';let _size=50;let _align='middle';Object.defineProperty(cue,'id',extend({},baseObj,{get:function(){return _id;},set:function(value){_id=''+value;}}));Object.defineProperty(cue,'pauseOnExit',extend({},baseObj,{get:function(){return _pauseOnExit;},set:function(value){_pauseOnExit=!!value;}}));Object.defineProperty(cue,'startTime',extend({},baseObj,{get:function(){return _startTime;},set:function(value){if(typeof value!=='number'){throw new TypeError('Start time must be set to a number.');}_startTime=value;this.hasBeenReset=true;}}));Object.defineProperty(cue,'endTime',extend({},baseObj,{get:function(){return _endTime;},set:function(value){if(typeof value!=='number'){throw new TypeError('End time must be set to a number.');}_endTime=value;this.hasBeenReset=true;}}));Object.defineProperty(cue,'text',extend({},baseObj,{get:function(){return _text;},set:function(value){_text=''+value;this.hasBeenReset=true;}}));// todo: implement VTTRegion polyfill?
Object.defineProperty(cue,'region',extend({},baseObj,{get:function(){return _region;},set:function(value){_region=value;this.hasBeenReset=true;}}));Object.defineProperty(cue,'vertical',extend({},baseObj,{get:function(){return _vertical;},set:function(value){const setting=findDirectionSetting(value);// Have to check for false because the setting an be an empty string.
if(setting===false){throw new SyntaxError('An invalid or illegal string was specified.');}_vertical=setting;this.hasBeenReset=true;}}));Object.defineProperty(cue,'snapToLines',extend({},baseObj,{get:function(){return _snapToLines;},set:function(value){_snapToLines=!!value;this.hasBeenReset=true;}}));Object.defineProperty(cue,'line',extend({},baseObj,{get:function(){return _line;},set:function(value){if(typeof value!=='number'&&value!=='auto'){throw new SyntaxError('An invalid number or illegal string was specified.');}_line=value;this.hasBeenReset=true;}}));Object.defineProperty(cue,'lineAlign',extend({},baseObj,{get:function(){return _lineAlign;},set:function(value){const setting=findAlignSetting(value);if(!setting){throw new SyntaxError('An invalid or illegal string was specified.');}_lineAlign=setting;this.hasBeenReset=true;}}));Object.defineProperty(cue,'position',extend({},baseObj,{get:function(){return _position;},set:function(value){if(value<0||value>100){throw new Error('Position must be between 0 and 100.');}_position=value;this.hasBeenReset=true;}}));Object.defineProperty(cue,'positionAlign',extend({},baseObj,{get:function(){return _positionAlign;},set:function(value){const setting=findAlignSetting(value);if(!setting){throw new SyntaxError('An invalid or illegal string was specified.');}_positionAlign=setting;this.hasBeenReset=true;}}));Object.defineProperty(cue,'size',extend({},baseObj,{get:function(){return _size;},set:function(value){if(value<0||value>100){throw new Error('Size must be between 0 and 100.');}_size=value;this.hasBeenReset=true;}}));Object.defineProperty(cue,'align',extend({},baseObj,{get:function(){return _align;},set:function(value){const setting=findAlignSetting(value);if(!setting){throw new SyntaxError('An invalid or illegal string was specified.');}_align=setting;this.hasBeenReset=true;}}));/**
* Other <track> spec defined properties
*/ // http://www.whatwg.org/specs/web-apps/current-work/multipage/the-video-element.html#text-track-cue-display-state
cue.displayState=undefined;}/**
* VTTCue methods
*/VTTCue.prototype.getCueAsHTML=function(){// Assume WebVTT.convertCueToDOMTree is on the global.
const WebVTT=self.WebVTT;return WebVTT.convertCueToDOMTree(self,this.text);};// this is a polyfill hack
return VTTCue;}();/*
* Source: https://github.com/mozilla/vtt.js/blob/master/dist/vtt.js
*/class StringDecoder{// eslint-disable-next-line @typescript-eslint/no-unused-vars
decode(data,options){if(!data){return '';}if(typeof data!=='string'){throw new Error('Error - expected string data.');}return decodeURIComponent(encodeURIComponent(data));}}// Try to parse input as a time stamp.
function parseTimeStamp(input){function computeSeconds(h,m,s,f){return (h|0)*3600+(m|0)*60+(s|0)+parseFloat(f||0);}const m=input.match(/^(?:(\d+):)?(\d{2}):(\d{2})(\.\d+)?/);if(!m){return null;}if(parseFloat(m[2])>59){// Timestamp takes the form of [hours]:[minutes].[milliseconds]
// First position is hours as it's over 59.
return computeSeconds(m[2],m[3],0,m[4]);}// Timestamp takes the form of [hours (optional)]:[minutes]:[seconds].[milliseconds]
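/*
 * e.g. '01:30:05.250' -> 1*3600 + 30*60 + 5 + 0.25 = 5405.25 seconds;
 * with the hours group omitted, '02:03.500' -> 2*60 + 3 + 0.5 = 123.5 seconds.
 */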
return computeSeconds(m[1],m[2],m[3],m[4]);}// A settings object holds key/value pairs and will ignore anything but the first
// assignment to a specific key.
class Settings{constructor(){this.values=Object.create(null);}// Only accept the first assignment to any key.
set(k,v){if(!this.get(k)&&v!==''){this.values[k]=v;}}// Return the value for a key, or a default value.
// If 'defaultKey' is passed then 'dflt' is assumed to be an object with
// a number of possible default values as properties where 'defaultKey' is
// the key of the property that will be chosen; otherwise it's assumed to be
// a single value.
get(k,dflt,defaultKey){if(defaultKey){return this.has(k)?this.values[k]:dflt[defaultKey];}return this.has(k)?this.values[k]:dflt;}// Check whether we have a value for a key.
has(k){return k in this.values;}// Accept a setting if its one of the given alternatives.
alt(k,v,a){for(let n=0;n<a.length;++n){if(v===a[n]){this.set(k,v);break;}}}// Accept a setting if its a valid (signed) integer.
integer(k,v){if(/^-?\d+$/.test(v)){// integer
this.set(k,parseInt(v,10));}}// Accept a setting if its a valid percentage.
percent(k,v){if(/^([\d]{1,3})(\.[\d]*)?%$/.test(v)){const percent=parseFloat(v);if(percent>=0&&percent<=100){this.set(k,percent);return true;}}return false;}}// Helper function to parse input into groups separated by 'groupDelim', and
// interpret each group as a key/value pair separated by 'keyValueDelim'.
function parseOptions(input,callback,keyValueDelim,groupDelim){const groups=groupDelim?input.split(groupDelim):[input];for(const i in groups){if(typeof groups[i]!=='string'){continue;}const kv=groups[i].split(keyValueDelim);if(kv.length!==2){continue;}const k=kv[0];const v=kv[1];callback(k,v);}}const defaults=new VTTCue(0,0,'');// 'middle' was changed to 'center' in the spec: https://github.com/w3c/webvtt/pull/244
// Safari doesn't yet support this change, but FF and Chrome do.
const center=defaults.align==='middle'?'middle':'center';function parseCue(input,cue,regionList){// Remember the original input if we need to throw an error.
const oInput=input;// 4.1 WebVTT timestamp
function consumeTimeStamp(){const ts=parseTimeStamp(input);if(ts===null){throw new Error('Malformed timestamp: '+oInput);}// Remove time stamp from input.
input=input.replace(/^[^\sa-zA-Z-]+/,'');return ts;}// 4.4.2 WebVTT cue settings
function consumeCueSettings(input,cue){const settings=new Settings();parseOptions(input,function(k,v){let vals;switch(k){case'region':// Find the last region we parsed with the same region id.
for(let i=regionList.length-1;i>=0;i--){if(regionList[i].id===v){settings.set(k,regionList[i].region);break;}}break;case'vertical':settings.alt(k,v,['rl','lr']);break;case'line':vals=v.split(',');settings.integer(k,vals[0]);if(settings.percent(k,vals[0])){settings.set('snapToLines',false);}settings.alt(k,vals[0],['auto']);if(vals.length===2){settings.alt('lineAlign',vals[1],['start',center,'end']);}break;case'position':vals=v.split(',');settings.percent(k,vals[0]);if(vals.length===2){settings.alt('positionAlign',vals[1],['start',center,'end','line-left','line-right','auto']);}break;case'size':settings.percent(k,v);break;case'align':settings.alt(k,v,['start',center,'end','left','right']);break;}},/:/,/\s/);// Apply default values for any missing fields.
cue.region=settings.get('region',null);cue.vertical=settings.get('vertical','');let line=settings.get('line','auto');if(line==='auto'&&defaults.line===-1){// set numeric line number for Safari
line=-1;}cue.line=line;cue.lineAlign=settings.get('lineAlign','start');cue.snapToLines=settings.get('snapToLines',true);cue.size=settings.get('size',100);cue.align=settings.get('align',center);let position=settings.get('position','auto');if(position==='auto'&&defaults.position===50){// set numeric position for Safari
position=cue.align==='start'||cue.align==='left'?0:cue.align==='end'||cue.align==='right'?100:50;}cue.position=position;}function skipWhitespace(){input=input.replace(/^\s+/,'');}// 4.1 WebVTT cue timings.
skipWhitespace();cue.startTime=consumeTimeStamp();// (1) collect cue start time
skipWhitespace();if(input.slice(0,3)!=='-->'){// (3) next characters must match '-->'
throw new Error("Malformed time stamp (time stamps must be separated by '-->'): "+oInput);}input=input.slice(3);skipWhitespace();cue.endTime=consumeTimeStamp();// (5) collect cue end time
// 4.1 WebVTT cue settings list.
skipWhitespace();consumeCueSettings(input,cue);}function fixLineBreaks(input){return input.replace(/<br(?: \/)?>/gi,'\n');}class VTTParser{constructor(){this.state='INITIAL';this.buffer='';this.decoder=new StringDecoder();this.regionList=[];this.cue=null;this.oncue=void 0;this.onparsingerror=void 0;this.onflush=void 0;}parse(data){const _this=this;// If there is no data then we won't decode it, but will just try to parse
// whatever is in the buffer already. This may occur in some circumstances, for
// example when flush() is called.
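/*
 * Typical driving pattern for this parser (a sketch only, assuming string input chunks):
 *
 *   const cues = [];
 *   const p = new VTTParser();
 *   p.oncue = (cue) => cues.push(cue);
 *   p.parse('WEBVTT\n\n');
 *   p.parse('00:00.000 --> 00:01.000\nHi\n\n');
 *   p.flush(); // force out any final, unterminated cue
 */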
if(data){// Try to decode the data that we received.
_this.buffer+=_this.decoder.decode(data,{stream:true});}function collectNextLine(){let buffer=_this.buffer;let pos=0;buffer=fixLineBreaks(buffer);while(pos<buffer.length&&buffer[pos]!=='\r'&&buffer[pos]!=='\n'){++pos;}const line=buffer.slice(0,pos);// Advance the buffer early in case we fail below.
if(buffer[pos]==='\r'){++pos;}if(buffer[pos]==='\n'){++pos;}_this.buffer=buffer.slice(pos);return line;}// 3.2 WebVTT metadata header syntax
function parseHeader(input){parseOptions(input,function(k,v){// switch (k) {
// case 'region':
// 3.3 WebVTT region metadata header syntax
// console.log('parse region', v);
// parseRegion(v);
// break;
// }
},/:/);}// 5.1 WebVTT file parsing.
try{let line='';if(_this.state==='INITIAL'){// We can't start parsing until we have the first line.
if(!/\r\n|\n/.test(_this.buffer)){return this;}line=collectNextLine();// strip off the UTF-8 BOM if any
// https://en.wikipedia.org/wiki/Byte_order_mark#UTF-8
const m=line.match(/^()?WEBVTT([ \t].*)?$/);if(!(m!=null&&m[0])){throw new Error('Malformed WebVTT signature.');}_this.state='HEADER';}let alreadyCollectedLine=false;while(_this.buffer){// We can't parse a line until we have the full line.
if(!/\r\n|\n/.test(_this.buffer)){return this;}if(!alreadyCollectedLine){line=collectNextLine();}else {alreadyCollectedLine=false;}switch(_this.state){case'HEADER':// 13-18 - Allow a header (metadata) under the WEBVTT line.
if(/:/.test(line)){parseHeader(line);}else if(!line){// An empty line terminates the header and starts the body (cues).
_this.state='ID';}continue;case'NOTE':// Ignore NOTE blocks.
if(!line){_this.state='ID';}continue;case'ID':// Check for the start of NOTE blocks.
if(/^NOTE($|[ \t])/.test(line)){_this.state='NOTE';break;}// 19-29 - Allow any number of line terminators, then initialize new cue values.
if(!line){continue;}_this.cue=new VTTCue(0,0,'');_this.state='CUE';// 30-39 - Check if this line contains an optional identifier or timing data.
if(line.indexOf('-->')===-1){_this.cue.id=line;continue;}// Process line as start of a cue.
/* falls through */case'CUE':// 40 - Collect cue timings and settings.
if(!_this.cue){_this.state='BADCUE';continue;}try{parseCue(line,_this.cue,_this.regionList);}catch(e){// In case of an error ignore rest of the cue.
_this.cue=null;_this.state='BADCUE';continue;}_this.state='CUETEXT';continue;case'CUETEXT':{const hasSubstring=line.indexOf('-->')!==-1;// 34 - If we have an empty line then report the cue.
// 35 - If we have the special substring '-->' then report the cue,
// but do not collect the line as we need to process the current
// one as a new cue.
if(!line||hasSubstring&&(alreadyCollectedLine=true)){// We are done parsing this cue.
if(_this.oncue&&_this.cue){_this.oncue(_this.cue);}_this.cue=null;_this.state='ID';continue;}if(_this.cue===null){continue;}if(_this.cue.text){_this.cue.text+='\n';}_this.cue.text+=line;}continue;case'BADCUE':// 54-62 - Collect and discard the remaining cue.
if(!line){_this.state='ID';}}}}catch(e){// If we are currently parsing a cue, report what we have.
if(_this.state==='CUETEXT'&&_this.cue&&_this.oncue){_this.oncue(_this.cue);}_this.cue=null;// Enter BADWEBVTT state if header was not parsed correctly otherwise
// another exception occurred so enter BADCUE state.
_this.state=_this.state==='INITIAL'?'BADWEBVTT':'BADCUE';}return this;}flush(){const _this=this;try{// Finish decoding the stream.
// _this.buffer += _this.decoder.decode();
// Synthesize the end of the current cue or region.
if(_this.cue||_this.state==='HEADER'){_this.buffer+='\n\n';_this.parse();}// If we've flushed, parsed, and we're still on the INITIAL state then
// that means we don't have enough of the stream to parse the first
// line.
if(_this.state==='INITIAL'||_this.state==='BADWEBVTT'){throw new Error('Malformed WebVTT signature.');}}catch(e){if(_this.onparsingerror){_this.onparsingerror(e);}}if(_this.onflush){_this.onflush();}return this;}}const LINEBREAKS=/\r\n|\n\r|\n|\r/g;// String.prototype.startsWith is not supported in IE11
const startsWith=function startsWith(inputString,searchString,position=0){return inputString.slice(position,position+searchString.length)===searchString;};const cueString2millis=function cueString2millis(timeString){let ts=parseInt(timeString.slice(-3));const secs=parseInt(timeString.slice(-6,-4));const mins=parseInt(timeString.slice(-9,-7));const hours=timeString.length>9?parseInt(timeString.substring(0,timeString.indexOf(':'))):0;if(!isFiniteNumber(ts)||!isFiniteNumber(secs)||!isFiniteNumber(mins)||!isFiniteNumber(hours)){throw Error(`Malformed X-TIMESTAMP-MAP: Local:${timeString}`);}ts+=1000*secs;ts+=60*1000*mins;ts+=60*60*1000*hours;return ts;};// From https://github.com/darkskyapp/string-hash
const hash=function hash(text){let _hash=5381;let i=text.length;while(i){_hash=_hash*33^text.charCodeAt(--i);}return (_hash>>>0).toString();};// Create a unique hash id for a cue based on start/end times and text.
// This helps timeline-controller to avoid showing repeated captions.
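/*
 * Sketch (illustrative values): generateCueId concatenates three string hashes, so identical
 * start/end/text always produce the same id, e.g.
 *
 *   generateCueId(1.2, 3.4, 'Hello') === generateCueId(1.2, 3.4, 'Hello'); // true
 *
 * which is what allows repeated cues from re-parsed fragments to be de-duplicated by id.
 */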
function generateCueId(startTime,endTime,text){return hash(startTime.toString())+hash(endTime.toString())+hash(text);}const calculateOffset=function calculateOffset(vttCCs,cc,presentationTime){let currCC=vttCCs[cc];let prevCC=vttCCs[currCC.prevCC];// This is the first discontinuity or cues have been processed since the last discontinuity
// Offset = current discontinuity time
if(!prevCC||!prevCC.new&&currCC.new){vttCCs.ccOffset=vttCCs.presentationOffset=currCC.start;currCC.new=false;return;}// There have been discontinuities since cues were last parsed.
// Offset = time elapsed
while((_prevCC=prevCC)!=null&&_prevCC.new){var _prevCC;vttCCs.ccOffset+=currCC.start-prevCC.start;currCC.new=false;currCC=prevCC;prevCC=vttCCs[currCC.prevCC];}vttCCs.presentationOffset=presentationTime;};function parseWebVTT(vttByteArray,initPTS,vttCCs,cc,timeOffset,callBack,errorCallBack){const parser=new VTTParser();// Convert byteArray into string, replacing any somewhat exotic linefeeds with "\n", then split on that character.
// Uint8Array.prototype.reduce is not implemented in IE11
const vttLines=utf8ArrayToStr(new Uint8Array(vttByteArray)).trim().replace(LINEBREAKS,'\n').split('\n');const cues=[];const init90kHz=initPTS?toMpegTsClockFromTimescale(initPTS.baseTime,initPTS.timescale):0;let cueTime='00:00.000';let timestampMapMPEGTS=0;let timestampMapLOCAL=0;let parsingError;let inHeader=true;parser.oncue=function(cue){// Adjust cue timing; clamp cues to start no earlier than - and drop cues that don't end after - 0 on timeline.
const currCC=vttCCs[cc];let cueOffset=vttCCs.ccOffset;// Calculate subtitle PTS offset
const webVttMpegTsMapOffset=(timestampMapMPEGTS-init90kHz)/90000;// Update offsets for new discontinuities
if(currCC!=null&&currCC.new){if(timestampMapLOCAL!==undefined){// When local time is provided, offset = discontinuity start time - local time
cueOffset=vttCCs.ccOffset=currCC.start;}else {calculateOffset(vttCCs,cc,webVttMpegTsMapOffset);}}if(webVttMpegTsMapOffset){if(!initPTS){parsingError=new Error('Missing initPTS for VTT MPEGTS');return;}// If we have MPEGTS, offset = presentation time + discontinuity offset
cueOffset=webVttMpegTsMapOffset-vttCCs.presentationOffset;}const duration=cue.endTime-cue.startTime;const startTime=normalizePts((cue.startTime+cueOffset-timestampMapLOCAL)*90000,timeOffset*90000)/90000;cue.startTime=Math.max(startTime,0);cue.endTime=Math.max(startTime+duration,0);//trim trailing webvtt block whitespaces
const text=cue.text.trim();// Fix encoding of special characters
cue.text=decodeURIComponent(encodeURIComponent(text));// If the cue was not assigned an id from the VTT file (line above the content), create one.
if(!cue.id){cue.id=generateCueId(cue.startTime,cue.endTime,text);}if(cue.endTime>0){cues.push(cue);}};parser.onparsingerror=function(error){parsingError=error;};parser.onflush=function(){if(parsingError){errorCallBack(parsingError);return;}callBack(cues);};// Go through contents line by line.
vttLines.forEach(line=>{if(inHeader){// Look for X-TIMESTAMP-MAP in header.
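/*
 * Example of the header line this branch handles (sample values only):
 *
 *   X-TIMESTAMP-MAP=MPEGTS:900000,LOCAL:00:00:00.000
 *
 * LOCAL is converted to seconds via cueString2millis() and MPEGTS is read as a 90 kHz clock value;
 * together they map the cue times in this VTT file onto the stream's MPEG-TS timeline.
 */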
if(startsWith(line,'X-TIMESTAMP-MAP=')){// Once found, no more are allowed anyway, so stop searching.
inHeader=false;// Extract LOCAL and MPEGTS.
line.slice(16).split(',').forEach(timestamp=>{if(startsWith(timestamp,'LOCAL:')){cueTime=timestamp.slice(6);}else if(startsWith(timestamp,'MPEGTS:')){timestampMapMPEGTS=parseInt(timestamp.slice(7));}});try{// Convert cue time to seconds
timestampMapLOCAL=cueString2millis(cueTime)/1000;}catch(error){parsingError=error;}// Return without parsing X-TIMESTAMP-MAP line.
return;}else if(line===''){inHeader=false;}}// Parse line by default.
parser.parse(line+'\n');});parser.flush();}const IMSC1_CODEC='stpp.ttml.im1t';// Time format: h:m:s:frames(.subframes)
const HMSF_REGEX=/^(\d{2,}):(\d{2}):(\d{2}):(\d{2})\.?(\d+)?$/;// Time format: hours, minutes, seconds, milliseconds, frames, ticks
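/*
 * Worked examples for the two TTML time shapes handled below (frame rate assumed to be 30):
 *
 *   '00:00:05:12' -> 5 + 12/30  = 5.4 s   (hours:minutes:seconds:frames, HMSF_REGEX)
 *   '90f'         -> 90/30      = 3 s     (frame count, TIME_UNIT_REGEX)
 *   '1.5h'        -> 1.5 * 3600 = 5400 s  (hour unit, TIME_UNIT_REGEX)
 */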
const TIME_UNIT_REGEX=/^(\d*(?:\.\d*)?)(h|m|s|ms|f|t)$/;const textAlignToLineAlign={left:'start',center:'center',right:'end',start:'start',end:'end'};function parseIMSC1(payload,initPTS,callBack,errorCallBack){const results=findBox(new Uint8Array(payload),['mdat']);if(results.length===0){errorCallBack(new Error('Could not parse IMSC1 mdat'));return;}const ttmlList=results.map(mdat=>utf8ArrayToStr(mdat));const syncTime=toTimescaleFromScale(initPTS.baseTime,1,initPTS.timescale);try{ttmlList.forEach(ttml=>callBack(parseTTML(ttml,syncTime)));}catch(error){errorCallBack(error);}}function parseTTML(ttml,syncTime){const parser=new DOMParser();const xmlDoc=parser.parseFromString(ttml,'text/xml');const tt=xmlDoc.getElementsByTagName('tt')[0];if(!tt){throw new Error('Invalid ttml');}const defaultRateInfo={frameRate:30,subFrameRate:1,frameRateMultiplier:0,tickRate:0};const rateInfo=Object.keys(defaultRateInfo).reduce((result,key)=>{result[key]=tt.getAttribute(`ttp:${key}`)||defaultRateInfo[key];return result;},{});const trim=tt.getAttribute('xml:space')!=='preserve';const styleElements=collectionToDictionary(getElementCollection(tt,'styling','style'));const regionElements=collectionToDictionary(getElementCollection(tt,'layout','region'));const cueElements=getElementCollection(tt,'body','[begin]');return [].map.call(cueElements,cueElement=>{const cueText=getTextContent(cueElement,trim);if(!cueText||!cueElement.hasAttribute('begin')){return null;}const startTime=parseTtmlTime(cueElement.getAttribute('begin'),rateInfo);const duration=parseTtmlTime(cueElement.getAttribute('dur'),rateInfo);let endTime=parseTtmlTime(cueElement.getAttribute('end'),rateInfo);if(startTime===null){throw timestampParsingError(cueElement);}if(endTime===null){if(duration===null){throw timestampParsingError(cueElement);}endTime=startTime+duration;}const cue=new VTTCue(startTime-syncTime,endTime-syncTime,cueText);cue.id=generateCueId(cue.startTime,cue.endTime,cue.text);const region=regionElements[cueElement.getAttribute('region')];const style=styleElements[cueElement.getAttribute('style')];// Apply styles to cue
const styles=getTtmlStyles(region,style,styleElements);const{textAlign}=styles;if(textAlign){// cue.positionAlign not settable in FF~2016
const lineAlign=textAlignToLineAlign[textAlign];if(lineAlign){cue.lineAlign=lineAlign;}cue.align=textAlign;}_extends(cue,styles);return cue;}).filter(cue=>cue!==null);}function getElementCollection(fromElement,parentName,childName){const parent=fromElement.getElementsByTagName(parentName)[0];if(parent){return [].slice.call(parent.querySelectorAll(childName));}return [];}function collectionToDictionary(elementsWithId){return elementsWithId.reduce((dict,element)=>{const id=element.getAttribute('xml:id');if(id){dict[id]=element;}return dict;},{});}function getTextContent(element,trim){return [].slice.call(element.childNodes).reduce((str,node,i)=>{var _node$childNodes;if(node.nodeName==='br'&&i){return str+'\n';}if((_node$childNodes=node.childNodes)!=null&&_node$childNodes.length){return getTextContent(node,trim);}else if(trim){return str+node.textContent.trim().replace(/\s+/g,' ');}return str+node.textContent;},'');}function getTtmlStyles(region,style,styleElements){const ttsNs='http://www.w3.org/ns/ttml#styling';let regionStyle=null;const styleAttributes=['displayAlign','textAlign','color','backgroundColor','fontSize','fontFamily'// 'fontWeight',
// 'lineHeight',
// 'wrapOption',
// 'fontStyle',
// 'direction',
// 'writingMode'
];const regionStyleName=region!=null&®ion.hasAttribute('style')?region.getAttribute('style'):null;if(regionStyleName&&styleElements.hasOwnProperty(regionStyleName)){regionStyle=styleElements[regionStyleName];}return styleAttributes.reduce((styles,name)=>{const value=getAttributeNS(style,ttsNs,name)||getAttributeNS(region,ttsNs,name)||getAttributeNS(regionStyle,ttsNs,name);if(value){styles[name]=value;}return styles;},{});}function getAttributeNS(element,ns,name){if(!element){return null;}return element.hasAttributeNS(ns,name)?element.getAttributeNS(ns,name):null;}function timestampParsingError(node){return new Error(`Could not parse ttml timestamp ${node}`);}function parseTtmlTime(timeAttributeValue,rateInfo){if(!timeAttributeValue){return null;}let seconds=parseTimeStamp(timeAttributeValue);if(seconds===null){if(HMSF_REGEX.test(timeAttributeValue)){seconds=parseHoursMinutesSecondsFrames(timeAttributeValue,rateInfo);}else if(TIME_UNIT_REGEX.test(timeAttributeValue)){seconds=parseTimeUnits(timeAttributeValue,rateInfo);}}return seconds;}function parseHoursMinutesSecondsFrames(timeAttributeValue,rateInfo){const m=HMSF_REGEX.exec(timeAttributeValue);const frames=(m[4]|0)+(m[5]|0)/rateInfo.subFrameRate;return (m[1]|0)*3600+(m[2]|0)*60+(m[3]|0)+frames/rateInfo.frameRate;}function parseTimeUnits(timeAttributeValue,rateInfo){const m=TIME_UNIT_REGEX.exec(timeAttributeValue);const value=Number(m[1]);const unit=m[2];switch(unit){case'h':return value*3600;case'm':return value*60;case'ms':return value*1000;case'f':return value/rateInfo.frameRate;case't':return value/rateInfo.tickRate;}return value;}class TimelineController{constructor(hls){this.hls=void 0;this.media=null;this.config=void 0;this.enabled=true;this.Cues=void 0;this.textTracks=[];this.tracks=[];this.initPTS=[];this.unparsedVttFrags=[];this.captionsTracks={};this.nonNativeCaptionsTracks={};this.cea608Parser1=void 0;this.cea608Parser2=void 0;this.lastCc=-1;// Last video (CEA-608) fragment CC
this.lastSn=-1;// Last video (CEA-608) fragment MSN
this.lastPartIndex=-1;// Last video (CEA-608) fragment Part Index
this.prevCC=-1;// Last subtitle fragment CC
this.vttCCs=newVTTCCs();this.captionsProperties=void 0;this.hls=hls;this.config=hls.config;this.Cues=hls.config.cueHandler;this.captionsProperties={textTrack1:{label:this.config.captionsTextTrack1Label,languageCode:this.config.captionsTextTrack1LanguageCode},textTrack2:{label:this.config.captionsTextTrack2Label,languageCode:this.config.captionsTextTrack2LanguageCode},textTrack3:{label:this.config.captionsTextTrack3Label,languageCode:this.config.captionsTextTrack3LanguageCode},textTrack4:{label:this.config.captionsTextTrack4Label,languageCode:this.config.captionsTextTrack4LanguageCode}};hls.on(Events.MEDIA_ATTACHING,this.onMediaAttaching,this);hls.on(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.MANIFEST_LOADED,this.onManifestLoaded,this);hls.on(Events.SUBTITLE_TRACKS_UPDATED,this.onSubtitleTracksUpdated,this);hls.on(Events.FRAG_LOADING,this.onFragLoading,this);hls.on(Events.FRAG_LOADED,this.onFragLoaded,this);hls.on(Events.FRAG_PARSING_USERDATA,this.onFragParsingUserdata,this);hls.on(Events.FRAG_DECRYPTED,this.onFragDecrypted,this);hls.on(Events.INIT_PTS_FOUND,this.onInitPtsFound,this);hls.on(Events.SUBTITLE_TRACKS_CLEARED,this.onSubtitleTracksCleared,this);hls.on(Events.BUFFER_FLUSHING,this.onBufferFlushing,this);}destroy(){const{hls}=this;hls.off(Events.MEDIA_ATTACHING,this.onMediaAttaching,this);hls.off(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.MANIFEST_LOADED,this.onManifestLoaded,this);hls.off(Events.SUBTITLE_TRACKS_UPDATED,this.onSubtitleTracksUpdated,this);hls.off(Events.FRAG_LOADING,this.onFragLoading,this);hls.off(Events.FRAG_LOADED,this.onFragLoaded,this);hls.off(Events.FRAG_PARSING_USERDATA,this.onFragParsingUserdata,this);hls.off(Events.FRAG_DECRYPTED,this.onFragDecrypted,this);hls.off(Events.INIT_PTS_FOUND,this.onInitPtsFound,this);hls.off(Events.SUBTITLE_TRACKS_CLEARED,this.onSubtitleTracksCleared,this);hls.off(Events.BUFFER_FLUSHING,this.onBufferFlushing,this);// @ts-ignore
this.hls=this.config=null;this.cea608Parser1=this.cea608Parser2=undefined;}initCea608Parsers(){if(this.config.enableCEA708Captions&&(!this.cea608Parser1||!this.cea608Parser2)){const channel1=new OutputFilter(this,'textTrack1');const channel2=new OutputFilter(this,'textTrack2');const channel3=new OutputFilter(this,'textTrack3');const channel4=new OutputFilter(this,'textTrack4');this.cea608Parser1=new Cea608Parser(1,channel1,channel2);this.cea608Parser2=new Cea608Parser(3,channel3,channel4);}}addCues(trackName,startTime,endTime,screen,cueRanges){// skip cues which overlap more than 50% with previously parsed time ranges
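/*
 * Numeric sketch of the 50% overlap test below (values illustrative): for a stored range [0, 4] and a
 * new cue [2, 6], intersection(0, 4, 2, 6) = Math.min(4, 6) - Math.max(0, 2) = 2. Since
 * 2 / (6 - 2) = 0.5 is not greater than 0.5, the cue is still added and the stored range widens to [0, 6].
 */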
let merged=false;for(let i=cueRanges.length;i--;){const cueRange=cueRanges[i];const overlap=intersection(cueRange[0],cueRange[1],startTime,endTime);if(overlap>=0){cueRange[0]=Math.min(cueRange[0],startTime);cueRange[1]=Math.max(cueRange[1],endTime);merged=true;if(overlap/(endTime-startTime)>0.5){return;}}}if(!merged){cueRanges.push([startTime,endTime]);}if(this.config.renderTextTracksNatively){const track=this.captionsTracks[trackName];this.Cues.newCue(track,startTime,endTime,screen);}else {const cues=this.Cues.newCue(null,startTime,endTime,screen);this.hls.trigger(Events.CUES_PARSED,{type:'captions',cues,track:trackName});}}// Triggered when an initial PTS is found; used for synchronisation of WebVTT.
onInitPtsFound(event,{frag,id,initPTS,timescale}){const{unparsedVttFrags}=this;if(id==='main'){this.initPTS[frag.cc]={baseTime:initPTS,timescale};}// Due to asynchronous processing, initial PTS may arrive later than the first VTT fragments are loaded.
// Parse any unparsed fragments upon receiving the initial PTS.
if(unparsedVttFrags.length){this.unparsedVttFrags=[];unparsedVttFrags.forEach(frag=>{this.onFragLoaded(Events.FRAG_LOADED,frag);});}}getExistingTrack(label,language){const{media}=this;if(media){for(let i=0;i<media.textTracks.length;i++){const textTrack=media.textTracks[i];if(canReuseVttTextTrack(textTrack,{name:label,lang:language,attrs:{}})){return textTrack;}}}return null;}createCaptionsTrack(trackName){if(this.config.renderTextTracksNatively){this.createNativeTrack(trackName);}else {this.createNonNativeTrack(trackName);}}createNativeTrack(trackName){if(this.captionsTracks[trackName]){return;}const{captionsProperties,captionsTracks,media}=this;const{label,languageCode}=captionsProperties[trackName];// Enable reuse of existing text track.
const existingTrack=this.getExistingTrack(label,languageCode);if(!existingTrack){const textTrack=this.createTextTrack('captions',label,languageCode);if(textTrack){// Set a special property on the track so we know it's managed by Hls.js
textTrack[trackName]=true;captionsTracks[trackName]=textTrack;}}else {captionsTracks[trackName]=existingTrack;clearCurrentCues(captionsTracks[trackName]);sendAddTrackEvent(captionsTracks[trackName],media);}}createNonNativeTrack(trackName){if(this.nonNativeCaptionsTracks[trackName]){return;}// Create a list of a single track for the provider to consume
const trackProperties=this.captionsProperties[trackName];if(!trackProperties){return;}const label=trackProperties.label;const track={_id:trackName,label,kind:'captions',default:trackProperties.media?!!trackProperties.media.default:false,closedCaptions:trackProperties.media};this.nonNativeCaptionsTracks[trackName]=track;this.hls.trigger(Events.NON_NATIVE_TEXT_TRACKS_FOUND,{tracks:[track]});}createTextTrack(kind,label,lang){const media=this.media;if(!media){return;}return media.addTextTrack(kind,label,lang);}onMediaAttaching(event,data){this.media=data.media;this._cleanTracks();}onMediaDetaching(){const{captionsTracks}=this;Object.keys(captionsTracks).forEach(trackName=>{clearCurrentCues(captionsTracks[trackName]);delete captionsTracks[trackName];});this.nonNativeCaptionsTracks={};}onManifestLoading(){// Detect discontinuity in video fragment (CEA-608) parsing
this.lastCc=-1;this.lastSn=-1;this.lastPartIndex=-1;// Detect discontinuity in subtitle manifests
this.prevCC=-1;this.vttCCs=newVTTCCs();// Reset tracks
this._cleanTracks();this.tracks=[];this.captionsTracks={};this.nonNativeCaptionsTracks={};this.textTracks=[];this.unparsedVttFrags=[];this.initPTS=[];if(this.cea608Parser1&&this.cea608Parser2){this.cea608Parser1.reset();this.cea608Parser2.reset();}}_cleanTracks(){// clear outdated subtitles
const{media}=this;if(!media){return;}const textTracks=media.textTracks;if(textTracks){for(let i=0;i<textTracks.length;i++){clearCurrentCues(textTracks[i]);}}}onSubtitleTracksUpdated(event,data){const tracks=data.subtitleTracks||[];const hasIMSC1=tracks.some(track=>track.textCodec===IMSC1_CODEC);if(this.config.enableWebVTT||hasIMSC1&&this.config.enableIMSC1){const listIsIdentical=subtitleOptionsIdentical(this.tracks,tracks);if(listIsIdentical){this.tracks=tracks;return;}this.textTracks=[];this.tracks=tracks;if(this.config.renderTextTracksNatively){const media=this.media;const inUseTracks=media?filterSubtitleTracks(media.textTracks):null;this.tracks.forEach((track,index)=>{// Reuse tracks with the same label and lang, but do not reuse 608/708 tracks
let textTrack;if(inUseTracks){let inUseTrack=null;for(let i=0;i<inUseTracks.length;i++){if(inUseTracks[i]&&canReuseVttTextTrack(inUseTracks[i],track)){inUseTrack=inUseTracks[i];inUseTracks[i]=null;break;}}if(inUseTrack){textTrack=inUseTrack;}}if(textTrack){clearCurrentCues(textTrack);}else {const textTrackKind=captionsOrSubtitlesFromCharacteristics(track);textTrack=this.createTextTrack(textTrackKind,track.name,track.lang);if(textTrack){textTrack.mode='disabled';}}if(textTrack){this.textTracks.push(textTrack);}});// Warn when video element has captions or subtitle TextTracks carried over from another source
if(inUseTracks!=null&&inUseTracks.length){const unusedTextTracks=inUseTracks.filter(t=>t!==null).map(t=>t.label);if(unusedTextTracks.length){logger.warn(`Media element contains unused subtitle tracks: ${unusedTextTracks.join(', ')}. Replace media element for each source to clear TextTracks and captions menu.`);}}}else if(this.tracks.length){// Create a list of tracks for the provider to consume
const tracksList=this.tracks.map(track=>{return {label:track.name,kind:track.type.toLowerCase(),default:track.default,subtitleTrack:track};});this.hls.trigger(Events.NON_NATIVE_TEXT_TRACKS_FOUND,{tracks:tracksList});}}}onManifestLoaded(event,data){if(this.config.enableCEA708Captions&&data.captions){data.captions.forEach(captionsTrack=>{const instreamIdMatch=/(?:CC|SERVICE)([1-4])/.exec(captionsTrack.instreamId);if(!instreamIdMatch){return;}const trackName=`textTrack${instreamIdMatch[1]}`;const trackProperties=this.captionsProperties[trackName];if(!trackProperties){return;}trackProperties.label=captionsTrack.name;if(captionsTrack.lang){// optional attribute
trackProperties.languageCode=captionsTrack.lang;}trackProperties.media=captionsTrack;});}}closedCaptionsForLevel(frag){const level=this.hls.levels[frag.level];return level==null?void 0:level.attrs['CLOSED-CAPTIONS'];}onFragLoading(event,data){// if this frag isn't contiguous, clear the parser so cues with bad start/end times aren't added to the textTrack
if(this.enabled&&data.frag.type===PlaylistLevelType.MAIN){var _data$part$index,_data$part;const{cea608Parser1,cea608Parser2,lastSn}=this;const{cc,sn}=data.frag;const partIndex=(_data$part$index=(_data$part=data.part)==null?void 0:_data$part.index)!=null?_data$part$index:-1;if(cea608Parser1&&cea608Parser2){if(sn!==lastSn+1||sn===lastSn&&partIndex!==this.lastPartIndex+1||cc!==this.lastCc){cea608Parser1.reset();cea608Parser2.reset();}}this.lastCc=cc;this.lastSn=sn;this.lastPartIndex=partIndex;}}onFragLoaded(event,data){const{frag,payload}=data;if(frag.type===PlaylistLevelType.SUBTITLE){// If fragment is subtitle type, parse as WebVTT.
if(payload.byteLength){const decryptData=frag.decryptdata;// fragment after decryption has a stats object
const decrypted=('stats'in data);// If the subtitles are not encrypted, parse VTTs now. Otherwise, we need to wait.
if(decryptData==null||!decryptData.encrypted||decrypted){const trackPlaylistMedia=this.tracks[frag.level];const vttCCs=this.vttCCs;if(!vttCCs[frag.cc]){vttCCs[frag.cc]={start:frag.start,prevCC:this.prevCC,new:true};this.prevCC=frag.cc;}if(trackPlaylistMedia&&trackPlaylistMedia.textCodec===IMSC1_CODEC){this._parseIMSC1(frag,payload);}else {this._parseVTTs(data);}}}else {// In case there is no payload, finish unsuccessfully.
this.hls.trigger(Events.SUBTITLE_FRAG_PROCESSED,{success:false,frag,error:new Error('Empty subtitle payload')});}}}_parseIMSC1(frag,payload){const hls=this.hls;parseIMSC1(payload,this.initPTS[frag.cc],cues=>{this._appendCues(cues,frag.level);hls.trigger(Events.SUBTITLE_FRAG_PROCESSED,{success:true,frag:frag});},error=>{logger.log(`Failed to parse IMSC1: ${error}`);hls.trigger(Events.SUBTITLE_FRAG_PROCESSED,{success:false,frag:frag,error});});}_parseVTTs(data){var _frag$initSegment;const{frag,payload}=data;// We need an initial synchronisation PTS. Store fragments as long as none has arrived
const{initPTS,unparsedVttFrags}=this;const maxAvCC=initPTS.length-1;if(!initPTS[frag.cc]&&maxAvCC===-1){unparsedVttFrags.push(data);return;}const hls=this.hls;// Parse the WebVTT file contents.
const payloadWebVTT=(_frag$initSegment=frag.initSegment)!=null&&_frag$initSegment.data?appendUint8Array(frag.initSegment.data,new Uint8Array(payload)):payload;parseWebVTT(payloadWebVTT,this.initPTS[frag.cc],this.vttCCs,frag.cc,frag.start,cues=>{this._appendCues(cues,frag.level);hls.trigger(Events.SUBTITLE_FRAG_PROCESSED,{success:true,frag:frag});},error=>{const missingInitPTS=error.message==='Missing initPTS for VTT MPEGTS';if(missingInitPTS){unparsedVttFrags.push(data);}else {this._fallbackToIMSC1(frag,payload);}// Something went wrong while parsing. Trigger event with success false.
logger.log(`Failed to parse VTT cue: ${error}`);if(missingInitPTS&&maxAvCC>frag.cc){return;}hls.trigger(Events.SUBTITLE_FRAG_PROCESSED,{success:false,frag:frag,error});});}_fallbackToIMSC1(frag,payload){// If textCodec is unknown, try parsing as IMSC1. Set textCodec based on the result
const trackPlaylistMedia=this.tracks[frag.level];if(!trackPlaylistMedia.textCodec){parseIMSC1(payload,this.initPTS[frag.cc],()=>{trackPlaylistMedia.textCodec=IMSC1_CODEC;this._parseIMSC1(frag,payload);},()=>{trackPlaylistMedia.textCodec='wvtt';});}}_appendCues(cues,fragLevel){const hls=this.hls;if(this.config.renderTextTracksNatively){const textTrack=this.textTracks[fragLevel];// WebVTTParser.parse is an async method and if the currently selected text track mode is set to "disabled"
// before parsing is done then don't try to access currentTrack.cues.getCueById as cues will be null
// and trying to access getCueById method of cues will throw an exception
// Because we check if the mode is disabled, we can force check `cues` below. They can't be null.
if(!textTrack||textTrack.mode==='disabled'){return;}cues.forEach(cue=>addCueToTrack(textTrack,cue));}else {const currentTrack=this.tracks[fragLevel];if(!currentTrack){return;}const track=currentTrack.default?'default':'subtitles'+fragLevel;hls.trigger(Events.CUES_PARSED,{type:'subtitles',cues,track});}}onFragDecrypted(event,data){const{frag}=data;if(frag.type===PlaylistLevelType.SUBTITLE){this.onFragLoaded(Events.FRAG_LOADED,data);}}onSubtitleTracksCleared(){this.tracks=[];this.captionsTracks={};}onFragParsingUserdata(event,data){this.initCea608Parsers();const{cea608Parser1,cea608Parser2}=this;if(!this.enabled||!cea608Parser1||!cea608Parser2){return;}const{frag,samples}=data;if(frag.type===PlaylistLevelType.MAIN&&this.closedCaptionsForLevel(frag)==='NONE'){return;}// If the event contains captions (found in the bytes property), push all bytes into the parser immediately
// It will create the proper timestamps based on the PTS value
for(let i=0;i<samples.length;i++){const ccBytes=samples[i].bytes;if(ccBytes){const ccdatas=this.extractCea608Data(ccBytes);cea608Parser1.addData(samples[i].pts,ccdatas[0]);cea608Parser2.addData(samples[i].pts,ccdatas[1]);}}}onBufferFlushing(event,{startOffset,endOffset,endOffsetSubtitles,type}){const{media}=this;if(!media||media.currentTime<endOffset){return;}// Clear 608 caption cues from the captions TextTracks when the video back buffer is flushed
// Forward cues are never removed because we can lose streamed 608 content from recent fragments
if(!type||type==='video'){const{captionsTracks}=this;Object.keys(captionsTracks).forEach(trackName=>removeCuesInRange(captionsTracks[trackName],startOffset,endOffset));}if(this.config.renderTextTracksNatively){// Clear VTT/IMSC1 subtitle cues from the subtitle TextTracks when the back buffer is flushed
if(startOffset===0&&endOffsetSubtitles!==undefined){const{textTracks}=this;Object.keys(textTracks).forEach(trackName=>removeCuesInRange(textTracks[trackName],startOffset,endOffsetSubtitles));}}}extractCea608Data(byteArray){const actualCCBytes=[[],[]];const count=byteArray[0]&0x1f;let position=2;for(let j=0;j<count;j++){const tmpByte=byteArray[position++];const ccbyte1=0x7f&byteArray[position++];const ccbyte2=0x7f&byteArray[position++];if(ccbyte1===0&&ccbyte2===0){continue;}const ccValid=(0x04&tmpByte)!==0;// Support all four channels
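/*
 * Layout of each 3-byte cc_data triplet consumed below (a sketch of CEA-708 carriage of CEA-608 data):
 *
 *   byte 0: flags -- bit 0x04 = cc_valid, bits 0x03 = cc_type (0 = 608 field 1, 1 = 608 field 2)
 *   byte 1: first 608 data byte  (parity bit masked off with 0x7f above)
 *   byte 2: second 608 data byte (parity bit masked off with 0x7f above)
 *
 * cc_type values 2 and 3 carry CEA-708 packet data and are skipped here.
 */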
if(ccValid){const ccType=0x03&tmpByte;if(0x00/* CEA608 field1*/===ccType||0x01/* CEA608 field2*/===ccType){// Exclude CEA708 CC data.
actualCCBytes[ccType].push(ccbyte1);actualCCBytes[ccType].push(ccbyte2);}}}return actualCCBytes;}}function captionsOrSubtitlesFromCharacteristics(track){if(track.characteristics){if(/transcribes-spoken-dialog/gi.test(track.characteristics)&&/describes-music-and-sound/gi.test(track.characteristics)){return 'captions';}}return 'subtitles';}function canReuseVttTextTrack(inUseTrack,manifestTrack){return !!inUseTrack&&inUseTrack.kind===captionsOrSubtitlesFromCharacteristics(manifestTrack)&&subtitleTrackMatchesTextTrack(manifestTrack,inUseTrack);}function intersection(x1,x2,y1,y2){return Math.min(x2,y2)-Math.max(x1,y1);}function newVTTCCs(){return {ccOffset:0,presentationOffset:0,0:{start:0,prevCC:-1,new:true}};}class CapLevelController{constructor(hls){this.hls=void 0;this.autoLevelCapping=void 0;this.firstLevel=void 0;this.media=void 0;this.restrictedLevels=void 0;this.timer=void 0;this.clientRect=void 0;this.streamController=void 0;this.hls=hls;this.autoLevelCapping=Number.POSITIVE_INFINITY;this.firstLevel=-1;this.media=null;this.restrictedLevels=[];this.timer=undefined;this.clientRect=null;this.registerListeners();}setStreamController(streamController){this.streamController=streamController;}destroy(){if(this.hls){this.unregisterListener();}if(this.timer){this.stopCapping();}this.media=null;this.clientRect=null;// @ts-ignore
this.hls=this.streamController=null;}registerListeners(){const{hls}=this;hls.on(Events.FPS_DROP_LEVEL_CAPPING,this.onFpsDropLevelCapping,this);hls.on(Events.MEDIA_ATTACHING,this.onMediaAttaching,this);hls.on(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.on(Events.LEVELS_UPDATED,this.onLevelsUpdated,this);hls.on(Events.BUFFER_CODECS,this.onBufferCodecs,this);hls.on(Events.MEDIA_DETACHING,this.onMediaDetaching,this);}unregisterListener(){const{hls}=this;hls.off(Events.FPS_DROP_LEVEL_CAPPING,this.onFpsDropLevelCapping,this);hls.off(Events.MEDIA_ATTACHING,this.onMediaAttaching,this);hls.off(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.off(Events.LEVELS_UPDATED,this.onLevelsUpdated,this);hls.off(Events.BUFFER_CODECS,this.onBufferCodecs,this);hls.off(Events.MEDIA_DETACHING,this.onMediaDetaching,this);}onFpsDropLevelCapping(event,data){// Don't add a restricted level more than once
const level=this.hls.levels[data.droppedLevel];if(this.isLevelAllowed(level)){this.restrictedLevels.push({bitrate:level.bitrate,height:level.height,width:level.width});}}onMediaAttaching(event,data){this.media=data.media instanceof HTMLVideoElement?data.media:null;this.clientRect=null;if(this.timer&&this.hls.levels.length){this.detectPlayerSize();}}onManifestParsed(event,data){const hls=this.hls;this.restrictedLevels=[];this.firstLevel=data.firstLevel;if(hls.config.capLevelToPlayerSize&&data.video){// Start capping immediately if the manifest has signaled video codecs
this.startCapping();}}onLevelsUpdated(event,data){if(this.timer&&isFiniteNumber(this.autoLevelCapping)){this.detectPlayerSize();}}// Only activate capping when playing a video stream; otherwise, multi-bitrate audio-only streams will be restricted
// to the first level
onBufferCodecs(event,data){const hls=this.hls;if(hls.config.capLevelToPlayerSize&&data.video){// If the manifest did not signal a video codec capping has been deferred until we're certain video is present
this.startCapping();}}onMediaDetaching(){this.stopCapping();}detectPlayerSize(){if(this.media){if(this.mediaHeight<=0||this.mediaWidth<=0){this.clientRect=null;return;}const levels=this.hls.levels;if(levels.length){const hls=this.hls;const maxLevel=this.getMaxLevel(levels.length-1);if(maxLevel!==this.autoLevelCapping){logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);}hls.autoLevelCapping=maxLevel;if(hls.autoLevelCapping>this.autoLevelCapping&&this.streamController){// if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
// this usually happens when the user goes to fullscreen mode.
this.streamController.nextLevelSwitch();}this.autoLevelCapping=hls.autoLevelCapping;}}}/*
* the returned level should be the one with dimensions equal to or greater than the media (player) dimensions (so the video will be downscaled)
*/getMaxLevel(capLevelIndex){const levels=this.hls.levels;if(!levels.length){return -1;}const validLevels=levels.filter((level,index)=>this.isLevelAllowed(level)&&index<=capLevelIndex);this.clientRect=null;return CapLevelController.getMaxLevelByMediaSize(validLevels,this.mediaWidth,this.mediaHeight);}startCapping(){if(this.timer){// Don't reset capping if started twice; this can happen if the manifest signals a video codec
return;}this.autoLevelCapping=Number.POSITIVE_INFINITY;self.clearInterval(this.timer);this.timer=self.setInterval(this.detectPlayerSize.bind(this),1000);this.detectPlayerSize();}stopCapping(){this.restrictedLevels=[];this.firstLevel=-1;this.autoLevelCapping=Number.POSITIVE_INFINITY;if(this.timer){self.clearInterval(this.timer);this.timer=undefined;}}getDimensions(){if(this.clientRect){return this.clientRect;}const media=this.media;const boundsRect={width:0,height:0};if(media){const clientRect=media.getBoundingClientRect();boundsRect.width=clientRect.width;boundsRect.height=clientRect.height;if(!boundsRect.width&&!boundsRect.height){// When the media element has no width or height (equivalent to not being in the DOM),
// then use its width and height attributes (media.width, media.height)
boundsRect.width=clientRect.right-clientRect.left||media.width||0;boundsRect.height=clientRect.bottom-clientRect.top||media.height||0;}}this.clientRect=boundsRect;return boundsRect;}get mediaWidth(){return this.getDimensions().width*this.contentScaleFactor;}get mediaHeight(){return this.getDimensions().height*this.contentScaleFactor;}get contentScaleFactor(){let pixelRatio=1;if(!this.hls.config.ignoreDevicePixelRatio){try{pixelRatio=self.devicePixelRatio;}catch(e){/* no-op */}}return pixelRatio;}isLevelAllowed(level){const restrictedLevels=this.restrictedLevels;return !restrictedLevels.some(restrictedLevel=>{return level.bitrate===restrictedLevel.bitrate&&level.width===restrictedLevel.width&&level.height===restrictedLevel.height;});}static getMaxLevelByMediaSize(levels,width,height){if(!(levels!=null&&levels.length)){return -1;}// Levels can have the same dimensions but differing bandwidths - since levels are ordered, we can look to the next
// to determine whether we've chosen the greatest bandwidth for the media's dimensions
const atGreatestBandwidth=(curLevel,nextLevel)=>{if(!nextLevel){return true;}return curLevel.width!==nextLevel.width||curLevel.height!==nextLevel.height;};// If we run through the loop without breaking, the media's dimensions are greater than every level, so default to
// the max level
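/*
 * Worked example (illustrative level list): with levels of 640x360, 1280x720 and 1920x1080 and a
 * 960x540 player, squareSize = max(960, 540) = 960. 640x360 fails the size check, while 1280x720
 * passes and differs in dimensions from the next level, so index 1 is returned.
 */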
let maxLevelIndex=levels.length-1;// Prevent changes in aspect-ratio from causing capping to toggle back and forth
const squareSize=Math.max(width,height);for(let i=0;i<levels.length;i+=1){const level=levels[i];if((level.width>=squareSize||level.height>=squareSize)&&atGreatestBandwidth(level,levels[i+1])){maxLevelIndex=i;break;}}return maxLevelIndex;}}class FPSController{constructor(hls){this.hls=void 0;this.isVideoPlaybackQualityAvailable=false;this.timer=void 0;this.media=null;this.lastTime=void 0;this.lastDroppedFrames=0;this.lastDecodedFrames=0;// stream controller must be provided as a dependency!
this.streamController=void 0;this.hls=hls;this.registerListeners();}setStreamController(streamController){this.streamController=streamController;}registerListeners(){this.hls.on(Events.MEDIA_ATTACHING,this.onMediaAttaching,this);}unregisterListeners(){this.hls.off(Events.MEDIA_ATTACHING,this.onMediaAttaching,this);}destroy(){if(this.timer){clearInterval(this.timer);}this.unregisterListeners();this.isVideoPlaybackQualityAvailable=false;this.media=null;}onMediaAttaching(event,data){const config=this.hls.config;if(config.capLevelOnFPSDrop){const media=data.media instanceof self.HTMLVideoElement?data.media:null;this.media=media;if(media&&typeof media.getVideoPlaybackQuality==='function'){this.isVideoPlaybackQualityAvailable=true;}self.clearInterval(this.timer);this.timer=self.setInterval(this.checkFPSInterval.bind(this),config.fpsDroppedMonitoringPeriod);}}checkFPS(video,decodedFrames,droppedFrames){const currentTime=performance.now();if(decodedFrames){if(this.lastTime){const currentPeriod=currentTime-this.lastTime;const currentDropped=droppedFrames-this.lastDroppedFrames;const currentDecoded=decodedFrames-this.lastDecodedFrames;const droppedFPS=1000*currentDropped/currentPeriod;const hls=this.hls;hls.trigger(Events.FPS_DROP,{currentDropped:currentDropped,currentDecoded:currentDecoded,totalDroppedFrames:droppedFrames});if(droppedFPS>0){// logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
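/*
 * Numeric sketch of the capping trigger below (threshold value illustrative): with
 * fpsDroppedMonitoringThreshold = 0.2, 150 newly decoded frames and 40 newly dropped frames,
 * 40 > 0.2 * 150 = 30, so FPS_DROP_LEVEL_CAPPING fires and playback is capped one level lower.
 */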
if(currentDropped>hls.config.fpsDroppedMonitoringThreshold*currentDecoded){let currentLevel=hls.currentLevel;logger.warn('drop FPS ratio greater than max allowed value for currentLevel: '+currentLevel);if(currentLevel>0&&(hls.autoLevelCapping===-1||hls.autoLevelCapping>=currentLevel)){currentLevel=currentLevel-1;hls.trigger(Events.FPS_DROP_LEVEL_CAPPING,{level:currentLevel,droppedLevel:hls.currentLevel});hls.autoLevelCapping=currentLevel;this.streamController.nextLevelSwitch();}}}}this.lastTime=currentTime;this.lastDroppedFrames=droppedFrames;this.lastDecodedFrames=decodedFrames;}}checkFPSInterval(){const video=this.media;if(video){if(this.isVideoPlaybackQualityAvailable){const videoPlaybackQuality=video.getVideoPlaybackQuality();this.checkFPS(video,videoPlaybackQuality.totalVideoFrames,videoPlaybackQuality.droppedVideoFrames);}else {// HTMLVideoElement doesn't include the webkit types
this.checkFPS(video,video.webkitDecodedFrameCount,video.webkitDroppedFrameCount);}}}}const LOGGER_PREFIX='[eme]';/**
* Controller to deal with encrypted media extensions (EME)
* @see https://developer.mozilla.org/en-US/docs/Web/API/Encrypted_Media_Extensions_API
*
* @class
* @constructor
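*
* A minimal configuration sketch (key-system name and URLs are placeholders, and the player
* constructor is assumed to be exposed as Hls; getLicenseServerUrl/getServerCertificateUrl below
* show which fields are read):
*
*   new Hls({
*     drmSystems: {
*       'com.widevine.alpha': {
*         licenseUrl: 'https://license.example.com/widevine',
*         serverCertificateUrl: 'https://license.example.com/widevine.cer',
*       },
*     },
*   });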
*/class EMEController{constructor(hls){this.hls=void 0;this.config=void 0;this.media=null;this.keyFormatPromise=null;this.keySystemAccessPromises={};this._requestLicenseFailureCount=0;this.mediaKeySessions=[];this.keyIdToKeySessionPromise={};this.setMediaKeysQueue=EMEController.CDMCleanupPromise?[EMEController.CDMCleanupPromise]:[];this.onMediaEncrypted=this._onMediaEncrypted.bind(this);this.onWaitingForKey=this._onWaitingForKey.bind(this);this.debug=logger.debug.bind(logger,LOGGER_PREFIX);this.log=logger.log.bind(logger,LOGGER_PREFIX);this.warn=logger.warn.bind(logger,LOGGER_PREFIX);this.error=logger.error.bind(logger,LOGGER_PREFIX);this.hls=hls;this.config=hls.config;this.registerListeners();}destroy(){this.unregisterListeners();this.onMediaDetached();// Remove any references that could be held in config options or callbacks
const config=this.config;config.requestMediaKeySystemAccessFunc=null;config.licenseXhrSetup=config.licenseResponseCallback=undefined;config.drmSystems=config.drmSystemOptions={};// @ts-ignore
this.hls=this.onMediaEncrypted=this.onWaitingForKey=this.keyIdToKeySessionPromise=null;// @ts-ignore
this.config=null;}registerListeners(){this.hls.on(Events.MEDIA_ATTACHED,this.onMediaAttached,this);this.hls.on(Events.MEDIA_DETACHED,this.onMediaDetached,this);this.hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);this.hls.on(Events.MANIFEST_LOADED,this.onManifestLoaded,this);}unregisterListeners(){this.hls.off(Events.MEDIA_ATTACHED,this.onMediaAttached,this);this.hls.off(Events.MEDIA_DETACHED,this.onMediaDetached,this);this.hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);this.hls.off(Events.MANIFEST_LOADED,this.onManifestLoaded,this);}getLicenseServerUrl(keySystem){const{drmSystems,widevineLicenseUrl}=this.config;const keySystemConfiguration=drmSystems[keySystem];if(keySystemConfiguration){return keySystemConfiguration.licenseUrl;}// For backward compatibility
if(keySystem===KeySystems.WIDEVINE&&widevineLicenseUrl){return widevineLicenseUrl;}throw new Error(`no license server URL configured for key-system "${keySystem}"`);}getServerCertificateUrl(keySystem){const{drmSystems}=this.config;const keySystemConfiguration=drmSystems[keySystem];if(keySystemConfiguration){return keySystemConfiguration.serverCertificateUrl;}else {this.log(`No Server Certificate in config.drmSystems["${keySystem}"]`);}}attemptKeySystemAccess(keySystemsToAttempt){const levels=this.hls.levels;const uniqueCodec=(value,i,a)=>!!value&&a.indexOf(value)===i;const audioCodecs=levels.map(level=>level.audioCodec).filter(uniqueCodec);const videoCodecs=levels.map(level=>level.videoCodec).filter(uniqueCodec);if(audioCodecs.length+videoCodecs.length===0){videoCodecs.push('avc1.42e01e');}return new Promise((resolve,reject)=>{const attempt=keySystems=>{const keySystem=keySystems.shift();this.getMediaKeysPromise(keySystem,audioCodecs,videoCodecs).then(mediaKeys=>resolve({keySystem,mediaKeys})).catch(error=>{if(keySystems.length){attempt(keySystems);}else if(error instanceof EMEKeyError){reject(error);}else {reject(new EMEKeyError({type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_NO_ACCESS,error,fatal:true},error.message));}});};attempt(keySystemsToAttempt);});}requestMediaKeySystemAccess(keySystem,supportedConfigurations){const{requestMediaKeySystemAccessFunc}=this.config;if(!(typeof requestMediaKeySystemAccessFunc==='function')){let errMessage=`Configured requestMediaKeySystemAccess is not a function ${requestMediaKeySystemAccessFunc}`;if(requestMediaKeySystemAccess===null&&self.location.protocol==='http:'){errMessage=`navigator.requestMediaKeySystemAccess is not available over insecure protocol ${location.protocol}`;}return Promise.reject(new Error(errMessage));}return requestMediaKeySystemAccessFunc(keySystem,supportedConfigurations);}getMediaKeysPromise(keySystem,audioCodecs,videoCodecs){// This can throw, but is caught in event handler callpath
const mediaKeySystemConfigs=getSupportedMediaKeySystemConfigurations(keySystem,audioCodecs,videoCodecs,this.config.drmSystemOptions);const keySystemAccessPromises=this.keySystemAccessPromises[keySystem];let keySystemAccess=keySystemAccessPromises==null?void 0:keySystemAccessPromises.keySystemAccess;if(!keySystemAccess){this.log(`Requesting encrypted media "${keySystem}" key-system access with config: ${JSON.stringify(mediaKeySystemConfigs)}`);keySystemAccess=this.requestMediaKeySystemAccess(keySystem,mediaKeySystemConfigs);const _keySystemAccessPromises=this.keySystemAccessPromises[keySystem]={keySystemAccess};keySystemAccess.catch(error=>{this.log(`Failed to obtain access to key-system "${keySystem}": ${error}`);});return keySystemAccess.then(mediaKeySystemAccess=>{this.log(`Access for key-system "${mediaKeySystemAccess.keySystem}" obtained`);const certificateRequest=this.fetchServerCertificate(keySystem);this.log(`Create media-keys for "${keySystem}"`);_keySystemAccessPromises.mediaKeys=mediaKeySystemAccess.createMediaKeys().then(mediaKeys=>{this.log(`Media-keys created for "${keySystem}"`);return certificateRequest.then(certificate=>{if(certificate){return this.setMediaKeysServerCertificate(mediaKeys,keySystem,certificate);}return mediaKeys;});});_keySystemAccessPromises.mediaKeys.catch(error=>{this.error(`Failed to create media-keys for "${keySystem}"}: ${error}`);});return _keySystemAccessPromises.mediaKeys;});}return keySystemAccess.then(()=>keySystemAccessPromises.mediaKeys);}createMediaKeySessionContext({decryptdata,keySystem,mediaKeys}){this.log(`Creating key-system session "${keySystem}" keyId: ${Hex.hexDump(decryptdata.keyId||[])}`);const mediaKeysSession=mediaKeys.createSession();const mediaKeySessionContext={decryptdata,keySystem,mediaKeys,mediaKeysSession,keyStatus:'status-pending'};this.mediaKeySessions.push(mediaKeySessionContext);return mediaKeySessionContext;}renewKeySession(mediaKeySessionContext){const decryptdata=mediaKeySessionContext.decryptdata;if(decryptdata.pssh){const keySessionContext=this.createMediaKeySessionContext(mediaKeySessionContext);const keyId=this.getKeyIdString(decryptdata);const scheme='cenc';this.keyIdToKeySessionPromise[keyId]=this.generateRequestWithPreferredKeySession(keySessionContext,scheme,decryptdata.pssh,'expired');}else {this.warn(`Could not renew expired session. Missing pssh initData.`);}this.removeSession(mediaKeySessionContext);}getKeyIdString(decryptdata){if(!decryptdata){throw new Error('Could not read keyId of undefined decryptdata');}if(decryptdata.keyId===null){throw new Error('keyId is null');}return Hex.hexDump(decryptdata.keyId);}updateKeySession(mediaKeySessionContext,data){var _mediaKeySessionConte;const keySession=mediaKeySessionContext.mediaKeysSession;this.log(`Updating key-session "${keySession.sessionId}" for keyID ${Hex.hexDump(((_mediaKeySessionConte=mediaKeySessionContext.decryptdata)==null?void 0:_mediaKeySessionConte.keyId)||[])}
} (data length: ${data?data.byteLength:data})`);return keySession.update(data);}selectKeySystemFormat(frag){const keyFormats=Object.keys(frag.levelkeys||{});if(!this.keyFormatPromise){this.log(`Selecting key-system from fragment (sn: ${frag.sn} ${frag.type}: ${frag.level}) key formats ${keyFormats.join(', ')}`);this.keyFormatPromise=this.getKeyFormatPromise(keyFormats);}return this.keyFormatPromise;}getKeyFormatPromise(keyFormats){return new Promise((resolve,reject)=>{const keySystemsInConfig=getKeySystemsForConfig(this.config);const keySystemsToAttempt=keyFormats.map(keySystemFormatToKeySystemDomain).filter(value=>!!value&&keySystemsInConfig.indexOf(value)!==-1);return this.getKeySystemSelectionPromise(keySystemsToAttempt).then(({keySystem})=>{const keySystemFormat=keySystemDomainToKeySystemFormat(keySystem);if(keySystemFormat){resolve(keySystemFormat);}else {reject(new Error(`Unable to find format for key-system "${keySystem}"`));}}).catch(reject);});}loadKey(data){const decryptdata=data.keyInfo.decryptdata;const keyId=this.getKeyIdString(decryptdata);const keyDetails=`(keyId: ${keyId} format: "${decryptdata.keyFormat}" method: ${decryptdata.method} uri: ${decryptdata.uri})`;this.log(`Starting session for key ${keyDetails}`);let keySessionContextPromise=this.keyIdToKeySessionPromise[keyId];if(!keySessionContextPromise){keySessionContextPromise=this.keyIdToKeySessionPromise[keyId]=this.getKeySystemForKeyPromise(decryptdata).then(({keySystem,mediaKeys})=>{this.throwIfDestroyed();this.log(`Handle encrypted media sn: ${data.frag.sn} ${data.frag.type}: ${data.frag.level} using key ${keyDetails}`);return this.attemptSetMediaKeys(keySystem,mediaKeys).then(()=>{this.throwIfDestroyed();const keySessionContext=this.createMediaKeySessionContext({keySystem,mediaKeys,decryptdata});const scheme='cenc';return this.generateRequestWithPreferredKeySession(keySessionContext,scheme,decryptdata.pssh,'playlist-key');});});keySessionContextPromise.catch(error=>this.handleError(error));}return keySessionContextPromise;}throwIfDestroyed(message='Invalid state'){if(!this.hls){throw new Error('invalid state');}}handleError(error){if(!this.hls){return;}this.error(error.message);if(error instanceof EMEKeyError){this.hls.trigger(Events.ERROR,error.data);}else {this.hls.trigger(Events.ERROR,{type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_NO_KEYS,error,fatal:true});}}getKeySystemForKeyPromise(decryptdata){const keyId=this.getKeyIdString(decryptdata);const mediaKeySessionContext=this.keyIdToKeySessionPromise[keyId];if(!mediaKeySessionContext){const keySystem=keySystemFormatToKeySystemDomain(decryptdata.keyFormat);const keySystemsToAttempt=keySystem?[keySystem]:getKeySystemsForConfig(this.config);return this.attemptKeySystemAccess(keySystemsToAttempt);}return mediaKeySessionContext;}getKeySystemSelectionPromise(keySystemsToAttempt){if(!keySystemsToAttempt.length){keySystemsToAttempt=getKeySystemsForConfig(this.config);}if(keySystemsToAttempt.length===0){throw new EMEKeyError({type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_NO_CONFIGURED_LICENSE,fatal:true},`Missing key-system license configuration options ${JSON.stringify({drmSystems:this.config.drmSystems})}`);}return this.attemptKeySystemAccess(keySystemsToAttempt);}_onMediaEncrypted(event){const{initDataType,initData}=event;this.debug(`"${event.type}" event: init data type: "${initDataType}"`);// Ignore event when initData is null
if(initData===null){return;}let keyId;let keySystemDomain;if(initDataType==='sinf'&&this.config.drmSystems[KeySystems.FAIRPLAY]){// Match sinf keyId to playlist skd://keyId=
const json=bin2str(new Uint8Array(initData));try{const sinf=base64Decode(JSON.parse(json).sinf);const tenc=parseSinf(new Uint8Array(sinf));if(!tenc){return;}keyId=tenc.subarray(8,24);keySystemDomain=KeySystems.FAIRPLAY;}catch(error){this.warn('Failed to parse sinf "encrypted" event message initData');return;}}else {// Support clear-lead key-session creation (otherwise depend on playlist keys)
const psshInfo=parsePssh(initData);if(psshInfo===null){return;}if(psshInfo.version===0&&psshInfo.systemId===KeySystemIds.WIDEVINE&&psshInfo.data){keyId=psshInfo.data.subarray(8,24);}keySystemDomain=keySystemIdToKeySystemDomain(psshInfo.systemId);}if(!keySystemDomain||!keyId){return;}const keyIdHex=Hex.hexDump(keyId);const{keyIdToKeySessionPromise,mediaKeySessions}=this;let keySessionContextPromise=keyIdToKeySessionPromise[keyIdHex];for(let i=0;i<mediaKeySessions.length;i++){// Match playlist key
const keyContext=mediaKeySessions[i];const decryptdata=keyContext.decryptdata;if(decryptdata.pssh||!decryptdata.keyId){continue;}const oldKeyIdHex=Hex.hexDump(decryptdata.keyId);if(keyIdHex===oldKeyIdHex||decryptdata.uri.replace(/-/g,'').indexOf(keyIdHex)!==-1){keySessionContextPromise=keyIdToKeySessionPromise[oldKeyIdHex];delete keyIdToKeySessionPromise[oldKeyIdHex];decryptdata.pssh=new Uint8Array(initData);decryptdata.keyId=keyId;keySessionContextPromise=keyIdToKeySessionPromise[keyIdHex]=keySessionContextPromise.then(()=>{return this.generateRequestWithPreferredKeySession(keyContext,initDataType,initData,'encrypted-event-key-match');});break;}}if(!keySessionContextPromise){// Clear-lead key (not encountered in playlist)
keySessionContextPromise=keyIdToKeySessionPromise[keyIdHex]=this.getKeySystemSelectionPromise([keySystemDomain]).then(({keySystem,mediaKeys})=>{var _keySystemToKeySystem;this.throwIfDestroyed();const decryptdata=new LevelKey('ISO-23001-7',keyIdHex,(_keySystemToKeySystem=keySystemDomainToKeySystemFormat(keySystem))!=null?_keySystemToKeySystem:'');decryptdata.pssh=new Uint8Array(initData);decryptdata.keyId=keyId;return this.attemptSetMediaKeys(keySystem,mediaKeys).then(()=>{this.throwIfDestroyed();const keySessionContext=this.createMediaKeySessionContext({decryptdata,keySystem,mediaKeys});return this.generateRequestWithPreferredKeySession(keySessionContext,initDataType,initData,'encrypted-event-no-match');});});}keySessionContextPromise.catch(error=>this.handleError(error));}_onWaitingForKey(event){this.log(`"${event.type}" event`);}attemptSetMediaKeys(keySystem,mediaKeys){const queue=this.setMediaKeysQueue.slice();this.log(`Setting media-keys for "${keySystem}"`);// Only one setMediaKeys() can run at one time, and multiple setMediaKeys() operations
// can be queued for execution for multiple key sessions.
const setMediaKeysPromise=Promise.all(queue).then(()=>{if(!this.media){throw new Error('Attempted to set mediaKeys without media element attached');}return this.media.setMediaKeys(mediaKeys);});this.setMediaKeysQueue.push(setMediaKeysPromise);return setMediaKeysPromise.then(()=>{this.log(`Media-keys set for "${keySystem}"`);queue.push(setMediaKeysPromise);this.setMediaKeysQueue=this.setMediaKeysQueue.filter(p=>queue.indexOf(p)===-1);});}generateRequestWithPreferredKeySession(context,initDataType,initData,reason){var _this$config$drmSyste,_this$config$drmSyste2;const generateRequestFilter=(_this$config$drmSyste=this.config.drmSystems)==null?void 0:(_this$config$drmSyste2=_this$config$drmSyste[context.keySystem])==null?void 0:_this$config$drmSyste2.generateRequest;if(generateRequestFilter){try{const mappedInitData=generateRequestFilter.call(this.hls,initDataType,initData,context);if(!mappedInitData){throw new Error('Invalid response from configured generateRequest filter');}initDataType=mappedInitData.initDataType;initData=context.decryptdata.pssh=mappedInitData.initData?new Uint8Array(mappedInitData.initData):null;}catch(error){var _this$hls;this.warn(error.message);if((_this$hls=this.hls)!=null&&_this$hls.config.debug){throw error;}}}if(initData===null){this.log(`Skipping key-session request for "${reason}" (no initData)`);return Promise.resolve(context);}const keyId=this.getKeyIdString(context.decryptdata);this.log(`Generating key-session request for "${reason}": ${keyId} (init data type: ${initDataType} length: ${initData?initData.byteLength:null})`);const licenseStatus=new EventEmitter();const onmessage=context._onmessage=event=>{const keySession=context.mediaKeysSession;if(!keySession){licenseStatus.emit('error',new Error('invalid state'));return;}const{messageType,message}=event;this.log(`"${messageType}" message event for session "${keySession.sessionId}" message size: ${message.byteLength}`);if(messageType==='license-request'||messageType==='license-renewal'){this.renewLicense(context,message).catch(error=>{this.handleError(error);licenseStatus.emit('error',error);});}else if(messageType==='license-release'){if(context.keySystem===KeySystems.FAIRPLAY){this.updateKeySession(context,strToUtf8array('acknowledged'));this.removeSession(context);}}else {this.warn(`unhandled media key message type "${messageType}"`);}};const onkeystatuseschange=context._onkeystatuseschange=event=>{const keySession=context.mediaKeysSession;if(!keySession){licenseStatus.emit('error',new Error('invalid state'));return;}this.onKeyStatusChange(context);const keyStatus=context.keyStatus;licenseStatus.emit('keyStatus',keyStatus);if(keyStatus==='expired'){this.warn(`${context.keySystem} expired for key ${keyId}`);this.renewKeySession(context);}};context.mediaKeysSession.addEventListener('message',onmessage);context.mediaKeysSession.addEventListener('keystatuseschange',onkeystatuseschange);const keyUsablePromise=new Promise((resolve,reject)=>{licenseStatus.on('error',reject);licenseStatus.on('keyStatus',keyStatus=>{if(keyStatus.startsWith('usable')){resolve();}else if(keyStatus==='output-restricted'){reject(new EMEKeyError({type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED,fatal:false},'HDCP level output restricted'));}else if(keyStatus==='internal-error'){reject(new EMEKeyError({type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_STATUS_INTERNAL_ERROR,fatal:true},`key status changed to "${keyStatus}"`));}else if(keyStatus==='expired'){reject(new Error('key 
expired while generating request'));}else {this.warn(`unhandled key status change "${keyStatus}"`);}});});return context.mediaKeysSession.generateRequest(initDataType,initData).then(()=>{var _context$mediaKeysSes;this.log(`Request generated for key-session "${(_context$mediaKeysSes=context.mediaKeysSession)==null?void 0:_context$mediaKeysSes.sessionId}" keyId: ${keyId}`);}).catch(error=>{throw new EMEKeyError({type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_NO_SESSION,error,fatal:false},`Error generating key-session request: ${error}`);}).then(()=>keyUsablePromise).catch(error=>{licenseStatus.removeAllListeners();this.removeSession(context);throw error;}).then(()=>{licenseStatus.removeAllListeners();return context;});}onKeyStatusChange(mediaKeySessionContext){mediaKeySessionContext.mediaKeysSession.keyStatuses.forEach((status,keyId)=>{this.log(`key status change "${status}" for keyStatuses keyId: ${Hex.hexDump('buffer'in keyId?new Uint8Array(keyId.buffer,keyId.byteOffset,keyId.byteLength):new Uint8Array(keyId))} session keyId: ${Hex.hexDump(new Uint8Array(mediaKeySessionContext.decryptdata.keyId||[]))} uri: ${mediaKeySessionContext.decryptdata.uri}`);mediaKeySessionContext.keyStatus=status;});}fetchServerCertificate(keySystem){const config=this.config;const Loader=config.loader;const certLoader=new Loader(config);const url=this.getServerCertificateUrl(keySystem);if(!url){return Promise.resolve();}this.log(`Fetching server certificate for "${keySystem}"`);return new Promise((resolve,reject)=>{const loaderContext={responseType:'arraybuffer',url};const loadPolicy=config.certLoadPolicy.default;const loaderConfig={loadPolicy,timeout:loadPolicy.maxLoadTimeMs,maxRetry:0,retryDelay:0,maxRetryDelay:0};const loaderCallbacks={onSuccess:(response,stats,context,networkDetails)=>{resolve(response.data);},onError:(response,contex,networkDetails,stats)=>{reject(new EMEKeyError({type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_SERVER_CERTIFICATE_REQUEST_FAILED,fatal:true,networkDetails,response:_objectSpread2({url:loaderContext.url,data:undefined},response)},`"${keySystem}" certificate request failed (${url}). 
Status: ${response.code} (${response.text})`));},onTimeout:(stats,context,networkDetails)=>{reject(new EMEKeyError({type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_SERVER_CERTIFICATE_REQUEST_FAILED,fatal:true,networkDetails,response:{url:loaderContext.url,data:undefined}},`"${keySystem}" certificate request timed out (${url})`));},onAbort:(stats,context,networkDetails)=>{reject(new Error('aborted'));}};certLoader.load(loaderContext,loaderConfig,loaderCallbacks);});}setMediaKeysServerCertificate(mediaKeys,keySystem,cert){return new Promise((resolve,reject)=>{mediaKeys.setServerCertificate(cert).then(success=>{this.log(`setServerCertificate ${success?'success':'not supported by CDM'} (${cert==null?void 0:cert.byteLength}) on "${keySystem}"`);resolve(mediaKeys);}).catch(error=>{reject(new EMEKeyError({type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_SERVER_CERTIFICATE_UPDATE_FAILED,error,fatal:true},error.message));});});}renewLicense(context,keyMessage){return this.requestLicense(context,new Uint8Array(keyMessage)).then(data=>{return this.updateKeySession(context,new Uint8Array(data)).catch(error=>{throw new EMEKeyError({type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_SESSION_UPDATE_FAILED,error,fatal:true},error.message);});});}unpackPlayReadyKeyMessage(xhr,licenseChallenge){// On Edge, the raw license message is UTF-16-encoded XML. We need
// to unpack the Challenge element (base64-encoded string containing the
// actual license request) and any HttpHeader elements (sent as request
// headers).
// For PlayReady CDMs, we need to dig the Challenge out of the XML.
const xmlString=String.fromCharCode.apply(null,new Uint16Array(licenseChallenge.buffer));if(!xmlString.includes('PlayReadyKeyMessage')){// This does not appear to be a wrapped message as on Edge. Some
// clients do not need this unwrapping, so we will assume this is one of
// them. Note that "xml" at this point probably looks like random
// garbage, since we interpreted UTF-8 as UTF-16.
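// Illustrative note (not from the upstream source): the wrapped message handled below is XML of
// roughly the form <PlayReadyKeyMessage>...<HttpHeader><name>...</name><value>...</value></HttpHeader>
// ...<Challenge>base64</Challenge>...</PlayReadyKeyMessage>; only the PlayReadyKeyMessage marker,
// the HttpHeader name/value pairs and the base64 Challenge text are used by this function.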
xhr.setRequestHeader('Content-Type','text/xml; charset=utf-8');return licenseChallenge;}const keyMessageXml=new DOMParser().parseFromString(xmlString,'application/xml');// Set request headers.
const headers=keyMessageXml.querySelectorAll('HttpHeader');if(headers.length>0){let header;for(let i=0,len=headers.length;i<len;i++){var _header$querySelector,_header$querySelector2;header=headers[i];const name=(_header$querySelector=header.querySelector('name'))==null?void 0:_header$querySelector.textContent;const value=(_header$querySelector2=header.querySelector('value'))==null?void 0:_header$querySelector2.textContent;if(name&&value){xhr.setRequestHeader(name,value);}}}const challengeElement=keyMessageXml.querySelector('Challenge');const challengeText=challengeElement==null?void 0:challengeElement.textContent;if(!challengeText){throw new Error(`Cannot find <Challenge> in key message`);}return strToUtf8array(atob(challengeText));}setupLicenseXHR(xhr,url,keysListItem,licenseChallenge){const licenseXhrSetup=this.config.licenseXhrSetup;if(!licenseXhrSetup){xhr.open('POST',url,true);return Promise.resolve({xhr,licenseChallenge});}return Promise.resolve().then(()=>{if(!keysListItem.decryptdata){throw new Error('Key removed');}return licenseXhrSetup.call(this.hls,xhr,url,keysListItem,licenseChallenge);}).catch(error=>{if(!keysListItem.decryptdata){// Key session removed. Cancel license request.
throw error;}// let's try to open before running setup
xhr.open('POST',url,true);return licenseXhrSetup.call(this.hls,xhr,url,keysListItem,licenseChallenge);}).then(licenseXhrSetupResult=>{// if licenseXhrSetup did not yet call open, let's do it now
if(!xhr.readyState){xhr.open('POST',url,true);}const finalLicenseChallenge=licenseXhrSetupResult?licenseXhrSetupResult:licenseChallenge;return {xhr,licenseChallenge:finalLicenseChallenge};});}requestLicense(keySessionContext,licenseChallenge){const keyLoadPolicy=this.config.keyLoadPolicy.default;return new Promise((resolve,reject)=>{const url=this.getLicenseServerUrl(keySessionContext.keySystem);this.log(`Sending license request to URL: ${url}`);const xhr=new XMLHttpRequest();xhr.responseType='arraybuffer';xhr.onreadystatechange=()=>{if(!this.hls||!keySessionContext.mediaKeysSession){return reject(new Error('invalid state'));}if(xhr.readyState===4){if(xhr.status===200){this._requestLicenseFailureCount=0;let data=xhr.response;this.log(`License received ${data instanceof ArrayBuffer?data.byteLength:data}`);const licenseResponseCallback=this.config.licenseResponseCallback;if(licenseResponseCallback){try{data=licenseResponseCallback.call(this.hls,xhr,url,keySessionContext);}catch(error){this.error(error);}}resolve(data);}else {const retryConfig=keyLoadPolicy.errorRetry;const maxNumRetry=retryConfig?retryConfig.maxNumRetry:0;this._requestLicenseFailureCount++;if(this._requestLicenseFailureCount>maxNumRetry||xhr.status>=400&&xhr.status<500){reject(new EMEKeyError({type:ErrorTypes.KEY_SYSTEM_ERROR,details:ErrorDetails.KEY_SYSTEM_LICENSE_REQUEST_FAILED,fatal:true,networkDetails:xhr,response:{url,data:undefined,code:xhr.status,text:xhr.statusText}},`License Request XHR failed (${url}). Status: ${xhr.status} (${xhr.statusText})`));}else {const attemptsLeft=maxNumRetry-this._requestLicenseFailureCount+1;this.warn(`Retrying license request, ${attemptsLeft} attempts left`);this.requestLicense(keySessionContext,licenseChallenge).then(resolve,reject);}}}};if(keySessionContext.licenseXhr&&keySessionContext.licenseXhr.readyState!==XMLHttpRequest.DONE){keySessionContext.licenseXhr.abort();}keySessionContext.licenseXhr=xhr;this.setupLicenseXHR(xhr,url,keySessionContext,licenseChallenge).then(({xhr,licenseChallenge})=>{if(keySessionContext.keySystem==KeySystems.PLAYREADY){licenseChallenge=this.unpackPlayReadyKeyMessage(xhr,licenseChallenge);}xhr.send(licenseChallenge);});});}onMediaAttached(event,data){if(!this.config.emeEnabled){return;}const media=data.media;// keep reference of media
this.media=media;media.addEventListener('encrypted',this.onMediaEncrypted);media.addEventListener('waitingforkey',this.onWaitingForKey);}onMediaDetached(){const media=this.media;const mediaKeysList=this.mediaKeySessions;if(media){media.removeEventListener('encrypted',this.onMediaEncrypted);media.removeEventListener('waitingforkey',this.onWaitingForKey);this.media=null;}this._requestLicenseFailureCount=0;this.setMediaKeysQueue=[];this.mediaKeySessions=[];this.keyIdToKeySessionPromise={};LevelKey.clearKeyUriToKeyIdMap();// Close all sessions and remove media keys from the video element.
const keySessionCount=mediaKeysList.length;EMEController.CDMCleanupPromise=Promise.all(mediaKeysList.map(mediaKeySessionContext=>this.removeSession(mediaKeySessionContext)).concat(media==null?void 0:media.setMediaKeys(null).catch(error=>{this.log(`Could not clear media keys: ${error}`);}))).then(()=>{if(keySessionCount){this.log('finished closing key sessions and clearing media keys');mediaKeysList.length=0;}}).catch(error=>{this.log(`Could not close sessions and clear media keys: ${error}`);});}onManifestLoading(){this.keyFormatPromise=null;}onManifestLoaded(event,{sessionKeys}){if(!sessionKeys||!this.config.emeEnabled){return;}if(!this.keyFormatPromise){const keyFormats=sessionKeys.reduce((formats,sessionKey)=>{if(formats.indexOf(sessionKey.keyFormat)===-1){formats.push(sessionKey.keyFormat);}return formats;},[]);this.log(`Selecting key-system from session-keys ${keyFormats.join(', ')}`);this.keyFormatPromise=this.getKeyFormatPromise(keyFormats);}}removeSession(mediaKeySessionContext){const{mediaKeysSession,licenseXhr}=mediaKeySessionContext;if(mediaKeysSession){this.log(`Remove licenses and keys and close session ${mediaKeysSession.sessionId}`);if(mediaKeySessionContext._onmessage){mediaKeysSession.removeEventListener('message',mediaKeySessionContext._onmessage);mediaKeySessionContext._onmessage=undefined;}if(mediaKeySessionContext._onkeystatuseschange){mediaKeysSession.removeEventListener('keystatuseschange',mediaKeySessionContext._onkeystatuseschange);mediaKeySessionContext._onkeystatuseschange=undefined;}if(licenseXhr&&licenseXhr.readyState!==XMLHttpRequest.DONE){licenseXhr.abort();}mediaKeySessionContext.mediaKeysSession=mediaKeySessionContext.decryptdata=mediaKeySessionContext.licenseXhr=undefined;const index=this.mediaKeySessions.indexOf(mediaKeySessionContext);if(index>-1){this.mediaKeySessions.splice(index,1);}return mediaKeysSession.remove().catch(error=>{this.log(`Could not remove session: ${error}`);}).then(()=>{return mediaKeysSession.close();}).catch(error=>{this.log(`Could not close session: ${error}`);});}}}EMEController.CDMCleanupPromise=void 0;class EMEKeyError extends Error{constructor(data,message){super(message);this.data=void 0;data.error||(data.error=new Error(message));this.data=data;data.err=data.error;}}/**
* Common Media Object Type
*
* @group CMCD
* @group CMSD
*
* @beta
*/var CmObjectType;(function(CmObjectType){/**
* text file, such as a manifest or playlist
*/CmObjectType["MANIFEST"]="m";/**
* audio only
*/CmObjectType["AUDIO"]="a";/**
* video only
*/CmObjectType["VIDEO"]="v";/**
* muxed audio and video
*/CmObjectType["MUXED"]="av";/**
* init segment
*/CmObjectType["INIT"]="i";/**
* caption or subtitle
*/CmObjectType["CAPTION"]="c";/**
* ISOBMFF timed text track
*/CmObjectType["TIMED_TEXT"]="tt";/**
* cryptographic key, license or certificate.
*/CmObjectType["KEY"]="k";/**
* other
*/CmObjectType["OTHER"]="o";})(CmObjectType||(CmObjectType={}));/**
* Common Media Streaming Format
*
* @group CMCD
* @group CMSD
*
* @beta
*/var CmStreamingFormat;(function(CmStreamingFormat){/**
* MPEG DASH
*/CmStreamingFormat["DASH"]="d";/**
* HTTP Live Streaming (HLS)
*/CmStreamingFormat["HLS"]="h";/**
* Smooth Streaming
*/CmStreamingFormat["SMOOTH"]="s";/**
* Other
*/CmStreamingFormat["OTHER"]="o";})(CmStreamingFormat||(CmStreamingFormat={}));/**
* CMCD header fields.
*
* @group CMCD
*
* @beta
*/var CmcdHeaderField;(function(CmcdHeaderField){/**
* keys whose values vary with the object being requested.
*/CmcdHeaderField["OBJECT"]="CMCD-Object";/**
* keys whose values vary with each request.
*/CmcdHeaderField["REQUEST"]="CMCD-Request";/**
* keys whose values are expected to be invariant over the life of the session.
*/CmcdHeaderField["SESSION"]="CMCD-Session";/**
* keys whose values do not vary with every request or object.
*/CmcdHeaderField["STATUS"]="CMCD-Status";})(CmcdHeaderField||(CmcdHeaderField={}));/**
* The map of CMCD header fields to official CMCD keys.
*
* @internal
*
* @group CMCD
*/const CmcdHeaderMap={[CmcdHeaderField.OBJECT]:['br','d','ot','tb'],[CmcdHeaderField.REQUEST]:['bl','dl','mtp','nor','nrr','su'],[CmcdHeaderField.SESSION]:['cid','pr','sf','sid','st','v'],[CmcdHeaderField.STATUS]:['bs','rtp']};/**
* Structured Field Item
*
* @group Structured Field
*
* @beta
*/class SfItem{constructor(value,params){this.value=void 0;this.params=void 0;if(Array.isArray(value)){value=value.map(v=>v instanceof SfItem?v:new SfItem(v));}this.value=value;this.params=params;}}/**
* A class to represent structured field tokens when `Symbol` is not available.
*
* @group Structured Field
*
* @beta
*/class SfToken{constructor(description){this.description=void 0;this.description=description;}}const DICT='Dict';function format(value){if(Array.isArray(value)){return JSON.stringify(value);}if(value instanceof Map){return 'Map{}';}if(value instanceof Set){return 'Set{}';}if(typeof value==='object'){return JSON.stringify(value);}return String(value);}function throwError(action,src,type,cause){return new Error(`failed to ${action} "${format(src)}" as ${type}`,{cause});}const BARE_ITEM='Bare Item';const BOOLEAN='Boolean';const BYTES='Byte Sequence';const DECIMAL='Decimal';const INTEGER='Integer';function isInvalidInt(value){return value<-999999999999999||999999999999999<value;}const STRING_REGEX=/[\x00-\x1f\x7f]+/;// eslint-disable-line no-control-regex
const TOKEN='Token';const KEY='Key';function serializeError(src,type,cause){return throwError('serialize',src,type,cause);}// 4.1.9. Serializing a Boolean
//
// Given a Boolean as input_boolean, return an ASCII string suitable for
// use in a HTTP field value.
//
// 1. If input_boolean is not a boolean, fail serialization.
//
// 2. Let output be an empty string.
//
// 3. Append "?" to output.
//
// 4. If input_boolean is true, append "1" to output.
//
// 5. If input_boolean is false, append "0" to output.
//
// 6. Return output.
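// Illustrative example (not from the upstream source), based on serializeBoolean() below:
//   serializeBoolean(true)  -> '?1'
//   serializeBoolean(false) -> '?0'
//   serializeBoolean('yes') -> throws: failed to serialize "yes" as Boolean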
function serializeBoolean(value){if(typeof value!=='boolean'){throw serializeError(value,BOOLEAN);}return value?'?1':'?0';}/**
 * Encodes binary data to base64
 *
 * @param binary - The binary data to encode
 * @returns The base64 encoded string
 *
 * @group Utils
 *
 * @beta
*/function base64encode(binary){return btoa(String.fromCharCode(...binary));}// 4.1.8. Serializing a Byte Sequence
//
// Given a Byte Sequence as input_bytes, return an ASCII string suitable
// for use in a HTTP field value.
//
// 1. If input_bytes is not a sequence of bytes, fail serialization.
//
// 2. Let output be an empty string.
//
// 3. Append ":" to output.
//
// 4. Append the result of base64-encoding input_bytes as per
// [RFC4648], Section 4, taking account of the requirements below.
//
// 5. Append ":" to output.
//
// 6. Return output.
//
// The encoded data is required to be padded with "=", as per [RFC4648],
// Section 3.2.
//
// Likewise, encoded data SHOULD have pad bits set to zero, as per
// [RFC4648], Section 3.5, unless it is not possible to do so due to
// implementation constraints.
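// Illustrative example (not from the upstream source), based on serializeByteSequence() below:
//   serializeByteSequence(new Uint8Array([104, 108, 115])) -> ':aGxz:'   (base64 of "hls")
//   serializeByteSequence('hls')                           -> throws (not an ArrayBuffer view)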
function serializeByteSequence(value){if(ArrayBuffer.isView(value)===false){throw serializeError(value,BYTES);}return `:${base64encode(value)}:`;}// 4.1.4. Serializing an Integer
//
// Given an Integer as input_integer, return an ASCII string suitable
// for use in a HTTP field value.
//
// 1. If input_integer is not an integer in the range of
// -999,999,999,999,999 to 999,999,999,999,999 inclusive, fail
// serialization.
//
// 2. Let output be an empty string.
//
// 3. If input_integer is less than (but not equal to) 0, append "-" to
// output.
//
// 4. Append input_integer's numeric value represented in base 10 using
// only decimal digits to output.
//
// 5. Return output.
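// Illustrative example (not from the upstream source), based on serializeInteger() below:
//   serializeInteger(42)               -> '42'
//   serializeInteger(-17)              -> '-17'
//   serializeInteger(1000000000000000) -> throws (outside +/-999,999,999,999,999)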
function serializeInteger(value){if(isInvalidInt(value)){throw serializeError(value,INTEGER);}return value.toString();}// 4.1.10. Serializing a Date
//
// Given a Date as input_date, return an ASCII string suitable for
// use in an HTTP field value.
// 1. Let output be "@".
// 2. Append to output the result of running Serializing an Integer
// with input_date (Section 4.1.4).
// 3. Return output.
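// Illustrative example (not from the upstream source), based on serializeDate() below:
//   serializeDate(new Date(0))             -> '@0'
//   serializeDate(new Date(1659578233000)) -> '@1659578233'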
function serializeDate(value){return `@${serializeInteger(value.getTime()/1000)}`;}/**
 * This implements the rounding procedure described in step 2 of the "Serializing a Decimal" specification.
 * This rounding style is known as "even rounding", "banker's rounding", or "commercial rounding".
 *
 * @param value - The value to round
 * @param precision - The number of decimal places to round to
 * @returns The rounded value
 *
 * @group Utils
 *
 * @beta
*/function roundToEven(value,precision){if(value<0){return -roundToEven(-value,precision);}const decimalShift=Math.pow(10,precision);const isEquidistant=Math.abs(value*decimalShift%1-0.5)<Number.EPSILON;if(isEquidistant){// If the tail of the decimal place is 'equidistant' we round to the nearest even value
const flooredValue=Math.floor(value*decimalShift);return (flooredValue%2===0?flooredValue:flooredValue+1)/decimalShift;}else {// Otherwise, proceed as normal
return Math.round(value*decimalShift)/decimalShift;}}// 4.1.5. Serializing a Decimal
//
// Given a decimal number as input_decimal, return an ASCII string
// suitable for use in a HTTP field value.
//
// 1. If input_decimal is not a decimal number, fail serialization.
//
// 2. If input_decimal has more than three significant digits to the
// right of the decimal point, round it to three decimal places,
// rounding the final digit to the nearest value, or to the even
// value if it is equidistant.
//
// 3. If input_decimal has more than 12 significant digits to the left
// of the decimal point after rounding, fail serialization.
//
// 4. Let output be an empty string.
//
// 5. If input_decimal is less than (but not equal to) 0, append "-"
// to output.
//
// 6. Append input_decimal's integer component represented in base 10
// (using only decimal digits) to output; if it is zero, append
// "0".
//
// 7. Append "." to output.
//
// 8. If input_decimal's fractional component is zero, append "0" to
// output.
//
// 9. Otherwise, append the significant digits of input_decimal's
// fractional component represented in base 10 (using only decimal
// digits) to output.
//
// 10. Return output.
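// Illustrative examples (not from the upstream source), based on roundToEven() above and
// serializeDecimal() below:
//   serializeDecimal(3.14159) -> '3.142'
//   serializeDecimal(2)       -> '2.0'
//   serializeDecimal(-0.0625) -> '-0.062'  (half-to-even rounding at the third decimal)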
function serializeDecimal(value){const roundedValue=roundToEven(value,3);// round to 3 decimal places
if(Math.floor(Math.abs(roundedValue)).toString().length>12){throw serializeError(value,DECIMAL);}const stringValue=roundedValue.toString();return stringValue.includes('.')?stringValue:`${stringValue}.0`;}const STRING='String';// 4.1.6. Serializing a String
//
// Given a String as input_string, return an ASCII string suitable for
// use in a HTTP field value.
//
// 1. Convert input_string into a sequence of ASCII characters; if
// conversion fails, fail serialization.
//
// 2. If input_string contains characters in the range %x00-1f or %x7f
// (i.e., not in VCHAR or SP), fail serialization.
//
// 3. Let output be the string DQUOTE.
//
// 4. For each character char in input_string:
//
// 1. If char is "\" or DQUOTE:
//
// 1. Append "\" to output.
//
// 2. Append char to output.
//
// 5. Append DQUOTE to output.
//
// 6. Return output.
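// Illustrative example (not from the upstream source), based on serializeString() below:
//   serializeString('hello "world"') -> '"hello \"world\""'
//   serializeString('line\nbreak')   -> throws (control characters are rejected)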
function serializeString(value){if(STRING_REGEX.test(value)){throw serializeError(value,STRING);}return `"${value.replace(/\\/g,`\\\\`).replace(/"/g,`\\"`)}"`;}function symbolToStr(symbol){return symbol.description||symbol.toString().slice(7,-1);}function serializeToken(token){const value=symbolToStr(token);if(/^([a-zA-Z*])([!#$%&'*+\-.^_`|~\w:/]*)$/.test(value)===false){throw serializeError(value,TOKEN);}return value;}// 4.1.3.1. Serializing a Bare Item
//
// Given an Item as input_item, return an ASCII string suitable for use
// in a HTTP field value.
//
// 1. If input_item is an Integer, return the result of running
// Serializing an Integer (Section 4.1.4) with input_item.
//
// 2. If input_item is a Decimal, return the result of running
// Serializing a Decimal (Section 4.1.5) with input_item.
//
// 3. If input_item is a String, return the result of running
// Serializing a String (Section 4.1.6) with input_item.
//
// 4. If input_item is a Token, return the result of running
// Serializing a Token (Section 4.1.7) with input_item.
//
// 5. If input_item is a Boolean, return the result of running
// Serializing a Boolean (Section 4.1.9) with input_item.
//
// 6. If input_item is a Byte Sequence, return the result of running
// Serializing a Byte Sequence (Section 4.1.8) with input_item.
//
// 7. If input_item is a Date, return the result of running Serializing
// a Date (Section 4.1.10) with input_item.
//
// 8. Otherwise, fail serialization.
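// Illustrative example (not from the upstream source): the dispatch in serializeBareItem() below
// routes each JS type to the matching serializer, e.g.
//   serializeBareItem(90210)                         -> '90210'
//   serializeBareItem(new SfToken('v'))              -> 'v'
//   serializeBareItem(new Uint8Array([104,108,115])) -> ':aGxz:'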
function serializeBareItem(value){switch(typeof value){case'number':if(!isFiniteNumber(value)){throw serializeError(value,BARE_ITEM);}if(Number.isInteger(value)){return serializeInteger(value);}return serializeDecimal(value);case'string':return serializeString(value);case'symbol':return serializeToken(value);case'boolean':return serializeBoolean(value);case'object':if(value instanceof Date){return serializeDate(value);}if(value instanceof Uint8Array){return serializeByteSequence(value);}if(value instanceof SfToken){return serializeToken(value);}default:// fail
throw serializeError(value,BARE_ITEM);}}// 4.1.1.3. Serializing a Key
//
// Given a key as input_key, return an ASCII string suitable for use in
// a HTTP field value.
//
// 1. Convert input_key into a sequence of ASCII characters; if
// conversion fails, fail serialization.
//
// 2. If input_key contains characters not in lcalpha, DIGIT, "_", "-",
// ".", or "*" fail serialization.
//
// 3. If the first character of input_key is not lcalpha or "*", fail
// serialization.
//
// 4. Let output be an empty string.
//
// 5. Append input_key to output.
//
// 6. Return output.
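// Illustrative example (not from the upstream source), based on serializeKey() below:
//   serializeKey('br') -> 'br'
//   serializeKey('BR') -> throws (keys must start with lcalpha or "*")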
function serializeKey(value){if(/^[a-z*][a-z0-9\-_.*]*$/.test(value)===false){throw serializeError(value,KEY);}return value;}// 4.1.1.2. Serializing Parameters
//
// Given an ordered Dictionary as input_parameters (each member having a
// param_name and a param_value), return an ASCII string suitable for
// use in a HTTP field value.
//
// 1. Let output be an empty string.
//
// 2. For each param_name with a value of param_value in
// input_parameters:
//
// 1. Append ";" to output.
//
// 2. Append the result of running Serializing a Key
// (Section 4.1.1.3) with param_name to output.
//
// 3. If param_value is not Boolean true:
//
// 1. Append "=" to output.
//
// 2. Append the result of running Serializing a bare Item
// (Section 4.1.3.1) with param_value to output.
//
// 3. Return output.
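// Illustrative example (not from the upstream source), based on serializeParams() below:
//   serializeParams({foo: 'bar', flag: true}) -> ';foo="bar";flag'
//   serializeParams(null)                     -> ''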
function serializeParams(params){if(params==null){return '';}return Object.entries(params).map(([key,value])=>{if(value===true){return `;${serializeKey(key)}`;// omit true
}return `;${serializeKey(key)}=${serializeBareItem(value)}`;}).join('');}// 4.1.3. Serializing an Item
//
// Given an Item as bare_item and Parameters as item_parameters, return
// an ASCII string suitable for use in a HTTP field value.
//
// 1. Let output be an empty string.
//
// 2. Append the result of running Serializing a Bare Item
// Section 4.1.3.1 with bare_item to output.
//
// 3. Append the result of running Serializing Parameters
// Section 4.1.1.2 with item_parameters to output.
//
// 4. Return output.
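// Illustrative example (not from the upstream source), based on serializeItem() below:
//   serializeItem(new SfItem(42, {q: 0.5})) -> '42;q=0.5'
//   serializeItem('hls')                    -> '"hls"'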
function serializeItem(value){if(value instanceof SfItem){return `${serializeBareItem(value.value)}${serializeParams(value.params)}`;}else {return serializeBareItem(value);}}// 4.1.1.1. Serializing an Inner List
//
// Given an array of (member_value, parameters) tuples as inner_list,
// and parameters as list_parameters, return an ASCII string suitable
// for use in a HTTP field value.
//
// 1. Let output be the string "(".
//
// 2. For each (member_value, parameters) of inner_list:
//
// 1. Append the result of running Serializing an Item
// (Section 4.1.3) with (member_value, parameters) to output.
//
// 2. If more values remain in inner_list, append a single SP to
// output.
//
// 3. Append ")" to output.
//
// 4. Append the result of running Serializing Parameters
// (Section 4.1.1.2) with list_parameters to output.
//
// 5. Return output.
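// Illustrative example (not from the upstream source), based on serializeInnerList() below:
//   serializeInnerList(new SfItem([1, 2]))             -> '(1 2)'
//   serializeInnerList(new SfItem(['a', 'b'], {x: 1})) -> '("a" "b");x=1'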
function serializeInnerList(value){return `(${value.value.map(serializeItem).join(' ')})${serializeParams(value.params)}`;}// 4.1.2. Serializing a Dictionary
//
// Given an ordered Dictionary as input_dictionary (each member having a
// member_name and a tuple value of (member_value, parameters)), return
// an ASCII string suitable for use in a HTTP field value.
//
// 1. Let output be an empty string.
//
// 2. For each member_name with a value of (member_value, parameters)
// in input_dictionary:
//
// 1. Append the result of running Serializing a Key
// (Section 4.1.1.3) with member's member_name to output.
//
// 2. If member_value is Boolean true:
//
// 1. Append the result of running Serializing Parameters
// (Section 4.1.1.2) with parameters to output.
//
// 3. Otherwise:
//
// 1. Append "=" to output.
//
// 2. If member_value is an array, append the result of running
// Serializing an Inner List (Section 4.1.1.1) with
// (member_value, parameters) to output.
//
// 3. Otherwise, append the result of running Serializing an
// Item (Section 4.1.3) with (member_value, parameters) to
// output.
//
// 4. If more members remain in input_dictionary:
//
// 1. Append "," to output.
//
// 2. Append a single SP to output.
//
// 3. Return output.
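// Illustrative example (not from the upstream source), based on serializeDict() below
// (the default options add a space after each comma):
//   serializeDict({v: 1, ot: new SfToken('v'), bs: true})            -> 'v=1, ot=v, bs'
//   serializeDict({v: 1, ot: new SfToken('v')}, {whitespace: false}) -> 'v=1,ot=v'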
function serializeDict(dict,options={whitespace:true}){if(typeof dict!=='object'){throw serializeError(dict,DICT);}const entries=dict instanceof Map?dict.entries():Object.entries(dict);const optionalWhiteSpace=options!=null&&options.whitespace?' ':'';return Array.from(entries).map(([key,item])=>{if(item instanceof SfItem===false){item=new SfItem(item);}let output=serializeKey(key);if(item.value===true){output+=serializeParams(item.params);}else {output+='=';if(Array.isArray(item.value)){output+=serializeInnerList(item);}else {output+=serializeItem(item);}}return output;}).join(`,${optionalWhiteSpace}`);}/**
 * Encode an object into a structured field dictionary
 *
 * @param input - The structured field dictionary to encode
 * @returns The structured field string
 *
 * @group Structured Field
 *
 * @beta
*/function encodeSfDict(value,options){return serializeDict(value,options);}/**
 * Checks if the given key is a token field.
 *
 * @param key - The key to check.
 *
 * @returns `true` if the key is a token field.
 *
 * @internal
 *
 * @group CMCD
*/const isTokenField=key=>key==='ot'||key==='sf'||key==='st';const isValid=value=>{if(typeof value==='number'){return isFiniteNumber(value);}return value!=null&&value!==''&&value!==false;};/**
 * Constructs a relative path from a URL.
 *
 * @param url - The destination URL
 * @param base - The base URL
 * @returns The relative path
 *
 * @group Utils
 *
 * @beta
*/function urlToRelativePath(url,base){const to=new URL(url);const from=new URL(base);if(to.origin!==from.origin){return url;}const toPath=to.pathname.split('/').slice(1);const fromPath=from.pathname.split('/').slice(1,-1);// remove common parents
while(toPath[0]===fromPath[0]){toPath.shift();fromPath.shift();}// add back paths
while(fromPath.length){fromPath.shift();toPath.unshift('..');}return toPath.join('/');}/**
 * Generate a random v4 UUID
 *
 * @returns A random v4 UUID
 *
 * @group Utils
 *
 * @beta
*/function uuid(){try{return crypto.randomUUID();}catch(error){try{const url=URL.createObjectURL(new Blob());const uuid=url.toString();URL.revokeObjectURL(url);return uuid.slice(uuid.lastIndexOf('/')+1);}catch(error){let dt=new Date().getTime();const uuid='xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g,c=>{const r=(dt+Math.random()*16)%16|0;dt=Math.floor(dt/16);return (c=='x'?r:r&0x3|0x8).toString(16);});return uuid;}}}const toRounded=value=>Math.round(value);const toUrlSafe=(value,options)=>{if(options!=null&&options.baseUrl){value=urlToRelativePath(value,options.baseUrl);}return encodeURIComponent(value);};const toHundred=value=>toRounded(value/100)*100;/**
 * The default formatters for CMCD values.
 *
 * @group CMCD
 *
 * @beta
*/const CmcdFormatters={/**
* Bitrate (kbps) rounded integer
*/br:toRounded,/**
* Duration (milliseconds) rounded integer
*/d:toRounded,/**
* Buffer Length (milliseconds) rounded nearest 100ms
*/bl:toHundred,/**
* Deadline (milliseconds) rounded nearest 100ms
*/dl:toHundred,/**
* Measured Throughput (kbps) rounded nearest 100kbps
*/mtp:toHundred,/**
* Next Object Request URL encoded
*/nor:toUrlSafe,/**
* Requested maximum throughput (kbps) rounded nearest 100kbps
*/rtp:toHundred,/**
* Top Bitrate (kbps) rounded integer
*/tb:toRounded};/**
* Internal CMCD processing function.
 *
 * @param obj - The CMCD object to process.
 * @param map - The mapping function to use.
 * @param options - Options for encoding.
 *
 * @internal
 *
 * @group CMCD
*/function processCmcd(obj,options){const results={};if(obj==null||typeof obj!=='object'){return results;}const keys=Object.keys(obj).sort();const formatters=_extends({},CmcdFormatters,options==null?void 0:options.formatters);const filter=options==null?void 0:options.filter;keys.forEach(key=>{if(filter!=null&&filter(key)){return;}let value=obj[key];const formatter=formatters[key];if(formatter){value=formatter(value,options);}// Version should only be reported if not equal to 1.
if(key==='v'&&value===1){return;}// Playback rate should only be sent if not equal to 1.
if(key=='pr'&&value===1){return;}// ignore invalid values
if(!isValid(value)){return;}if(isTokenField(key)&&typeof value==='string'){value=new SfToken(value);}results[key]=value;});return results;}/**
 * Encode a CMCD object to a string.
 *
 * @param cmcd - The CMCD object to encode.
 * @param options - Options for encoding.
 *
 * @returns The encoded CMCD string.
 *
 * @group CMCD
 *
 * @beta
*/function encodeCmcd(cmcd,options={}){if(!cmcd){return '';}return encodeSfDict(processCmcd(cmcd,options),_extends({whitespace:false},options));}/**
* Convert a CMCD data object to request headers
 *
 * @param cmcd - The CMCD data object to convert.
 * @param options - Options for encoding the CMCD object.
 *
 * @returns The CMCD header shards.
 *
 * @group CMCD
 *
 * @beta
*/function toCmcdHeaders(cmcd,options={}){if(!cmcd){return {};}const entries=Object.entries(cmcd);const headerMap=Object.entries(CmcdHeaderMap).concat(Object.entries((options==null?void 0:options.customHeaderMap)||{}));const shards=entries.reduce((acc,entry)=>{var _headerMap$find,_acc$field;const[key,value]=entry;const field=((_headerMap$find=headerMap.find(entry=>entry[1].includes(key)))==null?void 0:_headerMap$find[0])||CmcdHeaderField.REQUEST;(_acc$field=acc[field])!=null?_acc$field:acc[field]={};acc[field][key]=value;return acc;},{});return Object.entries(shards).reduce((acc,[field,value])=>{acc[field]=encodeCmcd(value,options);return acc;},{});}/**
 * Append CMCD query args to a header object.
 *
 * @param headers - The headers to append to.
 * @param cmcd - The CMCD object to append.
 * @param customHeaderMap - A map of custom CMCD keys to header fields.
 *
 * @returns The headers with the CMCD header shards appended.
 *
 * @group CMCD
 *
 * @beta
*/function appendCmcdHeaders(headers,cmcd,options){return _extends(headers,toCmcdHeaders(cmcd,options));}/**
 * CMCD parameter name.
 *
 * @group CMCD
 *
 * @beta
*/const CMCD_PARAM='CMCD';/**
 * Convert a CMCD data object to a query arg.
 *
 * @param cmcd - The CMCD object to convert.
 * @param options - Options for encoding the CMCD object.
 *
 * @returns The CMCD query arg.
 *
 * @group CMCD
 *
 * @beta
*/function toCmcdQuery(cmcd,options={}){if(!cmcd){return '';}const params=encodeCmcd(cmcd,options);return `${CMCD_PARAM}=${encodeURIComponent(params)}`;}const REGEX=/CMCD=[^&#]+/;/**
 * Append CMCD query args to a URL.
 *
 * @param url - The URL to append to.
 * @param cmcd - The CMCD object to append.
 * @param options - Options for encoding the CMCD object.
 *
 * @returns The URL with the CMCD query args appended.
 *
 * @group CMCD
 *
 * @beta
*/function appendCmcdQuery(url,cmcd,options){// TODO: Replace with URLSearchParams once we drop Safari < 10.1 & Chrome < 49 support.
// https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams
const query=toCmcdQuery(cmcd,options);if(!query){return url;}if(REGEX.test(url)){return url.replace(REGEX,query);}const separator=url.includes('?')?'&':'?';return `${url}${separator}${query}`;}/**
 * Controller to deal with Common Media Client Data (CMCD)
 * @see https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf
*/class CMCDController{// eslint-disable-line no-restricted-globals
constructor(hls){this.hls=void 0;this.config=void 0;this.media=void 0;this.sid=void 0;this.cid=void 0;this.useHeaders=false;this.includeKeys=void 0;this.initialized=false;this.starved=false;this.buffering=true;this.audioBuffer=void 0;// eslint-disable-line no-restricted-globals
this.videoBuffer=void 0;this.onWaiting=()=>{if(this.initialized){this.starved=true;}this.buffering=true;};this.onPlaying=()=>{if(!this.initialized){this.initialized=true;}this.buffering=false;};/**
* Apply CMCD data to a manifest request.
*/this.applyPlaylistData=context=>{try{this.apply(context,{ot:CmObjectType.MANIFEST,su:!this.initialized});}catch(error){logger.warn('Could not generate manifest CMCD data.',error);}};/**
* Apply CMCD data to a segment request
*/this.applyFragmentData=context=>{try{const fragment=context.frag;const level=this.hls.levels[fragment.level];const ot=this.getObjectType(fragment);const data={d:fragment.duration*1000,ot};if(ot===CmObjectType.VIDEO||ot===CmObjectType.AUDIO||ot==CmObjectType.MUXED){data.br=level.bitrate/1000;data.tb=this.getTopBandwidth(ot)/1000;data.bl=this.getBufferLength(ot);}this.apply(context,data);}catch(error){logger.warn('Could not generate segment CMCD data.',error);}};this.hls=hls;const config=this.config=hls.config;const{cmcd}=config;if(cmcd!=null){config.pLoader=this.createPlaylistLoader();config.fLoader=this.createFragmentLoader();this.sid=cmcd.sessionId||uuid();this.cid=cmcd.contentId;this.useHeaders=cmcd.useHeaders===true;this.includeKeys=cmcd.includeKeys;this.registerListeners();}}registerListeners(){const hls=this.hls;hls.on(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.on(Events.MEDIA_DETACHED,this.onMediaDetached,this);hls.on(Events.BUFFER_CREATED,this.onBufferCreated,this);}unregisterListeners(){const hls=this.hls;hls.off(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.off(Events.MEDIA_DETACHED,this.onMediaDetached,this);hls.off(Events.BUFFER_CREATED,this.onBufferCreated,this);}destroy(){this.unregisterListeners();this.onMediaDetached();// @ts-ignore
this.hls=this.config=this.audioBuffer=this.videoBuffer=null;// @ts-ignore
this.onWaiting=this.onPlaying=null;}onMediaAttached(event,data){this.media=data.media;this.media.addEventListener('waiting',this.onWaiting);this.media.addEventListener('playing',this.onPlaying);}onMediaDetached(){if(!this.media){return;}this.media.removeEventListener('waiting',this.onWaiting);this.media.removeEventListener('playing',this.onPlaying);// @ts-ignore
this.media=null;}onBufferCreated(event,data){var _data$tracks$audio,_data$tracks$video;this.audioBuffer=(_data$tracks$audio=data.tracks.audio)==null?void 0:_data$tracks$audio.buffer;this.videoBuffer=(_data$tracks$video=data.tracks.video)==null?void 0:_data$tracks$video.buffer;}/**
* Create baseline CMCD data
*/createData(){var _this$media;return {v:1,sf:CmStreamingFormat.HLS,sid:this.sid,cid:this.cid,pr:(_this$media=this.media)==null?void 0:_this$media.playbackRate,mtp:this.hls.bandwidthEstimate/1000};}/**
* Apply CMCD data to a request.
*/apply(context,data={}){// apply baseline data
_extends(data,this.createData());const isVideo=data.ot===CmObjectType.INIT||data.ot===CmObjectType.VIDEO||data.ot===CmObjectType.MUXED;if(this.starved&&isVideo){data.bs=true;data.su=true;this.starved=false;}if(data.su==null){data.su=this.buffering;}// TODO: Implement rtp, nrr, nor, dl
const{includeKeys}=this;if(includeKeys){data=Object.keys(data).reduce((acc,key)=>{includeKeys.includes(key)&&(acc[key]=data[key]);return acc;},{});}if(this.useHeaders){if(!context.headers){context.headers={};}appendCmcdHeaders(context.headers,data);}else {context.url=appendCmcdQuery(context.url,data);}}/**
* The CMCD object type.
*/getObjectType(fragment){const{type}=fragment;if(type==='subtitle'){return CmObjectType.TIMED_TEXT;}if(fragment.sn==='initSegment'){return CmObjectType.INIT;}if(type==='audio'){return CmObjectType.AUDIO;}if(type==='main'){if(!this.hls.audioTracks.length){return CmObjectType.MUXED;}return CmObjectType.VIDEO;}return undefined;}/**
* Get the highest bitrate.
*/getTopBandwidth(type){let bitrate=0;let levels;const hls=this.hls;if(type===CmObjectType.AUDIO){levels=hls.audioTracks;}else {const max=hls.maxAutoLevel;const len=max>-1?max+1:hls.levels.length;levels=hls.levels.slice(0,len);}for(const level of levels){if(level.bitrate>bitrate){bitrate=level.bitrate;}}return bitrate>0?bitrate:NaN;}/**
* Get the buffer length for a media type in milliseconds
*/getBufferLength(type){const media=this.hls.media;const buffer=type===CmObjectType.AUDIO?this.audioBuffer:this.videoBuffer;if(!buffer||!media){return NaN;}const info=BufferHelper.bufferInfo(buffer,media.currentTime,this.config.maxBufferHole);return info.len*1000;}/**
* Create a playlist loader
*/createPlaylistLoader(){const{pLoader}=this.config;const apply=this.applyPlaylistData;const Ctor=pLoader||this.config.loader;return class CmcdPlaylistLoader{constructor(config){this.loader=void 0;this.loader=new Ctor(config);}get stats(){return this.loader.stats;}get context(){return this.loader.context;}destroy(){this.loader.destroy();}abort(){this.loader.abort();}load(context,config,callbacks){apply(context);this.loader.load(context,config,callbacks);}};}/**
 * Create a fragment loader
*/createFragmentLoader(){const{fLoader}=this.config;const apply=this.applyFragmentData;const Ctor=fLoader||this.config.loader;return class CmcdFragmentLoader{constructor(config){this.loader=void 0;this.loader=new Ctor(config);}get stats(){return this.loader.stats;}get context(){return this.loader.context;}destroy(){this.loader.destroy();}abort(){this.loader.abort();}load(context,config,callbacks){apply(context);this.loader.load(context,config,callbacks);}};}}const PATHWAY_PENALTY_DURATION_MS=300000;class ContentSteeringController{constructor(hls){this.hls=void 0;this.log=void 0;this.loader=null;this.uri=null;this.pathwayId='.';this.pathwayPriority=null;this.timeToLoad=300;this.reloadTimer=-1;this.updated=0;this.started=false;this.enabled=true;this.levels=null;this.audioTracks=null;this.subtitleTracks=null;this.penalizedPathways={};this.hls=hls;this.log=logger.log.bind(logger,`[content-steering]:`);this.registerListeners();}registerListeners(){const hls=this.hls;hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.MANIFEST_LOADED,this.onManifestLoaded,this);hls.on(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.on(Events.ERROR,this.onError,this);}unregisterListeners(){const hls=this.hls;if(!hls){return;}hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.MANIFEST_LOADED,this.onManifestLoaded,this);hls.off(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.off(Events.ERROR,this.onError,this);}startLoad(){this.started=true;this.clearTimeout();if(this.enabled&&this.uri){if(this.updated){const ttl=this.timeToLoad*1000-(performance.now()-this.updated);if(ttl>0){this.scheduleRefresh(this.uri,ttl);return;}}this.loadSteeringManifest(this.uri);}}stopLoad(){this.started=false;if(this.loader){this.loader.destroy();this.loader=null;}this.clearTimeout();}clearTimeout(){if(this.reloadTimer!==-1){self.clearTimeout(this.reloadTimer);this.reloadTimer=-1;}}destroy(){this.unregisterListeners();this.stopLoad();// @ts-ignore
this.hls=null;this.levels=this.audioTracks=this.subtitleTracks=null;}removeLevel(levelToRemove){const levels=this.levels;if(levels){this.levels=levels.filter(level=>level!==levelToRemove);}}onManifestLoading(){this.stopLoad();this.enabled=true;this.timeToLoad=300;this.updated=0;this.uri=null;this.pathwayId='.';this.levels=this.audioTracks=this.subtitleTracks=null;}onManifestLoaded(event,data){const{contentSteering}=data;if(contentSteering===null){return;}this.pathwayId=contentSteering.pathwayId;this.uri=contentSteering.uri;if(this.started){this.startLoad();}}onManifestParsed(event,data){this.audioTracks=data.audioTracks;this.subtitleTracks=data.subtitleTracks;}onError(event,data){const{errorAction}=data;if((errorAction==null?void 0:errorAction.action)===NetworkErrorAction.SendAlternateToPenaltyBox&&errorAction.flags===ErrorActionFlags.MoveAllAlternatesMatchingHost){const levels=this.levels;let pathwayPriority=this.pathwayPriority;let errorPathway=this.pathwayId;if(data.context){const{groupId,pathwayId,type}=data.context;if(groupId&&levels){errorPathway=this.getPathwayForGroupId(groupId,type,errorPathway);}else if(pathwayId){errorPathway=pathwayId;}}if(!(errorPathway in this.penalizedPathways)){this.penalizedPathways[errorPathway]=performance.now();}if(!pathwayPriority&&levels){// If PATHWAY-PRIORITY was not provided, list pathways for error handling
pathwayPriority=levels.reduce((pathways,level)=>{if(pathways.indexOf(level.pathwayId)===-1){pathways.push(level.pathwayId);}return pathways;},[]);}if(pathwayPriority&&pathwayPriority.length>1){this.updatePathwayPriority(pathwayPriority);errorAction.resolved=this.pathwayId!==errorPathway;}if(!errorAction.resolved){logger.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels?levels.length:levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);}}}filterParsedLevels(levels){// Filter levels to only include those that are in the initial pathway
this.levels=levels;let pathwayLevels=this.getLevelsForPathway(this.pathwayId);if(pathwayLevels.length===0){const pathwayId=levels[0].pathwayId;this.log(`No levels found in Pathway ${this.pathwayId}. Setting initial Pathway to "${pathwayId}"`);pathwayLevels=this.getLevelsForPathway(pathwayId);this.pathwayId=pathwayId;}if(pathwayLevels.length!==levels.length){this.log(`Found ${pathwayLevels.length}/${levels.length} levels in Pathway "${this.pathwayId}"`);return pathwayLevels;}return levels;}getLevelsForPathway(pathwayId){if(this.levels===null){return [];}return this.levels.filter(level=>pathwayId===level.pathwayId);}updatePathwayPriority(pathwayPriority){this.pathwayPriority=pathwayPriority;let levels;// Evaluate if we should remove the pathway from the penalized list
const penalizedPathways=this.penalizedPathways;const now=performance.now();Object.keys(penalizedPathways).forEach(pathwayId=>{if(now-penalizedPathways[pathwayId]>PATHWAY_PENALTY_DURATION_MS){delete penalizedPathways[pathwayId];}});for(let i=0;i<pathwayPriority.length;i++){const pathwayId=pathwayPriority[i];if(pathwayId in penalizedPathways){continue;}if(pathwayId===this.pathwayId){return;}const selectedIndex=this.hls.nextLoadLevel;const selectedLevel=this.hls.levels[selectedIndex];levels=this.getLevelsForPathway(pathwayId);if(levels.length>0){this.log(`Setting Pathway to "${pathwayId}"`);this.pathwayId=pathwayId;reassignFragmentLevelIndexes(levels);this.hls.trigger(Events.LEVELS_UPDATED,{levels});// Set LevelController's level to trigger LEVEL_SWITCHING which loads playlist if needed
const levelAfterChange=this.hls.levels[selectedIndex];if(selectedLevel&&levelAfterChange&&this.levels){if(levelAfterChange.attrs['STABLE-VARIANT-ID']!==selectedLevel.attrs['STABLE-VARIANT-ID']&&levelAfterChange.bitrate!==selectedLevel.bitrate){this.log(`Unstable Pathways change from bitrate ${selectedLevel.bitrate} to ${levelAfterChange.bitrate}`);}this.hls.nextLoadLevel=selectedIndex;}break;}}}getPathwayForGroupId(groupId,type,defaultPathway){const levels=this.getLevelsForPathway(defaultPathway).concat(this.levels||[]);for(let i=0;i<levels.length;i++){if(type===PlaylistContextType.AUDIO_TRACK&&levels[i].hasAudioGroup(groupId)||type===PlaylistContextType.SUBTITLE_TRACK&&levels[i].hasSubtitleGroup(groupId)){return levels[i].pathwayId;}}return defaultPathway;}clonePathways(pathwayClones){const levels=this.levels;if(!levels){return;}const audioGroupCloneMap={};const subtitleGroupCloneMap={};pathwayClones.forEach(pathwayClone=>{const{ID:cloneId,'BASE-ID':baseId,'URI-REPLACEMENT':uriReplacement}=pathwayClone;if(levels.some(level=>level.pathwayId===cloneId)){return;}const clonedVariants=this.getLevelsForPathway(baseId).map(baseLevel=>{const attributes=new AttrList(baseLevel.attrs);attributes['PATHWAY-ID']=cloneId;const clonedAudioGroupId=attributes.AUDIO&&`${attributes.AUDIO}_clone_${cloneId}`;const clonedSubtitleGroupId=attributes.SUBTITLES&&`${attributes.SUBTITLES}_clone_${cloneId}`;if(clonedAudioGroupId){audioGroupCloneMap[attributes.AUDIO]=clonedAudioGroupId;attributes.AUDIO=clonedAudioGroupId;}if(clonedSubtitleGroupId){subtitleGroupCloneMap[attributes.SUBTITLES]=clonedSubtitleGroupId;attributes.SUBTITLES=clonedSubtitleGroupId;}const url=performUriReplacement(baseLevel.uri,attributes['STABLE-VARIANT-ID'],'PER-VARIANT-URIS',uriReplacement);const clonedLevel=new Level({attrs:attributes,audioCodec:baseLevel.audioCodec,bitrate:baseLevel.bitrate,height:baseLevel.height,name:baseLevel.name,url,videoCodec:baseLevel.videoCodec,width:baseLevel.width});if(baseLevel.audioGroups){for(let i=1;i<baseLevel.audioGroups.length;i++){clonedLevel.addGroupId('audio',`${baseLevel.audioGroups[i]}_clone_${cloneId}`);}}if(baseLevel.subtitleGroups){for(let i=1;i<baseLevel.subtitleGroups.length;i++){clonedLevel.addGroupId('text',`${baseLevel.subtitleGroups[i]}_clone_${cloneId}`);}}return clonedLevel;});levels.push(...clonedVariants);cloneRenditionGroups(this.audioTracks,audioGroupCloneMap,uriReplacement,cloneId);cloneRenditionGroups(this.subtitleTracks,subtitleGroupCloneMap,uriReplacement,cloneId);});}loadSteeringManifest(uri){const config=this.hls.config;const Loader=config.loader;if(this.loader){this.loader.destroy();}this.loader=new Loader(config);let url;try{url=new self.URL(uri);}catch(error){this.enabled=false;this.log(`Failed to parse Steering Manifest URI: ${uri}`);return;}if(url.protocol!=='data:'){const throughput=(this.hls.bandwidthEstimate||config.abrEwmaDefaultEstimate)|0;url.searchParams.set('_HLS_pathway',this.pathwayId);url.searchParams.set('_HLS_throughput',''+throughput);}const context={responseType:'json',url:url.href};const loadPolicy=config.steeringManifestLoadPolicy.default;const legacyRetryCompatibility=loadPolicy.errorRetry||loadPolicy.timeoutRetry||{};const loaderConfig={loadPolicy,timeout:loadPolicy.maxLoadTimeMs,maxRetry:legacyRetryCompatibility.maxNumRetry||0,retryDelay:legacyRetryCompatibility.retryDelayMs||0,maxRetryDelay:legacyRetryCompatibility.maxRetryDelayMs||0};const callbacks={onSuccess:(response,stats,context,networkDetails)=>{this.log(`Loaded steering manifest: "${url}"`);const 
steeringData=response.data;if(steeringData.VERSION!==1){this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);return;}this.updated=performance.now();this.timeToLoad=steeringData.TTL;const{'RELOAD-URI':reloadUri,'PATHWAY-CLONES':pathwayClones,'PATHWAY-PRIORITY':pathwayPriority}=steeringData;if(reloadUri){try{this.uri=new self.URL(reloadUri,url).href;}catch(error){this.enabled=false;this.log(`Failed to parse Steering Manifest RELOAD-URI: ${reloadUri}`);return;}}this.scheduleRefresh(this.uri||context.url);if(pathwayClones){this.clonePathways(pathwayClones);}const loadedSteeringData={steeringManifest:steeringData,url:url.toString()};this.hls.trigger(Events.STEERING_MANIFEST_LOADED,loadedSteeringData);if(pathwayPriority){this.updatePathwayPriority(pathwayPriority);}},onError:(error,context,networkDetails,stats)=>{this.log(`Error loading steering manifest: ${error.code} ${error.text} (${context.url})`);this.stopLoad();if(error.code===410){this.enabled=false;this.log(`Steering manifest ${context.url} no longer available`);return;}let ttl=this.timeToLoad*1000;if(error.code===429){const loader=this.loader;if(typeof(loader==null?void 0:loader.getResponseHeader)==='function'){const retryAfter=loader.getResponseHeader('Retry-After');if(retryAfter){ttl=parseFloat(retryAfter)*1000;}}this.log(`Steering manifest ${context.url} rate limited`);return;}this.scheduleRefresh(this.uri||context.url,ttl);},onTimeout:(stats,context,networkDetails)=>{this.log(`Timeout loading steering manifest (${context.url})`);this.scheduleRefresh(this.uri||context.url);}};this.log(`Requesting steering manifest: ${url}`);this.loader.load(context,loaderConfig,callbacks);}scheduleRefresh(uri,ttlMs=this.timeToLoad*1000){this.clearTimeout();this.reloadTimer=self.setTimeout(()=>{var _this$hls;const media=(_this$hls=this.hls)==null?void 0:_this$hls.media;if(media&&!media.ended){this.loadSteeringManifest(uri);return;}this.scheduleRefresh(uri,this.timeToLoad*1000);},ttlMs);}}function cloneRenditionGroups(tracks,groupCloneMap,uriReplacement,cloneId){if(!tracks){return;}Object.keys(groupCloneMap).forEach(audioGroupId=>{const clonedTracks=tracks.filter(track=>track.groupId===audioGroupId).map(track=>{const clonedTrack=_extends({},track);clonedTrack.details=undefined;clonedTrack.attrs=new AttrList(clonedTrack.attrs);clonedTrack.url=clonedTrack.attrs.URI=performUriReplacement(track.url,track.attrs['STABLE-RENDITION-ID'],'PER-RENDITION-URIS',uriReplacement);clonedTrack.groupId=clonedTrack.attrs['GROUP-ID']=groupCloneMap[audioGroupId];clonedTrack.attrs['PATHWAY-ID']=cloneId;return clonedTrack;});tracks.push(...clonedTracks);});}function performUriReplacement(uri,stableId,perOptionKey,uriReplacement){const{HOST:host,PARAMS:params,[perOptionKey]:perOptionUris}=uriReplacement;let perVariantUri;if(stableId){perVariantUri=perOptionUris==null?void 0:perOptionUris[stableId];if(perVariantUri){uri=perVariantUri;}}const url=new self.URL(uri);if(host&&!perVariantUri){url.host=host;}if(params){Object.keys(params).sort().forEach(key=>{if(key){url.searchParams.set(key,params[key]);}});}return url.href;}const AGE_HEADER_LINE_REGEX=/^age:\s*[\d.]+\s*$/im;class XhrLoader{constructor(config){this.xhrSetup=void 0;this.requestTimeout=void 0;this.retryTimeout=void 0;this.retryDelay=void 0;this.config=null;this.callbacks=null;this.context=null;this.loader=null;this.stats=void 0;this.xhrSetup=config?config.xhrSetup||null:null;this.stats=new 
LoadStats();this.retryDelay=0;}destroy(){this.callbacks=null;this.abortInternal();this.loader=null;this.config=null;this.context=null;this.xhrSetup=null;}abortInternal(){const loader=this.loader;self.clearTimeout(this.requestTimeout);self.clearTimeout(this.retryTimeout);if(loader){loader.onreadystatechange=null;loader.onprogress=null;if(loader.readyState!==4){this.stats.aborted=true;loader.abort();}}}abort(){var _this$callbacks;this.abortInternal();if((_this$callbacks=this.callbacks)!=null&&_this$callbacks.onAbort){this.callbacks.onAbort(this.stats,this.context,this.loader);}}load(context,config,callbacks){if(this.stats.loading.start){throw new Error('Loader can only be used once.');}this.stats.loading.start=self.performance.now();this.context=context;this.config=config;this.callbacks=callbacks;this.loadInternal();}loadInternal(){const{config,context}=this;if(!config||!context){return;}const xhr=this.loader=new self.XMLHttpRequest();const stats=this.stats;stats.loading.first=0;stats.loaded=0;stats.aborted=false;const xhrSetup=this.xhrSetup;if(xhrSetup){Promise.resolve().then(()=>{if(this.loader!==xhr||this.stats.aborted)return;return xhrSetup(xhr,context.url);}).catch(error=>{if(this.loader!==xhr||this.stats.aborted)return;xhr.open('GET',context.url,true);return xhrSetup(xhr,context.url);}).then(()=>{if(this.loader!==xhr||this.stats.aborted)return;this.openAndSendXhr(xhr,context,config);}).catch(error=>{// IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
this.callbacks.onError({code:xhr.status,text:error.message},context,xhr,stats);return;});}else {this.openAndSendXhr(xhr,context,config);}}openAndSendXhr(xhr,context,config){if(!xhr.readyState){xhr.open('GET',context.url,true);}const headers=context.headers;const{maxTimeToFirstByteMs,maxLoadTimeMs}=config.loadPolicy;if(headers){for(const header in headers){xhr.setRequestHeader(header,headers[header]);}}if(context.rangeEnd){xhr.setRequestHeader('Range','bytes='+context.rangeStart+'-'+(context.rangeEnd-1));}xhr.onreadystatechange=this.readystatechange.bind(this);xhr.onprogress=this.loadprogress.bind(this);xhr.responseType=context.responseType;// setup timeout before we perform request
self.clearTimeout(this.requestTimeout);config.timeout=maxTimeToFirstByteMs&&isFiniteNumber(maxTimeToFirstByteMs)?maxTimeToFirstByteMs:maxLoadTimeMs;this.requestTimeout=self.setTimeout(this.loadtimeout.bind(this),config.timeout);xhr.send();}readystatechange(){const{context,loader:xhr,stats}=this;if(!context||!xhr){return;}const readyState=xhr.readyState;const config=this.config;// don't proceed if xhr has been aborted
if(stats.aborted){return;}// >= HEADERS_RECEIVED
if(readyState>=2){if(stats.loading.first===0){stats.loading.first=Math.max(self.performance.now(),stats.loading.start);// readyState >= 2 AND readyState !==4 (readyState = HEADERS_RECEIVED || LOADING) rearm timeout as xhr not finished yet
if(config.timeout!==config.loadPolicy.maxLoadTimeMs){self.clearTimeout(this.requestTimeout);config.timeout=config.loadPolicy.maxLoadTimeMs;this.requestTimeout=self.setTimeout(this.loadtimeout.bind(this),config.loadPolicy.maxLoadTimeMs-(stats.loading.first-stats.loading.start));}}if(readyState===4){self.clearTimeout(this.requestTimeout);xhr.onreadystatechange=null;xhr.onprogress=null;const status=xhr.status;// http status between 200 to 299 are all successful
const useResponse=xhr.responseType!=='text';if(status>=200&&status<300&&(useResponse&&xhr.response||xhr.responseText!==null)){stats.loading.end=Math.max(self.performance.now(),stats.loading.first);const data=useResponse?xhr.response:xhr.responseText;const len=xhr.responseType==='arraybuffer'?data.byteLength:data.length;stats.loaded=stats.total=len;stats.bwEstimate=stats.total*8000/(stats.loading.end-stats.loading.first);if(!this.callbacks){return;}const onProgress=this.callbacks.onProgress;if(onProgress){onProgress(stats,context,data,xhr);}if(!this.callbacks){return;}const response={url:xhr.responseURL,data:data,code:status};this.callbacks.onSuccess(response,stats,context,xhr);}else {const retryConfig=config.loadPolicy.errorRetry;const retryCount=stats.retry;// if max nb of retries reached or if http status between 400 and 499 (such error cannot be recovered, retrying is useless), return error
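// Note on the retry branch below (values from the default fragLoadPolicy declared
// later in this file): shouldRetry() consults errorRetry (maxNumRetry: 6,
// retryDelayMs: 1000, maxRetryDelayMs: 8000) and getRetryDelay() backs the delay
// off between attempts, capped at maxRetryDelayMs; 4xx statuses are not retried.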
const response={url:context.url,data:undefined,code:status};if(shouldRetry(retryConfig,retryCount,false,response)){this.retry(retryConfig);}else {logger.error(`${status} while loading ${context.url}`);this.callbacks.onError({code:status,text:xhr.statusText},context,xhr,stats);}}}}}loadtimeout(){if(!this.config)return;const retryConfig=this.config.loadPolicy.timeoutRetry;const retryCount=this.stats.retry;if(shouldRetry(retryConfig,retryCount,true)){this.retry(retryConfig);}else {var _this$context;logger.warn(`timeout while loading ${(_this$context=this.context)==null?void 0:_this$context.url}`);const callbacks=this.callbacks;if(callbacks){this.abortInternal();callbacks.onTimeout(this.stats,this.context,this.loader);}}}retry(retryConfig){const{context,stats}=this;this.retryDelay=getRetryDelay(retryConfig,stats.retry);stats.retry++;logger.warn(`${status?'HTTP Status '+status:'Timeout'} while loading ${context==null?void 0:context.url}, retrying ${stats.retry}/${retryConfig.maxNumRetry} in ${this.retryDelay}ms`);// abort and reset internal state
this.abortInternal();this.loader=null;// schedule retry
self.clearTimeout(this.retryTimeout);this.retryTimeout=self.setTimeout(this.loadInternal.bind(this),this.retryDelay);}loadprogress(event){const stats=this.stats;stats.loaded=event.loaded;if(event.lengthComputable){stats.total=event.total;}}getCacheAge(){let result=null;if(this.loader&&AGE_HEADER_LINE_REGEX.test(this.loader.getAllResponseHeaders())){const ageHeader=this.loader.getResponseHeader('age');result=ageHeader?parseFloat(ageHeader):null;}return result;}getResponseHeader(name){if(this.loader&&new RegExp(`^${name}:\\s*[\\d.]+\\s*$`,'im').test(this.loader.getAllResponseHeaders())){return this.loader.getResponseHeader(name);}return null;}}function fetchSupported(){if(// @ts-ignore
self.fetch&&self.AbortController&&self.ReadableStream&&self.Request){try{new self.ReadableStream({});// eslint-disable-line no-new
return true;}catch(e){/* noop */}}return false;}const BYTERANGE=/(\d+)-(\d+)\/(\d+)/;class FetchLoader{constructor(config/* HlsConfig */){this.fetchSetup=void 0;this.requestTimeout=void 0;this.request=null;this.response=null;this.controller=void 0;this.context=null;this.config=null;this.callbacks=null;this.stats=void 0;this.loader=null;this.fetchSetup=config.fetchSetup||getRequest;this.controller=new self.AbortController();this.stats=new LoadStats();}destroy(){this.loader=this.callbacks=this.context=this.config=this.request=null;this.abortInternal();this.response=null;// @ts-ignore
this.fetchSetup=this.controller=this.stats=null;}abortInternal(){if(this.controller&&!this.stats.loading.end){this.stats.aborted=true;this.controller.abort();}}abort(){var _this$callbacks;this.abortInternal();if((_this$callbacks=this.callbacks)!=null&&_this$callbacks.onAbort){this.callbacks.onAbort(this.stats,this.context,this.response);}}load(context,config,callbacks){const stats=this.stats;if(stats.loading.start){throw new Error('Loader can only be used once.');}stats.loading.start=self.performance.now();const initParams=getRequestParameters(context,this.controller.signal);const onProgress=callbacks.onProgress;const isArrayBuffer=context.responseType==='arraybuffer';const LENGTH=isArrayBuffer?'byteLength':'length';const{maxTimeToFirstByteMs,maxLoadTimeMs}=config.loadPolicy;this.context=context;this.config=config;this.callbacks=callbacks;this.request=this.fetchSetup(context,initParams);self.clearTimeout(this.requestTimeout);config.timeout=maxTimeToFirstByteMs&&isFiniteNumber(maxTimeToFirstByteMs)?maxTimeToFirstByteMs:maxLoadTimeMs;this.requestTimeout=self.setTimeout(()=>{this.abortInternal();callbacks.onTimeout(stats,context,this.response);},config.timeout);self.fetch(this.request).then(response=>{this.response=this.loader=response;const first=Math.max(self.performance.now(),stats.loading.start);self.clearTimeout(this.requestTimeout);config.timeout=maxLoadTimeMs;this.requestTimeout=self.setTimeout(()=>{this.abortInternal();callbacks.onTimeout(stats,context,this.response);},maxLoadTimeMs-(first-stats.loading.start));if(!response.ok){const{status,statusText}=response;throw new FetchError(statusText||'fetch, bad network response',status,response);}stats.loading.first=first;stats.total=getContentLength(response.headers)||stats.total;if(onProgress&&isFiniteNumber(config.highWaterMark)){return this.loadProgressively(response,stats,context,config.highWaterMark,onProgress);}if(isArrayBuffer){return response.arrayBuffer();}if(context.responseType==='json'){return response.json();}return response.text();}).then(responseData=>{const response=this.response;if(!response){throw new Error('loader destroyed');}self.clearTimeout(this.requestTimeout);stats.loading.end=Math.max(self.performance.now(),stats.loading.first);const total=responseData[LENGTH];if(total){stats.loaded=stats.total=total;}const loaderResponse={url:response.url,data:responseData,code:response.status};if(onProgress&&!isFiniteNumber(config.highWaterMark)){onProgress(stats,context,responseData,response);}callbacks.onSuccess(loaderResponse,stats,context,response);}).catch(error=>{self.clearTimeout(this.requestTimeout);if(stats.aborted){return;}// CORS errors result in an undefined code. Set it to 0 here to align with XHR's behavior
// when destroying, 'error' itself can be undefined
const code=!error?0:error.code||0;const text=!error?null:error.message;callbacks.onError({code,text},context,error?error.details:null,stats);});}getCacheAge(){let result=null;if(this.response){const ageHeader=this.response.headers.get('age');result=ageHeader?parseFloat(ageHeader):null;}return result;}getResponseHeader(name){return this.response?this.response.headers.get(name):null;}loadProgressively(response,stats,context,highWaterMark=0,onProgress){const chunkCache=new ChunkCache();const reader=response.body.getReader();const pump=()=>{return reader.read().then(data=>{if(data.done){if(chunkCache.dataLength){onProgress(stats,context,chunkCache.flush(),response);}return Promise.resolve(new ArrayBuffer(0));}const chunk=data.value;const len=chunk.length;stats.loaded+=len;if(len<highWaterMark||chunkCache.dataLength){// The current chunk is too small to to be emitted or the cache already has data
// Push it to the cache
chunkCache.push(chunk);if(chunkCache.dataLength>=highWaterMark){// flush in order to join the typed arrays
onProgress(stats,context,chunkCache.flush(),response);}}else {// If there's nothing cached already, and the chunk is large enough
// just emit the progress event
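// Illustrative behaviour of the chunking above: with config.highWaterMark set to,
// say, 131072 bytes (128 KiB), three consecutive 50 KiB reads are pooled in the
// ChunkCache and flushed as a single ~150 KiB progress callback, whereas a single
// read of 128 KiB or more arriving on an empty cache is forwarded directly here.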
onProgress(stats,context,chunk,response);}return pump();}).catch(()=>{/* aborted */return Promise.reject();});};return pump();}}function getRequestParameters(context,signal){const initParams={method:'GET',mode:'cors',credentials:'same-origin',signal,headers:new self.Headers(_extends({},context.headers))};if(context.rangeEnd){initParams.headers.set('Range','bytes='+context.rangeStart+'-'+String(context.rangeEnd-1));}return initParams;}function getByteRangeLength(byteRangeHeader){const result=BYTERANGE.exec(byteRangeHeader);if(result){return parseInt(result[2])-parseInt(result[1])+1;}}function getContentLength(headers){const contentRange=headers.get('Content-Range');if(contentRange){const byteRangeLength=getByteRangeLength(contentRange);if(isFiniteNumber(byteRangeLength)){return byteRangeLength;}}const contentLength=headers.get('Content-Length');if(contentLength){return parseInt(contentLength);}}function getRequest(context,initParams){return new self.Request(context.url,initParams);}class FetchError extends Error{constructor(message,code,details){super(message);this.code=void 0;this.details=void 0;this.code=code;this.details=details;}}const WHITESPACE_CHAR=/\s/;const Cues={newCue(track,startTime,endTime,captionScreen){const result=[];let row;// the type data states this is VTTCue, but it can potentially be a TextTrackCue on old browsers
let cue;let indenting;let indent;let text;const Cue=self.VTTCue||self.TextTrackCue;for(let r=0;r<captionScreen.rows.length;r++){row=captionScreen.rows[r];indenting=true;indent=0;text='';if(!row.isEmpty()){var _track$cues;for(let c=0;c<row.chars.length;c++){if(WHITESPACE_CHAR.test(row.chars[c].uchar)&&indenting){indent++;}else {text+=row.chars[c].uchar;indenting=false;}}// To be used for cleaning-up orphaned roll-up captions
row.cueStartTime=startTime;// Give a slight bump to the endTime if it's equal to startTime to avoid a SyntaxError in IE
if(startTime===endTime){endTime+=0.0001;}if(indent>=16){indent--;}else {indent++;}const cueText=fixLineBreaks(text.trim());const id=generateCueId(startTime,endTime,cueText);// If this cue already exists in the track do not push it
if(!(track!=null&&(_track$cues=track.cues)!=null&&_track$cues.getCueById(id))){cue=new Cue(startTime,endTime,cueText);cue.id=id;cue.line=r+1;cue.align='left';// Clamp the position between 10 and 80 percent (CEA-608 PAC indent code)
// https://dvcs.w3.org/hg/text-tracks/raw-file/default/608toVTT/608toVTT.html#positioning-in-cea-608
// Firefox throws an exception and captions break with out of bounds 0-100 values
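// Worked example of the clamped positioning below: an indent of 8 columns yields
// 10 + Math.min(80, Math.floor(8 * 8 / 32) * 10) = 10 + 20 = 30 (percent), and in
// general the computed position stays between 10 and 90 percent.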
cue.position=10+Math.min(80,Math.floor(indent*8/32)*10);result.push(cue);}}}if(track&&result.length){// Sort bottom cues in reverse order so that they render in line order when overlapping in Chrome
result.sort((cueA,cueB)=>{if(cueA.line==='auto'||cueB.line==='auto'){return 0;}if(cueA.line>8&&cueB.line>8){return cueB.line-cueA.line;}return cueA.line-cueB.line;});result.forEach(cue=>addCueToTrack(track,cue));}return result;}};/**
* @deprecated use fragLoadPolicy.default
*/ /**
* @deprecated use manifestLoadPolicy.default and playlistLoadPolicy.default
*/const defaultLoadPolicy={maxTimeToFirstByteMs:8000,maxLoadTimeMs:20000,timeoutRetry:null,errorRetry:null};/**
* @ignore
* If possible, keep hlsDefaultConfig shallow
* It is cloned whenever a new Hls instance is created, by keeping the config
* shallow the properties are cloned, and we don't end up manipulating the default
*/const hlsDefaultConfig=_objectSpread2(_objectSpread2({autoStartLoad:true,// used by stream-controller
startPosition:-1,// used by stream-controller
defaultAudioCodec:undefined,// used by stream-controller
debug:false,// used by logger
capLevelOnFPSDrop:false,// used by fps-controller
capLevelToPlayerSize:false,// used by cap-level-controller
ignoreDevicePixelRatio:false,// used by cap-level-controller
preferManagedMediaSource:true,initialLiveManifestSize:1,// used by stream-controller
maxBufferLength:30,// used by stream-controller
backBufferLength:Infinity,// used by buffer-controller
frontBufferFlushThreshold:Infinity,maxBufferSize:60*1000*1000,// used by stream-controller
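// Illustrative override of the buffer defaults declared here (application-side
// code, not part of this bundle):
//   const hls = new Hls({
//     maxBufferLength: 60,             // target up to 60s of forward buffer
//     backBufferLength: 90,            // keep at most 90s behind the playhead
//     maxBufferSize: 30 * 1000 * 1000, // ~30 MB forward-buffer byte cap
//   });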
maxBufferHole:0.1,// used by stream-controller
highBufferWatchdogPeriod:2,// used by stream-controller
nudgeOffset:0.1,// used by stream-controller
nudgeMaxRetry:3,// used by stream-controller
maxFragLookUpTolerance:0.25,// used by stream-controller
liveSyncDurationCount:3,// used by latency-controller
liveMaxLatencyDurationCount:Infinity,// used by latency-controller
liveSyncDuration:undefined,// used by latency-controller
liveMaxLatencyDuration:undefined,// used by latency-controller
maxLiveSyncPlaybackRate:1,// used by latency-controller
liveDurationInfinity:false,// used by buffer-controller
/**
* @deprecated use backBufferLength
*/liveBackBufferLength:null,// used by buffer-controller
maxMaxBufferLength:600,// used by stream-controller
enableWorker:true,// used by transmuxer
workerPath:null,// used by transmuxer
enableSoftwareAES:true,// used by decrypter
startLevel:undefined,// used by level-controller
startFragPrefetch:false,// used by stream-controller
fpsDroppedMonitoringPeriod:5000,// used by fps-controller
fpsDroppedMonitoringThreshold:0.2,// used by fps-controller
appendErrorMaxRetry:3,// used by buffer-controller
loader:XhrLoader,// loader: FetchLoader,
fLoader:undefined,// used by fragment-loader
pLoader:undefined,// used by playlist-loader
xhrSetup:undefined,// used by xhr-loader
licenseXhrSetup:undefined,// used by eme-controller
licenseResponseCallback:undefined,// used by eme-controller
abrController:AbrController,bufferController:BufferController,capLevelController:CapLevelController,errorController:ErrorController,fpsController:FPSController,stretchShortVideoTrack:false,// used by mp4-remuxer
maxAudioFramesDrift:1,// used by mp4-remuxer
forceKeyFrameOnDiscontinuity:true,// used by ts-demuxer
abrEwmaFastLive:3,// used by abr-controller
abrEwmaSlowLive:9,// used by abr-controller
abrEwmaFastVoD:3,// used by abr-controller
abrEwmaSlowVoD:9,// used by abr-controller
abrEwmaDefaultEstimate:5e5,// 500 kbps // used by abr-controller
abrEwmaDefaultEstimateMax:5e6,// 5 mbps
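// The fast/slow EWMA pairs above weight recent fragment downloads against a
// longer-term average when estimating bandwidth. Illustrative override
// (application-side code, not part of this bundle):
//   const hls = new Hls({
//     abrEwmaDefaultEstimate: 1e6, // start from ~1 Mbps before any measurement
//     abrBandWidthFactor: 0.8,     // use a more conservative share of the estimate
//   });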
abrBandWidthFactor:0.95,// used by abr-controller
abrBandWidthUpFactor:0.7,// used by abr-controller
abrMaxWithRealBitrate:false,// used by abr-controller
maxStarvationDelay:4,// used by abr-controller
maxLoadingDelay:4,// used by abr-controller
minAutoBitrate:0,// used by hls
emeEnabled:false,// used by eme-controller
widevineLicenseUrl:undefined,// used by eme-controller
drmSystems:{},// used by eme-controller
drmSystemOptions:{},// used by eme-controller
requestMediaKeySystemAccessFunc:requestMediaKeySystemAccess,// used by eme-controller
testBandwidth:true,progressive:false,lowLatencyMode:true,cmcd:undefined,enableDateRangeMetadataCues:true,enableEmsgMetadataCues:true,enableID3MetadataCues:true,useMediaCapabilities:true,certLoadPolicy:{default:defaultLoadPolicy},keyLoadPolicy:{default:{maxTimeToFirstByteMs:8000,maxLoadTimeMs:20000,timeoutRetry:{maxNumRetry:1,retryDelayMs:1000,maxRetryDelayMs:20000,backoff:'linear'},errorRetry:{maxNumRetry:8,retryDelayMs:1000,maxRetryDelayMs:20000,backoff:'linear'}}},manifestLoadPolicy:{default:{maxTimeToFirstByteMs:Infinity,maxLoadTimeMs:20000,timeoutRetry:{maxNumRetry:2,retryDelayMs:0,maxRetryDelayMs:0},errorRetry:{maxNumRetry:1,retryDelayMs:1000,maxRetryDelayMs:8000}}},playlistLoadPolicy:{default:{maxTimeToFirstByteMs:10000,maxLoadTimeMs:20000,timeoutRetry:{maxNumRetry:2,retryDelayMs:0,maxRetryDelayMs:0},errorRetry:{maxNumRetry:2,retryDelayMs:1000,maxRetryDelayMs:8000}}},fragLoadPolicy:{default:{maxTimeToFirstByteMs:10000,maxLoadTimeMs:120000,timeoutRetry:{maxNumRetry:4,retryDelayMs:0,maxRetryDelayMs:0},errorRetry:{maxNumRetry:6,retryDelayMs:1000,maxRetryDelayMs:8000}}},steeringManifestLoadPolicy:{default:{maxTimeToFirstByteMs:10000,maxLoadTimeMs:20000,timeoutRetry:{maxNumRetry:2,retryDelayMs:0,maxRetryDelayMs:0},errorRetry:{maxNumRetry:1,retryDelayMs:1000,maxRetryDelayMs:8000}}},// These default settings are deprecated in favor of the above policies
// and are maintained for backwards compatibility
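// Illustrative new-style equivalent (application-side code, not part of this
// bundle): instead of tuning manifestLoadingTimeOut / manifestLoadingMaxRetry
// below, override the policy object directly, e.g.
//   new Hls({
//     manifestLoadPolicy: {
//       default: {
//         maxTimeToFirstByteMs: Infinity,
//         maxLoadTimeMs: 30000,
//         timeoutRetry: { maxNumRetry: 2, retryDelayMs: 0, maxRetryDelayMs: 0 },
//         errorRetry: { maxNumRetry: 3, retryDelayMs: 1000, maxRetryDelayMs: 8000 },
//       },
//     },
//   });
// When only the legacy names are supplied, mergeConfig() further below translates
// them onto these policy objects.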
manifestLoadingTimeOut:10000,manifestLoadingMaxRetry:1,manifestLoadingRetryDelay:1000,manifestLoadingMaxRetryTimeout:64000,levelLoadingTimeOut:10000,levelLoadingMaxRetry:4,levelLoadingRetryDelay:1000,levelLoadingMaxRetryTimeout:64000,fragLoadingTimeOut:20000,fragLoadingMaxRetry:6,fragLoadingRetryDelay:1000,fragLoadingMaxRetryTimeout:64000},timelineConfig()),{},{subtitleStreamController:SubtitleStreamController,subtitleTrackController:SubtitleTrackController,timelineController:TimelineController,audioStreamController:AudioStreamController,audioTrackController:AudioTrackController,emeController:EMEController,cmcdController:CMCDController,contentSteeringController:ContentSteeringController});function timelineConfig(){return {cueHandler:Cues,// used by timeline-controller
enableWebVTT:true,// used by timeline-controller
enableIMSC1:true,// used by timeline-controller
enableCEA708Captions:true,// used by timeline-controller
captionsTextTrack1Label:'English',// used by timeline-controller
captionsTextTrack1LanguageCode:'en',// used by timeline-controller
captionsTextTrack2Label:'Spanish',// used by timeline-controller
captionsTextTrack2LanguageCode:'es',// used by timeline-controller
captionsTextTrack3Label:'Unknown CC',// used by timeline-controller
captionsTextTrack3LanguageCode:'',// used by timeline-controller
captionsTextTrack4Label:'Unknown CC',// used by timeline-controller
captionsTextTrack4LanguageCode:'',// used by timeline-controller
renderTextTracksNatively:true};}/**
* @ignore
*/function mergeConfig(defaultConfig,userConfig){if((userConfig.liveSyncDurationCount||userConfig.liveMaxLatencyDurationCount)&&(userConfig.liveSyncDuration||userConfig.liveMaxLatencyDuration)){throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");}if(userConfig.liveMaxLatencyDurationCount!==undefined&&(userConfig.liveSyncDurationCount===undefined||userConfig.liveMaxLatencyDurationCount<=userConfig.liveSyncDurationCount)){throw new Error('Illegal hls.js config: "liveMaxLatencyDurationCount" must be greater than "liveSyncDurationCount"');}if(userConfig.liveMaxLatencyDuration!==undefined&&(userConfig.liveSyncDuration===undefined||userConfig.liveMaxLatencyDuration<=userConfig.liveSyncDuration)){throw new Error('Illegal hls.js config: "liveMaxLatencyDuration" must be greater than "liveSyncDuration"');}const defaultsCopy=deepCpy(defaultConfig);// Backwards compatibility with deprecated config values
const deprecatedSettingTypes=['manifest','level','frag'];const deprecatedSettings=['TimeOut','MaxRetry','RetryDelay','MaxRetryTimeout'];deprecatedSettingTypes.forEach(type=>{const policyName=`${type==='level'?'playlist':type}LoadPolicy`;const policyNotSet=userConfig[policyName]===undefined;const report=[];deprecatedSettings.forEach(setting=>{const deprecatedSetting=`${type}Loading${setting}`;const value=userConfig[deprecatedSetting];if(value!==undefined&&policyNotSet){report.push(deprecatedSetting);const settings=defaultsCopy[policyName].default;userConfig[policyName]={default:settings};switch(setting){case'TimeOut':settings.maxLoadTimeMs=value;settings.maxTimeToFirstByteMs=value;break;case'MaxRetry':settings.errorRetry.maxNumRetry=value;settings.timeoutRetry.maxNumRetry=value;break;case'RetryDelay':settings.errorRetry.retryDelayMs=value;settings.timeoutRetry.retryDelayMs=value;break;case'MaxRetryTimeout':settings.errorRetry.maxRetryDelayMs=value;settings.timeoutRetry.maxRetryDelayMs=value;break;}}});if(report.length){logger.warn(`hls.js config: "${report.join('", "')}" setting(s) are deprecated, use "${policyName}": ${JSON.stringify(userConfig[policyName])}`);}});return _objectSpread2(_objectSpread2({},defaultsCopy),userConfig);}function deepCpy(obj){if(obj&&typeof obj==='object'){if(Array.isArray(obj)){return obj.map(deepCpy);}return Object.keys(obj).reduce((result,key)=>{result[key]=deepCpy(obj[key]);return result;},{});}return obj;}/**
* @ignore
*/function enableStreamingMode(config){const currentLoader=config.loader;if(currentLoader!==FetchLoader&&currentLoader!==XhrLoader){// If a developer has configured their own loader, respect that choice
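// Note: this helper is only relevant when progressive loading has been requested
// (config.progressive, an application-side opt-in): with a custom loader it backs
// off as logged just below, otherwise it swaps in FetchLoader whenever
// fetchSupported() reports streaming fetch support.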
logger.log('[config]: Custom loader detected, cannot enable progressive streaming');config.progressive=false;}else {const canStreamProgressively=fetchSupported();if(canStreamProgressively){config.loader=FetchLoader;config.progressive=true;config.enableSoftwareAES=true;logger.log('[config]: Progressive streaming enabled, using FetchLoader');}}}let chromeOrFirefox;class LevelController extends BasePlaylistController{constructor(hls,contentSteeringController){super(hls,'[level-controller]');this._levels=[];this._firstLevel=-1;this._maxAutoLevel=-1;this._startLevel=void 0;this.currentLevel=null;this.currentLevelIndex=-1;this.manualLevelIndex=-1;this.steering=void 0;this.onParsedComplete=void 0;this.steering=contentSteeringController;this._registerListeners();}_registerListeners(){const{hls}=this;hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.MANIFEST_LOADED,this.onManifestLoaded,this);hls.on(Events.LEVEL_LOADED,this.onLevelLoaded,this);hls.on(Events.LEVELS_UPDATED,this.onLevelsUpdated,this);hls.on(Events.FRAG_BUFFERED,this.onFragBuffered,this);hls.on(Events.ERROR,this.onError,this);}_unregisterListeners(){const{hls}=this;hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.MANIFEST_LOADED,this.onManifestLoaded,this);hls.off(Events.LEVEL_LOADED,this.onLevelLoaded,this);hls.off(Events.LEVELS_UPDATED,this.onLevelsUpdated,this);hls.off(Events.FRAG_BUFFERED,this.onFragBuffered,this);hls.off(Events.ERROR,this.onError,this);}destroy(){this._unregisterListeners();this.steering=null;this.resetLevels();super.destroy();}stopLoad(){const levels=this._levels;// clean up live level details to force reload them, and reset load errors
levels.forEach(level=>{level.loadError=0;level.fragmentError=0;});super.stopLoad();}resetLevels(){this._startLevel=undefined;this.manualLevelIndex=-1;this.currentLevelIndex=-1;this.currentLevel=null;this._levels=[];this._maxAutoLevel=-1;}onManifestLoading(event,data){this.resetLevels();}onManifestLoaded(event,data){const preferManagedMediaSource=this.hls.config.preferManagedMediaSource;const levels=[];const redundantSet={};const generatePathwaySet={};let resolutionFound=false;let videoCodecFound=false;let audioCodecFound=false;data.levels.forEach(levelParsed=>{var _audioCodec,_videoCodec;const attributes=levelParsed.attrs;// erase audio codec info if browser does not support mp4a.40.34.
// demuxer will autodetect codec and fallback to mpeg/audio
let{audioCodec,videoCodec}=levelParsed;if(((_audioCodec=audioCodec)==null?void 0:_audioCodec.indexOf('mp4a.40.34'))!==-1){chromeOrFirefox||(chromeOrFirefox=/chrome|firefox/i.test(navigator.userAgent));if(chromeOrFirefox){levelParsed.audioCodec=audioCodec=undefined;}}if(audioCodec){levelParsed.audioCodec=audioCodec=getCodecCompatibleName(audioCodec,preferManagedMediaSource);}if(((_videoCodec=videoCodec)==null?void 0:_videoCodec.indexOf('avc1'))===0){videoCodec=levelParsed.videoCodec=convertAVC1ToAVCOTI(videoCodec);}// only keep levels with supported audio/video codecs
const{width,height,unknownCodecs}=levelParsed;resolutionFound||(resolutionFound=!!(width&&height));videoCodecFound||(videoCodecFound=!!videoCodec);audioCodecFound||(audioCodecFound=!!audioCodec);if(unknownCodecs!=null&&unknownCodecs.length||audioCodec&&!areCodecsMediaSourceSupported(audioCodec,'audio',preferManagedMediaSource)||videoCodec&&!areCodecsMediaSourceSupported(videoCodec,'video',preferManagedMediaSource)){return;}const{CODECS,'FRAME-RATE':FRAMERATE,'HDCP-LEVEL':HDCP,'PATHWAY-ID':PATHWAY,RESOLUTION,'VIDEO-RANGE':VIDEO_RANGE}=attributes;const contentSteeringPrefix=`${PATHWAY||'.'}-`;const levelKey=`${contentSteeringPrefix}${levelParsed.bitrate}-${RESOLUTION}-${FRAMERATE}-${CODECS}-${VIDEO_RANGE}-${HDCP}`;if(!redundantSet[levelKey]){const level=new Level(levelParsed);redundantSet[levelKey]=level;generatePathwaySet[levelKey]=1;levels.push(level);}else if(redundantSet[levelKey].uri!==levelParsed.url&&!levelParsed.attrs['PATHWAY-ID']){// Assign Pathway IDs to Redundant Streams (default Pathways is ".". Redundant Streams "..", "...", and so on.)
// Content Steering controller handles Pathway fallback on error
const pathwayCount=generatePathwaySet[levelKey]+=1;levelParsed.attrs['PATHWAY-ID']=new Array(pathwayCount+1).join('.');const level=new Level(levelParsed);redundantSet[levelKey]=level;levels.push(level);}else {redundantSet[levelKey].addGroupId('audio',attributes.AUDIO);redundantSet[levelKey].addGroupId('text',attributes.SUBTITLES);}});this.filterAndSortMediaOptions(levels,data,resolutionFound,videoCodecFound,audioCodecFound);}filterAndSortMediaOptions(filteredLevels,data,resolutionFound,videoCodecFound,audioCodecFound){let audioTracks=[];let subtitleTracks=[];let levels=filteredLevels;// remove audio-only and invalid video-range levels if we also have levels with video codecs or RESOLUTION signalled
if((resolutionFound||videoCodecFound)&&audioCodecFound){levels=levels.filter(({videoCodec,videoRange,width,height})=>(!!videoCodec||!!(width&&height))&&isVideoRange(videoRange));}if(levels.length===0){// Dispatch error after MANIFEST_LOADED is done propagating
Promise.resolve().then(()=>{if(this.hls){if(data.levels.length){this.warn(`One or more CODECS in variant not supported: ${JSON.stringify(data.levels[0].attrs)}`);}const error=new Error('no level with compatible codecs found in manifest');this.hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.MANIFEST_INCOMPATIBLE_CODECS_ERROR,fatal:true,url:data.url,error,reason:error.message});}});return;}if(data.audioTracks){const{preferManagedMediaSource}=this.hls.config;audioTracks=data.audioTracks.filter(track=>!track.audioCodec||areCodecsMediaSourceSupported(track.audioCodec,'audio',preferManagedMediaSource));// Assign ids after filtering as array indices by group-id
assignTrackIdsByGroup(audioTracks);}if(data.subtitles){subtitleTracks=data.subtitles;assignTrackIdsByGroup(subtitleTracks);}// start bitrate is the first bitrate of the manifest
const unsortedLevels=levels.slice(0);// sort levels from lowest to highest
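// The comparator below orders variants by, in turn: HDCP level, height (when any
// variant advertises a resolution), frame rate, video range (per VideoRangeValues),
// video codec preference, codec-set preference for identical URIs, and finally
// average bitrate. Illustrative result: 360p@800kbps sorts before 720p@2500kbps,
// which sorts before 1080p@5000kbps.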
levels.sort((a,b)=>{if(a.attrs['HDCP-LEVEL']!==b.attrs['HDCP-LEVEL']){return (a.attrs['HDCP-LEVEL']||'')>(b.attrs['HDCP-LEVEL']||'')?1:-1;}// sort on height before bitrate for cap-level-controller
if(resolutionFound&&a.height!==b.height){return a.height-b.height;}if(a.frameRate!==b.frameRate){return a.frameRate-b.frameRate;}if(a.videoRange!==b.videoRange){return VideoRangeValues.indexOf(a.videoRange)-VideoRangeValues.indexOf(b.videoRange);}if(a.videoCodec!==b.videoCodec){const valueA=videoCodecPreferenceValue(a.videoCodec);const valueB=videoCodecPreferenceValue(b.videoCodec);if(valueA!==valueB){return valueB-valueA;}}if(a.uri===b.uri&&a.codecSet!==b.codecSet){const valueA=codecsSetSelectionPreferenceValue(a.codecSet);const valueB=codecsSetSelectionPreferenceValue(b.codecSet);if(valueA!==valueB){return valueB-valueA;}}if(a.averageBitrate!==b.averageBitrate){return a.averageBitrate-b.averageBitrate;}return 0;});let firstLevelInPlaylist=unsortedLevels[0];if(this.steering){levels=this.steering.filterParsedLevels(levels);if(levels.length!==unsortedLevels.length){for(let i=0;i<unsortedLevels.length;i++){if(unsortedLevels[i].pathwayId===levels[0].pathwayId){firstLevelInPlaylist=unsortedLevels[i];break;}}}}this._levels=levels;// find index of first level in sorted levels
for(let i=0;i<levels.length;i++){if(levels[i]===firstLevelInPlaylist){var _this$hls$userConfig;this._firstLevel=i;const firstLevelBitrate=firstLevelInPlaylist.bitrate;const bandwidthEstimate=this.hls.bandwidthEstimate;this.log(`manifest loaded, ${levels.length} level(s) found, first bitrate: ${firstLevelBitrate}`);// Update default bwe to first variant bitrate as long it has not been configured or set
if(((_this$hls$userConfig=this.hls.userConfig)==null?void 0:_this$hls$userConfig.abrEwmaDefaultEstimate)===undefined){const startingBwEstimate=Math.min(firstLevelBitrate,this.hls.config.abrEwmaDefaultEstimateMax);if(startingBwEstimate>bandwidthEstimate&&bandwidthEstimate===hlsDefaultConfig.abrEwmaDefaultEstimate){this.hls.bandwidthEstimate=startingBwEstimate;}}break;}}// Audio is only alternate if manifest include a URI along with the audio group tag,
// and this is not an audio-only stream where levels contain audio-only
const audioOnly=audioCodecFound&&!videoCodecFound;const edata={levels,audioTracks,subtitleTracks,sessionData:data.sessionData,sessionKeys:data.sessionKeys,firstLevel:this._firstLevel,stats:data.stats,audio:audioCodecFound,video:videoCodecFound,altAudio:!audioOnly&&audioTracks.some(t=>!!t.url)};this.hls.trigger(Events.MANIFEST_PARSED,edata);// Initiate loading after all controllers have received MANIFEST_PARSED
if(this.hls.config.autoStartLoad||this.hls.forceStartLoad){this.hls.startLoad(this.hls.config.startPosition);}}get levels(){if(this._levels.length===0){return null;}return this._levels;}get level(){return this.currentLevelIndex;}set level(newLevel){const levels=this._levels;if(levels.length===0){return;}// check if level idx is valid
if(newLevel<0||newLevel>=levels.length){// invalid level id given, trigger error
const error=new Error('invalid level idx');const fatal=newLevel<0;this.hls.trigger(Events.ERROR,{type:ErrorTypes.OTHER_ERROR,details:ErrorDetails.LEVEL_SWITCH_ERROR,level:newLevel,fatal,error,reason:error.message});if(fatal){return;}newLevel=Math.min(newLevel,levels.length-1);}const lastLevelIndex=this.currentLevelIndex;const lastLevel=this.currentLevel;const lastPathwayId=lastLevel?lastLevel.attrs['PATHWAY-ID']:undefined;const level=levels[newLevel];const pathwayId=level.attrs['PATHWAY-ID'];this.currentLevelIndex=newLevel;this.currentLevel=level;if(lastLevelIndex===newLevel&&level.details&&lastLevel&&lastPathwayId===pathwayId){return;}this.log(`Switching to level ${newLevel} (${level.height?level.height+'p ':''}${level.videoRange?level.videoRange+' ':''}${level.codecSet?level.codecSet+' ':''}@${level.bitrate})${pathwayId?' with Pathway '+pathwayId:''} from level ${lastLevelIndex}${lastPathwayId?' with Pathway '+lastPathwayId:''}`);const levelSwitchingData={level:newLevel,attrs:level.attrs,details:level.details,bitrate:level.bitrate,averageBitrate:level.averageBitrate,maxBitrate:level.maxBitrate,realBitrate:level.realBitrate,width:level.width,height:level.height,codecSet:level.codecSet,audioCodec:level.audioCodec,videoCodec:level.videoCodec,audioGroups:level.audioGroups,subtitleGroups:level.subtitleGroups,loaded:level.loaded,loadError:level.loadError,fragmentError:level.fragmentError,name:level.name,id:level.id,uri:level.uri,url:level.url,urlId:0,audioGroupIds:level.audioGroupIds,textGroupIds:level.textGroupIds};this.hls.trigger(Events.LEVEL_SWITCHING,levelSwitchingData);// check if we need to load playlist for this level
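// Illustrative application-side usage of this setter (not part of the bundle): it
// is reached through the public accessors on the Hls facade, e.g.
//   hls.currentLevel = 2; // force an immediate switch to the variant at index 2
//   hls.nextLevel = -1;   // hand control back to ABR for upcoming fragments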
const levelDetails=level.details;if(!levelDetails||levelDetails.live){// level not retrieved yet, or live playlist we need to (re)load it
const hlsUrlParameters=this.switchParams(level.uri,lastLevel==null?void 0:lastLevel.details,levelDetails);this.loadPlaylist(hlsUrlParameters);}}get manualLevel(){return this.manualLevelIndex;}set manualLevel(newLevel){this.manualLevelIndex=newLevel;if(this._startLevel===undefined){this._startLevel=newLevel;}if(newLevel!==-1){this.level=newLevel;}}get firstLevel(){return this._firstLevel;}set firstLevel(newLevel){this._firstLevel=newLevel;}get startLevel(){// Setting hls.startLevel (this._startLevel) overrides config.startLevel
if(this._startLevel===undefined){const configStartLevel=this.hls.config.startLevel;if(configStartLevel!==undefined){return configStartLevel;}return this.hls.firstAutoLevel;}return this._startLevel;}set startLevel(newLevel){this._startLevel=newLevel;}onError(event,data){if(data.fatal||!data.context){return;}if(data.context.type===PlaylistContextType.LEVEL&&data.context.level===this.level){this.checkRetry(data);}}// reset errors on the successful load of a fragment
onFragBuffered(event,{frag}){if(frag!==undefined&&frag.type===PlaylistLevelType.MAIN){const el=frag.elementaryStreams;if(!Object.keys(el).some(type=>!!el[type])){return;}const level=this._levels[frag.level];if(level!=null&&level.loadError){this.log(`Resetting level error count of ${level.loadError} on frag buffered`);level.loadError=0;}}}onLevelLoaded(event,data){var _data$deliveryDirecti2;const{level,details}=data;const curLevel=this._levels[level];if(!curLevel){var _data$deliveryDirecti;this.warn(`Invalid level index ${level}`);if((_data$deliveryDirecti=data.deliveryDirectives)!=null&&_data$deliveryDirecti.skip){details.deltaUpdateFailed=true;}return;}// only process level loaded events matching with expected level
if(level===this.currentLevelIndex){// reset level load error counter on successful level loaded only if there is no issues with fragments
if(curLevel.fragmentError===0){curLevel.loadError=0;}this.playlistLoaded(level,data,curLevel.details);}else if((_data$deliveryDirecti2=data.deliveryDirectives)!=null&&_data$deliveryDirecti2.skip){// received a delta playlist update that cannot be merged
details.deltaUpdateFailed=true;}}loadPlaylist(hlsUrlParameters){super.loadPlaylist();const currentLevelIndex=this.currentLevelIndex;const currentLevel=this.currentLevel;if(currentLevel&&this.shouldLoadPlaylist(currentLevel)){let url=currentLevel.uri;if(hlsUrlParameters){try{url=hlsUrlParameters.addDirectives(url);}catch(error){this.warn(`Could not construct new URL with HLS Delivery Directives: ${error}`);}}const pathwayId=currentLevel.attrs['PATHWAY-ID'];this.log(`Loading level index ${currentLevelIndex}${(hlsUrlParameters==null?void 0:hlsUrlParameters.msn)!==undefined?' at sn '+hlsUrlParameters.msn+' part '+hlsUrlParameters.part:''} with${pathwayId?' Pathway '+pathwayId:''} ${url}`);// console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
// console.log('New video quality level audio group id:', levelObject.attrs.AUDIO, level);
this.clearTimer();this.hls.trigger(Events.LEVEL_LOADING,{url,level:currentLevelIndex,pathwayId:currentLevel.attrs['PATHWAY-ID'],id:0,// Deprecated Level urlId
deliveryDirectives:hlsUrlParameters||null});}}get nextLoadLevel(){if(this.manualLevelIndex!==-1){return this.manualLevelIndex;}else {return this.hls.nextAutoLevel;}}set nextLoadLevel(nextLevel){this.level=nextLevel;if(this.manualLevelIndex===-1){this.hls.nextAutoLevel=nextLevel;}}removeLevel(levelIndex){var _this$currentLevel;const levels=this._levels.filter((level,index)=>{if(index!==levelIndex){return true;}if(this.steering){this.steering.removeLevel(level);}if(level===this.currentLevel){this.currentLevel=null;this.currentLevelIndex=-1;if(level.details){level.details.fragments.forEach(f=>f.level=-1);}}return false;});reassignFragmentLevelIndexes(levels);this._levels=levels;if(this.currentLevelIndex>-1&&(_this$currentLevel=this.currentLevel)!=null&&_this$currentLevel.details){this.currentLevelIndex=this.currentLevel.details.fragments[0].level;}this.hls.trigger(Events.LEVELS_UPDATED,{levels});}onLevelsUpdated(event,{levels}){this._levels=levels;}checkMaxAutoUpdated(){const{autoLevelCapping,maxAutoLevel,maxHdcpLevel}=this.hls;if(this._maxAutoLevel!==maxAutoLevel){this._maxAutoLevel=maxAutoLevel;this.hls.trigger(Events.MAX_AUTO_LEVEL_UPDATED,{autoLevelCapping,levels:this.levels,maxAutoLevel,minAutoLevel:this.hls.minAutoLevel,maxHdcpLevel});}}}function assignTrackIdsByGroup(tracks){const groups={};tracks.forEach(track=>{const groupId=track.groupId||'';track.id=groups[groupId]=groups[groupId]||0;groups[groupId]++;});}class KeyLoader{constructor(config){this.config=void 0;this.keyUriToKeyInfo={};this.emeController=null;this.config=config;}abort(type){for(const uri in this.keyUriToKeyInfo){const loader=this.keyUriToKeyInfo[uri].loader;if(loader){var _loader$context;if(type&&type!==((_loader$context=loader.context)==null?void 0:_loader$context.frag.type)){return;}loader.abort();}}}detach(){for(const uri in this.keyUriToKeyInfo){const keyInfo=this.keyUriToKeyInfo[uri];// Remove cached EME keys on detach
if(keyInfo.mediaKeySessionContext||keyInfo.decryptdata.isCommonEncryption){delete this.keyUriToKeyInfo[uri];}}}destroy(){this.detach();for(const uri in this.keyUriToKeyInfo){const loader=this.keyUriToKeyInfo[uri].loader;if(loader){loader.destroy();}}this.keyUriToKeyInfo={};}createKeyLoadError(frag,details=ErrorDetails.KEY_LOAD_ERROR,error,networkDetails,response){return new LoadError({type:ErrorTypes.NETWORK_ERROR,details,fatal:false,frag,response,error,networkDetails});}loadClear(loadingFrag,encryptedFragments){if(this.emeController&&this.config.emeEnabled){// access key-system with nearest key on start (loaidng frag is unencrypted)
const{sn,cc}=loadingFrag;for(let i=0;i<encryptedFragments.length;i++){const frag=encryptedFragments[i];if(cc<=frag.cc&&(sn==='initSegment'||frag.sn==='initSegment'||sn<frag.sn)){this.emeController.selectKeySystemFormat(frag).then(keySystemFormat=>{frag.setKeyFormat(keySystemFormat);});break;}}}}load(frag){if(!frag.decryptdata&&frag.encrypted&&this.emeController){// Multiple keys, but none selected, resolve in eme-controller
return this.emeController.selectKeySystemFormat(frag).then(keySystemFormat=>{return this.loadInternal(frag,keySystemFormat);});}return this.loadInternal(frag);}loadInternal(frag,keySystemFormat){var _keyInfo,_keyInfo2;if(keySystemFormat){frag.setKeyFormat(keySystemFormat);}const decryptdata=frag.decryptdata;if(!decryptdata){const error=new Error(keySystemFormat?`Expected frag.decryptdata to be defined after setting format ${keySystemFormat}`:'Missing decryption data on fragment in onKeyLoading');return Promise.reject(this.createKeyLoadError(frag,ErrorDetails.KEY_LOAD_ERROR,error));}const uri=decryptdata.uri;if(!uri){return Promise.reject(this.createKeyLoadError(frag,ErrorDetails.KEY_LOAD_ERROR,new Error(`Invalid key URI: "${uri}"`)));}let keyInfo=this.keyUriToKeyInfo[uri];if((_keyInfo=keyInfo)!=null&&_keyInfo.decryptdata.key){decryptdata.key=keyInfo.decryptdata.key;return Promise.resolve({frag,keyInfo});}// Return key load promise as long as it does not have a mediakey session with an unusable key status
if((_keyInfo2=keyInfo)!=null&&_keyInfo2.keyLoadPromise){var _keyInfo$mediaKeySess;switch((_keyInfo$mediaKeySess=keyInfo.mediaKeySessionContext)==null?void 0:_keyInfo$mediaKeySess.keyStatus){case undefined:case'status-pending':case'usable':case'usable-in-future':return keyInfo.keyLoadPromise.then(keyLoadedData=>{// Return the correct fragment with updated decryptdata key and loaded keyInfo
decryptdata.key=keyLoadedData.keyInfo.decryptdata.key;return {frag,keyInfo};});}// If we have a key session and status and it is not pending or usable, continue
// This will go back to the eme-controller for expired keys to get a new keyLoadPromise
}// Load the key or return the loading promise
keyInfo=this.keyUriToKeyInfo[uri]={decryptdata,keyLoadPromise:null,loader:null,mediaKeySessionContext:null};switch(decryptdata.method){case'ISO-23001-7':case'SAMPLE-AES':case'SAMPLE-AES-CENC':case'SAMPLE-AES-CTR':if(decryptdata.keyFormat==='identity'){// loadKeyHTTP handles http(s) and data URLs
return this.loadKeyHTTP(keyInfo,frag);}return this.loadKeyEME(keyInfo,frag);case'AES-128':return this.loadKeyHTTP(keyInfo,frag);default:return Promise.reject(this.createKeyLoadError(frag,ErrorDetails.KEY_LOAD_ERROR,new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));}}loadKeyEME(keyInfo,frag){const keyLoadedData={frag,keyInfo};if(this.emeController&&this.config.emeEnabled){const keySessionContextPromise=this.emeController.loadKey(keyLoadedData);if(keySessionContextPromise){return (keyInfo.keyLoadPromise=keySessionContextPromise.then(keySessionContext=>{keyInfo.mediaKeySessionContext=keySessionContext;return keyLoadedData;})).catch(error=>{// Remove promise for license renewal or retry
keyInfo.keyLoadPromise=null;throw error;});}}return Promise.resolve(keyLoadedData);}loadKeyHTTP(keyInfo,frag){const config=this.config;const Loader=config.loader;const keyLoader=new Loader(config);frag.keyLoader=keyInfo.loader=keyLoader;return keyInfo.keyLoadPromise=new Promise((resolve,reject)=>{const loaderContext={keyInfo,frag,responseType:'arraybuffer',url:keyInfo.decryptdata.uri};// maxRetry is 0 so that instead of retrying the same key on the same variant multiple times,
// key-loader will trigger an error and rely on stream-controller to handle retry logic.
// this will also align retry logic with fragment-loader
const loadPolicy=config.keyLoadPolicy.default;const loaderConfig={loadPolicy,timeout:loadPolicy.maxLoadTimeMs,maxRetry:0,retryDelay:0,maxRetryDelay:0};const loaderCallbacks={onSuccess:(response,stats,context,networkDetails)=>{const{frag,keyInfo,url:uri}=context;if(!frag.decryptdata||keyInfo!==this.keyUriToKeyInfo[uri]){return reject(this.createKeyLoadError(frag,ErrorDetails.KEY_LOAD_ERROR,new Error('after key load, decryptdata unset or changed'),networkDetails));}keyInfo.decryptdata.key=frag.decryptdata.key=new Uint8Array(response.data);// detach fragment key loader on load success
frag.keyLoader=null;keyInfo.loader=null;resolve({frag,keyInfo});},onError:(response,context,networkDetails,stats)=>{this.resetLoader(context);reject(this.createKeyLoadError(frag,ErrorDetails.KEY_LOAD_ERROR,new Error(`HTTP Error ${response.code} loading key ${response.text}`),networkDetails,_objectSpread2({url:loaderContext.url,data:undefined},response)));},onTimeout:(stats,context,networkDetails)=>{this.resetLoader(context);reject(this.createKeyLoadError(frag,ErrorDetails.KEY_LOAD_TIMEOUT,new Error('key loading timed out'),networkDetails));},onAbort:(stats,context,networkDetails)=>{this.resetLoader(context);reject(this.createKeyLoadError(frag,ErrorDetails.INTERNAL_ABORTED,new Error('key loading aborted'),networkDetails));}};keyLoader.load(loaderContext,loaderConfig,loaderCallbacks);});}resetLoader(context){const{frag,keyInfo,url:uri}=context;const loader=keyInfo.loader;if(frag.keyLoader===loader){frag.keyLoader=null;keyInfo.loader=null;}delete this.keyUriToKeyInfo[uri];if(loader){loader.destroy();}}}function getSourceBuffer(){return self.SourceBuffer||self.WebKitSourceBuffer;}function isMSESupported(){const mediaSource=getMediaSource();if(!mediaSource){return false;}// if SourceBuffer is exposed ensure its API is valid
// Older browsers do not expose SourceBuffer globally so checking SourceBuffer.prototype is impossible
const sourceBuffer=getSourceBuffer();return !sourceBuffer||sourceBuffer.prototype&&typeof sourceBuffer.prototype.appendBuffer==='function'&&typeof sourceBuffer.prototype.remove==='function';}function isSupported(){if(!isMSESupported()){return false;}const mediaSource=getMediaSource();return typeof(mediaSource==null?void 0:mediaSource.isTypeSupported)==='function'&&(['avc1.42E01E,mp4a.40.2','av01.0.01M.08','vp09.00.50.08'].some(codecsForVideoContainer=>mediaSource.isTypeSupported(mimeTypeForCodec(codecsForVideoContainer,'video')))||['mp4a.40.2','fLaC'].some(codecForAudioContainer=>mediaSource.isTypeSupported(mimeTypeForCodec(codecForAudioContainer,'audio'))));}function changeTypeSupported(){var _sourceBuffer$prototy;const sourceBuffer=getSourceBuffer();return typeof(sourceBuffer==null?void 0:(_sourceBuffer$prototy=sourceBuffer.prototype)==null?void 0:_sourceBuffer$prototy.changeType)==='function';}const STALL_MINIMUM_DURATION_MS=250;const MAX_START_GAP_JUMP=2.0;const SKIP_BUFFER_HOLE_STEP_SECONDS=0.1;const SKIP_BUFFER_RANGE_START=0.05;class GapController{constructor(config,media,fragmentTracker,hls){this.config=void 0;this.media=null;this.fragmentTracker=void 0;this.hls=void 0;this.nudgeRetry=0;this.stallReported=false;this.stalled=null;this.moved=false;this.seeking=false;this.config=config;this.media=media;this.fragmentTracker=fragmentTracker;this.hls=hls;}destroy(){this.media=null;// @ts-ignore
this.hls=this.fragmentTracker=null;}/**
* Checks if the playhead is stuck within a gap, and if so, attempts to free it.
* A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
*
* @param lastCurrentTime - Previously read playhead position
*/poll(lastCurrentTime,activeFrag){const{config,media,stalled}=this;if(media===null){return;}const{currentTime,seeking}=media;const seeked=this.seeking&&!seeking;const beginSeek=!this.seeking&&seeking;this.seeking=seeking;// The playhead is moving, no-op
if(currentTime!==lastCurrentTime){this.moved=true;if(!seeking){this.nudgeRetry=0;}if(stalled!==null){// The playhead is now moving, but was previously stalled
if(this.stallReported){const _stalledDuration=self.performance.now()-stalled;logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);this.stallReported=false;}this.stalled=null;}return;}// Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek
if(beginSeek||seeked){this.stalled=null;return;}// The playhead should not be moving
if(media.paused&&!seeking||media.ended||media.playbackRate===0||!BufferHelper.getBuffered(media).length){this.nudgeRetry=0;return;}const bufferInfo=BufferHelper.bufferInfo(media,currentTime,0);const nextStart=bufferInfo.nextStart||0;if(seeking){// Waiting for seeking in a buffered range to complete
const hasEnoughBuffer=bufferInfo.len>MAX_START_GAP_JUMP;// Next buffered range is too far ahead to jump to while still seeking
const noBufferGap=!nextStart||activeFrag&&activeFrag.start<=currentTime||nextStart-currentTime>MAX_START_GAP_JUMP&&!this.fragmentTracker.getPartialFragment(currentTime);if(hasEnoughBuffer||noBufferGap){return;}// Reset moved state when seeking to a point in or before a gap
this.moved=false;}// Skip start gaps if we haven't played, but the last poll detected the start of a stall
// The additional poll gives the browser a chance to jump the gap for us
if(!this.moved&&this.stalled!==null){var _level$details;// There is no playable buffer (seeked, waiting for buffer)
const isBuffered=bufferInfo.len>0;if(!isBuffered&&!nextStart){return;}// Jump start gaps within jump threshold
const startJump=Math.max(nextStart,bufferInfo.start||0)-currentTime;// When joining a live stream with audio tracks, account for live playlist window sliding by allowing
// a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
// that begins over 1 target duration after the video start position.
const level=this.hls.levels?this.hls.levels[this.hls.currentLevel]:null;const isLive=level==null?void 0:(_level$details=level.details)==null?void 0:_level$details.live;const maxStartGapJump=isLive?level.details.targetduration*2:MAX_START_GAP_JUMP;const partialOrGap=this.fragmentTracker.getPartialFragment(currentTime);if(startJump>0&&(startJump<=maxStartGapJump||partialOrGap)){if(!media.paused){this._trySkipBufferHole(partialOrGap);}return;}}// Start tracking stall time
const tnow=self.performance.now();if(stalled===null){this.stalled=tnow;return;}const stalledDuration=tnow-stalled;if(!seeking&&stalledDuration>=STALL_MINIMUM_DURATION_MS){// Report stalling after trying to fix
this._reportStall(bufferInfo);if(!this.media){return;}}const bufferedWithHoles=BufferHelper.bufferInfo(media,currentTime,config.maxBufferHole);this._tryFixBufferStall(bufferedWithHoles,stalledDuration);}/**
* Detects and attempts to fix known buffer stalling issues.
* @param bufferInfo - The properties of the current buffer.
* @param stalledDurationMs - The amount of time Hls.js has been stalling for.
* @private
*/_tryFixBufferStall(bufferInfo,stalledDurationMs){const{config,fragmentTracker,media}=this;if(media===null){return;}const currentTime=media.currentTime;const partial=fragmentTracker.getPartialFragment(currentTime);if(partial){// Try to skip over the buffer hole caused by a partial fragment
// This method isn't limited by the size of the gap between buffered ranges
const targetTime=this._trySkipBufferHole(partial);// we return here in this case, meaning
// the branch below only executes when we haven't seeked to a new position
if(targetTime||!this.media){return;}}// if we haven't had to skip over a buffer hole of a partial fragment
// we may just have to "nudge" the playlist as the browser decoding/rendering engine
// needs to cross some sort of threshold covering all source-buffers content
// to start playing properly.
if((bufferInfo.len>config.maxBufferHole||bufferInfo.nextStart&&bufferInfo.nextStart-currentTime<config.maxBufferHole)&&stalledDurationMs>config.highBufferWatchdogPeriod*1000){logger.warn('Trying to nudge playhead over buffer-hole');// Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
// We only try to jump the hole if it's under the configured size
// Reset stalled so as to rearm the watchdog timer
this.stalled=null;this._tryNudgeBuffer();}}/**
* Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
* @param bufferLen - The playhead distance from the end of the current buffer segment.
* @private
*/_reportStall(bufferInfo){const{hls,media,stallReported}=this;if(!stallReported&&media){// Report stalled error once
this.stallReported=true;const error=new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);logger.warn(error.message);hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.BUFFER_STALLED_ERROR,fatal:false,error,buffer:bufferInfo.len});}}/**
* Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
* @param partial - The partial fragment found at the current time (where playback is stalling).
* @private
*/_trySkipBufferHole(partial){const{config,hls,media}=this;if(media===null){return 0;}// Check if currentTime is between unbuffered regions of partial fragments
const currentTime=media.currentTime;const bufferInfo=BufferHelper.bufferInfo(media,currentTime,0);const startTime=currentTime<bufferInfo.start?bufferInfo.start:bufferInfo.nextStart;if(startTime){const bufferStarved=bufferInfo.len<=config.maxBufferHole;const waiting=bufferInfo.len>0&&bufferInfo.len<1&&media.readyState<3;const gapLength=startTime-currentTime;if(gapLength>0&&(bufferStarved||waiting)){// Only allow large gaps to be skipped if it is a start gap, or all fragments in skip range are partial
if(gapLength>config.maxBufferHole){const{fragmentTracker}=this;let startGap=false;if(currentTime===0){const startFrag=fragmentTracker.getAppendedFrag(0,PlaylistLevelType.MAIN);if(startFrag&&startTime<startFrag.end){startGap=true;}}if(!startGap){const startProvisioned=partial||fragmentTracker.getAppendedFrag(currentTime,PlaylistLevelType.MAIN);if(startProvisioned){let moreToLoad=false;let pos=startProvisioned.end;while(pos<startTime){const provisioned=fragmentTracker.getPartialFragment(pos);if(provisioned){pos+=provisioned.duration;}else {moreToLoad=true;break;}}if(moreToLoad){return 0;}}}}const targetTime=Math.max(startTime+SKIP_BUFFER_RANGE_START,currentTime+SKIP_BUFFER_HOLE_STEP_SECONDS);logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);this.moved=true;this.stalled=null;media.currentTime=targetTime;if(partial&&!partial.gap){const error=new Error(`fragment loaded with buffer holes, seeking from ${currentTime} to ${targetTime}`);hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.BUFFER_SEEK_OVER_HOLE,fatal:false,error,reason:error.message,frag:partial});}return targetTime;}}return 0;}/**
* Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
* @private
*/_tryNudgeBuffer(){const{config,hls,media,nudgeRetry}=this;if(media===null){return;}const currentTime=media.currentTime;this.nudgeRetry++;if(nudgeRetry<config.nudgeMaxRetry){const targetTime=currentTime+(nudgeRetry+1)*config.nudgeOffset;// playback stalled in buffered area ... let's nudge currentTime to try to overcome this
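// Worked example with the defaults declared in hlsDefaultConfig above
// (nudgeOffset: 0.1, nudgeMaxRetry: 3): successive passes move currentTime by
// +0.1s, +0.2s and +0.3s; if the playhead is still stuck on the fourth pass, a
// fatal BUFFER_STALLED_ERROR is raised instead.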
const error=new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);logger.warn(error.message);media.currentTime=targetTime;hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.BUFFER_NUDGE_ON_STALL,error,fatal:false});}else {const error=new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);logger.error(error.message);hls.trigger(Events.ERROR,{type:ErrorTypes.MEDIA_ERROR,details:ErrorDetails.BUFFER_STALLED_ERROR,error,fatal:true});}}}const TICK_INTERVAL=100;// how often to tick in ms
class StreamController extends BaseStreamController{constructor(hls,fragmentTracker,keyLoader){super(hls,fragmentTracker,keyLoader,'[stream-controller]',PlaylistLevelType.MAIN);this.audioCodecSwap=false;this.gapController=null;this.level=-1;this._forceStartLoad=false;this.altAudio=false;this.audioOnly=false;this.fragPlaying=null;this.onvplaying=null;this.onvseeked=null;this.fragLastKbps=0;this.couldBacktrack=false;this.backtrackFragment=null;this.audioCodecSwitch=false;this.videoBuffer=null;this._registerListeners();}_registerListeners(){const{hls}=this;hls.on(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.on(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.on(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.on(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.on(Events.LEVEL_LOADING,this.onLevelLoading,this);hls.on(Events.LEVEL_LOADED,this.onLevelLoaded,this);hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED,this.onFragLoadEmergencyAborted,this);hls.on(Events.ERROR,this.onError,this);hls.on(Events.AUDIO_TRACK_SWITCHING,this.onAudioTrackSwitching,this);hls.on(Events.AUDIO_TRACK_SWITCHED,this.onAudioTrackSwitched,this);hls.on(Events.BUFFER_CREATED,this.onBufferCreated,this);hls.on(Events.BUFFER_FLUSHED,this.onBufferFlushed,this);hls.on(Events.LEVELS_UPDATED,this.onLevelsUpdated,this);hls.on(Events.FRAG_BUFFERED,this.onFragBuffered,this);}_unregisterListeners(){const{hls}=this;hls.off(Events.MEDIA_ATTACHED,this.onMediaAttached,this);hls.off(Events.MEDIA_DETACHING,this.onMediaDetaching,this);hls.off(Events.MANIFEST_LOADING,this.onManifestLoading,this);hls.off(Events.MANIFEST_PARSED,this.onManifestParsed,this);hls.off(Events.LEVEL_LOADED,this.onLevelLoaded,this);hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED,this.onFragLoadEmergencyAborted,this);hls.off(Events.ERROR,this.onError,this);hls.off(Events.AUDIO_TRACK_SWITCHING,this.onAudioTrackSwitching,this);hls.off(Events.AUDIO_TRACK_SWITCHED,this.onAudioTrackSwitched,this);hls.off(Events.BUFFER_CREATED,this.onBufferCreated,this);hls.off(Events.BUFFER_FLUSHED,this.onBufferFlushed,this);hls.off(Events.LEVELS_UPDATED,this.onLevelsUpdated,this);hls.off(Events.FRAG_BUFFERED,this.onFragBuffered,this);}onHandlerDestroying(){this._unregisterListeners();super.onHandlerDestroying();}startLoad(startPosition){if(this.levels){const{lastCurrentTime,hls}=this;this.stopLoad();this.setInterval(TICK_INTERVAL);this.level=-1;if(!this.startFragRequested){// determine load level
let startLevel=hls.startLevel;if(startLevel===-1){if(hls.config.testBandwidth&&this.levels.length>1){// -1 : guess start Level by doing a bitrate test by loading first fragment of lowest quality level
startLevel=0;this.bitrateTest=true;}else {startLevel=hls.firstAutoLevel;}}// set new level to playlist loader : this will trigger start level load
// hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
hls.nextLoadLevel=startLevel;this.level=hls.loadLevel;this.loadedmetadata=false;}// if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime
if(lastCurrentTime>0&&startPosition===-1){this.log(`Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(3)}`);startPosition=lastCurrentTime;}this.state=State.IDLE;this.nextLoadPosition=this.startPosition=this.lastCurrentTime=startPosition;this.tick();}else {this._forceStartLoad=true;this.state=State.STOPPED;}}stopLoad(){this._forceStartLoad=false;super.stopLoad();}doTick(){switch(this.state){case State.WAITING_LEVEL:{const{levels,level}=this;const currentLevel=levels==null?void 0:levels[level];const details=currentLevel==null?void 0:currentLevel.details;if(details&&(!details.live||this.levelLastLoaded===currentLevel)){if(this.waitForCdnTuneIn(details)){break;}this.state=State.IDLE;break;}else if(this.hls.nextLoadLevel!==this.level){this.state=State.IDLE;break;}break;}case State.FRAG_LOADING_WAITING_RETRY:{var _this$media;const now=self.performance.now();const retryDate=this.retryDate;// if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
|
||
if(!retryDate||now>=retryDate||(_this$media=this.media)!=null&&_this$media.seeking){const{levels,level}=this;const currentLevel=levels==null?void 0:levels[level];this.resetStartWhenNotLoaded(currentLevel||null);this.state=State.IDLE;}}break;}if(this.state===State.IDLE){this.doTickIdle();}this.onTickEnd();}onTickEnd(){super.onTickEnd();this.checkBuffer();this.checkFragmentChanged();}doTickIdle(){const{hls,levelLastLoaded,levels,media}=this;// if start level not parsed yet OR
|
||
// if video not attached AND start fragment already requested OR start frag prefetch not enabled
|
||
// exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment
|
||
if(levelLastLoaded===null||!media&&(this.startFragRequested||!hls.config.startFragPrefetch)){return;}// If the "main" level is audio-only but we are loading an alternate track in the same group, do not load anything
|
||
if(this.altAudio&&this.audioOnly){return;}const level=hls.nextLoadLevel;if(!(levels!=null&&levels[level])){return;}const levelInfo=levels[level];// if buffer length is less than maxBufLen try to load a new fragment
|
||
const bufferInfo=this.getMainFwdBufferInfo();if(bufferInfo===null){return;}const lastDetails=this.getLevelDetails();if(lastDetails&&this._streamEnded(bufferInfo,lastDetails)){const data={};if(this.altAudio){data.type='video';}this.hls.trigger(Events.BUFFER_EOS,data);this.state=State.ENDED;return;}// set next load level : this will trigger a playlist load if needed
|
||
if(hls.loadLevel!==level&&hls.manualLevel===-1){this.log(`Adapting to level ${level} from level ${this.level}`);}this.level=hls.nextLoadLevel=level;const levelDetails=levelInfo.details;// if level info not retrieved yet, switch state and wait for level retrieval
|
||
// if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load
|
||
// a useless and outdated fragment (that might even introduce load error if it is already out of the live playlist)
|
||
if(!levelDetails||this.state===State.WAITING_LEVEL||levelDetails.live&&this.levelLastLoaded!==levelInfo){this.level=level;this.state=State.WAITING_LEVEL;return;}const bufferLen=bufferInfo.len;// compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
|
||
const maxBufLen=this.getMaxBufferLength(levelInfo.maxBitrate);// Stay idle if we are still with buffer margins
|
||
if(bufferLen>=maxBufLen){return;}if(this.backtrackFragment&&this.backtrackFragment.start>bufferInfo.end){this.backtrackFragment=null;}const targetBufferTime=this.backtrackFragment?this.backtrackFragment.start:bufferInfo.end;let frag=this.getNextFragment(targetBufferTime,levelDetails);// Avoid backtracking by loading an earlier segment in streams with segments that do not start with a key frame (flagged by `couldBacktrack`)
|
||
if(this.couldBacktrack&&!this.fragPrevious&&frag&&frag.sn!=='initSegment'&&this.fragmentTracker.getState(frag)!==FragmentState.OK){var _this$backtrackFragme;const backtrackSn=((_this$backtrackFragme=this.backtrackFragment)!=null?_this$backtrackFragme:frag).sn;const fragIdx=backtrackSn-levelDetails.startSN;const backtrackFrag=levelDetails.fragments[fragIdx-1];if(backtrackFrag&&frag.cc===backtrackFrag.cc){frag=backtrackFrag;this.fragmentTracker.removeFragment(backtrackFrag);}}else if(this.backtrackFragment&&bufferInfo.len){this.backtrackFragment=null;}// Avoid loop loading by using nextLoadPosition set for backtracking and skipping consecutive GAP tags
|
||
if(frag&&this.isLoopLoading(frag,targetBufferTime)){const gapStart=frag.gap;if(!gapStart){// Cleanup the fragment tracker before trying to find the next unbuffered fragment
|
||
const type=this.audioOnly&&!this.altAudio?ElementaryStreamTypes.AUDIO:ElementaryStreamTypes.VIDEO;const mediaBuffer=(type===ElementaryStreamTypes.VIDEO?this.videoBuffer:this.mediaBuffer)||this.media;if(mediaBuffer){this.afterBufferFlushed(mediaBuffer,type,PlaylistLevelType.MAIN);}}frag=this.getNextFragmentLoopLoading(frag,levelDetails,bufferInfo,PlaylistLevelType.MAIN,maxBufLen);}if(!frag){return;}if(frag.initSegment&&!frag.initSegment.data&&!this.bitrateTest){frag=frag.initSegment;}this.loadFragment(frag,levelInfo,targetBufferTime);}loadFragment(frag,level,targetBufferTime){// Check if fragment is not loaded
|
||
const fragState=this.fragmentTracker.getState(frag);this.fragCurrent=frag;if(fragState===FragmentState.NOT_LOADED||fragState===FragmentState.PARTIAL){if(frag.sn==='initSegment'){this._loadInitSegment(frag,level);}else if(this.bitrateTest){this.log(`Fragment ${frag.sn} of level ${frag.level} is being downloaded to test bitrate and will not be buffered`);this._loadBitrateTestFrag(frag,level);}else {this.startFragRequested=true;super.loadFragment(frag,level,targetBufferTime);}}else {this.clearTrackerIfNeeded(frag);}}getBufferedFrag(position){return this.fragmentTracker.getBufferedFrag(position,PlaylistLevelType.MAIN);}followingBufferedFrag(frag){if(frag){// try to get range of next fragment (500ms after this range)
|
||
return this.getBufferedFrag(frag.end+0.5);}return null;}/*
|
||
on immediate level switch :
|
||
- pause playback if playing
|
||
- cancel any pending load request
|
||
- and trigger a buffer flush
|
||
*/immediateLevelSwitch(){this.abortCurrentFrag();this.flushMainBuffer(0,Number.POSITIVE_INFINITY);}/**
|
||
* try to switch ASAP without breaking video playback:
|
||
* in order to ensure smooth but quick level switching,
|
||
* we need to find the next flushable buffer range
|
||
* we should take into account new segment fetch time
|
||
*/nextLevelSwitch(){const{levels,media}=this;// ensure that media is defined and that metadata are available (to retrieve currentTime)
|
||
if(media!=null&&media.readyState){let fetchdelay;const fragPlayingCurrent=this.getAppendedFrag(media.currentTime);if(fragPlayingCurrent&&fragPlayingCurrent.start>1){// flush buffer preceding current fragment (flush until current fragment start offset)
|
||
// minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ...
|
||
this.flushMainBuffer(0,fragPlayingCurrent.start-1);}const levelDetails=this.getLevelDetails();if(levelDetails!=null&&levelDetails.live){const bufferInfo=this.getMainFwdBufferInfo();// Do not flush in live stream with low buffer
|
||
if(!bufferInfo||bufferInfo.len<levelDetails.targetduration*2){return;}}if(!media.paused&&levels){// add a safety delay of 1s
|
||
const nextLevelId=this.hls.nextLoadLevel;const nextLevel=levels[nextLevelId];const fragLastKbps=this.fragLastKbps;if(fragLastKbps&&this.fragCurrent){fetchdelay=this.fragCurrent.duration*nextLevel.maxBitrate/(1000*fragLastKbps)+1;}else {fetchdelay=0;}}else {fetchdelay=0;}// this.log('fetchdelay:'+fetchdelay);
|
||
// find buffer range that will be reached once new fragment will be fetched
|
||
const bufferedFrag=this.getBufferedFrag(media.currentTime+fetchdelay);if(bufferedFrag){// we can flush buffer range following this one without stalling playback
|
||
const nextBufferedFrag=this.followingBufferedFrag(bufferedFrag);if(nextBufferedFrag){// if we are here, we can also cancel any loading/demuxing in progress, as they are useless
|
||
this.abortCurrentFrag();// start flush position is in next buffered frag. Leave some padding for non-independent segments and smoother playback.
|
||
const maxStart=nextBufferedFrag.maxStartPTS?nextBufferedFrag.maxStartPTS:nextBufferedFrag.start;const fragDuration=nextBufferedFrag.duration;const startPts=Math.max(bufferedFrag.end,maxStart+Math.min(Math.max(fragDuration-this.config.maxFragLookUpTolerance,fragDuration*(this.couldBacktrack?0.5:0.125)),fragDuration*(this.couldBacktrack?0.75:0.25)));this.flushMainBuffer(startPts,Number.POSITIVE_INFINITY);}}}}abortCurrentFrag(){const fragCurrent=this.fragCurrent;this.fragCurrent=null;this.backtrackFragment=null;if(fragCurrent){fragCurrent.abortRequests();this.fragmentTracker.removeFragment(fragCurrent);}switch(this.state){case State.KEY_LOADING:case State.FRAG_LOADING:case State.FRAG_LOADING_WAITING_RETRY:case State.PARSING:case State.PARSED:this.state=State.IDLE;break;}this.nextLoadPosition=this.getLoadPosition();}flushMainBuffer(startOffset,endOffset){super.flushMainBuffer(startOffset,endOffset,this.altAudio?'video':null);}onMediaAttached(event,data){super.onMediaAttached(event,data);const media=data.media;this.onvplaying=this.onMediaPlaying.bind(this);this.onvseeked=this.onMediaSeeked.bind(this);media.addEventListener('playing',this.onvplaying);media.addEventListener('seeked',this.onvseeked);this.gapController=new GapController(this.config,media,this.fragmentTracker,this.hls);}onMediaDetaching(){const{media}=this;if(media&&this.onvplaying&&this.onvseeked){media.removeEventListener('playing',this.onvplaying);media.removeEventListener('seeked',this.onvseeked);this.onvplaying=this.onvseeked=null;this.videoBuffer=null;}this.fragPlaying=null;if(this.gapController){this.gapController.destroy();this.gapController=null;}super.onMediaDetaching();}onMediaPlaying(){// tick to speed up FRAG_CHANGED triggering
|
||
this.tick();}onMediaSeeked(){const media=this.media;const currentTime=media?media.currentTime:null;if(isFiniteNumber(currentTime)){this.log(`Media seeked to ${currentTime.toFixed(3)}`);}// If seeked was issued before buffer was appended do not tick immediately
|
||
const bufferInfo=this.getMainFwdBufferInfo();if(bufferInfo===null||bufferInfo.len===0){this.warn(`Main forward buffer length on "seeked" event ${bufferInfo?bufferInfo.len:'empty'})`);return;}// tick to speed up FRAG_CHANGED triggering
|
||
this.tick();}onManifestLoading(){// reset buffer on manifest loading
|
||
this.log('Trigger BUFFER_RESET');this.hls.trigger(Events.BUFFER_RESET,undefined);this.fragmentTracker.removeAllFragments();this.couldBacktrack=false;this.startPosition=this.lastCurrentTime=this.fragLastKbps=0;this.levels=this.fragPlaying=this.backtrackFragment=this.levelLastLoaded=null;this.altAudio=this.audioOnly=this.startFragRequested=false;}onManifestParsed(event,data){// detect if we have different kind of audio codecs used amongst playlists
|
||
let aac=false;let heaac=false;data.levels.forEach(level=>{const codec=level.audioCodec;if(codec){aac=aac||codec.indexOf('mp4a.40.2')!==-1;heaac=heaac||codec.indexOf('mp4a.40.5')!==-1;}});this.audioCodecSwitch=aac&&heaac&&!changeTypeSupported();if(this.audioCodecSwitch){this.log('Both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC');}this.levels=data.levels;this.startFragRequested=false;}onLevelLoading(event,data){const{levels}=this;if(!levels||this.state!==State.IDLE){return;}const level=levels[data.level];if(!level.details||level.details.live&&this.levelLastLoaded!==level||this.waitForCdnTuneIn(level.details)){this.state=State.WAITING_LEVEL;}}onLevelLoaded(event,data){var _curLevel$details;const{levels}=this;const newLevelId=data.level;const newDetails=data.details;const duration=newDetails.totalduration;if(!levels){this.warn(`Levels were reset while loading level ${newLevelId}`);return;}this.log(`Level ${newLevelId} loaded [${newDetails.startSN},${newDetails.endSN}]${newDetails.lastPartSn?`[part-${newDetails.lastPartSn}-${newDetails.lastPartIndex}]`:''}, cc [${newDetails.startCC}, ${newDetails.endCC}] duration:${duration}`);const curLevel=levels[newLevelId];const fragCurrent=this.fragCurrent;if(fragCurrent&&(this.state===State.FRAG_LOADING||this.state===State.FRAG_LOADING_WAITING_RETRY)){if(fragCurrent.level!==data.level&&fragCurrent.loader){this.abortCurrentFrag();}}let sliding=0;if(newDetails.live||(_curLevel$details=curLevel.details)!=null&&_curLevel$details.live){var _this$levelLastLoaded;this.checkLiveUpdate(newDetails);if(newDetails.deltaUpdateFailed){return;}sliding=this.alignPlaylists(newDetails,curLevel.details,(_this$levelLastLoaded=this.levelLastLoaded)==null?void 0:_this$levelLastLoaded.details);}// override level info
|
||
curLevel.details=newDetails;this.levelLastLoaded=curLevel;this.hls.trigger(Events.LEVEL_UPDATED,{details:newDetails,level:newLevelId});// only switch back to IDLE state if we were waiting for level to start downloading a new fragment
|
||
if(this.state===State.WAITING_LEVEL){if(this.waitForCdnTuneIn(newDetails)){// Wait for Low-Latency CDN Tune-in
|
||
return;}this.state=State.IDLE;}if(!this.startFragRequested){this.setStartPosition(newDetails,sliding);}else if(newDetails.live){this.synchronizeToLiveEdge(newDetails);}// trigger handler right now
|
||
this.tick();}_handleFragmentLoadProgress(data){var _frag$initSegment;const{frag,part,payload}=data;const{levels}=this;if(!levels){this.warn(`Levels were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`);return;}const currentLevel=levels[frag.level];const details=currentLevel.details;if(!details){this.warn(`Dropping fragment ${frag.sn} of level ${frag.level} after level details were reset`);this.fragmentTracker.removeFragment(frag);return;}const videoCodec=currentLevel.videoCodec;// time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
|
||
const accurateTimeOffset=details.PTSKnown||!details.live;const initSegmentData=(_frag$initSegment=frag.initSegment)==null?void 0:_frag$initSegment.data;const audioCodec=this._getAudioCodec(currentLevel);// transmux the MPEG-TS data to ISO-BMFF segments
|
||
// this.log(`Transmuxing ${frag.sn} of [${details.startSN} ,${details.endSN}],level ${frag.level}, cc ${frag.cc}`);
|
||
const transmuxer=this.transmuxer=this.transmuxer||new TransmuxerInterface(this.hls,PlaylistLevelType.MAIN,this._handleTransmuxComplete.bind(this),this._handleTransmuxerFlush.bind(this));const partIndex=part?part.index:-1;const partial=partIndex!==-1;const chunkMeta=new ChunkMetadata(frag.level,frag.sn,frag.stats.chunkCount,payload.byteLength,partIndex,partial);const initPTS=this.initPTS[frag.cc];transmuxer.push(payload,initSegmentData,audioCodec,videoCodec,frag,part,details.totalduration,accurateTimeOffset,chunkMeta,initPTS);}onAudioTrackSwitching(event,data){// if any URL found on new audio track, it is an alternate audio track
|
||
const fromAltAudio=this.altAudio;const altAudio=!!data.url;// if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered
|
||
// don't do anything if we switch to alt audio: audio stream controller is handling it.
|
||
// we will just have to change buffer scheduling on audioTrackSwitched
|
||
if(!altAudio){if(this.mediaBuffer!==this.media){this.log('Switching on main audio, use media.buffered to schedule main fragment loading');this.mediaBuffer=this.media;const fragCurrent=this.fragCurrent;// we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
|
||
if(fragCurrent){this.log('Switching to main audio track, cancel main fragment load');fragCurrent.abortRequests();this.fragmentTracker.removeFragment(fragCurrent);}// destroy transmuxer to force init segment generation (following audio switch)
|
||
this.resetTransmuxer();// switch to IDLE state to load new fragment
|
||
this.resetLoadingState();}else if(this.audioOnly){// Reset audio transmuxer so when switching back to main audio we're not still appending where we left off
|
||
this.resetTransmuxer();}const hls=this.hls;// If switching from alt to main audio, flush all audio and trigger track switched
|
||
if(fromAltAudio){hls.trigger(Events.BUFFER_FLUSHING,{startOffset:0,endOffset:Number.POSITIVE_INFINITY,type:null});this.fragmentTracker.removeAllFragments();}hls.trigger(Events.AUDIO_TRACK_SWITCHED,data);}}onAudioTrackSwitched(event,data){const trackId=data.id;const altAudio=!!this.hls.audioTracks[trackId].url;if(altAudio){const videoBuffer=this.videoBuffer;// if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
|
||
if(videoBuffer&&this.mediaBuffer!==videoBuffer){this.log('Switching on alternate audio, use video.buffered to schedule main fragment loading');this.mediaBuffer=videoBuffer;}}this.altAudio=altAudio;this.tick();}onBufferCreated(event,data){const tracks=data.tracks;let mediaTrack;let name;let alternate=false;for(const type in tracks){const track=tracks[type];if(track.id==='main'){name=type;mediaTrack=track;// keep video source buffer reference
|
||
if(type==='video'){const videoTrack=tracks[type];if(videoTrack){this.videoBuffer=videoTrack.buffer;}}}else {alternate=true;}}if(alternate&&mediaTrack){this.log(`Alternate track found, use ${name}.buffered to schedule main fragment loading`);this.mediaBuffer=mediaTrack.buffer;}else {this.mediaBuffer=this.media;}}onFragBuffered(event,data){const{frag,part}=data;if(frag&&frag.type!==PlaylistLevelType.MAIN){return;}if(this.fragContextChanged(frag)){// If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
|
||
// Avoid setting state back to IDLE, since that will interfere with a level switch
|
||
this.warn(`Fragment ${frag.sn}${part?' p: '+part.index:''} of level ${frag.level} finished buffering, but was aborted. state: ${this.state}`);if(this.state===State.PARSED){this.state=State.IDLE;}return;}const stats=part?part.stats:frag.stats;this.fragLastKbps=Math.round(8*stats.total/(stats.buffering.end-stats.loading.first));if(frag.sn!=='initSegment'){this.fragPrevious=frag;}this.fragBufferedComplete(frag,part);}onError(event,data){var _data$context;if(data.fatal){this.state=State.ERROR;return;}switch(data.details){case ErrorDetails.FRAG_GAP:case ErrorDetails.FRAG_PARSING_ERROR:case ErrorDetails.FRAG_DECRYPT_ERROR:case ErrorDetails.FRAG_LOAD_ERROR:case ErrorDetails.FRAG_LOAD_TIMEOUT:case ErrorDetails.KEY_LOAD_ERROR:case ErrorDetails.KEY_LOAD_TIMEOUT:this.onFragmentOrKeyLoadError(PlaylistLevelType.MAIN,data);break;case ErrorDetails.LEVEL_LOAD_ERROR:case ErrorDetails.LEVEL_LOAD_TIMEOUT:case ErrorDetails.LEVEL_PARSING_ERROR:// in case of non fatal error while loading level, if level controller is not retrying to load level, switch back to IDLE
|
||
if(!data.levelRetry&&this.state===State.WAITING_LEVEL&&((_data$context=data.context)==null?void 0:_data$context.type)===PlaylistContextType.LEVEL){this.state=State.IDLE;}break;case ErrorDetails.BUFFER_APPEND_ERROR:case ErrorDetails.BUFFER_FULL_ERROR:if(!data.parent||data.parent!=='main'){return;}if(data.details===ErrorDetails.BUFFER_APPEND_ERROR){this.resetLoadingState();return;}if(this.reduceLengthAndFlushBuffer(data)){this.flushMainBuffer(0,Number.POSITIVE_INFINITY);}break;case ErrorDetails.INTERNAL_EXCEPTION:this.recoverWorkerError(data);break;}}// Checks the health of the buffer and attempts to resolve playback stalls.
|
||
checkBuffer(){const{media,gapController}=this;if(!media||!gapController||!media.readyState){// Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
|
||
return;}if(this.loadedmetadata||!BufferHelper.getBuffered(media).length){// Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
|
||
const activeFrag=this.state!==State.IDLE?this.fragCurrent:null;gapController.poll(this.lastCurrentTime,activeFrag);}this.lastCurrentTime=media.currentTime;}onFragLoadEmergencyAborted(){this.state=State.IDLE;// if loadedmetadata is not set, it means that we are emergency switch down on first frag
|
||
// in that case, reset startFragRequested flag
|
||
if(!this.loadedmetadata){this.startFragRequested=false;this.nextLoadPosition=this.startPosition;}this.tickImmediate();}onBufferFlushed(event,{type}){if(type!==ElementaryStreamTypes.AUDIO||this.audioOnly&&!this.altAudio){const mediaBuffer=(type===ElementaryStreamTypes.VIDEO?this.videoBuffer:this.mediaBuffer)||this.media;this.afterBufferFlushed(mediaBuffer,type,PlaylistLevelType.MAIN);this.tick();}}onLevelsUpdated(event,data){if(this.level>-1&&this.fragCurrent){this.level=this.fragCurrent.level;}this.levels=data.levels;}swapAudioCodec(){this.audioCodecSwap=!this.audioCodecSwap;}/**
|
||
* Seeks to the set startPosition if not equal to the mediaElement's current time.
|
||
*/seekToStartPos(){const{media}=this;if(!media){return;}const currentTime=media.currentTime;let startPosition=this.startPosition;// only adjust currentTime if different from startPosition or if startPosition not buffered
|
||
// at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
|
||
if(startPosition>=0&¤tTime<startPosition){if(media.seeking){this.log(`could not seek to ${startPosition}, already seeking at ${currentTime}`);return;}const buffered=BufferHelper.getBuffered(media);const bufferStart=buffered.length?buffered.start(0):0;const delta=bufferStart-startPosition;if(delta>0&&(delta<this.config.maxBufferHole||delta<this.config.maxFragLookUpTolerance)){this.log(`adjusting start position by ${delta} to match buffer start`);startPosition+=delta;this.startPosition=startPosition;}this.log(`seek to target start position ${startPosition} from current time ${currentTime}`);media.currentTime=startPosition;}}_getAudioCodec(currentLevel){let audioCodec=this.config.defaultAudioCodec||currentLevel.audioCodec;if(this.audioCodecSwap&&audioCodec){this.log('Swapping audio codec');if(audioCodec.indexOf('mp4a.40.5')!==-1){audioCodec='mp4a.40.2';}else {audioCodec='mp4a.40.5';}}return audioCodec;}_loadBitrateTestFrag(frag,level){frag.bitrateTest=true;this._doFragLoad(frag,level).then(data=>{const{hls}=this;if(!data||this.fragContextChanged(frag)){return;}level.fragmentError=0;this.state=State.IDLE;this.startFragRequested=false;this.bitrateTest=false;const stats=frag.stats;// Bitrate tests fragments are neither parsed nor buffered
|
||
stats.parsing.start=stats.parsing.end=stats.buffering.start=stats.buffering.end=self.performance.now();hls.trigger(Events.FRAG_LOADED,data);frag.bitrateTest=false;});}_handleTransmuxComplete(transmuxResult){var _id3$samples;const id='main';const{hls}=this;const{remuxResult,chunkMeta}=transmuxResult;const context=this.getCurrentContext(chunkMeta);if(!context){this.resetWhenMissingContext(chunkMeta);return;}const{frag,part,level}=context;const{video,text,id3,initSegment}=remuxResult;const{details}=level;// The audio-stream-controller handles audio buffering if Hls.js is playing an alternate audio track
|
||
const audio=this.altAudio?undefined:remuxResult.audio;// Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
|
||
// If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
|
||
if(this.fragContextChanged(frag)){this.fragmentTracker.removeFragment(frag);return;}this.state=State.PARSING;if(initSegment){if(initSegment!=null&&initSegment.tracks){const mapFragment=frag.initSegment||frag;this._bufferInitSegment(level,initSegment.tracks,mapFragment,chunkMeta);hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT,{frag:mapFragment,id,tracks:initSegment.tracks});}// This would be nice if Number.isFinite acted as a typeguard, but it doesn't. See: https://github.com/Microsoft/TypeScript/issues/10038
|
||
const initPTS=initSegment.initPTS;const timescale=initSegment.timescale;if(isFiniteNumber(initPTS)){this.initPTS[frag.cc]={baseTime:initPTS,timescale};hls.trigger(Events.INIT_PTS_FOUND,{frag,id,initPTS,timescale});}}// Avoid buffering if backtracking this fragment
|
||
if(video&&details&&frag.sn!=='initSegment'){const prevFrag=details.fragments[frag.sn-1-details.startSN];const isFirstFragment=frag.sn===details.startSN;const isFirstInDiscontinuity=!prevFrag||frag.cc>prevFrag.cc;if(remuxResult.independent!==false){const{startPTS,endPTS,startDTS,endDTS}=video;if(part){part.elementaryStreams[video.type]={startPTS,endPTS,startDTS,endDTS};}else {if(video.firstKeyFrame&&video.independent&&chunkMeta.id===1&&!isFirstInDiscontinuity){this.couldBacktrack=true;}if(video.dropped&&video.independent){// Backtrack if dropped frames create a gap after currentTime
|
||
const bufferInfo=this.getMainFwdBufferInfo();const targetBufferTime=(bufferInfo?bufferInfo.end:this.getLoadPosition())+this.config.maxBufferHole;const startTime=video.firstKeyFramePTS?video.firstKeyFramePTS:startPTS;if(!isFirstFragment&&targetBufferTime<startTime-this.config.maxBufferHole&&!isFirstInDiscontinuity){this.backtrack(frag);return;}else if(isFirstInDiscontinuity){// Mark segment with a gap to avoid loop loading
|
||
frag.gap=true;}// Set video stream start to fragment start so that truncated samples do not distort the timeline, and mark it partial
|
||
frag.setElementaryStreamInfo(video.type,frag.start,endPTS,frag.start,endDTS,true);}else if(isFirstFragment&&startPTS>MAX_START_GAP_JUMP){// Mark segment with a gap to skip large start gap
|
||
frag.gap=true;}}frag.setElementaryStreamInfo(video.type,startPTS,endPTS,startDTS,endDTS);if(this.backtrackFragment){this.backtrackFragment=frag;}this.bufferFragmentData(video,frag,part,chunkMeta,isFirstFragment||isFirstInDiscontinuity);}else if(isFirstFragment||isFirstInDiscontinuity){// Mark segment with a gap to avoid loop loading
|
||
frag.gap=true;}else {this.backtrack(frag);return;}}if(audio){const{startPTS,endPTS,startDTS,endDTS}=audio;if(part){part.elementaryStreams[ElementaryStreamTypes.AUDIO]={startPTS,endPTS,startDTS,endDTS};}frag.setElementaryStreamInfo(ElementaryStreamTypes.AUDIO,startPTS,endPTS,startDTS,endDTS);this.bufferFragmentData(audio,frag,part,chunkMeta);}if(details&&id3!=null&&(_id3$samples=id3.samples)!=null&&_id3$samples.length){const emittedID3={id,frag,details,samples:id3.samples};hls.trigger(Events.FRAG_PARSING_METADATA,emittedID3);}if(details&&text){const emittedText={id,frag,details,samples:text.samples};hls.trigger(Events.FRAG_PARSING_USERDATA,emittedText);}}_bufferInitSegment(currentLevel,tracks,frag,chunkMeta){if(this.state!==State.PARSING){return;}this.audioOnly=!!tracks.audio&&!tracks.video;// if audio track is expected to come from audio stream controller, discard any coming from main
|
||
if(this.altAudio&&!this.audioOnly){delete tracks.audio;}// include levelCodec in audio and video tracks
|
||
const{audio,video,audiovideo}=tracks;if(audio){let audioCodec=currentLevel.audioCodec;const ua=navigator.userAgent.toLowerCase();if(this.audioCodecSwitch){if(audioCodec){if(audioCodec.indexOf('mp4a.40.5')!==-1){audioCodec='mp4a.40.2';}else {audioCodec='mp4a.40.5';}}// In the case that AAC and HE-AAC audio codecs are signalled in manifest,
|
||
// force HE-AAC, as it seems that most browsers prefers it.
|
||
// don't force HE-AAC if mono stream, or in Firefox
|
||
const audioMetadata=audio.metadata;if(audioMetadata&&'channelCount'in audioMetadata&&(audioMetadata.channelCount||1)!==1&&ua.indexOf('firefox')===-1){audioCodec='mp4a.40.5';}}// HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise
|
||
if(audioCodec&&audioCodec.indexOf('mp4a.40.5')!==-1&&ua.indexOf('android')!==-1&&audio.container!=='audio/mpeg'){// Exclude mpeg audio
|
||
audioCodec='mp4a.40.2';this.log(`Android: force audio codec to ${audioCodec}`);}if(currentLevel.audioCodec&¤tLevel.audioCodec!==audioCodec){this.log(`Swapping manifest audio codec "${currentLevel.audioCodec}" for "${audioCodec}"`);}audio.levelCodec=audioCodec;audio.id='main';this.log(`Init audio buffer, container:${audio.container}, codecs[selected/level/parsed]=[${audioCodec||''}/${currentLevel.audioCodec||''}/${audio.codec}]`);}if(video){video.levelCodec=currentLevel.videoCodec;video.id='main';this.log(`Init video buffer, container:${video.container}, codecs[level/parsed]=[${currentLevel.videoCodec||''}/${video.codec}]`);}if(audiovideo){this.log(`Init audiovideo buffer, container:${audiovideo.container}, codecs[level/parsed]=[${currentLevel.codecs}/${audiovideo.codec}]`);}this.hls.trigger(Events.BUFFER_CODECS,tracks);// loop through tracks that are going to be provided to bufferController
|
||
Object.keys(tracks).forEach(trackName=>{const track=tracks[trackName];const initSegment=track.initSegment;if(initSegment!=null&&initSegment.byteLength){this.hls.trigger(Events.BUFFER_APPENDING,{type:trackName,data:initSegment,frag,part:null,chunkMeta,parent:frag.type});}});// trigger handler right now
|
||
this.tickImmediate();}getMainFwdBufferInfo(){return this.getFwdBufferInfo(this.mediaBuffer?this.mediaBuffer:this.media,PlaylistLevelType.MAIN);}backtrack(frag){this.couldBacktrack=true;// Causes findFragments to backtrack through fragments to find the keyframe
|
||
this.backtrackFragment=frag;this.resetTransmuxer();this.flushBufferGap(frag);this.fragmentTracker.removeFragment(frag);this.fragPrevious=null;this.nextLoadPosition=frag.start;this.state=State.IDLE;}checkFragmentChanged(){const video=this.media;let fragPlayingCurrent=null;if(video&&video.readyState>1&&video.seeking===false){const currentTime=video.currentTime;/* if video element is in seeked state, currentTime can only increase.
|
||
(assuming that playback rate is positive ...)
|
||
As sometimes currentTime jumps back to zero after a
|
||
media decode error, check this, to avoid seeking back to
|
||
wrong position after a media decode error
|
||
*/if(BufferHelper.isBuffered(video,currentTime)){fragPlayingCurrent=this.getAppendedFrag(currentTime);}else if(BufferHelper.isBuffered(video,currentTime+0.1)){/* ensure that FRAG_CHANGED event is triggered at startup,
|
||
when first video frame is displayed and playback is paused.
|
||
add a tolerance of 100ms, in case current position is not buffered,
|
||
check if current pos+100ms is buffered and use that buffer range
|
||
for FRAG_CHANGED event reporting */fragPlayingCurrent=this.getAppendedFrag(currentTime+0.1);}if(fragPlayingCurrent){this.backtrackFragment=null;const fragPlaying=this.fragPlaying;const fragCurrentLevel=fragPlayingCurrent.level;if(!fragPlaying||fragPlayingCurrent.sn!==fragPlaying.sn||fragPlaying.level!==fragCurrentLevel){this.fragPlaying=fragPlayingCurrent;this.hls.trigger(Events.FRAG_CHANGED,{frag:fragPlayingCurrent});if(!fragPlaying||fragPlaying.level!==fragCurrentLevel){this.hls.trigger(Events.LEVEL_SWITCHED,{level:fragCurrentLevel});}}}}}get nextLevel(){const frag=this.nextBufferedFrag;if(frag){return frag.level;}return -1;}get currentFrag(){const media=this.media;if(media){return this.fragPlaying||this.getAppendedFrag(media.currentTime);}return null;}get currentProgramDateTime(){const media=this.media;if(media){const currentTime=media.currentTime;const frag=this.currentFrag;if(frag&&isFiniteNumber(currentTime)&&isFiniteNumber(frag.programDateTime)){const epocMs=frag.programDateTime+(currentTime-frag.start)*1000;return new Date(epocMs);}}return null;}get currentLevel(){const frag=this.currentFrag;if(frag){return frag.level;}return -1;}get nextBufferedFrag(){const frag=this.currentFrag;if(frag){return this.followingBufferedFrag(frag);}return null;}get forceStartLoad(){return this._forceStartLoad;}}/**
|
||
* The `Hls` class is the core of the HLS.js library used to instantiate player instances.
|
||
* @public
|
||
*/class Hls{/**
|
||
* Get the video-dev/hls.js package version.
|
||
*/static get version(){return "1.5.13";}/**
|
||
* Check if the required MediaSource Extensions are available.
|
||
*/static isMSESupported(){return isMSESupported();}/**
|
||
* Check if MediaSource Extensions are available and isTypeSupported checks pass for any baseline codecs.
|
||
*/static isSupported(){return isSupported();}/**
|
||
* Get the MediaSource global used for MSE playback (ManagedMediaSource, MediaSource, or WebKitMediaSource).
|
||
*/static getMediaSource(){return getMediaSource();}static get Events(){return Events;}static get ErrorTypes(){return ErrorTypes;}static get ErrorDetails(){return ErrorDetails;}/**
|
||
* Get the default configuration applied to new instances.
|
||
*/static get DefaultConfig(){if(!Hls.defaultConfig){return hlsDefaultConfig;}return Hls.defaultConfig;}/**
|
||
* Replace the default configuration applied to new instances.
|
||
*/static set DefaultConfig(defaultConfig){Hls.defaultConfig=defaultConfig;}/**
|
||
* Creates an instance of an HLS client that can attach to exactly one `HTMLMediaElement`.
|
||
* @param userConfig - Configuration options applied over `Hls.DefaultConfig`
|
||
*/constructor(userConfig={}){/**
|
||
* The runtime configuration used by the player. At instantiation this is combination of `hls.userConfig` merged over `Hls.DefaultConfig`.
|
||
*/this.config=void 0;/**
|
||
* The configuration object provided on player instantiation.
|
||
*/this.userConfig=void 0;this.coreComponents=void 0;this.networkControllers=void 0;this.started=false;this._emitter=new EventEmitter();this._autoLevelCapping=-1;this._maxHdcpLevel=null;this.abrController=void 0;this.bufferController=void 0;this.capLevelController=void 0;this.latencyController=void 0;this.levelController=void 0;this.streamController=void 0;this.audioTrackController=void 0;this.subtitleTrackController=void 0;this.emeController=void 0;this.cmcdController=void 0;this._media=null;this.url=null;this.triggeringException=void 0;enableLogs(userConfig.debug||false,'Hls instance');const config=this.config=mergeConfig(Hls.DefaultConfig,userConfig);this.userConfig=userConfig;if(config.progressive){enableStreamingMode(config);}// core controllers and network loaders
|
||
const{abrController:ConfigAbrController,bufferController:ConfigBufferController,capLevelController:ConfigCapLevelController,errorController:ConfigErrorController,fpsController:ConfigFpsController}=config;const errorController=new ConfigErrorController(this);const abrController=this.abrController=new ConfigAbrController(this);const bufferController=this.bufferController=new ConfigBufferController(this);const capLevelController=this.capLevelController=new ConfigCapLevelController(this);const fpsController=new ConfigFpsController(this);const playListLoader=new PlaylistLoader(this);const id3TrackController=new ID3TrackController(this);const ConfigContentSteeringController=config.contentSteeringController;// ConentSteeringController is defined before LevelController to receive Multivariant Playlist events first
|
||
const contentSteering=ConfigContentSteeringController?new ConfigContentSteeringController(this):null;const levelController=this.levelController=new LevelController(this,contentSteering);// FragmentTracker must be defined before StreamController because the order of event handling is important
|
||
const fragmentTracker=new FragmentTracker(this);const keyLoader=new KeyLoader(this.config);const streamController=this.streamController=new StreamController(this,fragmentTracker,keyLoader);// Cap level controller uses streamController to flush the buffer
|
||
capLevelController.setStreamController(streamController);// fpsController uses streamController to switch when frames are being dropped
|
||
fpsController.setStreamController(streamController);const networkControllers=[playListLoader,levelController,streamController];if(contentSteering){networkControllers.splice(1,0,contentSteering);}this.networkControllers=networkControllers;const coreComponents=[abrController,bufferController,capLevelController,fpsController,id3TrackController,fragmentTracker];this.audioTrackController=this.createController(config.audioTrackController,networkControllers);const AudioStreamControllerClass=config.audioStreamController;if(AudioStreamControllerClass){networkControllers.push(new AudioStreamControllerClass(this,fragmentTracker,keyLoader));}// subtitleTrackController must be defined before subtitleStreamController because the order of event handling is important
|
||
this.subtitleTrackController=this.createController(config.subtitleTrackController,networkControllers);const SubtitleStreamControllerClass=config.subtitleStreamController;if(SubtitleStreamControllerClass){networkControllers.push(new SubtitleStreamControllerClass(this,fragmentTracker,keyLoader));}this.createController(config.timelineController,coreComponents);keyLoader.emeController=this.emeController=this.createController(config.emeController,coreComponents);this.cmcdController=this.createController(config.cmcdController,coreComponents);this.latencyController=this.createController(LatencyController,coreComponents);this.coreComponents=coreComponents;// Error controller handles errors before and after all other controllers
|
||
// This listener will be invoked after all other controllers error listeners
|
||
networkControllers.push(errorController);const onErrorOut=errorController.onErrorOut;if(typeof onErrorOut==='function'){this.on(Events.ERROR,onErrorOut,errorController);}}createController(ControllerClass,components){if(ControllerClass){const controllerInstance=new ControllerClass(this);if(components){components.push(controllerInstance);}return controllerInstance;}return null;}// Delegate the EventEmitter through the public API of Hls.js
|
||
on(event,listener,context=this){this._emitter.on(event,listener,context);}once(event,listener,context=this){this._emitter.once(event,listener,context);}removeAllListeners(event){this._emitter.removeAllListeners(event);}off(event,listener,context=this,once){this._emitter.off(event,listener,context,once);}listeners(event){return this._emitter.listeners(event);}emit(event,name,eventObject){return this._emitter.emit(event,name,eventObject);}trigger(event,eventObject){if(this.config.debug){return this.emit(event,event,eventObject);}else {try{return this.emit(event,event,eventObject);}catch(error){logger.error('An internal error happened while handling event '+event+'. Error message: "'+error.message+'". Here is a stacktrace:',error);// Prevent recursion in error event handlers that throw #5497
|
||
if(!this.triggeringException){this.triggeringException=true;const fatal=event===Events.ERROR;this.trigger(Events.ERROR,{type:ErrorTypes.OTHER_ERROR,details:ErrorDetails.INTERNAL_EXCEPTION,fatal,event,error});this.triggeringException=false;}}}return false;}listenerCount(event){return this._emitter.listenerCount(event);}/**
|
||
* Dispose of the instance
|
||
*/destroy(){logger.log('destroy');this.trigger(Events.DESTROYING,undefined);this.detachMedia();this.removeAllListeners();this._autoLevelCapping=-1;this.url=null;this.networkControllers.forEach(component=>component.destroy());this.networkControllers.length=0;this.coreComponents.forEach(component=>component.destroy());this.coreComponents.length=0;// Remove any references that could be held in config options or callbacks
|
||
const config=this.config;config.xhrSetup=config.fetchSetup=undefined;// @ts-ignore
|
||
this.userConfig=null;}/**
|
||
* Attaches Hls.js to a media element
|
||
*/attachMedia(media){logger.log('attachMedia');this._media=media;this.trigger(Events.MEDIA_ATTACHING,{media:media});}/**
|
||
* Detach Hls.js from the media
|
||
*/detachMedia(){logger.log('detachMedia');this.trigger(Events.MEDIA_DETACHING,undefined);this._media=null;}/**
|
||
* Set the source URL. Can be relative or absolute.
|
||
*/loadSource(url){this.stopLoad();const media=this.media;const loadedSource=this.url;const loadingSource=this.url=urlToolkitExports.buildAbsoluteURL(self.location.href,url,{alwaysNormalize:true});this._autoLevelCapping=-1;this._maxHdcpLevel=null;logger.log(`loadSource:${loadingSource}`);if(media&&loadedSource&&(loadedSource!==loadingSource||this.bufferController.hasSourceTypes())){this.detachMedia();this.attachMedia(media);}// when attaching to a source URL, trigger a playlist load
|
||
this.trigger(Events.MANIFEST_LOADING,{url:url});}/**
|
||
* Start loading data from the stream source.
|
||
* Depending on default config, client starts loading automatically when a source is set.
|
||
*
|
||
* @param startPosition - Set the start position to stream from.
|
||
* Defaults to -1 (None: starts from earliest point)
|
||
*/startLoad(startPosition=-1){logger.log(`startLoad(${startPosition})`);this.started=true;this.networkControllers.forEach(controller=>{controller.startLoad(startPosition);});}/**
|
||
* Stop loading of any stream data.
|
||
*/stopLoad(){logger.log('stopLoad');this.started=false;this.networkControllers.forEach(controller=>{controller.stopLoad();});}/**
|
||
* Resumes stream controller segment loading if previously started.
|
||
*/resumeBuffering(){if(this.started){this.networkControllers.forEach(controller=>{if('fragmentLoader'in controller){controller.startLoad(-1);}});}}/**
|
||
* Stops stream controller segment loading without changing 'started' state like stopLoad().
|
||
* This allows for media buffering to be paused without interupting playlist loading.
|
||
*/pauseBuffering(){this.networkControllers.forEach(controller=>{if('fragmentLoader'in controller){controller.stopLoad();}});}/**
|
||
* Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
|
||
*/swapAudioCodec(){logger.log('swapAudioCodec');this.streamController.swapAudioCodec();}/**
|
||
* When the media-element fails, this allows to detach and then re-attach it
|
||
* as one call (convenience method).
|
||
*
|
||
* Automatic recovery of media-errors by this process is configurable.
|
||
*/recoverMediaError(){logger.log('recoverMediaError');const media=this._media;this.detachMedia();if(media){this.attachMedia(media);}}removeLevel(levelIndex){this.levelController.removeLevel(levelIndex);}/**
|
||
* @returns an array of levels (variants) sorted by HDCP-LEVEL, RESOLUTION (height), FRAME-RATE, CODECS, VIDEO-RANGE, and BANDWIDTH
|
||
*/get levels(){const levels=this.levelController.levels;return levels?levels:[];}/**
|
||
* Index of quality level (variant) currently played
|
||
*/get currentLevel(){return this.streamController.currentLevel;}/**
|
||
* Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
|
||
*/set currentLevel(newLevel){logger.log(`set currentLevel:${newLevel}`);this.levelController.manualLevel=newLevel;this.streamController.immediateLevelSwitch();}/**
|
||
* Index of next quality level loaded as scheduled by stream controller.
|
||
*/get nextLevel(){return this.streamController.nextLevel;}/**
|
||
* Set quality level index for next loaded data.
|
||
* This will switch the video quality asap, without interrupting playback.
|
||
* May abort current loading of data, and flush parts of buffer (outside currently played fragment region).
|
||
* @param newLevel - Pass -1 for automatic level selection
|
||
*/set nextLevel(newLevel){logger.log(`set nextLevel:${newLevel}`);this.levelController.manualLevel=newLevel;this.streamController.nextLevelSwitch();}/**
|
||
* Return the quality level of the currently or last (of none is loaded currently) segment
|
||
*/get loadLevel(){return this.levelController.level;}/**
|
||
* Set quality level index for next loaded data in a conservative way.
|
||
* This will switch the quality without flushing, but interrupt current loading.
|
||
* Thus the moment when the quality switch will appear in effect will only be after the already existing buffer.
|
||
* @param newLevel - Pass -1 for automatic level selection
|
||
*/set loadLevel(newLevel){logger.log(`set loadLevel:${newLevel}`);this.levelController.manualLevel=newLevel;}/**
|
||
* get next quality level loaded
|
||
*/get nextLoadLevel(){return this.levelController.nextLoadLevel;}/**
|
||
* Set quality level of next loaded segment in a fully "non-destructive" way.
|
||
* Same as `loadLevel` but will wait for next switch (until current loading is done).
|
||
*/set nextLoadLevel(level){this.levelController.nextLoadLevel=level;}/**
|
||
* Return "first level": like a default level, if not set,
|
||
* falls back to index of first level referenced in manifest
|
||
*/get firstLevel(){return Math.max(this.levelController.firstLevel,this.minAutoLevel);}/**
|
||
* Sets "first-level", see getter.
|
||
*/set firstLevel(newLevel){logger.log(`set firstLevel:${newLevel}`);this.levelController.firstLevel=newLevel;}/**
|
||
* Return the desired start level for the first fragment that will be loaded.
|
||
* The default value of -1 indicates automatic start level selection.
|
||
* Setting hls.nextAutoLevel without setting a startLevel will result in
|
||
* the nextAutoLevel value being used for one fragment load.
|
||
*/get startLevel(){const startLevel=this.levelController.startLevel;if(startLevel===-1&&this.abrController.forcedAutoLevel>-1){return this.abrController.forcedAutoLevel;}return startLevel;}/**
|
||
* set start level (level of first fragment that will be played back)
|
||
* if not overrided by user, first level appearing in manifest will be used as start level
|
||
* if -1 : automatic start level selection, playback will start from level matching download bandwidth
|
||
* (determined from download of first segment)
|
||
*/set startLevel(newLevel){logger.log(`set startLevel:${newLevel}`);// if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
|
||
if(newLevel!==-1){newLevel=Math.max(newLevel,this.minAutoLevel);}this.levelController.startLevel=newLevel;}/**
|
||
* Whether level capping is enabled.
|
||
* Default value is set via `config.capLevelToPlayerSize`.
|
||
*/get capLevelToPlayerSize(){return this.config.capLevelToPlayerSize;}/**
|
||
* Enables or disables level capping. If disabled after previously enabled, `nextLevelSwitch` will be immediately called.
|
||
*/set capLevelToPlayerSize(shouldStartCapping){const newCapLevelToPlayerSize=!!shouldStartCapping;if(newCapLevelToPlayerSize!==this.config.capLevelToPlayerSize){if(newCapLevelToPlayerSize){this.capLevelController.startCapping();// If capping occurs, nextLevelSwitch will happen based on size.
|
||
}else {this.capLevelController.stopCapping();this.autoLevelCapping=-1;this.streamController.nextLevelSwitch();// Now we're uncapped, get the next level asap.
|
||
}this.config.capLevelToPlayerSize=newCapLevelToPlayerSize;}}/**
|
||
* Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
|
||
*/get autoLevelCapping(){return this._autoLevelCapping;}/**
|
||
* Returns the current bandwidth estimate in bits per second, when available. Otherwise, `NaN` is returned.
|
||
*/get bandwidthEstimate(){const{bwEstimator}=this.abrController;if(!bwEstimator){return NaN;}return bwEstimator.getEstimate();}set bandwidthEstimate(abrEwmaDefaultEstimate){this.abrController.resetEstimator(abrEwmaDefaultEstimate);}/**
|
||
* get time to first byte estimate
|
||
* @type {number}
|
||
*/get ttfbEstimate(){const{bwEstimator}=this.abrController;if(!bwEstimator){return NaN;}return bwEstimator.getEstimateTTFB();}/**
|
||
* Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
|
||
*/set autoLevelCapping(newLevel){if(this._autoLevelCapping!==newLevel){logger.log(`set autoLevelCapping:${newLevel}`);this._autoLevelCapping=newLevel;this.levelController.checkMaxAutoUpdated();}}get maxHdcpLevel(){return this._maxHdcpLevel;}set maxHdcpLevel(value){if(isHdcpLevel(value)&&this._maxHdcpLevel!==value){this._maxHdcpLevel=value;this.levelController.checkMaxAutoUpdated();}}/**
|
||
* True when automatic level selection enabled
|
||
*/get autoLevelEnabled(){return this.levelController.manualLevel===-1;}/**
|
||
* Level set manually (if any)
|
||
*/get manualLevel(){return this.levelController.manualLevel;}/**
|
||
* min level selectable in auto mode according to config.minAutoBitrate
|
||
*/get minAutoLevel(){const{levels,config:{minAutoBitrate}}=this;if(!levels)return 0;const len=levels.length;for(let i=0;i<len;i++){if(levels[i].maxBitrate>=minAutoBitrate){return i;}}return 0;}/**
|
||
* max level selectable in auto mode according to autoLevelCapping
|
||
*/get maxAutoLevel(){const{levels,autoLevelCapping,maxHdcpLevel}=this;let maxAutoLevel;if(autoLevelCapping===-1&&levels!=null&&levels.length){maxAutoLevel=levels.length-1;}else {maxAutoLevel=autoLevelCapping;}if(maxHdcpLevel){for(let i=maxAutoLevel;i--;){const hdcpLevel=levels[i].attrs['HDCP-LEVEL'];if(hdcpLevel&&hdcpLevel<=maxHdcpLevel){return i;}}}return maxAutoLevel;}get firstAutoLevel(){return this.abrController.firstAutoLevel;}/**
|
||
* next automatically selected quality level
|
||
*/get nextAutoLevel(){return this.abrController.nextAutoLevel;}/**
|
||
* this setter is used to force next auto level.
|
||
* this is useful to force a switch down in auto mode:
|
||
* in case of load error on level N, hls.js can set nextAutoLevel to N-1 for example)
|
||
* forced value is valid for one fragment. upon successful frag loading at forced level,
|
||
* this value will be resetted to -1 by ABR controller.
|
||
*/set nextAutoLevel(nextLevel){this.abrController.nextAutoLevel=nextLevel;}/**
|
||
* get the datetime value relative to media.currentTime for the active level Program Date Time if present
|
||
*/get playingDate(){return this.streamController.currentProgramDateTime;}get mainForwardBufferInfo(){return this.streamController.getMainFwdBufferInfo();}/**
|
||
* Find and select the best matching audio track, making a level switch when a Group change is necessary.
|
||
* Updates `hls.config.audioPreference`. Returns the selected track, or null when no matching track is found.
|
||
*/setAudioOption(audioOption){var _this$audioTrackContr;return (_this$audioTrackContr=this.audioTrackController)==null?void 0:_this$audioTrackContr.setAudioOption(audioOption);}/**
|
||
* Find and select the best matching subtitle track, making a level switch when a Group change is necessary.
|
||
* Updates `hls.config.subtitlePreference`. Returns the selected track, or null when no matching track is found.
|
||
*/setSubtitleOption(subtitleOption){var _this$subtitleTrackCo;(_this$subtitleTrackCo=this.subtitleTrackController)==null?void 0:_this$subtitleTrackCo.setSubtitleOption(subtitleOption);return null;}/**
|
||
* Get the complete list of audio tracks across all media groups
|
||
*/get allAudioTracks(){const audioTrackController=this.audioTrackController;return audioTrackController?audioTrackController.allAudioTracks:[];}/**
|
||
* Get the list of selectable audio tracks
|
||
*/get audioTracks(){const audioTrackController=this.audioTrackController;return audioTrackController?audioTrackController.audioTracks:[];}/**
|
||
* index of the selected audio track (index in audio track lists)
|
||
*/get audioTrack(){const audioTrackController=this.audioTrackController;return audioTrackController?audioTrackController.audioTrack:-1;}/**
|
||
* selects an audio track, based on its index in audio track lists
|
||
*/set audioTrack(audioTrackId){const audioTrackController=this.audioTrackController;if(audioTrackController){audioTrackController.audioTrack=audioTrackId;}}/**
|
||
* get the complete list of subtitle tracks across all media groups
|
||
*/get allSubtitleTracks(){const subtitleTrackController=this.subtitleTrackController;return subtitleTrackController?subtitleTrackController.allSubtitleTracks:[];}/**
|
||
* get alternate subtitle tracks list from playlist
|
||
*/get subtitleTracks(){const subtitleTrackController=this.subtitleTrackController;return subtitleTrackController?subtitleTrackController.subtitleTracks:[];}/**
|
||
* index of the selected subtitle track (index in subtitle track lists)
|
||
*/get subtitleTrack(){const subtitleTrackController=this.subtitleTrackController;return subtitleTrackController?subtitleTrackController.subtitleTrack:-1;}get media(){return this._media;}/**
|
||
* select an subtitle track, based on its index in subtitle track lists
|
||
*/set subtitleTrack(subtitleTrackId){const subtitleTrackController=this.subtitleTrackController;if(subtitleTrackController){subtitleTrackController.subtitleTrack=subtitleTrackId;}}/**
|
||
* Whether subtitle display is enabled or not
|
||
*/get subtitleDisplay(){const subtitleTrackController=this.subtitleTrackController;return subtitleTrackController?subtitleTrackController.subtitleDisplay:false;}/**
|
||
* Enable/disable subtitle display rendering
|
||
*/set subtitleDisplay(value){const subtitleTrackController=this.subtitleTrackController;if(subtitleTrackController){subtitleTrackController.subtitleDisplay=value;}}/**
|
||
* get mode for Low-Latency HLS loading
|
||
*/get lowLatencyMode(){return this.config.lowLatencyMode;}/**
|
||
* Enable/disable Low-Latency HLS part playlist and segment loading, and start live streams at playlist PART-HOLD-BACK rather than HOLD-BACK.
|
||
*/set lowLatencyMode(mode){this.config.lowLatencyMode=mode;}/**
|
||
* Position (in seconds) of live sync point (ie edge of live position minus safety delay defined by ```hls.config.liveSyncDuration```)
|
||
* @returns null prior to loading live Playlist
|
||
*/get liveSyncPosition(){return this.latencyController.liveSyncPosition;}/**
|
||
* Estimated position (in seconds) of live edge (ie edge of live playlist plus time sync playlist advanced)
|
||
* @returns 0 before first playlist is loaded
|
||
*/get latency(){return this.latencyController.latency;}/**
|
||
* maximum distance from the edge before the player seeks forward to ```hls.liveSyncPosition```
|
||
* configured using ```liveMaxLatencyDurationCount``` (multiple of target duration) or ```liveMaxLatencyDuration```
|
||
* @returns 0 before first playlist is loaded
|
||
*/get maxLatency(){return this.latencyController.maxLatency;}/**
|
||
* target distance from the edge as calculated by the latency controller
|
||
*/get targetLatency(){return this.latencyController.targetLatency;}/**
|
||
* the rate at which the edge of the current live playlist is advancing or 1 if there is none
|
||
*/get drift(){return this.latencyController.drift;}/**
|
||
* set to true when startLoad is called before MANIFEST_PARSED event
|
||
*/get forceStartLoad(){return this.streamController.forceStartLoad;}}Hls.defaultConfig=void 0;
|
||
|
||
var hls = /*#__PURE__*/Object.freeze({
|
||
__proto__: null,
|
||
AbrController: AbrController,
|
||
AttrList: AttrList,
|
||
AudioStreamController: AudioStreamController,
|
||
AudioTrackController: AudioTrackController,
|
||
BasePlaylistController: BasePlaylistController,
|
||
BaseSegment: BaseSegment,
|
||
BaseStreamController: BaseStreamController,
|
||
BufferController: BufferController,
|
||
CMCDController: CMCDController,
|
||
CapLevelController: CapLevelController,
|
||
ChunkMetadata: ChunkMetadata,
|
||
ContentSteeringController: ContentSteeringController,
|
||
DateRange: DateRange,
|
||
EMEController: EMEController,
|
||
ErrorActionFlags: ErrorActionFlags,
|
||
ErrorController: ErrorController,
|
||
ErrorDetails: ErrorDetails,
|
||
ErrorTypes: ErrorTypes,
|
||
Events: Events,
|
||
FPSController: FPSController,
|
||
Fragment: Fragment,
|
||
Hls: Hls,
|
||
HlsSkip: HlsSkip,
|
||
HlsUrlParameters: HlsUrlParameters,
|
||
KeySystemFormats: KeySystemFormats,
|
||
KeySystems: KeySystems,
|
||
Level: Level,
|
||
LevelDetails: LevelDetails,
|
||
LevelKey: LevelKey,
|
||
LoadStats: LoadStats,
|
||
MetadataSchema: MetadataSchema,
|
||
NetworkErrorAction: NetworkErrorAction,
|
||
Part: Part,
|
||
PlaylistLevelType: PlaylistLevelType,
|
||
SubtitleStreamController: SubtitleStreamController,
|
||
SubtitleTrackController: SubtitleTrackController,
|
||
TimelineController: TimelineController,
|
||
'default': Hls,
|
||
getMediaSource: getMediaSource,
|
||
isMSESupported: isMSESupported,
|
||
isSupported: isSupported
|
||
});
|
||
|
||
var require$$1 = getCjsExportFromNamespace(hls);
|
||
|
||
var vueAplayer_min = createCommonjsModule(function (module, exports) {
|
||
!function (t, e) {
|
||
module.exports = e(vue, function () {
|
||
try {
|
||
return require$$1;
|
||
} catch (t) {}
|
||
}()) ;
|
||
}("undefined" != typeof self ? self : commonjsGlobal, function (t, e) {
|
||
return function (t) {
|
||
function e(a) {
|
||
if (i[a]) return i[a].exports;
|
||
var r = i[a] = {
|
||
i: a,
|
||
l: !1,
|
||
exports: {}
|
||
};
|
||
return t[a].call(r.exports, r, r.exports, e), r.l = !0, r.exports;
|
||
}
|
||
var i = {};
|
||
return e.m = t, e.c = i, e.d = function (t, i, a) {
|
||
e.o(t, i) || Object.defineProperty(t, i, {
|
||
configurable: !1,
|
||
enumerable: !0,
|
||
get: a
|
||
});
|
||
}, e.n = function (t) {
|
||
var i = t && t.__esModule ? function () {
|
||
return t.default;
|
||
} : function () {
|
||
return t;
|
||
};
|
||
return e.d(i, "a", i), i;
|
||
}, e.o = function (t, e) {
|
||
return Object.prototype.hasOwnProperty.call(t, e);
|
||
}, e.p = "", e(e.s = 15);
|
||
}([function (t, e, i) {
|
||
|
||
function a(t, e, i, a, r, n, o, s) {
|
||
t = t || {};
|
||
var l = typeof t.default;
|
||
"object" !== l && "function" !== l || (t = t.default);
|
||
var u = "function" == typeof t ? t.options : t;
|
||
e && (u.render = e, u.staticRenderFns = i, u._compiled = !0), a && (u.functional = !0), n && (u._scopeId = n);
|
||
var c;
|
||
if (o ? (c = function (t) {
|
||
t = t || this.$vnode && this.$vnode.ssrContext || this.parent && this.parent.$vnode && this.parent.$vnode.ssrContext, t || "undefined" == typeof __VUE_SSR_CONTEXT__ || (t = __VUE_SSR_CONTEXT__), r && r.call(this, t), t && t._registeredComponents && t._registeredComponents.add(o);
}, u._ssrRegister = c) : r && (c = s ? function () {
r.call(this, this.$root.$options.shadowRoot);
} : r), c) if (u.functional) {
u._injectStyles = c;
var p = u.render;
u.render = function (t, e) {
return c.call(e), p(t, e);
};
} else {
var h = u.beforeCreate;
u.beforeCreate = h ? [].concat(h, c) : [c];
}
return {
exports: t,
options: u
};
}
e.a = a;
}, function (t, e) {
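// webpack module 1: looks like the css-loader base runtime; it collects injected CSS chunks and exposes
// toString()/i() helpers so styles (optionally with inline source maps via btoa) can be concatenated.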
function i(t, e) {
var i = t[1] || "",
r = t[3];
if (!r) return i;
if (e && "function" == typeof btoa) {
var n = a(r);
return [i].concat(r.sources.map(function (t) {
return "/*# sourceURL=" + r.sourceRoot + t + " */";
})).concat([n]).join("\n");
}
return [i].join("\n");
}
function a(t) {
return "/*# sourceMappingURL=data:application/json;charset=utf-8;base64," + btoa(unescape(encodeURIComponent(JSON.stringify(t)))) + " */";
}
t.exports = function (t) {
var e = [];
return e.toString = function () {
return this.map(function (e) {
var a = i(e, t);
return e[2] ? "@media " + e[2] + "{" + a + "}" : a;
}).join("");
}, e.i = function (t, i) {
"string" == typeof t && (t = [[null, t, ""]]);
for (var a = {}, r = 0; r < this.length; r++) {
var n = this[r][0];
"number" == typeof n && (a[n] = !0);
}
for (r = 0; r < t.length; r++) {
var o = t[r];
"number" == typeof o[0] && a[o[0]] || (i && !o[2] ? o[2] = i : i && (o[2] = "(" + o[2] + ") and (" + i + ")"), e.push(o));
}
}, e;
};
}, function (t, e, i) {
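// webpack module 2: looks like vue-style-loader's addStylesClient runtime; it creates <style> elements
// (keyed by data-vue-ssr-id), reference-counts them, and updates or removes them when components change.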
function a(t, e, i, a) {
|
||
v = i, m = a || {};
|
||
var n = Object(u.a)(t, e);
|
||
return r(n), function (e) {
|
||
for (var i = [], a = 0; a < n.length; a++) {
|
||
var o = n[a],
|
||
s = p[o.id];
|
||
s.refs--, i.push(s);
|
||
}
|
||
e ? (n = Object(u.a)(t, e), r(n)) : n = [];
|
||
for (var a = 0; a < i.length; a++) {
|
||
var s = i[a];
|
||
if (0 === s.refs) {
|
||
for (var l = 0; l < s.parts.length; l++) s.parts[l]();
|
||
delete p[s.id];
|
||
}
|
||
}
|
||
};
|
||
}
|
||
function r(t) {
|
||
for (var e = 0; e < t.length; e++) {
|
||
var i = t[e],
|
||
a = p[i.id];
|
||
if (a) {
|
||
a.refs++;
|
||
for (var r = 0; r < a.parts.length; r++) a.parts[r](i.parts[r]);
|
||
for (; r < i.parts.length; r++) a.parts.push(o(i.parts[r]));
|
||
a.parts.length > i.parts.length && (a.parts.length = i.parts.length);
|
||
} else {
|
||
for (var n = [], r = 0; r < i.parts.length; r++) n.push(o(i.parts[r]));
|
||
p[i.id] = {
|
||
id: i.id,
|
||
refs: 1,
|
||
parts: n
|
||
};
|
||
}
|
||
}
|
||
}
|
||
function n() {
|
||
var t = document.createElement("style");
|
||
return t.type = "text/css", h.appendChild(t), t;
|
||
}
|
||
function o(t) {
|
||
var e,
|
||
i,
|
||
a = document.querySelector("style[" + g + '~="' + t.id + '"]');
|
||
if (a) {
|
||
if (v) return y;
|
||
a.parentNode.removeChild(a);
|
||
}
|
||
if (b) {
|
||
var r = f++;
|
||
a = d || (d = n()), e = s.bind(null, a, r, !1), i = s.bind(null, a, r, !0);
|
||
} else a = n(), e = l.bind(null, a), i = function () {
|
||
a.parentNode.removeChild(a);
|
||
};
|
||
return e(t), function (a) {
|
||
if (a) {
|
||
if (a.css === t.css && a.media === t.media && a.sourceMap === t.sourceMap) return;
|
||
e(t = a);
|
||
} else i();
|
||
};
|
||
}
|
||
function s(t, e, i, a) {
|
||
var r = i ? "" : a.css;
|
||
if (t.styleSheet) t.styleSheet.cssText = x(e, r);else {
|
||
var n = document.createTextNode(r),
|
||
o = t.childNodes;
|
||
o[e] && t.removeChild(o[e]), o.length ? t.insertBefore(n, o[e]) : t.appendChild(n);
|
||
}
|
||
}
|
||
function l(t, e) {
|
||
var i = e.css,
|
||
a = e.media,
|
||
r = e.sourceMap;
|
||
if (a && t.setAttribute("media", a), m.ssrId && t.setAttribute(g, e.id), r && (i += "\n/*# sourceURL=" + r.sources[0] + " */", i += "\n/*# sourceMappingURL=data:application/json;base64," + btoa(unescape(encodeURIComponent(JSON.stringify(r)))) + " */"), t.styleSheet) t.styleSheet.cssText = i;else {
|
||
for (; t.firstChild;) t.removeChild(t.firstChild);
|
||
t.appendChild(document.createTextNode(i));
|
||
}
|
||
}
|
||
Object.defineProperty(e, "__esModule", {
|
||
value: !0
|
||
}), e.default = a;
|
||
var u = i(18),
|
||
c = "undefined" != typeof document;
|
||
if ("undefined" != typeof DEBUG && DEBUG && !c) throw new Error("vue-style-loader cannot be used in a non-browser environment. Use { target: 'node' } in your Webpack config to indicate a server-rendering environment.");
|
||
var p = {},
|
||
h = c && (document.head || document.getElementsByTagName("head")[0]),
|
||
d = null,
|
||
f = 0,
|
||
v = !1,
|
||
y = function () {},
|
||
m = null,
|
||
g = "data-vue-ssr-id",
|
||
b = "undefined" != typeof navigator && /msie [6-9]\b/.test(navigator.userAgent.toLowerCase()),
|
||
x = function () {
|
||
var t = [];
|
||
return function (e, i) {
|
||
return t[e] = i, t.filter(Boolean).join("\n");
|
||
};
|
||
}();
|
||
}, function (t, e, i) {
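// webpack module 3: shared utilities. a() parses LRC lyric lines such as "[mm:ss.xx] text" into sorted
// [seconds, text] pairs, e() compares dotted version strings, and the remaining helpers cover
// deprecation warnings and element offset calculations.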
function a(t) {
|
||
if (t) {
|
||
t = t.replace(/([^\]^\n])\[/g, function (t, e) {
|
||
return e + "\n[";
|
||
});
|
||
for (var e = t.split("\n"), i = [], a = e.length, r = 0; r < a; r++) {
|
||
var n = e[r].match(/\[(\d{2}):(\d{2})(\.(\d{2,3}))?]/g),
|
||
o = e[r].replace(/.*\[(\d{2}):(\d{2})(\.(\d{2,3}))?]/g, "").replace(/<(\d{2}):(\d{2})(\.(\d{2,3}))?>/g, "").replace(/^\s+|\s+$/g, "");
|
||
if (n) for (var s = n.length, l = 0; l < s; l++) {
|
||
var u = /\[(\d{2}):(\d{2})(\.(\d{2,3}))?]/.exec(n[l]),
|
||
c = 60 * u[1],
|
||
p = parseInt(u[2]),
|
||
h = u[4] ? parseInt(u[4]) / (2 === (u[4] + "").length ? 100 : 1e3) : 0,
|
||
d = c + p + h;
|
||
i.push([d, o]);
|
||
}
|
||
}
|
||
return i.sort(function (t, e) {
|
||
return t[0] - e[0];
|
||
}), i;
|
||
}
|
||
return [];
|
||
}
|
||
function r(t, e) {
|
||
if (t === e) return 0;
|
||
var i = t.split(".").map(Number),
|
||
a = u(i, 3),
|
||
r = a[0],
|
||
n = a[1],
|
||
o = a[2],
|
||
s = e.split(".").map(Number),
|
||
l = u(s, 3),
|
||
c = l[0],
|
||
p = l[1],
|
||
h = l[2];
|
||
if (r > c) return 1;
|
||
if (r === c) {
|
||
if (n > p) return 1;
|
||
if (n === p && o > h) return 1;
|
||
}
|
||
return -1;
|
||
}
|
||
function n(t) {
|
||
return console.warn("[Vue-APlayer] " + t);
|
||
}
|
||
function o(t, e, i) {
|
||
return n("'" + t + "' is deprecated since v" + e + ", and will be removed in future releases, use '" + i + "' instead");
|
||
}
|
||
function s(t) {
|
||
for (var e = t.offsetLeft, i = t.offsetParent, a = void 0; null !== i;) e += i.offsetLeft, i = i.offsetParent;
|
||
return a = document.body.scrollLeft + document.documentElement.scrollLeft, e - a;
|
||
}
|
||
function l(t) {
|
||
for (var e = t.offsetTop, i = t.offsetParent, a = void 0; null !== i;) e += i.offsetTop, i = i.offsetParent;
|
||
return a = document.body.scrollTop + document.documentElement.scrollTop, e - a;
|
||
}
|
||
e.d = a, e.e = r, e.f = n, e.a = o, e.b = s, e.c = l;
|
||
var u = function () {
|
||
function t(t, e) {
|
||
var i = [],
|
||
a = !0,
|
||
r = !1,
|
||
n = void 0;
|
||
try {
|
||
for (var o, s = t[Symbol.iterator](); !(a = (o = s.next()).done) && (i.push(o.value), !e || i.length !== e); a = !0);
|
||
} catch (t) {
|
||
r = !0, n = t;
|
||
} finally {
|
||
try {
|
||
!a && s.return && s.return();
|
||
} finally {
|
||
if (r) throw n;
|
||
}
|
||
}
|
||
return i;
|
||
}
|
||
return function (e, i) {
|
||
if (Array.isArray(e)) return e;
|
||
if (Symbol.iterator in Object(e)) return t(e, i);
|
||
throw new TypeError("Invalid attempt to destructure non-iterable instance");
|
||
};
|
||
}();
|
||
}, function (t, e, i) {
|
||
|
||
function a(t) {
|
||
i(25);
|
||
}
|
||
var r = i(7),
|
||
n = i(44),
|
||
o = i(0),
|
||
s = a,
|
||
l = Object(o.a)(r.a, n.a, n.b, !1, s, null, null);
|
||
e.a = l.exports;
|
||
}, function (t, e, i) {
|
||
|
||
function a(t) {
|
||
if (Array.isArray(t)) {
|
||
for (var e = 0, i = Array(t.length); e < t.length; e++) i[e] = t[e];
|
||
return i;
|
||
}
|
||
return Array.from(t);
|
||
}
|
||
var r = i(19),
|
||
n = i.n(r),
|
||
o = i(20),
|
||
s = i(46),
|
||
l = i(50),
|
||
u = i(62),
|
||
c = i(3),
|
||
p = function () {
|
||
function t(t, e) {
|
||
var i = [],
|
||
a = !0,
|
||
r = !1,
|
||
n = void 0;
|
||
try {
|
||
for (var o, s = t[Symbol.iterator](); !(a = (o = s.next()).done) && (i.push(o.value), !e || i.length !== e); a = !0);
|
||
} catch (t) {
|
||
r = !0, n = t;
|
||
} finally {
|
||
try {
|
||
!a && s.return && s.return();
|
||
} finally {
|
||
if (r) throw n;
|
||
}
|
||
}
|
||
return i;
|
||
}
|
||
return function (e, i) {
|
||
if (Array.isArray(e)) return e;
|
||
if (Symbol.iterator in Object(e)) return t(e, i);
|
||
throw new TypeError("Invalid attempt to destructure non-iterable instance");
|
||
};
|
||
}(),
|
||
h = !1,
|
||
d = Object(c.e)(n.a.version, "2.3.0") >= 0,
|
||
f = {},
|
||
v = null,
|
||
y = {
|
||
NONE: "none",
|
||
MUSIC: "music",
|
||
LIST: "list",
|
||
NO_REPEAT: "no-repeat",
|
||
REPEAT_ONE: "repeat-one",
|
||
REPEAT_ALL: "repeat-all"
|
||
},
|
||
m = {
|
||
name: "APlayer",
|
||
disableVersionBadge: !1,
|
||
components: {
|
||
Thumbnail: o.a,
|
||
Controls: l.a,
|
||
MusicList: s.a,
|
||
Lyrics: u.a
|
||
},
|
||
props: {
|
||
music: {
|
||
type: Object,
|
||
required: !0,
|
||
validator: function (t) {
|
||
return t.url && Object(c.a)("music.url", "1.4.0", "music.src"), t.author && Object(c.a)("music.author", "1.4.1", "music.artist"), t.src || t.url;
|
||
}
|
||
},
|
||
list: {
|
||
type: Array,
|
||
default: function () {
|
||
return [];
|
||
}
|
||
},
|
||
mini: {
|
||
type: Boolean,
|
||
default: !1
|
||
},
|
||
showLrc: {
|
||
type: Boolean,
|
||
default: !1
|
||
},
|
||
mutex: {
|
||
type: Boolean,
|
||
default: !0
|
||
},
|
||
theme: {
|
||
type: String,
|
||
default: "#41b883"
|
||
},
|
||
listMaxHeight: String,
|
||
listFolded: {
|
||
type: Boolean,
|
||
default: !1
|
||
},
|
||
float: {
|
||
type: Boolean,
|
||
default: !1
|
||
},
|
||
autoplay: {
|
||
type: Boolean,
|
||
default: !1
|
||
},
|
||
controls: {
|
||
type: Boolean,
|
||
default: !1
|
||
},
|
||
muted: {
|
||
type: Boolean,
|
||
default: !1
|
||
},
|
||
preload: String,
|
||
volume: {
|
||
type: Number,
|
||
default: .8,
|
||
validator: function (t) {
|
||
return t >= 0 && t <= 1;
|
||
}
|
||
},
|
||
shuffle: {
|
||
type: Boolean,
|
||
default: !1
|
||
},
|
||
repeat: {
|
||
type: String,
|
||
default: y.NO_REPEAT
|
||
},
|
||
listmaxheight: {
|
||
type: String,
|
||
validator: function (t) {
|
||
return t && Object(c.a)("listmaxheight", "1.1.2", "listMaxHeight"), !0;
|
||
}
|
||
},
|
||
narrow: {
|
||
type: Boolean,
|
||
default: !1,
|
||
validator: function (t) {
|
||
return t && Object(c.a)("narrow", "1.1.2", "mini"), !0;
|
||
}
|
||
},
|
||
showlrc: {
|
||
type: Boolean,
|
||
default: !1,
|
||
validator: function (t) {
|
||
return t && Object(c.a)("showlrc", "1.2.2", "showLrc"), !0;
|
||
}
|
||
}
|
||
},
|
||
data: function () {
|
||
return {
|
||
internalMusic: this.music,
|
||
isPlaying: !1,
|
||
isSeeking: !1,
|
||
wasPlayingBeforeSeeking: !1,
|
||
isMobile: /mobile/i.test(window.navigator.userAgent),
|
||
playStat: {
|
||
duration: 0,
|
||
loadedTime: 0,
|
||
playedTime: 0
|
||
},
|
||
showList: !this.listFolded,
|
||
audioPlayPromise: Promise.resolve(),
|
||
floatOriginX: 0,
|
||
floatOriginY: 0,
|
||
floatOffsetLeft: 0,
|
||
floatOffsetTop: 0,
|
||
selfAdaptingTheme: null,
|
||
internalMuted: this.muted,
|
||
internalVolume: this.volume,
|
||
isLoading: !1,
|
||
internalShuffle: this.shuffle,
|
||
internalRepeat: this.repeat,
|
||
shuffledList: []
|
||
};
|
||
},
|
||
computed: {
|
||
audio: function () {
|
||
return this.$refs.audio;
|
||
},
|
||
currentMusic: {
|
||
get: function () {
|
||
return this.internalMusic;
|
||
},
|
||
set: function (t) {
|
||
d && this.$emit("update:music", t), this.internalMusic = t;
|
||
}
|
||
},
|
||
isMiniMode: function () {
|
||
return this.mini || this.narrow;
|
||
},
|
||
shouldShowLrc: function () {
|
||
return this.showLrc || this.showlrc;
|
||
},
|
||
currentTheme: function () {
|
||
return this.selfAdaptingTheme || this.currentMusic.theme || this.theme;
|
||
},
|
||
isFloatMode: function () {
|
||
return this.float && !this.isMobile;
|
||
},
|
||
shouldAutoplay: function () {
|
||
return !this.isMobile && this.autoplay;
|
||
},
|
||
musicList: function () {
|
||
return this.list;
|
||
},
|
||
shouldShowNativeControls: function () {
|
||
return !1;
|
||
},
|
||
floatStyleObj: function () {
|
||
return {
|
||
transform: "translate(" + this.floatOffsetLeft + "px, " + this.floatOffsetTop + "px)",
|
||
webkitTransform: "translate(" + this.floatOffsetLeft + "px, " + this.floatOffsetTop + "px)"
|
||
};
|
||
},
|
||
currentPicStyleObj: function () {
|
||
return this.currentMusic && this.currentMusic.pic ? {
|
||
backgroundImage: "url(" + this.currentMusic.pic + ")"
|
||
} : {};
|
||
},
|
||
loadProgress: function () {
|
||
return 0 === this.playStat.duration ? 0 : this.playStat.loadedTime / this.playStat.duration;
|
||
},
|
||
playProgress: function () {
|
||
return 0 === this.playStat.duration ? 0 : this.playStat.playedTime / this.playStat.duration;
|
||
},
|
||
playIndex: {
|
||
get: function () {
|
||
return this.shuffledList.indexOf(this.currentMusic);
|
||
},
|
||
set: function (t) {
|
||
this.currentMusic = this.shuffledList[t % this.shuffledList.length];
|
||
}
|
||
},
|
||
shouldRepeat: function () {
|
||
return this.repeatMode !== y.NO_REPEAT;
|
||
},
|
||
isAudioMuted: {
|
||
get: function () {
|
||
return this.internalMuted;
|
||
},
|
||
set: function (t) {
|
||
d && this.$emit("update:muted", t), this.internalMuted = t;
|
||
}
|
||
},
|
||
audioVolume: {
|
||
get: function () {
|
||
return this.internalVolume;
|
||
},
|
||
set: function (t) {
|
||
d && this.$emit("update:volume", t), this.internalVolume = t;
|
||
}
|
||
},
|
||
shouldShuffle: {
|
||
get: function () {
|
||
return this.internalShuffle;
|
||
},
|
||
set: function (t) {
|
||
d && this.$emit("update:shuffle", t), this.internalShuffle = t;
|
||
}
|
||
},
|
||
repeatMode: {
|
||
get: function () {
|
||
switch (this.internalRepeat) {
|
||
case y.NONE:
|
||
case y.NO_REPEAT:
|
||
return y.NO_REPEAT;
|
||
case y.MUSIC:
|
||
case y.REPEAT_ONE:
|
||
return y.REPEAT_ONE;
|
||
default:
|
||
return y.REPEAT_ALL;
|
||
}
|
||
},
|
||
set: function (t) {
|
||
d && this.$emit("update:repeat", t), this.internalRepeat = t;
|
||
}
|
||
}
|
||
},
|
||
methods: {
|
||
onDragBegin: function () {
|
||
this.floatOriginX = this.floatOffsetLeft, this.floatOriginY = this.floatOffsetTop;
|
||
},
|
||
onDragAround: function (t) {
|
||
var e = t.offsetLeft,
|
||
i = t.offsetTop;
|
||
this.floatOffsetLeft = this.floatOriginX + e, this.floatOffsetTop = this.floatOriginY + i;
|
||
},
|
||
setNextMode: function () {
|
||
this.repeatMode === y.REPEAT_ALL ? this.repeatMode = y.REPEAT_ONE : this.repeatMode === y.REPEAT_ONE ? this.repeatMode = y.NO_REPEAT : this.repeatMode = y.REPEAT_ALL;
|
||
},
|
||
thenPlay: function () {
|
||
var t = this;
|
||
this.$nextTick(function () {
|
||
t.play();
|
||
});
|
||
},
|
||
toggle: function () {
|
||
this.audio.paused ? this.play() : this.pause();
|
||
},
|
||
play: function () {
|
||
var t = this;
|
||
this.mutex && (v && v !== this && v.pause(), v = this);
|
||
var e = this.audio.play();
|
||
if (e) return this.audioPlayPromise = new Promise(function (i, a) {
|
||
t.rejectPlayPromise = a, e.then(function (e) {
|
||
t.rejectPlayPromise = null, i(e);
|
||
}).catch(c.f);
|
||
});
|
||
},
|
||
pause: function () {
|
||
var t = this;
|
||
this.audioPlayPromise.then(function () {
|
||
t.audio.pause();
|
||
}).catch(function () {
|
||
t.audio.pause();
|
||
}), this.rejectPlayPromise && (this.rejectPlayPromise(), this.rejectPlayPromise = null);
|
||
},
|
||
onProgressDragBegin: function (t) {
|
||
this.wasPlayingBeforeSeeking = this.isPlaying, this.pause(), this.isSeeking = !0, isNaN(this.audio.duration) || (this.audio.currentTime = this.audio.duration * t);
|
||
},
|
||
onProgressDragging: function (t) {
|
||
isNaN(this.audio.duration) ? this.playStat.playedTime = 0 : this.audio.currentTime = this.audio.duration * t;
|
||
},
|
||
onProgressDragEnd: function (t) {
|
||
this.isSeeking = !1, this.wasPlayingBeforeSeeking && this.thenPlay();
|
||
},
|
||
toggleMute: function () {
|
||
this.setAudioMuted(!this.audio.muted);
|
||
},
|
||
setAudioMuted: function (t) {
|
||
this.audio.muted = t;
|
||
},
|
||
setAudioVolume: function (t) {
|
||
this.audio.volume = t, t > 0 && this.setAudioMuted(!1);
|
||
},
|
||
getShuffledList: function () {
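// Fisher-Yates shuffle over a copy of the playlist; the currently playing track is swapped back to
// index 0 afterwards so changing the shuffle order never interrupts the track that is already playing.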
if (!this.list.length) return [this.internalMusic];
|
||
var t = [].concat(a(this.list));
|
||
if (!this.internalShuffle || t.length <= 1) return t;
|
||
var e = t.indexOf(this.internalMusic);
|
||
if (2 === t.length && -1 !== e) return 0 === e ? t : [this.internalMusic, t[0]];
|
||
for (var i = t.length - 1; i > 0; i--) {
|
||
var r = Math.floor(Math.random() * (i + 1)),
|
||
n = t[i];
|
||
t[i] = t[r], t[r] = n;
|
||
}
|
||
if (-1 !== e && 0 !== (e = t.indexOf(this.internalMusic))) {
|
||
var o = [t[e], t[0]];
|
||
t[0] = o[0], t[e] = o[1];
|
||
}
|
||
return t;
|
||
},
|
||
onSelectSong: function (t) {
|
||
this.currentMusic === t ? this.toggle() : (this.currentMusic = t, this.thenPlay());
|
||
},
|
||
onAudioPlay: function () {
|
||
this.isPlaying = !0;
|
||
},
|
||
onAudioPause: function () {
|
||
this.isPlaying = !1;
|
||
},
|
||
onAudioWaiting: function () {
|
||
this.isLoading = !0;
|
||
},
|
||
onAudioCanplay: function () {
|
||
this.isLoading = !1;
|
||
},
|
||
onAudioDurationChange: function () {
|
||
1 !== this.audio.duration && (this.playStat.duration = this.audio.duration);
|
||
},
|
||
onAudioProgress: function () {
|
||
this.audio.buffered.length ? this.playStat.loadedTime = this.audio.buffered.end(this.audio.buffered.length - 1) : this.playStat.loadedTime = 0;
|
||
},
|
||
onAudioTimeUpdate: function () {
|
||
this.playStat.playedTime = this.audio.currentTime;
|
||
},
|
||
onAudioSeeking: function () {
|
||
this.playStat.playedTime = this.audio.currentTime;
|
||
},
|
||
onAudioSeeked: function () {
|
||
this.playStat.playedTime = this.audio.currentTime;
|
||
},
|
||
onAudioVolumeChange: function () {
|
||
this.audioVolume = this.audio.volume, this.isAudioMuted = this.audio.muted;
|
||
},
|
||
onAudioEnded: function () {
|
||
this.repeatMode === y.REPEAT_ALL ? (this.shouldShuffle && this.playIndex === this.shuffledList.length - 1 && (this.shuffledList = this.getShuffledList()), this.playIndex++, this.thenPlay()) : this.repeatMode === y.REPEAT_ONE ? this.thenPlay() : (this.playIndex++, 0 !== this.playIndex ? this.thenPlay() : 1 === this.shuffledList.length && (this.audio.currentTime = 0));
|
||
},
|
||
initAudio: function () {
|
||
var t = this;
|
||
this.audio.controls = this.shouldShowNativeControls, this.audio.muted = this.muted, this.audio.preload = this.preload, this.audio.volume = this.volume, ["abort", "canplay", "canplaythrough", "durationchange", "emptied", "encrypted", "ended", "error", "interruptbegin", "interruptend", "loadeddata", "loadedmetadata", "loadstart", "mozaudioavailable", "pause", "play", "playing", "progress", "ratechange", "seeked", "seeking", "stalled", "suspend", "timeupdate", "volumechange", "waiting"].forEach(function (e) {
|
||
t.audio.addEventListener(e, function (i) {
|
||
return t.$emit(e, i);
|
||
});
|
||
}), this.audio.addEventListener("play", this.onAudioPlay), this.audio.addEventListener("pause", this.onAudioPause), this.audio.addEventListener("abort", this.onAudioPause), this.audio.addEventListener("waiting", this.onAudioWaiting), this.audio.addEventListener("canplay", this.onAudioCanplay), this.audio.addEventListener("progress", this.onAudioProgress), this.audio.addEventListener("durationchange", this.onAudioDurationChange), this.audio.addEventListener("seeking", this.onAudioSeeking), this.audio.addEventListener("seeked", this.onAudioSeeked), this.audio.addEventListener("timeupdate", this.onAudioTimeUpdate), this.audio.addEventListener("volumechange", this.onAudioVolumeChange), this.audio.addEventListener("ended", this.onAudioEnded), this.currentMusic && (this.audio.src = this.currentMusic.src || this.currentMusic.url);
|
||
},
|
||
setSelfAdaptingTheme: function () {
|
||
var t = this;
|
||
if ("pic" === (this.currentMusic.theme || this.theme)) {
|
||
var e = this.currentMusic.pic;
|
||
if (f[e]) this.selfAdaptingTheme = f[e];else try {
|
||
new ColorThief().getColorAsync(e, function (i) {
|
||
var a = p(i, 3),
|
||
r = a[0],
|
||
n = a[1],
|
||
o = a[2];
|
||
f[e] = "rgb(" + r + ", " + n + ", " + o + ")", t.selfAdaptingTheme = "rgb(" + r + ", " + n + ", " + o + ")";
|
||
});
|
||
} catch (t) {
|
||
Object(c.f)("color-thief is required to support self-adapting theme");
|
||
}
|
||
} else this.selfAdaptingTheme = null;
|
||
}
|
||
},
|
||
watch: {
|
||
music: function (t) {
|
||
this.internalMusic = t;
|
||
},
|
||
currentMusic: {
|
||
handler: function (t) {
|
||
this.setSelfAdaptingTheme();
|
||
var e = t.src || t.url;
|
||
if (/\.m3u8(?=(#|\?|$))/.test(e)) {
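// For .m3u8 sources: prefer the browser's native HLS support (canPlayType), otherwise try the bundled
// hls.js (webpack module 66, which appears to resolve to the hls export prepared earlier in this file),
// and finally fall back to a plain src assignment with a warning.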
if (this.audio.canPlayType("application/x-mpegURL") || this.audio.canPlayType("application/vnd.apple.mpegURL")) this.audio.src = e;else try {
|
||
var a = i(66);
|
||
a.isSupported() ? (this.hls || (this.hls = new a()), this.hls.loadSource(e), this.hls.attachMedia(this.audio)) : (Object(c.f)("HLS is not supported on your browser"), this.audio.src = e);
|
||
} catch (t) {
|
||
Object(c.f)("hls.js is required to support m3u8"), this.audio.src = e;
|
||
}
|
||
} else this.audio.src = e;
|
||
}
|
||
},
|
||
shouldShowNativeControls: function (t) {
|
||
this.audio.controls = t;
|
||
},
|
||
isAudioMuted: function (t) {
|
||
this.audio.muted = t;
|
||
},
|
||
preload: function (t) {
|
||
this.audio.preload = t;
|
||
},
|
||
audioVolume: function (t) {
|
||
this.audio.volume = t;
|
||
},
|
||
muted: function (t) {
|
||
this.internalMuted = t;
|
||
},
|
||
volume: function (t) {
|
||
this.internalVolume = t;
|
||
},
|
||
shuffle: function (t) {
|
||
this.internalShuffle = t;
|
||
},
|
||
repeat: function (t) {
|
||
this.internalRepeat = t;
|
||
}
|
||
},
|
||
beforeCreate: function () {
|
||
m.disableVersionBadge || h || (console.log("\n\n %c Vue-APlayer 1.6.1 %c vue-aplayer.js.org \n", "color: #fff; background:#41b883; padding:5px 0;", "color: #fff; background: #35495e; padding:5px 0;"), h = !0);
|
||
},
|
||
created: function () {
|
||
this.shuffledList = this.getShuffledList();
|
||
},
|
||
mounted: function () {
|
||
this.initAudio(), this.setSelfAdaptingTheme(), this.autoplay && this.play();
|
||
},
|
||
beforeDestroy: function () {
|
||
v === this && (v = null), this.hls && this.hls.destroy();
|
||
}
|
||
};
|
||
e.a = m;
|
||
}, function (t, e, i) {
|
||
|
||
var a = i(4);
|
||
e.a = {
|
||
components: {
|
||
IconButton: a.a
|
||
},
|
||
props: {
|
||
pic: String,
|
||
theme: String,
|
||
playing: {
|
||
type: Boolean,
|
||
default: !1
|
||
},
|
||
enableDrag: {
|
||
type: Boolean,
|
||
default: !1
|
||
}
|
||
},
|
||
data: function () {
|
||
return {
|
||
hasMovedSinceMouseDown: !1,
|
||
dragStartX: 0,
|
||
dragStartY: 0
|
||
};
|
||
},
|
||
computed: {
|
||
currentPicStyleObj: function () {
|
||
return this.pic ? {
|
||
backgroundImage: "url(" + this.pic + ")",
|
||
backgroundColor: this.theme
|
||
} : {};
|
||
}
|
||
},
|
||
methods: {
|
||
onDragBegin: function (t) {
|
||
this.enableDrag && (this.hasMovedSinceMouseDown = !1, this.$emit("dragbegin"), this.dragStartX = t.clientX, this.dragStartY = t.clientY, document.addEventListener("mousemove", this.onDocumentMouseMove), document.addEventListener("mouseup", this.onDocumentMouseUp));
|
||
},
|
||
onDocumentMouseMove: function (t) {
|
||
this.hasMovedSinceMouseDown = !0, this.$emit("dragging", {
|
||
offsetLeft: t.clientX - this.dragStartX,
|
||
offsetTop: t.clientY - this.dragStartY
|
||
});
|
||
},
|
||
onDocumentMouseUp: function (t) {
|
||
document.removeEventListener("mouseup", this.onDocumentMouseUp), document.removeEventListener("mousemove", this.onDocumentMouseMove), this.$emit("dragend");
|
||
},
|
||
onClick: function () {
|
||
this.hasMovedSinceMouseDown || this.$emit("toggleplay");
|
||
}
|
||
}
|
||
};
|
||
}, function (t, e, i) {
|
||
|
||
var a = i(8);
|
||
e.a = {
|
||
components: {
|
||
Icon: a.a
|
||
},
|
||
props: ["icon"]
|
||
};
|
||
}, function (t, e, i) {
|
||
|
||
var a = i(9),
|
||
r = i(43),
|
||
n = i(0),
|
||
o = Object(n.a)(a.a, r.a, r.b, !1, null, null, null);
|
||
e.a = o.exports;
|
||
}, function (t, e, i) {
|
||
|
||
var a = function () {
|
||
function t(t, e) {
|
||
var i = [],
|
||
a = !0,
|
||
r = !1,
|
||
n = void 0;
|
||
try {
|
||
for (var o, s = t[Symbol.iterator](); !(a = (o = s.next()).done) && (i.push(o.value), !e || i.length !== e); a = !0);
|
||
} catch (t) {
|
||
r = !0, n = t;
|
||
} finally {
|
||
try {
|
||
!a && s.return && s.return();
|
||
} finally {
|
||
if (r) throw n;
|
||
}
|
||
}
|
||
return i;
|
||
}
|
||
return function (e, i) {
|
||
if (Array.isArray(e)) return e;
|
||
if (Symbol.iterator in Object(e)) return t(e, i);
|
||
throw new TypeError("Invalid attempt to destructure non-iterable instance");
|
||
};
|
||
}(),
|
||
r = i(27),
|
||
n = r.keys().reduce(function (t, e) {
|
||
var i = r(e),
|
||
n = i.match(/^<svg.+?viewBox="(.+?)".*><path.+?d="(.+?)".*><\/path><\/svg>$/),
|
||
o = a(n, 3),
|
||
s = (o[0], o[1]),
|
||
l = o[2];
|
||
return t[e.match(/^.*\/(.+?)\.svg$/)[1]] = {
|
||
viewBox: s,
|
||
d: l
|
||
}, t;
|
||
}, {});
|
||
e.a = {
|
||
props: ["type"],
|
||
computed: {
|
||
svg: function () {
|
||
this.type;
|
||
return "prev" !== this.type && "next" !== this.type || "skip", n[this.type] || {};
|
||
},
|
||
style: function () {
|
||
if ("next" === this.type) return {
|
||
transform: "rotate(180deg)"
|
||
};
|
||
}
|
||
}
|
||
};
|
||
}, function (t, e, i) {
|
||
|
||
e.a = {
|
||
props: {
|
||
show: {
|
||
type: Boolean,
|
||
default: !0
|
||
},
|
||
currentMusic: Object,
|
||
musicList: {
|
||
type: Array,
|
||
default: function () {
|
||
return [];
|
||
}
|
||
},
|
||
playIndex: {
|
||
type: Number,
|
||
default: 0
|
||
},
|
||
theme: String,
|
||
listmaxheight: String
|
||
},
|
||
computed: {
|
||
listHeightStyle: function () {
|
||
return {
|
||
height: 33 * this.musicList.length - 1 + "px",
|
||
maxHeight: this.listmaxheight || ""
|
||
};
|
||
}
|
||
}
|
||
};
|
||
}, function (t, e, i) {
|
||
|
||
var a = i(4),
|
||
r = i(53),
|
||
n = i(57);
|
||
e.a = {
|
||
components: {
|
||
IconButton: a.a,
|
||
VProgress: r.a,
|
||
Volume: n.a
|
||
},
|
||
props: ["shuffle", "repeat", "stat", "theme", "volume", "muted"],
|
||
computed: {
|
||
loadProgress: function () {
|
||
return 0 === this.stat.duration ? 0 : this.stat.loadedTime / this.stat.duration;
|
||
},
|
||
playProgress: function () {
|
||
return 0 === this.stat.duration ? 0 : this.stat.playedTime / this.stat.duration;
|
||
}
|
||
},
|
||
methods: {
|
||
secondToTime: function (t) {
|
||
if (isNaN(t)) return "00:00";
|
||
var e = function (t) {
|
||
return t < 10 ? "0" + t : "" + t;
|
||
},
|
||
i = Math.trunc(t / 60),
|
||
a = Math.trunc(t - 60 * i),
|
||
r = Math.trunc(i / 60),
|
||
n = Math.trunc(t / 60 - 60 * Math.trunc(t / 60 / 60));
|
||
return t >= 3600 ? e(r) + ":" + e(n) + ":" + e(a) : e(i) + ":" + e(a);
|
||
}
|
||
}
|
||
};
|
||
}, function (t, e, i) {
|
||
|
||
var a = i(3),
|
||
r = i(8);
|
||
e.a = {
|
||
components: {
|
||
Icon: r.a
|
||
},
|
||
props: ["loadProgress", "playProgress", "theme"],
|
||
data: function () {
|
||
return {
|
||
thumbHovered: !1
|
||
};
|
||
},
|
||
methods: {
|
||
onThumbMouseDown: function (t) {
|
||
var e = this.$refs.barWrap.clientWidth,
|
||
i = (t.clientX - Object(a.b)(this.$refs.barWrap)) / e;
|
||
i = i > 0 ? i : 0, i = i < 1 ? i : 1, this.$emit("dragbegin", i), document.addEventListener("mousemove", this.onDocumentMouseMove), document.addEventListener("mouseup", this.onDocumentMouseUp);
|
||
},
|
||
onDocumentMouseMove: function (t) {
|
||
var e = this.$refs.barWrap.clientWidth,
|
||
i = (t.clientX - Object(a.b)(this.$refs.barWrap)) / e;
|
||
i = i > 0 ? i : 0, i = i < 1 ? i : 1, this.$emit("dragging", i);
|
||
},
|
||
onDocumentMouseUp: function (t) {
|
||
document.removeEventListener("mouseup", this.onDocumentMouseUp), document.removeEventListener("mousemove", this.onDocumentMouseMove);
|
||
var e = this.$refs.barWrap.clientWidth,
|
||
i = (t.clientX - Object(a.b)(this.$refs.barWrap)) / e;
|
||
i = i > 0 ? i : 0, i = i < 1 ? i : 1, this.$emit("dragend", i);
|
||
},
|
||
onThumbTouchStart: function (t) {
|
||
var e = this.$refs.barWrap.clientWidth,
|
||
i = (t.clientX - Object(a.b)(this.$refs.barWrap)) / e;
|
||
i = i > 0 ? i : 0, i = i < 1 ? i : 1, this.$emit("dragbegin", i), document.addEventListener("touchmove", this.onDocumentTouchMove), document.addEventListener("touchend", this.onDocumentTouchEnd);
|
||
},
|
||
onDocumentTouchMove: function (t) {
|
||
var e = t.changedTouches[0],
|
||
i = this.$refs.barWrap.clientWidth,
|
||
r = (e.clientX - Object(a.b)(this.$refs.barWrap)) / i;
|
||
r = r > 0 ? r : 0, r = r < 1 ? r : 1, this.$emit("dragging", r);
|
||
},
|
||
onDocumentTouchEnd: function (t) {
|
||
document.removeEventListener("touchend", this.onDocumentTouchEnd), document.removeEventListener("touchmove", this.onDocumentTouchMove);
|
||
var e = t.changedTouches[0],
|
||
i = this.$refs.barWrap.clientWidth,
|
||
r = (e.clientX - Object(a.b)(this.$refs.barWrap)) / i;
|
||
r = r > 0 ? r : 0, r = r < 1 ? r : 1, this.$emit("dragend", r);
|
||
}
|
||
}
|
||
};
|
||
}, function (t, e, i) {
|
||
|
||
var a = i(4),
|
||
r = i(3);
|
||
e.a = {
|
||
components: {
|
||
IconButton: a.a
|
||
},
|
||
props: ["volume", "muted", "theme"],
|
||
computed: {
|
||
volumeIcon: function () {
|
||
return this.muted || this.volume <= 0 ? "volume-off" : this.volume >= 1 ? "volume-up" : "volume-down";
|
||
}
|
||
},
|
||
methods: {
|
||
adjustVolume: function (t) {
|
||
var e = (40 - t.clientY + Object(r.c)(this.$refs.bar)) / 40;
|
||
e = e > 0 ? e : 0, e = e < 1 ? e : 1, this.$emit("setvolume", e);
|
||
},
|
||
onBarMouseDown: function () {
|
||
document.addEventListener("mousemove", this.onDocumentMouseMove), document.addEventListener("mouseup", this.onDocumentMouseUp);
|
||
},
|
||
onDocumentMouseMove: function (t) {
|
||
var e = (40 - t.clientY + Object(r.c)(this.$refs.bar)) / 40;
|
||
e = e > 0 ? e : 0, e = e < 1 ? e : 1, this.$emit("setvolume", e);
|
||
},
|
||
onDocumentMouseUp: function (t) {
|
||
document.removeEventListener("mouseup", this.onDocumentMouseUp), document.removeEventListener("mousemove", this.onDocumentMouseMove);
|
||
var e = (40 - t.clientY + Object(r.c)(this.$refs.bar)) / 40;
|
||
e = e > 0 ? e : 0, e = e < 1 ? e : 1, this.$emit("setvolume", e);
|
||
}
|
||
}
|
||
};
|
||
}, function (t, e, i) {
|
||
|
||
var a = i(3);
|
||
e.a = {
|
||
props: {
|
||
currentMusic: {
|
||
type: Object,
|
||
required: !0
|
||
},
|
||
playStat: {
|
||
type: Object,
|
||
required: !0
|
||
}
|
||
},
|
||
data: function () {
|
||
return {
|
||
displayLrc: "",
|
||
currentLineIndex: 0
|
||
};
|
||
},
|
||
computed: {
|
||
lrcLines: function () {
|
||
return Object(a.d)(this.displayLrc);
|
||
},
|
||
currentLine: function () {
|
||
return this.currentLineIndex > this.lrcLines.length - 1 ? null : this.lrcLines[this.currentLineIndex];
|
||
},
|
||
transformStyle: function () {
|
||
return {
|
||
transform: "translateY(" + 16 * -this.currentLineIndex + "px)",
|
||
webkitTransform: "translateY(" + 16 * -this.currentLineIndex + "px)"
|
||
};
|
||
}
|
||
},
|
||
methods: {
|
||
applyLrc: function (t) {
|
||
/^https?:\/\//.test(t) ? this.fetchLrc(t) : this.displayLrc = t;
|
||
},
|
||
fetchLrc: function (t) {
|
||
var e = this;
|
||
fetch(t).then(function (t) {
|
||
return t.text();
|
||
}).then(function (t) {
|
||
e.displayLrc = t;
|
||
});
|
||
},
|
||
hideLrc: function () {
|
||
this.displayLrc = "";
|
||
}
|
||
},
|
||
watch: {
|
||
currentMusic: {
|
||
immediate: !0,
|
||
handler: function (t) {
|
||
this.currentLineIndex = 0, t.lrc ? this.applyLrc(t.lrc) : this.hideLrc();
|
||
}
|
||
},
|
||
"playStat.playedTime": function (t) {
|
||
for (var e = 0; e < this.lrcLines.length; e++) {
|
||
var i = this.lrcLines[e],
|
||
a = this.lrcLines[e + 1];
|
||
t >= i[0] && (!a || t < a[0]) && (this.currentLineIndex = e);
|
||
}
|
||
}
|
||
}
|
||
};
|
||
}, function (t, e, i) {
|
||
|
||
function a(t) {
|
||
i(16);
|
||
}
|
||
Object.defineProperty(e, "__esModule", {
|
||
value: !0
|
||
});
|
||
var r = i(5),
|
||
n = i(67),
|
||
o = i(0),
|
||
s = a,
|
||
l = Object(o.a)(r.a, n.a, n.b, !1, s, null, null);
|
||
e.default = l.exports;
|
||
}, function (t, e, i) {
|
||
var a = i(17);
|
||
"string" == typeof a && (a = [[t.i, a, ""]]), a.locals && (t.exports = a.locals);
|
||
var r = i(2).default;
|
||
r("48028a76", a, !0, {});
|
||
}, function (t, e, i) {
|
||
e = t.exports = i(1)(!1), e.push([t.i, ".aplayer{font-family:Arial,Helvetica,sans-serif;color:#000;background-color:#fff;margin:5px;-webkit-box-shadow:0 2px 2px 0 rgba(0,0,0,.07),0 1px 5px 0 rgba(0,0,0,.1);box-shadow:0 2px 2px 0 rgba(0,0,0,.07),0 1px 5px 0 rgba(0,0,0,.1);border-radius:2px;overflow:hidden;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;line-height:normal;position:relative}.aplayer *{-webkit-box-sizing:content-box;box-sizing:content-box}.aplayer .aplayer-lrc-content{display:none}.aplayer .aplayer-body{display:-webkit-box;display:-ms-flexbox;display:flex;position:relative}.aplayer .aplayer-body .aplayer-info{-webkit-box-flex:1;-ms-flex-positive:1;flex-grow:1;display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-ms-flex-direction:column;flex-direction:column;text-align:start;padding:14px 7px 0 10px;height:66px;-webkit-box-sizing:border-box;box-sizing:border-box;overflow:hidden}.aplayer .aplayer-body .aplayer-info .aplayer-music{-webkit-box-flex:1;-ms-flex-positive:1;flex-grow:1;overflow:hidden;white-space:nowrap;text-overflow:ellipsis;margin-left:5px;-webkit-user-select:text;-moz-user-select:text;-ms-user-select:text;user-select:text;cursor:default;padding-bottom:2px}.aplayer .aplayer-body .aplayer-info .aplayer-music .aplayer-title{font-size:14px}.aplayer .aplayer-body .aplayer-info .aplayer-music .aplayer-author{font-size:12px;color:#666}.aplayer .aplayer-body .aplayer-info .aplayer-lrc{z-index:0}.aplayer audio[controls]{display:block;width:100%}.aplayer.aplayer-narrow{width:66px}.aplayer.aplayer-withlrc .aplayer-body .aplayer-pic{height:90px;width:90px}.aplayer.aplayer-withlrc .aplayer-body .aplayer-info{height:90px;padding:10px 7px 0}.aplayer.aplayer-withlist .aplayer-body .aplayer-info{border-bottom:1px solid #e9e9e9}.aplayer.aplayer-withlist .aplayer-body .aplayer-controller .aplayer-time .aplayer-icon.aplayer-icon-menu{display:block}.aplayer.aplayer-float{z-index:1}@-webkit-keyframes aplayer-roll{0%{left:0}to{left:-100%}}@keyframes aplayer-roll{0%{left:0}to{left:-100%}}", ""]);
|
||
}, function (t, e, i) {
|
||
|
||
function a(t, e) {
|
||
for (var i = [], a = {}, r = 0; r < e.length; r++) {
|
||
var n = e[r],
|
||
o = n[0],
|
||
s = n[1],
|
||
l = n[2],
|
||
u = n[3],
|
||
c = {
|
||
id: t + ":" + r,
|
||
css: s,
|
||
media: l,
|
||
sourceMap: u
|
||
};
|
||
a[o] ? a[o].parts.push(c) : i.push(a[o] = {
|
||
id: o,
|
||
parts: [c]
|
||
});
|
||
}
|
||
return i;
|
||
}
|
||
e.a = a;
|
||
}, function (e, i) {
|
||
e.exports = t;
|
||
}, function (t, e, i) {
|
||
|
||
function a(t) {
|
||
i(21);
|
||
}
|
||
var r = i(6),
|
||
n = i(45),
|
||
o = i(0),
|
||
s = a,
|
||
l = Object(o.a)(r.a, n.a, n.b, !1, s, null, null);
|
||
e.a = l.exports;
|
||
}, function (t, e, i) {
|
||
var a = i(22);
|
||
"string" == typeof a && (a = [[t.i, a, ""]]), a.locals && (t.exports = a.locals);
|
||
var r = i(2).default;
|
||
r("082b31c5", a, !0, {});
|
||
}, function (t, e, i) {
|
||
var a = i(23);
|
||
e = t.exports = i(1)(!1), e.push([t.i, ".aplayer-float .aplayer-pic:active{cursor:move}.aplayer-pic{-ms-flex-negative:0;flex-shrink:0;position:relative;height:66px;width:66px;background-image:url(" + a(i(24)) + ");background-size:cover;-webkit-transition:all .3s ease;transition:all .3s ease;cursor:pointer}.aplayer-pic:hover .aplayer-button{opacity:1}.aplayer-pic .aplayer-button{position:absolute;border-radius:50%;opacity:.8;text-shadow:0 1px 1px rgba(0,0,0,.2);-webkit-box-shadow:0 1px 1px rgba(0,0,0,.2);box-shadow:0 1px 1px rgba(0,0,0,.2);background:rgba(0,0,0,.2);-webkit-transition:all .1s ease;transition:all .1s ease}.aplayer-pic .aplayer-button .aplayer-fill{fill:#fff}.aplayer-pic .aplayer-play{width:26px;height:26px;border:2px solid #fff;bottom:50%;right:50%;margin:0 -15px -15px 0}.aplayer-pic .aplayer-play .aplayer-icon-play{position:absolute;top:3px;left:4px;height:20px;width:20px}.aplayer-pic .aplayer-pause{width:16px;height:16px;border:2px solid #fff;bottom:4px;right:4px}.aplayer-pic .aplayer-pause .aplayer-icon-pause{position:absolute;top:2px;left:2px;height:12px;width:12px}", ""]);
|
||
}, function (t, e) {
|
||
t.exports = function (t) {
|
||
return "string" != typeof t ? t : (/^['"].*['"]$/.test(t) && (t = t.slice(1, -1)), /["'() \t\n]/.test(t) ? '"' + t.replace(/"/g, '\\"').replace(/\n/g, "\\n") + '"' : t);
|
||
};
|
||
}, function (t, e) {
|
||
t.exports = "data:image/jpeg;base64,/9j/4QAYRXhpZgAASUkqAAgAAAAAAAAAAAAAAP/sABFEdWNreQABAAQAAAAeAAD/4QMfaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLwA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/PiA8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJBZG9iZSBYTVAgQ29yZSA1LjYtYzA2NyA3OS4xNTc3NDcsIDIwMTUvMDMvMzAtMjM6NDA6NDIgICAgICAgICI+IDxyZGY6UkRGIHhtbG5zOnJkZj0iaHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi1yZGYtc3ludGF4LW5zIyI+IDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiIHhtbG5zOnhtcE1NPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvbW0vIiB4bWxuczpzdFJlZj0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL3NUeXBlL1Jlc291cmNlUmVmIyIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bXBNTTpEb2N1bWVudElEPSJ4bXAuZGlkOjE2NjQ3NUZBM0Y4RDExRTY4NzJCRDdCNkZCQTQ0MjNBIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOjE2NjQ3NUY5M0Y4RDExRTY4NzJCRDdCNkZCQTQ0MjNBIiB4bXA6Q3JlYXRvclRvb2w9IkFkb2JlIFBob3Rvc2hvcCBDQyAyMDE1IE1hY2ludG9zaCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSI5OENEMEFFRjM0NTI1NjE0NEREQkU4RjkxRjAwNjM3NiIgc3RSZWY6ZG9jdW1lbnRJRD0iOThDRDBBRUYzNDUyNTYxNDREREJFOEY5MUYwMDYzNzYiLz4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz7/7gAOQWRvYmUAZMAAAAAB/9sAhAAQCwsLDAsQDAwQFw8NDxcbFBAQFBsfFxcXFxcfHhcaGhoaFx4eIyUnJSMeLy8zMy8vQEBAQEBAQEBAQEBAQEBAAREPDxETERUSEhUUERQRFBoUFhYUGiYaGhwaGiYwIx4eHh4jMCsuJycnLis1NTAwNTVAQD9AQEBAQEBAQEBAQED/wAARCABkAGQDASIAAhEBAxEB/8QAgwAAAgIDAQAAAAAAAAAAAAAAAAYBBQIDBAcBAQEBAAAAAAAAAAAAAAAAAAABAhAAAQIEBAEJBgMHBQAAAAAAAQIDABEEBSExEgZBUWFxgaGxIhMUkTJCUmIVI0MWwdHh8XKSsvCCojNzEQEBAQEBAQEBAAAAAAAAAAAAAREhMVFBYf/aAAwDAQACEQMRAD8AaJ8vCJEYTjIZxtlIicc40VFZS0idVS6lpP1HE9Aind3dSrWWbdTPVruXgSQn98Awd0SBC+mp3fVYtUjFGk5F5U1S6Me6Mvtu6ncXbo01zNtzl2CJovwZxML/ANl3DwvZn/5fxiPt+72sWbkw/Lg4jTP/AImGhhiYWlXXdlD4q23IqWh7zlOZ/wCGrujpt+7bTWKDTijSvEy0O4CfJqy9sNMXmWMTECRExjzxMUEEEEBxLcbbQXHVBCEialKMgBFBU7jqax/0dmbU64fzJYy+aZwSOcxT7kvdPXVJpU6jTU5IC0HBauKucDhF7tS3ejolVJK51UlJQrCSRkeuJqppdspcV593dNU8cS0kkNjpPvKi8ZaZp2w3TtpabGSUAJHZEgzjXUVdPStebUOBpE5AnieQDieiKjeYyELVVva3ML0IZddI44IHaZxtod52upcDbqV0ylGSVLkUTP1JyibDDBOJxzjTUF8UzqqdIVUBtRZByK9J09seb1lzuKawuIqngRLSorUDMZ6k8DPMSwhaSPTwSDFbd7Bb7s2rzkBupl4KlIksH6vmHTE2GucuNqp6p3/tIKXCOKknST1xYgZDlihPsNxrLTXItFevXTuLU02omZadQZFP9Jw9ohxjz2tfF03GhFKdQXV6kqHINCJ/2tTj0KYJiQow6oIJY5QRR5hYLM5cK9KHkFNO1JbxIImOCeuPREyAAAkAJARyW63s26n8hlSnATqUtZmonnlKOucokhQtxDTa3XTpbbSVrVyJSNRhFq6usvNyap0K0v1JA5mG1YhtPJJOKzxOENG5HS3Yq1ScyhKSOZS0pPZCts8+ZfQtWK/LcUOk/wA4X3FhwoLJbKBgMtMIWZeN1xKVqWecqB9kJm7aKlo7wpulQGm3G0OKbT7qVKmDIcAZTh/LiW0KW4oJQgFS1HAAJEyTHnb6ndxX5XlAgVCwlH0MoEpnoSJwpD5ZFrXZ6JThOtTKJk9GHZCxvZmn9YHkJSh1KGw6QAC4p0uEauUhKIcmW0NNIaQJIbSEp5kpEhHntyqV3q7hlkzFQ/4T9ODSPYhM+uFI7rbZ9zU1EzXWuoGl5Ic9Pq0nH6XPAZ9MY1+6r2hh+3VjKGKojQtwApWlKhjhMjEcYZrzcW7JavMaA1pAZpUn5pSB6EgThT2xaTeLi5U1ZLjLJ8x4qzccUZhJ7zE/g6dlrtNO+t+pfSisUNDKF+EJScyFHCZh5BEpgzB4xR3TaVqr0lTKBR1BEw42JIJ+tvL2ShaZuN62xWejqZuMiRLKjqQtB+JpXD/U4vh69BxnKCK/73Qfa/uus+m0z+rVl5cvmnhBFRsHLyxIkrolGIMhKJSchAcl4pzVWmsYAmtbSijnUjxp7UwibdrEUd4pnlnS2olCycgFjTjHo4VHm9/paeku1QxTKCmtWrSPyyrFTf8AtiX6sW+5dwmtV9st5K2SoJdWnEuqnghP0z9sXe2rCLXTl18A1rwGvj5afkH7YoNov2aneW7WLCK2cmVOYISn6Tlq6Yaau+2mkaLjlU2ogYNtkLWo8JBMJ9GndFzFBanEpMqipmy1ygKHjV1J74odkW4u1blwWPw6ceW0eVxYx9ie+K+oeuG57sA0iXwtozSy1P3lHvh+t1AzbqNqkY9xsYq4qUcVKPSYe0/C9vxp9VPRvAEstqWlZGSVLCdM+mRjn2Xd6KkS9R1K0sqcUFtuKwSrCRSTDg42262pp1CXGljStChqSoHlBigqdk2h5RUyt2mn8CSFo6tePbDO6Ll67W1hOtyrZSn+sHsGMJW6r3S3Z9hukQS3T6gHSJFZXLBIzlhFs3sO3pV+JVPLHIEoR2+KLm32C024hdMwPNGTrh1r6irLqh2pwvfp+4fpPydJ9T5vqfT/ABaJadMvmljKCHLjxnBDDXDPGXGJmTkcogETMshjyxlPhFGqqfVT0b9QMSy2twDnSkkdsJtoomK7cC2
KoB1plKtSVfmKT4ST0qUVQ7KbQ62th3xNuJUhY46VDSewwhvqrdvXsPrTqUMZ/C82fCVJP1dhiVYvKjY9vcVqpqhxgH8tQDgHQZpMRT7EokkF+qccHyISlufX4oubddKG5shymWCvNbRwWk84jtBMgeSGRNaKOgo7eyWaNoNIPvEYqUfqUcTHVOMRIxOKscooyBxg5eSIM5T48IkY/vgJOPVBOXOIBM80aKqspaNvzap1LaRlM4noGZgOjVBC5+sqX1ejyj6aUp6vxf6tGUuac4ImwxbAkKlEzBywjHGUgermiRPLhFGYJ48Y01tDSXBg09Y2HG5+E5KSZZoUMo2AgZRkDiBLDiIBQq9n3ClcL9pf80JxSkny3k9fuqjBvcu4bYfLuDBWBh+MgoV/eMDDoMyZ4RIM0kETT8pxETPi6WmN9UKhJ+ncQTnpIUP2R1p3jZCMVOJ5igxYu2q1vmbtGwvn0JB7JRznbthOJoW8eQqHcqHU40K3nZAMFOKllJB/bHI9vuiTMU9M44o/MQkdk4tUbdsaDMUTXXNXeY6maChp5eTTNI5ClCQe6HThWN+3Rc/Bb6UtIV8SUH/NeEZ02zrhWOefdqognNKT5izzajgIbpz7gIkfzhhqs/TFk9J6b0w05+ZM+ZPl1wRay9kEUV4y+qXZGachyc8EEBKeMAnLCf8ACCCAzE5d8ZHMS64IIA7oy+HDqgggIEpYdUZJnpE84IICeScSJYwQQE8IIIID/9k=";
|
||
}, function (t, e, i) {
|
||
var a = i(26);
|
||
"string" == typeof a && (a = [[t.i, a, ""]]), a.locals && (t.exports = a.locals);
|
||
var r = i(2).default;
|
||
r("a4518b4e", a, !0, {});
|
||
}, function (t, e, i) {
|
||
e = t.exports = i(1)(!1), e.push([t.i, ".aplayer-icon{width:15px;height:15px;border:none;background-color:transparent;outline:none;cursor:pointer;opacity:.8;vertical-align:middle;padding:0;font-size:12px;margin:0;display:inline}.aplayer-icon:hover{opacity:1}.aplayer-icon .aplayer-fill{-webkit-transition:all .2s ease-in-out;transition:all .2s ease-in-out}", ""]);
|
||
}, function (t, e, i) {
|
||
function a(t) {
|
||
return i(r(t));
|
||
}
|
||
function r(t) {
|
||
var e = n[t];
|
||
if (!(e + 1)) throw new Error("Cannot find module '" + t + "'.");
|
||
return e;
|
||
}
|
||
var n = {
|
||
"./loading.svg": 28,
|
||
"./lrc.svg": 29,
|
||
"./menu.svg": 30,
|
||
"./no-repeat.svg": 31,
|
||
"./pause.svg": 32,
|
||
"./play.svg": 33,
|
||
"./repeat-all-legacy.svg": 34,
|
||
"./repeat-all.svg": 35,
|
||
"./repeat-one-legacy.svg": 36,
|
||
"./repeat-one.svg": 37,
|
||
"./shuffle.svg": 38,
|
||
"./skip.svg": 39,
|
||
"./volume-down.svg": 40,
|
||
"./volume-off.svg": 41,
|
||
"./volume-up.svg": 42
|
||
};
|
||
a.keys = function () {
|
||
return Object.keys(n);
|
||
}, a.resolve = r, t.exports = a, a.id = 27;
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 32 32"><path d="M4 16c0-6.6 5.4-12 12-12s12 5.4 12 12c0 1.2-0.8 2-2 2s-2-0.8-2-2c0-4.4-3.6-8-8-8s-8 3.6-8 8 3.6 8 8 8c1.2 0 2 0.8 2 2s-0.8 2-2 2c-6.6 0-12-5.4-12-12z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 32 32"><path d="M26.667 5.333h-21.333c-0 0-0.001 0-0.001 0-1.472 0-2.666 1.194-2.666 2.666 0 0 0 0.001 0 0.001v-0 16c0 0 0 0.001 0 0.001 0 1.472 1.194 2.666 2.666 2.666 0 0 0.001 0 0.001 0h21.333c0 0 0.001 0 0.001 0 1.472 0 2.666-1.194 2.666-2.666 0-0 0-0.001 0-0.001v0-16c0-0 0-0.001 0-0.001 0-1.472-1.194-2.666-2.666-2.666-0 0-0.001 0-0.001 0h0zM5.333 16h5.333v2.667h-5.333v-2.667zM18.667 24h-13.333v-2.667h13.333v2.667zM26.667 24h-5.333v-2.667h5.333v2.667zM26.667 18.667h-13.333v-2.667h13.333v2.667z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="-5 0 32 32"><path d="M20.8 14.4q0.704 0 1.152 0.48t0.448 1.12-0.48 1.12-1.12 0.48h-19.2q-0.64 0-1.12-0.48t-0.48-1.12 0.448-1.12 1.152-0.48h19.2zM1.6 11.2q-0.64 0-1.12-0.48t-0.48-1.12 0.448-1.12 1.152-0.48h19.2q0.704 0 1.152 0.48t0.448 1.12-0.48 1.12-1.12 0.48h-19.2zM20.8 20.8q0.704 0 1.152 0.48t0.448 1.12-0.48 1.12-1.12 0.48h-19.2q-0.64 0-1.12-0.48t-0.48-1.12 0.448-1.12 1.152-0.48h19.2z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 32 32"><path d="M2.667 7.027l1.707-1.693 22.293 22.293-1.693 1.707-4-4h-11.64v4l-5.333-5.333 5.333-5.333v4h8.973l-8.973-8.973v0.973h-2.667v-3.64l-4-4zM22.667 17.333h2.667v5.573l-2.667-2.667v-2.907zM22.667 6.667v-4l5.333 5.333-5.333 5.333v-4h-10.907l-2.667-2.667h13.573z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="-8 0 32 32"><path d="M14.080 4.8q2.88 0 2.88 2.048v18.24q0 2.112-2.88 2.112t-2.88-2.112v-18.24q0-2.048 2.88-2.048zM2.88 4.8q2.88 0 2.88 2.048v18.24q0 2.112-2.88 2.112t-2.88-2.112v-18.24q0-2.048 2.88-2.048z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="-8 0 32 32"><path d="M15.552 15.168q0.448 0.32 0.448 0.832 0 0.448-0.448 0.768l-13.696 8.512q-0.768 0.512-1.312 0.192t-0.544-1.28v-16.448q0-0.96 0.544-1.28t1.312 0.192z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="-1 0 32 32"><path d="M25.6 9.92q1.344 0 2.272 0.928t0.928 2.272v9.28q0 1.28-0.928 2.24t-2.272 0.96h-22.4q-1.28 0-2.24-0.96t-0.96-2.24v-9.28q0-1.344 0.96-2.272t2.24-0.928h8v-3.52l6.4 5.76-6.4 5.76v-3.52h-6.72v6.72h19.84v-6.72h-4.8v-4.48h6.080z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 32 32"><path d="M9.333 9.333h13.333v4l5.333-5.333-5.333-5.333v4h-16v8h2.667v-5.333zM22.667 22.667h-13.333v-4l-5.333 5.333 5.333 5.333v-4h16v-8h-2.667v5.333z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 38 32"><path d="M2.072 21.577c0.71-0.197 1.125-0.932 0.928-1.641-0.221-0.796-0.333-1.622-0.333-2.457 0-5.049 4.108-9.158 9.158-9.158h5.428c0.056-0.922 0.221-1.816 0.482-2.667h-5.911c-3.158 0-6.128 1.23-8.361 3.463s-3.463 5.203-3.463 8.361c0 1.076 0.145 2.143 0.431 3.171 0.164 0.59 0.7 0.976 1.284 0.976 0.117 0 0.238-0.016 0.357-0.049zM21.394 25.613h-12.409v-2.362c0-0.758-0.528-1.052-1.172-0.652l-5.685 3.522c-0.644 0.4-0.651 1.063-0.014 1.474l5.712 3.69c0.637 0.411 1.158 0.127 1.158-0.63v-2.374h12.409c3.158 0 6.128-1.23 8.361-3.463 1.424-1.424 2.44-3.148 2.99-5.029-0.985 0.368-2.033 0.606-3.125 0.691-1.492 3.038-4.619 5.135-8.226 5.135zM28.718 0c-4.985 0-9.026 4.041-9.026 9.026s4.041 9.026 9.026 9.026 9.026-4.041 9.026-9.026-4.041-9.026-9.026-9.026zM30.392 13.827h-1.728v-6.822c-0.635 0.576-1.433 1.004-2.407 1.285v-1.713c0.473-0.118 0.975-0.325 1.506-0.62 0.532-0.325 0.975-0.665 1.329-1.034h1.3v8.904z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 32 32"><path d="M9.333 9.333h13.333v4l5.333-5.333-5.333-5.333v4h-16v8h2.667v-5.333zM22.667 22.667h-13.333v-4l-5.333 5.333 5.333 5.333v-4h16v-8h-2.667v5.333zM17.333 20v-8h-1.333l-2.667 1.333v1.333h2v5.333h2z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 32 32"><path d="M22.667 4l7 6-7 6 7 6-7 6v-4h-3.653l-3.76-3.76 2.827-2.827 2.587 2.587h2v-8h-2l-12 12h-6v-4h4.347l12-12h3.653v-4zM2.667 8h6l3.76 3.76-2.827 2.827-2.587-2.587h-4.347v-4z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 32 32"><path d="M25.468 6.947c-0.326-0.172-0.724-0.151-1.030 0.057l-6.438 4.38v-3.553c0-0.371-0.205-0.71-0.532-0.884-0.326-0.172-0.724-0.151-1.030 0.057l-12 8.164c-0.274 0.186-0.438 0.496-0.438 0.827s0.164 0.641 0.438 0.827l12 8.168c0.169 0.115 0.365 0.174 0.562 0.174 0.16 0 0.321-0.038 0.468-0.116 0.327-0.173 0.532-0.514 0.532-0.884v-3.556l6.438 4.382c0.169 0.115 0.365 0.174 0.562 0.174 0.16 0 0.321-0.038 0.468-0.116 0.327-0.173 0.532-0.514 0.532-0.884v-16.333c0-0.371-0.205-0.71-0.532-0.884z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 32 32"><path d="M13.728 6.272v19.456q0 0.448-0.352 0.8t-0.8 0.32-0.8-0.32l-5.952-5.952h-4.672q-0.48 0-0.8-0.352t-0.352-0.8v-6.848q0-0.48 0.352-0.8t0.8-0.352h4.672l5.952-5.952q0.32-0.32 0.8-0.32t0.8 0.32 0.352 0.8zM20.576 16q0 1.344-0.768 2.528t-2.016 1.664q-0.16 0.096-0.448 0.096-0.448 0-0.8-0.32t-0.32-0.832q0-0.384 0.192-0.64t0.544-0.448 0.608-0.384 0.512-0.64 0.192-1.024-0.192-1.024-0.512-0.64-0.608-0.384-0.544-0.448-0.192-0.64q0-0.48 0.32-0.832t0.8-0.32q0.288 0 0.448 0.096 1.248 0.48 2.016 1.664t0.768 2.528z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 32 32"><path d="M13.728 6.272v19.456q0 0.448-0.352 0.8t-0.8 0.32-0.8-0.32l-5.952-5.952h-4.672q-0.48 0-0.8-0.352t-0.352-0.8v-6.848q0-0.48 0.352-0.8t0.8-0.352h4.672l5.952-5.952q0.32-0.32 0.8-0.32t0.8 0.32 0.352 0.8z"></path></svg>';
|
||
}, function (t, e) {
|
||
t.exports = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 32 32"><path d="M13.728 6.272v19.456q0 0.448-0.352 0.8t-0.8 0.32-0.8-0.32l-5.952-5.952h-4.672q-0.48 0-0.8-0.352t-0.352-0.8v-6.848q0-0.48 0.352-0.8t0.8-0.352h4.672l5.952-5.952q0.32-0.32 0.8-0.32t0.8 0.32 0.352 0.8zM20.576 16q0 1.344-0.768 2.528t-2.016 1.664q-0.16 0.096-0.448 0.096-0.448 0-0.8-0.32t-0.32-0.832q0-0.384 0.192-0.64t0.544-0.448 0.608-0.384 0.512-0.64 0.192-1.024-0.192-1.024-0.512-0.64-0.608-0.384-0.544-0.448-0.192-0.64q0-0.48 0.32-0.832t0.8-0.32q0.288 0 0.448 0.096 1.248 0.48 2.016 1.664t0.768 2.528zM25.152 16q0 2.72-1.536 5.056t-4 3.36q-0.256 0.096-0.448 0.096-0.48 0-0.832-0.352t-0.32-0.8q0-0.704 0.672-1.056 1.024-0.512 1.376-0.8 1.312-0.96 2.048-2.4t0.736-3.104-0.736-3.104-2.048-2.4q-0.352-0.288-1.376-0.8-0.672-0.352-0.672-1.056 0-0.448 0.32-0.8t0.8-0.352q0.224 0 0.48 0.096 2.496 1.056 4 3.36t1.536 5.056zM29.728 16q0 4.096-2.272 7.552t-6.048 5.056q-0.224 0.096-0.448 0.096-0.48 0-0.832-0.352t-0.32-0.8q0-0.64 0.704-1.056 0.128-0.064 0.384-0.192t0.416-0.192q0.8-0.448 1.44-0.896 2.208-1.632 3.456-4.064t1.216-5.152-1.216-5.152-3.456-4.064q-0.64-0.448-1.44-0.896-0.128-0.096-0.416-0.192t-0.384-0.192q-0.704-0.416-0.704-1.056 0-0.448 0.32-0.8t0.832-0.352q0.224 0 0.448 0.096 3.776 1.632 6.048 5.056t2.272 7.552z"></path></svg>';
|
||
}, function (t, e, i) {
|
||
|
||
i.d(e, "a", function () {
|
||
return a;
|
||
}), i.d(e, "b", function () {
|
||
return r;
|
||
});
|
||
var a = function () {
|
||
var t = this,
|
||
e = t.$createElement,
|
||
i = t._self._c || e;
|
||
return i("svg", {
|
||
style: t.style,
|
||
attrs: {
|
||
"xmlns:xlink": "http://www.w3.org/1999/xlink",
|
||
height: "100%",
|
||
version: "1.1",
|
||
viewBox: t.svg.viewBox,
|
||
width: "100%"
|
||
}
|
||
}, [i("use", {
|
||
attrs: {
|
||
"xlink:href": "#aplayer-${type}"
|
||
}
|
||
}), t._v(" "), i("path", {
|
||
staticClass: "aplayer-fill",
|
||
attrs: {
|
||
d: t.svg.d
|
||
}
|
||
})]);
|
||
},
|
||
r = [];
|
||
}, function (t, e, i) {
|
||
|
||
i.d(e, "a", function () {
|
||
return a;
|
||
}), i.d(e, "b", function () {
|
||
return r;
|
||
});
|
||
var a = function () {
|
||
var t = this,
|
||
e = t.$createElement,
|
||
i = t._self._c || e;
|
||
return i("button", {
|
||
staticClass: "aplayer-icon",
|
||
attrs: {
|
||
type: "button"
|
||
}
|
||
}, [i("icon", {
|
||
attrs: {
|
||
type: t.icon
|
||
}
|
||
})], 1);
|
||
},
|
||
r = [];
|
||
}, function (t, e, i) {
|
||
|
||
i.d(e, "a", function () {
|
||
return a;
|
||
}), i.d(e, "b", function () {
|
||
return r;
|
||
});
|
||
var a = function () {
|
||
var t = this,
|
||
e = t.$createElement,
|
||
i = t._self._c || e;
|
||
return i("div", {
|
||
staticClass: "aplayer-pic",
|
||
style: t.currentPicStyleObj,
|
||
on: {
|
||
mousedown: t.onDragBegin,
|
||
click: t.onClick
|
||
}
|
||
}, [i("div", {
|
||
staticClass: "aplayer-button",
|
||
class: t.playing ? "aplayer-pause" : "aplayer-play"
|
||
}, [i("icon-button", {
|
||
class: t.playing ? "aplayer-icon-pause" : "aplayer-icon-play",
|
||
attrs: {
|
||
icon: t.playing ? "pause" : "play"
|
||
}
|
||
})], 1)]);
|
||
},
|
||
r = [];
|
||
}, function (t, e, i) {
|
||
|
||
function a(t) {
|
||
i(47);
|
||
}
|
||
var r = i(10),
|
||
n = i(49),
|
||
o = i(0),
|
||
s = a,
|
||
l = Object(o.a)(r.a, n.a, n.b, !1, s, null, null);
|
||
e.a = l.exports;
|
||
}, function (t, e, i) {
|
||
var a = i(48);
|
||
"string" == typeof a && (a = [[t.i, a, ""]]), a.locals && (t.exports = a.locals);
|
||
var r = i(2).default;
|
||
r("7b9d1402", a, !0, {});
|
||
}, function (t, e, i) {
|
||
e = t.exports = i(1)(!1), e.push([t.i, ".aplayer-list{overflow:hidden}.aplayer-list.slide-v-enter-active,.aplayer-list.slide-v-leave-active{-webkit-transition:height .5s ease;transition:height .5s ease;will-change:height}.aplayer-list.slide-v-enter,.aplayer-list.slide-v-leave-to{height:0!important}.aplayer-list ol{list-style-type:none;margin:0;padding:0;overflow-y:auto}.aplayer-list ol::-webkit-scrollbar{width:5px}.aplayer-list ol::-webkit-scrollbar-track{background-color:#f9f9f9}.aplayer-list ol::-webkit-scrollbar-thumb{border-radius:3px;background-color:#eee}.aplayer-list ol::-webkit-scrollbar-thumb:hover{background-color:#ccc}.aplayer-list ol:hover li.aplayer-list-light:not(:hover){background-color:inherit;-webkit-transition:inherit;transition:inherit}.aplayer-list ol:not(:hover) li.aplayer-list-light{-webkit-transition:background-color .6s ease;transition:background-color .6s ease}.aplayer-list ol li{position:relative;height:32px;line-height:32px;padding:0 15px;font-size:12px;border-top:1px solid #e9e9e9;cursor:pointer;-webkit-transition:all .2s ease;transition:all .2s ease;overflow:hidden;margin:0;text-align:start;display:-webkit-box;display:-ms-flexbox;display:flex}.aplayer-list ol li:first-child{border-top:none}.aplayer-list ol li.aplayer-list-light,.aplayer-list ol li:hover{background:#efefef}.aplayer-list ol li.aplayer-list-light .aplayer-list-cur{display:inline-block}.aplayer-list ol li .aplayer-list-cur{display:none;width:3px;height:22px;position:absolute;left:0;top:5px;-webkit-transition:background-color .3s;transition:background-color .3s}.aplayer-list ol li .aplayer-list-index{color:#666;margin-right:12px}.aplayer-list ol li .aplayer-list-title{-webkit-box-flex:1;-ms-flex-positive:1;flex-grow:1}.aplayer-list ol li .aplayer-list-author{-ms-flex-negative:0;flex-shrink:0;color:#666;float:right}", ""]);
|
||
}, function (t, e, i) {
|
||
|
||
i.d(e, "a", function () {
|
||
return a;
|
||
}), i.d(e, "b", function () {
|
||
return r;
|
||
});
|
||
var a = function () {
|
||
var t = this,
|
||
e = t.$createElement,
|
||
i = t._self._c || e;
|
||
return i("transition", {
|
||
attrs: {
|
||
name: "slide-v"
|
||
}
|
||
}, [i("div", {
|
||
directives: [{
|
||
name: "show",
|
||
rawName: "v-show",
|
||
value: t.show,
|
||
expression: "show"
|
||
}],
|
||
ref: "list",
|
||
staticClass: "aplayer-list",
|
||
style: t.listHeightStyle
|
||
}, [i("ol", {
|
||
ref: "ol",
|
||
style: t.listHeightStyle
|
||
}, t._l(t.musicList, function (e, a) {
|
||
return i("li", {
|
||
key: a,
|
||
class: {
|
||
"aplayer-list-light": e === t.currentMusic
|
||
},
|
||
on: {
|
||
click: function (i) {
|
||
t.$emit("selectsong", e);
|
||
}
|
||
}
|
||
}, [i("span", {
|
||
staticClass: "aplayer-list-cur",
|
||
style: {
|
||
background: t.theme
|
||
}
|
||
}), t._v(" "), i("span", {
|
||
staticClass: "aplayer-list-index"
|
||
}, [t._v(t._s(a + 1))]), t._v(" "), i("span", {
|
||
staticClass: "aplayer-list-title"
|
||
}, [t._v(t._s(e.title || "Untitled"))]), t._v(" "), i("span", {
|
||
staticClass: "aplayer-list-author"
|
||
}, [t._v(t._s(e.artist || e.author || "Unknown"))])]);
|
||
}))])]);
|
||
},
|
||
r = [];
|
||
}, function (t, e, i) {
|
||
|
||
function a(t) {
|
||
i(51);
|
||
}
|
||
var r = i(11),
|
||
n = i(61),
|
||
o = i(0),
|
||
s = a,
|
||
l = Object(o.a)(r.a, n.a, n.b, !1, s, null, null);
|
||
e.a = l.exports;
|
||
}, function (t, e, i) {
|
||
var a = i(52);
|
||
"string" == typeof a && (a = [[t.i, a, ""]]), a.locals && (t.exports = a.locals);
|
||
var r = i(2).default;
|
||
r("c97c1d8a", a, !0, {});
|
||
}, function (t, e, i) {
|
||
e = t.exports = i(1)(!1), e.push([t.i, ".aplayer-controller,.aplayer-controller .aplayer-time{display:-webkit-box;display:-ms-flexbox;display:flex;-webkit-box-align:center;-ms-flex-align:center;align-items:center;position:relative}.aplayer-controller .aplayer-time{height:17px;color:#999;font-size:11px;padding-left:7px}.aplayer-controller .aplayer-time .aplayer-volume-wrap{margin-left:4px;margin-right:4px}.aplayer-controller .aplayer-time .aplayer-icon{cursor:pointer;-webkit-transition:all .2s ease;transition:all .2s ease;margin-left:4px}.aplayer-controller .aplayer-time .aplayer-icon.inactive{opacity:.3}.aplayer-controller .aplayer-time .aplayer-icon .aplayer-fill{fill:#666}.aplayer-controller .aplayer-time .aplayer-icon:hover .aplayer-fill{fill:#000}.aplayer-controller .aplayer-time .aplayer-icon.aplayer-icon-menu{display:none}.aplayer-controller .aplayer-time .aplayer-volume-wrap+.aplayer-icon{margin-left:0}.aplayer-controller .aplayer-time.aplayer-time-narrow .aplayer-icon-menu,.aplayer-controller .aplayer-time.aplayer-time-narrow .aplayer-icon-mode{display:none}", ""]);
|
||
}, function (t, e, i) {
|
||
|
||
function a(t) {
|
||
i(54);
|
||
}
|
||
var r = i(12),
|
||
n = i(56),
|
||
o = i(0),
|
||
s = a,
|
||
l = Object(o.a)(r.a, n.a, n.b, !1, s, null, null);
|
||
e.a = l.exports;
|
||
}, function (t, e, i) {
|
||
var a = i(55);
|
||
"string" == typeof a && (a = [[t.i, a, ""]]), a.locals && (t.exports = a.locals);
|
||
var r = i(2).default;
|
||
r("6f66d8c5", a, !0, {});
|
||
}, function (t, e, i) {
|
||
e = t.exports = i(1)(!1), e.push([t.i, ".aplayer-bar-wrap{margin:0 0 0 5px;padding:4px 0;cursor:pointer;-webkit-box-flex:1;-ms-flex:1;flex:1}.aplayer-bar-wrap .aplayer-bar{position:relative;height:2px;width:100%;background:#cdcdcd}.aplayer-bar-wrap .aplayer-bar .aplayer-loaded{position:absolute;left:0;top:0;bottom:0;background:#aaa;height:2px;-webkit-transition:all .5s ease;transition:all .5s ease;will-change:width}.aplayer-bar-wrap .aplayer-bar .aplayer-played{position:absolute;left:0;top:0;bottom:0;height:2px;-webkit-transition:background-color .3s;transition:background-color .3s;will-change:width}.aplayer-bar-wrap .aplayer-bar .aplayer-played .aplayer-thumb{position:absolute;top:0;right:5px;margin-top:-5px;margin-right:-10px;width:10px;height:10px;border:1px solid;-webkit-transform:scale(.8);transform:scale(.8);will-change:transform;-webkit-transition:background-color .3s,border-color .3s,-webkit-transform .3s;transition:background-color .3s,border-color .3s,-webkit-transform .3s;transition:transform .3s,background-color .3s,border-color .3s;transition:transform .3s,background-color .3s,border-color .3s,-webkit-transform .3s;border-radius:50%;background:#fff;cursor:pointer;overflow:hidden}.aplayer-bar-wrap .aplayer-bar .aplayer-played .aplayer-thumb:hover{-webkit-transform:scale(1);transform:scale(1)}.aplayer-bar-wrap .aplayer-bar .aplayer-played .aplayer-thumb .aplayer-loading-icon{display:none;width:100%;height:100%}.aplayer-bar-wrap .aplayer-bar .aplayer-played .aplayer-thumb .aplayer-loading-icon svg{position:absolute;-webkit-animation:spin 1s linear infinite;animation:spin 1s linear infinite;fill:#fff}.aplayer-loading .aplayer-bar-wrap .aplayer-bar .aplayer-thumb .aplayer-loading-icon{display:block}.aplayer-loading .aplayer-info .aplayer-controller .aplayer-bar-wrap .aplayer-bar .aplayer-played .aplayer-thumb{-webkit-transform:scale(1);transform:scale(1)}@-webkit-keyframes spin{0%{-webkit-transform:rotate(0);transform:rotate(0)}to{-webkit-transform:rotate(1turn);transform:rotate(1turn)}}@keyframes spin{0%{-webkit-transform:rotate(0);transform:rotate(0)}to{-webkit-transform:rotate(1turn);transform:rotate(1turn)}}", ""]);
|
||
}, function (t, e, i) {
|
||
|
||
i.d(e, "a", function () {
|
||
return a;
|
||
}), i.d(e, "b", function () {
|
||
return r;
|
||
});
|
||
var a = function () {
|
||
var t = this,
|
||
e = t.$createElement,
|
||
i = t._self._c || e;
|
||
return i("div", {
|
||
ref: "barWrap",
|
||
staticClass: "aplayer-bar-wrap",
|
||
on: {
|
||
mousedown: t.onThumbMouseDown,
|
||
touchstart: t.onThumbTouchStart
|
||
}
|
||
}, [i("div", {
|
||
staticClass: "aplayer-bar"
|
||
}, [i("div", {
|
||
staticClass: "aplayer-loaded",
|
||
style: {
|
||
width: 100 * t.loadProgress + "%"
|
||
}
|
||
}), t._v(" "), i("div", {
|
||
staticClass: "aplayer-played",
|
||
style: {
|
||
width: 100 * t.playProgress + "%",
|
||
background: t.theme
|
||
}
|
||
}, [i("span", {
|
||
ref: "thumb",
|
||
staticClass: "aplayer-thumb",
|
||
style: {
|
||
borderColor: t.theme,
|
||
backgroundColor: t.thumbHovered ? t.theme : "#fff"
|
||
},
|
||
on: {
|
||
mouseover: function (e) {
|
||
t.thumbHovered = !0;
|
||
},
|
||
mouseout: function (e) {
|
||
t.thumbHovered = !1;
|
||
}
|
||
}
|
||
}, [i("span", {
|
||
staticClass: "aplayer-loading-icon",
|
||
style: {
|
||
backgroundColor: t.theme
|
||
}
|
||
}, [i("icon", {
|
||
attrs: {
|
||
type: "loading"
|
||
}
|
||
})], 1)])])])]);
|
||
},
|
||
r = [];
|
||
}, function (t, e, i) {
|
||
|
||
function a(t) {
|
||
i(58);
|
||
}
|
||
var r = i(13),
|
||
n = i(60),
|
||
o = i(0),
|
||
s = a,
|
||
l = Object(o.a)(r.a, n.a, n.b, !1, s, null, null);
|
||
e.a = l.exports;
|
||
}, function (t, e, i) {
|
||
var a = i(59);
|
||
"string" == typeof a && (a = [[t.i, a, ""]]), a.locals && (t.exports = a.locals);
|
||
var r = i(2).default;
|
||
r("28c86b36", a, !0, {});
|
||
}, function (t, e, i) {
|
||
e = t.exports = i(1)(!1), e.push([t.i, '.aplayer-volume-wrap{position:relative;cursor:pointer;z-index:0}.aplayer-volume-wrap:hover .aplayer-volume-bar-wrap{display:block}.aplayer-volume-wrap .aplayer-volume-bar-wrap{display:none;position:absolute;bottom:15px;left:-4px;right:-4px;height:40px;z-index:-1;-webkit-transition:all .2s ease;transition:all .2s ease}.aplayer-volume-wrap .aplayer-volume-bar-wrap:after{content:"";position:absolute;bottom:-16px;left:0;right:0;height:62px;background-color:#fff;-webkit-box-shadow:0 0 2px 0 rgba(0,0,0,.07),0 0 5px 0 rgba(0,0,0,.1);box-shadow:0 0 2px 0 rgba(0,0,0,.07),0 0 5px 0 rgba(0,0,0,.1)}.aplayer-volume-wrap .aplayer-volume-bar-wrap .aplayer-volume-bar{position:absolute;bottom:0;left:11px;width:5px;height:40px;background:#aaa;border-radius:2.5px;overflow:hidden;z-index:1}.aplayer-volume-wrap .aplayer-volume-bar-wrap .aplayer-volume-bar .aplayer-volume{position:absolute;bottom:0;left:0;right:0;-webkit-transition:height .1s ease,background-color .3s;transition:height .1s ease,background-color .3s;will-change:height}', ""]);
|
||
}, function (t, e, i) {
|
||
|
||
i.d(e, "a", function () {
|
||
return a;
|
||
}), i.d(e, "b", function () {
|
||
return r;
|
||
});
|
||
var a = function () {
|
||
var t = this,
|
||
e = t.$createElement,
|
||
i = t._self._c || e;
|
||
return i("div", {
|
||
staticClass: "aplayer-volume-wrap"
|
||
}, [i("icon-button", {
|
||
class: "aplayer-icon-" + t.volumeIcon,
|
||
attrs: {
|
||
icon: t.volumeIcon
|
||
},
|
||
nativeOn: {
|
||
click: function (e) {
|
||
t.$emit("togglemute");
|
||
}
|
||
}
|
||
}), t._v(" "), i("div", {
|
||
staticClass: "aplayer-volume-bar-wrap",
|
||
on: {
|
||
mousedown: t.onBarMouseDown
|
||
}
|
||
}, [i("div", {
|
||
ref: "bar",
|
||
staticClass: "aplayer-volume-bar"
|
||
}, [i("div", {
|
||
staticClass: "aplayer-volume",
|
||
style: {
|
||
height: t.muted ? 0 : Math.trunc(100 * t.volume) + "%",
|
||
background: t.theme
|
||
}
|
||
})])])], 1);
|
||
},
|
||
r = [];
|
||
}, function (t, e, i) {
|
||
|
||
i.d(e, "a", function () {
|
||
return a;
|
||
}), i.d(e, "b", function () {
|
||
return r;
|
||
});
|
||
var a = function () {
|
||
var t = this,
|
||
e = t.$createElement,
|
||
i = t._self._c || e;
|
||
return i("div", {
|
||
staticClass: "aplayer-controller"
|
||
}, [i("v-progress", {
|
||
attrs: {
|
||
loadProgress: t.loadProgress,
|
||
playProgress: t.playProgress,
|
||
theme: t.theme
|
||
},
|
||
on: {
|
||
dragbegin: function (e) {
|
||
return t.$emit("dragbegin", e);
|
||
},
|
||
dragend: function (e) {
|
||
return t.$emit("dragend", e);
|
||
},
|
||
dragging: function (e) {
|
||
return t.$emit("dragging", e);
|
||
}
|
||
}
|
||
}), t._v(" "), i("div", {
|
||
staticClass: "aplayer-time"
|
||
}, [i("div", {
|
||
staticClass: "aplayer-time-inner"
|
||
}, [t._v("\n - "), i("span", {
|
||
staticClass: "aplayer-ptime"
|
||
}, [t._v(t._s(t.secondToTime(t.stat.playedTime)))]), t._v(" / "), i("span", {
|
||
staticClass: "aplayer-dtime"
|
||
}, [t._v(t._s(t.secondToTime(t.stat.duration)))])]), t._v(" "), t.$parent.isMobile ? t._e() : i("volume", {
|
||
attrs: {
|
||
volume: t.volume,
|
||
theme: t.theme,
|
||
muted: t.muted
|
||
},
|
||
on: {
|
||
togglemute: function (e) {
|
||
t.$emit("togglemute");
|
||
},
|
||
setvolume: function (e) {
|
||
return t.$emit("setvolume", e);
|
||
}
|
||
}
|
||
}), t._v(" "), i("icon-button", {
|
||
staticClass: "aplayer-icon-mode",
|
||
class: {
|
||
inactive: !t.shuffle
|
||
},
|
||
attrs: {
|
||
icon: "shuffle"
|
||
},
|
||
nativeOn: {
|
||
click: function (e) {
|
||
t.$emit("toggleshuffle");
|
||
}
|
||
}
|
||
}), t._v(" "), i("icon-button", {
|
||
staticClass: "aplayer-icon-mode",
|
||
class: {
|
||
inactive: "no-repeat" === t.repeat
|
||
},
|
||
attrs: {
|
||
icon: "repeat-one" === t.repeat ? "repeat-one" : "repeat-all"
|
||
},
|
||
nativeOn: {
|
||
click: function (e) {
|
||
t.$emit("nextmode");
|
||
}
|
||
}
|
||
}), t._v(" "), i("icon-button", {
|
||
staticClass: "aplayer-icon-menu",
|
||
class: {
|
||
inactive: !t.$parent.showList
|
||
},
|
||
attrs: {
|
||
icon: "menu"
|
||
},
|
||
nativeOn: {
|
||
click: function (e) {
|
||
t.$emit("togglelist");
|
||
}
|
||
}
|
||
})], 1)], 1);
|
||
},
|
||
r = [];
|
||
}, function (t, e, i) {
|
||
|
||
function a(t) {
|
||
i(63);
|
||
}
|
||
var r = i(14),
|
||
n = i(65),
|
||
o = i(0),
|
||
s = a,
|
||
l = Object(o.a)(r.a, n.a, n.b, !1, s, null, null);
|
||
e.a = l.exports;
|
||
}, function (t, e, i) {
|
||
var a = i(64);
|
||
"string" == typeof a && (a = [[t.i, a, ""]]), a.locals && (t.exports = a.locals);
|
||
var r = i(2).default;
|
||
r("229083b6", a, !0, {});
|
||
}, function (t, e, i) {
|
||
e = t.exports = i(1)(!1), e.push([t.i, '.aplayer-lrc{position:relative;height:30px;text-align:center;overflow:hidden;margin-bottom:7px}.aplayer-lrc:before{top:0;height:10%;background:-webkit-gradient(linear,left top,left bottom,from(#fff),to(hsla(0,0%,100%,0)));background:linear-gradient(180deg,#fff 0,hsla(0,0%,100%,0));filter:progid:DXImageTransform.Microsoft.gradient(startColorstr="#ffffff",endColorstr="#00ffffff",GradientType=0)}.aplayer-lrc:after,.aplayer-lrc:before{position:absolute;z-index:1;display:block;overflow:hidden;width:100%;content:" "}.aplayer-lrc:after{bottom:0;height:33%;background:-webkit-gradient(linear,left top,left bottom,from(hsla(0,0%,100%,0)),to(hsla(0,0%,100%,.8)));background:linear-gradient(180deg,hsla(0,0%,100%,0) 0,hsla(0,0%,100%,.8));filter:progid:DXImageTransform.Microsoft.gradient(startColorstr="#00ffffff",endColorstr="#ccffffff",GradientType=0)}.aplayer-lrc p{font-size:12px;color:#666;line-height:16px;height:16px;padding:0;margin:0;-webkit-transition:all .5s ease-out;transition:all .5s ease-out;opacity:.4;overflow:hidden}.aplayer-lrc p.aplayer-lrc-current{opacity:1;overflow:visible;height:auto}.aplayer-lrc .aplayer-lrc-contents{width:100%;-webkit-transition:all .5s ease-out;transition:all .5s ease-out;-webkit-user-select:text;-moz-user-select:text;-ms-user-select:text;user-select:text;cursor:default}', ""]);
|
||
}, function (t, e, i) {
|
||
|
||
i.d(e, "a", function () {
|
||
return a;
|
||
}), i.d(e, "b", function () {
|
||
return r;
|
||
});
|
||
var a = function () {
|
||
var t = this,
|
||
e = t.$createElement,
|
||
i = t._self._c || e;
|
||
return i("div", {
|
||
staticClass: "aplayer-lrc"
|
||
}, [i("div", {
|
||
staticClass: "aplayer-lrc-contents",
|
||
style: t.transformStyle
|
||
}, t._l(t.lrcLines, function (e, a) {
|
||
return i("p", {
|
||
key: a,
|
||
class: {
|
||
"aplayer-lrc-current": a === t.currentLineIndex
|
||
}
|
||
}, [t._v("\n " + t._s(e[1]) + "\n ")]);
|
||
}))]);
|
||
},
|
||
r = [];
|
||
}, function (t, i) {
|
||
if (void 0 === e) {
|
||
var a = new Error('Cannot find module "undefined"');
|
||
throw a.code = "MODULE_NOT_FOUND", a;
|
||
}
|
||
t.exports = e;
|
||
}, function (t, e, i) {
|
||
|
||
i.d(e, "a", function () {
|
||
return a;
|
||
}), i.d(e, "b", function () {
|
||
return r;
|
||
});
|
||
var a = function () {
|
||
var t = this,
|
||
e = t.$createElement,
|
||
i = t._self._c || e;
|
||
return i("div", {
|
||
staticClass: "aplayer",
|
||
class: {
|
||
"aplayer-narrow": t.isMiniMode,
|
||
"aplayer-withlist": !t.isMiniMode && t.musicList.length > 0,
|
||
"aplayer-withlrc": !t.isMiniMode && (!!t.$slots.display || t.shouldShowLrc),
|
||
"aplayer-float": t.isFloatMode,
|
||
"aplayer-loading": t.isPlaying && t.isLoading
|
||
},
|
||
style: t.floatStyleObj
|
||
}, [i("div", {
|
||
staticClass: "aplayer-body"
|
||
}, [i("thumbnail", {
|
||
attrs: {
|
||
pic: t.currentMusic.pic,
|
||
playing: t.isPlaying,
|
||
"enable-drag": t.isFloatMode,
|
||
theme: t.currentTheme
|
||
},
|
||
on: {
|
||
toggleplay: t.toggle,
|
||
dragbegin: t.onDragBegin,
|
||
dragging: t.onDragAround
|
||
}
|
||
}), t._v(" "), i("div", {
|
||
directives: [{
|
||
name: "show",
|
||
rawName: "v-show",
|
||
value: !t.isMiniMode,
|
||
expression: "!isMiniMode"
|
||
}],
|
||
staticClass: "aplayer-info"
|
||
}, [i("div", {
|
||
staticClass: "aplayer-music"
|
||
}, [i("span", {
|
||
staticClass: "aplayer-title"
|
||
}, [t._v(t._s(t.currentMusic.title || "Untitled"))]), t._v(" "), i("span", {
|
||
staticClass: "aplayer-author"
|
||
}, [t._v(t._s(t.currentMusic.artist || t.currentMusic.author || "Unknown"))])]), t._v(" "), t._t("display", [t.shouldShowLrc ? i("lyrics", {
|
||
attrs: {
|
||
"current-music": t.currentMusic,
|
||
"play-stat": t.playStat
|
||
}
|
||
}) : t._e()], {
|
||
currentMusic: t.currentMusic,
|
||
playStat: t.playStat
|
||
}), t._v(" "), i("controls", {
|
||
attrs: {
|
||
shuffle: t.shouldShuffle,
|
||
repeat: t.repeatMode,
|
||
stat: t.playStat,
|
||
volume: t.audioVolume,
|
||
muted: t.isAudioMuted,
|
||
theme: t.currentTheme
|
||
},
|
||
on: {
|
||
toggleshuffle: function (e) {
|
||
t.shouldShuffle = !t.shouldShuffle;
|
||
},
|
||
togglelist: function (e) {
|
||
t.showList = !t.showList;
|
||
},
|
||
togglemute: t.toggleMute,
|
||
setvolume: t.setAudioVolume,
|
||
dragbegin: t.onProgressDragBegin,
|
||
dragend: t.onProgressDragEnd,
|
||
dragging: t.onProgressDragging,
|
||
nextmode: t.setNextMode
|
||
}
|
||
})], 2)], 1), t._v(" "), i("audio", {
|
||
ref: "audio"
|
||
}), t._v(" "), i("music-list", {
|
||
attrs: {
|
||
show: t.showList && !t.isMiniMode,
|
||
"current-music": t.currentMusic,
|
||
"music-list": t.musicList,
|
||
"play-index": t.playIndex,
|
||
listmaxheight: t.listmaxheight || t.listMaxHeight,
|
||
theme: t.currentTheme
|
||
},
|
||
on: {
|
||
selectsong: t.onSelectSong
|
||
}
|
||
})], 1);
|
||
},
|
||
r = [];
|
||
}]).default;
|
||
});
|
||
});
|
||
// Unwrap the CommonJS default export of the bundled vue-aplayer build above.
var vueAplayer = unwrapExports(vueAplayer_min);

// Property access left in place by the bundler; the named VueAPlayer export is not used here.
vueAplayer_min.VueAPlayer;

var playIcon = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAABxBJREFUeF7dW2tsFFUU/s5sCygo7W4JwfggkZAYNNYnqBB31iI2jYnERwyiRuILFcJuwZQEAgYfDbSzii8CYqDlYRQhUQNIS3cFkRbERxXTYMCCgGI7U0ssXbbbOWa2bu1ruzOzM8suN9lfe+53vvPdc2fu4wwhBc1ddjYPjnA+VM4XBMdogHPAyGFoP2onQgsxt6is/g1QI1RuRFZWY9Cb22g3PbLLgcevFLKqFjKRm4AbTPth3gjQtmHZqNox13XWNE6cjpYK4C5TJhOpj4BQCNA4i8kqALZAVVcH5o86ZBW2JQKIkjID4BkAiqwilgBntVVCJCWA6FeKAC4BY3KKAu/lhsGlWWGhtLrE2WrWvykBCt5Uru5ktQRMs806tqofAT8xqDTgc24yg2lYgHvKW0SV1FUAxptxaFsfxocRYdiivd7hfxjxYUgAj7/5cWaqMOIgxbYNTFgc9Lq26PWrWwBRkr9I4UNOL/8B7ZhoUdDrfE0PiC4BREmuAlCgBzBdbJjopaDX+W4iPgkFEP3yWjBmJQJKy/+ZnwwU5w06ZQcVwF3etJRIWJKWwekkRcC0Gp9rVzzzuAJ4pObpDNqq0086m9ULwtB7d88bcWYgkgMK4Fl+5lrOzv4SzNemc2T6uVFlwOd8QrcAbkneSsB0/Q7S35KI59Z4897uy7RfBrjLlZlEXJn+IRlkyDjtECJTqr2jj/Xs2UuAwpU8NBRp+Qbgmw3CZ4r5moDP9WxcATySUsLgNzIlGlM8mYoCxc7tsb7dGVBQqozsHML1AK42BZwhnYiwrsbreqqfAJ5yZTYTv5chcSRDs41JuD523NadAaJf3mv3vn786CxMGJOF+lMdONrUmUwQSfallwM+5woNJCqAWCZPg4CdSaIO2n1J0QjcPX5It82+o2FU1oVw5EzETrfxsA8EfK6J3QLYveTtG3yMVaiDUVnXjs0HQykXgRl3BItdtV0ZICkBgN12sfj8xVwMHxJ/2/Hd7x3YUBvCDyc77KLQD5dApTU+50IqXClfHorA9JlaIsbavF814/JEZtH/Nx5oj06LcIR12Sdp1BDwua4jtyTfR8COJMHids+/MhvSw5fphm/4MxKdFvuP2Z8NHecdTnJLTc8ThPd1MzRoaFSAGPy2H0KorA3h73bVoEcD5qp6K3kk5Q3WjrZtamYF0OicUDqj2bC7IWwLO2I8ogmwmcGP2uIBQDICxDjtPHwey3e1WU6RmUpI9Ct7wWzbxYYVAmiRf3wohFV7zlkqAhGVkSjJPwOYYClyDzCrBNAgZ1W0olG2bgVJhA9I9MunwLgiEwQoq2rD9p/PW0aVgC1aBmh5dYllqH2ArMyA13f8g2prH4jVmgAtAHIyQYAn1rXiZIt1U4BBn5MoKd8DnJ/uAqyvbcf6/e3W0iSqII9f2cbMD1iL/D9aslNAWxRX2BF811b4LfL4ZT8z5qWjANomSQu+/qQ9W2Zm9RVNgKeZsSadBIioXaO+oc7ilO8bpMNRQGJZ0y0QhG/TRYCDx7tG/fBpe0a9R5yhYVnOHOo6Cle0N4Etr8JxoxxYPXNkQn3PRxjag+6j1B2OVAd8rqn/HYjI+wDcmZClSYPPXsjFiKHxD0Rqf+sadW0rnKqmzf9g8ailUVYX6kisLczRwD85lPojMXTypMCCvLquDFjRPBEOqrVT/YEORbXgf/3LuoWNbv6EuoDXNUmz73ksXgtG9KTUrqatCW68KgtfHQlbuqkxyjeW/r0EsHsaGCVppz3DcVvQlxN983VnwDSp1RlGR50NJa52xmIG+9OAz/VQrGOvR7PH3zKHWV1pBjVj+pDwYMCb21350u/dJPpl258FF0osIhys8bpu7+m/f4GEX3mMmDdcKJJ2+mWB7g/Oc2r1jt1twNWJW5K3E7SS94unaYXVQV/ewr4RDSiA6FfuAngXGJdeFBIQ9nBr/T3BpWK/pWbc9alban6JQP2KijJQEAUseALFuT8OxH3QQkmxXF4LytAq0f+iJVW4u2Z+7p54A5ewVNYtNX9HoJsycOTBRDODXufGwbgnFEDrLEpySq5rrRSZGWXBYteCRJi6BNBAMunNQKB3anzOOYmC77UU1mMsSspygBOqqgfLLhsCXq3xuRbrxdedATFAUZK9AJYBGK7XSYrsDH8tYjgDukVY0TyRBVpGhKkpCm5wNya/FzItQIxNdAstCM/Yebc4WOTazQ4ErO67vDUyKIanQF/wKf62MQ713HOpFMKKwGNxJC1ADEgTIhvhh1RWC23aR7SCeBN3CpuC851fGxnlpNcBRp25pdZxAqmFAHuYo+V3Zi9fTwHYycD+LIGqquc5TxjlksjesgwYzJF7RZMbAtyCIFzDTGMBHgtgLAgtYGh3EtqviUCNKtTjADc6OtVfdi8YrRVv29r+BTKBrQtOiFe6AAAAAElFTkSuQmCC";
var pauseIcon = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAABrJJREFUeF7lm21sU1UYx//PXbcJQ0d7mREkuixqlqiwRA0MQdqymIAoIOAHooD4Fkkc6R0TSCCMQALK1js00YhRXNTEOExQJBOBtgg6UFFQTAxgggYRYe2GA2EvvY+5hc52Xdt7u1O4y863pv/zvPzuyzn3POcQrkFzqmfLwFQKULEE6XYGFwMoAmAHwQ6GHcBJgE4S8UlN035nSdq51yM3Zzs8yoaDivrQbd1aeJqEnMkMdgK4JUM/IQb2SCTt01hqCiiFJzK0k7SbUADO2tBEytHmgWkegELRwTLQJJHU1IW8rfs8BX+JsC8EgLM+NB0anifwoyKCSmuDcJo17e2wNPSt/oLoF4BrnnhvMjoIYHPAI69JCy2JICMAk9SLI23a5XUgLMrUsch+zNhFGq/yV484aNauaQBONTiHGGsBlJp1lmX9RQCr/IqsmvFjCoDb27KSQXryFm70vl9xzDcaoGEAbrXlJWZ6zajh66z7ya/IY43EYAiA0xusIqDWiEErafyKnDa/tAK3GlzKjI1WSsxoLAzeHlBGPJZKnxKAq651PkhrMOrQijrSuNK3dMTryWJLCsBdF5zAEnaCMcyKiZmJiUmaFfDYt/XVp08Azhq/jQrH7AHjITOOLKtlnGfSKgJK0feJc6k+onZ6W9YTaLllE8ogMP07IqDI09IC0Ke3pPH2DHxYvgsTPRnwOD6MDTThEXCrwW+Z8YDls8kkQMJBv0cenxSAS219HKx9kontVH3Gjs6FTQKOnu5GRzenNJ9vI9wzyoZuDThyqkt0KCCSKn0ee8+oEHcHuLzBrQBmi/TqnXsTykbbIiaPnw3jjb3/Jk1MB7V48lDceXNORH/4VDeUxn9EhgOAT+Qhd9xOpTCkG+4B4PS23U8IfyfS2+S78rD6kfhR9NMjHdjk079bEtsSdwFmjM2P+2PNjgvYe6xTZFhgQk30E7oHgMsbfBVAtUhPC8qHYMH4IXEmU13V2Lsl2qnhwCU0NF8SGZZ+2XveBREAD288U9CVk3sUgL5YKaxZFoCeYZjH6+sHEQDOupaFRLRFWOZXDVkZALO2JlBVVBMB4PIGdwBImCT0F4iVAQD4xq/ID14BoAb/BGNUfxPu3d/SAJgvc3u7nSrUtpIwh38Tnbxuz9IA9AGRUU7ZnPpaHgC0F8lZd66GSFo9GO8AItST2xusZ2DJ4ARA28ilhhrAbHgV1Qwoqz8CAB0mpzf0WbZKWtYHgDZyeYO7AFSYubJGtQMAwCX9HdDIwByjSZnRWR4A4TS51eDbzHjWTGJGtZYHAPxCbjW0kZmXGk3KjM7yAIj2k7MutIyIN5hJzKjW6gAI9BG564JzmfCx0aTM6AYAgA3kqj13HyQpYb3cTKLJtFYHwPpUeOL6NntufjiyPia6WR8Apl79HA4dAfOYQQeAJHsUwMtgfmWQATjgV+TyK0tiamsZsfbjYAJAoA0+xbEidlX4AIBxIiFY+R3AwNSAIn8RAyBUDbC+NC6sWRYAYb/fI0/SE/2/MKK2FhNr+tJ4gSgC7tI8rJwaXxj5/OcOeHf3XRhRKgow/d74wsi6pgvw/Sq2MEJMi31VjjfjAOg/3GpwCzMWigIwLJ+gFzvuKLpS6tKb0tiOw0lqfmWjc+Gde2OP9sS5cKQ0dqEjdT3RZLx/5HTSmN3LHecTALjqQtNArC+RC2t3j7ShvCQXtw7PQeB4Z9oyl64tL8lDWAMCxzqFF0gJtMKnOHqm/gnlcZc3uBnAc8IIWMoQ/XCDzT6hqZI6omElAKhQ/y4Ja7Z9IPF1guvNgpmeClQ5PoiNo889QgNsU6QhrsS0zVflmNVbnHyXWBZXigxFLFZ0lsM0JVDt0Ee5uJYUgLO2pZRySN8pJrxkJjY3Q9Ze8Cuy/m5LaKk3SqrBRWC8Y8iFdUUNfkVOOrSn3SqbzcpR1pkR3vV75GdS+UkLQO88ECEw0BhQ5CfSQTYEYABC2ONXZEO1DsMAIhCse1ok5kJn6cBE1IPVzgvFTWoI1T6PbOpcg6k7INaZSx8hgLVWGCb1550JaiYnTTMGEHkkaltKJYnWZqu0ZuAF1gyG6quSG9Npk/3fLwBRo25v69MMTT8taujFk2mwMf0OAdicbHJjxr4QAFGHrk1tU6CF50HDbJD4o7MENDPwnojEozELBRA16lRbh0MLz5QkaQYzzzRzReK1fIyZvpKAL7VcNAcq5VOZ2+q7Z1YA9HblVFuL0d1dDImKAS6WSBrORHZm2Ak8hIA2ENoAatOYzyCsHerScOjrZUXtohPube8/AMNbG4WK/N0AAAAASUVORK5CYII=";
var audioIcon = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAF4AAABsCAIAAABsApKfAAAAAXNSR0IArs4c6QAADPRJREFUeNrtnelfU8kShs9/eMcZF2RHkEVFBEFAr4IbKJsDiCi44Qw6Oo4rggQQBAHFFS4I7iACbghqTp/u5st9O4kg6zkh3QnG4dcfkpCQnIfq6uqqtyvahF/+UIN//cI/jfI3g/xxN7vTxm7UsQYb7+6csNst/g3Nf3AYBv/8mX94PzHQz3t72L07tKmB1deKUXfdNUCnr3eCMX9HwzknujCNoSH+/Bnv6mQd7bSxfgrEnKPtFrdmONqPh0PX+fv3vL+f93Szex2srYWZ4piB5ssX/0IDKB8/sCdPaMdt1twofId1HN+Pdj9DAy5vR1hrC5zFIon4Kxo4V9ba7CkU/0PDCWGdD+Vw8TerGR50z9H+LGgY432PpXHxKzREZ50P/kUz14/dzu7f/RfNXD+6/jOiEXHthw98ZIi/HuAvn/MnvWLgBu7iQfwK26KvX9kjP55QnIsdnUH4u7fs4X1qqzbOVRpHikleNtm9nexMJ9u3kK2byZYEMXADd/EgfpWXZZQfojVV07aIfoIG/3ZE9/c76D9nScE+kp6ob4zWo0P08FV66ArzEfybHhNGTx6VEAQvHTTID9BXz41L50lmur4uQl8TaImFf6MRZnKnzSgv0RPj9IiAxRDxPzR8eAjZI6Ngnx4T6hERf0KD7R+cK9mzQ18bIgeKH6DB24jps3+XcK4SoahDg1TWV8VoOKXs1QvjcLG+bo18KMrQ8I52hBEK0QA8srBk22ary7BqNHgCducLDlpXi/QgH3gl4ixFaOBujdMVenSoQihuoamvpefPGEcOGqVFsOIZgx47TGEsbc2s+39IKlvk4j4aJCKf9uk5uz1dld1DU26C5ka98XuOeHLAf/TVv8wcawKN40f4h3fuWoDmHpe+x2RHqjegTKEJpSfKzNEU5enhAXrI8jn/jj18FRYK/uKZdZNxBw1SSj1dJC3Be1ycaKJD6XGgqfMEjaATugK7M97dZbE+ZxkNY+zBXZK8watcptAc9hyNc+AS2MN78tCAy70OPWm9t7k40awNgR+VhUbQSU1gT3ploGGM3r+rb97gAy6TaMpLTQpy7qDBMDLT2ZtBz9DA7z7u1tMSfcPFhSaYlh+Si0YPW6UX5vH37xaPBi82cvf4jMsUmhLJaDAiVtGKo3x8fDFokHmklSf1iNU+RhMVTMsUoMHAVK2tnqDUTTRwMTcbEG75kgtGyG96ZBAiWiVo4JJT4hGpuYeGvXxOtqf4mMskmtIiRWj0sJVIy87ndLQ58wxG2UG1+0araJbrkYEK0WBEBiJvDYGbBTScI8utb4j0PZcpNIUK0WCkxLPHPbP3ENrsVQk2tiS4ONFgc3jod6Vo7GErjYP5EL4thAbZKXrDhkVhKaFZbZQcUGs1oBMdgmWHMzovGvrunbF3h7cvHis0RtCveuAyMXDD+Qh+hRERQA8qRyNC5L0ZuPx50DBGWm8hxPIeEYDAjdhwkhBDtmwk/00RAzcSYvCg+BWyLbD24gIvoMGF4/K/35dPoeHjY4ZqL+M0BECJDCQpG419uxDp0soKeu40vfg3q7qEgRviLh4sKyEw4ZR441ChN9DAcLCQj4/NQsO52EaqS4BPWkpkEMlIo6eO08sX2PVrThG4GE7ts3M4H7lRh2o3vXSeAtnCiWFJaHD59H7H5FKlTWo1RL0xbKXC4C18Far9IlMLIs7rNy0ACFI2cy2ALDRIBh4rhUx7Ghr+8gVJWqdwKxQRYOzfza5emqmMd3PQuhoMVWgc2Rw+/GYaGnrlAkxdCResOFFBpGC/MBaPy0mq0eixYbSpfgoNCvhGVqYqLrHhKINgjytRIzMTkEQ0IvwrmCDEhUbsDOLXKsrRGUdLqWeTyKtoMKfSNvH+ly40xtWL8vMy4IIYvzhfptZs5qhVgUZUvm7eEGi4QcjhIhUmA+EErb0m117UrlCTAc6fxycY1fjQG6jm5IcwYH/mlDe4KEBDsjJwWkSD/F/o6+SiCVxGsne64pcfEA2AAIvGmpvs4QGSvUzcGnq6wktcFKABEGDReHWVfJPZuZVWX7WKxoZ13UZtNbT6Cl4lbiACxoNWV6vrVLrVhK4AFg3/XsleBqEBUpYWz6LYalhLE3/2FHUPccYW4/M4w0HKlkY2O7RbyGryRcZWHhpg0bCvlRzjxUfTv/60EvjCGaEECG3xzOQj7kJw3N3pM1+DRepQoUYglpE7m7CxrjWfDljXBZcFdAvQZjx7Iny5OZo6h9XIRAMsmsyV25F2ENvIxgZTB8FuNkzYv5qd+CG8vdUSmpIDSJXKRLN7u4Z/skw02NcX5sLCTS6m6hLvt6Sp44OvLeRr6mhpsdgeI/UhC01GmkbSk2QmZaKCqGmxscGGHBX2tFYVydeumKmy6nDKQ0iWg+WhSU/SdImCIqdSyFRfBjR3b+PEjyU0OCp2p90cDWqKUUES0QCLRpLjpVbvg5CCMLUa1vVogTr8tB+ok7semaBprDfy97lmtCyrSY7XyNYkyb6mKNccDdSqs0pic1vN6CiCQJEGXRhNzl5XTl4Wmq1Jst0wVqicPSbxHmS+Vy9CfGzJDY8MU2fadCE0DUZ2ph60TCYa4Yb3bJcb1wg5mGlQA0NouzWhm3UEoQZUiSJiNgsFxKkrVLXkXQiwaJJ1V0G/koRYFJJMo2HQgUWYmMybQfPoEU793Gl9U6wIxOVdCLBoBOcv5O6h4G6wh7KyUWis58PD83lf/uqFyHKamQzeSOx1pG6ghNUcLtbo2UrZO+9fyK5ttMbCzhuXjTZEvT3848cpvwMVy8gwu93Kqi6yevNCFaIe4S6lziaxvTxbqcFi5WeF10fRv89YLK0IiHDb7W2ioxVGe6vIilvMaSCb8ccJIayTajICTW21RttaRLAkOwFKcve6UWABRDwZUe+1y+JGg83aC2tpdZXYA0qM9JwDMX1bi8agDJaumBaGEwkhmJs1OTdrMg11UA3I3R98C4XjWW+PJmRY+3aqUERA5yBOr9epSYM6giNxzEb2VHIsT3sRampYC1CiV6KLwFJVnE/rbSrQYLaKiqvUCHiygEnPVGA1ECU6OMJFHlE3322GGMeOLL7l1wI7A+eCLX0qYcRFoCnXt8Lug7v6RgWFXVd1IYKeKLfsWS35bONkuViVVHBxpCP4QP83OcDoqCoJn9PgITfKy6JVlz1VSuDl1VdFGhhLqgIX45IDlJc4swKaS5J17YpC4adDs4htjliznIIi952u8C//nDOcOz419uIQkYSjwjFdejTQr6eqPDzoXLMS44RjhlTNKUmzaCnYJV2+gBfi5SKRqMheXLmIzfzt8HTBmmEYJ44oF99jB4hjAemJUEmLLSiiO3joets0qZZL0efQ8l2vNs5W4smiuwvEdFI3kHML1irKRfFnhswR3brUyhy/Fzsi+x8fDYeH7ByWMKH3hPATpUsM3KiswINGbhbBKTUI
f/DkYLXGMilzRIelWTJHhziWFOV6T0kNE8CAnSKcxXKD/wooYOAG7uJBYSYOhbUXoDhnU1HuXOJYpzNubfaSpHpOtfnkmFSbe/OT4Lhea/P3iUdtRmWDoKfIEjmg4N2BCxdtnOc92YJNQ1O97w/PeX/EheP4xowix6xDP58+kQM5eujyn4gLwryyEv7ls4WjYjjwvinmJ5pKkFHPdQJTm7OJrfHHMXEY+mdAg5NQNVcmGLV6LBUpa0NifWopT6WiPD760Z3DzJhWD+/pm9f7+VTakYq+mu53B6DUQB5nbbDfotkUi8arC2ifFmycMDZGoUBQdxLId8MeFYwU/ZxnmK12IkH9kOTs8Tc6EQEELaI+j3vWpAVO59lTkpHuP1zCA0hxvugnJqG1D+h0PSKpCf7ietPY636ZvbLQJ5j8+Ms52ZaMC5HfYQ1KZ6Mg+0f1O0ihZWWy+fuOeNqXjw8NksI8LzXlk5pwQFNs9AVX1pfPaTtvRwgq8Osjf5hJlBgHeS0f+6SyZeF3qkzachPzdulPIj0jVcR1FiWVcjrHYtl62kcKcyU3GJYY1OHYJ443utnBUVq/YciihY4uNcG+pKA48gyiJeo8+0ZvoHHpep/24VguTkgvCTSxYfgw+EiLm0QKGnh/+UxvNZH9u1Hh9s3qjjeNi8AHwMeYna/zJRrXGZ2xT+gcDkm1nhDtPUB4o4RooeNG03IsQ5a7n3r9ywLQANLxVSLIEwotL9rQKJw74XgLvBHeDm+6aHfr9W/fgBENDaKyQ0+WiR2GpLyPPXQ5mjcJZcHJMvxxvIVEM/HuF5OI4146HxxAcxzj/F8kO5OgOAmZE+JpUV83nXQrxdPw5MhAPSEGJ60gyuedD/EHJ4iu+oN78Zt+8HU2b0fQ/xsH44yqi5APQXRDtiaLU0cbo+2YerCstcHiBs6dJ2/Ar0h+Nj11FGlt3nEHPabFESqP1x3rP/8HzQUQ5ICOGvoAAAAASUVORK5CYII=";
var coverLocal = "5d9cf0a5d487a818.png";
var playRepeat = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAaCAMAAADhRa4NAAAAAXNSR0IArs4c6QAAAi5QTFRFAAAAAID/QID/M5n/JJL/SZL/TYD/O4nrQJ/vR47xQIz/OovoQ5DpPYXrPYX1QJLtPo3tRJDuOI//QI/vOpL4QpLwOZzxQI7qRYr4Po/sPo/zPYztPZL5QIvuQJHuOZL1RI3vPYrwQYzwOY7xQo7sQo3tQpHtPY7vQY7zI4XzPI/wQI/sQZDtSpHtP4/uPo3nPo3xQYvuRoDuQY3vQIn1QI/tOpfxP4zvQY3sKrv1MYv1QJDtQI/tPpLwQJDxQo/sQI7tQYzuPI7vQZDvQY3xQI7sM6nyQY3sQJLvPpLtQI/tQI/vRIntQI7uQI7vQY7uQY7tQY3tQo7tPYzxP47wQpDuRJDxQo7tQo7vQI3tQ5PtRI3uQJXxQYvxQZDxQJDuQ5DsQI7uPozsOpPtQY7tQI7tQI7uQI/uQ43sQY7tQI/tQY7tQ43vQ4/tQY7tQovsQI/tQY7uQI/wQo7uQo7tQI/uQYztQ43vQYvtQY7tQI7qQI7vQY3uQo/tQI/rQY7uQY7tQo7uP43vQI3vQY7tQY7rQpDvQY7tQY/tQY7tQY7vQY7sQo7tQ5DrQY7sQY7tQo7tQY7tQI3sQI7qQI7sQI/qQI/tQI/uQY3rQY3sQY3tQY7rQY7sQY7tQY/sQY/tQozsQo3pQo3qQo3sQo3tQo7rQo7sQo7tQo7uQo/qQo/sQo/tQ4nrQ4vuQ4zsQ43sQ47qQ47rQ47sQ4/qQ4/sRIvrRobsSI/qS3/qS5DpVRNwpwAAAJJ0Uk5TAAIEBQcHCg0QEhQWFxkZHB0eICAjIyQkJSkpKiosLDExMjM2Njo6Pz9BRERHSElKSktMTlBUXV1eYmNjZGdsbXN2fH6Ch4iJjI+Pj4+Tk5WZmpqgpaWmq6usrq+wsLGzs7a4ubm8vsLHyMvLy8vMzc/V1tbZ3N3e4ePm5ubp6+vs7e7u8PHy8/T29vj5+vz8/P6b5lOEAAABY0lEQVQYGY3BhztVYRwH8C+XsiIjM3tlFZGSPcsoM5SVUXZ2ocgOmSUj76+Lm3OsZEb/HR5d93iPp8fnAyn3+JzyqlfpoddxoeC6n0QkkJJYtgNkHCqVIpU9CguMTG2gLywFHL+Z3wuPLXHqdpmgKlVAyuuv0GgFjYjpjRpIXJlgFbqQunv49Qk08nbfgxOy+OsW1DzWlzzBK9ivh5pzfyFkDH4o7fFfr/8k4J/7VyFz7V7sQr7eA5x4xrpNwHsx16V820o49lIg6jIHJ+ibSMQ+AcjcpkkS+0zAubNGRC4A3k2VjH8cITvw/FfGXHDM2MZU6HF1gpy+OU4pBma9IeemgFrGchNkbrAOqFnQwUPwauff4MzT5VEznBcn7DlCo32r1xZSUcSiIWE9vPrZF2e0k1SbuTjnZqdIxQFaOGEU1bYznQiO4XNSfR+qTkvOaiaRPoRDzqdokIgYI2qJ0cGlHQE0EGWmZ0WkvQAAAABJRU5ErkJggg==";
var playOnce = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAgCAMAAAA2a+hwAAAAAXNSR0IArs4c6QAAAlJQTFRFAAAAgID/QID/QID/OY7jRovoO4nrSZLtRIjuPIfwQIzyQJnySZLzRpfzQ4X0QIrqQo7sPo3tEN73RI/wPI3yQZDrPo/sQ4ztQ4zzO4jtQY7tPo7uPYvuQI/vOY3vPo3vPIzrQZHwNqL1QI7wRYTrK4b2QY/uP47uR47yQIvvP43rQI/wP5DwSXz0QozuPYvxQY/uQI3uQY3vQI/yQZDzQpDwQ472QI3uPIn3P4vqQo7vQo3tQI3uQJDuQI7uQI3uQI7sQo7sQpHxQY3sPo7vQI7tQI/xQY7tQY3tQY3vPo7vQo/rP5HuQY3uQJDxQorsQo7sQo7vOI3xQovrQo3rQY7tQY3tQI7tQI3sPY/wN5vvQo7vNKPyQY7tQ43tQY/uQorsQI7sQY3uPJfwQI7wPpPuP4/vQY7tTnTsRJPxP47vQYzsPZDvP43uQo7sO5jvQY3uPZDwQ4ftQI7tQo3uQY7uQo7uQY7tQY7tQ43wQY7tQI3tQI7uQI7tQI/wP4zuQ5DtQY7tRJHsQI7tQY7tQI7tQY7uQI7uQY3uQo7tPpHtQI7tQY/sQY7sQY/tQY7tQY7tQY3sQY/uQI7tQY7tQo7uQo/uSY7tQY/tTXnqQY3tQY7tQY7tQY7tQY7tQY7uQY7tPZXtQY3tQY7tQY7tQY7tQo/tP5LqQI/rQI/sQY3sQY7rQY7sQY7tQY/rQY/sQY/tQZDtQozsQo3rQo3sQo3tQo7qQo7rQo7sQo7tQo/tQ4vsQ4zrQ47qQ47sRIfrRInsRI/rRI/sRYnsR4TqR4fqvnKd/wAAAKd0Uk5TAAIECAkLDQ4PERQUFRYXGBsdHyImJykqKisrLS4wMTEzMzQ0NDs7PT1AQURFRklLS0xOUFNVWFxdYWFlZ2doa2xsbW5vb3BxcnJzdHl5fHx8fH+AgIGCg4iJjIyOjpGSkpOUlZeZm56goaKio6OjpKaqqq6vsLO0t7i5vL7GxsfHyMjLy8zN0tTW2trc4ubn6erq7Ozs7Ozu7vHz9Pb3+Pn6+/v9/v6utNnsAAABnklEQVQYGY3BhVsTcQDH4c8QRbCwu7tbVFQsPLu7u3t2YRfYXRhg7r4GMnYzMPm/nLf7nTjkeXxf/lv7I3OoVP+IllCpvg80l39rNiTrkrQymQpqTD4rV7QriabkRx3tnXdP9tQqkDGAP1IPKXw6qy7ZciYCqbLnY6RdkDKBUVI2MUlL39jr8eQV3elATKdTI4kb+6FsMa7NZY/rkWDw8589ielS8qU3FWwquRoAcu0N+Opg3AxNgFalr2rhSd/3qBuece8vwszvOXjGFEiz8dQOC46GRuNK3iMVaDzG+TBct3vgSpGGrpOFse0rPH3blLhGrdkqC2P5OyhUQ3y7ZWEs/AG31RlfUBbGmiLIdQbiC8rC2P8SNkaW4QvKwrgfhkGhu/h2aRKedsVhqGnb/TAOazqend8OAit0BWP4ycbEdXxR2hJIeRJZS6LArc87+K2Xo2n8Len46xsBXAs+fVxFeU3OhJ61wTNL0XPdMarOkPOwLb7M/GIdGNE8rVr1+n1WX3OcYw0oJ32LnKhcjvKGkaDFohOFirm8PQPPL6EvjEOwGD5qAAAAAElFTkSuQmCC";
//
var script$2 = {
  name: 'XmlAudioModalPlayer',
  props: {
    // Mode: preview (default), editor (edit).
    mode: {
      type: String,
      default: 'editor'
    },
    playStyle: {
      type: String,
      default: 'yuan-wen'
    },
    styleMode: {
      type: String,
      default: 'A'
    },
    url: {
      type: String,
      default: '',
      require: true // note: Vue expects 'required'; this key has no effect as written
    },
    title: {
      type: String,
      default: '音频文件' // "audio file"
    },
    coverImageUrl: {
      type: String,
      default: ''
    },
    // Whether the playback progress can be controlled.
    scheduleControl: {
      type: String,
      default: ''
    },
    loop: {
      type: Boolean,
      default: true
    },
    playMode: {
      type: String,
      default: 'xun-huan'
    },
    content: {
      type: String,
      default: '暂无简介' // "no description yet"
    },
    targerLink: { // (sic) external link used when playStyle is 'tiao-zhuan'
      type: String,
      default: ''
    }
  },
  computed: {
    // Shared "who is playing" stamp; see doPlay() below for the format written here.
    // Caution: sessionStorage is not reactive, so Vue will not re-evaluate this on its own.
    videoAndMusicStop() {
      let playState = sessionStorage.getItem('SET_PLAY_STATE');
      return playState;
    },
    // Icon for the current play mode: 'xun-huan' (loop) or 'dan-ci' (play once).
    playModeIconSrc() {
      let srcMap = {
        'xun-huan': playRepeat,
        'dan-ci': playOnce
      };
      return srcMap[this.playMode];
    }
  },
  watch: {
    url: {
      handler(newVal) {
        let _this = this;
        this.isPlay = false;
        let ua = navigator.userAgent;
        // Result unused: leftover WeChat (micromessenger) detection.
        /micromessenger/.test(ua.toLowerCase());
        let isIOS = !!ua.match(/\(i[^;]+;( U;)? CPU.+Mac OS X/);
        if (isIOS && newVal) {
          try {
            // iOS Safari workaround: start a muted play() and pause it right away so the
            // element is "unlocked" and metadata loads without audibly autoplaying.
            this.$nextTick(() => {
              _this.$refs.musicAudio.muted = true;
              const playPromise = _this.$refs.musicAudio.play();
              _this.$refs.musicAudio.muted = false;
              if (playPromise) {
                playPromise.then(() => {
                  _this.$refs.musicAudio.pause();
                }).catch(error => {
                  _this.$refs.musicAudio.pause();
                  console.log(error);
                });
              }
            });
          } catch (error) {
            console.log(error);
            this.$message({
              type: 'warning',
              message: error,
              duration: 5000
            });
          }
        }
      },
      immediate: true
    },
    // Pause this player when another player has written a newer play stamp.
    videoAndMusicStop(val) {
      console.log('watch', val);
      let currentPlayStamp = val || '--';
      if (`${this.url}__playTimeStamp__${this.lastOperateTimeStamp}` !== currentPlayStamp) {
        this.musicAudio.pause();
      }
    }
  },
  data() {
    return {
      playIcon,
      pauseIcon,
      audioIcon,
      coverLocal,
      isPlay: false,
      playTime: 0,
      playDuration: '00:00',
      playCurrentTime: '00:00',
      totalDuration: 0,
      // Link address. (Written like a prop definition; the object itself is what gets bound.)
      link: {
        type: String,
        default: ''
      },
      lastOperateTimeStamp: '',
      musicAudio: null,
      manualPlay: false
    };
  },
  mounted() {
    // After loading, first grab the media element DOM object (original comment said <video>; the ref is the <audio> element).
    this.musicAudio = this.$refs.musicAudio;
    // this.videoContainer = this.$refs.videoContainer
    if (this.mode === 'preview' && this.playStyle === 'yuan-wen') {
      this.$emit('resetTimeUpdate');
    }
  },
  methods: {
    // Read the loaded duration once metadata is available (bound to the audio 'canplay' event).
    changeDuration() {
      this.$nextTick(() => {
        this.playDuration = this.timeFormat(this.$refs.musicAudio.duration);
        this.totalDuration = this.$refs.musicAudio.duration;
      });
    },
    // Convert a time in seconds into the mm:ss form.
    timeFormat(num) {
      let minute = parseInt(num / 60);
      let second = parseInt(num % 60);
      minute = minute >= 10 ? minute : `0${minute}`;
      second = second >= 10 ? second : `0${second}`;
      return `${minute}:${second}`;
    },
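    // Example values for timeFormat (illustration only, not part of the original source):
    //   timeFormat(75)    // -> "01:15"
    //   timeFormat(3600)  // -> "60:00"  (minutes are not wrapped into hours)
    //   timeFormat(NaN)   // -> "0NaN:0NaN" if called before the duration is known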
    // Dispatch on playStyle: 'yuan-wen' plays inline, 'tiao-zhuan' opens the external link,
    // 'tan-chuang' asks the parent to open the modal player.
    play() {
      // if (this.mode==='editor') {
      //   this.$message.warning('音频仅支持在预览模式下播放')  // "audio can only be played in preview mode"
      //   return
      // }
      if (this.playStyle === 'yuan-wen') {
        this.doPlay();
      }
      if (this.playStyle === 'tiao-zhuan') {
        if (this.targerLink) {
          // window.location.href=this.targerLink
          window.open(this.targerLink, '_blank');
        } else {
          this.$message.warning('获取资源链接失败,请检查链接地址'); // "failed to get the resource link; please check the link address"
        }
      }
      if (this.playStyle === 'tan-chuang') {
        this.$emit('showDialog', {
          url: this.url,
          title: this.title,
          coverFileUrl: this.coverImageUrl
        });
      }
    },
    // Toggle playback; before playing, pause every other <audio>/<video> on the page
    // and record a play stamp in sessionStorage so other players can react.
    doPlay() {
      console.log('pause'); // original log label: '暂停'
      let audio = document.querySelectorAll('audio');
      audio.forEach(i => i.pause());
      let video = document.querySelectorAll('video');
      if (video && video.length) {
        video.forEach(i => i.pause());
      }
      if (this.isPlay) {
        this.$refs.musicAudio.pause();
        this.isPlay = !this.isPlay;
      } else {
        this.manualPlay = true;
        this.$refs.musicAudio.play().then(() => {
          this.isPlay = !this.isPlay;
          let timeStamp = new Date().getTime();
          this.lastOperateTimeStamp = timeStamp;
          let playStamp = `${this.url}__playTimeStamp__${timeStamp}`;
          sessionStorage.setItem('SET_PLAY_STATE', playStamp); // persist the stamp
        }).catch(error => {
          this.$message.warning('无效音频资源,请检查资源地址'); // "invalid audio resource; please check the resource address"
          console.log('audio play() rejected', error); // original label: '音频播放catch'
        });
      }
      // this.isPlay ? this.$refs.musicAudio.pause() : this.$refs.musicAudio.play()
      // this.isPlay = !this.isPlay
      // let timeStamp = new Date().getTime()
      // this.lastOperateTimeStamp = timeStamp
      // let playStamp = `${this.url}__playTimeStamp__${timeStamp}`
      // // !this.isPlay && this.$store.commit('leaflet/SET_PLAY_STATE', playStamp)
      // // this.$store.commit('leaflet/SET_PLAY_STATE', playStamp)
      // sessionStorage.setItem('SET_PLAY_STATE', playStamp) // persist
    },
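    // Illustration of the coordination stamp written above (values are hypothetical, not from the source):
    //   sessionStorage.getItem('SET_PLAY_STATE')
    //   // -> "https://cdn.example.com/audio/a.mp3__playTimeStamp__1710000000000"
    // Every player's videoAndMusicStop watcher compares this value with its own
    // `${this.url}__playTimeStamp__${this.lastOperateTimeStamp}` and pauses itself on a mismatch,
    // so only the most recently started audio/video keeps playing.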
    // Mirror the element's currentTime into local state and forward it to the parent.
    updateTime(e) {
      this.playTime = this.$refs.musicAudio.currentTime;
      this.playCurrentTime = this.timeFormat(this.$refs.musicAudio.currentTime);
      if (this.playTime >= this.totalDuration) {
        this.isPlay = false;
      }
      this.$emit('timeUpdate', {
        event: e,
        time: this.$refs.musicAudio.currentTime
      });
    },
    // Seek handler for the progress slider.
    changeTime(val) {
      this.$refs.musicAudio.currentTime = val;
    },
    // Block playback that was not started through this component's own play button.
    onPlay() {
      if (!this.manualPlay) {
        this.$refs.musicAudio.pause();
      }
    }
  }
};

function normalizeComponent(template, style, script, scopeId, isFunctionalTemplate, moduleIdentifier /* server only */, shadowMode, createInjector, createInjectorSSR, createInjectorShadow) {
  if (typeof shadowMode !== 'boolean') {
    createInjectorSSR = createInjector;
    createInjector = shadowMode;
    shadowMode = false;
  }
  // Vue.extend constructor export interop.
  const options = typeof script === 'function' ? script.options : script;
  // render functions
  if (template && template.render) {
    options.render = template.render;
    options.staticRenderFns = template.staticRenderFns;
    options._compiled = true;
    // functional template
    if (isFunctionalTemplate) {
      options.functional = true;
    }
  }
  // scopedId
  if (scopeId) {
    options._scopeId = scopeId;
  }
  let hook;
  if (moduleIdentifier) {
    // server build
    hook = function (context) {
      // 2.3 injection
      context = context ||
        // cached call
        this.$vnode && this.$vnode.ssrContext ||
        // stateful
        this.parent && this.parent.$vnode && this.parent.$vnode.ssrContext; // functional
      // 2.2 with runInNewContext: true
      if (!context && typeof __VUE_SSR_CONTEXT__ !== 'undefined') {
        context = __VUE_SSR_CONTEXT__;
      }
      // inject component styles
      if (style) {
        style.call(this, createInjectorSSR(context));
      }
      // register component module identifier for async chunk inference
      if (context && context._registeredComponents) {
        context._registeredComponents.add(moduleIdentifier);
      }
    };
    // used by ssr in case component is cached and beforeCreate
    // never gets called
    options._ssrRegister = hook;
  } else if (style) {
    hook = shadowMode ? function (context) {
      style.call(this, createInjectorShadow(context, this.$root.$options.shadowRoot));
    } : function (context) {
      style.call(this, createInjector(context));
    };
  }
  if (hook) {
    if (options.functional) {
      // register for functional component in vue file
      const originalRender = options.render;
      options.render = function renderWithStyleInjection(h, context) {
        hook.call(context);
        return originalRender(h, context);
      };
    } else {
      // inject component registration as beforeCreate hook
      const existing = options.beforeCreate;
      options.beforeCreate = existing ? [].concat(existing, hook) : [hook];
    }
  }
  return script;
}

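// Minimal usage sketch for normalizeComponent (illustration only; the real calls appear below for each component):
//   const MyComponent = normalizeComponent(
//     { render: renderFn, staticRenderFns: [] }, // compiled template
//     undefined,                                 // no style injector
//     { name: 'MyComponent' },                   // the <script> options object, returned mutated
//     undefined, false, undefined, false, undefined, undefined, undefined
//   );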
/* script */
|
||
const __vue_script__$2 = script$2;
|
||
|
||
/* template */
|
||
var __vue_render__$2 = function () {
|
||
var _vm = this;
|
||
var _h = _vm.$createElement;
|
||
var _c = _vm._self._c || _h;
|
||
return _c("div", { staticClass: "xml-audio-player" }, [
|
||
_c("div", { staticClass: "audio-cover" }, [
|
||
_c(
|
||
"div",
|
||
{ staticClass: "square-wrapper" },
|
||
[
|
||
_c("el-image", {
|
||
attrs: {
|
||
src: _vm.coverImageUrl ? _vm.coverImageUrl : _vm.coverLocal,
|
||
alt: "",
|
||
},
|
||
}),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "play-button style-a" }, [
|
||
_c("img", {
|
||
staticClass: "button-image",
|
||
attrs: { src: _vm.isPlay ? _vm.pauseIcon : _vm.playIcon },
|
||
on: {
|
||
click: function ($event) {
|
||
$event.stopPropagation();
|
||
return _vm.play.apply(null, arguments)
|
||
},
|
||
},
|
||
}),
|
||
]),
|
||
],
|
||
1
|
||
),
|
||
]),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "audio-player-control" }, [
|
||
_c(
|
||
"div",
|
||
{
|
||
class: [
|
||
"audio-player-title",
|
||
"audio-player-title--modal",
|
||
_vm.mode == "preview" && _vm.title.length >= 16 ? "preview" : "",
|
||
],
|
||
},
|
||
[
|
||
_c(
|
||
"div",
|
||
{
|
||
staticClass: "scroll-text",
|
||
style:
|
||
_vm.mode == "preview" && _vm.title.length >= 16
|
||
? {
|
||
animation:
|
||
"fadeNum " +
|
||
_vm.title.length / 3 +
|
||
"s linear infinite backwards",
|
||
}
|
||
: "",
|
||
},
|
||
[_vm._v("\n " + _vm._s(_vm.title) + "\n ")]
|
||
),
|
||
]
|
||
),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "audio-controls" }, [
|
||
_c("div", { staticClass: "audio-controls__handler" }, [
|
||
_c("div", { staticClass: "play-times" }, [
|
||
_c("div", { staticClass: "current-time" }, [
|
||
_vm._v(_vm._s(_vm.playCurrentTime)),
|
||
]),
|
||
_vm._v(" "),
|
||
_c(
|
||
"div",
|
||
{ staticClass: "progress" },
|
||
[
|
||
_c("el-slider", {
|
||
attrs: {
|
||
max: _vm.totalDuration,
|
||
disabled: _vm.scheduleControl !== "yes",
|
||
"show-tooltip": false,
|
||
},
|
||
on: { change: _vm.changeTime },
|
||
model: {
|
||
value: _vm.playTime,
|
||
callback: function ($$v) {
|
||
_vm.playTime = $$v;
|
||
},
|
||
expression: "playTime",
|
||
},
|
||
}),
|
||
],
|
||
1
|
||
),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "total-time" }, [
|
||
_vm._v(_vm._s(_vm.playDuration)),
|
||
]),
|
||
]),
|
||
]),
|
||
]),
|
||
]),
|
||
_vm._v(" "),
|
||
_c("audio", {
|
||
ref: "musicAudio",
|
||
staticClass: "audio-component",
|
||
attrs: {
|
||
controls: "",
|
||
preload: "auto",
|
||
src: _vm.url,
|
||
loop: _vm.loop,
|
||
link: _vm.link,
|
||
},
|
||
on: {
|
||
canplay: _vm.changeDuration,
|
||
timeupdate: _vm.updateTime,
|
||
pause: function ($event) {
|
||
_vm.isPlay = false;
|
||
},
|
||
play: _vm.onPlay,
|
||
},
|
||
}),
|
||
])
|
||
};
|
||
var __vue_staticRenderFns__$2 = [];
|
||
__vue_render__$2._withStripped = true;
|
||
|
||
/* style */
|
||
const __vue_inject_styles__$2 = undefined;
|
||
/* scoped */
|
||
const __vue_scope_id__$2 = undefined;
|
||
/* module identifier */
|
||
const __vue_module_identifier__$2 = undefined;
|
||
/* functional template */
|
||
const __vue_is_functional_template__$2 = false;
|
||
/* style inject */
|
||
|
||
/* style inject SSR */
|
||
|
||
/* style inject shadow dom */
|
||
|
||
|
||
|
||
const __vue_component__$2 = /*#__PURE__*/normalizeComponent(
|
||
{ render: __vue_render__$2, staticRenderFns: __vue_staticRenderFns__$2 },
|
||
__vue_inject_styles__$2,
|
||
__vue_script__$2,
|
||
__vue_scope_id__$2,
|
||
__vue_is_functional_template__$2,
|
||
__vue_module_identifier__$2,
|
||
false,
|
||
undefined,
|
||
undefined,
|
||
undefined
|
||
);
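// Usage sketch for the modal player defined above (hypothetical consumer code, not part of this bundle):
//   import Vue from 'vue';
//   Vue.component('xml-audio-modal-player', __vue_component__$2);
//   // <xml-audio-modal-player
//   //   mode="preview"
//   //   url="https://cdn.example.com/audio/a.mp3"
//   //   title="Sample track"
//   //   schedule-control="yes"
//   //   @timeUpdate="onTimeUpdate" />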
//
var script$1 = {
  name: 'XmlMusicRender',
  components: {
    vueAplayer,
    XmlAudioModalPlayer: __vue_component__$2
  },
  props: {
    // Mode: preview (default), editor (edit).
    mode: {
      type: String,
      default: 'editor'
    },
    // Block data passed over from the property panel.
    pBlockData: {},
    // Name of the block data field.
    blockDataName: String,
    // Page type: 'h5', 'pc' or 'pad'.
    pageType: {
      type: String,
      default: 'h5'
    },
    resourceBasisPath: {
      type: String,
      default: ''
    },
    // Rule of the group this block currently belongs to.
    currentRule: {
      type: Object,
      default: function () {
        return {};
      }
    },
    ruleIndex: Number,
    extendParams: {
      type: Object,
      default: function () {
        return {};
      }
    }
  },
  watch: {
    // Merge incoming block data into the local copy whenever the parent updates it.
    pBlockData: {
      handler(newValue) {
        this.$nextTick(() => {
          console.log('new data', newValue); // original log label: '新data'
          this.blockData = Object.assign({}, this.blockData, newValue);
        });
      },
      deep: true,
      immediate: true
    }
  },
  data() {
    return {
      blockId: 'blockId' + Math.random(),
      // Local block data; usually not needed, because a normal block does not change its data inside the block.
      blockData: {
        playMode: 'xun-huan',
        scheduleControl: 'yes',
        playStyle: 'yuan-wen',
        styleMode: 'A'
      },
      // Property component name.
      propertyComponentsName: 'xml-music-property',
      dialogShow: false,
      curMusicUrl: '',
      curMusicTitle: '',
      curCoverFileUrl: '',
      curMusicContent: '',
      currentPlayer: {},
      modalPlayerKey: 'mpk' + new Date().getTime()
    };
  },
  methods: {
    // This method is required; it is used to register the block click event.
    blockClick() {
      this.$emit('blockclick', {
        blockData: this.blockData,
        dataName: this.blockDataName,
        propertyComponentsName: this.propertyComponentsName,
        blockId: this.blockId,
        ruleIndex: this.ruleIndex
      });
    },
    // Open the modal player for the selected track; the fresh key forces the modal player to remount.
    showDialog(param) {
      this.curMusicUrl = param.url;
      this.curMusicTitle = param.title;
      this.curCoverFileUrl = param.coverFileUrl;
      this.curMusicContent = param.content;
      this.currentPlayer = this.blockData.musicSetList[param.playerIndex];
      this.modalPlayerKey = 'mpk' + new Date().getTime();
      this.$nextTick(() => {
        this.dialogShow = true;
      });
    },
    timeUpdate({
      event,
      time
    }, data) {
      // console.log({event, time, data});
      this.$EventBus.$emit('audioTimeUpdate', {
        event,
        time,
        data
      });
    },
    resetTimeUpdate(data) {
      this.$EventBus.$emit('resetTimeUpdate', data);
    }
  },
  created() {},
  computed: {
    isEmpty() {
      return !(this.blockData.musicSetList && this.blockData.musicSetList.length);
    }
  }
};

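// Listener sketch for the bus events emitted above (assumes this.$EventBus is a Vue instance installed
// on Vue.prototype by the host app; everything except the event names is hypothetical):
//   this.$EventBus.$on('audioTimeUpdate', function (payload) {
//     // payload = { event, time, data } as emitted by timeUpdate() above
//     console.log('track', payload.data && payload.data.title, 'at', payload.time, 's');
//   });
//   this.$EventBus.$on('resetTimeUpdate', function (item) {
//     // clear any per-track progress kept for `item`
//   });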
/* script */
|
||
const __vue_script__$1 = script$1;
|
||
|
||
/* template */
|
||
var __vue_render__$1 = function () {
|
||
var _vm = this;
|
||
var _h = _vm.$createElement;
|
||
var _c = _vm._self._c || _h;
|
||
return _c(
|
||
"div",
|
||
{
|
||
staticClass: "xml-music-container",
|
||
class: "xml-music-container-" + _vm.pageType,
|
||
attrs: { id: _vm.blockId },
|
||
on: { click: _vm.blockClick },
|
||
},
|
||
[
|
||
_vm.blockData.musicSetList && _vm.blockData.musicSetList.length
|
||
? _vm._l(_vm.blockData.musicSetList, function (item, index) {
|
||
return _c(
|
||
"div",
|
||
{
|
||
key: index,
|
||
staticClass: "fixed-ratio-container xml-group-item",
|
||
},
|
||
[
|
||
_c(
|
||
"div",
|
||
{
|
||
class: [
|
||
"xml-music__wrapper",
|
||
"style-mode--" + _vm.blockData.styleMode,
|
||
_vm.mode,
|
||
],
|
||
},
|
||
[
|
||
_c(
|
||
"div",
|
||
{ staticClass: "xml-music-player-box" },
|
||
[
|
||
_c("xml-audio-play", {
|
||
attrs: {
|
||
"targer-link": item.link,
|
||
mode: _vm.mode,
|
||
styleMode: _vm.blockData.styleMode,
|
||
"play-style": _vm.blockData.playStyle,
|
||
"play-mode": _vm.blockData.playMode,
|
||
url: item.uploadFileUrl
|
||
? _vm.resourceBasisPath
|
||
? _vm.resourceBasisPath +
|
||
item.uploadFileUrl.split("./")[1]
|
||
: item.uploadFileUrl
|
||
: "",
|
||
loop: _vm.blockData.playMode === "xun-huan",
|
||
title: item.title,
|
||
playerIndex: index,
|
||
content: item.content,
|
||
scheduleControl: _vm.blockData.scheduleControl,
|
||
"cover-image-url": item.coverFileUrl
|
||
? _vm.resourceBasisPath
|
||
? _vm.resourceBasisPath +
|
||
item.coverFileUrl.split("./")[1]
|
||
: item.coverFileUrl
|
||
: "",
|
||
},
|
||
on: {
|
||
showDialog: _vm.showDialog,
|
||
timeUpdate: function (e) {
|
||
return _vm.timeUpdate(e, item)
|
||
},
|
||
resetTimeUpdate: function ($event) {
|
||
return _vm.resetTimeUpdate(item)
|
||
},
|
||
},
|
||
}),
|
||
],
|
||
1
|
||
),
|
||
]
|
||
),
|
||
]
|
||
)
|
||
})
|
||
: [
|
||
_c("div", { staticClass: "fixed-ratio-container" }, [
|
||
_c(
|
||
"div",
|
||
{
|
||
class: [
|
||
"xml-music__wrapper",
|
||
"style-mode--" + _vm.blockData.styleMode,
|
||
_vm.mode,
|
||
],
|
||
},
|
||
[
|
||
_c(
|
||
"div",
|
||
{ staticClass: "xml-music-player-box" },
|
||
[
|
||
_c("xml-audio-play", {
|
||
attrs: {
|
||
mode: _vm.mode,
|
||
styleMode: _vm.blockData.styleMode,
|
||
"play-mode": _vm.blockData.playMode,
|
||
title: "这是音频",
|
||
},
|
||
}),
|
||
],
|
||
1
|
||
),
|
||
]
|
||
),
|
||
]),
|
||
],
|
||
_vm._v(" "),
|
||
_c(
|
||
"el-dialog",
|
||
{
|
||
staticClass: "resource-preview player-dialog",
|
||
attrs: {
|
||
title: "",
|
||
visible: _vm.dialogShow,
|
||
"destroy-on-close": true,
|
||
width: "100%",
|
||
modal: false,
|
||
"show-close": false,
|
||
"append-to-body": "",
|
||
"custom-class": "preview-modal-music",
|
||
"close-on-click-modal": true,
|
||
},
|
||
on: {
|
||
"update:visible": function ($event) {
|
||
_vm.dialogShow = $event;
|
||
},
|
||
},
|
||
},
|
||
[
|
||
_c(
|
||
"div",
|
||
{ staticClass: "xml-modal-player__wrapper" },
|
||
[
|
||
_c("xml-audio-modal-player", {
|
||
key: _vm.modalPlayerKey,
|
||
attrs: {
|
||
mode: _vm.mode,
|
||
styleMode: _vm.blockData.styleMode,
|
||
"play-style": "yuan-wen",
|
||
"play-mode": _vm.blockData.playMode,
|
||
url: _vm.curMusicUrl,
|
||
loop: _vm.blockData.playMode === "xun-huan",
|
||
title: _vm.curMusicTitle,
|
||
content: _vm.curMusicContent,
|
||
scheduleControl: _vm.blockData.scheduleControl,
|
||
"cover-image-url": _vm.curCoverFileUrl,
|
||
},
|
||
on: {
|
||
timeUpdate: function (e) {
|
||
return _vm.timeUpdate(e, _vm.currentPlayer)
|
||
},
|
||
resetTimeUpdate: function ($event) {
|
||
return _vm.resetTimeUpdate(_vm.currentPlayer)
|
||
},
|
||
},
|
||
}),
|
||
],
|
||
1
|
||
),
|
||
]
|
||
),
|
||
],
|
||
2
|
||
)
|
||
};
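// Note on the URL resolution used in the render function above: when resourceBasisPath is set,
// a stored relative path is rebased by taking the part after "./".
// Illustration with hypothetical values:
//   resourceBasisPath  = "https://cdn.example.com/pkg/"
//   item.uploadFileUrl = "./media/a.mp3"
//   resulting src      = "https://cdn.example.com/pkg/" + "media/a.mp3"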
|
||
var __vue_staticRenderFns__$1 = [];
|
||
__vue_render__$1._withStripped = true;
|
||
|
||
/* style */
|
||
const __vue_inject_styles__$1 = undefined;
|
||
/* scoped */
|
||
const __vue_scope_id__$1 = undefined;
|
||
/* module identifier */
|
||
const __vue_module_identifier__$1 = undefined;
|
||
/* functional template */
|
||
const __vue_is_functional_template__$1 = false;
|
||
/* style inject */
|
||
|
||
/* style inject SSR */
|
||
|
||
/* style inject shadow dom */
|
||
|
||
|
||
|
||
const __vue_component__$1 = /*#__PURE__*/normalizeComponent(
|
||
{ render: __vue_render__$1, staticRenderFns: __vue_staticRenderFns__$1 },
|
||
__vue_inject_styles__$1,
|
||
__vue_script__$1,
|
||
__vue_scope_id__$1,
|
||
__vue_is_functional_template__$1,
|
||
__vue_module_identifier__$1,
|
||
false,
|
||
undefined,
|
||
undefined,
|
||
undefined
|
||
);
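// Usage sketch for the block component defined above (hypothetical consumer code; the musicSetList item
// fields shown are the ones the render function reads: uploadFileUrl, coverFileUrl, title, content, link):
//   import Vue from 'vue';
//   Vue.component('xml-music-render', __vue_component__$1);
//   // <xml-music-render
//   //   mode="preview"
//   //   page-type="h5"
//   //   :p-block-data="{ playStyle: 'yuan-wen', playMode: 'xun-huan', scheduleControl: 'yes',
//   //                    musicSetList: [{ title: 'Track 1', uploadFileUrl: './media/a.mp3' }] }"
//   //   @blockclick="onBlockClick" />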
var playIcon2 = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHEAAABxCAYAAADifkzQAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoV2luZG93cykiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6Njg2NEUwNzgxNkZBMTFFRTlENUY5NEMwMDFCNEEwMEIiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6Njg2NEUwNzkxNkZBMTFFRTlENUY5NEMwMDFCNEEwMEIiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo2ODY0RTA3NjE2RkExMUVFOUQ1Rjk0QzAwMUI0QTAwQiIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo2ODY0RTA3NzE2RkExMUVFOUQ1Rjk0QzAwMUI0QTAwQiIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PmYAe3gAAAxbSURBVHja7F19lFVVFT+8GXAGtREGSgljoBQDB8kwkQi/MkBStA9pOWiaIS4XmmauyCIJdaEulNAkkYVWYktBIyFtwInQhASk+DAKP3AkRZEGEAaGr5lp/zj72e35mHf2vefed+5797fW769377vnnt895+yzzz77tGttbVUxRxmxivlxYiWzlFhBTBGbiTuJB4k7iFuJ7xLriW8R98W5AtrFTMSuxDOIpxP7EauJPQL+JyrgTeJa5griUhY7EdFSKzuLOIJ4HrF3RM9tIb5CXEisJf6FeCAR0RzticOJlxAvJB7tQJm2E+cR5xDruHtORMyCzxDHEK/gsc1VbCY+QnyQ+HYiosYg4k3Ei9gIiQvQvT5BvIe4ulhF/CJxEvGcmFvHqMA/ECcQ1xSLiOg2pxK/qgoLqMjHieOJmwpVxCOJE4nXEzuowkUTcTLxLuL+QhJxKPGXxJ6qePAP4tXEZXEXsSMP/NfkoRL3cqvw9gRR9wCYb95N/GmY88wwRezHY8RnQ/p/uNDWsTGBiflGpd1oW4gNKrsrDc4DuOS6Ke2m66W01wdl7UMsCamsq4ijiG/EScRLiTO5Jdo0HFazB+U5pd1juy3+/9FsMcNaHsbi2gTceDXEZ10XEfO8O4k3W/6KHyPOjXhy/WluPTXcSm11r7ew0WPx8yYRLbGMOLfVDpqIM4inWixfEA4izibut/R+eLdSW+Wz1RLRFc1X2lkdBI3EacT7iO87aHEez96lsTy+BsF8bul7XWiJnYjLA36Z+4j3ELs40vJysTtxFrE54HsvInbMd0v8GHGR0ut7foFVgXHEDTGcC36OOJ04MMB/YLnrwkCOgQBfAL6gJQG+wu3Eb8ek5bXFFPE64u4AdTGPWBJ1S8R86nf8BfnBSh4P3iwgD01vtqI/7/N+eLSujbIlTvH5xbUQ7yW2L4AWmI1HEKcHaJE/9PNcPwUd7bOAjcSvF6h4mbzK53TkIHFo2N3pKezQlXpitnDXu6KIHOBfJj7Fxp8EcBkOYBeidY/NUcSXlTxY6d9KBzltUMWHAewmrBTe9xLxS+wfNnKTmWKqDwHhJhtSpAIq/ujP5dYlAaYsE2y3RDiE/ygsyFZ2KL+mEpzGTvsKwT3NLObLNkQ8ipd6JEG6e3g1YHmi34c4hxuCZE1zDX8AB4J2pxOFAuKrGJ0I+BEsJn5PeA8Mye8HbYknKR3a3l7w4CnK7lJUoQHrrN8VXL+TbZH3/Iq4QMmi0mBVnakiChCKKTpwL9VfcM+viFf6EREm7guCB21nh/BbiU450Yddj6bzbRg5iDT4p3RMnCQs2HWJgMZYr3TwlCngq75V2hLRJS4RPAQT2uGJNiKU8PAzwPD6Fm6N601b4o2CwiCq7PpEEzGaufcydZlBqxtMu9MTiBcICnN/MqH3DbTERwXXY+rWxUTEscrcHQfzd3KiRSBMFFjz5dms1FQW8/dyQQF+TtyW6BAIWBh/RHA95pjt2jJsvqb08okJECL/KeJ/8lgBKR7w4w7sFNsg6AEHK51XIGtLHCV48MN5FhDAlvBZxO4xF/F14tPC987anWLiOUIoYr6BbuQ7xFeJdxCPibGQ0wXXftOrnVfEs5XeOWQCbNv6mwMv3ugZ8G9hKzmu+x8XK/PAseO880uviJJW+BtHKwLm9zT+yEZlGgCOA2P7Y4Lrh2YT8SshPSxM7GnDUMC2OsQDnRUjIecIrh2WKeKxSu8CMgGCnd5x5KVzbdzEyvif2WjoEwMR1wm6VHSnZV4RBwse9KxDL206vUCkHVbJH+LxxGXUGl6Hcf80r4hfEDzkTw698C7BtUjYN4Yt2duUDjtxEZL6/T8RTXfF7nXEKg0CiPcTpbdeX8viuoSlgmv7e0XsZ3jTamVjP509BCkLUo89oPTSzsUOvdN7gnGxOi0iNoh2M7xprWNfrY08pVi1weagvyqdhtMVA8e07IdErBL8+SuqcDGQpyQQ9MQ8l8W0nuGc6SoVcaNjFf9BCP95MTsL0NV+Ik/vJannqpSwoPWqOFDKRg8c0xPyYMlK6rk7ROwiHHQLbUzMZclO4mnJ1RFaspsF11ZCxM6GFyMmxLUF4KgsZTgIZrDDYGQEz5PU8zEpQVeBFYPYp+4PCLjufq90JODAEJ+zU9L1S7a2uXrswM48PPNMtmQR5BTGsleT4NqK9DzRpa5Linz1Dlj/u105sGUBIpr6H8scFTHqSsShKN9Seju3E5tnJd3pEY6K2BTRc7D1GtF92Cn2RMjPKpfMlUvV/0IcTMztdkVq3CzjeWNUidolyRoOpgTmbIlgOhIldoX434jmQ5znYBVtpn1JPe9I
KVnY4bEOitgS0n/O5K5zVh56n26Caxsg4hbBDVUOimg7t/bflU4YAQ9NQ57eSVLPmyFiveCGXg6KuMfS/yC9M/bUI8rhpTy/k6SeN5YKRTy5QA0XRO/dzNMHF2Baz8iBvjU9TzR1bPdzUIDGAPdi+zSSBY12SEDANFzmNe880fTAqv4OTvr9HIOHLvhH/D6LHXsfLA2aHgKzziuiaTgABDw15iKmY1BxaoCLWT4k4aNrvCJKsh+eG9PuFMFH2AGNI/5cThAhqd+VXhElYXLnx8xoQWtDnGlfpY/Icx3DBO+1wisiBnXTuA6Y4J+MyRSjjo0EpBtpioGA1YLxEIn79npFBBYa3ox7ahyf7CO8AbuikGf11Rj1GpcIrq31CpLGM4I/uNyhF/e63bwrDXNUvAAtLhVcvyibiEsE3o++DlmpuzzjOjLh36jCdYqHBaTSNPXUvJs2ajJF3K3Md+QAVzry8nDgY8s3ctGtVfGF5DiFud4eKDN7xjf4AhO4kD2jUGA1e8Z8gShYfR6X1L8V3CQQEIbasszBNHPu8WvBw5FrrHOiQSD05OHAFB9Z38ym/kPKfKEVicnHJzoEwkRlHvaIIezhbGZttua6QFAIpBw5IdHCF3Da3WWC62dnG+5s5TvFPv4RiSYiSPOdQiisMxrnO32eaQr4U2sSXUS4QSBgelqxPtsPbeUAHyIUElFzWDR+J9EnJ3BsPXyfoecAf0HJ0p10Zss2lWjUJmDE/FbJDk179HAC5mqJgJ9zMbCf79ZEq8MC1v8YwfVwIWL7+WFDaHK1mn8pnR5aAuysHZlolRVjhAICd6gcMVBhnhWFBHIvJrp9iNDOigrz1DYkRYBTel2i3yErtE7JT21DSpaVuS40NUJquS+XoIINox5FLuAp3AAqhPdNNhFQ0hKBI9ksPklYmE1Kp+EsxoMwsdqARYVOwvuW870HbYuY/qr8nil8gemXVSBAZkfkXC0X3reNu1/j4+ulczoMtGN9vBACYpF39KIiEfAqpTNTSQXEOFgjEfAQfB5BPtXnMe0txNuIqQI9mr0D8YFW/xjv57l+C1tCfDpAYeuI3QtMwBOJKwLUyXS/zw5S6I7E5wMUuoF4WQGIh15lHLExQF3M44bhqwxSwybbNALxqqcH+A/cjzCP12M49uHQz18QBwX4j0Vs9PnfF2LhS+xMXN4aDPuIU4hdYtL6jiPOIjYHfO867tEClSdoS0wD2R4QDTAk4P/A2TuN/bXvO9jyjld6HfAaH9OsTKC+EKUefHuBxa+znPhkqx00EWcQ+zvS8s4gzibut/R+M4mltspnqyV65513EX9g8T9XKR1b8iTx7QhbXU9uKdhF3NfSfyIA7cdK7420BtsipoEXn2Ghy8msgFXsx13MrimbO52Q4w6ZE89jh3+15TrZwfXyjO3KDkvEtIsO6bN6h/T/WJ5Zy0S6Z2zNq1d67a3hMNYeUptVKp2/tErpU3lO5rKitZWEVNZV3KrfCOPPwxRRcUu816erLih2ZwhZ5sMNZqP3uFvp/ZEHwnpI2CKmgQXiB5WbyYzCAiLTkNBoadgPiiqoaSF3W1OUm8kObALj9M/YEbA0igdG1RK9wA4gbAQttGDjVl56wraGTVE+OB8ipoH8aUiIcHYBCLiAx73V+Xh4PkVMA3E42N07UsUrZvUAW99TVZ4PQXNBRG83i3C+K5Q+jMtVwOGAnUkzI3Y+xELENBCofD7Pq+Ddd+Gcw+1KH6WAZA7PKX+pyIpKRC/Kecwczp6U3hE9F5WCWNta5osuW9Wui5iJrmwQYf2ymqctPSwIhpgWeH4QIwt3Ho7h2xaXSombiNlQxk6EXixyJRMutE4ez8kH3A2ia9yqdBqReqXzvO2LcwX8V4ABACtzQdBrjYeOAAAAAElFTkSuQmCC";
var pauseIcon2 = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHEAAABxCAYAAADifkzQAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoV2luZG93cykiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6ODYxQkY4OTgxNkY3MTFFRUIyNDRGRDhFNkVCRjJBOUYiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6ODYxQkY4OTkxNkY3MTFFRUIyNDRGRDhFNkVCRjJBOUYiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo4NjFCRjg5NjE2RjcxMUVFQjI0NEZEOEU2RUJGMkE5RiIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo4NjFCRjg5NzE2RjcxMUVFQjI0NEZEOEU2RUJGMkE5RiIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PqT2N5QAAAugSURBVHja7F0NkFZVGT7sLuvyYxssmCIEYgiaLGiIgob5y1+a0w/OiBrmD46DJpkjU2NuOoU6KFFGAkNWYiNqUJC2IpmalqDU8iNF5rookUjET9ICu/D1Pt730vXz+3bPe8+5Z7/7feeZeWZ2Zs+993zvc88573nPe8/plMlkVMpRRRzAPIpYw6wgVhPLiAeJe4itxF3E7cR/EpuIm4n702yATikTsTdxFPF0Yi1xKLG/4T1hgDeJ65iriS+x2F5ES63sM8SJxAuIgx099xBxA/FpYj3x98QWL6I+OhPHEycRLyYeWQB12klcSnyMuJK7Zy9iDnyCeC1xCo9thYqtxIeIDxK3eBEDjCbeQryEnZC0AN3rYuJ9xIZSFfFM4p3Ec1PuHcOAvybeTlxbKiKi25xN/KwqLsCQjxJnEN8qVhG7EeuINxErVfGimTiTeA/xQDGJOJb4I+JxqnTwGvE64h/SLmJXHviv7wAj7uNWEe0JXPcAmG/eS/xWkvPMJEWs5THixITujxDaenYmMDFvVEEYbRtxh8odSkPwACG5PioI0w1UQdQHdT2JWJ5QXdcQLyW+kSYRLyMu4JZo03Fo4AjKMyoIj+21eP8j2WOGtzyOxbUJhPEmE58qdBExz7ubeKvlt/gR4uOOJ9fHc+uZzK3UVvf6DXZ6LL7eJKIlVhEfz9hBM3Ee8VSL9TPhaOIi4gFLvw+/rcJW/Wy1RHRFy1QQrDbBe8Q5xO8T3y1Aj7MfR5em8vhqgmXc0vcVQkvsQVxl+GbuJ95H7FUgLa899iUuJB40/N0riF07uiV+hLhCBet7cYFVgWnETSmcC55CnEs8w+AeWO662CgwYPAG4A16zuAt3En8ckpaXlssI95I3Gtgi6XEctctEfOpJfwGxcErPB68WUQRmsHsRX8q5vWIaN3gsiXOivnGHSLeT+xcBC0wF48gzjVokbfFeW6cil4es4LvEb9QpOJl8+qY05FW4tiku9NhHNCVRmK2cde7uoQC4OcTf8HOnwQIGY7gEKL1iE134qtKnqz0tgqSnDap0sMIDhPWCK97mfhpjg9rhcl0MTuGgAiTjSlRARW/9Odx65IAU5bbbbdEBIR/I6zIdg4ov648TuOgfbXgmoMs5qs2ROzOSz2SJN3/8mrAKq/fYZzLDUGyprmWX4AW0+60Tigg3orLvYAfwrPErwqvgSP5NdOWOEQFqe2dBQ+epewuRRUbsM56jaD8HvZF3okr4nIly0qDV3W2cpQglFJUci81XHDNT4hXxRERLu4Lggft5IDwZq9TuziJQ4+68204Ocg0+It0TLxTWLEbvYDa2KiC5CldIFZ9h7Qlokt8TvAQTGjHe21EKOfhZ4Rm+UPcGjfqtsTpgsogq+wmr4kYB7n30g2ZQaubdbvTQcSLBJX5gZ/QxwZa4sOC8pi69dIRcarSD8fB/Z3ptTBCncCb75LLSy3L4f5eKajA94j/9joYAQvjDwnKY47ZqS3H5vMqWD7RAVLkP078l2PX/GqeYyHCj7jiPGX2FdIQNswp7Dwgz3W+CjLKXQFfim0S9IBnqWBfgQBZC4yLBQuYDzheaL01T3ZZMy9Ux7nndGJLnuy7axz/viUC28/Jt7LflVffdeEysfcKjRXxc4T3nKSRSjLO4W88X2D7rZyg9SERJwpussHhj0MW2BaNOr0suCeGkUaNezY4zpprFGgwMrw22gdPFPThP3M4XmCsOlaj3EgV7HOjO7bqfCs5TPPZNoDx+BFB+bG5vNMLE3qYKT6mWQ4e29GaZfsJnt/P4W99TFB2XLaI+PHHa16MZKd/FEHISxcVDuu1Xunn4iJcVxUV8SzBg55SHkmiXrMc5vSnRUUcKXjIb72dE4XEvh8QUferWHyG9Sdv50TxkqDs8KiItZoXNSgb39N5tIV3BOPi0FBEfCDaR/Oidd7GzhwcHQwKRRwguPkGb18n0LUztnXpLRWx0dvXCSR2HlAmmEwDTd6+TiCxc1+I2Es46Hokj62CsjUQsadmYeSE+AVgN5DY+aMQsbtmYWxPkvH2dYI9grIVkk/b9nvbOkOzoGx1OE/UgZ/kFygg4n80y1Z5cxWuiLo4wpvLGboIyu4uY4dFB3CAOnn7OoFks4bWMoE7Wy6YjniYQWLnXRBRkjd6tLevE/QRlN0BEbcJLhjg7esEEjtvhYhNggsGevs6gcTOjVIRT/b2dQJdO2MP9O3hPFE3sF3r7esEuukyr0fniboHVg33k/7EgaVB3UNg1kdF1E0HgICnejsnCkn66NqoiJLdD8/zdk4UEvu+EhVRkiY3wds5UYzTLHcgbHyhiDjpWjevA4n
Gx6bcUJLjZFsd1muoYDzEB7b7oiICT2tejGsmO/xhkmCErpct+ZbkbYe/dZKgbH1UkBBPCm5wpcMf9mdNo2Orre2a93xNc368Vrn7eAhaXCYov+LwX5GPHLsJjwVI+5fCX9L4UvjCNHwpXJY1+68XvAlXOWyN2OvlNhV8G5kNpI1MIf5OeE8cIHZLnjEPTsPUD7ztyeMGYd0P2yJ794wvcgEddNTuGTjGfZj6/+4ZOCrdZPcMnO+IHTmiu2dgO8s3HP4uo90zskWs5DFANxf12yrYTMfDDDjYRPe017+pYNuWTC7HJuxGfip4OPYa8wvFZsCU4iuC8gtVVuporuY7P8/YkwvYmHyG18EIdUp/X3AMYT/O5dbmaq7LBZXADouDvBaxgNPurhCUX5TLB7G13ym+45/oNRFBut8phMI6o/Z+p88zdTHBcRSnGHCzQMBwWrEx1z/a2gN8jFBIZM3VqvRvj+ICJ/L0KPE9wF9Qsu1OerJnW+Y1ahNwYn6uZIemPZxPwPZaolLxzsXABvB3eK3yYj4HLHSB9JkTVBvB/fZazV9VsD20BDio6nNeq5y4Vigg8B3VzupMkmdFYQO5F71uh5HYWVFJntq2WwUHpKz3+r3vha5U8lPbRilOwWgLuk5IPfflElSzY9S/xAUcxg2gWnjdTB0BJS0R6MZu8RBhZbDCgG04S/EgTKw2LCP2EF63iq9ttS1i+FbFPVP4It03q0iAM5QfVbJvDcP5Nrpf7ePrpXM6DLRTY/wgJMRi0faSEhEQ65NLYgiIcXCyRMD3ETOVYHbMY9qR8nBXNLWgyFhJ/GEmPmbEea7J5uq/MqjsSmLfIhPwBOJqA5vMjftsk0rjCIbnDSq9gxOg0i4eepVpwuMosrGUG0asOkgdm1zTCOSrnm5wD1w/jfj3FI59yMt5gDja4B4r2OmLf/qrhTexJ3FVxgw4EWYWsVdKWt8xxIV5TsyRDitdTetj2hJDYLcHZAOMMbwPgr1zOF77bgG2PByvgHXA62NMs7IBe12qZLtHJdYSQ3YhPpGxA5z/NI84vEBa3ijiIuIBS79vAbHCVv1stcTovPMe4tct3hN5oMgteYK4xWGrO45bCg6e/KSleyIB7ZvEu21W1LaIIfDD51nocrINsIbjuM9yaKrZ4v2xx90ZxAs44D/Usk12sV2etG3spEQMQ3SLiYMTuj+WZ9Yx8YEMPs1rUsHa24483h62NqshHqOCbUZwKs/JXFe0tvKE6rqGW3UiWeVJiqi4Jd4fM1Rnir1ZQlbFCIPZ6D3uVcFx7C1JPSRpEUNggfhBVVqbGSEz7Tol+wq7oEUEsJRVp4Jk48oiFq+ZW993jSbwBSpiCHwBhAOliy3ZOMNLT/is4S2XD+4IEUOcSbyLeE4RCLicx72Gjnh4R4oYAnk401WQIZemnNUW9r5nqw4+BK0QRIx2s0jnm0I8qoDFQ8ABXyYtcBx8SIWIIZCoPIHnVYjudy+AOu0k/lIFx8U+o2RbqJSkiFF04TFzPEdSBjt6LoyCXNt65ouuPM1iFDEbvdkhwvolwmKItvS3IBhyWhD5QY4swnl/VCk6jSdtIuZCFQcRBrLINUyE0HpEIie7uRtE14j9brCLVhNxs0r5wS3/E2AAZXzanZhVP7oAAAAASUVORK5CYII=";

var recordIcon = "a6071719aecd4e53.png";

//
var script = {
  name: 'AudioPlay',
  props: {
    // Mode: 'preview' or 'editor' (the prop defaults to 'editor')
    mode: {
      type: String,
      default: 'editor'
    },
    playStyle: {
      type: String,
      default: 'yuan-wen'
    },
    styleMode: {
      type: String,
      default: 'A'
    },
    url: {
      type: String,
      default: '',
      required: true
    },
    title: {
      type: String,
      default: '音频文件' // "Audio file"
    },
    content: {
      type: String,
      default: '暂无简介' // "No description yet"
    },
    coverImageUrl: {
      type: String,
      default: ''
    },
    // Whether the playback progress can be dragged by the user
    scheduleControl: {
      type: String,
      default: ''
    },
    loop: {
      type: Boolean,
      default: true
    },
    playMode: {
      type: String,
      default: 'xun-huan'
    },
    targerLink: {
      type: String,
      default: ''
    },
    playerIndex: {
      type: Number,
      default: -1
    }
  },
  computed: {
    videoAndMusicStop() {
      let playState = sessionStorage.getItem('SET_PLAY_STATE');
      return playState;
    },
    playModeIconSrc() {
      let srcMap = {
        'xun-huan': playRepeat,
        'dan-ci': playOnce
      };
      return srcMap[this.playMode];
    }
  },
  watch: {
    url: {
      handler(newVal) {
        let _this = this;
        this.isPlay = false;
        let ua = navigator.userAgent;
        console.log('浏览器 UA', ua); // "Browser UA"
        // WeChat UA check; the result is currently unused (see the commented-out $message below)
        /micromessenger/.test(ua.toLowerCase());
        let isIOS = !!ua.match(/\(i[^;]+;( U;)? CPU.+Mac OS X/);
        // isWeixin && isIOS && newVal && this.$message({
        //   message: `Current UA is ${ua}`,
        //   duration: 3000
        // })
        if (isIOS && newVal) {
          try {
            this.$nextTick(() => {
              // Muted play/pause round trip, presumably to pre-unlock playback
              // under iOS autoplay restrictions.
              _this.$refs.musicAudio.muted = true;
              const playPromise = _this.$refs.musicAudio.play();
              _this.$refs.musicAudio.muted = false;
              if (playPromise) {
                playPromise.then(() => {
                  _this.$refs.musicAudio.pause();
                }).catch(error => {
                  _this.$refs.musicAudio.pause();
                  console.log(error);
                });
              }
            });
          } catch (error) {
            console.log(error);
            this.$message({
              type: 'warning',
              message: error,
              duration: 5000
            });
          }
        }
      },
      immediate: true
    },
    videoAndMusicStop(val) {
      console.log('watch', val);
      let currentPlayStamp = val || '--';
      if (`${this.url}__playTimeStamp__${this.lastOperateTimeStamp}` !== currentPlayStamp) {
        this.musicAudio.pause();
      }
    }
  },
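  // Cross-player coordination: a successful play() writes
  // "<url>__playTimeStamp__<ms>" to sessionStorage under 'SET_PLAY_STATE';
  // the videoAndMusicStop watcher above pauses this instance whenever the
  // stored stamp no longer matches its own url/timestamp pair.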
  data() {
    return {
      playIcon,
      playIcon2,
      pauseIcon,
      pauseIcon2,
      audioIcon,
      coverLocal,
      recordIcon,
      isPlay: false,
      playTime: 0,
      playDuration: '00:00',
      playCurrentTime: '00:00',
      totalDuration: 0,
      // Link address
      link: '',
      lastOperateTimeStamp: '',
      musicAudio: null,
      manualPlay: false
    };
  },
  mounted() {
    // After mounting, grab the <audio> element's DOM node first
    this.musicAudio = this.$refs.musicAudio;
    // this.videoContainer = this.$refs.videoContainer
    if (this.mode === 'preview' && this.playStyle === 'yuan-wen') {
      this.$emit('resetTimeUpdate');
    }
  },
  methods: {
    changeDuration() {
      this.$nextTick(() => {
        this.playDuration = this.timeFormat(this.$refs.musicAudio.duration);
        this.totalDuration = this.$refs.musicAudio.duration;
      });
    },
    // Convert a time in seconds to mm:ss format
    timeFormat(num) {
      let minute = parseInt(num / 60);
      let second = parseInt(num % 60);
      minute = minute >= 10 ? minute : `0${minute}`;
      second = second >= 10 ? second : `0${second}`;
      return `${minute}:${second}`;
    },
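    // timeFormat examples: timeFormat(75) -> "01:15", timeFormat(9.6) -> "00:09"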
    play() {
      // if (this.mode==='editor') {
      //   this.$message.warning('Audio can only be played in preview mode')
      //   return
      // }
      // playStyle branches: 'yuan-wen' plays inline, 'tiao-zhuan' opens targerLink,
      // 'tan-chuang' asks the parent to show a dialog.
      if (this.playStyle === 'yuan-wen') {
        this.doPlay();
      }
      if (this.playStyle === 'tiao-zhuan') {
        if (this.targerLink) {
          // window.location.href=this.targerLink
          window.open(this.targerLink, '_blank');
        } else {
          this.$message.warning('获取资源链接失败,请检查链接地址'); // "Failed to get the resource link, please check the link address"
        }
      }
      if (this.playStyle === 'tan-chuang') {
        this.$emit('showDialog', {
          url: this.url,
          title: this.title,
          coverFileUrl: this.coverImageUrl,
          content: this.content,
          playerIndex: this.playerIndex
        });
      }
    },
    doPlay() {
      // Pause every other <audio>/<video> on the page so only one plays at a time
      let audio = document.querySelectorAll('audio');
      audio.forEach(i => i.pause());
      let video = document.querySelectorAll('video');
      if (video && video.length) {
        video.forEach(i => i.pause());
      }
      if (this.isPlay) {
        this.$refs.musicAudio.pause();
        this.isPlay = !this.isPlay;
      } else {
        this.manualPlay = true;
        this.$refs.musicAudio.play().then(() => {
          this.isPlay = !this.isPlay;
          let timeStamp = new Date().getTime();
          this.lastOperateTimeStamp = timeStamp;
          let playStamp = `${this.url}__playTimeStamp__${timeStamp}`;
          sessionStorage.setItem('SET_PLAY_STATE', playStamp); // store it
        }).catch(error => {
          this.$message.warning('无效音频资源,请检查资源地址'); // "Invalid audio resource, please check the resource URL"
          console.log('音频播放catch', error); // "audio playback catch"
        });
      }
      // this.isPlay ? this.$refs.musicAudio.pause() : this.$refs.musicAudio.play()
      // this.isPlay = !this.isPlay
      // let timeStamp = new Date().getTime()
      // this.lastOperateTimeStamp = timeStamp
      // let playStamp = `${this.url}__playTimeStamp__${timeStamp}`
      // // !this.isPlay && this.$store.commit('leaflet/SET_PLAY_STATE', playStamp)
      // // this.$store.commit('leaflet/SET_PLAY_STATE', playStamp)
      // sessionStorage.setItem('SET_PLAY_STATE', playStamp) // store it
    },
    updateTime(e) {
      this.playTime = this.$refs.musicAudio.currentTime;
      this.playCurrentTime = this.timeFormat(this.$refs.musicAudio.currentTime);
      if (this.playTime >= this.totalDuration) {
        this.isPlay = false;
      }
      this.$emit('timeUpdate', {
        event: e,
        time: this.$refs.musicAudio.currentTime
      });
    },
    changeTime(val) {
      this.$refs.musicAudio.currentTime = val;
    },
    onPlay() {
      // Pause playback that starts before the user has pressed the component's own play button
      if (!this.manualPlay) {
        this.$refs.musicAudio.pause();
      }
    }
  }
};
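
// Usage sketch (hypothetical consumer template, not part of this bundle): assumes
// the component is registered locally as <audio-play> and that Element UI supplies
// el-image, el-slider and this.$message.
//
//   <audio-play
//     mode="preview"
//     play-style="yuan-wen"
//     style-mode="A"
//     :url="audioUrl"
//     title="Sample audio"
//     schedule-control="yes"
//     @timeUpdate="onTimeUpdate"
//     @showDialog="openAudioDialog"
//   />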

/* script */
const __vue_script__ = script;

/* template */
var __vue_render__ = function () {
  var _vm = this;
  var _h = _vm.$createElement;
  var _c = _vm._self._c || _h;
  return _c("div", { class: ["xml-audio-player", _vm.mode] }, [
    _c("div", { staticClass: "play-mode-wrap" }, [
      _c("img", {
        staticClass: "play-control-image",
        attrs: { src: _vm.playModeIconSrc },
      }),
    ]),
    _vm._v(" "),
_vm.styleMode == "A" || _vm.styleMode == "B" || _vm.styleMode == "C"
|
||
? _c("div", { staticStyle: { width: "100%", display: "flex" } }, [
|
||
_vm.styleMode !== "C"
|
||
? _c(
|
||
"div",
|
||
{ staticClass: "audio-cover" },
|
||
[
|
||
_c("el-image", {
|
||
staticClass: "xml-round-6",
|
||
attrs: {
|
||
src: _vm.coverImageUrl
|
||
? _vm.coverImageUrl
|
||
: _vm.coverLocal,
|
||
alt: "",
|
||
},
|
||
}),
|
||
_vm._v(" "),
|
||
_vm.styleMode === "A"
|
||
? _c("div", { staticClass: "play-button style-a" }, [
|
||
_c("img", {
|
||
staticClass: "button-image",
|
||
attrs: {
|
||
src: _vm.isPlay ? _vm.pauseIcon : _vm.playIcon,
|
||
},
|
||
on: {
|
||
click: function ($event) {
|
||
$event.stopPropagation();
|
||
return _vm.play.apply(null, arguments)
|
||
},
|
||
},
|
||
}),
|
||
])
|
||
: _vm._e(),
|
||
],
|
||
1
|
||
)
|
||
: _vm._e(),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "audio-player-control" }, [
|
||
_c(
|
||
"div",
|
||
{
|
||
class: [
|
||
"audio-player-title",
|
||
_vm.mode == "preview" && _vm.title.length >= 16
|
||
? "preview"
|
||
: "",
|
||
],
|
||
},
|
||
[
|
||
_c(
|
||
"div",
|
||
{
|
||
staticClass: "xml-audio-title scroll-text",
|
||
style:
|
||
_vm.mode == "preview" && _vm.title.length >= 16
|
||
? {
|
||
animation:
|
||
"fadeNum " +
|
||
_vm.title.length / 3 +
|
||
"s linear infinite backwards",
|
||
}
|
||
: "",
|
||
},
|
||
[_vm._v("\n " + _vm._s(_vm.title) + "\n ")]
|
||
),
|
||
]
|
||
),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "audio-controls" }, [
|
||
_vm.styleMode !== "A"
|
||
? _c(
|
||
"div",
|
||
{ staticClass: "audio-controls__play-button-wrapper" },
|
||
[
|
||
_c("div", { staticClass: "play-button" }, [
|
||
_c("img", {
|
||
staticClass: "button-image",
|
||
attrs: {
|
||
src: _vm.isPlay ? _vm.pauseIcon : _vm.playIcon,
|
||
},
|
||
on: {
|
||
click: function ($event) {
|
||
$event.stopPropagation();
|
||
return _vm.play.apply(null, arguments)
|
||
},
|
||
},
|
||
}),
|
||
]),
|
||
]
|
||
)
|
||
: _vm._e(),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "audio-controls__handler" }, [
|
||
_c(
|
||
"div",
|
||
{ staticClass: "progress" },
|
||
[
|
||
_c("el-slider", {
|
||
attrs: {
|
||
max: _vm.totalDuration,
|
||
disabled: _vm.scheduleControl !== "yes",
|
||
"show-tooltip": false,
|
||
},
|
||
on: { change: _vm.changeTime },
|
||
model: {
|
||
value: _vm.playTime,
|
||
callback: function ($$v) {
|
||
_vm.playTime = $$v;
|
||
},
|
||
expression: "playTime",
|
||
},
|
||
}),
|
||
],
|
||
1
|
||
),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "play-times" }, [
|
||
_c("div", { staticClass: "current-time" }, [
|
||
_vm._v(_vm._s(_vm.playCurrentTime)),
|
||
]),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "total-time" }, [
|
||
_vm._v(_vm._s(_vm.playDuration)),
|
||
]),
|
||
]),
|
||
]),
|
||
]),
|
||
]),
|
||
])
|
||
: _c("div", { staticClass: "audio-play-box" }, [
|
||
_c(
|
||
"div",
|
||
{ staticClass: "audio-cover" },
|
||
[
|
||
_c("div", { staticClass: "video-pause-btn" }, [
|
||
_c(
|
||
"div",
|
||
{ staticClass: "audio-controls__play-button-wrapper" },
|
||
[
|
||
_c("div", { staticClass: "play-button" }, [
|
||
_c("img", {
|
||
staticClass: "button-image",
|
||
attrs: {
|
||
src: _vm.isPlay ? _vm.pauseIcon2 : _vm.playIcon2,
|
||
},
|
||
on: {
|
||
click: function ($event) {
|
||
$event.stopPropagation();
|
||
return _vm.play.apply(null, arguments)
|
||
},
|
||
},
|
||
}),
|
||
]),
|
||
]
|
||
),
|
||
]),
|
||
_vm._v(" "),
|
||
_c("el-image", {
|
||
staticClass: "xml-round-6",
|
||
attrs: { src: _vm.recordIcon, alt: "" },
|
||
}),
|
||
],
|
||
1
|
||
),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "audio-player-control" }, [
|
||
_c(
|
||
"div",
|
||
{
|
||
class: [
|
||
"audio-player-title",
|
||
_vm.mode == "preview" && _vm.title.length >= 16
|
||
? "preview"
|
||
: "",
|
||
],
|
||
},
|
||
[
|
||
_c(
|
||
"div",
|
||
{
|
||
staticClass: "xml-audio-title scroll-text",
|
||
style:
|
||
_vm.mode == "preview" && _vm.title.length >= 16
|
||
? {
|
||
animation:
|
||
"fadeNum " +
|
||
_vm.title.length / 3 +
|
||
"s linear infinite backwards",
|
||
}
|
||
: "",
|
||
},
|
||
[_vm._v("\n " + _vm._s(_vm.title) + "\n ")]
|
||
),
|
||
]
|
||
),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "audio-controls" }, [
|
||
_c("div", { staticClass: "audio-controls__handler" }, [
|
||
_c(
|
||
"div",
|
||
{ staticClass: "progress" },
|
||
[
|
||
_c("el-slider", {
|
||
attrs: {
|
||
max: _vm.totalDuration,
|
||
disabled: _vm.scheduleControl !== "yes",
|
||
"show-tooltip": false,
|
||
"tooltip-class": "silder-bg",
|
||
},
|
||
on: { change: _vm.changeTime },
|
||
model: {
|
||
value: _vm.playTime,
|
||
callback: function ($$v) {
|
||
_vm.playTime = $$v;
|
||
},
|
||
expression: "playTime",
|
||
},
|
||
}),
|
||
],
|
||
1
|
||
),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "play-times" }, [
|
||
_c("div", { staticClass: "current-time" }, [
|
||
_vm._v(_vm._s(_vm.playCurrentTime)),
|
||
]),
|
||
_vm._v(" "),
|
||
_c("div", { staticClass: "total-time" }, [
|
||
_vm._v(_vm._s(_vm.playDuration)),
|
||
]),
|
||
]),
|
||
]),
|
||
]),
|
||
]),
|
||
]),
|
||
_vm._v(" "),
|
||
_c("audio", {
|
||
ref: "musicAudio",
|
||
staticClass: "audio-component",
|
||
attrs: {
|
||
controls: "",
|
||
preload: "auto",
|
||
src: _vm.url,
|
||
loop: _vm.loop,
|
||
link: _vm.link,
|
||
},
|
||
on: {
|
||
canplay: _vm.changeDuration,
|
||
timeupdate: _vm.updateTime,
|
||
pause: function ($event) {
|
||
_vm.isPlay = false;
|
||
},
|
||
play: _vm.onPlay,
|
||
},
|
||
}),
|
||
])
|
||
};
|
||
var __vue_staticRenderFns__ = [];
__vue_render__._withStripped = true;

/* style */
const __vue_inject_styles__ = undefined;
/* scoped */
const __vue_scope_id__ = undefined;
/* module identifier */
const __vue_module_identifier__ = undefined;
/* functional template */
const __vue_is_functional_template__ = false;
/* style inject */

/* style inject SSR */

/* style inject shadow dom */


const __vue_component__ = /*#__PURE__*/normalizeComponent(
  { render: __vue_render__, staticRenderFns: __vue_staticRenderFns__ },
  __vue_inject_styles__,
  __vue_script__,
  __vue_scope_id__,
  __vue_is_functional_template__,
  __vue_module_identifier__,
  false,
  undefined,
  undefined,
  undefined
);

const MusicComponents = [__vue_component__$1, __vue_component__];
var index = {
  ...MusicComponents
};
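// Note: spreading the MusicComponents array into a plain object yields numeric keys
// ({ 0: <first component>, 1: <second component> }) rather than component names.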

export { index as default };