// share/public_html/static/hls.js
EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;
// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default that helps find memory leaks.
EventEmitter.defaultMaxListeners = 10;
// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter.prototype.setMaxListeners = function(n) {
if (!isNumber(n) || n < 0 || isNaN(n))
throw TypeError('n must be a positive number');
this._maxListeners = n;
return this;
};
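// A minimal usage sketch (hedged; `emitter` is a hypothetical instance, assuming the
// Node-style EventEmitter API defined above):
//   var emitter = new EventEmitter();
//   emitter.setMaxListeners(20);  // raise the leak-warning threshold above the default 10
//   emitter.setMaxListeners(0);   // zero disables the limit entirely
//   emitter.setMaxListeners(-1);  // throws TypeError('n must be a positive number')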
EventEmitter.prototype.emit = function(type) {
var er, handler, len, args, i, listeners;
if (!this._events)
this._events = {};
var duration = result[1];
if (duration) {
// INF
frag.duration = parseFloat(duration);
// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
var title = (' ' + result[2]).slice(1);
frag.title = title || null;
frag.tagList.push(title ? ['INF', duration, title] : ['INF', duration]);
} else if (result[3]) {
// url
if (!isNaN(frag.duration)) {
var sn = currentSN++;
frag.type = type;
frag.start = totalduration;
frag.levelkey = levelkey;
frag.sn = sn;
frag.level = id;
frag.cc = cc;
frag.urlId = levelUrlId;
frag.baseurl = baseurl;
// avoid sliced strings https://github.com/video-dev/hls.js/issues/939
// Initialization Vector (IV)
levelkey.iv = decryptiv;
}
}
break;
case 'START':
var startParams = value1;
var startAttrs = new attr_list(startParams);
var startTimeOffset = startAttrs.decimalFloatingPoint('TIME-OFFSET');
// TIME-OFFSET can be 0
if (!isNaN(startTimeOffset)) {
level.startTimeOffset = startTimeOffset;
}
break;
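// Illustration (hedged sketch): a playlist carrying the tag
//   #EXT-X-START:TIME-OFFSET=-12.0
// yields startAttrs.decimalFloatingPoint('TIME-OFFSET') === -12; the !isNaN guard
// accepts it (and a literal 0), so level.startTimeOffset becomes -12.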
case 'MAP':
var mapAttrs = new attr_list(value1);
frag.relurl = mapAttrs.URI;
frag.rawByteRange = mapAttrs.BYTERANGE;
frag.baseurl = baseurl;
frag.level = id;
PlaylistLoader.prototype._handleTrackOrLevelPlaylist = function _handleTrackOrLevelPlaylist(response, stats, context, networkDetails) {
var hls = this.hls;
var id = context.id,
level = context.level,
type = context.type;
var url = PlaylistLoader.getResponseUrl(response, context);
var levelUrlId = isNaN(id) ? 0 : id;
var levelId = isNaN(level) ? levelUrlId : level; // level -> id -> 0
var levelType = PlaylistLoader.mapContextToLevelType(context);
var levelDetails = m3u8_parser.parseLevelPlaylist(response.data, url, levelId, levelType, levelUrlId);
// set stats on level structure
levelDetails.tload = stats.tload;
// We have done our first request (Manifest-type) and received
// not a master playlist but a chunk-list (track/level).
// We fire the manifest-loaded event anyway with the parsed level-details.
var loaderContext = void 0,
loaderConfig = void 0,
loaderCallbacks = void 0;
loaderContext = { url: frag.url, frag: frag, responseType: 'arraybuffer', progressData: false };
var start = frag.byteRangeStartOffset,
end = frag.byteRangeEndOffset;
if (!isNaN(start) && !isNaN(end)) {
loaderContext.rangeStart = start;
loaderContext.rangeEnd = end;
}
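// Sketch of the byte-range mapping (assumption: the XHR-based loader translates
// rangeStart/rangeEnd into an inclusive-start, exclusive-end HTTP Range header):
//   #EXT-X-BYTERANGE:1000@2000 -> start = 2000, end = 3000
//   -> request header "Range: bytes=2000-2999"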
loaderConfig = {
timeout: config.fragLoadingTimeOut,
maxRetry: 0,
retryDelay: 0,
maxRetryDelay: config.fragLoadingMaxRetryTimeout
};
/**
* Fires when a fragment loading is completed
*/
FragmentTracker.prototype.onFragLoaded = function onFragLoaded(e) {
var fragment = e.frag;
// don't track initsegment (for which sn is not a number)
// don't track frags used for bitrateTest, they're irrelevant.
if (!isNaN(fragment.sn) && !fragment.bitrateTest) {
var fragKey = this.getFragmentKey(fragment);
var fragmentEntity = {
body: fragment,
range: Object.create(null),
buffered: false
};
this.fragments[fragKey] = fragmentEntity;
}
};
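// Why the isNaN(frag.sn) guard works (hedged note): init segments carry the string
// sn === 'initSegment', while media fragments carry numeric sequence numbers:
//   isNaN('initSegment') // => true  -> skipped
//   isNaN(42)            // => false -> tracked under its fragment key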
}
var observer = this.observer;
if (observer) {
observer.removeAllListeners();
this.observer = null;
}
};
Demuxer.prototype.push = function push(data, initSegment, audioCodec, videoCodec, frag, duration, accurateTimeOffset, defaultInitPTS) {
var w = this.w;
var timeOffset = !isNaN(frag.startDTS) ? frag.startDTS : frag.start;
var decryptdata = frag.decryptdata;
var lastFrag = this.frag;
var discontinuity = !(lastFrag && frag.cc === lastFrag.cc);
var trackSwitch = !(lastFrag && frag.level === lastFrag.level);
var nextSN = lastFrag && frag.sn === lastFrag.sn + 1;
var contiguous = !trackSwitch && nextSN;
if (discontinuity) {
logger["b" /* logger */].log(this.id + ':discontinuity detected');
}
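// Condensed sketch of the flags above (hypothetical fragment values):
//   lastFrag = { cc: 3, level: 1, sn: 10 }, frag = { cc: 3, level: 1, sn: 11 }
//   discontinuity => false (same cc), trackSwitch => false (same level)
//   nextSN        => true  (11 === 10 + 1)
//   contiguous    => true  (!trackSwitch && nextSN)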
level.textGroupIds.push(id);
break;
}
}
function updatePTS(fragments, fromIdx, toIdx) {
var fragFrom = fragments[fromIdx],
fragTo = fragments[toIdx],
fragToPTS = fragTo.startPTS;
// if we know startPTS[toIdx]
if (!isNaN(fragToPTS)) {
// update fragment duration.
// this helps fix drift between the playlist-reported duration and the fragment's real duration
if (toIdx > fromIdx) {
fragFrom.duration = fragToPTS - fragFrom.start;
if (fragFrom.duration < 0) {
logger["b" /* logger */].warn('negative duration computed for frag ' + fragFrom.sn + ',level ' + fragFrom.level + ', there should be some duration drift between playlist and fragment!');
}
} else {
fragTo.duration = fragFrom.start - fragToPTS;
if (fragTo.duration < 0) {
fragTo.start = fragFrom.start + fragFrom.duration;
} else {
fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0);
}
}
}
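// Worked example (hedged, hypothetical values): with
//   fragFrom = { start: 10, duration: 6 } and fragTo = { startPTS: 15.5 }
// updatePTS(fragments, 0, 1) takes the toIdx > fromIdx branch and recomputes
//   fragFrom.duration = 15.5 - 10 = 5.5
// trimming the playlist-reported 6s to the real PTS distance; when startPTS is
// unknown, fragTo.start is instead derived from fragFrom.start + fragFrom.duration.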
function updateFragPTSDTS(details, frag, startPTS, endPTS, startDTS, endDTS) {
// update frag PTS/DTS
var maxStartPTS = startPTS;
if (!isNaN(frag.startPTS)) {
// delta PTS between audio and video
var deltaPTS = Math.abs(frag.startPTS - startPTS);
if (isNaN(frag.deltaPTS)) {
frag.deltaPTS = deltaPTS;
} else {
frag.deltaPTS = Math.max(deltaPTS, frag.deltaPTS);
}
maxStartPTS = Math.max(startPTS, frag.startPTS);
startPTS = Math.min(startPTS, frag.startPTS);
endPTS = Math.max(endPTS, frag.endPTS);
startDTS = Math.min(startDTS, frag.startDTS);
endDTS = Math.max(endDTS, frag.endDTS);
if (end < start) {
newDetails.PTSKnown = false;
return;
}
// loop through overlapping SN and update startPTS, cc, and duration if any are found
for (var i = start; i <= end; i++) {
var oldFrag = oldfragments[delta + i],
newFrag = newfragments[i];
if (newFrag && oldFrag) {
ccOffset = oldFrag.cc - newFrag.cc;
if (!isNaN(oldFrag.startPTS)) {
newFrag.start = newFrag.startPTS = oldFrag.startPTS;
newFrag.endPTS = oldFrag.endPTS;
newFrag.duration = oldFrag.duration;
newFrag.backtracked = oldFrag.backtracked;
newFrag.dropped = oldFrag.dropped;
PTSFrag = newFrag;
}
}
}
}
// try to align using programDateTime attribute (if available)
if (details.PTSKnown === false && lastLevel && lastLevel.details && lastLevel.details.fragments && lastLevel.details.fragments.length) {
// if last level sliding is 1000 and its first frag PROGRAM-DATE-TIME is 2017-08-20 1:10:00 AM
// and if new details first frag PROGRAM DATE-TIME is 2017-08-20 1:10:08 AM
// then we can deduce that playlist B sliding is 1000+8 = 1008s
var lastPDT = lastLevel.details.programDateTime;
var newPDT = details.programDateTime;
// date diff is in ms. frag.start is in seconds
var sliding = (newPDT - lastPDT) / 1000 + lastLevel.details.fragments[0].start;
if (!isNaN(sliding)) {
logger["b" /* logger */].log('adjusting PTS using programDateTime delta, sliding:' + sliding.toFixed(3));
adjustPts(sliding, details);
}
}
}
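// Numeric sketch of the comment above (hypothetical timestamps):
//   lastPDT = Date.parse('2017-08-20T01:10:00Z')
//   newPDT  = Date.parse('2017-08-20T01:10:08Z')   // 8000 ms later
//   lastLevel.details.fragments[0].start = 1000
//   sliding = (newPDT - lastPDT) / 1000 + 1000     // => 8 + 1000 = 1008 seconds
// adjustPts(sliding, details) then shifts the new playlist by that amount.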
// CONCATENATED MODULE: ./src/task-loop.js
function task_loop__classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function task_loop__possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
* @returns {number} nextPdt - The computed PDT
*/
function calculateNextPDT() {
var start = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
var bufferEnd = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
var levelDetails = arguments[2];
var pdt = 0;
if (levelDetails.programDateTime) {
var parsedDateInt = Date.parse(levelDetails.programDateTime);
if (!isNaN(parsedDateInt)) {
pdt = bufferEnd * 1000 + parsedDateInt - 1000 * start;
}
}
return pdt;
}
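// Usage sketch (hedged, hypothetical inputs): for a level whose playlist declares
//   #EXT-X-PROGRAM-DATE-TIME:2017-08-20T01:10:00.000Z
// with start = 0 and bufferEnd = 30 (seconds):
//   var pdt = calculateNextPDT(0, 30, levelDetails);
//   // pdt = 30 * 1000 + Date.parse('2017-08-20T01:10:00.000Z') - 1000 * 0,
//   // i.e. the wall-clock time 30s past the first fragment's PDT.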
/**
* Finds the first fragment whose endPDT value exceeds the given PDT.
* @param {Array<Fragment>} fragments - The array of candidate fragments
* @param {number|null} [PDTValue = null] - The PDT value which must be exceeded
this.hls.trigger(events["a" /* default */].KEY_LOADING, { frag: frag });
};
StreamController.prototype._loadFragment = function _loadFragment(frag) {
// Check if fragment is not loaded
var fragState = this.fragmentTracker.getState(frag);
this.fragCurrent = frag;
this.startFragRequested = true;
// Don't update nextLoadPosition for fragments which are not buffered
if (!isNaN(frag.sn) && !frag.bitrateTest) {
this.nextLoadPosition = frag.start + frag.duration;
}
// Allow backtracked fragments to load
if (frag.backtracked || fragState === FragmentState.NOT_LOADED || fragState === FragmentState.PARTIAL) {
frag.autoLevel = this.hls.autoLevelEnabled;
frag.bitrateTest = this.bitrateTest;
this.hls.trigger(events["a" /* default */].FRAG_LOADING, { frag: frag });
// lazy demuxer init, as this could take some time ... do it during frag loading
}
this.media = this.mediaBuffer = null;
this.loadedmetadata = false;
this.stopLoad();
};
StreamController.prototype.onMediaSeeking = function onMediaSeeking() {
var media = this.media,
currentTime = media ? media.currentTime : undefined,
config = this.config;
if (!isNaN(currentTime)) {
logger["b" /* logger */].log('media seeking to ' + currentTime.toFixed(3));
}
var mediaBuffer = this.mediaBuffer ? this.mediaBuffer : media;
var bufferInfo = BufferHelper.bufferInfo(mediaBuffer, currentTime, this.config.maxBufferHole);
if (this.state === State.FRAG_LOADING) {
var fragCurrent = this.fragCurrent;
// check if we are seeking to an unbuffered area AND if frag loading is in progress
if (bufferInfo.len === 0 && fragCurrent) {
var tolerance = config.maxFragLookUpTolerance,
this.nextLoadPosition = this.startPosition = currentTime;
}
// tick to speed up processing
this.tick();
};
StreamController.prototype.onMediaSeeked = function onMediaSeeked() {
var media = this.media,
currentTime = media ? media.currentTime : undefined;
if (!isNaN(currentTime)) {
logger["b" /* logger */].log('media seeked to ' + currentTime.toFixed(3));
}
// tick to speed up FRAGMENT_PLAYING triggering
this.tick();
};
StreamController.prototype.onMediaEnded = function onMediaEnded() {
logger["b" /* logger */].log('media ended');
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
logger["b" /* logger */].log('level ' + newLevelId + ' loaded [' + newDetails.startSN + ',' + newDetails.endSN + '],duration:' + duration);
if (newDetails.live) {
var curDetails = curLevel.details;
if (curDetails && newDetails.fragments.length > 0) {
// we already have details for that level, merge them
mergeDetails(curDetails, newDetails);
sliding = newDetails.fragments[0].start;
this.liveSyncPosition = this.computeLivePosition(sliding, curDetails);
if (newDetails.PTSKnown && !isNaN(sliding)) {
logger["b" /* logger */].log('live playlist sliding:' + sliding.toFixed(3));
} else {
logger["b" /* logger */].log('live playlist - outdated PTS, unknown sliding');
alignDiscontinuities(this.fragPrevious, lastLevel, newDetails);
}
} else {
logger["b" /* logger */].log('live playlist - first load, unknown sliding');
newDetails.PTSKnown = false;
alignDiscontinuities(this.fragPrevious, lastLevel, newDetails);
}
// override level info
curLevel.details = newDetails;
this.levelLastLoaded = newLevelId;
this.hls.trigger(events["a" /* default */].LEVEL_UPDATED, { details: newDetails, level: newLevelId });
if (this.startFragRequested === false) {
// compute start position if set to -1. use it straight away if value is defined
if (this.startPosition === -1 || this.lastCurrentTime === -1) {
// first, check if start time offset has been set in playlist, if yes, use this value
var startTimeOffset = newDetails.startTimeOffset;
if (!isNaN(startTimeOffset)) {
if (startTimeOffset < 0) {
logger["b" /* logger */].log('negative start time offset ' + startTimeOffset + ', count from end of last fragment');
startTimeOffset = sliding + duration + startTimeOffset;
}
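// e.g. (hypothetical values) sliding = 0, duration = 120 and a playlist tag
// #EXT-X-START:TIME-OFFSET=-10 give startTimeOffset = 0 + 120 + (-10) = 110,
// i.e. playback starts 10 seconds before the end of the last fragment.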
logger["b" /* logger */].log('start time offset found in playlist, adjust startPosition to ' + startTimeOffset);
this.startPosition = startTimeOffset;
} else {
// if live playlist, set start position to be fragment N-this.config.liveSyncDurationCount (usually 3)
if (newDetails.live) {
this.startPosition = this.computeLivePosition(sliding, newDetails);
StreamController.prototype.onFragParsingData = function onFragParsingData(data) {
var _this2 = this;
var fragCurrent = this.fragCurrent;
var fragNew = data.frag;
if (fragCurrent && data.id === 'main' && fragNew.sn === fragCurrent.sn && fragNew.level === fragCurrent.level && !(data.type === 'audio' && this.altAudio) && // filter out main audio if audio track is loaded through audio stream controller
this.state === State.PARSING) {
var level = this.levels[this.level],
frag = fragCurrent;
if (isNaN(data.endPTS)) {
data.endPTS = data.startPTS + fragCurrent.duration;
data.endDTS = data.startDTS + fragCurrent.duration;
}
if (data.hasAudio === true) {
frag.addElementaryStream(loader_fragment.ElementaryStreamTypes.AUDIO);
}
if (data.hasVideo === true) {
frag.addElementaryStream(loader_fragment.ElementaryStreamTypes.VIDEO);
this.clearTimer();
hls.trigger(events["a" /* default */].FRAG_LOAD_EMERGENCY_ABORTED, { frag: frag, stats: stats });
}
}
}
}
};
AbrController.prototype.onFragLoaded = function onFragLoaded(data) {
var frag = data.frag;
if (frag.type === 'main' && !isNaN(frag.sn)) {
// stop monitoring bw once frag loaded
this.clearTimer();
// store level id after successful fragment load
this.lastLoadedFragLevel = frag.level;
// reset forced auto level value so that next level will be selected
this._nextAutoLevel = -1;
// compute level average bitrate
if (this.hls.config.abrMaxWithRealBitrate) {
var level = this.hls.levels[frag.level];
}
};
AbrController.prototype.onFragBuffered = function onFragBuffered(data) {
var stats = data.stats;
var frag = data.frag;
// only update stats on first frag buffering
// if the same frag is loaded multiple times, it might be in browser cache and load quickly,
// leading to wrong bw estimation
// on bitrate test, also only update stats once (if tload = tbuffered == on FRAG_LOADED)
if (stats.aborted !== true && frag.type === 'main' && !isNaN(frag.sn) && (!frag.bitrateTest || stats.tload === stats.tbuffered)) {
// use tparsed-trequest instead of tbuffered-trequest to compute fragLoadingProcessing; the rationale is that buffer appending only happens once media is attached
// if we use config.startFragPrefetch while media is not attached yet, the fragment might be parsed before media is attached, but it will only be buffered once media is attached
// as a consequence buffering could happen really late in the process, meaning that the appending duration might appear huge ... leading to an underestimated throughput estimation
var fragLoadingProcessingMs = stats.tparsed - stats.trequest;
logger["b" /* logger */].log('latency/loading/parsing/append/kbps:' + Math.round(stats.tfirst - stats.trequest) + '/' + Math.round(stats.tload - stats.tfirst) + '/' + Math.round(stats.tparsed - stats.tload) + '/' + Math.round(stats.tbuffered - ...
this._bwEstimator.sample(fragLoadingProcessingMs, stats.loaded);
stats.bwEstimate = this._bwEstimator.getEstimate();
// if fragment has been loaded to perform a bitrate test, (hls.startLevel = -1), store bitrate test delay duration
if (frag.bitrateTest) {
this.bitrateTestDelay = fragLoadingProcessingMs / 1000;
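// Numeric illustration (hypothetical stats, all times in ms):
//   trequest = 0, tfirst = 100, tload = 400, tparsed = 450, tbuffered = 480, loaded = 500000 bytes
//   fragLoadingProcessingMs = 450 - 0 = 450
//   this._bwEstimator.sample(450, 500000) // ~8.9 Mbps (500000 * 8 bits / 0.45 s)
//   bitrateTestDelay = 450 / 1000 = 0.45  // seconds, for bitrate-test fragments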
duration = this.media.duration;
// initialise to the value that the media source is reporting
if (this._msDuration === null) {
this._msDuration = this.mediaSource.duration;
}
if (this._live === true && config.liveDurationInfinity === true) {
// Override duration to Infinity
logger["b" /* logger */].log('Media Source duration is set to Infinity');
this._msDuration = this.mediaSource.duration = Infinity;
} else if (this._levelDuration > this._msDuration && this._levelDuration > duration || duration === Infinity || isNaN(duration)) {
// levelDuration was the last value we set.
// not using mediaSource.duration as the browser may tweak this value
// only update Media Source duration if its value increases; this is to avoid
// flushing the already buffered portion when switching between quality levels
logger["b" /* logger */].log('Updating Media Source duration to ' + this._levelDuration.toFixed(3));
this._msDuration = this.mediaSource.duration = this._levelDuration;
}
};
BufferController.prototype.doFlush = function doFlush() {
logger["b" /* logger */].log('Loading key for ' + frag.sn + ' of [' + trackDetails.startSN + ' ,' + trackDetails.endSN + '],track ' + trackId);
this.state = audio_stream_controller_State.KEY_LOADING;
hls.trigger(events["a" /* default */].KEY_LOADING, { frag: frag });
} else {
logger["b" /* logger */].log('Loading ' + frag.sn + ', cc: ' + frag.cc + ' of [' + trackDetails.startSN + ' ,' + trackDetails.endSN + '],track ' + trackId + ', currentTime:' + pos + ',bufferEnd:' + bufferEnd.toFixed(3));
// only load if fragment is not loaded or if in audio switch
// we force frag loading on audio switch, as the fragment tracker might not have evicted previous frags after a quick audio switch
if (audioSwitch || this.fragmentTracker.getState(frag) === FragmentState.NOT_LOADED) {
this.fragCurrent = frag;
this.startFragRequested = true;
if (!isNaN(frag.sn)) {
this.nextLoadPosition = frag.start + frag.duration;
}
hls.trigger(events["a" /* default */].FRAG_LOADING, { frag: frag });
this.state = audio_stream_controller_State.FRAG_LOADING;
}
}
}
}
break;
newDetails.PTSKnown = false;
}
track.details = newDetails;
// compute start position
if (!this.startFragRequested) {
// compute start position if set to -1. use it straight away if value is defined
if (this.startPosition === -1) {
// first, check if start time offset has been set in playlist, if yes, use this value
var startTimeOffset = newDetails.startTimeOffset;
if (!isNaN(startTimeOffset)) {
logger["b" /* logger */].log('start time offset found in playlist, adjust startPosition to ' + startTimeOffset);
this.startPosition = startTimeOffset;
} else {
this.startPosition = 0;
}
}
this.nextLoadPosition = this.startPosition;
}
// only switch back to IDLE state if we were waiting for track to start downloading a new fragment
if (this.state === audio_stream_controller_State.WAITING_TRACK) {
AudioStreamController.prototype.onFragParsingData = function onFragParsingData(data) {
var _this2 = this;
var fragCurrent = this.fragCurrent;
var fragNew = data.frag;
if (fragCurrent && data.id === 'audio' && data.type === 'audio' && fragNew.sn === fragCurrent.sn && fragNew.level === fragCurrent.level && this.state === audio_stream_controller_State.PARSING) {
var trackId = this.trackId,
track = this.tracks[trackId],
hls = this.hls;
if (isNaN(data.endPTS)) {
data.endPTS = data.startPTS + fragCurrent.duration;
data.endDTS = data.startDTS + fragCurrent.duration;
}
fragCurrent.addElementaryStream(loader_fragment.ElementaryStreamTypes.AUDIO);
logger["b" /* logger */].log('parsed ' + data.type + ',PTS:[' + data.startPTS.toFixed(3) + ',' + data.endPTS.toFixed(3) + '],DTS:[' + data.startDTS.toFixed(3) + '/' + data.endDTS.toFixed(3) + '],nb:' + data.nb);
updateFragPTSDTS(track.details, fragCurrent, data.startPTS, data.endPTS);
var audioSwitch = this.audioSwitch,
var startsWith = function startsWith(inputString, searchString, position) {
return inputString.substr(position || 0, searchString.length) === searchString;
};
var cueString2millis = function cueString2millis(timeString) {
var ts = parseInt(timeString.substr(-3));
var secs = parseInt(timeString.substr(-6, 2));
var mins = parseInt(timeString.substr(-9, 2));
var hours = timeString.length > 9 ? parseInt(timeString.substr(0, timeString.indexOf(':'))) : 0;
if (isNaN(ts) || isNaN(secs) || isNaN(mins) || isNaN(hours)) {
return -1;
}
ts += 1000 * secs;
ts += 60 * 1000 * mins;
ts += 60 * 60 * 1000 * hours;
return ts;
};
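// Worked example: cueString2millis('00:01:23.456')
//   ts = 456, secs = 23, mins = 1, hours = parseInt('00') = 0
//   => 456 + 23 * 1000 + 1 * 60 * 1000 = 83456 ms
// A malformed input such as 'abc' produces NaN components and returns -1.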
}
var stringLength = string.length;
var searchString = String(search);
var searchLength = searchString.length;
var pos = stringLength;
if (arguments.length > 1) {
var position = arguments[1];
if (position !== undefined) {
// `ToInteger`
pos = position ? Number(position) : 0;
if (pos != pos) { // better `isNaN`
pos = 0;
}
}
}
var end = Math.min(Math.max(pos, 0), stringLength);
var start = end - searchLength;
if (start < 0) {
return false;
}
var index = -1;
// share/public_html/static/music_inc/drflac.js
var Module = (() => {
var _scriptName = import.meta.url;
return (
async function(moduleArg = {}) {
var moduleRtn;
var Module=moduleArg;var readyPromiseResolve,readyPromiseReject;var readyPromise=new Promise((resolve,reject)=>{readyPromiseResolve=resolve;readyPromiseReject=reject});["_network_drflac_open_mem","_network_drflac_read_pcm_frames_f32_mem","_network_dr...
return moduleRtn;
}
);
})();
export default Module;
// share/public_html/static/music_worklet_inprogress/decoder/bin/_mhfscl.js
var Module = (() => {
var _scriptName = import.meta.url;
return (
async function(moduleArg = {}) {
var moduleRtn;
var Module=moduleArg;var readyPromiseResolve,readyPromiseReject;var readyPromise=new Promise((resolve,reject)=>{readyPromiseResolve=resolve;readyPromiseReject=reject});var ENVIRONMENT_IS_WEB=typeof window=="object";var ENVIRONMENT_IS_WORKER=typeof im...
return moduleRtn;
}
);
})();
export default Module;