Transaction

6ff5ac56482cb8f3ce5b6c0db2774eea1f62bbb4ac017003a33048f4a959d64f
( - )
254,172
2019-07-09 10:20:30
1
95,275 B

2 Outputs

Total Output:
  • j"1ChDHzdd1H4wSjgGMHyndZm6qxEDGjqpJLNsedia.play(); } } }; /** * try to switch ASAP without breaking video playback: * in order to ensure smooth but quick level switching, * we need to find the next flushable buffer range * we should take into account new segment fetch time */ StreamController.prototype.nextLevelSwitch = function () { var media = this.media; // ensure that media is defined and that metadata are available (to retrieve currentTime) if (media && media.readyState) { var fetchdelay = void 0, fragPlayingCurrent = void 0, nextBufferedFrag = void 0; fragPlayingCurrent = this.getBufferedFrag(media.currentTime); if (fragPlayingCurrent && fragPlayingCurrent.startPTS > 1) { // flush buffer preceding current fragment (flush until current fragment start offset) // minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ... this.flushMainBuffer(0, fragPlayingCurrent.startPTS - 1); } if (!media.paused) { // add a safety delay of 1s var nextLevelId = this.hls.nextLoadLevel, nextLevel = this.levels[nextLevelId], fragLastKbps = this.fragLastKbps; if (fragLastKbps && this.fragCurrent) { fetchdelay = this.fragCurrent.duration * nextLevel.bitrate / (1000 * fragLastKbps) + 1; } else { fetchdelay = 0; } } else { fetchdelay = 0; } // logger.log('fetchdelay:'+fetchdelay); // find buffer range that will be reached once new fragment will be fetched nextBufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay); if (nextBufferedFrag) { // we can flush buffer range following this one without stalling playback nextBufferedFrag = this.followingBufferedFrag(nextBufferedFrag); if (nextBufferedFrag) { // if we are here, we can also cancel any loading/demuxing in progress, as they are useless var fragCurrent = this.fragCurrent; if (fragCurrent && fragCurrent.loader) { fragCurrent.loader.abort(); } this.fragCurrent = null; // start flush position is the start PTS of next buffered frag. 
// we use frag.naxStartPTS which is max(audio startPTS, video startPTS). // in case there is a small PTS Delta between audio and video, using maxStartPTS avoids flushing last samples from current fragment this.flushMainBuffer(nextBufferedFrag.maxStartPTS, Number.POSITIVE_INFINITY); } } } }; StreamController.prototype.flushMainBuffer = function (startOffset, endOffset) { this.state = base_stream_controller_1.State.BUFFER_FLUSHING; var flushScope = { startOffset: startOffset, endOffset: endOffset }; // if alternate audio tracks are used, only flush video, otherwise flush everything if (this.altAudio) { flushScope.type = 'video'; } this.hls.trigger(events_1.default.BUFFER_FLUSHING, flushScope); }; StreamController.prototype.onMediaAttached = function (data) { var media = this.media = this.mediaBuffer = data.media; this.onvseeking = this.onMediaSeeking.bind(this); this.onvseeked = this.onMediaSeeked.bind(this); this.onvended = this.onMediaEnded.bind(this); media.addEventListener('seeking', this.onvseeking); media.addEventListener('seeked', this.onvseeked); media.addEventListener('ended', this.onvended); var config = this.config; if (this.levels && config.autoStartLoad) { this.hls.startLoad(config.startPosition); } this.gapController = new gap_controller_1.default(config, media, this.fragmentTracker, this.hls); }; StreamController.prototype.onMediaDetaching = function () { var media = this.media; if (media && media.ended) { logger_1.logger.log('MSE detaching and video ended, reset startPosition'); this.startPosition = this.lastCurrentTime = 0; } // reset fragment backtracked flag var levels = this.levels; if (levels) { levels.forEach(function (level) { if (level.details) { level.details.fragments.forEach(function (fragment) { fragment.backtracked = undefined; }); } }); } // remove video listeners if (media) { media.removeEventListener('seeking', this.onvseeking); media.removeEventListener('seeked', this.onvseeked); media.removeEventListener('ended', this.onvended); 
this.onvseeking = this.onvseeked = this.onvended = null; } this.media = this.mediaBuffer = null; this.loadedmetadata = false; this.stopLoad(); }; StreamController.prototype.onMediaSeeked = function () { var media = this.media, currentTime = media ? media.currentTime : undefined; if (Number.isFinite(currentTime)) { logger_1.logger.log("media seeked to " + currentTime.toFixed(3)); } // tick to speed up FRAGMENT_PLAYING triggering this.tick(); }; StreamController.prototype.onManifestLoading = function () { // reset buffer on manifest loading logger_1.logger.log('trigger BUFFER_RESET'); this.hls.trigger(events_1.default.BUFFER_RESET); this.fragmentTracker.removeAllFragments(); this.stalled = false; this.startPosition = this.lastCurrentTime = 0; }; StreamController.prototype.onManifestParsed = function (data) { var aac = false, heaac = false, codec; data.levels.forEach(function (level) { // detect if we have different kind of audio codecs used amongst playlists codec = level.audioCodec; if (codec) { if (codec.indexOf('mp4a.40.2') !== -1) { aac = true; } if (codec.indexOf('mp4a.40.5') !== -1) { heaac = true; } } }); this.audioCodecSwitch = (aac && heaac); if (this.audioCodecSwitch) { logger_1.logger.log('both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC'); } this.levels = data.levels; this.startFragRequested = false; var config = this.config; if (config.autoStartLoad || this.forceStartLoad) { this.hls.startLoad(config.startPosition); } }; StreamController.prototype.onLevelLoaded = function (data) { var newDetails = data.details; var newLevelId = data.level; var lastLevel = this.levels[this.levelLastLoaded]; var curLevel = this.levels[newLevelId]; var duration = newDetails.totalduration; var sliding = 0; logger_1.logger.log("level " + newLevelId + " loaded [" + newDetails.startSN + "," + newDetails.endSN + "],duration:" + duration); if (newDetails.live) { var curDetails = curLevel.details; if (curDetails && newDetails.fragments.length > 0) { // we 
already have details for that level, merge them LevelHelper.mergeDetails(curDetails, newDetails); sliding = newDetails.fragments[0].start; this.liveSyncPosition = this.computeLivePosition(sliding, curDetails); if (newDetails.PTSKnown && Number.isFinite(sliding)) { logger_1.logger.log("live playlist sliding:" + sliding.toFixed(3)); } else { logger_1.logger.log('live playlist - outdated PTS, unknown sliding'); discontinuities_1.alignStream(this.fragPrevious, lastLevel, newDetails); } } else { logger_1.logger.log('live playlist - first load, unknown sliding'); newDetails.PTSKnown = false; discontinuities_1.alignStream(this.fragPrevious, lastLevel, newDetails); } } else { newDetails.PTSKnown = false; } // override level info curLevel.details = newDetails; this.levelLastLoaded = newLevelId; this.hls.trigger(events_1.default.LEVEL_UPDATED, { details: newDetails, level: newLevelId }); if (this.startFragRequested === false) { // compute start position if set to -1. use it straight away if value is defined if (this.startPosition === -1 || this.lastCurrentTime === -1) { // first, check if start time offset has been set in playlist, if yes, use this value var startTimeOffset = newDetails.startTimeOffset; if (Number.isFinite(startTimeOffset)) { if (startTimeOffset < 0) { logger_1.logger.log("negative start time offset " + startTimeOffset + ", count from end of last fragment"); startTimeOffset = sliding + duration + startTimeOffset; } logger_1.logger.log("start time offset found in playlist, adjust startPosition to " + startTimeOffset); this.startPosition = startTimeOffset; } else { // if live playlist, set start position to be fragment N-this.config.liveSyncDurationCount (usually 3) if (newDetails.live) { this.startPosition = this.computeLivePosition(sliding, newDetails); logger_1.logger.log("configure startPosition to " + this.startPosition); } else { this.startPosition = 0; } } this.lastCurrentTime = this.startPosition; } this.nextLoadPosition = this.startPosition; } // only 
switch batck to IDLE state if we were waiting for level to start downloading a new fragment if (this.state === base_stream_controller_1.State.WAITING_LEVEL) { this.state = base_stream_controller_1.State.IDLE; } // trigger handler right now this.tick(); }; StreamController.prototype.onKeyLoaded = function () { if (this.state === base_stream_controller_1.State.KEY_LOADING) { this.state = base_stream_controller_1.State.IDLE; this.tick(); } }; StreamController.prototype.onFragLoaded = function (data) { var _a = this, fragCurrent = _a.fragCurrent, hls = _a.hls, levels = _a.levels, media = _a.media; var fragLoaded = data.frag; if (this.state === base_stream_controller_1.State.FRAG_LOADING && fragCurrent && fragLoaded.type === 'main' && fragLoaded.level === fragCurrent.level && fragLoaded.sn === fragCurrent.sn) { var stats = data.stats; var currentLevel = levels[fragCurrent.level]; var details = currentLevel.details; // reset frag bitrate test in any case after frag loaded event // if this frag was loaded to perform a bitrate test AND if hls.nextLoadLevel is greater than 0 // then this means that we should be able to load a fragment at a higher quality level this.bitrateTest = false; this.stats = stats; logger_1.logger.log("Loaded " + fragCurrent.sn + " of [" + details.startSN + " ," + details.endSN + "],level " + fragCurrent.level); if (fragLoaded.bitrateTest && hls.nextLoadLevel) { // switch back to IDLE state ... 
we just loaded a fragment to determine adequate start bitrate and initialize autoswitch algo this.state = base_stream_controller_1.State.IDLE; this.startFragRequested = false; stats.tparsed = stats.tbuffered = window.performance.now(); hls.trigger(events_1.default.FRAG_BUFFERED, { stats: stats, frag: fragCurrent, id: 'main' }); this.tick(); } else if (fragLoaded.sn === 'initSegment') { this.state = base_stream_controller_1.State.IDLE; stats.tparsed = stats.tbuffered = window.performance.now(); details.initSegment.data = data.payload; hls.trigger(events_1.default.FRAG_BUFFERED, { stats: stats, frag: fragCurrent, id: 'main' }); this.tick(); } else { logger_1.logger.log("Parsing " + fragCurrent.sn + " of [" + details.startSN + " ," + details.endSN + "],level " + fragCurrent.level + ", cc " + fragCurrent.cc); this.state = base_stream_controller_1.State.PARSING; this.pendingBuffering = true; this.appended = false; // Bitrate test frags are not usually buffered so the fragment tracker ignores them. If Hls.js decides to buffer // it (and therefore ends up at this line), then the fragment tracker needs to be manually informed. if (fragLoaded.bitrateTest) { fragLoaded.bitrateTest = false; this.fragmentTracker.onFragLoaded({ frag: fragLoaded }); } // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live) and if media is not seeking (this is to overcome potential timestamp drifts between playlists and fragments) var accurateTimeOffset = !(media && media.seeking) && (details.PTSKnown || !details.live); var initSegmentData = details.initSegment ? 
details.initSegment.data : []; var audioCodec = this._getAudioCodec(currentLevel); // transmux the MPEG-TS data to ISO-BMFF segments var demuxer = this.demuxer = this.demuxer || new demuxer_1.default(this.hls, 'main'); demuxer.push(data.payload, initSegmentData, audioCodec, currentLevel.videoCodec, fragCurrent, details.totalduration, accurateTimeOffset); } } this.fragLoadError = 0; }; StreamController.prototype.onFragParsingInitSegment = function (data) { var fragCurrent = this.fragCurrent; var fragNew = data.frag; if (fragCurrent && data.id === 'main' && fragNew.sn === fragCurrent.sn && fragNew.level === fragCurrent.level && this.state === base_stream_controller_1.State.PARSING) { var tracks = data.tracks, trackName = void 0, track = void 0; // if audio track is expected to come from audio stream controller, discard any coming from main if (tracks.audio && this.altAudio) { delete tracks.audio; } // include levelCodec in audio and video tracks track = tracks.audio; if (track) { var audioCodec = this.levels[this.level].audioCodec, ua = navigator.userAgent.toLowerCase(); if (audioCodec && this.audioCodecSwap) { logger_1.logger.log('swapping playlist audio codec'); if (audioCodec.indexOf('mp4a.40.5') !== -1) { audioCodec = 'mp4a.40.2'; } else { audioCodec = 'mp4a.40.5'; } } // in case AAC and HE-AAC audio codecs are signalled in manifest // force HE-AAC , as it seems that most browsers prefers that way, // except for mono streams OR on FF // these conditions might need to be reviewed ... 
if (this.audioCodecSwitch) { // don't force HE-AAC if mono stream if (track.metadata.channelCount !== 1 && // don't force HE-AAC if firefox ua.indexOf('firefox') === -1) { audioCodec = 'mp4a.40.5'; } } // HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise if (ua.indexOf('android') !== -1 && track.container !== 'audio/mpeg') { // Exclude mpeg audio audioCodec = 'mp4a.40.2'; logger_1.logger.log("Android: force audio codec to " + audioCodec); } track.levelCodec = audioCodec; track.id = data.id; } track = tracks.video; if (track) { track.levelCodec = this.levels[this.level].videoCodec; track.id = data.id; } this.hls.trigger(events_1.default.BUFFER_CODECS, tracks); // loop through tracks that are going to be provided to bufferController for (trackName in tracks) { track = tracks[trackName]; logger_1.logger.log("main track:" + trackName + ",container:" + track.container + ",codecs[level/parsed]=[" + track.levelCodec + "/" + track.codec + "]"); var initSegment = track.initSegment; if (initSegment) { this.appended = true; // arm pending Buffering flag before appending a segment this.pendingBuffering = true; this.hls.trigger(events_1.default.BUFFER_APPENDING, { type: trackName, data: initSegment, parent: 'main', content: 'initSegment' }); } } // trigger handler right now this.tick(); } }; StreamController.prototype.onFragParsingData = function (data) { var _this = this; var fragCurrent = this.fragCurrent; var fragNew = data.frag; if (fragCurrent && data.id === 'main' && fragNew.sn === fragCurrent.sn && fragNew.level === fragCurrent.level && !(data.type === 'audio' && this.altAudio) && // filter out main audio if audio track is loaded through audio stream controller this.state === base_stream_controller_1.State.PARSING) { var level = this.levels[this.level], frag = fragCurrent; if (!Number.isFinite(data.endPTS)) { data.endPTS = data.startPTS + fragCurrent.duration; data.endDTS = data.startDTS + fragCurrent.duration; } if 
(data.hasAudio === true) { frag.addElementaryStream(fragment_1.default.ElementaryStreamTypes.AUDIO); } if (data.hasVideo === true) { frag.addElementaryStream(fragment_1.default.ElementaryStreamTypes.VIDEO); } logger_1.logger.log("Parsed " + data.type + ",PTS:[" + data.startPTS.toFixed(3) + "," + data.endPTS.toFixed(3) + "],DTS:[" + data.startDTS.toFixed(3) + "/" + data.endDTS.toFixed(3) + "],nb:" + data.nb + ",dropped:" + (data.dropped || 0)); // Detect gaps in a fragment and try to fix it by finding a keyframe in the previous fragment (see _findFragments) if (data.type === 'video') { frag.dropped = data.dropped; if (frag.dropped) { if (!frag.backtracked) { var levelDetails = level.details; if (levelDetails && frag.sn === levelDetails.startSN) { logger_1.logger.warn('missing video frame(s) on first frag, appending with gap', frag.sn); } else { logger_1.logger.warn('missing video frame(s), backtracking fragment', frag.sn); // Return back to the IDLE state without appending to buffer // Causes findFragments to backtrack a segment and find the keyframe // Audio fragments arriving before video sets the nextLoadPosition, causing _findFragments to skip the backtracked fragment this.fragmentTracker.removeFragment(frag); frag.backtracked = true; this.nextLoadPosition = data.startPTS; this.state = base_stream_controller_1.State.IDLE; this.fragPrevious = frag; this.tick(); return; } } else { logger_1.logger.warn('Already backtracked on this fragment, appending with the gap', frag.sn); } } else { // Only reset the backtracked flag if we've loaded the frag without any dropped frames frag.backtracked = false; } } var drift = LevelHelper.updateFragPTSDTS(level.details, frag, data.startPTS, data.endPTS, data.startDTS, data.endDTS), hls_2 = this.hls; hls_2.trigger(events_1.default.LEVEL_PTS_UPDATED, { details: level.details, level: this.level, drift: drift, type: data.type, start: data.startPTS, end: data.endPTS }); // has remuxer dropped video frames located before first keyframe 
? [data.data1, data.data2].forEach(function (buffer) { // only append in PARSING state (rationale is that an appending error could happen synchronously on first segment appending) // in that case it is useless to append following segments if (buffer && buffer.length && _this.state === base_stream_controller_1.State.PARSING) { _this.appended = true; // arm pending Buffering flag before appending a segment _this.pendingBuffering = true; hls_2.trigger(events_1.default.BUFFER_APPENDING, { type: data.type, data: buffer, parent: 'main', content: 'data' }); } }); // trigger handler right now this.tick(); } }; StreamController.prototype.onFragParsed = function (data) { var fragCurrent = this.fragCurrent; var fragNew = data.frag; if (fragCurrent && data.id === 'main' && fragNew.sn === fragCurrent.sn && fragNew.level === fragCurrent.level && this.state === base_stream_controller_1.State.PARSING) { this.stats.tparsed = window.performance.now(); this.state = base_stream_controller_1.State.PARSED; this._checkAppendedParsed(); } }; StreamController.prototype.onAudioTrackSwitching = function (data) { // if any URL found on new audio track, it is an alternate audio track var altAudio = !!data.url, trackId = data.id; // if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered // don't do anything if we switch to alt audio: audio stream controller is handling it. 
// we will just have to change buffer scheduling on audioTrackSwitched if (!altAudio) { if (this.mediaBuffer !== this.media) { logger_1.logger.log('switching on main audio, use media.buffered to schedule main fragment loading'); this.mediaBuffer = this.media; var fragCurrent = this.fragCurrent; // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch if (fragCurrent.loader) { logger_1.logger.log('switching to main audio track, cancel main fragment load'); fragCurrent.loader.abort(); } this.fragCurrent = null; this.fragPrevious = null; // destroy demuxer to force init segment generation (following audio switch) if (this.demuxer) { this.demuxer.destroy(); this.demuxer = null; } // switch to IDLE state to load new fragment this.state = base_stream_controller_1.State.IDLE; } var hls_3 = this.hls; // switching to main audio, flush all audio and trigger track switched hls_3.trigger(events_1.default.BUFFER_FLUSHING, { startOffset: 0, endOffset: Number.POSITIVE_INFINITY, type: 'audio' }); hls_3.trigger(events_1.default.AUDIO_TRACK_SWITCHED, { id: trackId }); this.altAudio = false; } }; StreamController.prototype.onAudioTrackSwitched = function (data) { var trackId = data.id, altAudio = !!this.hls.audioTracks[trackId].url; if (altAudio) { var videoBuffer = this.videoBuffer; // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered if (videoBuffer && this.mediaBuffer !== videoBuffer) { logger_1.logger.log('switching on alternate audio, use video.buffered to schedule main fragment loading'); this.mediaBuffer = videoBuffer; } } this.altAudio = altAudio; this.tick(); }; StreamController.prototype.onBufferCreated = function (data) { var tracks = data.tracks, mediaTrack, name, alternate = false; for (var type in tracks) { var track = tracks[type]; if (track.id === 'main') { name = type; mediaTrack = track; // keep video source buffer reference if (type === 'video') { this.videoBuffer 
= tracks[type].buffer; } } else { alternate = true; } } if (alternate && mediaTrack) { logger_1.logger.log("alternate track found, use " + name + ".buffered to schedule main fragment loading"); this.mediaBuffer = mediaTrack.buffer; } else { this.mediaBuffer = this.media; } }; StreamController.prototype.onBufferAppended = function (data) { if (data.parent === 'main') { var state = this.state; if (state === base_stream_controller_1.State.PARSING || state === base_stream_controller_1.State.PARSED) { // check if all buffers have been appended this.pendingBuffering = (data.pending > 0); this._checkAppendedParsed(); } } }; StreamController.prototype._checkAppendedParsed = function () { // trigger handler right now if (this.state === base_stream_controller_1.State.PARSED && (!this.appended || !this.pendingBuffering)) { var frag = this.fragCurrent; if (frag) { var media = this.mediaBuffer ? this.mediaBuffer : this.media; logger_1.logger.log("main buffered : " + time_ranges_1.default.toString(media.buffered)); this.fragPrevious = frag; var stats = this.stats; stats.tbuffered = window.performance.now(); // we should get rid of this.fragLastKbps this.fragLastKbps = Math.round(8 * stats.total / (stats.tbuffered - stats.tfirst)); this.hls.trigger(events_1.default.FRAG_BUFFERED, { stats: stats, frag: frag, id: 'main' }); this.state = base_stream_controller_1.State.IDLE; } this.tick(); } }; StreamController.prototype.onError = function (data) { var frag = data.frag || this.fragCurrent; // don't handle frag error not related to main fragment if (frag && frag.type !== 'main') { return; } // 0.5 : tolerance needed as some browsers stalls playback before reaching buffered end var mediaBuffered = !!this.media && buffer_helper_1.BufferHelper.isBuffered(this.media, this.media.currentTime) && buffer_helper_1.BufferHelper.isBuffered(this.media, this.media.currentTime + 0.5); switch (data.details) { case errors_1.ErrorDetails.FRAG_LOAD_ERROR: case errors_1.ErrorDetails.FRAG_LOAD_TIMEOUT: 
case errors_1.ErrorDetails.KEY_LOAD_ERROR:
case errors_1.ErrorDetails.KEY_LOAD_TIMEOUT:
    if (!data.fatal) {
        // keep retrying until the limit will be reached
        if ((this.fragLoadError + 1) <= this.config.fragLoadingMaxRetry) {
            // exponential backoff capped to config.fragLoadingMaxRetryTimeout
            var delay = Math.min(Math.pow(2, this.fragLoadError) * this.config.fragLoadingRetryDelay, this.config.fragLoadingMaxRetryTimeout);
            logger_1.logger.warn("mediaController: frag loading failed, retry in " + delay + " ms");
            this.retryDate = window.performance.now() + delay;
            // retry loading state
            // if loadedmetadata is not set, it means that we are emergency switch down on first frag
            // in that case, reset startFragRequested flag
            if (!this.loadedmetadata) {
                this.startFragRequested = false;
                this.nextLoadPosition = this.startPosition;
            }
            this.fragLoadError++;
            this.state = base_stream_controller_1.State.FRAG_LOADING_WAITING_RETRY;
        }
        else {
            logger_1.logger.error("mediaController: " + data.details + " reaches max retry, redispatch as fatal ...");
            // switch error to fatal
            data.fatal = true;
            this.state = base_stream_controller_1.State.ERROR;
        }
    }
    break;
case errors_1.ErrorDetails.LEVEL_LOAD_ERROR:
case errors_1.ErrorDetails.LEVEL_LOAD_TIMEOUT:
    if (this.state !== base_stream_controller_1.State.ERROR) {
        if (data.fatal) {
            // if fatal error, stop processing
            this.state = base_stream_controller_1.State.ERROR;
            logger_1.logger.warn("streamController: " + data.details + ",switch to " + this.state + " state ...");
        }
        else {
            // in case of non fatal error while loading level, if level controller is not retrying to load level , switch back to IDLE
            if (!data.levelRetry && this.state === base_stream_controller_1.State.WAITING_LEVEL) {
                this.state = base_stream_controller_1.State.IDLE;
            }
        }
    }
    break;
case errors_1.ErrorDetails.BUFFER_FULL_ERROR:
    // if in appending state
    if (data.parent === 'main' && (this.state === base_stream_controller_1.State.PARSING || this.state === base_stream_controller_1.State.PARSED)) {
        // NOTE(review): the declaration of 'mediaBuffered' falls in a portion of
        // this dump that is missing (the explorer shows partial data). Presumably
        // it records whether media.currentTime lies inside a buffered range —
        // confirm against the full bundle before relying on it.
        // reduce max buf len if current position is buffered
        if (mediaBuffered) {
            this._reduceMaxBufferLength(this.config.maxBufferLength);
            this.state = base_stream_controller_1.State.IDLE;
        }
        else {
            // current position is not buffered, but browser is still complaining about buffer full error
            // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
            // in that case flush the whole buffer to recover
            logger_1.logger.warn('buffer full error also media.currentTime is not buffered, flush everything');
            this.fragCurrent = null;
            // flush everything
            this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
        }
    }
    break;
default:
    break;
    }
};
/**
 * Halves config.maxMaxBufferLength when it is at least minLength, to avoid
 * repeated buffer-full / flush loops.
 * @param {number} minLength - lower bound below which no reduction happens
 * @returns {boolean} true if maxMaxBufferLength was reduced
 */
StreamController.prototype._reduceMaxBufferLength = function (minLength) {
    var config = this.config;
    if (config.maxMaxBufferLength >= minLength) {
        // reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
        config.maxMaxBufferLength /= 2;
        logger_1.logger.warn("main:reduce max buffer length to " + config.maxMaxBufferLength + "s");
        return true;
    }
    return false;
};
/**
 * Checks the health of the buffer and attempts to resolve playback stalls.
 * @private
 */
StreamController.prototype._checkBuffer = function () {
    var media = this.media;
    if (!media || media.readyState === 0) {
        // Exit early if we don't have media or if the media hasn't bufferd anything yet (readyState 0)
        return;
    }
    // prefer the alt-audio aware mediaBuffer (when set) over the media element
    var mediaBuffer = this.mediaBuffer ? this.mediaBuffer : media;
    var buffered = mediaBuffer.buffered;
    if (!this.loadedmetadata && buffered.length) {
        // first range appeared: seek to the configured start position once
        this.loadedmetadata = true;
        this._seekToStartPos();
    }
    else if (this.immediateSwitch) {
        this.immediateLevelSwitchEnd();
    }
    else {
        // steady state: delegate stall detection to the gap controller
        this.gapController.poll(this.lastCurrentTime, buffered);
    }
};
// Resets load bookkeeping after an emergency fragment-load abort and re-ticks.
StreamController.prototype.onFragLoadEmergencyAborted = function () {
    this.state = base_stream_controller_1.State.IDLE;
    // if loadedmetadata is not set, it means that we are emergency switch down on first frag
    // in that case, reset startFragRequested flag
    if (!this.loadedmetadata) {
        this.startFragRequested = false;
        this.nextLoadPosition = this.startPosition;
    }
    this.tick();
};
StreamController.prototype.onBufferFlushed = function () {
    /* after successful buffer flushing, filter flushed fragments from bufferedFrags use mediaBuffered instead of media (so that we will check against video.buffered ranges in case of alt audio track) */
    var media = this.mediaBuffer ? this.mediaBuffer : this.media;
    if (media) {
        // filter fragments potentially evicted from buffer. this is to avoid memleak on live streams
        this.fragmentTracker.detectEvictedFragments(fragment_1.default.ElementaryStreamTypes.VIDEO, media.buffered);
    }
    // move to IDLE once flush complete. this should trigger new fragment loading
    this.state = base_stream_controller_1.State.IDLE;
    // reset reference to frag
    this.fragPrevious = null;
};
// Toggles the audio-codec swap flag consumed by _getAudioCodec.
StreamController.prototype.swapAudioCodec = function () {
    this.audioCodecSwap = !this.audioCodecSwap;
};
/**
 * Computes the live-edge start position: playlist sliding offset plus total
 * duration minus the target latency (liveSyncDuration, or
 * liveSyncDurationCount * targetduration when liveSyncDuration is unset).
 * @param {number} sliding - playlist sliding start offset
 * @param {Object} levelDetails - level playlist details (totalduration, targetduration)
 * @returns {number} live playback start position in seconds
 */
StreamController.prototype.computeLivePosition = function (sliding, levelDetails) {
    var targetLatency = this.config.liveSyncDuration !== undefined ? this.config.liveSyncDuration : this.config.liveSyncDurationCount * levelDetails.targetduration;
    return sliding + Math.max(0, levelDetails.totalduration - targetLatency);
};
/**
 * Seeks to the set startPosition if not equal to the mediaElement's current time.
 * @private
 */
StreamController.prototype._seekToStartPos = function () {
    var media = this.media;
    var currentTime = media.currentTime;
    // only adjust currentTime if different from startPosition or if startPosition not buffered
    // at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
    var startPosition = media.seeking ? currentTime : this.startPosition;
    // if currentTime not matching with expected startPosition or startPosition not buffered but close to first buffered
    if (currentTime !== startPosition) {
        // if startPosition not buffered, let's seek to buffered.start(0)
        logger_1.logger.log("target start position not buffered, seek to buffered.start(0) " + startPosition + " from current time " + currentTime + " ");
        media.currentTime = startPosition;
    }
};
// Returns the audio codec to request for the given level, honouring the
// defaultAudioCodec override and the mp4a.40.5 <-> mp4a.40.2 swap flag.
StreamController.prototype._getAudioCodec = function (currentLevel) {
    var audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
    if (this.audioCodecSwap) {
        logger_1.logger.log('swapping playlist audio codec');
        if (audioCodec) {
            if (audioCodec.indexOf('mp4a.40.5') !== -1) {
                audioCodec = 'mp4a.40.2';
            }
            else {
                audioCodec = 'mp4a.40.5';
            }
        }
    }
    return audioCodec;
};
// Plain accessor pair over the _liveSyncPosition backing field.
Object.defineProperty(StreamController.prototype, "liveSyncPosition", {
    get: function () {
        return this._liveSyncPosition;
    },
    set: function (value) {
        this._liveSyncPosition = value;
    },
    enumerable: true,
    configurable: true
});
return StreamController;
}(base_stream_controller_1.default));
exports.default = StreamController;
/* WEBPACK VAR INJECTION */}.call(this, __webpack_require__(/*! ./src/polyfills/number */ "./src/polyfills/number.js")["Number"]))

/***/ }),

/***/ "./src/controller/subtitle-stream-controller.js":
/*!******************************************************!*\
  !*** ./src/controller/subtitle-stream-controller.js ***!
  \******************************************************/
/*! no static exports found */
/*!
ModuleConcatenation bailout: Module is not an ECMAScript module */
/***/ (function(module, exports, __webpack_require__) {

"use strict";
/**
 * @class SubtitleStreamController
 */
// TypeScript-emitted ES5 inheritance helper: copies statics onto the subclass
// and chains prototypes (setPrototypeOf / __proto__ / own-property copy fallbacks).
var __extends = (this && this.__extends) || (function () {
    var extendStatics = Object.setPrototypeOf ||
        ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
        function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
    return function (d, b) {
        extendStatics(d, b);
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
// Bundled module dependencies (webpack require stubs for sibling hls.js modules).
var events_1 = __webpack_require__(/*! ../events */ "./src/events.js");
var logger_1 = __webpack_require__(/*! ../utils/logger */ "./src/utils/logger.js");
var decrypter_1 = __webpack_require__(/*! ../crypt/decrypter */ "./src/crypt/decrypter.js");
var buffer_helper_1 = __webpack_require__(/*! ../utils/buffer-helper */ "./src/utils/buffer-helper.js");
var fragment_finders_1 = __webpack_require__(/*! ./fragment-finders */ "./src/controller/fragment-finders.js");
var fragment_tracker_1 = __webpack_require__(/*! ./fragment-tracker */ "./src/controller/fragment-tracker.js");
var base_stream_controller_1 = __webpack_require__(/*! ./base-stream-controller */ "./src/controller/base-stream-controller.js");
var level_helper_1 = __webpack_require__(/*! ./level-helper */ "./src/controller/level-helper.js");
var performance = window.performance;
var TICK_INTERVAL = 500; // how often to tick in ms
var SubtitleStreamController = /** @class */ (function (_super) {
    __extends(SubtitleStreamController, _super);
    // Subscribes to the hls event bus (media attach/detach, errors, key/frag
    // loads, subtitle track events, level updates) and initialises per-track
    // buffered-range bookkeeping plus the AES-128 decrypter.
    function SubtitleStreamController(hls, fragmentTracker) {
        var _this = _super.call(this, hls, events_1.default.MEDIA_ATTACHED, events_1.default.MEDIA_DETACHING, events_1.default.ERROR, events_1.default.KEY_LOADED, events_1.default.FRAG_LOADED, events_1.default.SUBTITLE_TRACKS_UPDATED, events_1.default.SUBTITLE_TRACK_SWITCH, events_1.default.SUBTITLE_TRACK_LOADED, events_1.default.SUBTITLE_FRAG_PROCESSED, events_1.default.LEVEL_UPDATED) || this;
        _this.fragmentTracker = fragmentTracker;
        _this.config = hls.config;
        _this.state = base_stream_controller_1.State.STOPPED;
        _this.tracks = [];
        _this.tracksBuffered = [];
        _this.currentTrackId = -1;
        _this.decrypter = new decrypter_1.default(hls, hls.config);
        // lastAVStart stores the time in seconds for the start time of a level load
        _this.lastAVStart = 0;
        // bound once so the same reference can be removed in onMediaDetaching
        _this._onMediaSeeking = _this.onMediaSeeking.bind(_this);
        return _this;
    }
    // Records the time range covered by a processed subtitle fragment into the
    // current track's synthetic buffered list, extending an overlapping range
    // when one exists; returns to IDLE so the next tick can schedule a load.
    SubtitleStreamController.prototype.onSubtitleFragProcessed = function (data) {
        var frag = data.frag, success = data.success;
        this.fragPrevious = frag;
        this.state = base_stream_controller_1.State.IDLE;
        if (!success) {
            return;
        }
        var buffered = this.tracksBuffered[this.currentTrackId];
        if (!buffered) {
            return;
        }
        // Create/update a buffered array matching the interface used by BufferHelper.bufferedInfo
        // so we can re-use the logic used to detect how much have been buffered
        var timeRange;
        var fragStart = frag.start;
        for (var i = 0; i < buffered.length; i++) {
            if (fragStart >= buffered[i].start && fragStart <= buffered[i].end) {
                timeRange = buffered[i];
                break;
            }
        }
        var fragEnd = frag.start + frag.duration;
        if (timeRange) {
            timeRange.end = fragEnd;
        }
        else {
            timeRange = { start: fragStart, end: fragEnd };
            buffered.push(timeRange);
        }
    };
// Handles MEDIA_ATTACHED: keep a reference to the media element, hook the
// 'seeking' listener (bound once in the constructor) and move to IDLE so the
// tick loop can start scheduling subtitle fragment loads.
SubtitleStreamController.prototype.onMediaAttached = function (_a) {
    var media = _a.media;
    this.media = media;
    media.addEventListener('seeking', this._onMediaSeeking);
    this.state = base_stream_controller_1.State.IDLE;
};
// Handles MEDIA_DETACHING: unhook the 'seeking' listener and stop.
// FIX: guard the media reference — a detach without a prior attach (or a
// double detach) would otherwise throw a TypeError on a null media element.
SubtitleStreamController.prototype.onMediaDetaching = function () {
    if (this.media) {
        this.media.removeEventListener('seeking', this._onMediaSeeking);
        this.media = null;
    }
    this.state = base_stream_controller_1.State.STOPPED;
};
// If something goes wrong, proceed to next frag, if we were processing one.
SubtitleStreamController.prototype.onError = function (data) {
    var frag = data.frag;
    // don't handle error not related to subtitle fragment
    if (!frag || frag.type !== 'subtitle') {
        return;
    }
    this.state = base_stream_controller_1.State.IDLE;
};
// Got all new subtitle tracks: reset the per-track buffered-range bookkeeping.
SubtitleStreamController.prototype.onSubtitleTracksUpdated = function (data) {
    var _this = this;
    logger_1.logger.log('subtitle tracks updated');
    this.tracksBuffered = [];
    this.tracks = data.subtitleTracks;
    this.tracks.forEach(function (track) {
        _this.tracksBuffered[track.id] = [];
    });
};
// A subtitle track was selected: stop ticking when there is no usable track,
// otherwise (re)start the tick interval once the track has playlist details.
SubtitleStreamController.prototype.onSubtitleTrackSwitch = function (data) {
    this.currentTrackId = data.id;
    if (!this.tracks || this.currentTrackId === -1) {
        this.clearInterval();
        return;
    }
    // Check if track has the necessary details to load fragments
    var currentTrack = this.tracks[this.currentTrackId];
    if (currentTrack && currentTrack.details) {
        this.setInterval(TICK_INTERVAL);
    }
};
// Got a new set of subtitle fragments.
// Stores a freshly loaded subtitle playlist on the current track and starts ticking.
SubtitleStreamController.prototype.onSubtitleTrackLoaded = function (data) {
    var id = data.id, details = data.details;
    var _a = this, currentTrackId = _a.currentTrackId, tracks = _a.tracks;
    var currentTrack = tracks[currentTrackId];
    // ignore playlists that arrive for a stale or unknown track id
    if (id >= tracks.length || id !== currentTrackId || !currentTrack) {
        return;
    }
    // for live playlists, merge the new details into the previous ones using
    // lastAVStart (set by onLevelUpdated) to keep fragment times aligned
    if (details.live) {
        level_helper_1.mergeSubtitlePlaylists(currentTrack.details, details, this.lastAVStart);
    }
    currentTrack.details = details;
    this.setInterval(TICK_INTERVAL);
};
// Leave KEY_LOADING once the decryption key has arrived so the next tick can load.
SubtitleStreamController.prototype.onKeyLoaded = function () {
    if (this.state === base_stream_controller_1.State.KEY_LOADING) {
        this.state = base_stream_controller_1.State.IDLE;
    }
};
// On FRAG_LOADED for the subtitle fragment currently in flight: when the
// payload is non-empty and AES-128 encrypted, decrypt it and re-emit it via
// FRAG_DECRYPTED together with decryption timing stats.
SubtitleStreamController.prototype.onFragLoaded = function (data) {
    var fragCurrent = this.fragCurrent;
    var decryptData = data.frag.decryptdata;
    var fragLoaded = data.frag;
    var hls = this.hls;
    // only react to the subtitle fragment we actually requested (match by sn)
    if (this.state === base_stream_controller_1.State.FRAG_LOADING && fragCurrent && data.frag.type === 'subtitle' && fragCurrent.sn === data.frag.sn) {
        // check to see if the payload needs to be decrypted
        if (data.payload.byteLength > 0 && (decryptData && decryptData.key && decryptData.method === 'AES-128')) {
            var startTime_1 = performance.now();
            // decrypt the subtitles
            this.decrypter.decrypt(data.payload, decryptData.key.buffer, decryptData.iv.buffer, function (decryptedData) {
                var endTime = performance.now();
                hls.trigger(events_1.default.FRAG_DECRYPTED, { frag: fragLoaded, payload: decryptedData, stats: { tstart: startTime_1, tdecrypt: endTime } });
            });
        }
    }
};
// Tracks the start time of the first fragment of the updated level; consumed
// by onSubtitleTrackLoaded when merging live subtitle playlists.
SubtitleStreamController.prototype.onLevelUpdated = function (_a) {
    var details = _a.details;
    var frags = details.fragments;
    this.lastAVStart = frags.length ? frags[0].start : 0;
};
// Tick handler: when IDLE and the current track has details, measure how much
// subtitle data is buffered around media.currentTime and, if under the max
// buffer target, pick the next fragment (by PDT when available, else by PTS)
// and either request its key (encrypted) or trigger its load (not yet loaded).
SubtitleStreamController.prototype.doTick = function () {
    if (!this.media) {
        this.state = base_stream_controller_1.State.IDLE;
        return;
    }
    switch (this.state) {
        case base_stream_controller_1.State.IDLE: {
            var _a = this, config = _a.config, currentTrackId = _a.currentTrackId, fragmentTracker = _a.fragmentTracker, media = _a.media, tracks = _a.tracks;
            if (!tracks || !tracks[currentTrackId] || !tracks[currentTrackId].details) {
                break;
            }
            var maxBufferHole = config.maxBufferHole, maxFragLookUpTolerance = config.maxFragLookUpTolerance;
            var maxConfigBuffer = Math.min(config.maxBufferLength, config.maxMaxBufferLength);
            var bufferedInfo = buffer_helper_1.BufferHelper.bufferedInfo(this._getBuffered(), media.currentTime, maxBufferHole);
            var bufferEnd = bufferedInfo.end, bufferLen = bufferedInfo.len;
            var trackDetails = tracks[currentTrackId].details;
            var fragments = trackDetails.fragments;
            var fragLen = fragments.length;
            // NOTE(review): assumes the playlist contains at least one fragment;
            // with an empty fragments array the next line would throw on
            // fragments[-1].start — confirm upstream guarantees this.
            var end = fragments[fragLen - 1].start + fragments[fragLen - 1].duration;
            if (bufferLen > maxConfigBuffer) {
                return;
            }
            var foundFrag = void 0;
            var fragPrevious = this.fragPrevious;
            if (bufferEnd < end) {
                // prefer program-date-time continuity when the playlist carries PDT
                if (fragPrevious && trackDetails.hasProgramDateTime) {
                    foundFrag = fragment_finders_1.findFragmentByPDT(fragments, fragPrevious.endProgramDateTime, maxFragLookUpTolerance);
                }
                if (!foundFrag) {
                    foundFrag = fragment_finders_1.findFragmentByPTS(fragPrevious, fragments, bufferEnd, maxFragLookUpTolerance);
                }
            }
            else {
                foundFrag = fragments[fragLen - 1];
            }
            if (foundFrag && foundFrag.encrypted) {
                logger_1.logger.log("Loading key for " + foundFrag.sn);
                this.state = base_stream_controller_1.State.KEY_LOADING;
                this.hls.trigger(events_1.default.KEY_LOADING, { frag: foundFrag });
            }
            else if (foundFrag && fragmentTracker.getState(foundFrag) === fragment_tracker_1.FragmentState.NOT_LOADED) {
                // only load if fragment is not loaded
                this.fragCurrent = foundFrag;
                this.state = base_stream_controller_1.State.FRAG_LOADING;
                this.hls.trigger(events_1.default.FRAG_LOADING, { frag: foundFrag });
            }
        }
    }
};
// Resets the live-alignment anchor before delegating to the base stopLoad.
SubtitleStreamController.prototype.stopLoad = function () {
    this.lastAVStart = 0;
    _super.prototype.stopLoad.call(this);
};
// Synthetic buffered ranges for the current track (empty array when none).
SubtitleStreamController.prototype._getBuffered = function () {
    return this.tracksBuffered[this.currentTrackId] || [];
};
// On seek, forget the previous fragment so the next tick re-resolves position.
SubtitleStreamController.prototype.onMediaSeeking = function () {
    this.fragPrevious = null;
};
return SubtitleStreamController;
}(base_stream_controller_1.default));
exports.SubtitleStreamController = SubtitleStreamController;

/***/ }),

/***/ "./src/controller/subtitle-track-controller.js":
/*!*****************************************************!*\
  !*** ./src/controller/subtitle-track-controller.js ***!
  \*****************************************************/
/*! no static exports found */
/*! ModuleConcatenation bailout: Module is not an ECMAScript module */
/***/ (function(module, exports, __webpack_require__) {

"use strict";
/* WEBPACK VAR INJECTION */(function(
    https://whatsonchain.com/tx/6ff5ac56482cb8f3ce5b6c0db2774eea1f62bbb4ac017003a33048f4a959d64f
    Partial data is displayed. To get the full data, click Download.