/**
 * @videojs/http-streaming
 * Play back HLS and DASH with Video.js, even where it's not natively supported.
 * (package metadata: 1,419 lines / 1,255 loc, 127 kB, JavaScript)
 */
/**
* @file segment-loader.js
*/
import Playlist from './playlist';
import videojs from 'video.js';
import Config from './config';
import window from 'global/window';
import { initSegmentId, segmentKeyId } from './bin-utils';
import { mediaSegmentRequest, REQUEST_ERRORS } from './media-segment-request';
import segmentTransmuxer from './segment-transmuxer';
import { TIME_FUDGE_FACTOR, timeUntilRebuffer as timeUntilRebuffer_ } from './ranges';
import { minRebufferMaxBandwidthSelector } from './playlist-selectors';
import logger from './util/logger';
import {compactSegmentUrlDescription, concatSegments} from './util/segment';
import {
createCaptionsTrackIfNotExists,
addCaptionData,
removeCuesFromTrack
} from './util/text-tracks';
import { gopsSafeToAlignWith, removeGopBuffer, updateGopBuffer } from './util/gops';
import shallowEqual from './util/shallow-equal.js';
import { QUOTA_EXCEEDED_ERR } from './error-codes';
import {timeRangesToArray, lastBufferedEnd, timeAheadOf} from './ranges.js';
import {getKnownPartCount} from './playlist.js';
import {createTimeRanges} from './util/vjs-compat';
/**
 * The segment loader has no recourse except to fetch a segment in the
 * current playlist and use the internal timestamps in that segment to
 * generate a syncPoint. This function returns a good candidate index
 * for that process.
 *
 * @param {number} currentTimeline - the timeline segments must belong to
 * @param {Array} segments - the segments array from a playlist.
 * @param {number} targetTime - cumulative duration threshold used to pick a segment
 * @return {number} An index of a segment from the playlist to load
 */
export const getSyncSegmentCandidate = function(currentTimeline, segments, targetTime) {
  const candidateIndexes = [];
  let accumulatedTime = 0;

  for (const [index, segment] of (segments || []).entries()) {
    // Only segments on the loader's current timeline are candidates.
    if (segment.timeline !== currentTimeline) {
      continue;
    }

    candidateIndexes.push(index);
    accumulatedTime += segment.duration;

    // Stop at the first segment whose cumulative duration passes the target.
    if (accumulatedTime > targetTime) {
      return index;
    }
  }

  // No segments matched the timeline at all; fall back to the start.
  if (candidateIndexes.length === 0) {
    return 0;
  }

  // default to the last timeline segment
  return candidateIndexes[candidateIndexes.length - 1];
};
// In the event of a quota exceeded error, keep at least one second of back buffer. This
// number was arbitrarily chosen and may be updated in the future, but seemed reasonable
// as a start to prevent any potential issues with removing content too close to the
// playhead.
const MIN_BACK_BUFFER = 1;
// in ms
const CHECK_BUFFER_DELAY = 500;
// True only for finite numbers. Number.isFinite does not coerce its argument, so
// it already rejects non-number values and makes the explicit typeof check redundant.
const finite = (num) => Number.isFinite(num);
// With most content hovering around 30fps, if a segment has a duration less than a half
// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
// not accurately reflect the rest of the content.
const MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
/**
 * Determines whether a rendition switch would be illegal based on the track
 * content of the new segment versus the media we started with.
 *
 * @param {string} loaderType - the type of loader ('main', 'audio', ...)
 * @param {Object} startingMedia - track info ({hasAudio, hasVideo}) playback began with
 * @param {Object} trackInfo - track info for the newly requested segment
 * @return {string|null} a human readable error message, or null if the switch is legal
 */
export const illegalMediaSwitch = (loaderType, startingMedia, trackInfo) => {
  // Although these checks should most likely cover non 'main' types, for now it narrows
  // the scope of our checks.
  if (loaderType !== 'main' || !startingMedia || !trackInfo) {
    return null;
  }

  const hadVideo = startingMedia.hasVideo;
  const segmentHasVideo = trackInfo.hasVideo;
  const segmentHasAudio = trackInfo.hasAudio;

  if (!segmentHasAudio && !segmentHasVideo) {
    return 'Neither audio nor video found in segment.';
  }

  if (hadVideo && !segmentHasVideo) {
    return 'Only audio found in segment when we expected video.' +
      ' We can\'t switch to audio only from a stream that had video.' +
      ' To get rid of this message, please add codec information to the manifest.';
  }

  if (!hadVideo && segmentHasVideo) {
    return 'Video found in segment when we expected only audio.' +
      ' We can\'t switch to a stream with video from an audio only stream.' +
      ' To get rid of this message, please add codec information to the manifest.';
  }

  return null;
};
/**
 * Calculates a time value that is safe to remove from the back buffer without interrupting
 * playback.
 *
 * @param {TimeRange} seekable
 *        The current seekable range
 * @param {number} currentTime
 *        The current time of the player
 * @param {number} targetDuration
 *        The target duration of the current playlist
 * @return {number}
 *         Time that is safe to remove from the back buffer without interrupting playback
 */
export const safeBackBufferTrimTime = (seekable, currentTime, targetDuration) => {
  // 30 seconds before the playhead provides a safe default for trimming.
  //
  // Choosing a reasonable default is particularly important for high bitrate content and
  // VOD videos/live streams with large windows, as the buffer may end up overfilled and
  // throw an APPEND_BUFFER_ERR.
  let removePoint = currentTime - Config.BACK_BUFFER_LENGTH;

  if (seekable.length) {
    // Some live playlists may have a shorter window of content than the full allowed back
    // buffer. For these playlists, don't save content that's no longer within the window.
    removePoint = Math.max(removePoint, seekable.start(0));
  }

  // Don't remove within target duration of the current time to avoid the possibility of
  // removing the GOP currently being played, as removing it can cause playback stalls.
  const upperBound = currentTime - targetDuration;

  return Math.min(upperBound, removePoint);
};
/**
 * Builds a human-readable description of a segmentInfo object for logging.
 *
 * @param {Object} segmentInfo - the pending segment's metadata
 * @return {string} a single-line summary of the segment and how it was selected
 */
export const segmentInfoString = (segmentInfo) => {
  const {
    startOfSegment,
    duration,
    segment,
    part,
    playlist: {
      mediaSequence: seq,
      id,
      segments = []
    },
    mediaIndex: index,
    partIndex,
    timeline
  } = segmentInfo;

  const lastSegmentIndex = segments.length - 1;

  // Record which strategy picked this segment.
  let selection = 'mediaIndex/partIndex increment';

  if (segmentInfo.getMediaInfoForTime) {
    selection = `getMediaInfoForTime (${segmentInfo.getMediaInfoForTime})`;
  } else if (segmentInfo.isSyncRequest) {
    selection = 'getSyncSegmentCandidate (isSyncRequest)';
  }

  if (segmentInfo.independent) {
    selection += ` with independent ${segmentInfo.independent}`;
  }

  const hasPartIndex = typeof partIndex === 'number';
  const name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
  const zeroBasedPartCount = hasPartIndex ? getKnownPartCount({preloadSegment: segment}) - 1 : 0;

  const fields = [`${name} [${seq + index}/${seq + lastSegmentIndex}]`];

  if (hasPartIndex) {
    fields.push(`part [${partIndex}/${zeroBasedPartCount}]`);
  }

  fields.push(`segment start/end [${segment.start} => ${segment.end}]`);

  if (hasPartIndex) {
    fields.push(`part start/end [${part.start} => ${part.end}]`);
  }

  fields.push(
    `startOfSegment [${startOfSegment}]`,
    `duration [${duration}]`,
    `timeline [${timeline}]`,
    `selected by [${selection}]`,
    `playlist [${id}]`
  );

  return fields.join(' ');
};
// Maps a media type ('audio'/'video') to its timing-info property name on segmentInfo.
const timingInfoPropertyForMedia = function(mediaType) {
  return `${mediaType}TimingInfo`;
};
/**
 * Returns the timestamp offset to use for the segment.
 *
 * @param {number} segmentTimeline
 *        The timeline of the segment
 * @param {number} currentTimeline
 *        The timeline currently being followed by the loader
 * @param {number} startOfSegment
 *        The estimated segment start
 * @param {TimeRange[]} buffered
 *        The loader's buffer
 * @param {boolean} overrideCheck
 *        If true, no checks are made to see if the timestamp offset value should be set,
 *        but sets it directly to a value.
 *
 * @return {number|null}
 *         Either a number representing a new timestamp offset, or null if the segment is
 *         part of the same timeline
 */
export const timestampOffsetForSegment = ({
  segmentTimeline,
  currentTimeline,
  startOfSegment,
  buffered,
  overrideCheck
}) => {
  // Check to see if we are crossing a discontinuity to see if we need to set the
  // timestamp offset on the transmuxer and source buffer.
  //
  // Previously, we changed the timestampOffset if the start of this segment was less than
  // the currently set timestampOffset, but this isn't desirable as it can produce bad
  // behavior, especially around long running live streams.
  if (segmentTimeline === currentTimeline && !overrideCheck) {
    return null;
  }

  // When changing renditions, it's possible to request a segment on an older timeline. For
  // instance, given two renditions with the following:
  //
  // #EXTINF:10
  // segment1
  // #EXT-X-DISCONTINUITY
  // #EXTINF:10
  // segment2
  // #EXTINF:10
  // segment3
  //
  // And the current player state:
  //
  // current time: 8
  // buffer: 0 => 20
  //
  // The next segment on the current rendition would be segment3, filling the buffer from
  // 20s onwards. However, if a rendition switch happens after segment2 was requested,
  // then the next segment to be requested will be segment1 from the new rendition in
  // order to fill time 8 and onwards. Using the buffered end would result in repeated
  // content (since it would position segment1 of the new rendition starting at 20s). This
  // case can be identified when the new segment's timeline is a prior value. Instead of
  // using the buffered end, the startOfSegment can be used, which, hopefully, will be
  // more accurate to the actual start time of the segment.
  if (segmentTimeline < currentTimeline) {
    return startOfSegment;
  }

  // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
  // value uses the end of the last segment if it is available. While this value
  // should often be correct, it's better to rely on the buffered end, as the new
  // content post discontinuity should line up with the buffered end as if it were
  // time 0 for the new content.
  if (buffered.length) {
    return buffered.end(buffered.length - 1);
  }

  return startOfSegment;
};
/**
* Returns whether or not the loader should wait for a timeline change from the timeline
* change controller before processing the segment.
*
* Primary timing in VHS goes by video. This is different from most media players, as
* audio is more often used as the primary timing source. For the foreseeable future, VHS
* will continue to use video as the primary timing source, due to the current logic and
* expectations built around it.
* Since the timing follows video, in order to maintain sync, the video loader is
* responsible for setting both audio and video source buffer timestamp offsets.
*
* Setting different values for audio and video source buffers could lead to
* desyncing. The following examples demonstrate some of the situations where this
* distinction is important. Note that all of these cases involve demuxed content. When
* content is muxed, the audio and video are packaged together, therefore syncing
* separate media playlists is not an issue.
*
* CASE 1: Audio prepares to load a new timeline before video:
*
* Timeline: 0 1
* Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Audio Loader: ^
* Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Video Loader ^
*
* In the above example, the audio loader is preparing to load the 6th segment, the first
* after a discontinuity, while the video loader is still loading the 5th segment, before
* the discontinuity.
*
* If the audio loader goes ahead and loads and appends the 6th segment before the video
* loader crosses the discontinuity, then when appended, the 6th audio segment will use
* the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
* the audio loader must provide the audioAppendStart value to trim the content in the
* transmuxer, and that value relies on the audio timestamp offset. Since the audio
* timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
* segment until that value is provided.
*
* CASE 2: Video prepares to load a new timeline before audio:
*
* Timeline: 0 1
* Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Audio Loader: ^
* Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Video Loader ^
*
* In the above example, the video loader is preparing to load the 6th segment, the first
* after a discontinuity, while the audio loader is still loading the 5th segment, before
* the discontinuity.
*
* If the video loader goes ahead and loads and appends the 6th segment, then once the
* segment is loaded and processed, both the video and audio timestamp offsets will be
* set, since video is used as the primary timing source. This is to ensure content lines
* up appropriately, as any modifications to the video timing are reflected by audio when
* the video loader sets the audio and video timestamp offsets to the same value. However,
* setting the timestamp offset for audio before audio has had a chance to change
* timelines will likely lead to desyncing, as the audio loader will append segment 5 with
* a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
*
* CASE 3: When seeking, audio prepares to load a new timeline before video
*
* Timeline: 0 1
* Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Audio Loader: ^
* Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
* Video Loader ^
*
* In the above example, both audio and video loaders are loading segments from timeline
* 0, but imagine that the seek originated from timeline 1.
*
* When seeking to a new timeline, the timestamp offset will be set based on the expected
* segment start of the loaded video segment. In order to maintain sync, the audio loader
* must wait for the video loader to load its segment and update both the audio and video
* timestamp offsets before it may load and append its own segment. This is the case
* whether the seek results in a mismatched segment request (e.g., the audio loader
* chooses to load segment 3 and the video loader chooses to load segment 4) or the
* loaders choose to load the same segment index from each playlist, as the segments may
* not be aligned perfectly, even for matching segment indexes.
*
* @param {Object} timelinechangeController
* @param {number} currentTimeline
* The timeline currently being followed by the loader
* @param {number} segmentTimeline
* The timeline of the segment being loaded
* @param {('main'|'audio')} loaderType
* The loader type
* @param {boolean} audioDisabled
* Whether the audio is disabled for the loader. This should only be true when the
* loader may have muxed audio in its segment, but should not append it, e.g., for
* the main loader when an alternate audio playlist is active.
*
* @return {boolean}
* Whether the loader should wait for a timeline change from the timeline change
* controller before processing the segment
*/
export const shouldWaitForTimelineChange = ({
  timelineChangeController,
  currentTimeline,
  segmentTimeline,
  loaderType,
  audioDisabled
}) => {
  // No timeline crossing: nothing to coordinate.
  if (currentTimeline === segmentTimeline) {
    return false;
  }

  if (loaderType === 'audio') {
    const lastMainTimelineChange = timelineChangeController.lastTimelineChange({
      type: 'main'
    });

    // Audio loader should wait if:
    //
    // * main hasn't had a timeline change yet (thus has not loaded its first segment)
    // * main hasn't yet changed to the timeline audio is looking to load
    return !(lastMainTimelineChange && lastMainTimelineChange.to === segmentTimeline);
  }

  // The main loader only needs to wait for timeline changes if there's demuxed audio.
  // Otherwise, there's nothing to wait for, since audio would be muxed into the main
  // loader's segments (or the content is audio/video only and handled by the main
  // loader).
  if (loaderType === 'main' && audioDisabled) {
    const pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
      type: 'audio'
    });

    // Main loader should wait for the audio loader if audio is not pending a timeline
    // change to the current timeline.
    //
    // Since the main loader is responsible for setting the timestamp offset for both
    // audio and video, the main loader must wait for audio to be about to change to its
    // timeline before setting the offset, otherwise, if audio is behind in loading,
    // segments from the previous timeline would be adjusted by the new timestamp offset.
    //
    // This requirement means that video will not cross a timeline until the audio is
    // about to cross to it, so that way audio and video will always cross the timeline
    // together.
    //
    // In addition to normal timeline changes, these rules also apply to the start of a
    // stream (going from a non-existent timeline, -1, to timeline 0). It's important
    // that these rules apply to the first timeline change because if they did not, it's
    // possible that the main loader will cross two timelines before the audio loader has
    // crossed one. Logic may be implemented to handle the startup as a special case, but
    // it's easier to simply treat all timeline changes the same.
    return !(pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline);
  }

  return false;
};
/**
 * Detects "bad" pending timeline changes: both audio and main have a pending
 * change, neither is the initial change away from timeline -1, and they are
 * headed to different timelines.
 *
 * @param {Object} timelineChangeController
 * @return {boolean} true when the pending changes disagree and should be fixed
 */
export const shouldFixBadTimelineChanges = (timelineChangeController) => {
  if (!timelineChangeController) {
    return false;
  }

  const pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({ type: 'audio' });
  const pendingMainTimelineChange = timelineChangeController.pendingTimelineChange({ type: 'main' });

  // Both loaders must have a pending change for a disagreement to exist.
  if (!pendingAudioTimelineChange || !pendingMainTimelineChange) {
    return false;
  }

  // Ignore the initial change away from the non-existent timeline (-1).
  if (pendingAudioTimelineChange.from === -1 || pendingMainTimelineChange.from === -1) {
    return false;
  }

  // Different destinations means the loaders have diverged.
  return pendingAudioTimelineChange.to !== pendingMainTimelineChange.to;
};
/**
 * Check if the pending audio timeline change is behind the
 * pending main timeline change.
 *
 * Note: returns a falsy non-boolean (null/undefined) when either pending
 * change is missing, matching the original short-circuit behavior.
 *
 * @param {SegmentLoader} segmentLoader
 * @return {boolean}
 */
const isAudioTimelineBehind = (segmentLoader) => {
  const controller = segmentLoader.timelineChangeController_;
  const pendingAudio = controller.pendingTimelineChange({ type: 'audio' });
  const pendingMain = controller.pendingTimelineChange({ type: 'main' });

  return pendingAudio && pendingMain && pendingAudio.to < pendingMain.to;
};
/**
 * A method to check if the player is waiting for a timeline change, and fixes
 * certain scenarios where the timelines need to be updated.
 *
 * @param {SegmentLoader} segmentLoader
 */
const checkAndFixTimelines = (segmentLoader) => {
  const pendingSegment = segmentLoader.pendingSegment_;

  // Nothing to check without a segment in flight.
  if (!pendingSegment) {
    return;
  }

  const waiting = shouldWaitForTimelineChange({
    timelineChangeController: segmentLoader.timelineChangeController_,
    currentTimeline: segmentLoader.currentTimeline_,
    segmentTimeline: pendingSegment.timeline,
    loaderType: segmentLoader.loaderType_,
    audioDisabled: segmentLoader.audioDisabled_
  });

  // Only intervene when the loader is blocked AND the pending changes disagree.
  if (!waiting || !shouldFixBadTimelineChanges(segmentLoader.timelineChangeController_)) {
    return;
  }

  const eventName = isAudioTimelineBehind(segmentLoader) ?
    'audioTimelineBehind' :
    'fixBadTimelineChange';

  segmentLoader.timelineChangeController_.trigger(eventName);
};
/**
 * Returns the greater of the audio and video durations from the provided
 * timing info objects. Uses BigInt arithmetic when either endpoint is a
 * bigint, converting back to a number when the result is safely representable.
 *
 * @param {Object} timingInfos - object with optional audioTimingInfo/videoTimingInfo,
 *        each having {start, end}
 * @return {number|BigInt} the max duration found, or 0 when none is computable
 */
export const mediaDuration = (timingInfos) => {
  let maxDuration = 0;

  for (const mediaType of ['video', 'audio']) {
    const timingInfo = timingInfos[`${mediaType}TimingInfo`];

    if (!timingInfo) {
      continue;
    }

    const {start, end} = timingInfo;
    let duration;

    if (typeof start === 'bigint' || typeof end === 'bigint') {
      duration = window.BigInt(end) - window.BigInt(start);
    } else if (typeof start === 'number' && typeof end === 'number') {
      duration = end - start;
    }

    if (duration !== undefined && duration > maxDuration) {
      maxDuration = duration;
    }
  }

  // convert back to a number if it is lower than MAX_SAFE_INTEGER
  // as we only need BigInt when we are above that.
  if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
    maxDuration = Number(maxDuration);
  }

  return maxDuration;
};
/**
 * Whether a segment's measured duration exceeds the allowed maximum.
 *
 * @param {Object} options
 * @param {number} options.segmentDuration - measured duration of the segment
 * @param {number} options.maxDuration - maximum allowed duration (e.g. target duration)
 * @return {boolean} true when the rounded duration exceeds the max plus fudge factor
 */
export const segmentTooLong = ({ segmentDuration, maxDuration }) => {
  // 0 duration segments are most likely due to metadata only segments or a lack of
  // information.
  if (!segmentDuration) {
    return false;
  }

  // For HLS:
  //
  // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
  // The EXTINF duration of each Media Segment in the Playlist
  // file, when rounded to the nearest integer, MUST be less than or equal
  // to the target duration; longer segments can trigger playback stalls
  // or other errors.
  //
  // For DASH, the mpd-parser uses the largest reported segment duration as the target
  // duration. Although that reported duration is occasionally approximate (i.e., not
  // exact), a strict check may report that a segment is too long more often in DASH.
  const roundedDuration = Math.round(segmentDuration);

  return roundedDuration > maxDuration + TIME_FUDGE_FACTOR;
};
/**
 * Produces a log message object when an HLS segment's measured duration
 * exceeds the playlist's target duration, or null when there is nothing to
 * report.
 *
 * @param {Object} segmentInfo - the segment's timing/playlist metadata
 * @param {string} sourceType - 'hls' or 'dash'; only 'hls' is checked
 * @return {Object|null} {severity, message} or null
 */
export const getTroublesomeSegmentDurationMessage = (segmentInfo, sourceType) => {
  // Right now we aren't following DASH's timing model exactly, so only perform
  // this check for HLS content.
  if (sourceType !== 'hls') {
    return null;
  }

  const segmentDuration = mediaDuration({
    audioTimingInfo: segmentInfo.audioTimingInfo,
    videoTimingInfo: segmentInfo.videoTimingInfo
  });

  // Don't report if we lack information.
  //
  // If the segment has a duration of 0 it is either a lack of information or a
  // metadata only segment and shouldn't be reported here.
  if (!segmentDuration) {
    return null;
  }

  const targetDuration = segmentInfo.playlist.targetDuration;

  const isSegmentWayTooLong = segmentTooLong({
    segmentDuration,
    maxDuration: targetDuration * 2
  });
  const isSegmentSlightlyTooLong = segmentTooLong({
    segmentDuration,
    maxDuration: targetDuration
  });

  if (!isSegmentWayTooLong && !isSegmentSlightlyTooLong) {
    return null;
  }

  return {
    // Anything beyond double the target duration is worth a louder warning.
    severity: isSegmentWayTooLong ? 'warn' : 'info',
    message: `Segment with index ${segmentInfo.mediaIndex} ` +
      `from playlist ${segmentInfo.playlist.id} ` +
      `has a duration of ${segmentDuration} ` +
      `when the reported duration is ${segmentInfo.duration} ` +
      `and the target duration is ${targetDuration}. ` +
      'For HLS content, a duration in excess of the target duration may result in ' +
      'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' +
      'more details: ' +
      'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1'
  };
};
/**
 * Builds a serializable payload describing a segment for events or errors.
 *
 * @param {Object} options
 * @param {string} [options.type] - loader type; falls back to segment.type
 * @param {Object} options.segment - either a segmentInfo or a simple segment object
 * @return {Object|undefined} a segmentInfo payload, or undefined when no segment given
 */
export const segmentInfoPayload = ({type, segment}) => {
  if (!segment) {
    return;
  }

  // Fix: the key on the init-segment map is `key`, not `ke` — the previous
  // check never detected encryption declared on segment.map.
  const isEncrypted = Boolean(segment.key || (segment.map && segment.map.key));
  // An init segment that has a map descriptor but no downloaded bytes yet.
  const isMediaInitialization = Boolean(segment.map && !segment.map.bytes);
  // segmentInfo objects carry startOfSegment; simple segments carry start.
  const start = segment.startOfSegment === undefined ? segment.start : segment.startOfSegment;

  return {
    type: type || segment.type,
    uri: segment.resolvedUri || segment.uri,
    start,
    duration: segment.duration,
    isEncrypted,
    isMediaInitialization
  };
};
/**
* An object that manages segment loading and appending.
*
* @class SegmentLoader
* @param {Object} options required and optional options
* @extends videojs.EventTarget
*/
export default class SegmentLoader extends videojs.EventTarget {
/**
 * Create a SegmentLoader.
 *
 * @param {Object} settings - required configuration; must provide at least
 *        currentTime (function) and mediaSource, plus the collaborators
 *        assigned below (sourceUpdater, syncController, timelineChangeController, ...)
 * @param {Object} [options={}] - optional settings (not read in this constructor)
 * @throws {TypeError} when settings, settings.currentTime, or settings.mediaSource
 *         are missing
 */
constructor(settings, options = {}) {
super();
// check pre-conditions
if (!settings) {
throw new TypeError('Initialization settings are required');
}
if (typeof settings.currentTime !== 'function') {
throw new TypeError('No currentTime getter specified');
}
if (!settings.mediaSource) {
throw new TypeError('No MediaSource specified');
}
// public properties
this.bandwidth = settings.bandwidth;
this.throughput = {rate: 0, count: 0};
this.roundTrip = NaN;
// zero out the media stats counters (mediaBytesTransferred, mediaRequests, ...)
this.resetStats_();
this.mediaIndex = null;
this.partIndex = null;
// private settings
this.hasPlayed_ = settings.hasPlayed;
this.currentTime_ = settings.currentTime;
this.seekable_ = settings.seekable;
this.seeking_ = settings.seeking;
this.duration_ = settings.duration;
this.mediaSource_ = settings.mediaSource;
this.vhs_ = settings.vhs;
this.loaderType_ = settings.loaderType;
this.currentMediaInfo_ = void 0;
this.startingMediaInfo_ = void 0;
this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
this.goalBufferLength_ = settings.goalBufferLength;
this.sourceType_ = settings.sourceType;
this.sourceUpdater_ = settings.sourceUpdater;
this.inbandTextTracks_ = settings.inbandTextTracks;
// state_ transitions are logged and re-emitted via the 'state' accessor defined below
this.state_ = 'INIT';
this.timelineChangeController_ = settings.timelineChangeController;
this.shouldSaveSegmentTimingInfo_ = true;
this.parse708captions_ = settings.parse708captions;
this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
this.captionServices_ = settings.captionServices;
this.exactManifestTimings = settings.exactManifestTimings;
this.addMetadataToTextTrack = settings.addMetadataToTextTrack;
// private instance variables
this.checkBufferTimeout_ = null;
this.error_ = void 0;
// -1 signals "no timeline yet"; the first real timeline is 0
this.currentTimeline_ = -1;
this.shouldForceTimestampOffsetAfterResync_ = false;
this.pendingSegment_ = null;
this.xhrOptions_ = null;
this.pendingSegments_ = [];
this.audioDisabled_ = false;
this.isPendingTimestampOffset_ = false;
// TODO possibly move gopBuffer and timeMapping info to a separate controller
this.gopBuffer_ = [];
this.timeMapping_ = 0;
this.safeAppend_ = false;
this.appendInitSegment_ = {
audio: true,
video: true
};
this.playlistOfLastInitSegment_ = {
audio: null,
video: null
};
// append/other calls queued until the loader has enough info to process them
this.callQueue_ = [];
// If the segment loader prepares to load a segment, but does not have enough
// information yet to start the loading process (e.g., if the audio loader wants to
// load a segment from the next timeline but the main loader hasn't yet crossed that
// timeline), then the load call will be added to the queue until it is ready to be
// processed.
this.loadQueue_ = [];
this.metadataQueue_ = {
id3: [],
caption: []
};
this.waitingOnRemove_ = false;
this.quotaExceededErrorRetryTimeout_ = null;
// Fragmented mp4 playback
this.activeInitSegmentId_ = null;
this.initSegments_ = {};
// HLSe playback
this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
this.keyCache_ = {};
this.decrypter_ = settings.decrypter;
// Manages the tracking and generation of sync-points, mappings
// between a time in the display time and a segment index within
// a playlist
this.syncController_ = settings.syncController;
this.syncPoint_ = {
segmentIndex: 0,
time: 0
};
this.transmuxer_ = this.createTransmuxer_();
// keep a bound handler so it can be removed again in dispose()
this.triggerSyncInfoUpdate_ = () => this.trigger('syncinfoupdate');
this.syncController_.on('syncinfoupdate', this.triggerSyncInfoUpdate_);
this.mediaSource_.addEventListener('sourceopen', () => {
if (!this.isEndOfStream_()) {
this.ended_ = false;
}
});
// ...for determining the fetch location
this.fetchAtBuffer_ = false;
this.logger_ = logger(`SegmentLoader[${this.loaderType_}]`);
// 'state' wraps state_ so every transition is logged and triggers 'statechange'
Object.defineProperty(this, 'state', {
get() {
return this.state_;
},
set(newState) {
if (newState !== this.state_) {
this.logger_(`${this.state_} -> ${newState}`);
this.state_ = newState;
this.trigger('statechange');
}
}
});
this.sourceUpdater_.on('ready', () => {
if (this.hasEnoughInfoToAppend_()) {
this.processCallQueue_();
} else {
checkAndFixTimelines(this);
}
});
this.sourceUpdater_.on('codecschange', (metadata) => {
this.trigger({type: 'codecschange', ...metadata});
});
// Only the main loader needs to listen for pending timeline changes, as the main
// loader should wait for audio to be ready to change its timeline so that both main
// and audio timelines change together. For more details, see the
// shouldWaitForTimelineChange function.
if (this.loaderType_ === 'main') {
this.timelineChangeController_.on('pendingtimelinechange', () => {
if (this.hasEnoughInfoToAppend_()) {
this.processCallQueue_();
} else {
checkAndFixTimelines(this);
}
});
}
// The main loader only listens on pending timeline changes, but the audio loader,
// since its loads follow main, needs to listen on timeline changes. For more details,
// see the shouldWaitForTimelineChange function.
if (this.loaderType_ === 'audio') {
this.timelineChangeController_.on('timelinechange', (metadata) => {
this.trigger({type: 'timelinechange', ...metadata });
if (this.hasEnoughInfoToLoad_()) {
this.processLoadQueue_();
} else {
checkAndFixTimelines(this);
}
if (this.hasEnoughInfoToAppend_()) {
this.processCallQueue_();
} else {
checkAndFixTimelines(this);
}
});
}
}
/**
* TODO: Current sync controller consists of many hls-specific strategies
* media sequence sync is also hls-specific, and we would like to be protocol-agnostic on this level
* this should be a part of the sync-controller and sync controller should expect different strategy list based on the protocol.
*
* @return {MediaSequenceSync|null}
* @private
*/
get mediaSequenceSync_() {
return this.syncController_.getMediaSequenceSync(this.loaderType_);
}
createTransmuxer_() {
return segmentTransmuxer.createTransmuxer({
remux: false,
alignGopsAtEnd: this.safeAppend_,
keepOriginalTimestamps: true,
parse708captions: this.parse708captions_,
captionServices: this.captionServices_
});
}
/**
* reset all of our media stats
*
* @private
*/
resetStats_() {
this.mediaBytesTransferred = 0;
this.mediaRequests = 0;
this.mediaRequestsAborted = 0;
this.mediaRequestsTimedout = 0;
this.mediaRequestsErrored = 0;
this.mediaTransferDuration = 0;
this.mediaSecondsLoaded = 0;
this.mediaAppends = 0;
}
/**
 * dispose of the SegmentLoader and reset to the default state
 */
dispose() {
this.trigger('dispose');
this.state = 'DISPOSED';
// stop buffer monitoring and abort any in-flight requests
this.pause();
this.abort_();
if (this.transmuxer_) {
this.transmuxer_.terminate();
}
this.resetStats_();
if (this.checkBufferTimeout_) {
window.clearTimeout(this.checkBufferTimeout_);
}
// detach the handler registered in the constructor so the sync controller
// doesn't keep this loader alive
if (this.syncController_ && this.triggerSyncInfoUpdate_) {
this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
}
// remove all remaining event listeners on this loader
this.off();
}
setAudio(enable) {
this.audioDisabled_ = !enable;
if (enable) {
this.appendInitSegment_.audio = true;
} else {
// remove current track audio if it gets disabled
this.sourceUpdater_.removeAudio(0, this.duration_());
}
}
/**
* abort anything that is currently doing on with the SegmentLoader
* and reset to a default state
*/
abort() {
if (this.state !== 'WAITING') {
if (this.pendingSegment_) {
this.pendingSegment_ = null;
}
this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
return;
}
this.abort_();
// We aborted the requests we were waiting on, so reset the loader's state to READY
// since we are no longer "waiting" on any requests. XHR callback is not always run
// when the request is aborted. This will prevent the loader from being stuck in the
// WAITING state indefinitely.
this.state = 'READY';
// don't wait for buffer check timeouts to begin fetching the
// next segment
if (!this.paused()) {
this.monitorBuffer_();
}
}
/**
* abort all pending xhr requests and null any pending segements
*
* @private
*/
abort_() {
if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
this.pendingSegment_.abortRequests();
}
// clear out the segment being processed
this.pendingSegment_ = null;
this.callQueue_ = [];
this.loadQueue_ = [];
this.metadataQueue_.id3 = [];
this.metadataQueue_.caption = [];
this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
this.waitingOnRemove_ = false;
window.clearTimeout(this.quotaExceededErrorRetryTimeout_);
this.quotaExceededErrorRetryTimeout_ = null;
}
checkForAbort_(requestId) {
// If the state is APPENDING, then aborts will not modify the state, meaning the first
// callback that happens should reset the state to READY so that loading can continue.
if (this.state === 'APPENDING' && !this.pendingSegment_) {
this.state = 'READY';
return true;
}
if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
return true;
}
return false;
}
/**
 * set an error on the segment loader and null out any pending segements
 *
 * @param {Error} error the error to set on the SegmentLoader
 * @return {Error} the error that was set or that is currently set
 */
error(error) {
  // Acts as a getter when called without an argument.
  if (error !== undefined) {
    this.logger_('error occurred:', error);
    this.error_ = error;
  }

  this.pendingSegment_ = null;
  return this.error_;
}
/**
 * Marks the loader as ended, resets transmuxer/GOP state, pauses loading,
 * and emits 'ended'.
 */
endOfStream() {
this.ended_ = true;
if (this.transmuxer_) {
// need to clear out any cached data to prepare for the new segment
segmentTransmuxer.reset(this.transmuxer_);
}
// drop GOP alignment info; keeps the same array instance on purpose
this.gopBuffer_.length = 0;
this.pause();
this.trigger('ended');
}
/**
* Indicates which time ranges are buffered
*
* @return {TimeRange}
* TimeRange object representing the current buffered ranges
*/
buffered_() {
const trackInfo = this.getMediaInfo_();
if (!this.sourceUpdater_ || !trackInfo) {
return createTimeRanges();
}
if (this.loaderType_ === 'main') {
const { hasAudio, hasVideo, isMuxed } = trackInfo;
if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
return this.sourceUpdater_.buffered();
}
if (hasVideo) {
return this.sourceUpdater_.videoBuffered();
}
}
// One case that can be ignored for now is audio only with alt audio,
// as we don't yet have proper support for that.
return this.sourceUpdater_.audioBuffered();
}
/**
* Gets and sets init segment for the provided map
*
* @param {Object} map
* The map object representing the init segment to get or set
* @param {boolean=} set
* If true, the init segment for the provided map should be saved
* @return {Object}
* map object for desired init segment
*/
initSegmentForMap(map, set = false) {
if (!map) {
return null;
}
const id = initSegmentId(map);
let storedMap = this.initSegments_[id];
if (set && !storedMap && map.bytes) {
this.initSegments_[id] = storedMap = {
resolvedUri: map.resolvedUri,
byterange: map.byterange,
bytes: map.bytes,
tracks: map.tracks,
timescales: map.timescales
};
}
return storedMap || map;
}
/**
* Gets and sets key for the provided key
*
* @param {Object} key
* The key object representing the key to get or set
* @param {boolean=} set
* If true, the key for the provided key should be saved
* @return {Object}
* Key object for desired key
*/
segmentKey(key, set = false) {
if (!key) {
return null;
}
const id = segmentKeyId(key);
let storedKey = this.keyCache_[id];
// TODO: We should use the HTTP Expires header to invalidate our cache per
// https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
this.keyCache_[id] = storedKey = {
resolvedUri: key.resolvedUri,
bytes: key.bytes
};
}
const result = {
resolvedUri: (storedKey || key).resolvedUri
};
if (storedKey) {
result.bytes = storedKey.bytes;
}
return result;
}
/**
* Returns true if all configuration required for loading is present, otherwise false.
*
* @return {boolean} True if the all configuration is ready for loading
* @private
*/
couldBeginLoading_() {
return this.playlist_ && !this.paused();
}
/**
* load a playlist and start to fill the buffer
*/
load() {
// un-pause
this.monitorBuffer_();
// if we don't have a playlist yet, keep waiting for one to be
// specified
if (!this.playlist_) {
return;
}
// if all the configuration is ready, initialize and begin loading
if (this.state === 'INIT' && this.couldBeginLoading_()) {
return this.init_();
}
// if we're in the middle of processing a segment already, don't
// kick off an additional segment request
if (!this.couldBeginLoading_() ||
(this.state !== 'READY' &&
this.state !== 'INIT')) {
return;
}
this.state = 'READY';
}
/**
* Once all the starting parameters have been specified, begin
* operation. This method should only be invoked from the INIT
* state.
*
* @private
*/
init_() {
this.state = 'READY';
// if this is the audio segment loader, and it hasn't been inited before, then any old
// audio data from the muxed content should be removed
this.resetEverything();
return this.monitorBuffer_();
}
/**
* set a playlist on the segment loader
*
* @param {Object} newPlaylist the playlist object to set on the segment loader
* @param {Object} [options={}] xhr options to use for subsequent segment requests
*/
playlist(newPlaylist, options = {}) {
  // ignore calls with nothing to update to
  if (!newPlaylist) {
    return;
  }
  if (this.playlist_ &&
      this.playlist_.endList &&
      newPlaylist.endList &&
      this.playlist_.uri === newPlaylist.uri) {
    // skip update if both prev and new are vod and have the same URI
    return;
  }
  // keep references to the outgoing playlist and the in-flight segment so
  // the live-window adjustments below can be applied to both
  const oldPlaylist = this.playlist_;
  const segmentInfo = this.pendingSegment_;
  this.playlist_ = newPlaylist;
  this.xhrOptions_ = options;
  // when we haven't started playing yet, the start of a live playlist
  // is always our zero-time so force a sync update each time the playlist
  // is refreshed from the server
  //
  // Use the INIT state to determine if playback has started, as the playlist sync info
  // should be fixed once requests begin (as sync points are generated based on sync
  // info), but not before then.
  if (this.state === 'INIT') {
    newPlaylist.syncInfo = {
      mediaSequence: newPlaylist.mediaSequence,
      time: 0
    };
    // Setting the date time mapping means mapping the program date time (if available)
    // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
    // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
    // be updated as the playlist is refreshed before the loader starts loading, the
    // program date time mapping needs to be updated as well.
    //
    // This mapping is only done for the main loader because a program date time should
    // map equivalently between playlists.
    if (this.loaderType_ === 'main') {
      this.syncController_.setDateTimeMappingForStart(newPlaylist);
    }
  }
  // prefer the playlist id for logging, fall back to its uri
  let oldId = null;
  if (oldPlaylist) {
    if (oldPlaylist.id) {
      oldId = oldPlaylist.id;
    } else if (oldPlaylist.uri) {
      oldId = oldPlaylist.uri;
    }
  }
  this.logger_(`playlist update [${oldId} => ${newPlaylist.id || newPlaylist.uri}]`);
  if (this.mediaSequenceSync_) {
    this.mediaSequenceSync_.update(newPlaylist, this.currentTime_());
    this.logger_(`Playlist update:
currentTime: ${this.currentTime_()}
bufferedEnd: ${lastBufferedEnd(this.buffered_())}
`, this.mediaSequenceSync_.diagnostics);
  }
  // in VOD, this is always a rendition switch (or we updated our syncInfo above)
  // in LIVE, we always want to update with new playlists (including refreshes)
  this.trigger('syncinfoupdate');
  // if we were unpaused but waiting for a playlist, start
  // buffering now
  if (this.state === 'INIT' && this.couldBeginLoading_()) {
    return this.init_();
  }
  // a different uri means this is a rendition switch rather than a live
  // refresh of the same playlist
  if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
    if (this.mediaIndex !== null) {
      // we must reset/resync the segment loader when we switch renditions and
      // the segment loader is already synced to the previous rendition
      // We only want to reset the loader here for LLHLS playback, as resetLoader sets fetchAtBuffer_
      // to false, resulting in fetching segments at currentTime and causing repeated
      // same-segment requests on playlist change. This erroneously drives up the playback watcher
      // stalled segment count, as re-requesting segments at the currentTime or browser cached segments
      // will not change the buffer.
      // Reference for LLHLS fixes: https://github.com/videojs/http-streaming/pull/1201
      const isLLHLS = !newPlaylist.endList && typeof newPlaylist.partTargetDuration === 'number';
      if (isLLHLS) {
        this.resetLoader();
      } else {
        this.resyncLoader();
      }
    }
    this.currentMediaInfo_ = void 0;
    this.trigger('playlistupdate');
    // the rest of this function depends on `oldPlaylist` being defined
    return;
  }
  // we reloaded the same playlist so we are in a live scenario
  // and we will likely need to adjust the mediaIndex
  const mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
  this.logger_(`live window shift [${mediaSequenceDiff}]`);
  // update the mediaIndex on the SegmentLoader
  // this is important because we can abort a request and this value must be
  // equal to the last appended mediaIndex
  if (this.mediaIndex !== null) {
    this.mediaIndex -= mediaSequenceDiff;
    // this can happen if we are going to load the first segment, but get a playlist
    // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
    // new playlist was incremented by 1.
    if (this.mediaIndex < 0) {
      this.mediaIndex = null;
      this.partIndex = null;
    } else {
      const segment = this.playlist_.segments[this.mediaIndex];
      // partIndex should remain the same for the same segment
      // unless parts fell off of the playlist for this segment.
      // In that case we need to reset partIndex and resync
      if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
        const mediaIndex = this.mediaIndex;
        this.logger_(`currently processing part (index ${this.partIndex}) no longer exists.`);
        this.resetLoader();
        // We want to throw away the partIndex and the data associated with it,
        // as the part was dropped from our current playlists segment.
        // The mediaIndex will still be valid so keep that around.
        this.mediaIndex = mediaIndex;
      }
    }
  }
  // update the mediaIndex on the SegmentInfo object
  // this is important because we will update this.mediaIndex with this value
  // in `handleAppendsDone_` after the segment has been successfully appended
  if (segmentInfo) {
    segmentInfo.mediaIndex -= mediaSequenceDiff;
    if (segmentInfo.mediaIndex < 0) {
      segmentInfo.mediaIndex = null;
      segmentInfo.partIndex = null;
    } else {
      // we need to update the referenced segment so that timing information is
      // saved for the new playlist's segment, however, if the segment fell off the
      // playlist, we can leave the old reference and just lose the timing info
      if (segmentInfo.mediaIndex >= 0) {
        segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
      }
      if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
        segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
      }
    }
  }
  // carry timing info for segments that slid out of the live window into
  // the sync controller so existing sync points remain usable
  this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
}
/**
* Prevent the loader from fetching additional segments. If there
* is a segment request outstanding, it will finish processing
* before the loader halts. A segment loader can be unpaused by
* calling load().
*/
pause() {
if (this.checkBufferTimeout_) {
window.clearTimeout(this.checkBufferTimeout_);
this.checkBufferTimeout_ = null;
}
}
/**
* Returns whether the segment loader is fetching additional
* segments when given the opportunity. This property can be
* modified through calls to pause() and load().
*
* @return {boolean} true if the loader is currently paused
*/
paused() {
return this.checkBufferTimeout_ === null;
}
/**
* Delete all the buffered data and reset the SegmentLoader
*
* @param {Function} [done] an optional callback to be executed when the remove
* operation is complete
*/
resetEverything(done) {
this.ended_ = false;
this.activeInitSegmentId_ = null;
this.appendInitSegment_ = {
audio: true,
video: true
};
this.resetLoader();
// remove from 0, the earliest point, to Infinity, to signify removal of everything.
// VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
// we then clamp the value to duration if necessary.
this.remove(0, Infinity, done);
// clears fmp4 captions
if (this.transmuxer_) {
this.transmuxer_.postMessage({
action: 'clearAllMp4Captions'
});
// reset the cache in the transmuxer
this.transmuxer_.postMessage({
action: 'reset'
});
}
}
/**
* Force the SegmentLoader to resync and start loading around the currentTime instead
* of starting at the end of the buffer
*
* Useful for fast quality changes
*/
resetLoader() {
this.fetchAtBuffer_ = false;
if (this.mediaSequenceSync_) {
this.mediaSequenceSync_.resetAppendedStatus();
}
this.resyncLoader();
}
/**
* Force the SegmentLoader to restart synchronization and make a conservative guess
* before returning to the simple walk-forward method
*/
resyncLoader() {
if (this.transmuxer_) {
// need to clear out any cached data to prepare for the new segment
segmentTransmuxer.reset(this.transmuxer_);
}
this.mediaIndex = null;
this.partIndex = null;
this.syncPoint_ = null;
this.isPendingTimestampOffset_ = false;
// this is mainly to sync timing-info when switching between renditions with and without timestamp-rollover,
// so we don't want it for DASH or fragmented mp4 segments.
const isFmp4 = this.currentMediaInfo_ && this.currentMediaInfo_.isFmp4;
const isHlsTs = this.sourceType_ === 'hls' && !isFmp4;
if (isHlsTs) {
this.shouldForceTimestampOffsetAfterResync_ = true;
}
this.callQueue_ = [];
this.loadQueue_ = [];
this.metadataQueue_.id3 = [];
this.metadataQueue_.caption = [];
this.abort();
if (this.transmuxer_) {
this.transmuxer_.postMessage({
action: 'clearParsedMp4Captions'
});
}
}
/**
* Remove any data in the source buffer between start and end times
*
* @param {number} start - the start time of the region to remove from the buffer
* @param {number} end - the end time of the region to remove from the buffer
* @param {Function} [done] - an optional callback to be executed when the remove
* operation is complete
* @param {boolean} [force=false] - force all remove operations to happen
*/
remove(start, end, done = () => {}, force = false) {
// clamp end to duration if we need to remove everything.
// This is due to a browser bug that causes issues if we remove to Infinity.
// videojs/videojs-contrib-hls#1225
if (end === Infinity) {
end = this.duration_();
}
// skip removes that would throw an error
// commonly happens during a rendition switch at the start of a video
// from start 0 to end 0
if (end <= start) {
this.logger_('skipping remove because end ${end} is <= start ${start}');
return;
}
if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
this.logger_('skipping remove because no source updater or starting media info');
// nothing to remove if we haven't processed any media
return;
}
// set it to one to complete this function's removes
let removesRemaining = 1;
const removeFinished = () => {
removesRemaining--;
if (removesRemaining === 0) {
done();
}
};
if (force || !this.audioDisabled_) {
removesRemaining++;
this.sourceUpdater_.removeAudio(start, end, removeFinished);
}
// While it would be better to only remove video if the main loader has video, this
// should be safe with audio only as removeVideo will call back even if there's no
// video buffer.
//
// In theory we can check to see if there's video before calling the remove, but in
// the event th