@videojs/http-streaming
Version: 3.16.2
Play back HLS and DASH with Video.js, even where it's not natively supported
1,708 lines (1,438 loc) • 997 kB
JavaScript
/*! @name @videojs/http-streaming @version 3.16.2 @license Apache-2.0 */
import _extends from '@babel/runtime/helpers/extends';
import document from 'global/document';
import window$1 from 'global/window';
import _resolveUrl from '@videojs/vhs-utils/es/resolve-url.js';
import videojs from 'video.js';
import { Parser } from 'm3u8-parser';
import { isAudioCodec, translateLegacyCodec, codecsFromDefault, parseCodecs, getMimeForCodec, DEFAULT_VIDEO_CODEC, DEFAULT_AUDIO_CODEC, browserSupportsCodec, muxerSupportsCodec } from '@videojs/vhs-utils/es/codecs.js';
import { simpleTypeFromSourceType } from '@videojs/vhs-utils/es/media-types.js';
export { simpleTypeFromSourceType } from '@videojs/vhs-utils/es/media-types.js';
import { isArrayBufferView, concatTypedArrays, stringToBytes, toUint8 } from '@videojs/vhs-utils/es/byte-helpers';
import { generateSidxKey, parseUTCTiming, parse, addSidxSegmentsToPlaylist } from 'mpd-parser';
import parseSidx from 'mux.js/lib/tools/parse-sidx';
import { getId3Offset } from '@videojs/vhs-utils/es/id3-helpers';
import { detectContainerForBytes, isLikelyFmp4MediaSegment } from '@videojs/vhs-utils/es/containers';
import { ONE_SECOND_IN_TS } from 'mux.js/lib/utils/clock';
/**
 * @file resolve-url.js - Handling how URLs are resolved and manipulated
 */
// Re-export the @videojs/vhs-utils implementation under the local name.
const resolveUrl = _resolveUrl;
/**
 * Returns the final URL for an xhr request: the responseURL when the
 * request was redirected, otherwise the originally requested url.
 *
 * @api private
 *
 * @param {string} url - the url that was originally requested
 * @param {XMLHttpRequest} req - the completed xhr request
 *
 * @return {string} the url that playback should continue to use
 */
const resolveManifestRedirect = (url, req) => {
  // responseURL semantics are defined by the fetch spec:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  const wasRedirected = req && req.responseURL && url !== req.responseURL;
  return wasRedirected ? req.responseURL : url;
};
/**
 * Builds a debug-logging function scoped to the given source label, or a
 * no-op when videojs debug logging is unavailable.
 *
 * @param {string} source - label prepended to every log line
 * @return {Function} a logging function
 */
const logger = source => {
  if (!videojs.log.debug) {
    return function () {};
  }
  return videojs.log.debug.bind(videojs, 'VHS:', `${source} >`);
};
/**
 * Provides a compatibility layer between Video.js 7 and 8 API changes for VHS.
 */
/**
 * Delegates to videojs.obj.merge (Video.js 8) or
 * videojs.mergeOptions (Video.js 7).
 *
 * @param {...Object} args - objects to merge
 * @return {Object} the merged result
 */
function merge(...args) {
  // videojs.obj exists on Video.js 8; fall back to the videojs root (v7).
  const host = videojs.obj || videojs;
  return (host.merge || host.mergeOptions).apply(host, args);
}
/**
 * Delegates to videojs.time.createTimeRanges (Video.js 8) or
 * videojs.createTimeRanges (Video.js 7).
 *
 * Note: the previous implementation read
 * `context.createTimeRanges || context.createTimeRanges`, OR-ing the same
 * property with itself — dead code, almost certainly a typo. The single
 * lookup below is behaviorally identical.
 *
 * @param {...*} args - forwarded to the underlying createTimeRanges
 * @return {TimeRanges} the created TimeRanges object
 */
function createTimeRanges(...args) {
  // videojs.time exists on Video.js 8; fall back to the videojs root (v7).
  const context = videojs.time || videojs;
  const fn = context.createTimeRanges;
  return fn.apply(context, args);
}
/**
 * Converts provided buffered ranges to a descriptive string
 *
 * @param {TimeRanges} buffered - received buffered time ranges
 *
 * @return {string} - descriptive string
 */
function bufferedRangesToString(buffered) {
  if (buffered.length === 0) {
    return 'Buffered Ranges are empty';
  }
  let description = 'Buffered Ranges: \n';
  for (let rangeIndex = 0; rangeIndex < buffered.length; rangeIndex++) {
    const rangeStart = buffered.start(rangeIndex);
    const rangeEnd = buffered.end(rangeIndex);
    description += `${rangeStart} --> ${rangeEnd}. Duration (${rangeEnd - rangeStart})\n`;
  }
  return description;
}
/**
 * ranges
 *
 * Utilities for working with TimeRanges.
 *
 */
const TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
// can be misleading because of precision differences or when the current media has poorly
// aligned audio and video, which can cause values to be slightly off from what you would
// expect. This value is what we consider to be safe to use in such comparisons to account
// for these scenarios.
// Roughly one frame at 30fps is treated as measurement noise; three frames is
// the delta considered safe for range-containment checks (see findRange).
const SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
/**
 * Collects [start, end] pairs from a TimeRanges object that satisfy the
 * given predicate and returns them as a new TimeRanges.
 *
 * @param {TimeRanges} timeRanges - the ranges to filter
 * @param {Function} predicate - called with (start, end) for each range
 * @return {TimeRanges} a new TimeRanges containing only matching ranges
 */
const filterRanges = function (timeRanges, predicate) {
  const matches = [];
  if (timeRanges && timeRanges.length) {
    for (let index = 0; index < timeRanges.length; index++) {
      const rangeStart = timeRanges.start(index);
      const rangeEnd = timeRanges.end(index);
      if (predicate(rangeStart, rangeEnd)) {
        matches.push([rangeStart, rangeEnd]);
      }
    }
  }
  return createTimeRanges(matches);
};
/**
 * Attempts to find the buffered TimeRange that contains the specified
 * time.
 *
 * @param {TimeRanges} buffered - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object
 */
const findRange = (buffered, time) =>
  // a range "contains" the time if it is within SAFE_TIME_DELTA of it
  filterRanges(buffered, (start, end) => start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time);
/**
 * Returns the TimeRanges that begin later than the specified time.
 *
 * @param {TimeRanges} timeRanges - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object.
 */
const findNextRange = (timeRanges, time) =>
  filterRanges(timeRanges, start => start - TIME_FUDGE_FACTOR >= time);
/**
 * Returns gaps within a list of TimeRanges
 *
 * @param {TimeRanges} buffered - the TimeRanges object
 * @return {TimeRanges} a TimeRanges object of gaps
 */
const findGaps = function (buffered) {
  // fewer than two ranges means there can be no gap between them
  if (buffered.length < 2) {
    return createTimeRanges();
  }
  const gaps = [];
  for (let index = 1; index < buffered.length; index++) {
    // the gap runs from the end of one range to the start of the next
    gaps.push([buffered.end(index - 1), buffered.start(index)]);
  }
  return createTimeRanges(gaps);
};
/**
 * Calculate the intersection of two TimeRanges
 *
 * @param {TimeRanges} bufferA
 * @param {TimeRanges} bufferB
 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
 */
const bufferIntersection = function (bufferA, bufferB) {
  if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
    return createTimeRanges();
  }
  // Sweep-line approach. First gather every boundary from both buffers,
  // tagged as a range 'start' or 'end'.
  const extents = [];
  const collectBoundaries = buffer => {
    for (let i = buffer.length - 1; i >= 0; i--) {
      extents.push({
        time: buffer.start(i),
        type: 'start'
      });
      extents.push({
        time: buffer.end(i),
        type: 'end'
      });
    }
  };
  collectBoundaries(bufferA);
  collectBoundaries(bufferB);
  // Sort the boundaries chronologically.
  extents.sort((a, b) => a.time - b.time);
  // Walk the boundaries, counting how many ranges are open ("arity").
  // Reaching arity 2 means two ranges overlap (an intersection begins);
  // dropping back to 1 means the intersection just ended.
  const ranges = [];
  let arity = 0;
  let start = null;
  let end = null;
  for (const extent of extents) {
    if (extent.type === 'start') {
      arity++;
      if (arity === 2) {
        start = extent.time;
      }
    } else if (extent.type === 'end') {
      arity--;
      if (arity === 1) {
        end = extent.time;
      }
    }
    // record a completed overlapping range
    if (start !== null && end !== null) {
      ranges.push([start, end]);
      start = null;
      end = null;
    }
  }
  return createTimeRanges(ranges);
};
/**
 * Gets a human readable string for a TimeRange
 *
 * @param {TimeRange} range
 * @return {string} a human readable string
 */
const printableRange = range => {
  if (!range || !range.length) {
    return '';
  }
  const pieces = [];
  for (let i = 0; i < range.length; i++) {
    pieces.push(`${range.start(i)} => ${range.end(i)}`);
  }
  return pieces.join(', ');
};
/**
 * Calculates the amount of time left in seconds until the player hits the end of the
 * buffer and causes a rebuffer
 *
 * @param {TimeRange} buffered
 *        The state of the buffer
 * @param {number} currentTime
 *        The current time of the player
 * @param {number} playbackRate
 *        The current playback rate of the player. Defaults to 1.
 * @return {number}
 *         Time until the player has to start rebuffering in seconds.
 * @function timeUntilRebuffer
 */
const timeUntilRebuffer = function (buffered, currentTime, playbackRate = 1) {
  // with nothing buffered, the effective buffered end is 0
  let bufferedEnd = 0;
  if (buffered.length) {
    bufferedEnd = buffered.end(buffered.length - 1);
  }
  return (bufferedEnd - currentTime) / playbackRate;
};
/**
 * Converts a TimeRanges object into an array representation
 *
 * @param {TimeRanges} timeRanges
 * @return {Array} array of { start, end } objects, one per range
 */
const timeRangesToArray = timeRanges =>
  Array.from({ length: timeRanges.length }, (_, i) => ({
    start: timeRanges.start(i),
    end: timeRanges.end(i)
  }));
/**
 * Determines if two time range objects are different.
 *
 * @param {TimeRange} a
 *        the first time range object to check
 *
 * @param {TimeRange} b
 *        the second time range object to check
 *
 * @return {Boolean}
 *         Whether the time range objects differ
 */
const isRangeDifferent = function (a, b) {
  // identical objects cannot differ
  if (a === b) {
    return false;
  }
  // exactly one of the two is missing
  if ((!a && b) || (!b && a)) {
    return true;
  }
  // differing range counts means they differ
  if (a.length !== b.length) {
    return true;
  }
  // compare each start/end pair
  for (let i = 0; i < a.length; i++) {
    const sameStart = a.start(i) === b.start(i);
    const sameEnd = a.end(i) === b.end(i);
    if (!sameStart || !sameEnd) {
      return true;
    }
  }
  // equal length and every pair matches: the same time range
  return false;
};
/**
 * Returns the end of the last buffered range, or undefined when the
 * argument is missing, empty, or has no end() method.
 *
 * @param {TimeRanges} a - the ranges to inspect
 * @return {number|undefined} the end time of the final range
 */
const lastBufferedEnd = function (a) {
  const usable = a && a.length && a.end;
  if (!usable) {
    return;
  }
  return a.end(a.length - 1);
};
/**
 * A utility function to add up the amount of time in a timeRange
 * after a specified startTime.
 * ie:[[0, 10], [20, 40], [50, 60]] with a startTime 0
 *     would return 40 as there are 40s seconds after 0 in the timeRange
 *
 * @param {TimeRange} range
 *        The range to check against
 * @param {number} startTime
 *        The time in the time range that you should start counting from
 *
 * @return {number}
 *          The number of seconds in the buffer passed the specified time.
 */
const timeAheadOf = function (range, startTime) {
  if (!range || !range.length) {
    return 0;
  }
  let total = 0;
  for (let i = 0; i < range.length; i++) {
    const start = range.start(i);
    const end = range.end(i);
    // range lies entirely before startTime: contributes nothing
    if (startTime > end) {
      continue;
    }
    // startTime falls inside this range: count only the remainder
    if (startTime > start && startTime <= end) {
      total += end - startTime;
      continue;
    }
    // range lies entirely after startTime: count the whole range
    total += end - start;
  }
  return total;
};
/**
 * @file playlist.js
 *
 * Playlist related utilities.
 */
/**
 * Get the duration of a segment, with special cases for
 * llhls segments that do not have a duration yet.
 *
 * @param {Object} playlist
 *        the playlist that the segment belongs to.
 * @param {Object} segment
 *        the segment to get a duration for.
 *
 * @return {number}
 *          the segment duration
 */
const segmentDurationWithParts = (playlist, segment) => {
  // A non-preload segment already carries an accurate duration.
  if (!segment.preload) {
    return segment.duration;
  }
  // Preload segments: sum the durations of known parts, plus one
  // partTargetDuration for each PART preload hint (hints carry no
  // duration of their own yet).
  const partTotal = (segment.parts || []).reduce((sum, part) => sum + part.duration, 0);
  const hintTotal = (segment.preloadHints || []).reduce(
    (sum, hint) => sum + (hint.type === 'PART' ? playlist.partTargetDuration : 0),
    0
  );
  return partTotal + hintTotal;
};
/**
 * A function to get a combined list of parts and segments with durations
 * and indexes.
 *
 * @param {Playlist} playlist the playlist to get the list for.
 *
 * @return {Array} The part/segment list.
 */
const getPartsAndSegments = playlist => (playlist.segments || []).reduce((acc, segment, si) => {
  if (!segment.parts) {
    // whole-segment entry: no part information
    acc.push({
      duration: segment.duration,
      segmentIndex: si,
      partIndex: null,
      segment,
      part: null
    });
    return acc;
  }
  // one entry per part, each tagged with its parent segment
  segment.parts.forEach((part, pi) => {
    acc.push({
      duration: part.duration,
      segmentIndex: si,
      partIndex: pi,
      part,
      segment
    });
  });
  return acc;
}, []);
/**
 * Returns the parts array of the final segment in a media playlist, or an
 * empty array when there is no such segment or it has no parts.
 *
 * @param {Object} media - the media playlist
 * @return {Array} the last segment's parts
 */
const getLastParts = media => {
  const segments = media.segments;
  const lastSegment = segments && segments.length && segments[segments.length - 1];
  return (lastSegment && lastSegment.parts) || [];
};
/**
 * Counts how many parts are known for a playlist's preload segment:
 * completed parts plus PART preload hints. Returns undefined when no
 * preload segment exists.
 *
 * @param {Object} options
 * @param {Object} options.preloadSegment - the playlist's preload segment
 * @return {number|undefined} the known part count
 */
const getKnownPartCount = ({
  preloadSegment
}) => {
  if (!preloadSegment) {
    return;
  }
  const {
    parts,
    preloadHints
  } = preloadSegment;
  const hintCount = (preloadHints || []).filter(hint => hint.type === 'PART').length;
  const partCount = parts && parts.length ? parts.length : 0;
  return hintCount + partCount;
};
/**
 * Get the number of seconds to delay from the end of a
 * live playlist.
 *
 * @param {Playlist} main the main playlist
 * @param {Playlist} media the media playlist
 * @return {number} the hold back in seconds.
 */
const liveEdgeDelay = (main, media) => {
  // VOD playlists have no live edge.
  if (media.endList) {
    return 0;
  }
  // DASH suggestedPresentationDelay always wins.
  if (main && main.suggestedPresentationDelay) {
    return main.suggestedPresentationDelay;
  }
  const hasParts = getLastParts(media).length > 0;
  // ll-hls part-level delays take precedence over full-segment delays
  if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
    return media.serverControl.partHoldBack;
  }
  if (hasParts && media.partTargetDuration) {
    return media.partTargetDuration * 3;
  }
  // full-segment delays
  if (media.serverControl && media.serverControl.holdBack) {
    return media.serverControl.holdBack;
  }
  if (media.targetDuration) {
    return media.targetDuration * 3;
  }
  return 0;
};
/**
 * walk backward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {Number} endSequence the mediaSequence to stop walking on
 * @return {{result: number, precise: boolean}} duration estimate and
 *         whether it came from real timing information
 */
const backwardDuration = function (playlist, endSequence) {
  let result = 0;
  let i = endSequence - playlist.mediaSequence;
  // If the segment immediately following the interval has timing
  // information, use it directly.
  let segment = playlist.segments[i];
  if (segment) {
    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start,
        precise: true
      };
    }
    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - segment.duration,
        precise: true
      };
    }
  }
  // Otherwise walk backward, accumulating durations until a segment with
  // timeline information earlier than endSequence is found.
  while (i--) {
    segment = playlist.segments[i];
    if (typeof segment.end !== 'undefined') {
      return {
        result: result + segment.end,
        precise: true
      };
    }
    result += segmentDurationWithParts(playlist, segment);
    if (typeof segment.start !== 'undefined') {
      return {
        result: result + segment.start,
        precise: true
      };
    }
  }
  // no timing information found; the estimate is imprecise
  return {
    result,
    precise: false
  };
};
/**
 * walk forward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 * @return {{result: number, precise: boolean}} duration estimate and
 *         whether it came from real timing information
 */
const forwardDuration = function (playlist, endSequence) {
  let accumulated = 0;
  // Walk forward from endSequence looking for the earliest segment that
  // carries timeline information.
  for (let i = endSequence - playlist.mediaSequence; i < playlist.segments.length; i++) {
    const segment = playlist.segments[i];
    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start - accumulated,
        precise: true
      };
    }
    accumulated += segmentDurationWithParts(playlist, segment);
    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - accumulated,
        precise: true
      };
    }
  }
  // indicate we didn't find a useful duration estimate
  return {
    result: -1,
    precise: false
  };
};
/**
 * Calculate the media duration from the segments associated with a
 * playlist. The duration of a subinterval of the available segments
 * may be calculated by specifying an end index.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper boundary
 *        for the playlist. Defaults to playlist length.
 * @param {number} expired the amount of time that has dropped
 *        off the front of the playlist in a live scenario
 * @return {number} the duration between the first available segment
 *         and end index.
 */
const intervalDuration = function (playlist, endSequence, expired) {
  if (typeof endSequence === 'undefined') {
    endSequence = playlist.mediaSequence + playlist.segments.length;
  }
  if (endSequence < playlist.mediaSequence) {
    return 0;
  }
  // Prefer the backward walk: it can use timing information provided
  // directly from the Media Source.
  const backward = backwardDuration(playlist, endSequence);
  if (backward.precise) {
    return backward.result;
  }
  // Next try the forward walk for a precise estimate.
  const forward = forwardDuration(playlist, endSequence);
  if (forward.precise) {
    return forward.result;
  }
  // Fall back to the less-precise, playlist-based estimate.
  return backward.result + expired;
};
/**
 * Calculates the duration of a playlist. If a start and end index
 * are specified, the duration will be for the subset of the media
 * timeline between those two indices. The total duration for live
 * playlists is always Infinity.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper
 *        boundary for the playlist. Defaults to the playlist media
 *        sequence number plus its length.
 * @param {number=} expired the amount of time that has
 *        dropped off the front of the playlist in a live scenario
 * @return {number} the duration between the start index and end
 *         index.
 */
const duration = function (playlist, endSequence, expired) {
  if (!playlist) {
    return 0;
  }
  if (typeof expired !== 'number') {
    expired = 0;
  }
  // With no slice requested, playlist-level duration indicators apply.
  if (typeof endSequence === 'undefined') {
    // an explicit total duration from the playlist wins
    if (playlist.totalDuration) {
      return playlist.totalDuration;
    }
    // live playlists have an infinite duration
    if (!playlist.endList) {
      return window$1.Infinity;
    }
  }
  // otherwise compute the total from the segment durations
  return intervalDuration(playlist, endSequence, expired);
};
/**
 * Calculate the time between two indexes in the current playlist;
 * neither the start- nor the end-index need to be within the current
 * playlist, in which case the defaultDuration is used
 * to approximate the durations of the segments
 *
 * @param {Array} options.durationList list to iterate over for durations.
 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
 * @param {number} options.startIndex partsAndSegments index to start
 * @param {number} options.endIndex partsAndSegments index to end.
 * @return {number} the number of seconds between startIndex and endIndex
 */
const sumDurations = function ({
  defaultDuration,
  durationList,
  startIndex,
  endIndex
}) {
  // normalize so that startIndex <= endIndex
  if (startIndex > endIndex) {
    [startIndex, endIndex] = [endIndex, startIndex];
  }
  let total = 0;
  if (startIndex < 0) {
    // indices before the list contribute the default duration each
    for (let i = startIndex; i < Math.min(0, endIndex); i++) {
      total += defaultDuration;
    }
    startIndex = 0;
  }
  for (let i = startIndex; i < endIndex; i++) {
    total += durationList[i].duration;
  }
  return total;
};
/**
 * Calculates the playlist end time
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 *        dropped off the front of the playlist in a live scenario
 * @param {boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
 *        playlist end calculation should consider the safe live end
 *        (truncate the playlist end by three segments). This is normally
 *        used for calculating the end of the playlist's seekable range.
 *        This takes into account the value of liveEdgePadding.
 *        Setting liveEdgePadding to 0 is equivalent to setting this to false.
 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
 *        If this is provided, it is used in the safe live end calculation.
 *        Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
 *        Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {number} the end time of playlist
 * @function playlistEnd
 */
const playlistEnd = function (playlist, expired, useSafeLiveEnd, liveEdgePadding) {
  if (!playlist || !playlist.segments) {
    return null;
  }
  // VOD: the playlist end is simply its duration
  if (playlist.endList) {
    return duration(playlist);
  }
  if (expired === null) {
    return null;
  }
  expired = expired || 0;
  const endSequence = playlist.mediaSequence + playlist.segments.length;
  let lastSegmentEndTime = intervalDuration(playlist, endSequence, expired);
  if (useSafeLiveEnd) {
    // hold back from the live edge by the configured (or derived) padding
    const padding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
    lastSegmentEndTime -= padding;
  }
  // never report a time less than zero
  return Math.max(0, lastSegmentEndTime);
};
/**
 * Calculates the interval of time that is currently seekable in a
 * playlist. The returned time ranges are relative to the earliest
 * moment in the specified playlist that is still available. A full
 * seekable implementation for live streams would need to offset
 * these values by the duration of content that has expired from the
 * stream.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 *        dropped off the front of the playlist in a live scenario
 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
 *        Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {TimeRanges} the periods of time that are valid targets
 *         for seeking
 */
const seekable = function (playlist, expired, liveEdgePadding) {
  const seekableStart = expired || 0;
  // seekable calculations always use the safe live end
  const end = playlistEnd(playlist, expired, true, liveEdgePadding);
  if (end === null) {
    return createTimeRanges();
  }
  // the seekable end can never precede the seekable start
  const seekableEnd = end < seekableStart ? seekableStart : end;
  return createTimeRanges(seekableStart, seekableEnd);
};
/**
 * Determine the index and estimated starting time of the segment that
 * contains a specified playback position in a media playlist.
 *
 * @param {Object} options.playlist the media playlist to query
 * @param {number} options.currentTime The number of seconds since the earliest
 * possible position to determine the containing segment for
 * @param {number} options.startTime the time when the segment/part starts
 * @param {number} options.startingSegmentIndex the segment index to start looking at.
 * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
 * @param {boolean} [options.exactManifestTimings] when true, compare times
 * exactly instead of allowing a TIME_FUDGE_FACTOR tolerance.
 *
 * @return {Object} an object with partIndex, segmentIndex, and startTime.
 */
const getMediaInfoForTime = function ({
  playlist,
  currentTime,
  startingSegmentIndex,
  startingPartIndex,
  startTime,
  exactManifestTimings
}) {
  // time is currentTime relative to the start of the starting segment/part;
  // negative means the target is before the starting index.
  let time = currentTime - startTime;
  const partsAndSegments = getPartsAndSegments(playlist);
  let startIndex = 0;
  // locate the flattened index matching startingSegmentIndex/startingPartIndex
  for (let i = 0; i < partsAndSegments.length; i++) {
    const partAndSegment = partsAndSegments[i];
    if (startingSegmentIndex !== partAndSegment.segmentIndex) {
      continue;
    } // skip this if part index does not match.
    if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
      continue;
    }
    startIndex = i;
    break;
  }
  if (time < 0) {
    // Walk backward from startIndex in the playlist, adding durations
    // until we find a segment that contains `time` and return it
    if (startIndex > 0) {
      for (let i = startIndex - 1; i >= 0; i--) {
        const partAndSegment = partsAndSegments[i];
        time += partAndSegment.duration;
        if (exactManifestTimings) {
          if (time < 0) {
            continue;
          }
        } else if (time + TIME_FUDGE_FACTOR <= 0) {
          continue;
        }
        return {
          partIndex: partAndSegment.partIndex,
          segmentIndex: partAndSegment.segmentIndex,
          startTime: startTime - sumDurations({
            defaultDuration: playlist.targetDuration,
            durationList: partsAndSegments,
            startIndex,
            endIndex: i
          })
        };
      }
    } // We were unable to find a good segment within the playlist
    // so select the first segment
    return {
      partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
      segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
      startTime: currentTime
    };
  } // When startIndex is negative, we first walk forward to first segment
  // adding target durations. If we "run out of time" before getting to
  // the first segment, return the first segment
  if (startIndex < 0) {
    for (let i = startIndex; i < 0; i++) {
      time -= playlist.targetDuration;
      if (time < 0) {
        return {
          partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
          segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
          startTime: currentTime
        };
      }
    }
    startIndex = 0;
  } // Walk forward from startIndex in the playlist, subtracting durations
  // until we find a segment that contains `time` and return it
  for (let i = startIndex; i < partsAndSegments.length; i++) {
    const partAndSegment = partsAndSegments[i];
    time -= partAndSegment.duration;
    const canUseFudgeFactor = partAndSegment.duration > TIME_FUDGE_FACTOR;
    const isExactlyAtTheEnd = time === 0;
    const isExtremelyCloseToTheEnd = canUseFudgeFactor && time + TIME_FUDGE_FACTOR >= 0;
    if (isExactlyAtTheEnd || isExtremelyCloseToTheEnd) {
      // 1) We are exactly at the end of the current segment.
      // 2) We are extremely close to the end of the current segment (The difference is less than 1 / 30).
      //    We may encounter this situation when
      //    we don't have exact match between segment duration info in the manifest and the actual duration of the segment
      //    For example:
      //    We appended 3 segments 10 seconds each, meaning we should have 30 sec buffered,
      //    but we the actual buffered is 29.99999
      //
      //    In both cases:
      //    if we passed current time -> it means that we already played current segment
      //    if we passed buffered.end -> it means that this segment is already loaded and buffered
      //    we should select the next segment if we have one:
      if (i !== partsAndSegments.length - 1) {
        continue;
      }
    }
    if (exactManifestTimings) {
      if (time > 0) {
        continue;
      }
    } else if (time - TIME_FUDGE_FACTOR >= 0) {
      continue;
    }
    return {
      partIndex: partAndSegment.partIndex,
      segmentIndex: partAndSegment.segmentIndex,
      startTime: startTime + sumDurations({
        defaultDuration: playlist.targetDuration,
        durationList: partsAndSegments,
        startIndex,
        endIndex: i
      })
    };
  } // We are out of possible candidates so load the last one...
  return {
    segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
    partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
    startTime: currentTime
  };
};
/**
 * Check whether the playlist is excluded or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is excluded or not
 * @function isExcluded
 */
const isExcluded = function (playlist) {
  const until = playlist.excludeUntil;
  // excluded while the exclusion deadline lies in the future
  return until && until > Date.now();
};
/**
 * Check whether the playlist is compatible with current playback configuration or has
 * been excluded permanently for being incompatible.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is incompatible or not
 * @function isIncompatible
 */
const isIncompatible = function (playlist) {
  const until = playlist.excludeUntil;
  // a permanent (Infinity) exclusion marks the playlist incompatible
  return until && until === Infinity;
};
/**
 * Check whether the playlist is enabled or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is enabled or not
 * @function isEnabled
 */
const isEnabled = function (playlist) {
  // enabled means neither manually disabled nor currently excluded
  return !playlist.disabled && !isExcluded(playlist);
};
/**
 * Check whether the playlist has been manually disabled through the representations api.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is disabled manually or not
 * @function isDisabled
 */
const isDisabled = playlist => playlist.disabled;
/**
 * Returns whether the current playlist is an AES encrypted HLS stream
 *
 * @param {Object} media the media playlist to inspect
 * @return {boolean} true if it's an AES encrypted HLS stream
 */
const isAes = function (media) {
  // any segment carrying a key implies AES encryption
  return media.segments.some(segment => Boolean(segment.key));
};
/**
 * Checks if the playlist has a value for the specified attribute
 *
 * @param {string} attr
 *        Attribute to check for
 * @param {Object} playlist
 *        The media playlist object
 * @return {boolean}
 *         Whether the playlist contains a value for the attribute or not
 * @function hasAttribute
 */
const hasAttribute = function (attr, playlist) {
  const { attributes } = playlist;
  return attributes && attributes[attr];
};
/**
 * Estimates the time required to complete a segment download from the specified playlist
 *
 * @param {number} segmentDuration
 *        Duration of requested segment
 * @param {number} bandwidth
 *        Current measured bandwidth of the player
 * @param {Object} playlist
 *        The media playlist object
 * @param {number=} bytesReceived
 *        Number of bytes already received for the request. Defaults to 0
 * @return {number|NaN}
 *         The estimated time to request the segment. NaN if bandwidth information for
 *         the given playlist is unavailable
 * @function estimateSegmentRequestTime
 */
const estimateSegmentRequestTime = function (segmentDuration, bandwidth, playlist, bytesReceived = 0) {
  if (!hasAttribute('BANDWIDTH', playlist)) {
    return NaN;
  }
  // estimated bits for the whole segment at the playlist's declared bitrate
  const estimatedBits = segmentDuration * playlist.attributes.BANDWIDTH;
  // subtract what has already arrived (bytes -> bits), divide by throughput
  const remainingBits = estimatedBits - bytesReceived * 8;
  return remainingBits / bandwidth;
};
/*
 * Returns whether the current playlist is the lowest rendition
 *
 * @return {Boolean} true if on lowest rendition
 */
const isLowestEnabledRendition = (main, media) => {
  // a single playlist is trivially the lowest
  if (main.playlists.length === 1) {
    return true;
  }
  const currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
  // lowest exactly when no enabled playlist has a lower bandwidth
  const lowerEnabled = main.playlists.filter(playlist => {
    if (!isEnabled(playlist)) {
      return false;
    }
    return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
  });
  return lowerEnabled.length === 0;
};
/**
 * Determines whether two playlist objects refer to the same playlist,
 * comparing identity first, then id, resolvedUri, and finally uri
 * (in decreasing order of accuracy).
 *
 * @param {Object} a - the first playlist
 * @param {Object} b - the second playlist
 * @return {boolean} true when the playlists match
 */
const playlistMatch = (a, b) => {
  // no match when either (or both) playlist is missing
  if (!a || !b) {
    return false;
  }
  // the same object always matches itself
  if (a === b) {
    return true;
  }
  // id is the most accurate identifier, then resolvedUri, then uri
  // (plain uri may miss some cases for relative uris)
  const keysToCompare = ['id', 'resolvedUri', 'uri'];
  return keysToCompare.some(key => Boolean(a[key] && b[key] && a[key] === b[key]));
};
/**
 * Runs the callback against every audio variant in the main playlist's
 * AUDIO media groups, stopping at the first truthy result.
 *
 * @param {Object} main - the main playlist object
 * @param {Function} callback - invoked with each audio variant
 * @return {boolean} true when the callback matched any variant
 */
const someAudioVariant = function (main, callback) {
  const AUDIO = (main && main.mediaGroups && main.mediaGroups.AUDIO) || {};
  let found = false;
  for (const groupName of Object.keys(AUDIO)) {
    for (const label of Object.keys(AUDIO[groupName])) {
      found = callback(AUDIO[groupName][label]);
      // stop scanning on the first match
      if (found) {
        return true;
      }
    }
  }
  return !!found;
};
/**
 * Determines whether a main playlist describes an audio-only presentation.
 *
 * @param {Object} main - the main playlist object
 * @return {boolean} true when the presentation is audio only
 */
const isAudioOnly = main => {
  // With no main playlists, this is audio only exactly when audio-group
  // variants (with playlists or a uri) exist.
  if (!main || !main.playlists || !main.playlists.length) {
    return someAudioVariant(main, variant => variant.playlists && variant.playlists.length || variant.uri);
  }
  // Otherwise every playlist must either declare only audio codecs or
  // belong to an audio media group.
  for (let i = 0; i < main.playlists.length; i++) {
    const playlist = main.playlists[i];
    const CODECS = playlist.attributes && playlist.attributes.CODECS;
    // all declared codecs are audio: this playlist is audio only
    if (CODECS && CODECS.split(',').every(c => isAudioCodec(c))) {
      continue;
    }
    // the playlist belongs to an audio group: audio only
    if (someAudioVariant(main, variant => playlistMatch(playlist, variant))) {
      continue;
    }
    // this playlist is not audio, so the presentation is not audio only
    return false;
  }
  // every playlist checked out as audio
  return true;
}; // exports
// Aggregated playlist-utility API. Every property references a helper
// defined earlier in this module; this object is both used internally
// and exported for consumers needing playlist duration/seekable/
// selection logic.
var Playlist = {
liveEdgeDelay,
duration,
seekable,
getMediaInfoForTime,
isEnabled,
isDisabled,
isExcluded,
isIncompatible,
playlistEnd,
isAes,
hasAttribute,
estimateSegmentRequestTime,
isLowestEnabledRendition,
isAudioOnly,
playlistMatch,
segmentDurationWithParts
};
// Alias video.js's logger for the manifest warnings/errors emitted below.
const log = videojs.log;
/**
 * Build the unique id VHS assigns to a playlist: its index within the
 * playlists array joined to its uri, e.g. `0-media.m3u8`.
 *
 * @param {number} index
 *        Position of the playlist in the playlists array
 * @param {string} uri
 *        The playlist's uri
 * @return {string}
 *         The generated playlist id
 */
const createPlaylistID = (index, uri) => `${index}-${uri}`;
/**
 * Default factory for a media-group placeholder id/uri.
 *
 * @param {string} type
 *        The media group type (e.g. AUDIO, SUBTITLES)
 * @param {string} group
 *        The group key within that type
 * @param {string} label
 *        The label key within that group
 * @return {string}
 *         The placeholder uri for the group
 */
const groupID = (type, group, label) => `placeholder-uri-${type}-${group}-${label}`;
/**
 * Parse an m3u8 manifest string into a manifest object, applying VHS
 * defaults (targetDuration / partTargetDuration) and optionally stripping
 * LL-HLS features.
 *
 * @param {Object} options
 * @param {Function} [options.onwarn]
 *        Called when the parser triggers a warning event, and for the
 *        default-value warnings this function emits itself.
 * @param {Function} [options.oninfo]
 *        Called when the parser triggers an info event.
 * @param {string} options.manifestString
 *        The downloaded manifest string.
 * @param {Object[]} [options.customTagParsers=[]]
 *        Custom tag parsers for the m3u8-parser instance.
 * @param {Object[]} [options.customTagMappers=[]]
 *        Custom tag mappers for the m3u8-parser instance.
 * @param {boolean} [options.llhls]
 *        Whether to keep ll-hls features in the manifest after parsing.
 * @return {Object}
 *         The manifest object.
 */
const parseManifest = ({
  onwarn,
  oninfo,
  manifestString,
  customTagParsers = [],
  customTagMappers = [],
  llhls
}) => {
  const parser = new Parser();

  if (onwarn) {
    parser.on('warn', onwarn);
  }
  if (oninfo) {
    parser.on('info', oninfo);
  }

  for (const customParser of customTagParsers) {
    parser.addParser(customParser);
  }
  for (const mapper of customTagMappers) {
    parser.addTagMapper(mapper);
  }

  parser.push(manifestString);
  parser.end();

  const manifest = parser.manifest;

  // remove llhls features from the parsed manifest if we don't want
  // llhls support.
  if (!llhls) {
    for (const k of ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration']) {
      if (manifest.hasOwnProperty(k)) {
        delete manifest[k];
      }
    }

    if (manifest.segments) {
      for (const segment of manifest.segments) {
        for (const k of ['parts', 'preloadHints']) {
          if (segment.hasOwnProperty(k)) {
            delete segment[k];
          }
        }
      }
    }
  }

  // default a missing targetDuration to the longest segment duration
  // (or 10s when there are no segments), warning the caller.
  if (!manifest.targetDuration) {
    let targetDuration = 10;

    if (manifest.segments && manifest.segments.length) {
      targetDuration = manifest.segments.reduce((acc, s) => Math.max(acc, s.duration), 0);
    }

    if (onwarn) {
      onwarn({
        message: `manifest has no targetDuration defaulting to ${targetDuration}`
      });
    }
    manifest.targetDuration = targetDuration;
  }

  // when LL-HLS parts are present but PART-TARGET is missing, default it
  // to the longest part duration.
  const parts = getLastParts(manifest);

  if (parts.length && !manifest.partTargetDuration) {
    const partTargetDuration = parts.reduce((acc, p) => Math.max(acc, p.duration), 0);

    if (onwarn) {
      onwarn({
        message: `manifest has no partTargetDuration defaulting to ${partTargetDuration}`
      });
      log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
    }
    manifest.partTargetDuration = partTargetDuration;
  }

  return manifest;
};
/**
 * Invoke a callback for every entry of the supported media-group types
 * (AUDIO and SUBTITLES) in the main manifest.
 *
 * @param {Object} main
 *        The parsed main manifest object
 * @param {Function} callback
 *        Called with (mediaProperties, mediaType, groupKey, labelKey)
 *        for each media group entry
 */
const forEachMediaGroup = (main, callback) => {
  if (!main.mediaGroups) {
    return;
  }

  for (const mediaType of ['AUDIO', 'SUBTITLES']) {
    const groups = main.mediaGroups[mediaType];

    if (!groups) {
      continue;
    }

    for (const groupKey in groups) {
      for (const labelKey in groups[groupKey]) {
        callback(groups[groupKey][labelKey], mediaType, groupKey, labelKey);
      }
    }
  }
};
/**
 * Adds properties and attributes to the playlist to keep consistent
 * functionality for playlists throughout VHS.
 *
 * @param {Object} config
 *        Arguments object
 * @param {Object} config.playlist
 *        The media playlist (mutated in place)
 * @param {string} [config.uri]
 *        The uri to the media playlist (if the media playlist is not from
 *        within a main playlist)
 * @param {string} config.id
 *        ID to use for the playlist
 */
const setupMediaPlaylist = ({
  playlist,
  uri,
  id
}) => {
  playlist.id = id;
  playlist.playlistErrors_ = 0;

  // HLS media playlists do not contain their own source URI, so when the
  // caller knows the URI it was requested from, attach it for consistency
  // across VHS.
  if (uri) {
    playlist.uri = uri;
  }

  // Guarantee an attributes object. HLS main playlists may omit attributes
  // that the spec requires, and m3u8-parser does not attach one to media
  // playlists at all; an empty object avoids undefined reference errors
  // throughout the project.
  if (!playlist.attributes) {
    playlist.attributes = {};
  }
};
/**
 * Adds ID, resolvedUri, and attributes properties to each playlist of the
 * main manifest where necessary, and registers each playlist on the
 * playlists array under both its ID and its URI.
 *
 * @param {Object} main
 *        The main playlist (mutated in place)
 */
const setupMediaPlaylists = main => {
  for (let i = main.playlists.length - 1; i >= 0; i--) {
    const playlist = main.playlists[i];

    setupMediaPlaylist({
      playlist,
      id: createPlaylistID(i, playlist.uri)
    });
    playlist.resolvedUri = resolveUrl(main.uri, playlist.uri);

    // keyed references alongside the indexed entries; the URI reference
    // exists for backwards compatibility
    main.playlists[playlist.id] = playlist;
    main.playlists[playlist.uri] = playlist;

    // Although the spec states an #EXT-X-STREAM-INF tag MUST have a
    // BANDWIDTH attribute, the stream can still be played without it.
    // An attributes object was already ensured above, so just warn so the
    // manifest can be fixed.
    if (!playlist.attributes.BANDWIDTH) {
      log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
    }
  }
};
/**
 * Adds a resolvedUri property to each media group that declares a uri,
 * resolving it against the main manifest's uri.
 *
 * @param {Object} main
 *        The main playlist (media groups mutated in place)
 */
const resolveMediaGroupUris = main => {
  forEachMediaGroup(main, properties => {
    if (!properties.uri) {
      return;
    }
    properties.resolvedUri = resolveUrl(main.uri, properties.uri);
  });
};
/**
 * Creates a main playlist wrapper to insert a sole media playlist into.
 *
 * @param {Object} media
 *        Media playlist (currently unused; kept for interface stability)
 * @param {string} uri
 *        The media URI
 * @return {Object}
 *         A synthetic main playlist containing the single media playlist
 */
const mainForMedia = (media, uri) => {
  const id = createPlaylistID(0, uri);
  const playlist = {
    uri,
    id,
    resolvedUri: uri,
    // m3u8-parser does not attach an attributes property to media
    // playlists, so attach one to avoid undefined reference errors
    attributes: {}
  };
  const main = {
    mediaGroups: {
      'AUDIO': {},
      'VIDEO': {},
      'CLOSED-CAPTIONS': {},
      'SUBTITLES': {}
    },
    uri: window$1.location.href,
    resolvedUri: window$1.location.href,
    playlists: [playlist]
  };

  // ID reference, plus a URI reference for backwards compatibility
  main.playlists[id] = playlist;
  main.playlists[uri] = playlist;
  return main;
};
/**
 * Does an in-place update of the main manifest to add updated playlist URI
 * references as well as other properties needed by VHS that aren't included
 * by the parser (ids, resolvedUris, attributes objects, media-group
 * placeholder playlists).
 *
 * @param {Object} main
 *        main manifest object (mutated in place)
 * @param {string} uri
 *        The source URI
 * @param {Function} [createGroupID=groupID]
 *        A function to determine how to create the groupID for mediaGroups;
 *        called with (mediaType, groupKey, labelKey, playlist)
 */
const addPropertiesToMain = (main, uri, createGroupID = groupID) => {
main.uri = uri;
// Set up phony URIs for the playlists since playlists are referenced by
// their URIs throughout VHS, but some formats (e.g., DASH) don't have
// external URIs. Must run before isAudioOnly below, which may match
// playlists by uri.
// TODO: consider adding dummy URIs in mpd-parser
for (let i = 0; i < main.playlists.length; i++) {
if (!main.playlists[i].uri) {
const phonyUri = `placeholder-uri-${i}`;
main.playlists[i].uri = phonyUri;
}
}
const audioOnlyMain = isAudioOnly(main);
forEachMediaGroup(main, (properties, mediaType, groupKey, labelKey) => {
// ensure the group has a playlists array
if (!properties.playlists || !properties.playlists.length) {
// If the manifest is audio only and this media group does not have a uri,
// check if the media group is located in the main list of playlists. If it
// is, don't add placeholder properties as it shouldn't be considered an
// alternate audio track.
if (audioOnlyMain && mediaType === 'AUDIO' && !properties.uri) {
for (let i = 0; i < main.playlists.length; i++) {
const p = main.playlists[i];
if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
return;
}
}
}
// no playlists on the group: treat a copy of the group's own properties
// as its sole playlist
properties.playlists = [_extends({}, properties)];
}
properties.playlists.forEach(function (p, i) {
const groupId = createGroupID(mediaType, groupKey, labelKey, p);
const id = createPlaylistID(i, groupId);
if (p.uri) {
p.resolvedUri = p.resolvedUri || resolveUrl(main.uri, p.uri);
} else {
// DEPRECATED, this has been added to prevent a breaking change.
// previously we only ever had a single media group playlist, so
// we mark the first playlist uri without prepending the index as we used to
// ideally we would do all of the playlists the same way.
p.uri = i === 0 ? groupId : id;
// don't resolve a placeholder uri to an absolute url, just use
// the placeholder again
p.resolvedUri = p.uri;
}
p.id = p.id || id;
// add an empty attributes object; all playlists are expected to have one
p.attributes = p.attributes || {};
// setup ID and URI references (URI for backwards compatibility)
main.playlists[p.id] = p;
main.playlists[p.uri] = p;
});
});
setupMediaPlaylists(main);
resolveMediaGroupUris(main);
};
/**
 * Tracks EXT-X-DATERANGE tags across playlist refreshes and converts their
 * date-based start/end times into stream-time seconds relative to the first
 * segment's programDateTime.
 */
class DateRangesStorage {
  constructor() {
    // seconds offset derived from the first segment's programDateTime;
    // null until setOffset succeeds
    this.offset_ = null;
    this.pendingDateRanges_ = new Map();
    this.processedDateRanges_ = new Map();
  }

  /**
   * Record the time offset from the very first segment of the very first
   * playlist load. No-op once set, or when no usable segment is given.
   *
   * @param {Object[]} [segments=[]] playlist segments
   */
  setOffset(segments = []) {
    // already set, or nothing to derive it from
    if (this.offset_ !== null || !segments.length) {
      return;
    }

    const [firstSegment] = segments;

    // cannot derive an offset without a program date time
    if (firstSegment.programDateTime === undefined) {
      return;
    }

    this.offset_ = firstSegment.programDateTime / 1000;
  }

  /**
   * Replace the pending set with the given date ranges, dropping processed
   * entries that start before the earliest new range.
   *
   * @param {Object[]} [dateRanges=[]] parsed EXT-X-DATERANGE objects
   */
  setPendingDateRanges(dateRanges = []) {
    if (!dateRanges.length) {
      return;
    }

    const [firstRange] = dateRanges;

    this.trimProcessedDateRanges_(firstRange.startDate.getTime());

    const pending = new Map();

    for (const dateRange of dateRanges) {
      pending.set(dateRange.id, dateRange);
    }
    this.pendingDateRanges_ = pending;
  }

  /**
   * Mark a date range as handled so it is not surfaced again.
   *
   * @param {Object} dateRange the handled date range
   */
  processDateRange(dateRange) {
    this.pendingDateRanges_.delete(dateRange.id);
    this.processedDateRanges_.set(dateRange.id, dateRange);
  }

  /**
   * Produce the pending, not-yet-processed date ranges with startTime and
   * endTime (in stream seconds) computed, honoring END-ON-NEXT via CLASS
   * grouping. Each returned range also gets a processDateRange() callback.
   *
   * @return {Object[]} date ranges ready for processing (empty before the
   *         offset is known)
   */
  getDateRangesToProcess() {
    if (this.offset_ === null) {
      return [];
    }

    const byClass = {};
    const toProcess = [];

    for (const [id, dateRange] of this.pendingDateRanges_) {
      if (this.processedDateRanges_.has(id)) {
        continue;
      }

      dateRange.startTime = dateRange.startDate.getTime() / 1000 - this.offset_;
      dateRange.processDateRange = () => this.processDateRange(dateRange);
      toProcess.push(dateRange);

      // group by CLASS so END-ON-NEXT ranges can find their successor
      if (!dateRange.class) {
        continue;
      }

      if (byClass[dateRange.class]) {
        dateRange.classListIndex = byClass[dateRange.class].push(dateRange) - 1;
      } else {
        byClass[dateRange.class] = [dateRange];
        dateRange.classListIndex = 0;
      }
    }

    // second pass: endDate wins, then END-ON-NEXT (next range of the same
    // class), then DURATION, then PLANNED-DURATION, else zero-length
    for (const dateRange of toProcess) {
      const classList = byClass[dateRange.class] || [];

      if (dateRange.endDate) {
        dateRange.endTime = dateRange.endDate.getTime() / 1000 - this.offset_;
      } else if (dateRange.endOnNext && classList[dateRange.classListIndex + 1]) {
        dateRange.endTime = classList[dateRange.classListIndex + 1].startTime;
      } else if (dateRange.duration) {
        dateRange.endTime = dateRange.startTime + dateRange.duration;
      } else if (dateRange.plannedDuration) {
        dateRange.endTime = dateRange.startTime + dateRange.plannedDuration;
      } else {
        dateRange.endTime = dateRange.startTime;
      }
    }

    return toProcess;
  }

  /**
   * Drop processed date ranges that started before the given wall-clock
   * millisecond timestamp.
   *
   * @param {number} startTime epoch milliseconds cutoff
   */
  trimProcessedDateRanges_(startTime) {
    // iterate over a copy so deleting while looping is safe
    for (const [id, dateRange] of new Map(this.processedDateRanges_)) {
      if (dateRange.startDate.getTime() < startTime) {
        this.processedDateRanges_.delete(id);
      }
    }
  }
}
// Legacy numeric DOMException code for QuotaExceededError (DOMException
// .QUOTA_EXCEEDED_ERR === 22 on the web platform). Presumably compared
// against error codes elsewhere in this module (e.g. SourceBuffer append
// failures) — usage is outside this chunk.
const QUOTA_EXCEEDED_ERR = 22;
/**
 * Build the metadata object describing a failed/aborted streaming network
 * request, suitable for attaching to error events.
 *
 * @param {Object} options
 * @param {string} options.requestType
 *        The kind of request being described (manifest, segment, key, ...)
 * @param {Object} options.request
 *        The finished xhr-style request (status, uri, headers, aborted,
 *        timedout)
 * @param {Object} [options.error]
 *        The error produced by the request, if any
 * @param {boolean} [options.parseFailure]
 *        Whether the response body failed to parse
 * @return {Object}
 *         Metadata with uri, requestType, and (depending on the failure
 *         mode) errorType, error, status, and headers
 */
const getStreamingNetworkErrorMetadata = ({
  requestType,
  request,
  error,
  parseFailure
}) => {
  // any non-2xx status is "bad"; 4xx statuses are treated as request failures
  const isBadStatus = request.status < 200 || request.status > 299;
  const isFailure = request.status >= 400 && request.status <= 499;
  const errorMetadata = {
    uri: request.uri,
    requestType
  };
  const isBadStatusOrParseFailure = isBadStatus && !isFailure || parseFailure;

  if (error && isFailure) {
    // copy original error and add to the metadata.
    errorMetadata.error = _extends({}, error);
    errorMetadata.errorType = videojs.Error.NetworkRequestFailed;
  } else if (request.aborted) {
    errorMetadata.errorType = videojs.Error.NetworkRequestAborted;
  } else if (request.timedout) {
    // FIX: previously assigned to `erroType` (typo), so timed-out requests
    // produced metadata with no errorType at all.
    errorMetadata.errorType = videojs.Error.NetworkRequestTimeout;
  } else if (isBadStatusOrParseFailure) {
    const errorType = parseFailure ? videojs.Error.NetworkBodyParserFailed : videojs.Error.NetworkBadStatus;

    errorMetadata.errorType = errorType;
    errorMetadata.status = request.status;
    errorMetadata.headers = request.headers;
  }
  return errorMetadata;
};
// Alias video.js's EventTarget for the loader classes defined below.
const EventTarget$1 = videojs.EventTarget;
const addLLHLSQueryDirectives = (uri, media) => {
if (media.endList || !media.serverControl) {
return uri;
}
const parameters = {};
if (media.serverControl.canBlockReload) {
const {
preloadSegment
} = media; // next msn is a zero based value, length is not.
let nextMSN = media.mediaSequence + media.segments.length; //