/*! @license
* Shaka Player
* Copyright 2016 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
goog.provide('shaka.hls.HlsParser');
goog.require('goog.Uri');
goog.require('goog.asserts');
goog.require('shaka.abr.Ewma');
goog.require('shaka.drm.DrmUtils');
goog.require('shaka.drm.FairPlay');
goog.require('shaka.drm.PlayReady');
goog.require('shaka.hls.Attribute');
goog.require('shaka.hls.ManifestTextParser');
goog.require('shaka.hls.Playlist');
goog.require('shaka.hls.PlaylistType');
goog.require('shaka.hls.Tag');
goog.require('shaka.hls.Utils');
goog.require('shaka.log');
goog.require('shaka.media.InitSegmentReference');
goog.require('shaka.media.ManifestParser');
goog.require('shaka.media.PresentationTimeline');
goog.require('shaka.media.QualityObserver');
goog.require('shaka.media.SegmentIndex');
goog.require('shaka.media.SegmentReference');
goog.require('shaka.media.SegmentUtils');
goog.require('shaka.net.DataUriPlugin');
goog.require('shaka.net.NetworkingEngine');
goog.require('shaka.net.NetworkingEngine.PendingRequest');
goog.require('shaka.util.ArrayUtils');
goog.require('shaka.util.BufferUtils');
goog.require('shaka.util.ContentSteeringManager');
goog.require('shaka.util.Error');
goog.require('shaka.util.EventManager');
goog.require('shaka.util.FakeEvent');
goog.require('shaka.util.LanguageUtils');
goog.require('shaka.util.ManifestParserUtils');
goog.require('shaka.util.MimeUtils');
goog.require('shaka.util.Networking');
goog.require('shaka.util.OperationManager');
goog.require('shaka.util.Pssh');
goog.require('shaka.util.Timer');
goog.require('shaka.util.TsParser');
goog.require('shaka.util.TXml');
goog.require('shaka.util.StreamUtils');
goog.require('shaka.util.Uint8ArrayUtils');
goog.requireType('shaka.hls.Segment');
/**
* HLS parser.
*
* @implements {shaka.extern.ManifestParser}
* @export
*/
shaka.hls.HlsParser = class {
/**
* Creates an HLS parser object.
*/
constructor() {
/** @private {?shaka.extern.ManifestParser.PlayerInterface} */
this.playerInterface_ = null;
/** @private {?shaka.extern.ManifestConfiguration} */
this.config_ = null;
/** @private {number} */
this.globalId_ = 1;
/** @private {!Map<string, string>} */
this.globalVariables_ = new Map();
/**
* A map from group id to stream infos created from the media tags.
* @private {!Map<string, !Array<?shaka.hls.HlsParser.StreamInfo>>}
*/
this.groupIdToStreamInfosMap_ = new Map();
/**
* For media playlist lazy-loading to work in livestreams, we have to assume
* that each stream of a type (video, audio, etc) has the same mappings of
* sequence number to start time.
* This map stores those relationships.
* Only used during livestreams; we do not assume that VOD content is
* aligned in that way.
* @private {!Map<string, !Map<number, number>>}
*/
this.mediaSequenceToStartTimeByType_ = new Map();
// Set initial maps.
const ContentType = shaka.util.ManifestParserUtils.ContentType;
this.mediaSequenceToStartTimeByType_.set(ContentType.VIDEO, new Map());
this.mediaSequenceToStartTimeByType_.set(ContentType.AUDIO, new Map());
this.mediaSequenceToStartTimeByType_.set(ContentType.TEXT, new Map());
this.mediaSequenceToStartTimeByType_.set(ContentType.IMAGE, new Map());
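// Illustrative example (hypothetical values): after parsing a live video
// playlist of 6-second segments starting at media sequence 100, the VIDEO
// map might look like {100 => 0, 101 => 6, 102 => 12}; other playlists of
// the same type are assumed to share that mapping.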
/** @private {!Map<string, shaka.hls.HlsParser.DrmParser_>} */
this.keyFormatsToDrmParsers_ = new Map()
.set('com.apple.streamingkeydelivery',
(tag, type, ref) => this.fairplayDrmParser_(tag, type, ref))
.set('urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed',
(tag, type, ref) => this.widevineDrmParser_(tag, type, ref))
.set('com.microsoft.playready',
(tag, type, ref) => this.playreadyDrmParser_(tag, type, ref))
.set('urn:uuid:3d5e6d35-9b9a-41e8-b843-dd3c6e72c42c',
(tag, type, ref) => this.wiseplayDrmParser_(tag, type, ref));
/**
* The values are strings of the form "<VIDEO URI> - <AUDIO URI>",
* where the URIs are the verbatim media playlist URIs as they appeared in
* the master playlist.
*
* Used to avoid duplicates that vary only in their text stream.
*
* @private {!Set<string>}
*/
this.variantUriSet_ = new Set();
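// Illustrative key (hypothetical URIs):
//   'https://example.com/video_1080p.m3u8 - https://example.com/audio_en.m3u8'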
/**
* A map from (verbatim) media playlist URI to stream infos representing the
* playlists.
*
* On update, used to iterate through and update from media playlists.
*
* On initial parse, used to iterate through and determine minimum
* timestamps, offsets, and to handle TS rollover.
*
* During parsing, used to avoid duplicates in the async methods
* createStreamInfoFromMediaTags_, createStreamInfoFromImageTag_ and
* createStreamInfoFromVariantTags_.
*
* @private {!Map<string, shaka.hls.HlsParser.StreamInfo>}
*/
this.uriToStreamInfosMap_ = new Map();
/** @private {?shaka.media.PresentationTimeline} */
this.presentationTimeline_ = null;
/**
* The master playlist URI, after redirects.
*
* @private {string}
*/
this.masterPlaylistUri_ = '';
/** @private {shaka.hls.ManifestTextParser} */
this.manifestTextParser_ = new shaka.hls.ManifestTextParser();
/**
* The minimum sequence number for generated segments, when ignoring
* EXT-X-PROGRAM-DATE-TIME.
*
* @private {number}
*/
this.minSequenceNumber_ = -1;
/**
* The lowest time value for any of the streams, as defined by the
* EXT-X-PROGRAM-DATE-TIME value. Measured in seconds since January 1, 1970.
*
* @private {number}
*/
this.lowestSyncTime_ = Infinity;
/**
* Flag to indicate if any of the media playlists use
* EXT-X-PROGRAM-DATE-TIME.
*
* @private {boolean}
*/
this.usesProgramDateTime_ = false;
/**
* Whether the streams have previously been "finalized"; that is to say,
* whether we have loaded enough streams to know information about the asset
* such as timing information, live status, etc.
*
* @private {boolean}
*/
this.streamsFinalized_ = false;
/**
* Whether the manifest specifies the codecs to use.
*
* @private
*/
this.codecInfoInManifest_ = false;
/**
* This timer is used to trigger the start of a manifest update. A manifest
* update is async. Once the update is finished, the timer will be restarted
* to trigger the next update. The timer will only be started if the content
* is live content.
*
* @private {shaka.util.Timer}
*/
this.updatePlaylistTimer_ = new shaka.util.Timer(() => {
if (this.mediaElement_ && !this.config_.continueLoadingWhenPaused) {
this.eventManager_.unlisten(this.mediaElement_, 'timeupdate');
if (this.mediaElement_.paused) {
this.eventManager_.listenOnce(
this.mediaElement_, 'timeupdate', () => this.onUpdate_());
return;
}
}
this.onUpdate_();
});
/** @private {shaka.hls.HlsParser.PresentationType_} */
this.presentationType_ = shaka.hls.HlsParser.PresentationType_.VOD;
/** @private {?shaka.extern.Manifest} */
this.manifest_ = null;
/** @private {number} */
this.maxTargetDuration_ = 0;
/** @private {number} */
this.lastTargetDuration_ = Infinity;
/**
* Partial segments target duration.
* @private {number}
*/
this.partialTargetDuration_ = 0;
/** @private {number} */
this.presentationDelay_ = 0;
/** @private {number} */
this.lowLatencyPresentationDelay_ = 0;
/** @private {shaka.util.OperationManager} */
this.operationManager_ = new shaka.util.OperationManager();
/**
* A map from closed captions' group id, to a map of closed captions info.
* {group id -> {closed captions channel id -> language}}
* @private {Map<string, Map<string, string>>}
*/
this.groupIdToClosedCaptionsMap_ = new Map();
/** @private {Map<string, string>} */
this.groupIdToCodecsMap_ = new Map();
/**
* A cache mapping EXT-X-MAP tag info to the InitSegmentReference created
* from the tag.
* The key is a string combining the EXT-X-MAP tag's absolute uri, and
* its BYTERANGE if available.
* @private {!Map<string, !shaka.media.InitSegmentReference>}
*/
this.mapTagToInitSegmentRefMap_ = new Map();
/** @private {Map<string, !shaka.extern.aesKey>} */
this.aesKeyInfoMap_ = new Map();
/** @private {Map<string, !Promise<shaka.extern.Response>>} */
this.aesKeyMap_ = new Map();
/** @private {Map<string, !Promise<shaka.extern.Response>>} */
this.identityKeyMap_ = new Map();
/** @private {Map<!shaka.media.InitSegmentReference, ?string>} */
this.initSegmentToKidMap_ = new Map();
/** @private {boolean} */
this.lowLatencyMode_ = false;
/** @private {boolean} */
this.lowLatencyByterangeOptimization_ = false;
/**
* An ewma that tracks how long updates take.
* This is to mitigate issues caused by slow parsing on embedded devices.
* @private {!shaka.abr.Ewma}
*/
this.averageUpdateDuration_ = new shaka.abr.Ewma(5);
/** @private {?shaka.util.ContentSteeringManager} */
this.contentSteeringManager_ = null;
/** @private {boolean} */
this.needsClosedCaptionsDetection_ = true;
/** @private {Set<string>} */
this.dateRangeIdsEmitted_ = new Set();
/** @private {shaka.util.EventManager} */
this.eventManager_ = new shaka.util.EventManager();
/** @private {HTMLMediaElement} */
this.mediaElement_ = null;
/** @private {?number} */
this.startTime_ = null;
/** @private {function():boolean} */
this.isPreloadFn_ = () => false;
}
/**
* @param {shaka.extern.ManifestConfiguration} config
* @param {(function():boolean)=} isPreloadFn
* @override
* @exportInterface
*/
configure(config, isPreloadFn) {
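// If the update period changed while we are already running (for example,
// hypothetically, config.updatePeriod going from 0 to 5 on a live stream),
// fire an update immediately below so the new period takes effect right
// away.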
const needFireUpdate = this.playerInterface_ &&
config.updatePeriod != this.config_.updatePeriod &&
config.updatePeriod >= 0;
this.config_ = config;
if (isPreloadFn) {
this.isPreloadFn_ = isPreloadFn;
}
if (this.contentSteeringManager_) {
this.contentSteeringManager_.configure(this.config_);
}
if (needFireUpdate && this.manifest_ &&
this.manifest_.presentationTimeline.isLive()) {
this.updatePlaylistTimer_.tickNow();
}
}
/**
* @override
* @exportInterface
*/
async start(uri, playerInterface) {
goog.asserts.assert(this.config_, 'Must call configure() before start()!');
this.playerInterface_ = playerInterface;
this.lowLatencyMode_ = playerInterface.isLowLatencyMode();
const response = await this.requestManifest_([uri]).promise;
// Record the master playlist URI after redirects.
this.masterPlaylistUri_ = response.uri;
goog.asserts.assert(response.data, 'Response data should be non-null!');
await this.parseManifest_(response.data);
goog.asserts.assert(this.manifest_, 'Manifest should be non-null');
return this.manifest_;
}
/**
* @override
* @exportInterface
*/
stop() {
// Make sure we don't update the manifest again. Even if the timer is not
// running, this is safe to call.
if (this.updatePlaylistTimer_) {
this.updatePlaylistTimer_.stop();
this.updatePlaylistTimer_ = null;
}
/** @type {!Array<!Promise>} */
const pending = [];
if (this.operationManager_) {
pending.push(this.operationManager_.destroy());
this.operationManager_ = null;
}
this.playerInterface_ = null;
this.config_ = null;
this.variantUriSet_.clear();
this.manifest_ = null;
this.uriToStreamInfosMap_.clear();
this.groupIdToStreamInfosMap_.clear();
this.groupIdToCodecsMap_.clear();
this.globalVariables_.clear();
this.mapTagToInitSegmentRefMap_.clear();
this.aesKeyInfoMap_.clear();
this.aesKeyMap_.clear();
this.identityKeyMap_.clear();
this.initSegmentToKidMap_.clear();
this.dateRangeIdsEmitted_.clear();
if (this.contentSteeringManager_) {
this.contentSteeringManager_.destroy();
}
if (this.eventManager_) {
this.eventManager_.release();
this.eventManager_ = null;
}
return Promise.all(pending);
}
/**
* @override
* @exportInterface
*/
async update() {
if (!this.isLive_()) {
return;
}
/** @type {!Array<!Promise>} */
const updates = [];
const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
// This is necessary to correctly calculate the update time.
this.lastTargetDuration_ = Infinity;
this.manifest_.gapCount = 0;
// Only update active streams.
const activeStreamInfos = streamInfos.filter((s) => s.stream.segmentIndex);
for (const streamInfo of activeStreamInfos) {
updates.push(this.updateStream_(streamInfo));
}
await Promise.all(updates);
// Now that streams have been updated, notify the presentation timeline.
this.notifySegmentsForStreams_(activeStreamInfos.map((s) => s.stream));
// If any hasEndList is false, the stream is still live.
const stillLive = activeStreamInfos.some((s) => s.hasEndList == false);
if (activeStreamInfos.length && !stillLive) {
// Convert the presentation to VOD and set the duration.
const PresentationType = shaka.hls.HlsParser.PresentationType_;
this.setPresentationType_(PresentationType.VOD);
// The duration is the minimum of the end times of all active streams.
// Non-active streams are not guaranteed to have useful maxTimestamp
// values, due to the lazy-loading system, so they are ignored.
const maxTimestamps = activeStreamInfos.map((s) => s.maxTimestamp);
// Use the minimum of those end times as the VOD duration.
this.presentationTimeline_.setDuration(Math.min(...maxTimestamps));
this.playerInterface_.updateDuration();
}
if (stillLive) {
this.determineDuration_();
}
// Check if any playlist does not have the first reference (due to a
// problem in the live encoder for example), and disable the stream if
// necessary.
for (const streamInfo of activeStreamInfos) {
if (!streamInfo.stream.isAudioMuxedInVideo &&
streamInfo.stream.segmentIndex &&
!streamInfo.stream.segmentIndex.earliestReference()) {
this.playerInterface_.disableStream(streamInfo.stream);
}
}
}
/**
* @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
* @return {!Map<number, number>}
* @private
*/
getMediaSequenceToStartTimeFor_(streamInfo) {
if (this.isLive_()) {
return this.mediaSequenceToStartTimeByType_.get(streamInfo.type);
} else {
return streamInfo.mediaSequenceToStartTime;
}
}
/**
* Updates a stream.
*
* @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
* @return {!Promise}
* @private
*/
async updateStream_(streamInfo) {
if (streamInfo.stream.isAudioMuxedInVideo) {
return;
}
const manifestUris = [];
for (const uri of streamInfo.getUris()) {
const uriObj = new goog.Uri(uri);
const queryData = uriObj.getQueryData();
if (streamInfo.canBlockReload) {
if (streamInfo.nextMediaSequence >= 0) {
// Indicates that the server must hold the request until a Playlist
// contains a Media Segment with a Media Sequence Number of N or later.
queryData.add('_HLS_msn', String(streamInfo.nextMediaSequence));
}
if (streamInfo.nextPart >= 0) {
// Indicates, in combination with _HLS_msn, that the server must hold
// the request until a Playlist contains Partial Segment N of Media
// Sequence Number M or later.
queryData.add('_HLS_part', String(streamInfo.nextPart));
}
}
if (streamInfo.canSkipSegments) {
// Enable delta updates. The server will replace older segments with an
// 'EXT-X-SKIP' tag in the media playlist.
queryData.add('_HLS_skip', 'YES');
}
if (queryData.getCount()) {
uriObj.setQueryData(queryData.toDecodedString());
}
manifestUris.push(uriObj.toString());
}
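// Illustrative request URI with blocking-reload parameters (hypothetical
// host and values):
//   https://cdn.example.com/live/video.m3u8?_HLS_msn=273&_HLS_part=2&_HLS_skip=YES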
let response;
try {
response = await this.requestManifest_(
manifestUris, /* isPlaylist= */ true).promise;
} catch (e) {
if (this.playerInterface_) {
this.playerInterface_.disableStream(streamInfo.stream);
}
throw e;
}
if (!streamInfo.stream.segmentIndex) {
// The stream was closed since the update was first requested.
return;
}
/** @type {shaka.hls.Playlist} */
const playlist = this.manifestTextParser_.parsePlaylist(response.data);
if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
throw new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.MANIFEST,
shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
}
// Record the final URI after redirects.
const responseUri = response.uri;
if (responseUri != response.originalUri &&
!streamInfo.getUris().includes(responseUri)) {
streamInfo.redirectUris.push(responseUri);
}
/** @type {!Array<!shaka.hls.Tag>} */
const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
'EXT-X-DEFINE');
const mediaVariables = this.parseMediaVariables_(
variablesTags, responseUri);
const stream = streamInfo.stream;
const mediaSequenceToStartTime =
this.getMediaSequenceToStartTimeFor_(streamInfo);
const {keyIds, drmInfos, encrypted, aesEncrypted} =
await this.parseDrmInfo_(playlist, stream.mimeType,
streamInfo.getUris, mediaVariables);
if (!stream.encrypted && encrypted && !aesEncrypted) {
stream.encrypted = true;
}
const keysAreEqual =
(a, b) => a.size === b.size && [...a].every((value) => b.has(value));
if (!keysAreEqual(stream.keyIds, keyIds)) {
stream.keyIds = keyIds;
stream.drmInfos = drmInfos;
this.playerInterface_.newDrmInfo(stream);
}
const {segments, bandwidth} = this.createSegments_(
playlist, mediaSequenceToStartTime, mediaVariables,
streamInfo.getUris, streamInfo.type);
if (bandwidth) {
stream.bandwidth = bandwidth;
}
const qualityInfo =
shaka.media.QualityObserver.createQualityInfo(stream);
for (const segment of segments) {
if (segment.initSegmentReference) {
segment.initSegmentReference.mediaQuality = qualityInfo;
}
}
stream.segmentIndex.mergeAndEvict(
segments, this.presentationTimeline_.getSegmentAvailabilityStart());
if (segments.length) {
const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
const skipTag = shaka.hls.Utils.getFirstTagWithName(
playlist.tags, 'EXT-X-SKIP');
const skippedSegments =
skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
const {nextMediaSequence, nextPart} =
this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
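// With delta updates, the skipped segments are omitted from the playlist
// and replaced by the EXT-X-SKIP tag, so advance the next Media Sequence
// Number by SKIPPED-SEGMENTS to keep the blocking-reload parameters
// aligned.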
streamInfo.nextMediaSequence = nextMediaSequence + skippedSegments;
streamInfo.nextPart = nextPart;
const playlistStartTime = mediaSequenceToStartTime.get(
mediaSequenceNumber);
stream.segmentIndex.evict(playlistStartTime);
}
const oldSegment = stream.segmentIndex.earliestReference();
if (oldSegment) {
streamInfo.minTimestamp = oldSegment.startTime;
const newestSegment = segments[segments.length - 1];
goog.asserts.assert(newestSegment, 'Should have segments!');
streamInfo.maxTimestamp = newestSegment.endTime;
}
// Once the last segment has been added to the playlist, the
// #EXT-X-ENDLIST tag will be appended.
// If that has happened, treat the rest of the EVENT presentation as VOD.
const endListTag =
shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
if (endListTag) {
// Flag this for later. We don't convert the whole presentation into VOD
// until we've seen the ENDLIST tag for all active playlists.
streamInfo.hasEndList = true;
}
this.determineLastTargetDuration_(playlist);
this.processDateRangeTags_(
playlist.tags, stream.type, mediaVariables, streamInfo.getUris);
}
/**
* @override
* @exportInterface
*/
onExpirationUpdated(sessionId, expiration) {
// No-op
}
/**
* @override
* @exportInterface
*/
onInitialVariantChosen(variant) {
// No-op
}
/**
* @override
* @exportInterface
*/
banLocation(uri) {
if (this.contentSteeringManager_) {
this.contentSteeringManager_.banLocation(uri);
}
}
/**
* @override
* @exportInterface
*/
setMediaElement(mediaElement) {
this.mediaElement_ = mediaElement;
}
/**
* Align the streams by sequence number by dropping early segments. Then
* offset the streams to begin at presentation time 0.
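*
* Illustrative example (hypothetical values): if the loaded video stream's
* earliest segment is at media sequence 102 and the audio stream's is at
* 100, the audio stream drops its first two references, and both streams
* are then offset so their earliest remaining segment starts at time 0.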
* @param {!Array<!shaka.hls.HlsParser.StreamInfo>} streamInfos
* @param {boolean=} force
* @private
*/
syncStreamsWithSequenceNumber_(streamInfos, force = false) {
// We assume that, when this is first called, we have enough info to
// determine how to use the program date times (e.g. we have both a video
// and an audio, and all other videos and audios match those).
// Thus, we only need to calculate this once.
const updateMinSequenceNumber = this.minSequenceNumber_ == -1;
// Sync using media sequence number. Find the highest starting sequence
// number among all streams. Later, we will drop any references to
// earlier segments in other streams, then offset everything back to 0.
for (const streamInfo of streamInfos) {
const segmentIndex = streamInfo.stream.segmentIndex;
goog.asserts.assert(segmentIndex,
'Only loaded streams should be synced');
const mediaSequenceToStartTime =
this.getMediaSequenceToStartTimeFor_(streamInfo);
const segment0 = segmentIndex.earliestReference();
if (segment0) {
// This looks inefficient, but iteration order is insertion order.
// So the very first entry should be the one we want.
// We assert that this holds true so that we are alerted by debug
// builds and tests if it changes. We still do a loop, though, so
// that the code functions correctly in production no matter what.
if (goog.DEBUG) {
const firstSequenceStartTime =
mediaSequenceToStartTime.values().next().value;
if (firstSequenceStartTime != segment0.startTime) {
shaka.log.warning(
'Sequence number map is not ordered as expected!');
}
}
for (const [sequence, start] of mediaSequenceToStartTime) {
if (start == segment0.startTime) {
if (updateMinSequenceNumber) {
this.minSequenceNumber_ = Math.max(
this.minSequenceNumber_, sequence);
}
// Even if we already have decided on a value for
// |this.minSequenceNumber_|, we still need to determine the first
// sequence number for the stream, to offset it in the code below.
streamInfo.firstSequenceNumber = sequence;
break;
}
}
}
}
if (this.minSequenceNumber_ < 0) {
// Nothing to sync.
return;
}
shaka.log.debug('Syncing HLS streams against base sequence number:',
this.minSequenceNumber_);
for (const streamInfo of streamInfos) {
if (!this.ignoreManifestProgramDateTimeFor_(streamInfo.type) && !force) {
continue;
}
const segmentIndex = streamInfo.stream.segmentIndex;
if (segmentIndex) {
// Drop any earlier references.
const numSegmentsToDrop =
this.minSequenceNumber_ - streamInfo.firstSequenceNumber;
if (numSegmentsToDrop > 0) {
segmentIndex.dropFirstReferences(numSegmentsToDrop);
// Now adjust timestamps back to begin at 0.
const segmentN = segmentIndex.earliestReference();
if (segmentN) {
const streamOffset = -segmentN.startTime;
// Modify all SegmentReferences equally.
streamInfo.stream.segmentIndex.offset(streamOffset);
// Update other parts of streamInfo the same way.
this.offsetStreamInfo_(streamInfo, streamOffset);
}
}
}
}
}
/**
* Synchronize streams by the EXT-X-PROGRAM-DATE-TIME tags attached to their
* segments. Also normalizes segment times so that the earliest segment in
* any stream is at time 0.
* @param {!Array<!shaka.hls.HlsParser.StreamInfo>} streamInfos
* @private
*/
syncStreamsWithProgramDateTime_(streamInfos) {
// We assume that, when this is first called, we have enough info to
// determine how to use the program date times (e.g. we have both a video
// and an audio, and all other videos and audios match those).
// Thus, we only need to calculate this once.
if (this.lowestSyncTime_ == Infinity) {
for (const streamInfo of streamInfos) {
const segmentIndex = streamInfo.stream.segmentIndex;
goog.asserts.assert(segmentIndex,
'Only loaded streams should be synced');
const segment0 = segmentIndex.earliestReference();
if (segment0 != null && segment0.syncTime != null) {
this.lowestSyncTime_ =
Math.min(this.lowestSyncTime_, segment0.syncTime);
}
}
}
const lowestSyncTime = this.lowestSyncTime_;
if (lowestSyncTime == Infinity) {
// Nothing to sync.
return;
}
shaka.log.debug('Syncing HLS streams against base time:', lowestSyncTime);
for (const streamInfo of this.uriToStreamInfosMap_.values()) {
if (this.ignoreManifestProgramDateTimeFor_(streamInfo.type)) {
continue;
}
const segmentIndex = streamInfo.stream.segmentIndex;
if (segmentIndex != null) {
// A segment's startTime should be based on its syncTime vs the lowest
// syncTime across all streams. The earliest segment sync time from
// any stream will become presentation time 0. If two streams start
// e.g. 6 seconds apart in syncTime, then their first segments will
// also start 6 seconds apart in presentation time.
const segment0 = segmentIndex.earliestReference();
if (!segment0) {
continue;
}
if (segment0.syncTime == null) {
shaka.log.alwaysError('Missing EXT-X-PROGRAM-DATE-TIME for stream',
streamInfo.getUris(),
'Expect AV sync issues!');
} else {
// Stream metadata are offset by a fixed amount based on the
// first segment.
const segment0TargetTime = segment0.syncTime - lowestSyncTime;
const streamOffset = segment0TargetTime - segment0.startTime;
this.offsetStreamInfo_(streamInfo, streamOffset);
// This is computed across all segments separately to manage
// accumulated drift in durations.
for (const segment of segmentIndex) {
segment.syncAgainst(lowestSyncTime);
}
}
}
}
}
/**
* @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
* @param {number} offset
* @private
*/
offsetStreamInfo_(streamInfo, offset) {
// Floating-point rounding can leave us with a spurious offset of less
// than a millisecond. Don't apply the offset in that case.
if (Math.abs(offset) < 0.001) {
return;
}
// Adjust our accounting of the minimum timestamp.
streamInfo.minTimestamp += offset;
// Adjust our accounting of the maximum timestamp.
streamInfo.maxTimestamp += offset;
goog.asserts.assert(streamInfo.maxTimestamp >= 0,
'Negative maxTimestamp after adjustment!');
// Update our map from sequence number to start time.
const mediaSequenceToStartTime =
this.getMediaSequenceToStartTimeFor_(streamInfo);
for (const [key, value] of mediaSequenceToStartTime) {
mediaSequenceToStartTime.set(key, value + offset);
}
shaka.log.debug('Offset', offset, 'applied to',
streamInfo.getUris());
}
/**
* Parses the manifest.
*
* @param {BufferSource} data
* @return {!Promise}
* @private
*/
async parseManifest_(data) {
const Utils = shaka.hls.Utils;
const ContentType = shaka.util.ManifestParserUtils.ContentType;
goog.asserts.assert(this.masterPlaylistUri_,
'Master playlist URI must be set before calling parseManifest_!');
const playlist = this.manifestTextParser_.parsePlaylist(data);
/** @type {!Array<!shaka.hls.Tag>} */
const variablesTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE');
/** @type {!Array<!shaka.extern.Variant>} */
let variants = [];
/** @type {!Array<!shaka.extern.Stream>} */
let textStreams = [];
/** @type {!Array<!shaka.extern.Stream>} */
let imageStreams = [];
// This assert is our own sanity check.
goog.asserts.assert(this.presentationTimeline_ == null,
'Presentation timeline created early!');
// We don't know if the presentation is VOD or live until we parse at least
// one media playlist, so make a VOD-style presentation timeline for now
// and change the type later if we discover this is live.
// Since the player will load the first variant chosen early in the process,
// there isn't a window during playback where the live-ness is unknown.
this.presentationTimeline_ = new shaka.media.PresentationTimeline(
/* presentationStartTime= */ null, /* delay= */ 0);
this.presentationTimeline_.setStatic(true);
const getUris = () => {
return [this.masterPlaylistUri_];
};
/** @type {?string} */
let mediaPlaylistType = null;
/** @type {!Map<string, string>} */
let mediaVariables = new Map();
// Parsing a media playlist results in a single-variant stream.
if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
this.needsClosedCaptionsDetection_ = false;
/** @type {!Array<!shaka.hls.Tag>} */
const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
'EXT-X-DEFINE');
mediaVariables = this.parseMediaVariables_(
variablesTags, this.masterPlaylistUri_);
// By default we assume video, but the correct type is determined in a
// later step.
mediaPlaylistType = ContentType.VIDEO;
// These values can be obtained later so these default values are good.
const codecs = '';
const languageValue = '';
const channelsCount = null;
const sampleRate = null;
const closedCaptions = new Map();
const spatialAudio = false;
const characteristics = null;
const forced = false; // Only relevant for text.
const primary = true; // This is the only stream!
const name = 'Media Playlist';
// Make the stream info, with those values.
const streamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
this.globalId_++, mediaVariables, playlist, getUris, codecs,
mediaPlaylistType, languageValue, primary, name, channelsCount,
closedCaptions, characteristics, forced, sampleRate, spatialAudio);
this.uriToStreamInfosMap_.set(this.masterPlaylistUri_, streamInfo);
if (streamInfo.stream) {
const qualityInfo =
shaka.media.QualityObserver.createQualityInfo(streamInfo.stream);
streamInfo.stream.segmentIndex.forEachTopLevelReference(
(reference) => {
if (reference.initSegmentReference) {
reference.initSegmentReference.mediaQuality = qualityInfo;
}
});
}
mediaPlaylistType = streamInfo.stream.type;
// Wrap the stream from that stream info with a variant.
variants.push({
id: 0,
language: this.getLanguage_(languageValue),
disabledUntilTime: 0,
primary: true,
audio: mediaPlaylistType == 'audio' ? streamInfo.stream : null,
video: mediaPlaylistType == 'video' ? streamInfo.stream : null,
bandwidth: streamInfo.stream.bandwidth || 0,
allowedByApplication: true,
allowedByKeySystem: true,
decodingInfos: [],
});
} else {
this.parseMasterVariables_(variablesTags);
/** @type {!Array<!shaka.hls.Tag>} */
const mediaTags = Utils.filterTagsByName(
playlist.tags, 'EXT-X-MEDIA');
/** @type {!Array<!shaka.hls.Tag>} */
const variantTags = Utils.filterTagsByName(
playlist.tags, 'EXT-X-STREAM-INF');
/** @type {!Array<!shaka.hls.Tag>} */
const imageTags = Utils.filterTagsByName(
playlist.tags, 'EXT-X-IMAGE-STREAM-INF');
/** @type {!Array<!shaka.hls.Tag>} */
const iFrameTags = Utils.filterTagsByName(
playlist.tags, 'EXT-X-I-FRAME-STREAM-INF');
/** @type {!Array<!shaka.hls.Tag>} */
const sessionKeyTags = Utils.filterTagsByName(
playlist.tags, 'EXT-X-SESSION-KEY');
/** @type {!Array<!shaka.hls.Tag>} */
const sessionDataTags = Utils.filterTagsByName(
playlist.tags, 'EXT-X-SESSION-DATA');
/** @type {!Array<!shaka.hls.Tag>} */
const contentSteeringTags = Utils.filterTagsByName(
playlist.tags, 'EXT-X-CONTENT-STEERING');
this.processSessionData_(sessionDataTags);
await this.processContentSteering_(contentSteeringTags);
if (!this.config_.ignoreSupplementalCodecs) {
// Duplicate variant tags with supplementalCodecs
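// Illustrative example (hypothetical codec strings): a variant with
// CODECS="hvc1.2.4.L123.B0,mp4a.40.2" and
// SUPPLEMENTAL-CODECS="dvh1.08.07/db4h" is duplicated with
// CODECS="dvh1.08.07,mp4a.40.2", exposing the Dolby Vision rendition as
// its own variant.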
const newVariantTags = [];
for (const tag of variantTags) {
const supplementalCodecsString =
tag.getAttributeValue('SUPPLEMENTAL-CODECS');
if (!supplementalCodecsString) {
continue;
}
const supplementalCodecs = supplementalCodecsString.split(/\s*,\s*/)
.map((codec) => {
return codec.split('/')[0];
});
const newAttributes = tag.attributes.map((attr) => {
const name = attr.name;
let value = attr.value;
if (name == 'CODECS') {
value = supplementalCodecs.join(',');
const allCodecs = attr.value.split(',');
if (allCodecs.length > 1) {
const audioCodec =
shaka.util.ManifestParserUtils.guessCodecsSafe(
shaka.util.ManifestParserUtils.ContentType.AUDIO,
allCodecs);
if (audioCodec) {
value += ',' + audioCodec;
}
}
}
return new shaka.hls.Attribute(name, value);
});
newVariantTags.push(
new shaka.hls.Tag(tag.id, tag.name, newAttributes, null));
}
variantTags.push(...newVariantTags);
// Duplicate iFrame tags with supplementalCodecs
const newIFrameTags = [];
for (const tag of iFrameTags) {
const supplementalCodecsString =
tag.getAttributeValue('SUPPLEMENTAL-CODECS');
if (!supplementalCodecsString) {
continue;
}
const supplementalCodecs = supplementalCodecsString.split(/\s*,\s*/)
.map((codec) => {
return codec.split('/')[0];
});
const newAttributes = tag.attributes.map((attr) => {
const name = attr.name;
let value = attr.value;
if (name == 'CODECS') {
value = supplementalCodecs.join(',');
}
return new shaka.hls.Attribute(name, value);
});
newIFrameTags.push(
new shaka.hls.Tag(tag.id, tag.name, newAttributes, null));
}
iFrameTags.push(...newIFrameTags);
}
this.parseCodecs_(variantTags);
this.parseClosedCaptions_(mediaTags);
const iFrameStreams = this.parseIFrames_(iFrameTags);
variants = await this.createVariantsForTags_(
variantTags, sessionKeyTags, mediaTags, getUris,
this.globalVariables_, iFrameStreams);
textStreams = this.parseTexts_(mediaTags);
imageStreams = await this.parseImages_(imageTags, iFrameTags);
}
// Make sure that the parser has not been destroyed.
if (!this.playerInterface_) {
throw new shaka.util.Error(
shaka.util.Error.Severity.CRITICAL,
shaka.util.Error.Category.PLAYER,
shaka.util.Error.Code.OPERATION_ABORTED);
}
this.determineStartTime_(playlist);
// Single-variant streams aren't lazy-loaded, so for them we already have
// enough info here to determine the presentation type and duration.
if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
if (this.isLive_()) {
this.changePresentationTimelineToLive_(playlist);
const delay = this.getUpdatePlaylistDelay_();
this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
}
const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
this.finalizeStreams_(streamInfos);
this.determineDuration_();
goog.asserts.assert(mediaPlaylistType,
'mediaPlaylistType should be non-null');
this.processDateRangeTags_(
playlist.tags, mediaPlaylistType, mediaVariables, getUris);
}
this.manifest_ = {
presentationTimeline: this.presentationTimeline_,
variants,
textStreams,
imageStreams,
offlineSessionIds: [],
sequenceMode: this.config_.hls.sequenceMode,
ignoreManifestTimestampsInSegmentsMode:
this.config_.hls.ignoreManifestTimestampsInSegmentsMode,
type: shaka.media.ManifestParser.HLS,
serviceDescription: null,
nextUrl: null,
periodCount: 1,
gapCount: 0,
isLowLatency: false,
startTime: this.startTime_,
};
// If there is no 'CODECS' attribute in the manifest and codec guessing is
// disabled, we need to create the segment indexes now so that missing info
// can be parsed from the media data and added to the stream objects.
if (!this.codecInfoInManifest_ && this.config_.hls.disableCodecGuessing) {
const createIndexes = [];
for (const variant of this.manifest_.variants) {
if (variant.audio && variant.audio.codecs === '') {
createIndexes.push(variant.audio.createSegmentIndex());
}
if (variant.video && variant.video.codecs === '') {
createIndexes.push(variant.video.createSegmentIndex());
}
}
await Promise.all(createIndexes);
}
this.playerInterface_.makeTextStreamsForClosedCaptions(this.manifest_);
}
/**
* @param {!Array<!shaka.media.SegmentReference>} segments
* @return {!Promise<shaka.media.SegmentUtils.BasicInfo>}
* @private
*/
async getBasicInfoFromSegments_(segments) {
const HlsParser = shaka.hls.HlsParser;
const defaultBasicInfo = shaka.media.SegmentUtils.getBasicInfoFromMimeType(
this.config_.hls.mediaPlaylistFullMimeType);
if (!segments.length) {
return defaultBasicInfo;
}
const {segment, segmentIndex} = this.getAvailableSegment_(segments);
const segmentUris = segment.getUris();
const segmentUri = segmentUris[0];
const parsedUri = new goog.Uri(segmentUri);
const extension = parsedUri.getPath().split('.').pop();
const rawMimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_.get(extension);
if (rawMimeType) {
return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
rawMimeType);
}
const basicInfos = await Promise.all([
this.getInfoFromSegment_(segment.initSegmentReference, 0),
this.getInfoFromSegment_(segment, segmentIndex),
]);
const initMimeType = basicInfos[0].mimeType;
const contentMimeType = basicInfos[1].mimeType;
const initData = basicInfos[0].data;
const data = basicInfos[1].data;
const validMp4Extensions = [
'mp4',
'mp4a',
'm4s',
'm4i',
'm4a',
'm4f',
'cmfa',
'mp4v',
'm4v',
'cmfv',
'fmp4',
];
const validMp4MimeType = [
'audio/mp4',
'video/mp4',
'video/iso.segment',
];
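// Detection order from here on: probe the downloaded data for an MPEG-TS
// container, then try MP4 parsing (by extension or MIME type), then fall
// back to the response MIME types, and finally to the configured default
// mediaPlaylistFullMimeType.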
if (shaka.util.TsParser.probe(
shaka.util.BufferUtils.toUint8(data))) {
const basicInfo = shaka.media.SegmentUtils.getBasicInfoFromTs(
data, this.config_.disableAudio, this.config_.disableVideo,
this.config_.disableText);
if (basicInfo) {
return basicInfo;
}
} else if (validMp4Extensions.includes(extension) ||
validMp4MimeType.includes(contentMimeType) ||
(initMimeType && validMp4MimeType.includes(initMimeType))) {
const basicInfo = shaka.media.SegmentUtils.getBasicInfoFromMp4(
initData, data, this.config_.disableText);
if (basicInfo) {
return basicInfo;
}
}
if (contentMimeType) {
return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
contentMimeType);
}
if (initMimeType) {
return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
initMimeType);
}
return defaultBasicInfo;
}
/**
* @param {?shaka.media.AnySegmentReference} segment
* @param {number} segmentIndex
* @return {!Promise<{mimeType: ?string, data: ?BufferSource}>}
* @private
*/
async getInfoFromSegment_(segment, segmentIndex) {
if (!segment) {
return {mimeType: null, data: null};
}
const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
const segmentRequest = shaka.util.Networking.createSegmentRequest(
segment.getUris(), segment.getStartByte(), segment.getEndByte(),
this.config_.retryParameters);
const type = segment instanceof shaka.media.SegmentReference ?
shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT :
shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT;
const response = await this.makeNetworkRequest_(
segmentRequest, requestType, {type}).promise;
let data = response.data;
if (segment.aesKey) {
data = await shaka.media.SegmentUtils.aesDecrypt(
data, segment.aesKey, segmentIndex);
}
if (segment instanceof shaka.media.SegmentReference) {
segment.setSegmentData(data, /* singleUse= */ true);
} else {
segment.setSegmentData(data);
}
let mimeType = response.headers['content-type'];
if (mimeType) {
// Split the MIME type in case the server sent additional parameters.
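// e.g. 'video/mp4; codecs="avc1.4d401f"' becomes 'video/mp4'.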
mimeType = mimeType.split(';')[0].toLowerCase();
}
return {mimeType, data};
}
/** @private */
determineDuration_() {
goog.asserts.assert(this.presentationTimeline_,
'Presentation timeline not created!');
if (this.isLive_()) {
// The spec says nothing much about seeking in live content, but Safari's
// built-in HLS implementation does not allow it. Therefore we will set
// the availability window equal to the presentation delay. The player
// will be able to buffer ahead three segments, but the seek window will
// be zero-sized.
const PresentationType = shaka.hls.HlsParser.PresentationType_;
if (this.presentationType_ == PresentationType.LIVE) {
let segmentAvailabilityDuration = this.getLiveDuration_() || 0;
// The app can override that with a longer duration, to allow seeking.
if (!isNaN(this.config_.availabilityWindowOverride)) {
segmentAvailabilityDuration = this.config_.availabilityWindowOverride;
}
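// Illustrative (hypothetical value): setting availabilityWindowOverride
// to 60 yields roughly a 60-second seek window behind the live edge.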
this.presentationTimeline_.setSegmentAvailabilityDuration(
segmentAvailabilityDuration);
}
} else {
// Use the minimum duration as the presentation duration.
this.presentationTimeline_.setDuration(this.getMinDuration_());
}
if (!this.presentationTimeline_.isStartTimeLocked()) {
for (const streamInfo of this.uriToStreamInfosMap_.values()) {
if (!streamInfo.stream.segmentIndex) {
continue; // Not active.
}
if (streamInfo.type != 'audio' && streamInfo.type != 'video') {
continue;
}
const firstReference =
streamInfo.stream.segmentIndex.earliestReference();
if (firstReference && firstReference.syncTime) {
const syncTime = firstReference.syncTime;
this.presentationTimeline_.setInitialProgramDateTime(syncTime);
}
}
}
// This is the first point where we have a meaningful presentation start
// time, and we need to tell PresentationTimeline that so that it can
// maintain consistency from here on.
this.presentationTimeline_.lockStartTime();
// This asserts that the live edge is being calculated from segment times.
// For VOD and event streams, this check should still pass.
goog.asserts.assert(
!this.presentationTimeline_.usingPresentationStartTime(),
'We should not be using the presentation start time in HLS!');
}
/**
* Get the variables defined by the master playlist's EXT-X-DEFINE tags, and
* store them in a map.
* @param {!Array<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
* @private
*/
parseMasterVariables_(tags) {
const queryParams = new goog.Uri(this.masterPlaylistUri_).getQueryData();
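// Illustrative example (hypothetical tag and URI): given
// #EXT-X-DEFINE:QUERYPARAM="token" and a master playlist URI ending in
// ?token=abc123, the variable 'token' resolves to 'abc123'.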
for (const variableTag of tags) {
const name = variableTag.getAttributeValue('NAME');
const value = variableTag.getAttributeValue('VALUE');
const queryParam = variableTag.getAttributeValue('QUERYPARAM');
if (name && value) {
if (!this.globalVariables_.has(name)) {
this.globalVariables_.set(name, value);
}
}
if (queryParam) {
const queryParamValue = queryParams.get(queryParam)[0];
if (queryParamValue && !this.globalVariables_.has(queryParamValue)) {
this.globalVariables_.set(queryParam, queryParamValue);
}
}
}
}
/**
* Get the variables defined by a media playlist's EXT-X-DEFINE tags, and
* store them in a map.
* @param {!Array<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
* @param {string} uri Media playlist URI.
* @return {!Map<string, string>}
* @private
*/
parseMediaVariables_(tags, uri) {
const queryParams = new goog.Uri(uri).getQueryData();
const mediaVariables = new Map();
for (const variableTag of tags) {
const name = variableTag.getAttributeValue('NAME');
const value = variableTag.getAttributeValue('VALUE');
const queryParam = variableTag.getAttributeValue('QUERYPARAM');
const mediaImport = variableTag.getAttributeValue('IMPORT');
if (name && value) {
if (!mediaVariables.has(name)) {
mediaVariables.set(name, value);
}
}
if (queryParam) {
const queryParamValue = queryParams.get(queryParam)[0];
if (queryParamValue && !mediaVariables.has(queryParamValue)) {
mediaVariables.set(queryParam, queryParamValue);
}
}
if (mediaImport) {
const globalValue = this.globalVariables_.get(mediaImport);
if (globalValue) {
mediaVariables.set(mediaImport, globalValue);
}
}
}
return mediaVariables;
}
/**
* Get the codecs of each variant tag, and store them in a map from
* audio/video/subtitle group id to the corresponding codecs string.
* @param {!Array<!shaka.hls.Tag>} tags Variant tags from the playlist.
* @private
*/
parseCodecs_(tags) {
const ContentType = shaka.util.ManifestParserUtils.ContentType;
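// Illustrative example (hypothetical attribute values): for a variant with
// CODECS="avc1.64001f,mp4a.40.2,wvtt" and AUDIO/VIDEO/SUBTITLES group ids,
// 'wvtt' is assigned to the subtitle group, 'mp4a.40.2' to the audio
// group, and 'avc1.64001f' to the video group.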
for (const variantTag of tags) {
const audioGroupId = variantTag.getAttributeValue('AUDIO');
const videoGroupId = variantTag.getAttributeValue('VIDEO');
const subGroupId = variantTag.getAttributeValue('SUBTITLES');
const allCodecs = this.getCodecsForVariantTag_(variantTag);
if (subGroupId) {
const textCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
ContentType.TEXT, allCodecs);
goog.asserts.assert(textCodecs != null, 'Text codecs should be valid.');
this.groupIdToCodecsMap_.set(subGroupId, textCodecs);
shaka.util.ArrayUtils.remove(allCodecs, textCodecs);
}
if (audioGroupId) {
let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
ContentType.AUDIO, allCodecs);
if (!codecs) {
codecs = this.config_.hls.defaultAudioCodec;
}
this.groupIdToCodecsMap_.set(audioGroupId, codecs);
}
if (videoGroupId) {
let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
ContentType.VIDEO, allCodecs);
if (!codecs) {
codecs = this.config_.hls.defaultVideoCodec;
}
this.groupIdToCodecsMap_.set(videoGroupId, codecs);
}
}
}
/**
* Process EXT-X-SESSION-DATA tags.
*
* @param {!Array<!shaka.hls.Tag>} tags
* @private
*/
processSessionData_(tags) {
for (const tag of tags) {
const id = tag.getAttributeValue('DATA-ID');
const uri = tag.getAttributeValue('URI');
const language = tag.getAttributeValue('LANGUAGE');
const value = tag.getAttributeValue('VALUE');
const data = (new Map()).set('id', id);
if (uri) {
data.set('uri', shaka.hls.Utils.constructSegmentUris(
[this.masterPlaylistUri_], uri, this.globalVariables_)[0]);
}
if (language) {
data.set('language', language);
}
if (value) {
data.set('value', value);
}
const event = new shaka.util.FakeEv