UNPKG

shaka-player

Version: (not recorded in this capture)
1,438 lines (1,285 loc), 147 kB
/*! @license * Shaka Player * Copyright 2016 Google LLC * SPDX-License-Identifier: Apache-2.0 */ goog.provide('shaka.hls.HlsParser'); goog.require('goog.Uri'); goog.require('goog.asserts'); goog.require('shaka.abr.Ewma'); goog.require('shaka.hls.ManifestTextParser'); goog.require('shaka.hls.Playlist'); goog.require('shaka.hls.PlaylistType'); goog.require('shaka.hls.Tag'); goog.require('shaka.hls.Utils'); goog.require('shaka.log'); goog.require('shaka.media.DrmEngine'); goog.require('shaka.media.InitSegmentReference'); goog.require('shaka.media.ManifestParser'); goog.require('shaka.media.PresentationTimeline'); goog.require('shaka.media.SegmentIndex'); goog.require('shaka.media.SegmentReference'); goog.require('shaka.net.DataUriPlugin'); goog.require('shaka.net.NetworkingEngine'); goog.require('shaka.util.ArrayUtils'); goog.require('shaka.util.BufferUtils'); goog.require('shaka.util.Error'); goog.require('shaka.util.FakeEvent'); goog.require('shaka.util.Functional'); goog.require('shaka.util.LanguageUtils'); goog.require('shaka.util.ManifestParserUtils'); goog.require('shaka.util.MimeUtils'); goog.require('shaka.util.Mp4BoxParsers'); goog.require('shaka.util.Mp4Parser'); goog.require('shaka.util.OperationManager'); goog.require('shaka.util.Pssh'); goog.require('shaka.util.Timer'); goog.require('shaka.util.TsParser'); goog.require('shaka.util.Platform'); goog.require('shaka.util.Uint8ArrayUtils'); goog.require('shaka.util.XmlUtils'); goog.requireType('shaka.hls.Segment'); /** * HLS parser. * * @implements {shaka.extern.ManifestParser} * @export */ shaka.hls.HlsParser = class { /** * Creates an Hls Parser object. 
*/ constructor() { /** @private {?shaka.extern.ManifestParser.PlayerInterface} */ this.playerInterface_ = null; /** @private {?shaka.extern.ManifestConfiguration} */ this.config_ = null; /** @private {number} */ this.globalId_ = 1; /** @private {!Map.<string, string>} */ this.globalVariables_ = new Map(); /** * A map from group id to stream infos created from the media tags. * @private {!Map.<string, !Array.<?shaka.hls.HlsParser.StreamInfo>>} */ this.groupIdToStreamInfosMap_ = new Map(); /** * For media playlist lazy-loading to work in livestreams, we have to assume * that each stream of a type (video, audio, etc) has the same mappings of * sequence number to start time. * This map stores those relationships. * Only used during livestreams; we do not assume that VOD content is * aligned in that way. * @private {!Map.<string, !Map.<number, number>>} */ this.mediaSequenceToStartTimeByType_ = new Map(); // Set initial maps. const ContentType = shaka.util.ManifestParserUtils.ContentType; this.mediaSequenceToStartTimeByType_.set(ContentType.VIDEO, new Map()); this.mediaSequenceToStartTimeByType_.set(ContentType.AUDIO, new Map()); this.mediaSequenceToStartTimeByType_.set(ContentType.TEXT, new Map()); this.mediaSequenceToStartTimeByType_.set(ContentType.IMAGE, new Map()); /** * The values are strings of the form "<VIDEO URI> - <AUDIO URI>", * where the URIs are the verbatim media playlist URIs as they appeared in * the master playlist. * * Used to avoid duplicates that vary only in their text stream. * * @private {!Set.<string>} */ this.variantUriSet_ = new Set(); /** * A map from (verbatim) media playlist URI to stream infos representing the * playlists. * * On update, used to iterate through and update from media playlists. * * On initial parse, used to iterate through and determine minimum * timestamps, offsets, and to handle TS rollover. 
* * During parsing, used to avoid duplicates in the async methods * createStreamInfoFromMediaTag_, createStreamInfoFromImageTag_ and * createStreamInfoFromVariantTag_. * * @private {!Map.<string, shaka.hls.HlsParser.StreamInfo>} */ this.uriToStreamInfosMap_ = new Map(); /** @private {?shaka.media.PresentationTimeline} */ this.presentationTimeline_ = null; /** * The master playlist URI, after redirects. * * @private {string} */ this.masterPlaylistUri_ = ''; /** @private {shaka.hls.ManifestTextParser} */ this.manifestTextParser_ = new shaka.hls.ManifestTextParser(); /** * The minimum sequence number for generated segments, when ignoring * EXT-X-PROGRAM-DATE-TIME. * * @private {number} */ this.minSequenceNumber_ = -1; /** * The lowest time value for any of the streams, as defined by the * EXT-X-PROGRAM-DATE-TIME value. Measured in seconds since January 1, 1970. * * @private {number} */ this.lowestSyncTime_ = Infinity; /** * Whether the streams have previously been "finalized"; that is to say, * whether we have loaded enough streams to know information about the asset * such as timing information, live status, etc. * * @private {boolean} */ this.streamsFinalized_ = false; /** * This timer is used to trigger the start of a manifest update. A manifest * update is async. Once the update is finished, the timer will be restarted * to trigger the next update. The timer will only be started if the content * is live content. * * @private {shaka.util.Timer} */ this.updatePlaylistTimer_ = new shaka.util.Timer(() => { this.onUpdate_(); }); /** @private {shaka.hls.HlsParser.PresentationType_} */ this.presentationType_ = shaka.hls.HlsParser.PresentationType_.VOD; /** @private {?shaka.extern.Manifest} */ this.manifest_ = null; /** @private {number} */ this.maxTargetDuration_ = 0; /** @private {number} */ this.lastTargetDuration_ = Infinity; /** Partial segments target duration. 
* @private {number} */ this.partialTargetDuration_ = 0; /** @private {number} */ this.presentationDelay_ = 0; /** @private {number} */ this.lowLatencyPresentationDelay_ = 0; /** @private {shaka.util.OperationManager} */ this.operationManager_ = new shaka.util.OperationManager(); /** A map from closed captions' group id, to a map of closed captions info. * {group id -> {closed captions channel id -> language}} * @private {Map.<string, Map.<string, string>>} */ this.groupIdToClosedCaptionsMap_ = new Map(); /** @private {Map.<string, string>} */ this.groupIdToCodecsMap_ = new Map(); /** A cache mapping EXT-X-MAP tag info to the InitSegmentReference created * from the tag. * The key is a string combining the EXT-X-MAP tag's absolute uri, and * its BYTERANGE if available. * {!Map.<string, !shaka.media.InitSegmentReference>} */ this.mapTagToInitSegmentRefMap_ = new Map(); /** @private {boolean} */ this.lowLatencyMode_ = false; /** @private {boolean} */ this.lowLatencyByterangeOptimization_ = false; /** * An ewma that tracks how long updates take. * This is to mitigate issues caused by slow parsing on embedded devices. * @private {!shaka.abr.Ewma} */ this.averageUpdateDuration_ = new shaka.abr.Ewma(5); } /** * @override * @exportInterface */ configure(config) { this.config_ = config; } /** * @override * @exportInterface */ async start(uri, playerInterface) { goog.asserts.assert(this.config_, 'Must call configure() before start()!'); this.playerInterface_ = playerInterface; this.lowLatencyMode_ = playerInterface.isLowLatencyMode(); const response = await this.requestManifest_(uri); // Record the master playlist URI after redirects. 
this.masterPlaylistUri_ = response.uri; goog.asserts.assert(response.data, 'Response data should be non-null!'); await this.parseManifest_(response.data, uri); goog.asserts.assert(this.manifest_, 'Manifest should be non-null'); return this.manifest_; } /** * @override * @exportInterface */ stop() { // Make sure we don't update the manifest again. Even if the timer is not // running, this is safe to call. if (this.updatePlaylistTimer_) { this.updatePlaylistTimer_.stop(); this.updatePlaylistTimer_ = null; } /** @type {!Array.<!Promise>} */ const pending = []; if (this.operationManager_) { pending.push(this.operationManager_.destroy()); this.operationManager_ = null; } this.playerInterface_ = null; this.config_ = null; this.variantUriSet_.clear(); this.manifest_ = null; this.uriToStreamInfosMap_.clear(); this.groupIdToStreamInfosMap_.clear(); this.groupIdToCodecsMap_.clear(); this.globalVariables_.clear(); return Promise.all(pending); } /** * @override * @exportInterface */ async update() { if (!this.isLive_()) { return; } /** @type {!Array.<!Promise>} */ const updates = []; const streamInfos = Array.from(this.uriToStreamInfosMap_.values()); // This is necessary to calculate correctly the update time. this.lastTargetDuration_ = Infinity; // Only update active streams. const activeStreamInfos = streamInfos.filter((s) => s.stream.segmentIndex); for (const streamInfo of activeStreamInfos) { updates.push(this.updateStream_(streamInfo)); } await Promise.all(updates); // Now that streams have been updated, notify the presentation timeline. this.notifySegmentsForStreams_(activeStreamInfos.map((s) => s.stream)); // If any hasEndList is false, the stream is still live. const stillLive = activeStreamInfos.some((s) => s.hasEndList == false); if (activeStreamInfos.length && !stillLive) { // Convert the presentation to VOD and set the duration. 
const PresentationType = shaka.hls.HlsParser.PresentationType_; this.setPresentationType_(PresentationType.VOD); // The duration is the minimum of the end times of all active streams. // Non-active streams are not guaranteed to have useful maxTimestamp // values, due to the lazy-loading system, so they are ignored. const maxTimestamps = activeStreamInfos.map((s) => s.maxTimestamp); // The duration is the minimum of the end times of all streams. this.presentationTimeline_.setDuration(Math.min(...maxTimestamps)); this.playerInterface_.updateDuration(); } if (stillLive) { this.determineDuration_(); } } /** * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo * @return {!Map.<number, number>} * @private */ getMediaSequenceToStartTimeFor_(streamInfo) { if (this.isLive_()) { return this.mediaSequenceToStartTimeByType_.get(streamInfo.type); } else { return streamInfo.mediaSequenceToStartTime; } } /** * Updates a stream. * * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo * @return {!Promise} * @private */ async updateStream_(streamInfo) { const manifestUri = streamInfo.absoluteMediaPlaylistUri; const uriObj = new goog.Uri(manifestUri); const queryData = new goog.Uri.QueryData(); if (streamInfo.canBlockReload) { if (streamInfo.nextMediaSequence >= 0) { // Indicates that the server must hold the request until a Playlist // contains a Media Segment with Media Sequence queryData.add('_HLS_msn', String(streamInfo.nextMediaSequence)); } if (streamInfo.nextPart >= 0) { // Indicates, in combination with _HLS_msn, that the server must hold // the request until a Playlist contains Partial Segment N of Media // Sequence Number M or later. queryData.add('_HLS_part', String(streamInfo.nextPart)); } } if (streamInfo.canSkipSegments) { // Enable delta updates. This will replace older segments with // 'EXT-X-SKIP' tag in the media playlist. 
queryData.add('_HLS_skip', 'YES'); } if (queryData.getCount()) { uriObj.setQueryData(queryData); } const response = await this.requestManifest_(uriObj.toString(), /* isPlaylist= */ true); if (!streamInfo.stream.segmentIndex) { // The stream was closed since the update was first requested. return; } /** @type {shaka.hls.Playlist} */ const playlist = this.manifestTextParser_.parsePlaylist( response.data, response.uri); if (playlist.type != shaka.hls.PlaylistType.MEDIA) { throw new shaka.util.Error( shaka.util.Error.Severity.CRITICAL, shaka.util.Error.Category.MANIFEST, shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY); } /** @type {!Array.<!shaka.hls.Tag>} */ const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE'); const mediaVariables = this.parseMediaVariables_(variablesTags); const stream = streamInfo.stream; const mediaSequenceToStartTime = this.getMediaSequenceToStartTimeFor_(streamInfo); const {keyIds, drmInfos} = this.parseDrmInfo_(playlist, stream.mimeType); const keysAreEqual = (a, b) => a.size === b.size && [...a].every((value) => b.has(value)); if (!keysAreEqual(stream.keyIds, keyIds)) { stream.keyIds = keyIds; stream.drmInfos = drmInfos; this.playerInterface_.newDrmInfo(stream); } const {segments, bandwidth} = this.createSegments_( streamInfo.verbatimMediaPlaylistUri, playlist, stream.type, stream.mimeType, mediaSequenceToStartTime, mediaVariables); stream.bandwidth = bandwidth; stream.segmentIndex.mergeAndEvict( segments, this.presentationTimeline_.getSegmentAvailabilityStart()); if (segments.length) { const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber( playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0); const skipTag = shaka.hls.Utils.getFirstTagWithName( playlist.tags, 'EXT-X-SKIP'); const skippedSegments = skipTag ? 
Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0; const {nextMediaSequence, nextPart} = this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments); streamInfo.nextMediaSequence = nextMediaSequence + skippedSegments; streamInfo.nextPart = nextPart; const playlistStartTime = mediaSequenceToStartTime.get( mediaSequenceNumber); stream.segmentIndex.evict(playlistStartTime); } const oldSegment = segments[0]; goog.asserts.assert(oldSegment, 'Should have segments!'); streamInfo.minTimestamp = oldSegment.startTime; const newestSegment = segments[segments.length - 1]; goog.asserts.assert(newestSegment, 'Should have segments!'); streamInfo.maxTimestamp = newestSegment.endTime; // Once the last segment has been added to the playlist, // #EXT-X-ENDLIST tag will be appended. // If that happened, treat the rest of the EVENT presentation as VOD. const endListTag = shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST'); if (endListTag) { // Flag this for later. We don't convert the whole presentation into VOD // until we've seen the ENDLIST tag for all active playlists. streamInfo.hasEndList = true; } this.determineLastTargetDuration_(playlist); } /** * @override * @exportInterface */ onExpirationUpdated(sessionId, expiration) { // No-op } /** * Align the streams by sequence number by dropping early segments. Then * offset the streams to begin at presentation time 0. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos * @private */ syncStreamsWithSequenceNumber_(streamInfos) { // We assume that, when this is first called, we have enough info to // determine how to use the program date times (e.g. we have both a video // and an audio, and all other videos and audios match those). // Thus, we only need to calculate this once. const updateMinSequenceNumber = this.minSequenceNumber_ == -1; // Sync using media sequence number. Find the highest starting sequence // number among all streams. 
Later, we will drop any references to // earlier segments in other streams, then offset everything back to 0. for (const streamInfo of streamInfos) { const segmentIndex = streamInfo.stream.segmentIndex; goog.asserts.assert(segmentIndex, 'Only loaded streams should be synced'); const mediaSequenceToStartTime = this.getMediaSequenceToStartTimeFor_(streamInfo); const segment0 = segmentIndex.earliestReference(); if (segment0) { // This looks inefficient, but iteration order is insertion order. // So the very first entry should be the one we want. // We assert that this holds true so that we are alerted by debug // builds and tests if it changes. We still do a loop, though, so // that the code functions correctly in production no matter what. if (goog.DEBUG) { const firstSequenceStartTime = mediaSequenceToStartTime.values().next().value; goog.asserts.assert( firstSequenceStartTime == segment0.startTime, 'Sequence number map is not ordered as expected!'); } for (const [sequence, start] of mediaSequenceToStartTime) { if (start == segment0.startTime) { if (updateMinSequenceNumber) { this.minSequenceNumber_ = Math.max( this.minSequenceNumber_, sequence); } // Even if we already have decided on a value for // |this.minSequenceNumber_|, we still need to determine the first // sequence number for the stream, to offset it in the code below. streamInfo.firstSequenceNumber = sequence; break; } } } } if (this.minSequenceNumber_ < 0) { // Nothing to sync. return; } shaka.log.debug('Syncing HLS streams against base sequence number:', this.minSequenceNumber_); for (const streamInfo of streamInfos) { const segmentIndex = streamInfo.stream.segmentIndex; if (segmentIndex) { // Drop any earlier references. const numSegmentsToDrop = this.minSequenceNumber_ - streamInfo.firstSequenceNumber; segmentIndex.dropFirstReferences(numSegmentsToDrop); // Now adjust timestamps back to begin at 0. 
const segmentN = segmentIndex.earliestReference(); if (segmentN) { const streamOffset = -segmentN.startTime; // Modify all SegmentReferences equally. streamInfo.stream.segmentIndex.offset(streamOffset); // Update other parts of streamInfo the same way. this.offsetStreamInfo_(streamInfo, streamOffset); } } } } /** * Synchronize streams by the EXT-X-PROGRAM-DATE-TIME tags attached to their * segments. Also normalizes segment times so that the earliest segment in * any stream is at time 0. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos * @private */ syncStreamsWithProgramDateTime_(streamInfos) { // We assume that, when this is first called, we have enough info to // determine how to use the program date times (e.g. we have both a video // and an audio, and all other videos and audios match those). // Thus, we only need to calculate this once. if (this.lowestSyncTime_ == Infinity) { for (const streamInfo of streamInfos) { const segmentIndex = streamInfo.stream.segmentIndex; goog.asserts.assert(segmentIndex, 'Only loaded streams should be synced'); const segment0 = segmentIndex.earliestReference(); if (segment0 != null && segment0.syncTime != null) { this.lowestSyncTime_ = Math.min(this.lowestSyncTime_, segment0.syncTime); } } } const lowestSyncTime = this.lowestSyncTime_; if (lowestSyncTime == Infinity) { // Nothing to sync. return; } shaka.log.debug('Syncing HLS streams against base time:', lowestSyncTime); for (const streamInfo of this.uriToStreamInfosMap_.values()) { const segmentIndex = streamInfo.stream.segmentIndex; if (segmentIndex != null) { // A segment's startTime should be based on its syncTime vs the lowest // syncTime across all streams. The earliest segment sync time from // any stream will become presentation time 0. If two streams start // e.g. 6 seconds apart in syncTime, then their first segments will // also start 6 seconds apart in presentation time. 
const segment0 = segmentIndex.earliestReference(); if (segment0.syncTime == null) { shaka.log.alwaysError('Missing EXT-X-PROGRAM-DATE-TIME for stream', streamInfo.verbatimMediaPlaylistUri, 'Expect AV sync issues!'); } else { // Stream metadata are offset by a fixed amount based on the // first segment. const segment0TargetTime = segment0.syncTime - lowestSyncTime; const streamOffset = segment0TargetTime - segment0.startTime; this.offsetStreamInfo_(streamInfo, streamOffset); // This is computed across all segments separately to manage // accumulated drift in durations. for (const segment of segmentIndex) { segment.syncAgainst(lowestSyncTime); } } } } } /** * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo * @param {number} offset * @private */ offsetStreamInfo_(streamInfo, offset) { // Adjust our accounting of the minimum timestamp. streamInfo.minTimestamp += offset; // Adjust our accounting of the maximum timestamp. streamInfo.maxTimestamp += offset; goog.asserts.assert(streamInfo.maxTimestamp >= 0, 'Negative maxTimestamp after adjustment!'); // Update our map from sequence number to start time. const mediaSequenceToStartTime = this.getMediaSequenceToStartTimeFor_(streamInfo); for (const [key, value] of mediaSequenceToStartTime) { mediaSequenceToStartTime.set(key, value + offset); } shaka.log.debug('Offset', offset, 'applied to', streamInfo.verbatimMediaPlaylistUri); } /** * Parses the manifest. 
* * @param {BufferSource} data * @param {string} uri * @return {!Promise} * @private */ async parseManifest_(data, uri) { const Utils = shaka.hls.Utils; goog.asserts.assert(this.masterPlaylistUri_, 'Master playlist URI must be set before calling parseManifest_!'); const playlist = this.manifestTextParser_.parsePlaylist( data, this.masterPlaylistUri_); /** @type {!Array.<!shaka.hls.Tag>} */ const variablesTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE'); this.parseMasterVariables_(variablesTags); /** @type {!Array.<!shaka.extern.Variant>} */ let variants = []; /** @type {!Array.<!shaka.extern.Stream>} */ let textStreams = []; /** @type {!Array.<!shaka.extern.Stream>} */ let imageStreams = []; // Parsing a media playlist results in a single-variant stream. if (playlist.type == shaka.hls.PlaylistType.MEDIA) { // Get necessary info for this stream. These are things we would normally // find from the master playlist (e.g. from values on EXT-X-MEDIA tags). const basicInfo = await this.getMediaPlaylistBasicInfo_(playlist); const type = basicInfo.type; const mimeType = basicInfo.mimeType; const codecs = basicInfo.codecs; const languageValue = basicInfo.language; const height = basicInfo.height; const width = basicInfo.width; const channelsCount = basicInfo.channelCount; const sampleRate = basicInfo.sampleRate; // Some values we cannot figure out, and aren't important enough to ask // the user to provide through config values. A lot of these are only // relevant to ABR, which isn't necessary if there's only one variant. // So these unknowns should be set to false or null, largely. const spatialAudio = false; const characteristics = null; const closedCaptions = new Map(); const forced = false; // Only relevant for text. const primary = true; // This is the only stream! const name = 'Media Playlist'; // Make the stream info, with those values. 
const streamInfo = await this.convertParsedPlaylistIntoStreamInfo_( playlist, uri, uri, codecs, type, languageValue, primary, name, channelsCount, closedCaptions, characteristics, forced, sampleRate, spatialAudio, mimeType); this.uriToStreamInfosMap_.set(uri, streamInfo); if (type == 'video') { this.addVideoAttributes_(streamInfo.stream, width, height, /* frameRate= */ null, /* videoRange= */ null); } // Wrap the stream from that stream info with a variant. variants.push({ id: 0, language: this.getLanguage_(languageValue), disabledUntilTime: 0, primary: true, audio: type == 'audio' ? streamInfo.stream : null, video: type == 'video' ? streamInfo.stream : null, bandwidth: streamInfo.stream.bandwidth || 0, allowedByApplication: true, allowedByKeySystem: true, decodingInfos: [], }); } else { /** @type {!Array.<!shaka.hls.Tag>} */ const mediaTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-MEDIA'); /** @type {!Array.<!shaka.hls.Tag>} */ const variantTags = Utils.filterTagsByName( playlist.tags, 'EXT-X-STREAM-INF'); /** @type {!Array.<!shaka.hls.Tag>} */ const imageTags = Utils.filterTagsByName( playlist.tags, 'EXT-X-IMAGE-STREAM-INF'); /** @type {!Array.<!shaka.hls.Tag>} */ const sessionKeyTags = Utils.filterTagsByName( playlist.tags, 'EXT-X-SESSION-KEY'); this.parseCodecs_(variantTags); /** @type {!Array.<!shaka.hls.Tag>} */ const sesionDataTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-SESSION-DATA'); for (const tag of sesionDataTags) { const id = tag.getAttributeValue('DATA-ID'); const uri = tag.getAttributeValue('URI'); const language = tag.getAttributeValue('LANGUAGE'); const value = tag.getAttributeValue('VALUE'); const data = (new Map()).set('id', id); if (uri) { data.set('uri', shaka.hls.Utils.constructAbsoluteUri( this.masterPlaylistUri_, uri)); } if (language) { data.set('language', language); } if (value) { data.set('value', value); } const event = new shaka.util.FakeEvent('sessiondata', data); if (this.playerInterface_) { 
this.playerInterface_.onEvent(event); } } // Parse audio and video media tags first, so that we can extract segment // start time from audio/video streams and reuse for text streams. this.createStreamInfosFromMediaTags_(mediaTags); this.parseClosedCaptions_(mediaTags); variants = this.createVariantsForTags_(variantTags, sessionKeyTags); textStreams = this.parseTexts_(mediaTags); imageStreams = await this.parseImages_(imageTags); } // Make sure that the parser has not been destroyed. if (!this.playerInterface_) { throw new shaka.util.Error( shaka.util.Error.Severity.CRITICAL, shaka.util.Error.Category.PLAYER, shaka.util.Error.Code.OPERATION_ABORTED); } // This assert is our own sanity check. goog.asserts.assert(this.presentationTimeline_ == null, 'Presentation timeline created early!'); // We don't know if the presentation is VOD or live until we parse at least // one media playlist, so make a VOD-style presentation timeline for now // and change the type later if we discover this is live. // Since the player will load the first variant chosen early in the process, // there isn't a window during playback where the live-ness is unknown. this.presentationTimeline_ = new shaka.media.PresentationTimeline( /* presentationStartTime= */ null, /* delay= */ 0); this.presentationTimeline_.setStatic(true); // Single-variant streams aren't lazy-loaded, so for them we already have // enough info here to determine the presentation type and duration. 
if (playlist.type == shaka.hls.PlaylistType.MEDIA) { if (this.isLive_()) { this.changePresentationTimelineToLive_(playlist); const delay = this.getUpdatePlaylistDelay_(); this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay); } const streamInfos = Array.from(this.uriToStreamInfosMap_.values()); this.finalizeStreams_(streamInfos); this.determineDuration_(); } this.manifest_ = { presentationTimeline: this.presentationTimeline_, variants, textStreams, imageStreams, offlineSessionIds: [], minBufferTime: 0, sequenceMode: this.config_.hls.sequenceMode, ignoreManifestTimestampsInSegmentsMode: this.config_.hls.ignoreManifestTimestampsInSegmentsMode, type: shaka.media.ManifestParser.HLS, serviceDescription: null, }; this.playerInterface_.makeTextStreamsForClosedCaptions(this.manifest_); } /** * @param {shaka.hls.Playlist} playlist * @return {!Promise.<shaka.hls.HlsParser.BasicInfo>} * @private */ async getMediaPlaylistBasicInfo_(playlist) { const HlsParser = shaka.hls.HlsParser; const defaultFullMimeType = this.config_.hls.mediaPlaylistFullMimeType; const defaultMimeType = shaka.util.MimeUtils.getBasicType(defaultFullMimeType); const defaultType = defaultMimeType.split('/')[0]; const defaultCodecs = shaka.util.MimeUtils.getCodecs(defaultFullMimeType); const defaultBasicInfo = { type: defaultType, mimeType: defaultMimeType, codecs: defaultCodecs, language: null, height: null, width: null, channelCount: null, sampleRate: null, }; if (!playlist.segments.length) { return defaultBasicInfo; } const firstSegment = playlist.segments[0]; const parsedUri = new goog.Uri(firstSegment.absoluteUri); const extension = parsedUri.getPath().split('.').pop(); const rawMimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension]; if (rawMimeType) { return { type: 'audio', mimeType: rawMimeType, codecs: '', language: null, height: null, width: null, channelCount: null, sampleRate: null, }; } let segmentUris = [firstSegment.absoluteUri]; const initSegmentRef = this.getInitSegmentReference_( 
playlist, firstSegment.tags, new Map()); if (initSegmentRef) { segmentUris = initSegmentRef.getUris(); } const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT; const segmentRequest = shaka.net.NetworkingEngine.makeRequest( segmentUris, this.config_.retryParameters); const type = initSegmentRef ? shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT : shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT; const response = await this.makeNetworkRequest_( segmentRequest, requestType, {type}); let contentMimeType = response.headers['content-type']; if (contentMimeType) { // Split the MIME type in case the server sent additional parameters. contentMimeType = contentMimeType.split(';')[0].toLowerCase(); } if (extension == 'ts' || contentMimeType == 'video/mp2t') { const basicInfo = this.getBasicInfoFromTs_(response); if (basicInfo) { return basicInfo; } } else if (extension == 'mp4' || contentMimeType == 'video/mp4' || contentMimeType == 'audio/mp4') { const basicInfo = this.getBasicInfoFromMp4_(response); if (basicInfo) { return basicInfo; } } return defaultBasicInfo; } /** * @param {shaka.extern.Response} response * @return {?shaka.hls.HlsParser.BasicInfo} * @private */ getBasicInfoFromTs_(response) { const uint8ArrayData = shaka.util.BufferUtils.toUint8(response.data); const tsParser = new shaka.util.TsParser().parse(uint8ArrayData); const tsCodecs = tsParser.getCodecs(); const videoInfo = tsParser.getVideoInfo(); const codecs = []; let hasAudio = false; let hasVideo = false; switch (tsCodecs.audio) { case 'aac': codecs.push('mp4a.40.2'); hasAudio = true; break; case 'mp3': codecs.push('mp4a.40.34'); hasAudio = true; break; case 'ac3': codecs.push('ac-3'); hasAudio = true; break; case 'ec3': codecs.push('ec-3'); hasAudio = true; break; } switch (tsCodecs.video) { case 'avc': if (videoInfo.codec) { codecs.push(videoInfo.codec); } else { codecs.push('avc1.42E01E'); } hasVideo = true; break; case 'hvc': if (videoInfo.codec) { 
codecs.push(videoInfo.codec); } else { codecs.push('hvc1.1.6.L93.90'); } hasVideo = true; break; } if (!codecs.length) { return null; } const onlyAudio = hasAudio && !hasVideo; return { type: onlyAudio ? 'audio' : 'video', mimeType: 'video/mp2t', codecs: codecs.join(', '), language: null, height: videoInfo.height, width: videoInfo.width, channelCount: null, sampleRate: null, }; } /** * @param {shaka.extern.Response} response * @return {?shaka.hls.HlsParser.BasicInfo} * @private */ getBasicInfoFromMp4_(response) { const Mp4Parser = shaka.util.Mp4Parser; const codecs = []; let hasAudio = false; let hasVideo = false; const addCodec = (codec) => { const codecLC = codec.toLowerCase(); switch (codecLC) { case 'avc1': case 'avc3': codecs.push(codecLC + '.42E01E'); hasVideo = true; break; case 'hev1': case 'hvc1': codecs.push(codecLC + '.1.6.L93.90'); hasVideo = true; break; case 'dvh1': case 'dvhe': codecs.push(codecLC + '.05.04'); hasVideo = true; break; case 'vp09': codecs.push(codecLC + '.00.10.08'); hasVideo = true; break; case 'av01': codecs.push(codecLC + '.0.01M.08'); hasVideo = true; break; case 'mp4a': // We assume AAC, but this can be wrong since mp4a supports // others codecs codecs.push('mp4a.40.2'); hasAudio = true; break; case 'ac-3': case 'ec-3': case 'opus': case 'flac': codecs.push(codecLC); hasAudio = true; break; } }; const codecBoxParser = (box) => addCodec(box.name); /** @type {?string} */ let language = null; /** @type {?string} */ let height = null; /** @type {?string} */ let width = null; /** @type {?number} */ let channelCount = null; /** @type {?number} */ let sampleRate = null; new Mp4Parser() .box('moov', Mp4Parser.children) .box('trak', Mp4Parser.children) .fullBox('tkhd', (box) => { goog.asserts.assert( box.version != null, 'TKHD is a full box and should have a valid version.'); const parsedTKHDBox = shaka.util.Mp4BoxParsers.parseTKHD( box.reader, box.version); height = String(parsedTKHDBox.height); width = String(parsedTKHDBox.width); }) 
.box('mdia', Mp4Parser.children) .fullBox('mdhd', (box) => { goog.asserts.assert( box.version != null, 'MDHD is a full box and should have a valid version.'); const parsedMDHDBox = shaka.util.Mp4BoxParsers.parseMDHD( box.reader, box.version); language = parsedMDHDBox.language; }) .box('minf', Mp4Parser.children) .box('stbl', Mp4Parser.children) .fullBox('stsd', Mp4Parser.sampleDescription) // AUDIO // These are the various boxes that signal a codec. .box('mp4a', (box) => { const parsedMP4ABox = shaka.util.Mp4BoxParsers.parseMP4A(box.reader); channelCount = parsedMP4ABox.channelCount; sampleRate = parsedMP4ABox.sampleRate; if (box.reader.hasMoreData()) { Mp4Parser.children(box); } else { codecBoxParser(box); } }) .box('esds', (box) => { const parsedESDSBox = shaka.util.Mp4BoxParsers.parseESDS(box.reader); codecs.push(parsedESDSBox.codec); hasAudio = true; }) .box('ac-3', codecBoxParser) .box('ec-3', codecBoxParser) .box('opus', codecBoxParser) .box('Opus', codecBoxParser) .box('fLaC', codecBoxParser) // VIDEO // These are the various boxes that signal a codec. .box('avc1', (box) => { const parsedAVCBox = shaka.util.Mp4BoxParsers.parseAVC(box.reader, box.name); codecs.push(parsedAVCBox.codec); hasVideo = true; }) .box('avc3', (box) => { const parsedAVCBox = shaka.util.Mp4BoxParsers.parseAVC(box.reader, box.name); codecs.push(parsedAVCBox.codec); hasVideo = true; }) .box('hev1', codecBoxParser) .box('hvc1', codecBoxParser) .box('dvh1', codecBoxParser) .box('dvhe', codecBoxParser) .box('vp09', codecBoxParser) .box('av01', codecBoxParser) // This signals an encrypted sample, which we can go inside of to // find the codec used. // Note: If encrypted, you can only have audio or video, not both. 
        .box('enca', Mp4Parser.visualSampleEntry)
        .box('encv', Mp4Parser.visualSampleEntry)
        .box('sinf', Mp4Parser.children)
        .box('frma', (box) => {
          // The frma box names the original (pre-encryption) sample-entry
          // format; feed it through the same codec mapping as clear content.
          const {codec} = shaka.util.Mp4BoxParsers.parseFRMA(box.reader);
          addCodec(codec);
        })
        .parse(response.data, /* partialOkay= */ true);
    // Without at least one recognized codec we cannot report basic info.
    if (!codecs.length) {
      return null;
    }
    const onlyAudio = hasAudio && !hasVideo;
    return {
      type: onlyAudio ? 'audio' : 'video',
      mimeType: onlyAudio ? 'audio/mp4' : 'video/mp4',
      codecs: this.filterDuplicateCodecs_(codecs).join(', '),
      language: language,
      height: height,
      width: width,
      channelCount: channelCount,
      sampleRate: sampleRate,
    };
  }

  /**
   * Sets the presentation duration (VOD) or segment availability window
   * (live) on the presentation timeline, then locks the start time.
   * @private
   */
  determineDuration_() {
    goog.asserts.assert(this.presentationTimeline_,
        'Presentation timeline not created!');

    if (this.isLive_()) {
      // The spec says nothing much about seeking in live content, but
      // Safari's built-in HLS implementation does not allow it.  Therefore
      // we will set the availability window equal to the presentation delay.
      // The player will be able to buffer ahead three segments, but the seek
      // window will be zero-sized.
      const PresentationType = shaka.hls.HlsParser.PresentationType_;

      if (this.presentationType_ == PresentationType.LIVE) {
        let segmentAvailabilityDuration = this.getLiveDuration_();

        // This defaults to the presentation delay, which has the effect of
        // making the live stream unseekable.  This is consistent with
        // Apple's HLS implementation.
        if (this.config_.hls.useSafariBehaviorForLive) {
          segmentAvailabilityDuration = this.presentationTimeline_.getDelay();
        }

        // The app can override that with a longer duration, to allow seeking.
        if (!isNaN(this.config_.availabilityWindowOverride)) {
          segmentAvailabilityDuration = this.config_.availabilityWindowOverride;
        }

        this.presentationTimeline_.setSegmentAvailabilityDuration(
            segmentAvailabilityDuration);
      }
    } else {
      // Use the minimum duration as the presentation duration.
      this.presentationTimeline_.setDuration(this.getMinDuration_());
    }

    // This is the first point where we have a meaningful presentation start
    // time, and we need to tell PresentationTimeline that so that it can
    // maintain consistency from here on.
    this.presentationTimeline_.lockStartTime();

    // This asserts that the live edge is being calculated from segment
    // times.  For VOD and event streams, this check should still pass.
    goog.asserts.assert(
        !this.presentationTimeline_.usingPresentationStartTime(),
        'We should not be using the presentation start time in HLS!');
  }

  /**
   * Collects NAME/VALUE variable definitions from the master playlist into
   * the global variable map.  The first definition of a name wins; later
   * duplicates are ignored.
   * NOTE(review): these look like EXT-X-DEFINE tags (NAME/VALUE attributes),
   * not variant tags as previously documented — confirm at the call site.
   *
   * @param {!Array.<!shaka.hls.Tag>} tags Variable-definition tags from the
   *   playlist.
   * @private
   */
  parseMasterVariables_(tags) {
    for (const variableTag of tags) {
      const name = variableTag.getAttributeValue('NAME');
      const value = variableTag.getAttributeValue('VALUE');
      if (name && value) {
        // First definition wins; do not overwrite an existing variable.
        if (!this.globalVariables_.has(name)) {
          this.globalVariables_.set(name, value);
        }
      }
    }
  }

  /**
   * Collects NAME/VALUE variable definitions from a media playlist into a
   * new map.  An IMPORT attribute copies a previously-defined master-playlist
   * variable into the media playlist's scope (silently skipped if the global
   * variable does not exist).
   *
   * @param {!Array.<!shaka.hls.Tag>} tags Variable-definition tags from the
   *   playlist.
   * @return {!Map.<string, string>}
   * @private
   */
  parseMediaVariables_(tags) {
    const mediaVariables = new Map();
    for (const variableTag of tags) {
      const name = variableTag.getAttributeValue('NAME');
      const value = variableTag.getAttributeValue('VALUE');
      const mediaImport = variableTag.getAttributeValue('IMPORT');
      if (name && value) {
        mediaVariables.set(name, value);
      }
      if (mediaImport) {
        const globalValue = this.globalVariables_.get(mediaImport);
        if (globalValue) {
          mediaVariables.set(mediaImport, globalValue);
        }
      }
    }
    return mediaVariables;
  }

  /**
   * Get the codecs of each variant tag, and store in a map from
   * audio/video/subtitle group id to the codecs arraylist.
   * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
   * @private
   */
  parseCodecs_(tags) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    for (const variantTag of tags) {
      const audioGroupId = variantTag.getAttributeValue('AUDIO');
      const videoGroupId = variantTag.getAttributeValue('VIDEO');
      const subGroupId = variantTag.getAttributeValue('SUBTITLES');
      const allCodecs = this.getCodecsForVariantTag_(variantTag);

      if (subGroupId) {
        const textCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.TEXT, allCodecs);
        goog.asserts.assert(textCodecs != null, 'Text codecs should be valid.');
        this.groupIdToCodecsMap_.set(subGroupId, textCodecs);
        // Remove the text codecs so only audio/video codecs remain for the
        // guesses below.
        shaka.util.ArrayUtils.remove(allCodecs, textCodecs);
      }
      if (audioGroupId) {
        let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.AUDIO, allCodecs);
        if (!codecs) {
          // Could not identify an audio codec; use the configured default.
          codecs = this.config_.hls.defaultAudioCodec;
        }
        this.groupIdToCodecsMap_.set(audioGroupId, codecs);
      }
      if (videoGroupId) {
        let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.VIDEO, allCodecs);
        if (!codecs) {
          // Could not identify a video codec; use the configured default.
          codecs = this.config_.hls.defaultVideoCodec;
        }
        this.groupIdToCodecsMap_.set(videoGroupId, codecs);
      }
    }
  }

  /**
   * Parse Subtitles and Closed Captions from 'EXT-X-MEDIA' tags.
   * Create text streams for Subtitles, but not Closed Captions.
   *
   * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
   * @return {!Array.<!shaka.extern.Stream>}
   * @private
   */
  parseTexts_(mediaTags) {
    // Create text stream for each Subtitle media tag.
    const subtitleTags =
        shaka.hls.Utils.filterTagsByType(mediaTags, 'SUBTITLES');
    const textStreams = subtitleTags.map((tag) => {
      const disableText = this.config_.disableText;
      if (disableText) {
        return null;
      }
      try {
        return this.createStreamInfoFromMediaTag_(tag).stream;
      } catch (e) {
        // Text stream failures may be configured as non-fatal; in that case
        // the failed tag is simply dropped.
        if (this.config_.hls.ignoreTextStreamFailures) {
          return null;
        }
        throw e;
      }
    });

    const type = shaka.util.ManifestParserUtils.ContentType.TEXT;

    // Set the codecs for text streams.
for (const tag of subtitleTags) { const groupId = tag.getRequiredAttrValue('GROUP-ID'); const codecs = this.groupIdToCodecsMap_.get(groupId); if (codecs) { const textStreamInfos = this.groupIdToStreamInfosMap_.get(groupId); if (textStreamInfos) { for (const textStreamInfo of textStreamInfos) { textStreamInfo.stream.codecs = codecs; textStreamInfo.stream.mimeType = this.guessMimeTypeBeforeLoading_(type, codecs) || this.guessMimeTypeFallback_(type); } } } } // Do not create text streams for Closed captions. return textStreams.filter((s) => s); } /** * @param {!Array.<!shaka.hls.Tag>} imageTags from the playlist. * @return {!Promise.<!Array.<!shaka.extern.Stream>>} * @private */ async parseImages_(imageTags) { // Create image stream for each image tag. const imageStreamPromises = imageTags.map(async (tag) => { const disableThumbnails = this.config_.disableThumbnails; if (disableThumbnails) { return null; } try { const streamInfo = await this.createStreamInfoFromImageTag_(tag); return streamInfo.stream; } catch (e) { if (this.config_.hls.ignoreImageStreamFailures) { return null; } throw e; } }); const imageStreams = await Promise.all(imageStreamPromises); return imageStreams.filter((s) => s); } /** * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist. * @private */ createStreamInfosFromMediaTags_(mediaTags) { // Filter out subtitles and media tags without uri. mediaTags = mediaTags.filter((tag) => { const uri = tag.getAttributeValue('URI') || ''; const type = tag.getAttributeValue('TYPE'); return type != 'SUBTITLES' && uri != ''; }); // Create stream info for each audio / video media tag. for (const tag of mediaTags) { this.createStreamInfoFromMediaTag_(tag); } } /** * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist. * @param {!Array.<!shaka.hls.Tag>} sessionKeyTags EXT-X-SESSION-KEY tags * from the playlist. 
   * @return {!Array.<!shaka.extern.Variant>}
   * @private
   */
  createVariantsForTags_(tags, sessionKeyTags) {
    // EXT-X-SESSION-KEY processing
    const drmInfos = [];
    const keyIds = new Set();
    if (sessionKeyTags.length > 0) {
      for (const drmTag of sessionKeyTags) {
        const method = drmTag.getRequiredAttrValue('METHOD');
        // Only DRM key methods are processed here; NONE and AES-128 are
        // skipped.
        if (method != 'NONE' && method != 'AES-128') {
          // According to the HLS spec, KEYFORMAT is optional and implicitly
          // defaults to "identity".
          // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
          const keyFormat =
              drmTag.getAttributeValue('KEYFORMAT') || 'identity';
          const drmParser =
              shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];

          const drmInfo = drmParser ?
              drmParser(drmTag, /* mimeType= */ '') : null;
          if (drmInfo) {
            // Accumulate key IDs across all session-key tags.
            if (drmInfo.keyIds) {
              for (const keyId of drmInfo.keyIds) {
                keyIds.add(keyId);
              }
            }
            drmInfos.push(drmInfo);
          } else {
            shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
          }
        }
      }
    }

    // Create variants for each variant tag.
    const allVariants = tags.map((tag) => {
      const frameRate = tag.getAttributeValue('FRAME-RATE');
      // Prefer AVERAGE-BANDWIDTH when present; BANDWIDTH is required.
      const bandwidth = Number(tag.getAttributeValue('AVERAGE-BANDWIDTH')) ||
          Number(tag.getRequiredAttrValue('BANDWIDTH'));

      // RESOLUTION is "WIDTHxHEIGHT" when present.
      const resolution = tag.getAttributeValue('RESOLUTION');
      const [width, height] = resolution ?
          resolution.split('x') : [null, null];

      const videoRange = tag.getAttributeValue('VIDEO-RANGE');

      const streamInfos =
          this.createStreamInfosForVariantTag_(tag, resolution, frameRate);

      goog.asserts.assert(streamInfos.audio.length ||
          streamInfos.video.length, 'We should have created a stream!');

      return this.createVariants_(
          streamInfos.audio,
          streamInfos.video,
          bandwidth,
          width,
          height,
          frameRate,
          videoRange,
          drmInfos,
          keyIds);
    });
    // Flatten the per-tag variant arrays into a single list.
    let variants = allVariants.reduce(shaka.util.Functional.collapseArrays, []);
    // Filter out null variants.
    variants = variants.filter((variant) => variant != null);
    return variants;
  }

  /**
   * Create audio and video streamInfos from an 'EXT-X-STREAM-INF' tag and its
   * related media tags.
   *
   * @param {!shaka.hls.Tag} tag
   * @param {?string} resolution
   * @param {?string} frameRate
   * @return {!shaka.hls.HlsParser.StreamInfos}
   * @privat