UNPKG

shaka-player

Version:
1,379 lines (1,226 loc) 125 kB
/*! @license * Shaka Player * Copyright 2016 Google LLC * SPDX-License-Identifier: Apache-2.0 */ goog.provide('shaka.hls.HlsParser'); goog.require('goog.Uri'); goog.require('goog.asserts'); goog.require('shaka.hls.ManifestTextParser'); goog.require('shaka.hls.Playlist'); goog.require('shaka.hls.PlaylistType'); goog.require('shaka.hls.Tag'); goog.require('shaka.hls.Utils'); goog.require('shaka.log'); goog.require('shaka.media.DrmEngine'); goog.require('shaka.media.InitSegmentReference'); goog.require('shaka.media.ManifestParser'); goog.require('shaka.media.MediaSourceEngine'); goog.require('shaka.media.PresentationTimeline'); goog.require('shaka.media.SegmentIndex'); goog.require('shaka.media.SegmentReference'); goog.require('shaka.net.DataUriPlugin'); goog.require('shaka.net.NetworkingEngine'); goog.require('shaka.util.ArrayUtils'); goog.require('shaka.util.BufferUtils'); goog.require('shaka.util.CmcdManager'); goog.require('shaka.util.Error'); goog.require('shaka.util.FakeEvent'); goog.require('shaka.util.Functional'); goog.require('shaka.util.LanguageUtils'); goog.require('shaka.util.ManifestParserUtils'); goog.require('shaka.util.MimeUtils'); goog.require('shaka.util.OperationManager'); goog.require('shaka.util.Pssh'); goog.require('shaka.util.Timer'); goog.require('shaka.util.Platform'); goog.require('shaka.util.Uint8ArrayUtils'); goog.require('shaka.util.XmlUtils'); goog.requireType('shaka.hls.Segment'); /** * HLS parser. * * @implements {shaka.extern.ManifestParser} * @export */ shaka.hls.HlsParser = class { /** * Creates an Hls Parser object. */ constructor() { /** @private {?shaka.extern.ManifestParser.PlayerInterface} */ this.playerInterface_ = null; /** @private {?shaka.extern.ManifestConfiguration} */ this.config_ = null; /** @private {number} */ this.globalId_ = 1; /** @private {!Map.<string, string>} */ this.globalVariables_ = new Map(); /** * A map from group id to stream infos created from the media tags. 
* @private {!Map.<string, !Array.<?shaka.hls.HlsParser.StreamInfo>>} */ this.groupIdToStreamInfosMap_ = new Map(); /** * For media playlist lazy-loading to work in livestreams, we have to assume * that each stream of a type (video, audio, etc) has the same mappings of * sequence number to start time. * This map stores those relationships. * Only used during livestreams; we do not assume that VOD content is * aligned in that way. * @private {!Map.<string, !Map.<number, number>>} */ this.mediaSequenceToStartTimeByType_ = new Map(); // Set initial maps. const ContentType = shaka.util.ManifestParserUtils.ContentType; this.mediaSequenceToStartTimeByType_.set(ContentType.VIDEO, new Map()); this.mediaSequenceToStartTimeByType_.set(ContentType.AUDIO, new Map()); this.mediaSequenceToStartTimeByType_.set(ContentType.TEXT, new Map()); this.mediaSequenceToStartTimeByType_.set(ContentType.IMAGE, new Map()); /** * The values are strings of the form "<VIDEO URI> - <AUDIO URI>", * where the URIs are the verbatim media playlist URIs as they appeared in * the master playlist. * * Used to avoid duplicates that vary only in their text stream. * * @private {!Set.<string>} */ this.variantUriSet_ = new Set(); /** * A map from (verbatim) media playlist URI to stream infos representing the * playlists. * * On update, used to iterate through and update from media playlists. * * On initial parse, used to iterate through and determine minimum * timestamps, offsets, and to handle TS rollover. * * During parsing, used to avoid duplicates in the async methods * createStreamInfoFromMediaTag_, createStreamInfoFromImageTag_ and * createStreamInfoFromVariantTag_. * * @private {!Map.<string, shaka.hls.HlsParser.StreamInfo>} */ this.uriToStreamInfosMap_ = new Map(); /** @private {?shaka.media.PresentationTimeline} */ this.presentationTimeline_ = null; /** * The master playlist URI, after redirects. 
* * @private {string} */ this.masterPlaylistUri_ = ''; /** @private {shaka.hls.ManifestTextParser} */ this.manifestTextParser_ = new shaka.hls.ManifestTextParser(); /** * This is the number of seconds we want to wait between finishing a * manifest update and starting the next one. This will be set when we parse * the manifest. * * @private {number} */ this.updatePlaylistDelay_ = 0; /** * The minimum sequence number for generated segments, when ignoring * EXT-X-PROGRAM-DATE-TIME. * * @private {number} */ this.minSequenceNumber_ = -1; /** * The lowest time value for any of the streams, as defined by the * EXT-X-PROGRAM-DATE-TIME value. Measured in seconds since January 1, 1970. * * @private {number} */ this.lowestSyncTime_ = Infinity; /** * Whether the streams have previously been "finalized"; that is to say, * whether we have loaded enough streams to know information about the asset * such as timing information, live status, etc. * * @private {boolean} */ this.streamsFinalized_ = false; /** * This timer is used to trigger the start of a manifest update. A manifest * update is async. Once the update is finished, the timer will be restarted * to trigger the next update. The timer will only be started if the content * is live content. * * @private {shaka.util.Timer} */ this.updatePlaylistTimer_ = new shaka.util.Timer(() => { this.onUpdate_(); }); /** @private {shaka.hls.HlsParser.PresentationType_} */ this.presentationType_ = shaka.hls.HlsParser.PresentationType_.VOD; /** @private {?shaka.extern.Manifest} */ this.manifest_ = null; /** @private {number} */ this.maxTargetDuration_ = 0; /** @private {number} */ this.minTargetDuration_ = Infinity; /** Partial segments target duration. 
* @private {number} */ this.partialTargetDuration_ = 0; /** @private {number} */ this.lowLatencyPresentationDelay_ = 0; /** @private {shaka.util.OperationManager} */ this.operationManager_ = new shaka.util.OperationManager(); /** A map from closed captions' group id, to a map of closed captions info. * {group id -> {closed captions channel id -> language}} * @private {Map.<string, Map.<string, string>>} */ this.groupIdToClosedCaptionsMap_ = new Map(); /** @private {Map.<string, string>} */ this.groupIdToCodecsMap_ = new Map(); /** A cache mapping EXT-X-MAP tag info to the InitSegmentReference created * from the tag. * The key is a string combining the EXT-X-MAP tag's absolute uri, and * its BYTERANGE if available. * {!Map.<string, !shaka.media.InitSegmentReference>} */ this.mapTagToInitSegmentRefMap_ = new Map(); /** @private {boolean} */ this.lowLatencyMode_ = false; } /** * @override * @exportInterface */ configure(config) { this.config_ = config; } /** * @override * @exportInterface */ async start(uri, playerInterface) { goog.asserts.assert(this.config_, 'Must call configure() before start()!'); this.playerInterface_ = playerInterface; this.lowLatencyMode_ = playerInterface.isLowLatencyMode(); const response = await this.requestManifest_(uri); // Record the master playlist URI after redirects. this.masterPlaylistUri_ = response.uri; goog.asserts.assert(response.data, 'Response data should be non-null!'); await this.parseManifest_(response.data, uri); goog.asserts.assert(this.manifest_, 'Manifest should be non-null'); return this.manifest_; } /** * @override * @exportInterface */ stop() { // Make sure we don't update the manifest again. Even if the timer is not // running, this is safe to call. 
if (this.updatePlaylistTimer_) { this.updatePlaylistTimer_.stop(); this.updatePlaylistTimer_ = null; } /** @type {!Array.<!Promise>} */ const pending = []; if (this.operationManager_) { pending.push(this.operationManager_.destroy()); this.operationManager_ = null; } this.playerInterface_ = null; this.config_ = null; this.variantUriSet_.clear(); this.manifest_ = null; this.uriToStreamInfosMap_.clear(); this.groupIdToStreamInfosMap_.clear(); this.groupIdToCodecsMap_.clear(); this.globalVariables_.clear(); return Promise.all(pending); } /** * @override * @exportInterface */ async update() { if (!this.isLive_()) { return; } /** @type {!Array.<!Promise>} */ const updates = []; const streamInfos = Array.from(this.uriToStreamInfosMap_.values()); // Only update active streams. const activeStreamInfos = streamInfos.filter((s) => s.stream.segmentIndex); for (const streamInfo of activeStreamInfos) { updates.push(this.updateStream_(streamInfo)); } await Promise.all(updates); // Now that streams have been updated, notify the presentation timeline. this.notifySegmentsForStreams_(activeStreamInfos.map((s) => s.stream)); // If any hasEndList is false, the stream is still live. const stillLive = activeStreamInfos.some((s) => s.hasEndList == false); if (activeStreamInfos.length && !stillLive) { // Convert the presentation to VOD and set the duration. const PresentationType = shaka.hls.HlsParser.PresentationType_; this.setPresentationType_(PresentationType.VOD); // The duration is the minimum of the end times of all active streams. // Non-active streams are not guaranteed to have useful maxTimestamp // values, due to the lazy-loading system, so they are ignored. const maxTimestamps = activeStreamInfos.map((s) => s.maxTimestamp); // The duration is the minimum of the end times of all streams. 
this.presentationTimeline_.setDuration(Math.min(...maxTimestamps)); this.playerInterface_.updateDuration(); } if (stillLive) { this.determineDuration_(); } } /** * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo * @return {!Map.<number, number>} * @private */ getMediaSequenceToStartTimeFor_(streamInfo) { if (this.isLive_()) { return this.mediaSequenceToStartTimeByType_.get(streamInfo.type); } else { return streamInfo.mediaSequenceToStartTime; } } /** * Updates a stream. * * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo * @return {!Promise} * @private */ async updateStream_(streamInfo) { const manifestUri = streamInfo.absoluteMediaPlaylistUri; const uriObj = new goog.Uri(manifestUri); if (this.lowLatencyMode_ && streamInfo.canSkipSegments) { // Enable delta updates. This will replace older segments with // 'EXT-X-SKIP' tag in the media playlist. uriObj.setQueryData(new goog.Uri.QueryData('_HLS_skip=YES')); } const response = await this.requestManifest_(uriObj.toString()); if (!streamInfo.stream.segmentIndex) { // The stream was closed since the update was first requested. 
return; } /** @type {shaka.hls.Playlist} */ const playlist = this.manifestTextParser_.parsePlaylist( response.data, response.uri); if (playlist.type != shaka.hls.PlaylistType.MEDIA) { throw new shaka.util.Error( shaka.util.Error.Severity.CRITICAL, shaka.util.Error.Category.MANIFEST, shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY); } /** @type {!Array.<!shaka.hls.Tag>} */ const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE'); const mediaVariables = this.parseMediaVariables_(variablesTags); const stream = streamInfo.stream; const mediaSequenceToStartTime = this.getMediaSequenceToStartTimeFor_(streamInfo); const {keyIds, drmInfos} = this.parseDrmInfo_(playlist, stream.mimeType); const keysAreEqual = (a, b) => a.size === b.size && [...a].every((value) => b.has(value)); if (!keysAreEqual(stream.keyIds, keyIds)) { stream.keyIds = keyIds; stream.drmInfos = drmInfos; this.playerInterface_.newDrmInfo(stream); } const segments = this.createSegments_( streamInfo.verbatimMediaPlaylistUri, playlist, stream.type, stream.mimeType, mediaSequenceToStartTime, mediaVariables); stream.segmentIndex.mergeAndEvict( segments, this.presentationTimeline_.getSegmentAvailabilityStart()); if (segments.length) { const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber( playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0); const playlistStartTime = mediaSequenceToStartTime.get( mediaSequenceNumber); stream.segmentIndex.evict(playlistStartTime); } const oldSegment = segments[0]; goog.asserts.assert(oldSegment, 'Should have segments!'); streamInfo.minTimestamp = oldSegment.startTime; const newestSegment = segments[segments.length - 1]; goog.asserts.assert(newestSegment, 'Should have segments!'); streamInfo.maxTimestamp = newestSegment.endTime; // Once the last segment has been added to the playlist, // #EXT-X-ENDLIST tag will be appended. // If that happened, treat the rest of the EVENT presentation as VOD. 
const endListTag = shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST'); if (endListTag) { // Flag this for later. We don't convert the whole presentation into VOD // until we've seen the ENDLIST tag for all active playlists. streamInfo.hasEndList = true; } } /** * @override * @exportInterface */ onExpirationUpdated(sessionId, expiration) { // No-op } /** * Align the streams by sequence number by dropping early segments. Then * offset the streams to begin at presentation time 0. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos * @private */ syncStreamsWithSequenceNumber_(streamInfos) { // We assume that, when this is first called, we have enough info to // determine how to use the program date times (e.g. we have both a video // and an audio, and all other videos and audios match those). // Thus, we only need to calculate this once. const updateMinSequenceNumber = this.minSequenceNumber_ == -1; // Sync using media sequence number. Find the highest starting sequence // number among all streams. Later, we will drop any references to // earlier segments in other streams, then offset everything back to 0. for (const streamInfo of streamInfos) { const segmentIndex = streamInfo.stream.segmentIndex; goog.asserts.assert(segmentIndex, 'Only loaded streams should be synced'); const mediaSequenceToStartTime = this.getMediaSequenceToStartTimeFor_(streamInfo); const segment0 = segmentIndex.earliestReference(); if (segment0) { // This looks inefficient, but iteration order is insertion order. // So the very first entry should be the one we want. // We assert that this holds true so that we are alerted by debug // builds and tests if it changes. We still do a loop, though, so // that the code functions correctly in production no matter what. 
if (goog.DEBUG) { const firstSequenceStartTime = mediaSequenceToStartTime.values().next().value; goog.asserts.assert( firstSequenceStartTime == segment0.startTime, 'Sequence number map is not ordered as expected!'); } for (const [sequence, start] of mediaSequenceToStartTime) { if (start == segment0.startTime) { if (updateMinSequenceNumber) { this.minSequenceNumber_ = Math.max( this.minSequenceNumber_, sequence); } // Even if we already have decided on a value for // |this.minSequenceNumber_|, we still need to determine the first // sequence number for the stream, to offset it in the code below. streamInfo.firstSequenceNumber = sequence; break; } } } } if (this.minSequenceNumber_ < 0) { // Nothing to sync. return; } shaka.log.debug('Syncing HLS streams against base sequence number:', this.minSequenceNumber_); for (const streamInfo of streamInfos) { const segmentIndex = streamInfo.stream.segmentIndex; if (segmentIndex) { // Drop any earlier references. const numSegmentsToDrop = this.minSequenceNumber_ - streamInfo.firstSequenceNumber; segmentIndex.dropFirstReferences(numSegmentsToDrop); // Now adjust timestamps back to begin at 0. const segmentN = segmentIndex.earliestReference(); if (segmentN) { const streamOffset = -segmentN.startTime; // Modify all SegmentReferences equally. streamInfo.stream.segmentIndex.offset(streamOffset); // Update other parts of streamInfo the same way. this.offsetStreamInfo_(streamInfo, streamOffset); } } } } /** * Synchronize streams by the EXT-X-PROGRAM-DATE-TIME tags attached to their * segments. Also normalizes segment times so that the earliest segment in * any stream is at time 0. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos * @private */ syncStreamsWithProgramDateTime_(streamInfos) { // We assume that, when this is first called, we have enough info to // determine how to use the program date times (e.g. we have both a video // and an audio, and all other videos and audios match those). 
// Thus, we only need to calculate this once. if (this.lowestSyncTime_ == Infinity) { for (const streamInfo of streamInfos) { const segmentIndex = streamInfo.stream.segmentIndex; goog.asserts.assert(segmentIndex, 'Only loaded streams should be synced'); const segment0 = segmentIndex.earliestReference(); if (segment0 != null && segment0.syncTime != null) { this.lowestSyncTime_ = Math.min(this.lowestSyncTime_, segment0.syncTime); } } } const lowestSyncTime = this.lowestSyncTime_; if (lowestSyncTime == Infinity) { // Nothing to sync. return; } shaka.log.debug('Syncing HLS streams against base time:', lowestSyncTime); for (const streamInfo of this.uriToStreamInfosMap_.values()) { const segmentIndex = streamInfo.stream.segmentIndex; if (segmentIndex != null) { // A segment's startTime should be based on its syncTime vs the lowest // syncTime across all streams. The earliest segment sync time from // any stream will become presentation time 0. If two streams start // e.g. 6 seconds apart in syncTime, then their first segments will // also start 6 seconds apart in presentation time. const segment0 = segmentIndex.earliestReference(); if (segment0.syncTime == null) { shaka.log.alwaysError('Missing EXT-X-PROGRAM-DATE-TIME for stream', streamInfo.verbatimMediaPlaylistUri, 'Expect AV sync issues!'); } else { // Stream metadata are offset by a fixed amount based on the // first segment. const segment0TargetTime = segment0.syncTime - lowestSyncTime; const streamOffset = segment0TargetTime - segment0.startTime; this.offsetStreamInfo_(streamInfo, streamOffset); // This is computed across all segments separately to manage // accumulated drift in durations. for (const segment of segmentIndex) { segment.syncAgainst(lowestSyncTime); } } } } } /** * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo * @param {number} offset * @private */ offsetStreamInfo_(streamInfo, offset) { // Adjust our accounting of the minimum timestamp. 
streamInfo.minTimestamp += offset; // Adjust our accounting of the maximum timestamp. streamInfo.maxTimestamp += offset; goog.asserts.assert(streamInfo.maxTimestamp >= 0, 'Negative maxTimestamp after adjustment!'); // Update our map from sequence number to start time. const mediaSequenceToStartTime = this.getMediaSequenceToStartTimeFor_(streamInfo); for (const [key, value] of mediaSequenceToStartTime) { mediaSequenceToStartTime.set(key, value + offset); } shaka.log.debug('Offset', offset, 'applied to', streamInfo.verbatimMediaPlaylistUri); } /** * Parses the manifest. * * @param {BufferSource} data * @param {string} uri * @return {!Promise} * @private */ async parseManifest_(data, uri) { const HlsParser = shaka.hls.HlsParser; const Utils = shaka.hls.Utils; goog.asserts.assert(this.masterPlaylistUri_, 'Master playlist URI must be set before calling parseManifest_!'); const playlist = this.manifestTextParser_.parsePlaylist( data, this.masterPlaylistUri_); /** @type {!Array.<!shaka.hls.Tag>} */ const variablesTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE'); this.parseMasterVariables_(variablesTags); /** @type {!Array.<!shaka.extern.Variant>} */ let variants = []; /** @type {!Array.<!shaka.extern.Stream>} */ let textStreams = []; /** @type {!Array.<!shaka.extern.Stream>} */ let imageStreams = []; // Parsing a media playlist results in a single-variant stream. if (playlist.type == shaka.hls.PlaylistType.MEDIA) { // Get necessary info for this stream, from the config. These are things // we would normally find from the master playlist (e.g. from values on // EXT-X-MEDIA tags). let fullMimeType = this.config_.hls.mediaPlaylistFullMimeType; // Try to infer the full mimetype better. 
if (playlist.segments.length) {
        // Infer a better mime type from the first segment's file extension.
        const firstSegmentUri = new goog.Uri(playlist.segments[0].absoluteUri);
        const extension = firstSegmentUri.getPath().split('.').pop();

        let inferredMimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
        if (inferredMimeType) {
          fullMimeType = inferredMimeType;
        } else if (extension === 'ts') {
          // TODO: Fetch one segment and use the TsParser to analyze whether
          // it contains video, audio or both.
        } else if (extension === 'mp4') {
          // TODO: Fetch one segment and use the Mp4Parser to analyze whether
          // it contains video, audio or both.
        } else if (HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_[extension]) {
          inferredMimeType =
              HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_[extension];
          const defaultAudioCodec = this.config_.hls.defaultAudioCodec;
          fullMimeType = `${inferredMimeType}; codecs="${defaultAudioCodec}"`;
        } else if (HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_[extension]) {
          inferredMimeType =
              HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_[extension];
          const defaultVideoCodec = this.config_.hls.defaultVideoCodec;
          fullMimeType = `${inferredMimeType}; codecs="${defaultVideoCodec}"`;
        }
      }

      const mimeType = shaka.util.MimeUtils.getBasicType(fullMimeType);
      const type = mimeType.split('/')[0];
      const codecs = shaka.util.MimeUtils.getCodecs(fullMimeType);

      // Some values we cannot figure out, and aren't important enough to ask
      // the user to provide through config values. A lot of these are only
      // relevant to ABR, which isn't necessary if there's only one variant.
      // So these unknowns should be set to false or null, largely.
      const language = '';
      const channelsCount = null;
      const spatialAudio = false;
      const characteristics = null;
      const closedCaptions = new Map();
      const forced = false;  // Only relevant for text.
      const primary = true;  // This is the only stream!
      const name = 'Media Playlist';

      // Make the stream info, with those values.
const streamInfo = await this.convertParsedPlaylistIntoStreamInfo_( playlist, uri, uri, codecs, type, language, primary, name, channelsCount, closedCaptions, characteristics, forced, spatialAudio, mimeType); this.uriToStreamInfosMap_.set(uri, streamInfo); // Wrap the stream from that stream info with a variant. variants.push({ id: 0, language: 'und', disabledUntilTime: 0, primary: true, audio: type == 'audio' ? streamInfo.stream : null, video: type == 'video' ? streamInfo.stream : null, bandwidth: 0, allowedByApplication: true, allowedByKeySystem: true, decodingInfos: [], }); } else { /** @type {!Array.<!shaka.hls.Tag>} */ const mediaTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-MEDIA'); /** @type {!Array.<!shaka.hls.Tag>} */ const variantTags = Utils.filterTagsByName( playlist.tags, 'EXT-X-STREAM-INF'); /** @type {!Array.<!shaka.hls.Tag>} */ const imageTags = Utils.filterTagsByName( playlist.tags, 'EXT-X-IMAGE-STREAM-INF'); /** @type {!Array.<!shaka.hls.Tag>} */ const sessionKeyTags = Utils.filterTagsByName( playlist.tags, 'EXT-X-SESSION-KEY'); this.parseCodecs_(variantTags); /** @type {!Array.<!shaka.hls.Tag>} */ const sesionDataTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-SESSION-DATA'); for (const tag of sesionDataTags) { const id = tag.getAttributeValue('DATA-ID'); const uri = tag.getAttributeValue('URI'); const language = tag.getAttributeValue('LANGUAGE'); const value = tag.getAttributeValue('VALUE'); const data = (new Map()).set('id', id); if (uri) { data.set('uri', shaka.hls.Utils.constructAbsoluteUri( this.masterPlaylistUri_, uri)); } if (language) { data.set('language', language); } if (value) { data.set('value', value); } const event = new shaka.util.FakeEvent('sessiondata', data); if (this.playerInterface_) { this.playerInterface_.onEvent(event); } } // Parse audio and video media tags first, so that we can extract segment // start time from audio/video streams and reuse for text streams. 
this.createStreamInfosFromMediaTags_(mediaTags); this.parseClosedCaptions_(mediaTags); variants = this.createVariantsForTags_(variantTags, sessionKeyTags); textStreams = this.parseTexts_(mediaTags); imageStreams = await this.parseImages_(imageTags); } // Make sure that the parser has not been destroyed. if (!this.playerInterface_) { throw new shaka.util.Error( shaka.util.Error.Severity.CRITICAL, shaka.util.Error.Category.PLAYER, shaka.util.Error.Code.OPERATION_ABORTED); } // This assert is our own sanity check. goog.asserts.assert(this.presentationTimeline_ == null, 'Presentation timeline created early!'); // We don't know if the presentation is VOD or live until we parse at least // one media playlist, so make a VOD-style presentation timeline for now // and change the type later if we discover this is live. // Since the player will load the first variant chosen early in the process, // there isn't a window during playback where the live-ness is unknown. this.presentationTimeline_ = new shaka.media.PresentationTimeline( /* presentationStartTime= */ null, /* delay= */ 0); this.presentationTimeline_.setStatic(true); // Single-variant streams aren't lazy-loaded, so for them we already have // enough info here to determine the presentation type and duration. 
if (playlist.type == shaka.hls.PlaylistType.MEDIA) { if (this.isLive_()) { this.changePresentationTimelineToLive_(); const delay = this.updatePlaylistDelay_; this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay); } const streamInfos = Array.from(this.uriToStreamInfosMap_.values()); this.finalizeStreams_(streamInfos); this.determineDuration_(); } this.manifest_ = { presentationTimeline: this.presentationTimeline_, variants, textStreams, imageStreams, offlineSessionIds: [], minBufferTime: 0, sequenceMode: true, }; this.playerInterface_.makeTextStreamsForClosedCaptions(this.manifest_); } /** @private */ determineDuration_() { goog.asserts.assert(this.presentationTimeline_, 'Presentation timeline not created!'); if (this.isLive_()) { // The HLS spec (RFC 8216) states in 6.3.4: // "the client MUST wait for at least the target duration before // attempting to reload the Playlist file again". // For LL-HLS, the server must add a new partial segment to the Playlist // every part target duration. this.updatePlaylistDelay_ = this.minTargetDuration_; // The spec says nothing much about seeking in live content, but Safari's // built-in HLS implementation does not allow it. Therefore we will set // the availability window equal to the presentation delay. The player // will be able to buffer ahead three segments, but the seek window will // be zero-sized. const PresentationType = shaka.hls.HlsParser.PresentationType_; if (this.presentationType_ == PresentationType.LIVE) { let segmentAvailabilityDuration = this.getLiveDuration_(); // This defaults to the presentation delay, which has the effect of // making the live stream unseekable. This is consistent with Apple's // HLS implementation. if (this.config_.hls.useSafariBehaviorForLive) { segmentAvailabilityDuration = this.presentationTimeline_.getDelay(); } // The app can override that with a longer duration, to allow seeking. 
if (!isNaN(this.config_.availabilityWindowOverride)) { segmentAvailabilityDuration = this.config_.availabilityWindowOverride; } this.presentationTimeline_.setSegmentAvailabilityDuration( segmentAvailabilityDuration); } } else { // Use the minimum duration as the presentation duration. this.presentationTimeline_.setDuration(this.getMinDuration_()); } // This is the first point where we have a meaningful presentation start // time, and we need to tell PresentationTimeline that so that it can // maintain consistency from here on. this.presentationTimeline_.lockStartTime(); // This asserts that the live edge is being calculated from segment times. // For VOD and event streams, this check should still pass. goog.asserts.assert( !this.presentationTimeline_.usingPresentationStartTime(), 'We should not be using the presentation start time in HLS!'); } /** * Get the variables of each variant tag, and store in a map. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist. * @private */ parseMasterVariables_(tags) { for (const variableTag of tags) { const name = variableTag.getAttributeValue('NAME'); const value = variableTag.getAttributeValue('VALUE'); if (name && value) { if (!this.globalVariables_.has(name)) { this.globalVariables_.set(name, value); } } } } /** * Get the variables of each variant tag, and store in a map. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist. 
* @return {!Map.<string, string>} * @private */ parseMediaVariables_(tags) { const mediaVariables = new Map(); for (const variableTag of tags) { const name = variableTag.getAttributeValue('NAME'); const value = variableTag.getAttributeValue('VALUE'); const mediaImport = variableTag.getAttributeValue('IMPORT'); if (name && value) { mediaVariables.set(name, value); } if (mediaImport) { const globalValue = this.globalVariables_.get(mediaImport); if (globalValue) { mediaVariables.set(mediaImport, globalValue); } } } return mediaVariables; } /** * Get the codecs of each variant tag, and store in a map from * audio/video/subtitle group id to the codecs arraylist. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist. * @private */ parseCodecs_(tags) { const ContentType = shaka.util.ManifestParserUtils.ContentType; for (const variantTag of tags) { const audioGroupId = variantTag.getAttributeValue('AUDIO'); const videoGroupId = variantTag.getAttributeValue('VIDEO'); const subGroupId = variantTag.getAttributeValue('SUBTITLES'); const allCodecs = this.getCodecsForVariantTag_(variantTag); if (subGroupId) { const textCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe( ContentType.TEXT, allCodecs); goog.asserts.assert(textCodecs != null, 'Text codecs should be valid.'); this.groupIdToCodecsMap_.set(subGroupId, textCodecs); shaka.util.ArrayUtils.remove(allCodecs, textCodecs); } if (audioGroupId) { let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe( ContentType.AUDIO, allCodecs); if (!codecs) { codecs = this.config_.hls.defaultAudioCodec; } this.groupIdToCodecsMap_.set(audioGroupId, codecs); } if (videoGroupId) { let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe( ContentType.VIDEO, allCodecs); if (!codecs) { codecs = this.config_.hls.defaultVideoCodec; } this.groupIdToCodecsMap_.set(videoGroupId, codecs); } } } /** * Parse Subtitles and Closed Captions from 'EXT-X-MEDIA' tags. * Create text streams for Subtitles, but not Closed Captions. 
* * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist. * @return {!Array.<!shaka.extern.Stream>} * @private */ parseTexts_(mediaTags) { // Create text stream for each Subtitle media tag. const subtitleTags = shaka.hls.Utils.filterTagsByType(mediaTags, 'SUBTITLES'); const textStreams = subtitleTags.map((tag) => { const disableText = this.config_.disableText; if (disableText) { return null; } try { return this.createStreamInfoFromMediaTag_(tag).stream; } catch (e) { if (this.config_.hls.ignoreTextStreamFailures) { return null; } throw e; } }); const type = shaka.util.ManifestParserUtils.ContentType.TEXT; // Set the codecs for text streams. for (const tag of subtitleTags) { const groupId = tag.getRequiredAttrValue('GROUP-ID'); const codecs = this.groupIdToCodecsMap_.get(groupId); if (codecs) { const textStreamInfos = this.groupIdToStreamInfosMap_.get(groupId); if (textStreamInfos) { for (const textStreamInfo of textStreamInfos) { textStreamInfo.stream.codecs = codecs; textStreamInfo.stream.mimeType = this.guessMimeTypeBeforeLoading_(type, codecs) || this.guessMimeTypeFallback_(type); } } } } // Do not create text streams for Closed captions. return textStreams.filter((s) => s); } /** * @param {!Array.<!shaka.hls.Tag>} imageTags from the playlist. * @return {!Promise.<!Array.<!shaka.extern.Stream>>} * @private */ async parseImages_(imageTags) { // Create image stream for each image tag. const imageStreamPromises = imageTags.map(async (tag) => { const disableThumbnails = this.config_.disableThumbnails; if (disableThumbnails) { return null; } try { const streamInfo = await this.createStreamInfoFromImageTag_(tag); return streamInfo.stream; } catch (e) { if (this.config_.hls.ignoreImageStreamFailures) { return null; } throw e; } }); const imageStreams = await Promise.all(imageStreamPromises); return imageStreams.filter((s) => s); } /** * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist. 
 * @private
 */
createStreamInfosFromMediaTags_(mediaTags) {
  // Filter out subtitles and media tags without uri.  Subtitle tracks are
  // handled elsewhere, and a media tag without a URI can't produce a
  // playable stream here.
  mediaTags = mediaTags.filter((tag) => {
    const uri = tag.getAttributeValue('URI') || '';
    const type = tag.getAttributeValue('TYPE');
    return type != 'SUBTITLES' && uri != '';
  });

  // Create stream info for each audio / video media tag.
  for (const tag of mediaTags) {
    this.createStreamInfoFromMediaTag_(tag);
  }
}

/**
 * Builds variants from EXT-X-STREAM-INF tags, attaching DRM info derived
 * from any EXT-X-SESSION-KEY tags in the playlist.
 *
 * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
 * @param {!Array.<!shaka.hls.Tag>} sessionKeyTags EXT-X-SESSION-KEY tags
 *   from the playlist.
 * @return {!Array.<!shaka.extern.Variant>}
 * @private
 */
createVariantsForTags_(tags, sessionKeyTags) {
  // EXT-X-SESSION-KEY processing
  const drmInfos = [];
  const keyIds = new Set();
  if (sessionKeyTags.length > 0) {
    for (const drmTag of sessionKeyTags) {
      const method = drmTag.getRequiredAttrValue('METHOD');
      // METHOD=NONE means clear content, and AES-128 is not handled through
      // DrmInfo, so neither contributes a DrmInfo entry here.
      if (method != 'NONE' && method != 'AES-128') {
        // According to the HLS spec, KEYFORMAT is optional and implicitly
        // defaults to "identity".
        // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
        const keyFormat =
            drmTag.getAttributeValue('KEYFORMAT') || 'identity';
        const drmParser =
            shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];

        const drmInfo = drmParser ?
            drmParser(drmTag, /* mimeType= */ '') : null;
        if (drmInfo) {
          if (drmInfo.keyIds) {
            // Collect key IDs from every session key; they are applied to
            // all streams of every variant below.
            for (const keyId of drmInfo.keyIds) {
              keyIds.add(keyId);
            }
          }
          drmInfos.push(drmInfo);
        } else {
          shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
        }
      }
    }
  }

  // Create variants for each variant tag.
  const allVariants = tags.map((tag) => {
    const frameRate = tag.getAttributeValue('FRAME-RATE');
    // Prefer AVERAGE-BANDWIDTH when present and non-zero; otherwise fall
    // back to the required (peak) BANDWIDTH attribute.
    const bandwidth =
        Number(tag.getAttributeValue('AVERAGE-BANDWIDTH')) ||
        Number(tag.getRequiredAttrValue('BANDWIDTH'));
    const resolution = tag.getAttributeValue('RESOLUTION');
    // RESOLUTION has the form "WIDTHxHEIGHT"; both stay null when absent.
    const [width, height] = resolution ?
        resolution.split('x') : [null, null];

    const videoRange = tag.getAttributeValue('VIDEO-RANGE');

    const streamInfos = this.createStreamInfosForVariantTag_(tag,
        resolution, frameRate);

    goog.asserts.assert(streamInfos.audio.length || streamInfos.video.length,
        'We should have created a stream!');

    return this.createVariants_(
        streamInfos.audio,
        streamInfos.video,
        bandwidth,
        width,
        height,
        frameRate,
        videoRange,
        drmInfos,
        keyIds);
  });
  // Each map entry is itself an array of variants; flatten to one list.
  let variants = allVariants.reduce(shaka.util.Functional.collapseArrays, []);
  // Filter out null variants.
  variants = variants.filter((variant) => variant != null);
  return variants;
}

/**
 * Create audio and video streamInfos from an 'EXT-X-STREAM-INF' tag and its
 * related media tags.
 *
 * @param {!shaka.hls.Tag} tag
 * @param {?string} resolution
 * @param {?string} frameRate
 * @return {!shaka.hls.HlsParser.StreamInfos}
 * @private
 */
createStreamInfosForVariantTag_(tag, resolution, frameRate) {
  const ContentType = shaka.util.ManifestParserUtils.ContentType;
  /** @type {!Array.<string>} */
  let allCodecs = this.getCodecsForVariantTag_(tag);
  const audioGroupId = tag.getAttributeValue('AUDIO');
  const videoGroupId = tag.getAttributeValue('VIDEO');
  goog.asserts.assert(audioGroupId == null || videoGroupId == null,
      'Unexpected: both video and audio described by media tags!');

  // A variant tag references at most one media group; the referenced group
  // supplies the other half of the audio/video pairing.
  const groupId = audioGroupId || videoGroupId;
  const streamInfos =
      (groupId && this.groupIdToStreamInfosMap_.has(groupId)) ?
      this.groupIdToStreamInfosMap_.get(groupId) : [];

  /** @type {shaka.hls.HlsParser.StreamInfos} */
  const res = {
    audio: audioGroupId ? streamInfos : [],
    video: videoGroupId ? streamInfos : [],
  };

  // Make an educated guess about the stream type.
  shaka.log.debug('Guessing stream type for', tag.toString());
  let type;
  let ignoreStream = false;

  // The Microsoft HLS manifest generators will make audio-only variants
  // that link to their URI both directly and through an audio tag.
  // In that case, ignore the local URI and use the version in the
  // AUDIO tag, so you inherit its language.
  // As an example, see the manifest linked in issue #860.
  const streamURI = tag.getRequiredAttrValue('URI');
  const hasSameUri = res.audio.find((audio) => {
    return audio && audio.verbatimMediaPlaylistUri == streamURI;
  });

  // "Safe" guessing: returns null instead of throwing when the codecs
  // don't identify the content type unambiguously.
  const videoCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
      ContentType.VIDEO, allCodecs);
  const audioCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
      ContentType.AUDIO, allCodecs);

  // NOTE: branch order matters here; each case is only reached when the
  // earlier, more specific cases did not match.
  if (audioCodecs && !videoCodecs) {
    // There are no associated media tags, and there's only audio codec,
    // and no video codec, so it should be audio.
    type = ContentType.AUDIO;
    shaka.log.debug('Guessing audio-only.');
  } else if (!streamInfos.length && audioCodecs && videoCodecs) {
    // There are both audio and video codecs, so assume multiplexed content.
    // Note that the default used when CODECS is missing assumes multiple
    // (and therefore multiplexed).
    // Recombine the codec strings into one so that MediaSource isn't
    // lied to later. (That would trigger an error in Chrome.)
    shaka.log.debug('Guessing multiplexed audio+video.');
    type = ContentType.VIDEO;
    allCodecs = [[videoCodecs, audioCodecs].join(',')];
  } else if (res.audio.length && hasSameUri) {
    // Microsoft-generator case described above: drop the duplicate local
    // URI and rely on the AUDIO-group stream instead.
    shaka.log.debug('Guessing audio-only.');
    type = ContentType.AUDIO;
    ignoreStream = true;
  } else if (res.video.length && !res.audio.length) {
    // There are associated video streams. Assume this is audio.
    shaka.log.debug('Guessing audio-only.');
    type = ContentType.AUDIO;
  } else {
    shaka.log.debug('Guessing video-only.');
    type = ContentType.VIDEO;
  }

  if (!ignoreStream) {
    const streamInfo =
        this.createStreamInfoFromVariantTag_(tag, allCodecs, type);
    res[streamInfo.stream.type] = [streamInfo];
  }
  return res;
}

/**
 * Get the codecs from the 'EXT-X-STREAM-INF' tag.
* * @param {!shaka.hls.Tag} tag * @return {!Array.<string>} codecs * @private */ getCodecsForVariantTag_(tag) { // These are the default codecs to assume if none are specified. const defaultCodecsArray = []; if (!this.config_.disableVideo) { defaultCodecsArray.push(this.config_.hls.defaultVideoCodec); } if (!this.config_.disableAudio) { defaultCodecsArray.push(this.config_.hls.defaultAudioCodec); } const defaultCodecs = defaultCodecsArray.join(','); const codecsString = tag.getAttributeValue('CODECS', defaultCodecs); // Strip out internal whitespace while splitting on commas: /** @type {!Array.<string>} */ const codecs = codecsString.split(/\s*,\s*/); // Filter out duplicate codecs. const seen = new Set(); const ret = []; for (const codec of codecs) { // HLS says the CODECS field needs to include all codecs that appear in // the content. This means that if the content changes profiles, it should // include both. Since all known browsers support changing profiles // without any other work, just ignore them. See also: // https://github.com/shaka-project/shaka-player/issues/1817 const shortCodec = shaka.util.MimeUtils.getCodecBase(codec); if (!seen.has(shortCodec)) { ret.push(codec); seen.add(shortCodec); } else { shaka.log.debug('Ignoring duplicate codec'); } } return ret; } /** * Get the channel count information for an HLS audio track. * CHANNELS specifies an ordered, "/" separated list of parameters. * If the type is audio, the first parameter will be a decimal integer * specifying the number of independent, simultaneous audio channels. * No other channels parameters are currently defined. * * @param {!shaka.hls.Tag} tag * @return {?number} * @private */ getChannelsCount_(tag) { const channels = tag.getAttributeValue('CHANNELS'); if (!channels) { return null; } const channelcountstring = channels.split('/')[0]; const count = parseInt(channelcountstring, 10); return count; } /** * Get the spatial audio information for an HLS audio track. 
* In HLS the channels field indicates the number of audio channels that the * stream has (eg: 2). In the case of Dolby Atmos, the complexity is * expressed with the number of channels followed by the word JOC * (eg: 16/JOC), so 16 would be the number of channels (eg: 7.3.6 layout), * and JOC indicates that the stream has spatial audio. * @see https://developer.apple.com/documentation/http_live_streaming/hls_authoring_specification_for_apple_devices/hls_authoring_specification_for_apple_devices_appendixes * * @param {!shaka.hls.Tag} tag * @return {boolean} * @private */ isSpatialAudio_(tag) { const channels = tag.getAttributeValue('CHANNELS'); if (!channels) { return false; } return channels.includes('/JOC'); } /** * Get the closed captions map information for the EXT-X-STREAM-INF tag, to * create the stream info. * @param {!shaka.hls.Tag} tag * @param {string} type * @return {Map.<string, string>} closedCaptions * @private */ getClosedCaptions_(tag, type) { const ContentType = shaka.util.ManifestParserUtils.ContentType; // The attribute of closed captions is optional, and the value may be // 'NONE'. const closedCaptionsAttr = tag.getAttributeValue('CLOSED-CAPTIONS'); // EXT-X-STREAM-INF tags may have CLOSED-CAPTIONS attributes. // The value can be either a quoted-string or an enumerated-string with // the value NONE. If the value is a quoted-string, it MUST match the // value of the GROUP-ID attribute of an EXT-X-MEDIA tag elsewhere in the // Playlist whose TYPE attribute is CLOSED-CAPTIONS. if (type == ContentType.VIDEO && closedCaptionsAttr && closedCaptionsAttr != 'NONE') { return this.groupIdToClosedCaptionsMap_.get(closedCaptionsAttr); } return null; } /** * Get the language value. * * @param {!shaka.hls.Tag} tag * @return {string} * @private */ getLanguage_(tag) { const LanguageUtils = shaka.util.LanguageUtils; const languageValue = tag.getAttributeValue('LANGUAGE') || 'und'; return LanguageUtils.normalize(languageValue); } /** * Get the type value. 
 * Shaka recognizes the content types 'audio', 'video', 'text', and 'image'.
 * The HLS 'subtitles' type needs to be mapped to 'text'.
 * @param {!shaka.hls.Tag} tag
 * @return {string}
 * @private
 */
getType_(tag) {
  // TYPE is required on EXT-X-MEDIA tags; lower-case it to match Shaka's
  // ContentType constants.
  let type = tag.getRequiredAttrValue('TYPE').toLowerCase();
  if (type == 'subtitles') {
    type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  }
  return type;
}

/**
 * Combines audio and video stream infos into playable variants
 * (cross-product of the two lists), attaching DRM info and key IDs to
 * every stream.
 *
 * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} audioInfos
 * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} videoInfos
 * @param {number} bandwidth
 * @param {?string} width
 * @param {?string} height
 * @param {?string} frameRate
 * @param {?string} videoRange
 * @param {!Array.<shaka.extern.DrmInfo>} drmInfos
 * @param {!Set.<string>} keyIds
 * @return {!Array.<!shaka.extern.Variant>}
 * @private
 */
createVariants_(
    audioInfos, videoInfos, bandwidth, width, height, frameRate, videoRange,
    drmInfos, keyIds) {
  const ContentType = shaka.util.ManifestParserUtils.ContentType;
  const DrmEngine = shaka.media.DrmEngine;

  for (const info of videoInfos) {
    this.addVideoAttributes_(
        info.stream, width, height, frameRate, videoRange);
  }

  // In case of audio-only or video-only content or the audio/video is
  // disabled by the config, we create an array of one item containing
  // a null. This way, the double-loop works for all kinds of content.
  // NOTE: we currently don't have support for audio-only content.
  const disableAudio = this.config_.disableAudio;
  if (!audioInfos.length || disableAudio) {
    audioInfos = [null];
  }
  const disableVideo = this.config_.disableVideo;
  if (!videoInfos.length || disableVideo) {
    videoInfos = [null];
  }

  const variants = [];
  for (const audioInfo of audioInfos) {
    for (const videoInfo of videoInfos) {
      // Apply the session-level DRM info to every stream in the pairing.
      const audioStream = audioInfo ? audioInfo.stream : null;
      if (audioStream) {
        audioStream.drmInfos = drmInfos;
        audioStream.keyIds = keyIds;
      }
      const videoStream = videoInfo ? videoInfo.stream : null;
      if (videoStream) {
        videoStream.drmInfos = drmInfos;
        videoStream.keyIds = keyIds;
      }
      const audioDrmInfos