UNPKG

shaka-player

Version: (not captured in this extract)
File size: 1,415 lines (1,274 lines of code), 93.2 kB
/*! @license * Shaka Player * Copyright 2016 Google LLC * SPDX-License-Identifier: Apache-2.0 */ goog.provide('shaka.media.MediaSourceEngine'); goog.require('goog.asserts'); goog.require('shaka.log'); goog.require('shaka.config.CodecSwitchingStrategy'); goog.require('shaka.media.Capabilities'); goog.require('shaka.media.ContentWorkarounds'); goog.require('shaka.media.ClosedCaptionParser'); goog.require('shaka.media.IClosedCaptionParser'); goog.require('shaka.media.ManifestParser'); goog.require('shaka.media.SegmentReference'); goog.require('shaka.media.TimeRangesUtils'); goog.require('shaka.text.TextEngine'); goog.require('shaka.transmuxer.TransmuxerEngine'); goog.require('shaka.util.BufferUtils'); goog.require('shaka.util.Destroyer'); goog.require('shaka.util.Dom'); goog.require('shaka.util.Error'); goog.require('shaka.util.EventManager'); goog.require('shaka.util.FakeEvent'); goog.require('shaka.util.Functional'); goog.require('shaka.util.IDestroyable'); goog.require('shaka.util.Id3Utils'); goog.require('shaka.util.ManifestParserUtils'); goog.require('shaka.util.MimeUtils'); goog.require('shaka.util.Mp4BoxParsers'); goog.require('shaka.util.Mp4Parser'); goog.require('shaka.util.Platform'); goog.require('shaka.util.PublicPromise'); goog.require('shaka.util.StreamUtils'); goog.require('shaka.util.TsParser'); goog.require('shaka.lcevc.Dec'); /** * @summary * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers. * All asynchronous operations return a Promise, and all operations are * internally synchronized and serialized as needed. Operations that can * be done in parallel will be done in parallel. * * @implements {shaka.util.IDestroyable} */ shaka.media.MediaSourceEngine = class { /** * @param {HTMLMediaElement} video The video element, whose source is tied to * MediaSource during the lifetime of the MediaSourceEngine. * @param {!shaka.extern.TextDisplayer} textDisplayer * The text displayer that will be used with the text engine. 
* MediaSourceEngine takes ownership of the displayer. When * MediaSourceEngine is destroyed, it will destroy the displayer. * @param {!shaka.media.MediaSourceEngine.PlayerInterface} playerInterface * Interface for common player methods. * @param {?shaka.lcevc.Dec} [lcevcDec] Optional - LCEVC Decoder Object */ constructor(video, textDisplayer, playerInterface, lcevcDec) { /** @private {HTMLMediaElement} */ this.video_ = video; /** @private {?shaka.media.MediaSourceEngine.PlayerInterface} */ this.playerInterface_ = playerInterface; /** @private {?shaka.extern.MediaSourceConfiguration} */ this.config_ = null; /** @private {shaka.extern.TextDisplayer} */ this.textDisplayer_ = textDisplayer; /** * @private {!Map<shaka.util.ManifestParserUtils.ContentType, SourceBuffer>} */ this.sourceBuffers_ = new Map(); /** * @private {!Map<shaka.util.ManifestParserUtils.ContentType, string>} */ this.sourceBufferTypes_ = new Map(); /** * @private {!Map<shaka.util.ManifestParserUtils.ContentType, * boolean>} */ this.expectedEncryption_ = new Map(); /** @private {shaka.text.TextEngine} */ this.textEngine_ = null; /** @private {boolean} */ this.segmentRelativeVttTiming_ = false; /** @private {?shaka.lcevc.Dec} */ this.lcevcDec_ = lcevcDec || null; /** * @private {!Map<string, !Array<shaka.media.MediaSourceEngine.Operation>>} */ this.queues_ = new Map(); /** @private {shaka.util.EventManager} */ this.eventManager_ = new shaka.util.EventManager(); /** * @private {!Map<shaka.util.ManifestParserUtils.ContentType, !shaka.extern.Transmuxer>} */ this.transmuxers_ = new Map(); /** @private {?shaka.media.IClosedCaptionParser} */ this.captionParser_ = null; /** @private {!shaka.util.PublicPromise} */ this.mediaSourceOpen_ = new shaka.util.PublicPromise(); /** @private {string} */ this.url_ = ''; /** @private {boolean} */ this.playbackHasBegun_ = false; /** @private {boolean} */ this.streamingAllowed_ = true; /** @private {boolean} */ this.usingRemotePlayback_ = false; /** @private 
{HTMLSourceElement} */ this.source_ = null; /** * Fallback source element with direct media URI, used for casting * purposes. * @private {HTMLSourceElement} */ this.secondarySource_ = null; /** @private {MediaSource} */ this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_); /** @private {boolean} */ this.reloadingMediaSource_ = false; /** @private {boolean} */ this.playAfterReset_ = false; /** @type {!shaka.util.Destroyer} */ this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_()); /** @private {boolean} */ this.sequenceMode_ = false; /** @private {string} */ this.manifestType_ = shaka.media.ManifestParser.UNKNOWN; /** @private {boolean} */ this.ignoreManifestTimestampsInSegmentsMode_ = false; /** @private {boolean} */ this.attemptTimestampOffsetCalculation_ = false; /** @private {!shaka.util.PublicPromise<number>} */ this.textSequenceModeOffset_ = new shaka.util.PublicPromise(); /** @private {boolean} */ this.needSplitMuxedContent_ = false; /** @private {?number} */ this.lastDuration_ = null; /** * @private {!Map<shaka.util.ManifestParserUtils.ContentType, * !shaka.util.TsParser>} */ this.tsParsers_ = new Map(); /** @private {?number} */ this.firstVideoTimestamp_ = null; /** @private {?number} */ this.firstVideoReferenceStartTime_ = null; /** @private {?number} */ this.firstAudioTimestamp_ = null; /** @private {?number} */ this.firstAudioReferenceStartTime_ = null; /** @private {!shaka.util.PublicPromise<number>} */ this.audioCompensation_ = new shaka.util.PublicPromise(); if (this.video_.remote) { this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected'; this.eventManager_.listen(this.video_.remote, 'connect', () => { this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected'; }); this.eventManager_.listen(this.video_.remote, 'connecting', () => { this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected'; }); this.eventManager_.listen(this.video_.remote, 'disconnect', () => { this.usingRemotePlayback_ 
= this.video_.remote.state != 'disconnected'; }); } } /** * Create a MediaSource object, attach it to the video element, and return it. * Resolves the given promise when the MediaSource is ready. * * Replaced by unit tests. * * @param {!shaka.util.PublicPromise} p * @return {!MediaSource} */ createMediaSource(p) { this.streamingAllowed_ = true; /** @type {!MediaSource} */ let mediaSource; if (window.ManagedMediaSource) { if (!this.secondarySource_) { this.video_.disableRemotePlayback = true; } mediaSource = new ManagedMediaSource(); this.eventManager_.listen( mediaSource, 'startstreaming', () => { shaka.log.info('MMS startstreaming'); this.streamingAllowed_ = true; }); this.eventManager_.listen( mediaSource, 'endstreaming', () => { shaka.log.info('MMS endstreaming'); this.streamingAllowed_ = false; }); } else { mediaSource = new MediaSource(); } // Set up MediaSource on the video element. this.eventManager_.listenOnce( mediaSource, 'sourceopen', () => this.onSourceOpen_(p)); // Correctly set when playback has begun. this.eventManager_.listenOnce(this.video_, 'playing', () => { this.playbackHasBegun_ = true; }); // Store the object URL for releasing it later. 
this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource); this.video_.removeAttribute('src'); if (this.source_) { this.video_.removeChild(this.source_); } if (this.secondarySource_) { this.video_.removeChild(this.secondarySource_); } this.source_ = shaka.util.Dom.createSourceElement(this.url_); this.video_.appendChild(this.source_); if (this.secondarySource_) { this.video_.appendChild(this.secondarySource_); } this.video_.load(); return mediaSource; } /** * @param {string} uri * @param {string} mimeType */ addSecondarySource(uri, mimeType) { if (!this.video_ || !window.ManagedMediaSource || !this.mediaSource_) { shaka.log.warning( 'Secondary source is used only with ManagedMediaSource'); return; } if (this.secondarySource_) { this.video_.removeChild(this.secondarySource_); } this.secondarySource_ = shaka.util.Dom.createSourceElement(uri, mimeType); this.video_.appendChild(this.secondarySource_); this.video_.disableRemotePlayback = false; } /** * @param {shaka.util.PublicPromise} p * @private */ onSourceOpen_(p) { goog.asserts.assert(this.url_, 'Must have object URL'); // Release the object URL that was previously created, to prevent memory // leak. // createObjectURL creates a strong reference to the MediaSource object // inside the browser. Setting the src of the video then creates another // reference within the video element. revokeObjectURL will remove the // strong reference to the MediaSource object, and allow it to be // garbage-collected later. URL.revokeObjectURL(this.url_); p.resolve(); } /** * Checks if a certain type is supported. 
* * @param {shaka.extern.Stream} stream * @param {shaka.util.ManifestParserUtils.ContentType} contentType * @return {!Promise<boolean>} */ static async isStreamSupported(stream, contentType) { if (stream.createSegmentIndex) { await stream.createSegmentIndex(); } if (!stream.segmentIndex) { return false; } if (stream.segmentIndex.isEmpty()) { return true; } const MimeUtils = shaka.util.MimeUtils; const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine; const ContentType = shaka.util.ManifestParserUtils.ContentType; const StreamUtils = shaka.util.StreamUtils; const seenCombos = new Set(); // Check each combination of mimeType and codecs within the segment index. // Unfortunately we cannot use fullMimeTypes, as we ALSO need to check the // getFullTypeWithAllCodecs (for the sake of the transmuxer) and we have no // way of going from a full mimeType to a full mimeType with all codecs. // As this function is only called in debug mode, a little inefficiency is // acceptable. for (const ref of stream.segmentIndex) { const mimeType = ref.mimeType || stream.mimeType || ''; let codecs = ref.codecs || stream.codecs || ''; // Optimization for the case where the codecs and mimetype of the stream // match the reference. if (mimeType == stream.mimeType && codecs == stream.codecs) { continue; } // Don't check the same combination of mimetype + codecs twice. 
const combo = mimeType + ':' + codecs; if (seenCombos.has(combo)) { continue; } seenCombos.add(combo); if (contentType == ContentType.TEXT) { const fullMimeType = MimeUtils.getFullType(mimeType, codecs); if (!shaka.text.TextEngine.isTypeSupported(fullMimeType)) { return false; } } else { if (contentType == ContentType.VIDEO) { codecs = StreamUtils.getCorrectVideoCodecs(codecs); } else if (contentType == ContentType.AUDIO) { codecs = StreamUtils.getCorrectAudioCodecs(codecs, mimeType); } const extendedMimeType = MimeUtils.getExtendedType( stream, mimeType, codecs); const fullMimeType = MimeUtils.getFullTypeWithAllCodecs( mimeType, codecs); if (!shaka.media.Capabilities.isTypeSupported(extendedMimeType) && !TransmuxerEngine.isSupported(fullMimeType, stream.type)) { return false; } } } return true; } /** * Returns a map of MediaSource support for well-known types. * * @return {!Object<string, boolean>} */ static probeSupport() { const testMimeTypes = [ // MP4 types 'video/mp4; codecs="avc1.42E01E"', 'video/mp4; codecs="avc3.42E01E"', 'video/mp4; codecs="hev1.1.6.L93.90"', 'video/mp4; codecs="hvc1.1.6.L93.90"', 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC 'video/mp4; codecs="vp9"', 'video/mp4; codecs="vp09.00.10.08"', 'video/mp4; codecs="av01.0.01M.08"', 'video/mp4; codecs="dvh1.05.01"', 'video/mp4; codecs="dvh1.20.01"', 'audio/mp4; codecs="mp4a.40.2"', 'audio/mp4; codecs="ac-3"', 'audio/mp4; codecs="ec-3"', 'audio/mp4; codecs="ac-4.02.01.01"', 'audio/mp4; codecs="opus"', 'audio/mp4; codecs="flac"', 'audio/mp4; codecs="dtsc"', // DTS Digital Surround 'audio/mp4; codecs="dtse"', // DTS Express 'audio/mp4; codecs="dtsx"', // DTS:X // WebM types 'video/webm; codecs="vp8"', 'video/webm; codecs="vp9"', 'video/webm; codecs="vp09.00.10.08"', 'audio/webm; codecs="vorbis"', 'audio/webm; codecs="opus"', // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse) 
'video/mp2t; codecs="avc1.42E01E"', 'video/mp2t; codecs="avc3.42E01E"', 'video/mp2t; codecs="hvc1.1.6.L93.90"', 'video/mp2t; codecs="mp4a.40.2"', 'video/mp2t; codecs="ac-3"', 'video/mp2t; codecs="ec-3"', // WebVTT types 'text/vtt', 'application/mp4; codecs="wvtt"', // TTML types 'application/ttml+xml', 'application/mp4; codecs="stpp"', // Containerless types ...shaka.util.MimeUtils.RAW_FORMATS, ]; const support = {}; for (const type of testMimeTypes) { if (shaka.text.TextEngine.isTypeSupported(type)) { support[type] = true; } else if (shaka.util.Platform.supportsMediaSource()) { support[type] = shaka.media.Capabilities.isTypeSupported(type) || shaka.transmuxer.TransmuxerEngine.isSupported(type); } else { support[type] = shaka.util.Platform.supportsMediaType(type); } const basicType = type.split(';')[0]; support[basicType] = support[basicType] || support[type]; } return support; } /** @override */ destroy() { return this.destroyer_.destroy(); } /** @private */ async doDestroy_() { const Functional = shaka.util.Functional; const cleanup = []; for (const [key, q] of this.queues_) { // Make a local copy of the queue and the first item. const inProgress = q[0]; const contentType = /** @type {string} */(key); // Drop everything else out of the original queue. this.queues_.set(contentType, q.slice(0, 1)); // We will wait for this item to complete/fail. if (inProgress) { cleanup.push(inProgress.p.catch(Functional.noop)); } // The rest will be rejected silently if possible. 
for (const item of q.slice(1)) { item.p.reject(shaka.util.Destroyer.destroyedError()); } } if (this.textEngine_) { cleanup.push(this.textEngine_.destroy()); } await Promise.all(cleanup); for (const transmuxer of this.transmuxers_.values()) { transmuxer.destroy(); } if (this.eventManager_) { this.eventManager_.release(); this.eventManager_ = null; } if (this.video_ && this.secondarySource_) { this.video_.removeChild(this.secondarySource_); } if (this.video_ && this.source_) { // "unload" the video element. this.video_.removeChild(this.source_); this.video_.load(); this.video_.disableRemotePlayback = false; } this.video_ = null; this.source_ = null; this.secondarySource_ = null; this.config_ = null; this.mediaSource_ = null; this.textEngine_ = null; this.textDisplayer_ = null; this.sourceBuffers_.clear(); this.expectedEncryption_.clear(); this.transmuxers_.clear(); this.captionParser_ = null; if (goog.DEBUG) { for (const [contentType, q] of this.queues_) { goog.asserts.assert( q.length == 0, contentType + ' queue should be empty after destroy!'); } } this.queues_.clear(); // This object is owned by Player this.lcevcDec_ = null; this.tsParsers_.clear(); this.playerInterface_ = null; } /** * @return {!Promise} Resolved when MediaSource is open and attached to the * media element. This process is actually initiated by the constructor. */ open() { return this.mediaSourceOpen_; } /** * Initialize MediaSourceEngine. * * Note that it is not valid to call this multiple times, except to add or * reinitialize text streams. * * @param {!Map<shaka.util.ManifestParserUtils.ContentType, * shaka.extern.Stream>} streamsByType * A map of content types to streams. All streams must be supported * according to MediaSourceEngine.isStreamSupported. * @param {boolean=} sequenceMode * If true, the media segments are appended to the SourceBuffer in strict * sequence. * @param {string=} manifestType * Indicates the type of the manifest. 
* @param {boolean=} ignoreManifestTimestampsInSegmentsMode * If true, don't adjust the timestamp offset to account for manifest * segment durations being out of sync with segment durations. In other * words, assume that there are no gaps in the segments when appending * to the SourceBuffer, even if the manifest and segment times disagree. * Indicates if the manifest has text streams. * * @return {!Promise} */ async init(streamsByType, sequenceMode=false, manifestType=shaka.media.ManifestParser.UNKNOWN, ignoreManifestTimestampsInSegmentsMode=false) { const ContentType = shaka.util.ManifestParserUtils.ContentType; await this.mediaSourceOpen_; if (this.ended() || this.closed()) { shaka.log.alwaysError('Expected MediaSource to be open during init(); ' + 'reopening the media source.'); this.mediaSourceOpen_ = new shaka.util.PublicPromise(); this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_); await this.mediaSourceOpen_; } this.sequenceMode_ = sequenceMode; this.manifestType_ = manifestType; this.ignoreManifestTimestampsInSegmentsMode_ = ignoreManifestTimestampsInSegmentsMode; this.attemptTimestampOffsetCalculation_ = !this.sequenceMode_ && this.manifestType_ == shaka.media.ManifestParser.HLS && !this.ignoreManifestTimestampsInSegmentsMode_; this.tsParsers_.clear(); this.firstVideoTimestamp_ = null; this.firstVideoReferenceStartTime_ = null; this.firstAudioTimestamp_ = null; this.firstAudioReferenceStartTime_ = null; this.audioCompensation_ = new shaka.util.PublicPromise(); for (const contentType of streamsByType.keys()) { const stream = streamsByType.get(contentType); // eslint-disable-next-line no-await-in-loop await this.initSourceBuffer_(contentType, stream, stream.codecs); if (this.needSplitMuxedContent_) { this.queues_.set(ContentType.AUDIO, []); this.queues_.set(ContentType.VIDEO, []); } else { this.queues_.set(contentType, []); } } const audio = streamsByType.get(ContentType.AUDIO); if (audio && audio.isAudioMuxedInVideo) { 
this.needSplitMuxedContent_ = true; } } /** * Initialize a specific SourceBuffer. * * @param {shaka.util.ManifestParserUtils.ContentType} contentType * @param {shaka.extern.Stream} stream * @param {string} codecs * @return {!Promise} * @private */ async initSourceBuffer_(contentType, stream, codecs) { const ContentType = shaka.util.ManifestParserUtils.ContentType; goog.asserts.assert( await shaka.media.MediaSourceEngine.isStreamSupported( stream, contentType), 'Type negotiation should happen before MediaSourceEngine.init!'); if (contentType == ContentType.AUDIO && codecs) { codecs = shaka.util.StreamUtils.getCorrectAudioCodecs( codecs, stream.mimeType); } let mimeType = shaka.util.MimeUtils.getFullType( stream.mimeType, codecs); if (contentType == ContentType.TEXT) { this.reinitText(mimeType, this.sequenceMode_, stream.external); } else { let needTransmux = this.config_.forceTransmux; if (!shaka.media.Capabilities.isTypeSupported(mimeType) || (!this.sequenceMode_ && shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType))) { needTransmux = true; } const mimeTypeWithAllCodecs = shaka.util.MimeUtils.getFullTypeWithAllCodecs( stream.mimeType, codecs); if (needTransmux) { const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe( ContentType.AUDIO, (codecs || '').split(',')); const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe( ContentType.VIDEO, (codecs || '').split(',')); if (audioCodec && videoCodec) { this.needSplitMuxedContent_ = true; await this.initSourceBuffer_(ContentType.AUDIO, stream, audioCodec); await this.initSourceBuffer_(ContentType.VIDEO, stream, videoCodec); return; } const transmuxerPlugin = shaka.transmuxer.TransmuxerEngine .findTransmuxer(mimeTypeWithAllCodecs); if (transmuxerPlugin) { const transmuxer = transmuxerPlugin(); this.transmuxers_.set(contentType, transmuxer); mimeType = transmuxer.convertCodecs(contentType, mimeTypeWithAllCodecs); } } const type = this.addExtraFeaturesToMimeType_(mimeType); 
this.destroyer_.ensureNotDestroyed(); let sourceBuffer; try { sourceBuffer = this.mediaSource_.addSourceBuffer(type); } catch (exception) { throw new shaka.util.Error( shaka.util.Error.Severity.CRITICAL, shaka.util.Error.Category.MEDIA, shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW, exception, 'The mediaSource_ status was ' + this.mediaSource_.readyState + ' expected \'open\'', null); } if (this.sequenceMode_) { sourceBuffer.mode = shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE; } this.eventManager_.listen( sourceBuffer, 'error', () => this.onError_(contentType)); this.eventManager_.listen( sourceBuffer, 'updateend', () => this.onUpdateEnd_(contentType)); this.sourceBuffers_.set(contentType, sourceBuffer); this.sourceBufferTypes_.set(contentType, mimeType); this.expectedEncryption_.set(contentType, !!stream.drmInfos.length); } } /** * Called by the Player to provide an updated configuration any time it * changes. Must be called at least once before init(). * * @param {shaka.extern.MediaSourceConfiguration} config */ configure(config) { this.config_ = config; if (this.textEngine_) { this.textEngine_.setModifyCueCallback(config.modifyCueCallback); } } /** * Indicate if the streaming is allowed by MediaSourceEngine. * If we using MediaSource we always returns true. * * @return {boolean} */ isStreamingAllowed() { return this.streamingAllowed_ && !this.usingRemotePlayback_ && !this.reloadingMediaSource_; } /** * Reinitialize the TextEngine for a new text type. 
* @param {string} mimeType * @param {boolean} sequenceMode * @param {boolean} external */ reinitText(mimeType, sequenceMode, external) { if (!this.textEngine_) { this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_); if (this.textEngine_) { this.textEngine_.setModifyCueCallback(this.config_.modifyCueCallback); } } this.textEngine_.initParser(mimeType, sequenceMode, external || this.segmentRelativeVttTiming_, this.manifestType_); } /** * @return {boolean} True if the MediaSource is in an "ended" state, or if the * object has been destroyed. */ ended() { if (this.reloadingMediaSource_) { return false; } return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true; } /** * @return {boolean} True if the MediaSource is in an "closed" state, or if * the object has been destroyed. */ closed() { if (this.reloadingMediaSource_) { return false; } return this.mediaSource_ ? this.mediaSource_.readyState == 'closed' : true; } /** * Gets the first timestamp in buffer for the given content type. * * @param {shaka.util.ManifestParserUtils.ContentType} contentType * @return {?number} The timestamp in seconds, or null if nothing is buffered. */ bufferStart(contentType) { if (!this.sourceBuffers_.size) { return null; } const ContentType = shaka.util.ManifestParserUtils.ContentType; if (contentType == ContentType.TEXT) { return this.textEngine_.bufferStart(); } return shaka.media.TimeRangesUtils.bufferStart( this.getBuffered_(contentType)); } /** * Gets the last timestamp in buffer for the given content type. * * @param {shaka.util.ManifestParserUtils.ContentType} contentType * @return {?number} The timestamp in seconds, or null if nothing is buffered. 
*/ bufferEnd(contentType) { if (!this.sourceBuffers_.size) { return null; } const ContentType = shaka.util.ManifestParserUtils.ContentType; if (contentType == ContentType.TEXT) { return this.textEngine_.bufferEnd(); } return shaka.media.TimeRangesUtils.bufferEnd( this.getBuffered_(contentType)); } /** * Determines if the given time is inside the buffered range of the given * content type. * * @param {shaka.util.ManifestParserUtils.ContentType} contentType * @param {number} time Playhead time * @return {boolean} */ isBuffered(contentType, time) { const ContentType = shaka.util.ManifestParserUtils.ContentType; if (contentType == ContentType.TEXT) { return this.textEngine_.isBuffered(time); } else { const buffered = this.getBuffered_(contentType); return shaka.media.TimeRangesUtils.isBuffered(buffered, time); } } /** * Computes how far ahead of the given timestamp is buffered for the given * content type. * * @param {shaka.util.ManifestParserUtils.ContentType} contentType * @param {number} time * @return {number} The amount of time buffered ahead in seconds. */ bufferedAheadOf(contentType, time) { const ContentType = shaka.util.ManifestParserUtils.ContentType; if (contentType == ContentType.TEXT) { return this.textEngine_.bufferedAheadOf(time); } else { const buffered = this.getBuffered_(contentType); return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time); } } /** * Returns info about what is currently buffered. * @return {shaka.extern.BufferedInfo} */ getBufferedInfo() { const ContentType = shaka.util.ManifestParserUtils.ContentType; const TimeRangesUtils = shaka.media.TimeRangesUtils; const info = { total: this.reloadingMediaSource_ ? 
[] : TimeRangesUtils.getBufferedInfo(this.video_.buffered), audio: TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.AUDIO)), video: TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.VIDEO)), text: [], }; if (this.textEngine_) { const start = this.textEngine_.bufferStart(); const end = this.textEngine_.bufferEnd(); if (start != null && end != null) { info.text.push({start: start, end: end}); } } return info; } /** * @param {shaka.util.ManifestParserUtils.ContentType} contentType * @return {TimeRanges} The buffered ranges for the given content type, or * null if the buffered ranges could not be obtained. * @private */ getBuffered_(contentType) { if (this.reloadingMediaSource_ || this.usingRemotePlayback_) { return null; } try { return this.sourceBuffers_.get(contentType).buffered; } catch (exception) { if (this.sourceBuffers_.has(contentType)) { // Note: previous MediaSource errors may cause access to |buffered| to // throw. shaka.log.error('failed to get buffered range for ' + contentType, exception); } return null; } } /** * Create a new closed caption parser. This will ONLY be replaced by tests as * a way to inject fake closed caption parser instances. * * @param {string} mimeType * @return {!shaka.media.IClosedCaptionParser} */ getCaptionParser(mimeType) { return new shaka.media.ClosedCaptionParser(mimeType); } /** * This method is only public for testing. 
* * @param {shaka.util.ManifestParserUtils.ContentType} contentType * @param {!BufferSource} data * @param {!shaka.media.SegmentReference} reference The segment reference * we are appending * @param {shaka.extern.Stream} stream * @param {!string} mimeType * @return {{timestamp: ?number, metadata: !Array<shaka.extern.ID3Metadata>}} */ getTimestampAndDispatchMetadata(contentType, data, reference, stream, mimeType) { let timestamp = null; let metadata = []; const uint8ArrayData = shaka.util.BufferUtils.toUint8(data); if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) { const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData); if (frames.length && reference) { const metadataTimestamp = frames.find((frame) => { return frame.description === 'com.apple.streaming.transportStreamTimestamp'; }); if (metadataTimestamp) { timestamp = Math.round(metadataTimestamp.data) / 1000; } /** @private {shaka.extern.ID3Metadata} */ const id3Metadata = { cueTime: reference.startTime, data: uint8ArrayData, frames: frames, dts: reference.startTime, pts: reference.startTime, }; this.playerInterface_.onMetadata( [id3Metadata], /* offset= */ 0, reference.endTime); } } else if (mimeType.includes('/mp4') && reference && reference.initSegmentReference && reference.initSegmentReference.timescale) { const timescale = reference.initSegmentReference.timescale; if (!isNaN(timescale)) { const hasEmsg = ((stream.emsgSchemeIdUris != null && stream.emsgSchemeIdUris.length > 0) || this.config_.dispatchAllEmsgBoxes); const Mp4Parser = shaka.util.Mp4Parser; let startTime = 0; let parsedMedia = false; const parser = new Mp4Parser(); if (hasEmsg) { parser.fullBox('emsg', (box) => this.parseEMSG_(reference, stream.emsgSchemeIdUris, box)); } parser.fullBox('prft', (box) => this.parsePrft_(timescale, box)) .box('moof', Mp4Parser.children) .box('traf', Mp4Parser.children) .fullBox('tfdt', (box) => { if (!parsedMedia) { goog.asserts.assert( box.version == 0 || box.version == 1, 'TFDT version can only be 
0 or 1'); const parsed = shaka.util.Mp4BoxParsers.parseTFDTInaccurate( box.reader, box.version); startTime = parsed.baseMediaDecodeTime / timescale; parsedMedia = true; if (!hasEmsg) { box.parser.stop(); } } }).parse(data, /* partialOkay= */ true); if (parsedMedia && reference.timestampOffset == 0) { timestamp = startTime; } } } else if (!mimeType.includes('/mp4') && !mimeType.includes('/webm') && shaka.util.TsParser.probe(uint8ArrayData)) { if (!this.tsParsers_.has(contentType)) { this.tsParsers_.set(contentType, new shaka.util.TsParser()); } else { this.tsParsers_.get(contentType).clearData(); } const tsParser = this.tsParsers_.get(contentType).parse(uint8ArrayData); const startTime = tsParser.getStartTime(contentType); if (startTime != null) { timestamp = startTime; } metadata = tsParser.getMetadata(); } return {timestamp, metadata}; } /** * Parse the EMSG box from a MP4 container. * * @param {!shaka.media.SegmentReference} reference * @param {?Array<string>} emsgSchemeIdUris Array of emsg * scheme_id_uri for which emsg boxes should be parsed. 
* @param {!shaka.extern.ParsedBox} box * @private * https://dashif-documents.azurewebsites.net/Events/master/event.html#emsg-format * aligned(8) class DASHEventMessageBox * extends FullBox(‘emsg’, version, flags = 0){ * if (version==0) { * string scheme_id_uri; * string value; * unsigned int(32) timescale; * unsigned int(32) presentation_time_delta; * unsigned int(32) event_duration; * unsigned int(32) id; * } else if (version==1) { * unsigned int(32) timescale; * unsigned int(64) presentation_time; * unsigned int(32) event_duration; * unsigned int(32) id; * string scheme_id_uri; * string value; * } * unsigned int(8) message_data[]; */ parseEMSG_(reference, emsgSchemeIdUris, box) { let timescale; let id; let eventDuration; let schemeId; let startTime; let presentationTimeDelta; let value; if (box.version === 0) { schemeId = box.reader.readTerminatedString(); value = box.reader.readTerminatedString(); timescale = box.reader.readUint32(); presentationTimeDelta = box.reader.readUint32(); eventDuration = box.reader.readUint32(); id = box.reader.readUint32(); startTime = reference.startTime + (presentationTimeDelta / timescale); } else { timescale = box.reader.readUint32(); const pts = box.reader.readUint64(); startTime = (pts / timescale) + reference.timestampOffset; presentationTimeDelta = startTime - reference.startTime; eventDuration = box.reader.readUint32(); id = box.reader.readUint32(); schemeId = box.reader.readTerminatedString(); value = box.reader.readTerminatedString(); } const messageData = box.reader.readBytes( box.reader.getLength() - box.reader.getPosition()); // See DASH sec. 5.10.3.3.1 // If a DASH client detects an event message box with a scheme that is not // defined in MPD, the client is expected to ignore it. if ((emsgSchemeIdUris && emsgSchemeIdUris.includes(schemeId)) || this.config_.dispatchAllEmsgBoxes) { // See DASH sec. 5.10.4.1 // A special scheme in DASH used to signal manifest updates. 
if (schemeId == 'urn:mpeg:dash:event:2012') { this.playerInterface_.onManifestUpdate(); } else { // All other schemes are dispatched as a general 'emsg' event. const endTime = startTime + (eventDuration / timescale); /** @type {shaka.extern.EmsgInfo} */ const emsg = { startTime: startTime, endTime: endTime, schemeIdUri: schemeId, value: value, timescale: timescale, presentationTimeDelta: presentationTimeDelta, eventDuration: eventDuration, id: id, messageData: messageData, }; // Dispatch an event to notify the application about the emsg box. const eventName = shaka.util.FakeEvent.EventName.Emsg; const data = (new Map()).set('detail', emsg); const event = new shaka.util.FakeEvent(eventName, data); // A user can call preventDefault() on a cancelable event. event.cancelable = true; this.playerInterface_.onEmsg(emsg); // Additionally, ID3 events generate a 'metadata' event. This is a // pre-parsed version of the metadata blob already dispatched in the // 'emsg' event. if (schemeId == 'https://aomedia.org/emsg/ID3' || schemeId == 'https://developer.apple.com/streaming/emsg-id3') { // See https://aomediacodec.github.io/id3-emsg/ const frames = shaka.util.Id3Utils.getID3Frames(messageData); if (frames.length) { /** @private {shaka.extern.ID3Metadata} */ const metadata = { cueTime: startTime, data: messageData, frames: frames, dts: startTime, pts: startTime, }; this.playerInterface_.onMetadata( [metadata], /* offset= */ 0, endTime); } } } } } /** * Parse PRFT box. 
* @param {number} timescale * @param {!shaka.extern.ParsedBox} box * @private */ parsePrft_(timescale, box) { goog.asserts.assert( box.version == 0 || box.version == 1, 'PRFT version can only be 0 or 1'); const parsed = shaka.util.Mp4BoxParsers.parsePRFTInaccurate( box.reader, box.version); const wallClockTime = this.convertNtp_(parsed.ntpTimestamp); const programStartDate = new Date(wallClockTime - (parsed.mediaTime / timescale) * 1000); /** @type {shaka.extern.ProducerReferenceTime} */ const prftInfo = { wallClockTime, programStartDate, }; const eventName = shaka.util.FakeEvent.EventName.Prft; const data = (new Map()).set('detail', prftInfo); const event = new shaka.util.FakeEvent( eventName, data); this.playerInterface_.onEvent(event); } /** * Convert Ntp ntpTimeStamp to UTC Time * * @param {number} ntpTimeStamp * @return {number} utcTime * @private */ convertNtp_(ntpTimeStamp) { const start = new Date(Date.UTC(1900, 0, 1, 0, 0, 0)); return new Date(start.getTime() + ntpTimeStamp).getTime(); } /** * Enqueue an operation to append data to the SourceBuffer. * Start and end times are needed for TextEngine, but not for MediaSource. * Start and end times may be null for initialization segments; if present * they are relative to the presentation timeline. * * @param {shaka.util.ManifestParserUtils.ContentType} contentType * @param {!BufferSource} data * @param {?shaka.media.SegmentReference} reference The segment reference * we are appending, or null for init segments * @param {shaka.extern.Stream} stream * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed * captions * @param {boolean=} seeked True if we just seeked * @param {boolean=} adaptation True if we just automatically switched active * variant(s). * @param {boolean=} isChunkedData True if we add to the buffer from the * partial read of the segment. 
   * @param {boolean=} fromSplit True when this call is the internal recursive
   *     half of a muxed-content append (see needSplitMuxedContent_ below);
   *     callers outside this method should not set it.
   * @return {!Promise}
   */
  async appendBuffer(
      contentType, data, reference, stream, hasClosedCaptions, seeked = false,
      adaptation = false, isChunkedData = false, fromSplit = false) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    // Text never touches MediaSource; it is routed entirely to TextEngine.
    if (contentType == ContentType.TEXT) {
      if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
        // This won't be known until the first video segment is appended.
        const offset = await this.textSequenceModeOffset_;
        this.textEngine_.setTimestampOffset(offset);
      }
      await this.textEngine_.appendBuffer(
          data,
          reference ? reference.startTime : null,
          reference ? reference.endTime : null,
          reference ? reference.getUris()[0] : null);
      return;
    }

    // Muxed content that must be split: append the same payload once to the
    // audio buffer and once to the video buffer, marking the recursive calls
    // with fromSplit so they don't recurse again.
    if (!fromSplit && this.needSplitMuxedContent_) {
      await this.appendBuffer(ContentType.AUDIO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      await this.appendBuffer(ContentType.VIDEO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      return;
    }

    if (!this.sourceBuffers_.has(contentType)) {
      shaka.log.warning('Attempted to restore a non-existent source buffer');
      return;
    }

    let timestampOffset = this.sourceBuffers_.get(contentType).timestampOffset;

    // Prefer the pre-transmux MIME type when a transmuxer is in use, since
    // timestamp/metadata extraction below operates on the original container.
    let mimeType = this.sourceBufferTypes_.get(contentType);
    if (this.transmuxers_.has(contentType)) {
      mimeType = this.transmuxers_.get(contentType).getOriginalMimeType();
    }
    if (reference) {
      // Extract the media timestamp from the segment itself and dispatch any
      // in-band metadata found along the way.
      const {timestamp, metadata} = this.getTimestampAndDispatchMetadata(
          contentType, data, reference, stream, mimeType);
      if (timestamp != null) {
        // Record the first observed video/audio timestamps; once both are
        // known, resolve audioCompensation_ with their difference so audio
        // appends can be aligned with video (see the HLS branch below).
        if (this.firstVideoTimestamp_ == null &&
            contentType == ContentType.VIDEO) {
          this.firstVideoTimestamp_ = timestamp;
          this.firstVideoReferenceStartTime_ = reference.startTime;
          if (this.firstAudioTimestamp_ != null) {
            let compensation = 0;
            // Only apply compensation if video and audio segment startTime
            // match, to avoid introducing sync issues.
            if (this.firstVideoReferenceStartTime_ ==
                this.firstAudioReferenceStartTime_) {
              compensation =
                  this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
            }
            this.audioCompensation_.resolve(compensation);
          }
        }
        if (this.firstAudioTimestamp_ == null &&
            contentType == ContentType.AUDIO) {
          this.firstAudioTimestamp_ = timestamp;
          this.firstAudioReferenceStartTime_ = reference.startTime;
          if (this.firstVideoTimestamp_ != null) {
            let compensation = 0;
            // Only apply compensation if video and audio segment startTime
            // match, to avoid introducing sync issues.
            if (this.firstVideoReferenceStartTime_ ==
                this.firstAudioReferenceStartTime_) {
              compensation =
                  this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
            }
            this.audioCompensation_.resolve(compensation);
          }
        }
        let realTimestamp = timestamp;
        const RAW_FORMATS = shaka.util.MimeUtils.RAW_FORMATS;
        // For formats without containers and using segments mode, we need to
        // adjust TimestampOffset relative to 0 because segments do not have
        // any timestamp information.
        if (!this.sequenceMode_ &&
            RAW_FORMATS.includes(this.sourceBufferTypes_.get(contentType))) {
          realTimestamp = 0;
        }
        // If the extracted timestamp disagrees with the current offset by
        // >= 1ms (or we just seeked/adapted), re-align the SourceBuffer.
        // abort() must be enqueued first in case a previous appendBuffer()
        // left the parser mid-segment.
        const calculatedTimestampOffset = reference.startTime - realTimestamp;
        const timestampOffsetDifference =
            Math.abs(timestampOffset - calculatedTimestampOffset);
        if ((timestampOffsetDifference >= 0.001 || seeked || adaptation) &&
            (!isChunkedData || calculatedTimestampOffset > 0 ||
            !timestampOffset)) {
          timestampOffset = calculatedTimestampOffset;
          if (this.attemptTimestampOffsetCalculation_) {
            this.enqueueOperation_(
                contentType, () => this.abort_(contentType), null);
            this.enqueueOperation_(
                contentType,
                () => this.setTimestampOffset_(contentType, timestampOffset),
                null);
          }
        }
        // Timestamps can only be reliably extracted from video, not audio.
        // Packed audio formats do not have internal timestamps at all.
        // Prefer video for this when available.
        const isBestSourceBufferForTimestamps =
            contentType == ContentType.VIDEO ||
            !(this.sourceBuffers_.has(ContentType.VIDEO));
        if (isBestSourceBufferForTimestamps) {
          this.textSequenceModeOffset_.resolve(timestampOffset);
        }
      }
      if (metadata.length) {
        this.playerInterface_.onMetadata(metadata, timestampOffset,
            reference ? reference.endTime : null);
      }
    }
    // CEA closed captions ride inside the video stream; lazily create the
    // text engine and caption parser the first time they are needed.
    if (hasClosedCaptions && contentType == ContentType.VIDEO) {
      if (!this.textEngine_) {
        this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
            this.sequenceMode_, /* external= */ false);
      }
      if (!this.captionParser_) {
        const basicType = mimeType.split(';', 1)[0];
        this.captionParser_ = this.getCaptionParser(basicType);
      }
      // If it is the init segment for closed captions, initialize the closed
      // caption parser.
      if (!reference) {
        this.captionParser_.init(data, adaptation);
      } else {
        const closedCaptions = this.captionParser_.parseFrom(data);
        if (closedCaptions.length) {
          this.textEngine_.storeAndAppendClosedCaptions(
              closedCaptions,
              reference.startTime, reference.endTime, timestampOffset);
        }
      }
    }

    // Convert the payload into a container MediaSource accepts, when a
    // transmuxer is configured for this content type.
    if (this.transmuxers_.has(contentType)) {
      data = await this.transmuxers_.get(contentType).transmux(
          data, stream, reference, this.mediaSource_.duration, contentType);
    }

    data = this.workAroundBrokenPlatforms_(
        stream, data, reference, contentType);

    if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
      // In sequence mode, for non-text streams, if we just cleared the buffer
      // and are either performing an unbuffered seek or handling an automatic
      // adaptation, we need to set a new timestampOffset on the sourceBuffer.
      if (seeked || adaptation) {
        let timestampOffset = reference.startTime;
        // Audio and video may not be aligned, so we will compensate for audio
        // if necessary.
        if (this.manifestType_ == shaka.media.ManifestParser.HLS &&
            !this.needSplitMuxedContent_ &&
            contentType == ContentType.AUDIO &&
            this.sourceBuffers_.has(ContentType.VIDEO)) {
          const compensation = await this.audioCompensation_;
          // Only apply compensation if the difference is greater than 150ms
          if (Math.abs(compensation) > 0.15) {
            timestampOffset -= compensation;
          }
        }
        // The logic to call abort() before setting the timestampOffset is
        // extended during unbuffered seeks or automatic adaptations; it is
        // possible for the append state to be PARSING_MEDIA_SEGMENT from the
        // previous SourceBuffer#appendBuffer() call.
        this.enqueueOperation_(
            contentType, () => this.abort_(contentType), null);
        this.enqueueOperation_(
            contentType,
            () => this.setTimestampOffset_(contentType, timestampOffset),
            null);
      }
    }

    let bufferedBefore = null;

    // The actual append is serialized through the per-type operation queue.
    await this.enqueueOperation_(contentType, () => {
      if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
        bufferedBefore = this.getBuffered_(contentType);
      }
      this.append_(contentType, data, timestampOffset, stream);
    }, reference ? reference.getUris()[0] : null);

    // Debug-only sanity check: compare what actually landed in the buffer
    // against the reference's declared time range and log likely encoding
    // problems.
    if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
      const bufferedAfter = this.getBuffered_(contentType);
      const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
          bufferedBefore, bufferedAfter);
      if (newBuffered) {
        const segmentDuration = reference.endTime - reference.startTime;
        const timeAdded = newBuffered.end - newBuffered.start;
        // Check end times instead of start times.  We may be overwriting a
        // buffer and only the end changes, and that would be fine.
        // Also, exclude tiny segments.  Sometimes alignment segments as small
        // as 33ms are seen in Google DAI content.  For such tiny segments,
        // half a segment duration would be no issue.
        const offset = Math.abs(newBuffered.end - reference.endTime);
        if (segmentDuration > 0.100 &&
            (offset > segmentDuration / 2 ||
            Math.abs(segmentDuration - timeAdded) > 0.030)) {
          shaka.log.error('Possible encoding problem detected!',
              'Unexpected buffered range for reference', reference,
              'from URIs', reference.getUris(),
              'should be', {start: reference.startTime, end: reference.endTime},
              'but got', newBuffered);
        }
      }
    }
  }

  /**
   * Set the selected closed captions Id and language.
   *
   * @param {string} id
   */
  setSelectedClosedCaptionId(id) {
    const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
    // Captions already buffered before this point are flushed up to the
    // current video buffer end.
    const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
    this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  }

  /** Disable embedded closed captions. */
  clearSelectedClosedCaptionId() {
    if (this.textEngine_) {
      this.textEngine_.setSelectedClosedCaptionId('', 0);
    }
  }

  /**
   * Enqueue an operation to remove data from the SourceBuffer.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {number} startTime relative to the start of the presentation
   * @param {numbe