shaka-player
Version:
DASH/EME video player library
1,382 lines (1,195 loc) • 149 kB
JavaScript
/*! @license
* Shaka Player
* Copyright 2016 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
describe('StreamingEngine', () => {
const Util = shaka.test.Util;
const ContentType = shaka.util.ManifestParserUtils.ContentType;
const Uint8ArrayUtils = shaka.util.Uint8ArrayUtils;
// Dummy byte ranges and sizes for initialization and media segments.
// Create empty object first and initialize the fields through
// [] to allow field names to be expressions.
/**
* @type {!Object.<shaka.util.ManifestParserUtils.ContentType,
* !Array.<number>>}
*/
const initSegmentRanges = {};
initSegmentRanges[ContentType.AUDIO] = [100, 1000];
initSegmentRanges[ContentType.VIDEO] = [200, 2000];
/** @type {!Object.<shaka.util.ManifestParserUtils.ContentType, number>} */
const segmentSizes = {};
segmentSizes[ContentType.AUDIO] = 1000;
segmentSizes[ContentType.VIDEO] = 10000;
segmentSizes[ContentType.TEXT] = 500;
/** @type {!Object.<string, shaka.test.FakeMediaSourceEngine.SegmentData>} */
let segmentData;
/** @type {number} */
let presentationTimeInSeconds;
/** @type {boolean} */
let playing;
/** @type {!shaka.test.FakeMediaSourceEngine} */
let mediaSourceEngine;
/** @type {{audio: number, video: number, text: number}} */
let netEngineDelays;
/** @type {!shaka.test.FakeNetworkingEngine} */
let netEngine;
/** @type {{start: number, end: number}} */
let segmentAvailability;
/** @type {!shaka.test.FakePresentationTimeline} */
let timeline;
/** @type {?shaka.extern.Stream} */
let audioStream;
/** @type {?shaka.extern.Stream} */
let videoStream;
/** @type {shaka.extern.Variant} */
let variant;
/** @type {shaka.extern.Stream} */
let textStream;
/** @type {shaka.extern.Variant} */
let alternateVariant;
/** @type {shaka.extern.Stream} */
let alternateVideoStream;
/** @type {shaka.extern.Manifest} */
let manifest;
/** @type {!jasmine.Spy} */
let onError;
/** @type {!jasmine.Spy} */
let onEvent;
/** @type {!jasmine.Spy} */
let onManifestUpdate;
/** @type {!jasmine.Spy} */
let onSegmentAppended;
/** @type {!jasmine.Spy} */
let getBandwidthEstimate;
/** @type {!shaka.media.StreamingEngine} */
let streamingEngine;
/** @type {!jasmine.Spy} */
let beforeAppendSegment;
/** @type {!jasmine.Spy} */
let onMetadata;
/** @type {!jasmine.Spy} */
let disableStream;
/** @type {function(function(), number)} */
let realSetTimeout;
/**
* Runs the fake event loop.
* @param {function()=} callback An optional callback that is executed
* each time the clock ticks.
*/
async function runTest(callback) {
async function onTick(currentTime) {
if (callback) {
await callback();
}
if (playing) {
presentationTimeInSeconds++;
}
}
// No test should require more than 60 seconds of simulated time.
await Util.fakeEventLoop(60, onTick);
}
beforeAll(() => {
realSetTimeout = window.setTimeout;
jasmine.clock().install();
jasmine.clock().mockDate();
});
/**
* @param {boolean=} trickMode
* @param {number=} mediaOffset The offset from 0 for the segment start times
* @param {shaka.extern.aes128Key=} aes128Key The AES-128 key to put in
* the manifest, if one should exist
*/
function setupVod(trickMode, mediaOffset, aes128Key) {
// For VOD, we fake a presentation that has 2 Periods of equal duration
// (20 seconds), where each Period has 1 Variant and 1 text stream.
//
// There are 4 initialization segments: 1 audio and 1 video for the
// first Period, and 1 audio and 1 video for the second Period.
//
// There are 12 media segments: 2 audio, 2 video, and 2 text for the
// first Period, and 2 audio, 2 video, and 2 text for the second Period.
// All media segments are (by default) 10 seconds long.
const offset = mediaOffset || 0;
// timestampOffset is -ve since it is added to bring the timeline to 0.
// -0 and 0 are not same so explicitly set to 0.
const timestampOffset = offset === 0 ? 0 : -offset;
// Create SegmentData map for FakeMediaSourceEngine.
const initSegmentSizeAudio = initSegmentRanges[ContentType.AUDIO][1] -
initSegmentRanges[ContentType.AUDIO][0] + 1;
const initSegmentSizeVideo = initSegmentRanges[ContentType.VIDEO][1] -
initSegmentRanges[ContentType.VIDEO][0] + 1;
const makeBuffer = (size) => new ArrayBuffer(size);
segmentData = {
audio: {
initSegments: [
makeBuffer(initSegmentSizeAudio),
makeBuffer(initSegmentSizeAudio),
],
segments: [
makeBuffer(segmentSizes[ContentType.AUDIO]),
makeBuffer(segmentSizes[ContentType.AUDIO]),
makeBuffer(segmentSizes[ContentType.AUDIO]),
makeBuffer(segmentSizes[ContentType.AUDIO]),
],
segmentStartTimes: [offset, offset+10, offset+20, offset+30],
segmentDuration: 10,
timestampOffset: timestampOffset,
},
video: {
initSegments: [
makeBuffer(initSegmentSizeVideo),
makeBuffer(initSegmentSizeVideo),
],
segments: [
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
],
segmentStartTimes: [offset, offset+10, offset+20, offset+30],
segmentDuration: 10,
timestampOffset: timestampOffset,
},
text: {
initSegments: [],
segments: [
makeBuffer(segmentSizes[ContentType.TEXT]),
makeBuffer(segmentSizes[ContentType.TEXT]),
makeBuffer(segmentSizes[ContentType.TEXT]),
makeBuffer(segmentSizes[ContentType.TEXT]),
],
segmentStartTimes: [offset, offset+10, offset+20, offset+30],
segmentDuration: 10,
timestampOffset: timestampOffset,
},
};
if (trickMode) {
segmentData.trickvideo = {
initSegments: [
makeBuffer(initSegmentSizeVideo),
makeBuffer(initSegmentSizeVideo),
],
segments: [
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
],
segmentStartTimes: [offset, offset+10, offset+20, offset+30],
segmentDuration: 10,
timestampOffset: timestampOffset,
};
}
presentationTimeInSeconds = 0;
playing = false;
setupNetworkingEngine(
/* segmentsInFirstPeriod= */ 2,
/* segmentsInSecondPeriod= */ 2);
segmentAvailability = {
start: 0,
end: 40,
};
timeline = shaka.test.StreamingEngineUtil.createFakePresentationTimeline(
segmentAvailability,
/* presentationDuration= */ 40,
/* maxSegmentDuration= */ 10,
/* isLive= */ false);
setupManifest(
/* firstPeriodStartTime= */ 0,
/* secondPeriodStartTime= */ 20,
/* presentationDuration= */ 40,
aes128Key);
}
function setupLive() {
// For live, we fake a presentation that has 2 Periods of different
// durations (120 seconds and 20 seconds respectively), where each Period
// has 1 Variant and 1 text stream.
//
// There are 4 initialization segments: 1 audio and 1 video for the
// first Period, and 1 audio and 1 video for the second Period.
//
// There are 14 media segments: 12 audio, 12 video, and 12 text for the
// first Period, and 2 audio, 2 video, and 2 text for the second Period.
// All media segments are (by default) 10 seconds long.
//
// The segment availability window starts at t=100 (segment 11) and extends
// to t=120 (segment 13).
// Create SegmentData map for FakeMediaSourceEngine.
const initSegmentSizeAudio = initSegmentRanges[ContentType.AUDIO][1] -
initSegmentRanges[ContentType.AUDIO][0] + 1;
const initSegmentSizeVideo = initSegmentRanges[ContentType.VIDEO][1] -
initSegmentRanges[ContentType.VIDEO][0] + 1;
const makeBuffer = (size) => new ArrayBuffer(size);
segmentData = {
audio: {
initSegments:
[makeBuffer(initSegmentSizeAudio),
makeBuffer(initSegmentSizeAudio)],
segments: [],
segmentStartTimes: [],
segmentDuration: 10,
},
video: {
initSegments:
[makeBuffer(initSegmentSizeVideo),
makeBuffer(initSegmentSizeVideo)],
segments: [],
segmentStartTimes: [],
segmentDuration: 10,
},
text: {
initSegments: [],
segments: [],
segmentStartTimes: [],
segmentDuration: 10,
},
};
const segmentsInFirstPeriod = 12;
for (let i = 0; i < segmentsInFirstPeriod; i++) {
segmentData[ContentType.AUDIO].segments.push(
makeBuffer(segmentSizes[ContentType.AUDIO]));
segmentData[ContentType.VIDEO].segments.push(
makeBuffer(segmentSizes[ContentType.VIDEO]));
segmentData[ContentType.TEXT].segments.push(
makeBuffer(segmentSizes[ContentType.TEXT]));
segmentData[ContentType.AUDIO].segmentStartTimes.push(i * 10);
segmentData[ContentType.VIDEO].segmentStartTimes.push(i * 10);
segmentData[ContentType.TEXT].segmentStartTimes.push(i * 10);
}
const segmentsInSecondPeriod = 2;
for (let i = 0; i < segmentsInSecondPeriod; i++) {
segmentData[ContentType.AUDIO].segments.push(
makeBuffer(segmentSizes[ContentType.AUDIO]));
segmentData[ContentType.VIDEO].segments.push(
makeBuffer(segmentSizes[ContentType.VIDEO]));
segmentData[ContentType.TEXT].segments.push(
makeBuffer(segmentSizes[ContentType.TEXT]));
segmentData[ContentType.AUDIO].segmentStartTimes.push(
(segmentsInFirstPeriod + i) * 10);
segmentData[ContentType.VIDEO].segmentStartTimes.push(
(segmentsInFirstPeriod + i) * 10);
segmentData[ContentType.TEXT].segmentStartTimes.push(
(segmentsInFirstPeriod + i) * 10);
}
presentationTimeInSeconds = 110;
playing = false;
setupNetworkingEngine(
/* segmentsInFirstPeriod= */ 12,
/* segmentsInSecondPeriod= */ 2);
// NOTE: Many tests here start playback at 100, so the availability start is
// 90. This allows the async index creation processes to complete before
// the window moves, which gives us the startup conditions the tests expect.
// Keep in mind that the fake event loop in the tests ticks in whole
// seconds, so real async processes may take a surprising amount of fake
// time to complete. To test actual boundary conditions, you can change
// segmentAvailability.start in the test setup.
segmentAvailability = {
start: 90,
end: 140,
};
timeline = shaka.test.StreamingEngineUtil.createFakePresentationTimeline(
segmentAvailability,
/* presentationDuration= */ 140,
/* maxSegmentDuration= */ 10,
/* isLive= */ true);
setupManifest(
/* firstPeriodStartTime= */ 0,
/* secondPeriodStartTime= */ 120,
/* presentationDuration= */ 140);
}
function setupNetworkingEngine(
segmentsInFirstPeriod, segmentsInSecondPeriod) {
// Create the fake NetworkingEngine. Note: the StreamingEngine should never
// request a segment that does not exist.
netEngineDelays = {
audio: 0,
video: 0,
text: 0,
};
netEngine = shaka.test.StreamingEngineUtil.createFakeNetworkingEngine(
// Init segment generator:
(type, periodIndex) => {
expect((periodIndex == 0) || (periodIndex == 1));
return segmentData[type].initSegments[periodIndex];
},
// Media segment generator:
(type, periodIndex, position) => {
expect(position).toBeGreaterThan(-1);
expect((periodIndex == 0 && position <= segmentsInFirstPeriod) ||
(periodIndex == 1 && position <= segmentsInSecondPeriod));
const segment = segmentData[type].segments[position];
const startTime = segmentData[type].segmentStartTimes[position];
const endTime = startTime + segmentData[type].segmentDuration;
if (endTime < segmentAvailability.start ||
startTime > segmentAvailability.end) {
// Return null if the segment is out of the segment availability
// window.
return null;
}
return segment;
},
/* delays= */ netEngineDelays);
}
/**
* @param {number} firstPeriodStartTime
* @param {number} secondPeriodStartTime
* @param {number} presentationDuration
* @param {shaka.extern.aes128Key=} aes128Key
*/
function setupManifest(
firstPeriodStartTime, secondPeriodStartTime, presentationDuration,
aes128Key) {
const segmentDurations = {
audio: segmentData[ContentType.AUDIO].segmentDuration,
video: segmentData[ContentType.VIDEO].segmentDuration,
text: segmentData[ContentType.TEXT].segmentDuration,
};
const timestampOffsets = {
audio: segmentData[ContentType.AUDIO].timestampOffset,
video: segmentData[ContentType.VIDEO].timestampOffset,
text: segmentData[ContentType.TEXT].timestampOffset,
};
if (segmentData['trickvideo']) {
segmentDurations['trickvideo'] =
segmentData['trickvideo'].segmentDuration;
timestampOffsets['trickvideo'] =
segmentData['trickvideo'].timestampOffset;
}
manifest = shaka.test.StreamingEngineUtil.createManifest(
/** @type {!shaka.media.PresentationTimeline} */(timeline),
[firstPeriodStartTime, secondPeriodStartTime],
presentationDuration, segmentDurations, initSegmentRanges,
timestampOffsets, aes128Key);
audioStream = manifest.variants[0].audio;
videoStream = manifest.variants[0].video;
variant = manifest.variants[0];
textStream = manifest.textStreams[0];
// This Stream is only used to verify that StreamingEngine can setup
// Streams correctly.
alternateVideoStream =
shaka.test.StreamingEngineUtil.createMockVideoStream(8);
alternateVariant = {
audio: audioStream,
video: /** @type {shaka.extern.Stream} */ (alternateVideoStream),
id: 0,
language: 'und',
disabledUntilTime: 0,
primary: false,
bandwidth: 0,
allowedByApplication: true,
allowedByKeySystem: true,
decodingInfos: [],
};
manifest.variants.push(alternateVariant);
}
/**
* Creates the StreamingEngine.
**
* @param {shaka.extern.StreamingConfiguration=} config Optional
* configuration object which overrides the default one.
*/
function createStreamingEngine(config) {
onError = jasmine.createSpy('onError');
onError.and.callFake(fail);
onEvent = jasmine.createSpy('onEvent');
onManifestUpdate = jasmine.createSpy('onManifestUpdate');
onSegmentAppended = jasmine.createSpy('onSegmentAppended');
beforeAppendSegment = jasmine.createSpy('beforeAppendSegment');
onMetadata = jasmine.createSpy('onMetadata');
getBandwidthEstimate = jasmine.createSpy('getBandwidthEstimate');
getBandwidthEstimate.and.returnValue(1e3);
disableStream = jasmine.createSpy('disableStream');
disableStream.and.callFake(() => false);
beforeAppendSegment.and.callFake((segment) => {
return Promise.resolve();
});
if (!config) {
config = shaka.util.PlayerConfiguration.createDefault().streaming;
config.rebufferingGoal = 2;
config.bufferingGoal = 5;
config.bufferBehind = Infinity;
config.maxDisabledTime = 0; // Do not disable stream by default
}
goog.asserts.assert(
presentationTimeInSeconds != undefined,
'All tests should have defined an initial presentation time by now!');
const playerInterface = {
getPresentationTime: () => presentationTimeInSeconds,
getBandwidthEstimate: Util.spyFunc(getBandwidthEstimate),
mediaSourceEngine: mediaSourceEngine,
netEngine: /** @type {!shaka.net.NetworkingEngine} */(netEngine),
onError: Util.spyFunc(onError),
onEvent: Util.spyFunc(onEvent),
onManifestUpdate: Util.spyFunc(onManifestUpdate),
onSegmentAppended: Util.spyFunc(onSegmentAppended),
onInitSegmentAppended: () => {},
beforeAppendSegment: Util.spyFunc(beforeAppendSegment),
onMetadata: Util.spyFunc(onMetadata),
disableStream: Util.spyFunc(disableStream),
};
streamingEngine = new shaka.media.StreamingEngine(
/** @type {shaka.extern.Manifest} */(manifest), playerInterface);
streamingEngine.configure(config);
}
afterEach(() => {
streamingEngine.destroy().catch(fail);
});
afterAll(() => {
jasmine.clock().uninstall();
});
// This test initializes the StreamingEngine (SE) and allows it to play
// through both Periods.
//
// After construction of StreamingEngine, the following should occur:
// 1. The owner should immediately call switchVariant() with the initial
// variant.
// 2. The owner should call start().
// 3. SE should setup each of the initial Streams.
// 4. SE should start appending the initial Streams' segments.
// 5. SE should call MediaSourceEngine.endOfStream() after it has appended
// both segments from the second Period. At this point, the playhead
// should not be at the end of the presentation, but the test will be
// effectively over since SE will have nothing else to do.
it('initializes and plays VOD', async () => {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, false],
video: [false, false],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [false, false, false, false],
video: [false, false, false, false],
text: [false, false, false, false],
});
const expectedMseInit = new Map();
expectedMseInit.set(ContentType.AUDIO, audioStream);
expectedMseInit.set(ContentType.VIDEO, videoStream);
expectedMseInit.set(ContentType.TEXT, textStream);
expect(mediaSourceEngine.init).toHaveBeenCalledWith(expectedMseInit,
/** sequenceMode= */ false, /** manifestType= */ 'UNKNOWN',
/** ignoreManifestTimestampsInSegmentsMode= */ false);
expect(mediaSourceEngine.init).toHaveBeenCalledTimes(1);
expect(mediaSourceEngine.setDuration).toHaveBeenCalledTimes(1);
expect(mediaSourceEngine.setDuration).toHaveBeenCalledWith(40);
await runTest();
expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, true],
video: [false, true],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, true, true],
video: [true, true, true, true],
text: [true, true, true, true],
});
netEngine.expectRangeRequest(
'0_audio_init',
initSegmentRanges[ContentType.AUDIO][0],
initSegmentRanges[ContentType.AUDIO][1],
/* isInit= */ true);
netEngine.expectRangeRequest(
'0_video_init',
initSegmentRanges[ContentType.VIDEO][0],
initSegmentRanges[ContentType.VIDEO][1],
/* isInit= */ true);
netEngine.expectRangeRequest(
'1_audio_init',
initSegmentRanges[ContentType.AUDIO][0],
initSegmentRanges[ContentType.AUDIO][1],
/* isInit= */ true);
netEngine.expectRangeRequest(
'1_video_init',
initSegmentRanges[ContentType.VIDEO][0],
initSegmentRanges[ContentType.VIDEO][1],
/* isInit= */ true);
const segmentType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
const segmentContext = {
type: shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT,
};
netEngine.expectRequest('0_audio_0', segmentType, segmentContext);
netEngine.expectRequest('0_video_0', segmentType, segmentContext);
netEngine.expectRequest('0_text_0', segmentType, segmentContext);
netEngine.expectRequest('0_audio_1', segmentType, segmentContext);
netEngine.expectRequest('0_video_1', segmentType, segmentContext);
netEngine.expectRequest('0_text_1', segmentType, segmentContext);
netEngine.expectRequest('1_audio_2', segmentType, segmentContext);
netEngine.expectRequest('1_video_2', segmentType, segmentContext);
netEngine.expectRequest('1_text_2', segmentType, segmentContext);
netEngine.expectRequest('1_audio_3', segmentType, segmentContext);
netEngine.expectRequest('1_video_3', segmentType, segmentContext);
netEngine.expectRequest('1_text_3', segmentType, segmentContext);
});
describe('unloadTextStream', () => {
it('doesn\'t send requests for text after calling unload', async () => {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
// Verify that after unloading text stream, no network request for text
// is sent.
await runTest(() => {
const segmentType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
const segmentContext = {
type: shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT,
};
if (presentationTimeInSeconds == 1) {
netEngine.expectRequest('0_text_0', segmentType, segmentContext);
netEngine.request.calls.reset();
streamingEngine.unloadTextStream();
} else if (presentationTimeInSeconds == 35) {
netEngine.expectNoRequest('0_text_0', segmentType, segmentContext);
netEngine.expectNoRequest('0_text_1', segmentType, segmentContext);
netEngine.expectNoRequest('1_text_2', segmentType, segmentContext);
netEngine.expectNoRequest('1_text_3', segmentType, segmentContext);
}
});
});
it('sets the current text stream to null', async () => {
createStreamingEngine();
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
expect(streamingEngine.getCurrentTextStream()).not.toBe(null);
await streamingEngine.start();
playing = true;
streamingEngine.unloadTextStream();
expect(streamingEngine.getCurrentTextStream()).toBe(null);
});
});
it('initializes and plays live', async () => {
setupLive();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
// Here we go!
presentationTimeInSeconds = 100;
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest(slideSegmentAvailabilityWindow);
expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, true],
video: [false, true],
text: [],
});
// Since we started playback from segment 11, segments 10 through 14
// should be buffered. Those segment numbers are 1-based, and this array
// is 0-based, so we expect i >= 9 to be downloaded.
const segments = mediaSourceEngine.segments;
for (let i = 0; i < 14; i++) {
expect(segments[ContentType.AUDIO][i]).withContext(i).toBe(i >= 9);
expect(segments[ContentType.VIDEO][i]).withContext(i).toBe(i >= 9);
expect(segments[ContentType.TEXT][i]).withContext(i).toBe(i >= 9);
}
});
it('appends the ReadableStream data with low latency mode', async () => {
// Use the VOD manifests to test the streamDataCallback function in the low
// latency mode.
setupVod();
const config = shaka.util.PlayerConfiguration.createDefault().streaming;
config.lowLatencyMode = true;
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
mediaSourceEngine.appendBuffer.and.stub();
createStreamingEngine(config);
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest();
// In the mocks in StreamingEngineUtil, each segment gets fetched as two
// chunks of data, and each chunk contains one MDAT box.
// The streamDataCallback function will be triggered twice for each
// audio/video MP4 segment.
// appendBuffer should be called once for each init segment of the
// audio / video segment, and twice for each segment.
// 4 init segments + 8 audio/video segments * 2 + 4 text segments = 24.
if (window.ReadableStream) {
expect(mediaSourceEngine.appendBuffer).toHaveBeenCalledTimes(24);
} else {
// If ReadableStream is not supported by the browser, fall back to regular
// streaming.
// 4 init segments + 8 audio/video segments + 4 text segments = 16.
expect(mediaSourceEngine.appendBuffer).toHaveBeenCalledTimes(16);
}
});
it('plays when a small gap is present at the beginning', async () => {
const drift = 0.050; // 50 ms
setupVod();
mediaSourceEngine =
new shaka.test.FakeMediaSourceEngine(segmentData, drift);
createStreamingEngine();
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest();
});
it('plays with no chosen text streams', async () => {
setupVod();
manifest.textStreams = [];
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
// Here we go!
streamingEngine.switchVariant(variant);
// Don't call switchTextStream.
await streamingEngine.start();
playing = true;
await runTest();
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, true, true],
video: [true, true, true, true],
text: [false, false, false, false],
});
});
it('updates the timeline duration to match media duration', async () => {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
mediaSourceEngine.endOfStream.and.callFake(() => {
expect(mediaSourceEngine.setDuration).toHaveBeenCalledWith(40);
expect(mediaSourceEngine.setDuration).toHaveBeenCalledTimes(1);
mediaSourceEngine.setDuration.calls.reset();
// Simulate the media ending BEFORE the expected (manifest) duration.
mediaSourceEngine.getDuration.and.returnValue(35);
return Promise.resolve();
});
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest();
expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
expect(timeline.setDuration).toHaveBeenCalledWith(35);
});
// https://github.com/shaka-project/shaka-player/issues/979
it('does not expand the timeline duration', async () => {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
mediaSourceEngine.endOfStream.and.callFake(() => {
expect(mediaSourceEngine.setDuration).toHaveBeenCalledWith(40);
expect(mediaSourceEngine.setDuration).toHaveBeenCalledTimes(1);
mediaSourceEngine.setDuration.calls.reset();
// Simulate the media ending AFTER the expected (manifest) duration.
mediaSourceEngine.getDuration.and.returnValue(41);
return Promise.resolve();
});
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest();
expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
expect(timeline.setDuration).not.toHaveBeenCalled();
});
// https://github.com/shaka-project/shaka-player/issues/1967
it('does not change duration when 0', async () => {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
// The duration can spuriously be set to 0, so we should ignore this and not
// update the duration.
mediaSourceEngine.getDuration.and.returnValue(0);
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest();
expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
expect(timeline.setDuration).not.toHaveBeenCalled();
});
it('applies fudge factors for append window', async () => {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest();
// The second Period starts at 20, so we should set the appendWindowStart to
// 20, but reduced by a small fudge factor.
const lt20 = {
asymmetricMatch: (val) => val >= 19.9 && val < 20,
};
const gt40 = {
asymmetricMatch: (val) => val > 40 && val <= 40.1,
};
const streamsByType = new Map();
streamsByType.set(ContentType.AUDIO, audioStream);
streamsByType.set(ContentType.VIDEO, videoStream);
expect(mediaSourceEngine.setStreamProperties)
.toHaveBeenCalledWith('video', 0, lt20, gt40, false,
videoStream, streamsByType);
});
// Regression test for https://github.com/shaka-project/shaka-player/issues/3717
it('applies fudge factors for the duration', async () => {
setupVod();
// In #3717, the duration was just barely large enough to encompass an
// additional segment, but that segment didn't exist, so playback never
// completed. Here, we set the duration to just beyond the 3rd segment, and
// we make the 4th segment fail when requested.
const duration = 30.000000005;
timeline.getDuration.and.returnValue(duration);
const targetUri = '1_video_3'; // The URI of the 4th video segment.
failRequestsForTarget(netEngine, targetUri);
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest();
// The end of the stream should have been reached, and the 4th segment from
// each type should never have been requested.
expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
const segmentType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
const segmentContext = {
type: shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT,
};
netEngine.expectRequest('0_audio_0', segmentType, segmentContext);
netEngine.expectRequest('0_video_0', segmentType, segmentContext);
netEngine.expectRequest('0_text_0', segmentType, segmentContext);
netEngine.expectRequest('0_audio_1', segmentType, segmentContext);
netEngine.expectRequest('0_video_1', segmentType, segmentContext);
netEngine.expectRequest('0_text_1', segmentType, segmentContext);
netEngine.expectRequest('1_audio_2', segmentType, segmentContext);
netEngine.expectRequest('1_video_2', segmentType, segmentContext);
netEngine.expectRequest('1_text_2', segmentType, segmentContext);
netEngine.expectNoRequest('1_audio_3', segmentType, segmentContext);
netEngine.expectNoRequest('1_video_3', segmentType, segmentContext);
netEngine.expectNoRequest('1_text_3', segmentType, segmentContext);
});
it('does not buffer one media type ahead of another', async () => {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
const config = shaka.util.PlayerConfiguration.createDefault().streaming;
config.bufferingGoal = 60;
config.failureCallback = () => streamingEngine.retry(0.1);
createStreamingEngine(config);
// Make requests for different types take different amounts of time.
// This would let some media types buffer faster than others if unchecked.
netEngineDelays.text = 0.1;
netEngineDelays.audio = 1.0;
netEngineDelays.video = 5.0; // Need init segment and media segment
mediaSourceEngine.appendBuffer.and.callFake((type, data, reference) => {
// Call to the underlying implementation.
const p = mediaSourceEngine.appendBufferImpl(type, data, reference);
// Validate that no one media type got ahead of any other.
let minBuffered = Infinity;
let maxBuffered = 0;
for (const t of ['audio', 'video', 'text']) {
const buffered = mediaSourceEngine.bufferedAheadOfImpl(t, 0);
minBuffered = Math.min(minBuffered, buffered);
maxBuffered = Math.max(maxBuffered, buffered);
}
// Simulated playback doesn't start until some of each is buffered. This
// realism is important to the test passing.
if (minBuffered > 0) {
playing = true;
}
// Sanity check.
expect(maxBuffered).not.toBeLessThan(minBuffered);
// Proof that we didn't get too far ahead (10s == 1 segment).
expect(maxBuffered - minBuffered).not.toBeGreaterThan(10);
return p;
});
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
// Simulated playback is started in the appendBuffer fake when some of each
// type is buffered. This realism is important to the test passing.
playing = false;
await runTest();
// Make sure appendBuffer was called, so that we know that we executed the
// checks in our fake above.
expect(mediaSourceEngine.appendBuffer).toHaveBeenCalled();
});
// https://github.com/shaka-project/shaka-player/issues/2957
it('plays with fewer text segments', async () => {
setupVod();
// Only use one segment for text, which will buffer less than the others.
segmentData['text'].segments.splice(1, 3);
await textStream.createSegmentIndex();
const oldGet = /** @type {?} */ (textStream.segmentIndex.get);
textStream.segmentIndex.get = (idx) => {
return idx > 0 ? null : oldGet(idx);
};
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest();
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, true, true],
video: [true, true, true, true],
text: [true],
});
});
describe('switchVariant/switchTextStream', () => {
let initialVariant;
let sameAudioVariant;
let sameVideoVariant;
let differentVariant;
let initialTextStream;
let newTextStream;
beforeEach(() => {
// Set up a manifest with multiple variants and a text stream.
manifest = shaka.test.ManifestGenerator.generate((manifest) => {
manifest.presentationTimeline.setDuration(60);
manifest.addVariant(0, (variant) => {
variant.addAudio(10, (stream) => {
stream.useSegmentTemplate('audio-10-%d.mp4', 10);
});
variant.addVideo(11, (stream) => {
stream.useSegmentTemplate('video-11-%d.mp4', 10);
});
});
manifest.addVariant(1, (variant) => {
variant.addExistingStream(10); // audio
variant.addVideo(12, (stream) => {
stream.useSegmentTemplate('video-12-%d.mp4', 10);
});
});
manifest.addVariant(2, (variant) => {
variant.addAudio(13, (stream) => {
stream.useSegmentTemplate('audio-13-%d.mp4', 10);
});
variant.addExistingStream(12); // video
});
manifest.addVariant(3, (variant) => {
variant.addVideo(14, (stream) => {
stream.useSegmentTemplate('video-14-%d.mp4', 10);
});
variant.addAudio(15, (stream) => {
stream.useSegmentTemplate('audio-15-%d.mp4', 10);
});
});
manifest.addTextStream(20, (stream) => {
stream.setInitSegmentReference(['text-20-init'], 0, null);
stream.useSegmentTemplate('text-20-%d.mp4', 10);
});
manifest.addTextStream(21, (stream) => {
stream.setInitSegmentReference(['text-21-init'], 0, null);
stream.useSegmentTemplate('text-21-%d.mp4', 10);
});
});
initialVariant = manifest.variants[0];
sameAudioVariant = manifest.variants[1];
sameVideoVariant = manifest.variants[2];
differentVariant = manifest.variants[3];
initialTextStream = manifest.textStreams[0];
newTextStream = manifest.textStreams[1];
// For these tests, we don't care about specific data appended.
// Just return any old ArrayBuffer for any requested segment.
netEngine = new shaka.test.FakeNetworkingEngine();
netEngine.setDefaultValue(new ArrayBuffer(0));
// For these tests, we also don't need FakeMediaSourceEngine to verify
// its input data.
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine({});
mediaSourceEngine.clear.and.returnValue(Promise.resolve());
mediaSourceEngine.bufferedAheadOf.and.returnValue(0);
mediaSourceEngine.bufferStart.and.returnValue(0);
mediaSourceEngine.setStreamProperties.and.returnValue(Promise.resolve());
mediaSourceEngine.remove.and.returnValue(Promise.resolve());
const bufferEnd = {audio: 0, video: 0, text: 0};
mediaSourceEngine.appendBuffer.and.callFake(
(type, data, reference) => {
bufferEnd[type] = reference && reference.endTime;
return Promise.resolve();
});
mediaSourceEngine.bufferEnd.and.callFake((type) => {
return bufferEnd[type];
});
mediaSourceEngine.bufferedAheadOf.and.callFake((type, start) => {
return Math.max(0, bufferEnd[type] - start);
});
mediaSourceEngine.isBuffered.and.callFake((type, time) => {
return time >= 0 && time < bufferEnd[type];
});
playing = false;
presentationTimeInSeconds = 0;
createStreamingEngine();
streamingEngine.switchVariant(initialVariant);
streamingEngine.switchTextStream(initialTextStream);
});
it('will not clear buffers if streams have not changed', async () => {
streamingEngine.start().catch(fail);
playing = true;
await Util.fakeEventLoop(1);
mediaSourceEngine.clear.calls.reset();
streamingEngine.switchVariant(sameAudioVariant, /* clearBuffer= */ true);
await Util.fakeEventLoop(1);
expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('audio');
expect(mediaSourceEngine.clear).toHaveBeenCalledWith('video');
expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('text');
mediaSourceEngine.clear.calls.reset();
streamingEngine.switchVariant(sameVideoVariant, /* clearBuffer= */ true);
await Util.fakeEventLoop(1);
expect(mediaSourceEngine.clear).toHaveBeenCalledWith('audio');
expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('video');
expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('text');
mediaSourceEngine.clear.calls.reset();
streamingEngine.switchTextStream(initialTextStream);
await Util.fakeEventLoop(1);
expect(mediaSourceEngine.clear).not.toHaveBeenCalled();
});
it('will not reset caption parser when text streams change', async () => {
await streamingEngine.start();
playing = true;
mediaSourceEngine.clear.calls.reset();
streamingEngine.switchTextStream(newTextStream);
await Util.fakeEventLoop(1);
expect(mediaSourceEngine.clear).toHaveBeenCalled();
expect(mediaSourceEngine.resetCaptionParser).not.toHaveBeenCalled();
});
// See https://github.com/shaka-project/shaka-player/issues/2956
it('works with fast variant switches during update', async () => {
// Delay the appendBuffer call until later so we are waiting for this to
// finish when we switch.
const p = new shaka.util.PublicPromise();
const old = mediaSourceEngine.appendBuffer;
// Replace the whole spy since we want to call the original.
mediaSourceEngine.appendBuffer =
jasmine.createSpy('appendBuffer')
.and.callFake(async (type, data, reference) => {
await p;
return Util.invokeSpy(old, type, data, reference);
});
await streamingEngine.start();
playing = true;
await Util.fakeEventLoop(1);
streamingEngine.switchVariant(differentVariant, /* clearBuffer= */ true);
streamingEngine.switchVariant(initialVariant, /* clearBuffer= */ true);
p.resolve();
await Util.fakeEventLoop(5);
expect(Util.invokeSpy(mediaSourceEngine.bufferEnd, 'video')).toBe(10);
});
it('works with fast text stream switches during update', async () => {
// Delay the appendBuffer call until later so we are waiting for this to
// finish when we switch.
const p = new shaka.util.PublicPromise();
const old = mediaSourceEngine.appendBuffer;
// Replace the whole spy since we want to call the original.
mediaSourceEngine.appendBuffer =
jasmine.createSpy('appendBuffer')
.and.callFake(async (type, data, reference) => {
await p;
return Util.invokeSpy(old, type, data, reference);
});
await streamingEngine.start();
playing = true;
await Util.fakeEventLoop(3);
netEngine.request.calls.reset();
streamingEngine.switchTextStream(newTextStream);
streamingEngine.switchTextStream(initialTextStream);
p.resolve();
await Util.fakeEventLoop(5);
const segmentType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
const segmentContext = {
type: shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT,
};
// Quickly switching back to text1, and text init segment should be
// fetched again.
netEngine.expectRequest('text-20-init', segmentType, segmentContext);
netEngine.expectNoRequest('text-21-init', segmentType, segmentContext);
// TODO: huh?
});
});
describe('handles seeks (VOD)', () => {
/** @type {!jasmine.Spy} */
let onTick;
beforeEach(() => {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
onTick = jasmine.createSpy('onTick');
onTick.and.stub();
});
it('into buffered regions', async () => {
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
let seekComplete = false;
await runTest(() => {
if (presentationTimeInSeconds == 6 && !seekComplete) {
// Seek backwards to a buffered region in the first Period.
presentationTimeInSeconds -= 5;
streamingEngine.seeked();
seekComplete = true;
}
});
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, true],
video: [false, true],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, true, true],
video: [true, true, true, true],
text: [true, true, true, true],
});
});
it('into partially buffered regions in the same period', async () => {
// When seeking into a region within the same period, or changing
// resolution, and after the seek some states are buffered and some
// are unbuffered, StreamingEngine should only clear the unbuffered
// states.
mediaSourceEngine.endOfStream.and.callFake(() => {
// Should have the first Period entirely buffered.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, true],
video: [false, true],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, true, true],
video: [true, true, true, true],
text: [true, true, true, true],
});
// Fake the audio buffer being removed.
mediaSourceEngine.segments[ContentType.AUDIO] =
[true, true, false, false];
// Seek back into the second Period.
presentationTimeInSeconds -= 5;
expect(presentationTimeInSeconds).toBeGreaterThan(19);
streamingEngine.seeked();
mediaSourceEngine.endOfStream.and.returnValue(Promise.resolve());
return Promise.resolve();
});
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest();
// When seeking within the same period, clear the buffer of the
// unbuffered streams.
expect(mediaSourceEngine.clear).toHaveBeenCalledWith('audio');
expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('video');
expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('text');
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, true],
video: [false, true],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [false, false, true, true],
video: [true, true, true, true],
text: [true, true, true, true],
});
});
it('into buffered regions across Periods', async () => {
mediaSourceEngine.endOfStream.and.callFake(() => {
// Seek backwards to a buffered region in the first Period.
presentationTimeInSeconds -= 20;
expect(presentationTimeInSeconds).toBeLessThan(20);
streamingEngine.seeked();
// Verify that buffers are not cleared.
expect(mediaSourceEngine.clear).not.toHaveBeenCalled();
return Promise.resolve();
});
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
await runTest();
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, true],
video: [false, true],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, true, true],
video: [true, true, true, true],
text: [true, true, true, true],
});
});
it('into unbuffered regions', async () => {
onTick.and.callFake(() => {
if (presentationTimeInSeconds == 6) {
// Note that since the buffering goal is 5 seconds and each segment is
// 10 seconds long, the second segment of this Period will be required
// at 6 seconds.
// Verify that all buffers have been cleared.
expect(mediaSourceEngine.clear)
.toHaveBeenCalledWith(ContentType.AUDIO);
expect(mediaSourceEngine.clear)
.toHaveBeenCalledWith(ContentType.VIDEO);
expect(mediaSourceEngine.clear)
.toHaveBeenCalledWith(ContentType.TEXT);
// Verify buffers. The first segment is present because we start
// off-by-one after a seek.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [true, false],
video: [true, false],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [false, true, false, false],
video: [false, true, false, false],
text: [false, true, false, false],
});
}
});
// Here we go!
streamingEngine.switchVariant(variant);
streamingEngine.switchTextStream(textStream);
await streamingEngine.start();
playing = true;
// Seek forward to an unbuffered region in the first Period.
expect(presentationTimeInSeconds).toBe(0);
presentationTimeInSeconds += 15;
streamingEngine.seeked();
await runTest(Util.spyFunc(onTick));
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, true],
video: [false, true],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [false, true, true, true],
video: [false, true, true, true],
text: [false, true, true, true],
});
});
it('into unbuffered regions across Periods', async () => {
// Start from the second Period.
presentationTimeInSeconds = 25;
mediaSourceEngine.endOfStream.and.callFake(() => {
// Verify buffers.
expect(mediaSourceEngine.initSegments)