// shaka-player — StreamingEngine unit tests.
// (Extracted listing header: "Version: DASH/EME video player library,
// 1,403 lines (1,191 loc), 119 kB, JavaScript".)
/**
* @license
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
describe('StreamingEngine', function() {
const Util = shaka.test.Util;
const ContentType = shaka.util.ManifestParserUtils.ContentType;
const Uint8ArrayUtils = shaka.util.Uint8ArrayUtils;
// Dummy byte ranges and sizes for initialization and media segments.
// Create empty object first and initialize the fields through
// [] to allow field names to be expressions.
/**
* @type {!Object.<shaka.util.ManifestParserUtils.ContentType,
* !Array.<number>>}
*/
let initSegmentRanges = {};
initSegmentRanges[ContentType.AUDIO] = [100, 1000];
initSegmentRanges[ContentType.VIDEO] = [200, 2000];
/** @type {!Object.<shaka.util.ManifestParserUtils.ContentType, number>} */
let segmentSizes = {};
segmentSizes[ContentType.AUDIO] = 1000;
segmentSizes[ContentType.VIDEO] = 10000;
segmentSizes[ContentType.TEXT] = 500;
// Per-type segment payloads; (re)built by setupVod() / setupLive().
/** @type {!Object.<string, shaka.test.FakeMediaSourceEngine.SegmentData>} */
let segmentData;
// Simulated playhead position; advanced by runTest() while |playing| is true.
/** @type {number} */
let presentationTimeInSeconds;
/** @type {boolean} */
let playing;
/** @type {!shaka.test.FakeMediaSourceEngine} */
let mediaSourceEngine;
// Fake NetworkingEngine; created by setupNetworkingEngine().
let netEngine;
// Fake PresentationTimeline; created by setupVod() / setupLive().
let timeline;
// Streams and variants for the two Periods; populated by setupManifest().
let audioStream1;
let videoStream1;
let variant1;
let textStream1;
let alternateVideoStream1;
let audioStream2;
let videoStream2;
let variant2;
let textStream2;
/** @type {shaka.extern.Manifest} */
let manifest;
/** @type {!jasmine.Spy} */
let onChooseStreams;
/** @type {!jasmine.Spy} */
let onCanSwitch;
/** @type {!jasmine.Spy} */
let onError;
/** @type {!jasmine.Spy} */
let onEvent;
/** @type {!jasmine.Spy} */
let onManifestUpdate;
/** @type {!jasmine.Spy} */
let onInitialStreamsSetup;
/** @type {!jasmine.Spy} */
let onStartupComplete;
/** @type {!jasmine.Spy} */
let onSegmentAppended;
/** @type {!jasmine.Spy} */
let getBandwidthEstimate;
/** @type {!shaka.media.StreamingEngine} */
let streamingEngine;
/**
 * Runs the fake event loop for up to 60 simulated seconds, advancing the
 * fake playhead once per tick while |playing| is true.
 * @param {function()=} callback An optional callback that is executed
 *   each time the clock ticks.
 */
function runTest(callback) {
  const onTick = (currentTime) => {
    if (callback) {
      callback();
    }
    if (playing) {
      presentationTimeInSeconds++;
    }
  };
  // No test should require more than 60 seconds of simulated time.
  Util.fakeEventLoop(60, onTick);
}
beforeAll(() => {
  // Use a mocked clock/date so the fake event loop controls time.
  jasmine.clock().install();
  jasmine.clock().mockDate();
  // PromiseMock is required for Util.fakeEventLoop.
  PromiseMock.install();
});
/** @param {boolean=} trickMode */
function setupVod(trickMode) {
// For VOD, we fake a presentation that has 2 Periods of equal duration
// (20 seconds), where each Period has 1 Variant and 1 text stream.
//
// There are 4 initialization segments: 1 audio and 1 video for the
// first Period, and 1 audio and 1 video for the second Period.
//
// There are 12 media segments: 2 audio, 2 video, and 2 text for the
// first Period, and 2 audio, 2 video, and 2 text for the second Period.
// All media segments are (by default) 10 seconds long.
// Create SegmentData map for FakeMediaSourceEngine.
// Byte ranges are inclusive, hence the +1 when computing sizes.
let initSegmentSizeAudio = initSegmentRanges[ContentType.AUDIO][1] -
initSegmentRanges[ContentType.AUDIO][0] + 1;
let initSegmentSizeVideo = initSegmentRanges[ContentType.VIDEO][1] -
initSegmentRanges[ContentType.VIDEO][0] + 1;
function makeBuffer(size) { return new ArrayBuffer(size); }
segmentData = {
audio: {
initSegments: [
makeBuffer(initSegmentSizeAudio), makeBuffer(initSegmentSizeAudio),
],
segments: [
makeBuffer(segmentSizes[ContentType.AUDIO]),
makeBuffer(segmentSizes[ContentType.AUDIO]),
makeBuffer(segmentSizes[ContentType.AUDIO]),
makeBuffer(segmentSizes[ContentType.AUDIO]),
],
segmentStartTimes: [0, 10, 0, 10],
segmentPeriodTimes: [0, 0, 20, 20],
segmentDuration: 10,
},
video: {
initSegments: [
makeBuffer(initSegmentSizeVideo), makeBuffer(initSegmentSizeVideo),
],
segments: [
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
],
segmentStartTimes: [0, 10, 0, 10],
segmentPeriodTimes: [0, 0, 20, 20],
segmentDuration: 10,
},
text: {
initSegments: [],
segments: [
makeBuffer(segmentSizes[ContentType.TEXT]),
makeBuffer(segmentSizes[ContentType.TEXT]),
makeBuffer(segmentSizes[ContentType.TEXT]),
makeBuffer(segmentSizes[ContentType.TEXT]),
],
segmentStartTimes: [0, 10, 0, 10],
segmentPeriodTimes: [0, 0, 20, 20],
segmentDuration: 10,
},
};
// Trick-mode video mirrors the regular video stream's segment layout.
if (trickMode) {
segmentData.trickvideo = {
initSegments: [
makeBuffer(initSegmentSizeVideo), makeBuffer(initSegmentSizeVideo),
],
segments: [
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
makeBuffer(segmentSizes[ContentType.VIDEO]),
],
segmentStartTimes: [0, 10, 0, 10],
segmentPeriodTimes: [0, 0, 20, 20],
segmentDuration: 10,
};
}
// Playback starts at time 0 and stays paused until a test sets |playing|.
presentationTimeInSeconds = 0;
playing = false;
setupNetworkingEngine(
2 /* segmentsInFirstPeriod */,
2 /* segmentsInSecondPeriod */);
timeline = shaka.test.StreamingEngineUtil.createFakePresentationTimeline(
0 /* segmentAvailabilityStart */,
40 /* segmentAvailabilityEnd */,
40 /* presentationDuration */,
10 /* maxSegmentDuration */,
false /* isLive */);
setupManifest(
0 /* firstPeriodStartTime */,
20 /* secondPeriodStartTime */,
40 /* presentationDuration */);
}
function setupLive() {
// For live, we fake a presentation that has 2 Periods of different
// durations (120 seconds and 20 seconds respectively), where each Period
// has 1 Variant and 1 text stream.
//
// There are 4 initialization segments: 1 audio and 1 video for the
// first Period, and 1 audio and 1 video for the second Period.
//
// There are 14 media segments: 12 audio, 12 video, and 12 text for the
// first Period, and 2 audio, 2 video, and 2 text for the second Period.
// All media segments are (by default) 10 seconds long.
//
// The segment availability window starts at t=100 (segment 11) and extends
// to t=120 (segment 13).
// Create SegmentData map for FakeMediaSourceEngine.
// Byte ranges are inclusive, hence the +1 when computing sizes.
let initSegmentSizeAudio = initSegmentRanges[ContentType.AUDIO][1] -
initSegmentRanges[ContentType.AUDIO][0] + 1;
let initSegmentSizeVideo = initSegmentRanges[ContentType.VIDEO][1] -
initSegmentRanges[ContentType.VIDEO][0] + 1;
function makeBuffer(size) { return new ArrayBuffer(size); }
segmentData = {
audio: {
initSegments:
[makeBuffer(initSegmentSizeAudio),
makeBuffer(initSegmentSizeAudio)],
segments: [],
segmentStartTimes: [],
segmentPeriodTimes: [],
segmentDuration: 10,
},
video: {
initSegments:
[makeBuffer(initSegmentSizeVideo),
makeBuffer(initSegmentSizeVideo)],
segments: [],
segmentStartTimes: [],
segmentPeriodTimes: [],
segmentDuration: 10,
},
text: {
initSegments: [],
segments: [],
segmentStartTimes: [],
segmentPeriodTimes: [],
segmentDuration: 10,
},
};
// First Period: 12 ten-second segments per type, starting at t=0.
let segmentsInFirstPeriod = 12;
for (let i = 0; i < segmentsInFirstPeriod; ++i) {
segmentData[ContentType.AUDIO].segments.push(
makeBuffer(segmentSizes[ContentType.AUDIO]));
segmentData[ContentType.VIDEO].segments.push(
makeBuffer(segmentSizes[ContentType.VIDEO]));
segmentData[ContentType.TEXT].segments.push(
makeBuffer(segmentSizes[ContentType.TEXT]));
segmentData[ContentType.AUDIO].segmentStartTimes.push(i * 10);
segmentData[ContentType.VIDEO].segmentStartTimes.push(i * 10);
segmentData[ContentType.TEXT].segmentStartTimes.push(i * 10);
segmentData[ContentType.AUDIO].segmentPeriodTimes.push(0);
segmentData[ContentType.VIDEO].segmentPeriodTimes.push(0);
segmentData[ContentType.TEXT].segmentPeriodTimes.push(0);
}
// Second Period: 2 segments per type; the Period starts at t=120.
let segmentsInSecondPeriod = 2;
for (let i = 0; i < segmentsInSecondPeriod; ++i) {
segmentData[ContentType.AUDIO].segments.push(
makeBuffer(segmentSizes[ContentType.AUDIO]));
segmentData[ContentType.VIDEO].segments.push(
makeBuffer(segmentSizes[ContentType.VIDEO]));
segmentData[ContentType.TEXT].segments.push(
makeBuffer(segmentSizes[ContentType.TEXT]));
segmentData[ContentType.AUDIO].segmentStartTimes.push(i * 10);
segmentData[ContentType.VIDEO].segmentStartTimes.push(i * 10);
segmentData[ContentType.TEXT].segmentStartTimes.push(i * 10);
segmentData[ContentType.AUDIO].segmentPeriodTimes.push(
segmentsInFirstPeriod * 10);
segmentData[ContentType.VIDEO].segmentPeriodTimes.push(
segmentsInFirstPeriod * 10);
segmentData[ContentType.TEXT].segmentPeriodTimes.push(
segmentsInFirstPeriod * 10);
}
// The playhead starts at t=110, inside the availability window [100, 140].
presentationTimeInSeconds = 110;
playing = false;
setupNetworkingEngine(
12 /* segmentsInFirstPeriod */,
2 /* segmentsInSecondPeriod */);
timeline = shaka.test.StreamingEngineUtil.createFakePresentationTimeline(
100 /* segmentAvailabilityStart */,
140 /* segmentAvailabilityEnd */,
140 /* presentationDuration */,
10 /* maxSegmentDuration */,
true /* isLive */);
setupManifest(
0 /* firstPeriodStartTime */,
120 /* secondPeriodStartTime */,
140 /* presentationDuration */);
}
/**
 * Creates the fake NetworkingEngine whose generators serve the init and
 * media segments from |segmentData|.
 * @param {number} segmentsInFirstPeriod
 * @param {number} segmentsInSecondPeriod
 */
function setupNetworkingEngine(
    segmentsInFirstPeriod, segmentsInSecondPeriod) {
  // Create the fake NetworkingEngine. Note: the StreamingEngine should never
  // request a segment that does not exist.
  netEngine = shaka.test.StreamingEngineUtil.createFakeNetworkingEngine(
      // Init segment generator:
      function(type, periodNumber) {
        // A bare expect(<boolean>) without a matcher asserts nothing in
        // Jasmine; chain .toBe(true) so an invalid Period actually fails.
        expect((periodNumber == 1) || (periodNumber == 2)).toBe(true);
        return segmentData[type].initSegments[periodNumber - 1];
      },
      // Media segment generator:
      function(type, periodNumber, position) {
        expect(position).toBeGreaterThan(0);
        expect((periodNumber == 1 && position <= segmentsInFirstPeriod) ||
               (periodNumber == 2 && position <= segmentsInSecondPeriod))
            .toBe(true);
        let i = (segmentsInFirstPeriod * (periodNumber - 1)) + (position - 1);
        return segmentData[type].segments[i];
      });
}
/**
 * Builds the fake manifest (2 Periods, each with 1 Variant and 1 text
 * stream), attaches InitSegmentReferences, and captures per-Period stream
 * references into the shared test fixtures.
 * @param {number} firstPeriodStartTime
 * @param {number} secondPeriodStartTime
 * @param {number} presentationDuration
 */
function setupManifest(
firstPeriodStartTime, secondPeriodStartTime, presentationDuration) {
let segmentDurations = {
audio: segmentData[ContentType.AUDIO].segmentDuration,
video: segmentData[ContentType.VIDEO].segmentDuration,
text: segmentData[ContentType.TEXT].segmentDuration,
};
// Only present when setupVod(true) created trick-mode segment data.
if (segmentData['trickvideo']) {
segmentDurations['trickvideo'] =
segmentData['trickvideo'].segmentDuration;
}
manifest = shaka.test.StreamingEngineUtil.createManifest(
[firstPeriodStartTime, secondPeriodStartTime], presentationDuration,
segmentDurations);
manifest.presentationTimeline =
/** @type {!shaka.media.PresentationTimeline} */ (timeline);
manifest.minBufferTime = 2;
// Create InitSegmentReferences.
manifest.periods[0].variants[0].audio.initSegmentReference =
new shaka.media.InitSegmentReference(
function() { return ['1_audio_init']; },
initSegmentRanges[ContentType.AUDIO][0],
initSegmentRanges[ContentType.AUDIO][1]);
manifest.periods[0].variants[0].video.initSegmentReference =
new shaka.media.InitSegmentReference(
function() { return ['1_video_init']; },
initSegmentRanges[ContentType.VIDEO][0],
initSegmentRanges[ContentType.VIDEO][1]);
if (manifest.periods[0].variants[0].video.trickModeVideo) {
manifest.periods[0].variants[0].video.trickModeVideo
.initSegmentReference = new shaka.media.InitSegmentReference(
function() { return ['1_trickvideo_init']; },
initSegmentRanges[ContentType.VIDEO][0],
initSegmentRanges[ContentType.VIDEO][1]);
}
manifest.periods[1].variants[0].audio.initSegmentReference =
new shaka.media.InitSegmentReference(
function() { return ['2_audio_init']; },
initSegmentRanges[ContentType.AUDIO][0],
initSegmentRanges[ContentType.AUDIO][1]);
manifest.periods[1].variants[0].video.initSegmentReference =
new shaka.media.InitSegmentReference(
function() { return ['2_video_init']; },
initSegmentRanges[ContentType.VIDEO][0],
initSegmentRanges[ContentType.VIDEO][1]);
if (manifest.periods[1].variants[0].video.trickModeVideo) {
manifest.periods[1].variants[0].video.trickModeVideo
.initSegmentReference = new shaka.media.InitSegmentReference(
function() { return ['2_trickvideo_init']; },
initSegmentRanges[ContentType.VIDEO][0],
initSegmentRanges[ContentType.VIDEO][1]);
}
// Capture convenient references to the first Period's contents.
audioStream1 = manifest.periods[0].variants[0].audio;
videoStream1 = manifest.periods[0].variants[0].video;
variant1 = manifest.periods[0].variants[0];
textStream1 = manifest.periods[0].textStreams[0];
// This Stream is only used to verify that StreamingEngine can setup
// Streams correctly. It does not have init or media segments.
alternateVideoStream1 =
shaka.test.StreamingEngineUtil.createMockVideoStream(8);
alternateVideoStream1.createSegmentIndex.and.returnValue(Promise.resolve());
alternateVideoStream1.findSegmentPosition.and.returnValue(null);
alternateVideoStream1.getSegmentReference.and.returnValue(null);
// An extra variant carrying only the alternate video stream.
let variant = {
audio: null,
video: /** @type {shaka.extern.Stream} */ (alternateVideoStream1),
id: 0,
language: 'und',
primary: false,
bandwidth: 0,
drmInfos: [],
allowedByApplication: true,
allowedByKeySystem: true,
};
manifest.periods[0].variants.push(variant);
// Capture convenient references to the second Period's contents.
audioStream2 = manifest.periods[1].variants[0].audio;
videoStream2 = manifest.periods[1].variants[0].video;
variant2 = manifest.periods[1].variants[0];
textStream2 = manifest.periods[1].textStreams[0];
}
/**
* Creates the StreamingEngine.
*
* @param {shaka.extern.StreamingConfiguration=} config Optional
* configuration object which overrides the default one.
*/
function createStreamingEngine(config) {
onChooseStreams = jasmine.createSpy('onChooseStreams');
onCanSwitch = jasmine.createSpy('onCanSwitch');
onInitialStreamsSetup = jasmine.createSpy('onInitialStreamsSetup');
onStartupComplete = jasmine.createSpy('onStartupComplete');
onError = jasmine.createSpy('onError');
// Any unexpected streaming error should fail the test by default.
onError.and.callFake(fail);
onEvent = jasmine.createSpy('onEvent');
onManifestUpdate = jasmine.createSpy('onManifestUpdate');
onSegmentAppended = jasmine.createSpy('onSegmentAppended');
getBandwidthEstimate = jasmine.createSpy('getBandwidthEstimate');
getBandwidthEstimate.and.returnValue(1e3);
// Default config uses small buffering goals so simulated playback
// finishes quickly.
if (!config) {
config = shaka.util.PlayerConfiguration.createDefault().streaming;
config.rebufferingGoal = 2;
config.bufferingGoal = 5;
config.bufferBehind = Infinity;
}
goog.asserts.assert(
presentationTimeInSeconds != undefined,
'All tests should have defined an initial presentation time by now!');
const playerInterface = {
getPresentationTime: () => presentationTimeInSeconds,
getBandwidthEstimate: Util.spyFunc(getBandwidthEstimate),
mediaSourceEngine: mediaSourceEngine,
netEngine: /** @type {!shaka.net.NetworkingEngine} */(netEngine),
onChooseStreams: Util.spyFunc(onChooseStreams),
onCanSwitch: Util.spyFunc(onCanSwitch),
onError: Util.spyFunc(onError),
onEvent: Util.spyFunc(onEvent),
onManifestUpdate: Util.spyFunc(onManifestUpdate),
onSegmentAppended: Util.spyFunc(onSegmentAppended),
onInitialStreamsSetup: Util.spyFunc(onInitialStreamsSetup),
onStartupComplete: Util.spyFunc(onStartupComplete),
};
streamingEngine = new shaka.media.StreamingEngine(
/** @type {shaka.extern.Manifest} */(manifest), playerInterface);
streamingEngine.configure(config);
}
afterEach((done) => {
  // Tear down the engine under test; flush so pending mock Promises settle.
  streamingEngine.destroy().catch(fail).then(done);
  PromiseMock.flush();
});
afterAll(() => {
  // Undo the beforeAll() installs, in reverse order.
  PromiseMock.uninstall();
  jasmine.clock().uninstall();
});
// This test initializes the StreamingEngine (SE) and allows it to play
// through both Periods.
//
// After calling start() the following should occur:
// 1. SE should immediately call onChooseStreams() with the first Period.
// 2. SE should setup each of the initial Streams and then call
// onInitialStreamsSetup().
// 3. SE should start appending the initial Streams' segments and in
// parallel setup the remaining Streams within the Manifest.
// - SE should call onStartupComplete() after it has buffered at least 1
// segment of each type of content.
// - SE should call onCanSwitch() with the first Period after it has
// setup the remaining Streams within the first Period.
// 4. SE should call onChooseStreams() with the second Period after it has
// both segments within the first Period.
// - We must return the Streams within the second Period.
// 5. SE should call onCanSwitch() with the second Period shortly after
// step 4.
// 6. SE should call MediaSourceEngine.endOfStream() after it has appended
// both segments within the second Period. At this point the playhead
// should not be at the end of the presentation, but the test will be
// effectively over since SE will have nothing else to do.
it('initializes and plays VOD', function() {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
onStartupComplete.and.callFake(function() {
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [true, false],
video: [true, false],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [true, false, false, false],
video: [true, false, false, false],
text: [true, false, false, false],
});
setupFakeGetTime(0);
});
// No text reinit should have happened during engine construction.
expect(mediaSourceEngine.reinitText).not.toHaveBeenCalled();
onChooseStreams.and.callFake(function(period) {
expect(period).toBe(manifest.periods[0]);
onCanSwitch.and.callFake(function() {
expect(alternateVideoStream1.createSegmentIndex).toHaveBeenCalled();
expect(mediaSourceEngine.reinitText).not.toHaveBeenCalled();
mediaSourceEngine.reinitText.calls.reset();
// onCanSwitch should fire only once for the first Period.
onCanSwitch.and.throwError(new Error());
});
// Reassign the fake: the next onChooseStreams() call is for the
// second Period.
onChooseStreams.and.callFake(function(period) {
expect(period).toBe(manifest.periods[1]);
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [true, false],
video: [true, false],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, false, false],
video: [true, true, false, false],
text: [true, true, false, false],
});
// All first-Period init and media segment requests should have
// been made by now.
verifyNetworkingEngineRequestCalls(1);
onCanSwitch.and.callFake(function() {
expect(audioStream2.createSegmentIndex).toHaveBeenCalled();
expect(videoStream2.createSegmentIndex).toHaveBeenCalled();
expect(textStream2.createSegmentIndex).toHaveBeenCalled();
expect(mediaSourceEngine.reinitText).toHaveBeenCalled();
mediaSourceEngine.reinitText.calls.reset();
// onCanSwitch should fire only once for the second Period.
onCanSwitch.and.throwError(new Error());
});
// Switch to the second Period.
return defaultOnChooseStreams(period);
});
// Init the first Period.
return defaultOnChooseStreams(period);
});
onInitialStreamsSetup.and.callFake(function() {
const expectedObject = new Map();
expectedObject.set(ContentType.AUDIO, audioStream1);
expectedObject.set(ContentType.VIDEO, videoStream1);
expectedObject.set(ContentType.TEXT, textStream1);
expect(mediaSourceEngine.init)
.toHaveBeenCalledWith(expectedObject, false);
expect(mediaSourceEngine.init.calls.count()).toBe(1);
mediaSourceEngine.init.calls.reset();
expect(mediaSourceEngine.setDuration).toHaveBeenCalledWith(40);
expect(mediaSourceEngine.setDuration.calls.count()).toBe(1);
mediaSourceEngine.setDuration.calls.reset();
expect(audioStream1.createSegmentIndex).toHaveBeenCalled();
expect(videoStream1.createSegmentIndex).toHaveBeenCalled();
expect(textStream1.createSegmentIndex).toHaveBeenCalled();
expect(alternateVideoStream1.createSegmentIndex).not.toHaveBeenCalled();
});
// Here we go!
streamingEngine.start();
runTest();
expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, true],
video: [false, true],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, true, true],
video: [true, true, true, true],
text: [true, true, true, true],
});
verifyNetworkingEngineRequestCalls(2);
});
describe('loadNewTextStream', () => {
  it('clears MediaSourceEngine', () => {
    setupVod();
    mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
    createStreamingEngine();
    onStartupComplete.and.callFake(() => setupFakeGetTime(0));
    onChooseStreams.and.callFake(onChooseStreamsWithUnloadedText);
    streamingEngine.start();
    runTest(() => {
      if (presentationTimeInSeconds != 20) {
        return;
      }
      // Loading a new text stream mid-playback should clear the text
      // buffer and re-init MediaSourceEngine for text only.
      mediaSourceEngine.clear.calls.reset();
      mediaSourceEngine.init.calls.reset();
      streamingEngine.loadNewTextStream(textStream2);
      PromiseMock.flush();
      expect(mediaSourceEngine.clear).toHaveBeenCalledWith('text');
      const expectedStreams = new Map();
      expectedStreams.set(ContentType.TEXT, jasmine.any(Object));
      expect(mediaSourceEngine.init).toHaveBeenCalledWith(
          expectedStreams, false);
    });
  });
});
describe('unloadTextStream', () => {
  it('doesn\'t send requests for text after calling unload', () => {
    setupVod();
    mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
    createStreamingEngine();
    onStartupComplete.and.callFake(() => setupFakeGetTime(0));
    onChooseStreams.and.callFake(onChooseStreamsWithUnloadedText);
    streamingEngine.start();
    const segmentType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
    // Verify that after unloading text stream, no network request for text
    // is sent.
    runTest(() => {
      if (presentationTimeInSeconds == 1) {
        // The first text segment was requested; now unload text.
        netEngine.expectRequest('1_text_1', segmentType);
        netEngine.request.calls.reset();
        streamingEngine.unloadTextStream();
      } else if (presentationTimeInSeconds == 35) {
        // Well past both Periods: no text request should have been made.
        netEngine.expectNoRequest('1_text_1', segmentType);
        netEngine.expectNoRequest('1_text_2', segmentType);
        netEngine.expectNoRequest('2_text_1', segmentType);
        netEngine.expectNoRequest('2_text_2', segmentType);
      }
    });
  });
});
it('initializes and plays live', () => {
  setupLive();
  mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
  createStreamingEngine();
  // Begin playback near the start of the availability window.
  presentationTimeInSeconds = 100;
  onStartupComplete.and.callFake(() => setupFakeGetTime(100));
  onChooseStreams.and.callFake((period) => defaultOnChooseStreams(period));
  // Here we go!
  streamingEngine.start();
  runTest(slideSegmentAvailabilityWindow);
  expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
  // Verify buffers.
  expect(mediaSourceEngine.initSegments).toEqual({
    audio: [false, true],
    video: [false, true],
    text: [],
  });
  // Since we started playback from segment 11, segments 10 through 14
  // should be buffered.  (Indices below are 0-based.)
  const types = [ContentType.AUDIO, ContentType.VIDEO, ContentType.TEXT];
  for (const type of types) {
    for (let i = 0; i <= 8; ++i) {
      expect(mediaSourceEngine.segments[type][i]).toBeFalsy();
    }
    for (let i = 9; i <= 13; ++i) {
      expect(mediaSourceEngine.segments[type][i]).toBeTruthy();
    }
  }
});
// Start the playhead in the first Period but pass start() Streams from the
// second Period.
it('plays from 1st Period when passed Streams from 2nd', function() {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
onStartupComplete.and.callFake(function() {
setupFakeGetTime(0);
});
onChooseStreams.and.callFake(function(period) {
expect(period).toBe(manifest.periods[0]);
// Start with Streams from the second Period even though the playhead is
// in the first Period. onChooseStreams() should be called again for the
// first Period and then eventually for the second Period.
onChooseStreams.and.callFake(function(period) {
expect(period).toBe(manifest.periods[0]);
onChooseStreams.and.callFake(function(period) {
expect(period).toBe(manifest.periods[1]);
return defaultOnChooseStreams(period);
});
return defaultOnChooseStreams(period);
});
// Deliberately return the *second* Period's Streams on the first call.
return {variant: variant2, text: textStream2};
});
streamingEngine.start();
runTest();
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, true],
video: [false, true],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, true, true],
video: [true, true, true, true],
text: [true, true, true, true],
});
});
// Start the playhead in the second Period but pass start() Streams from the
// first Period.
it('plays from 2nd Period when passed Streams from 1st', function() {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
presentationTimeInSeconds = 20;
onStartupComplete.and.callFake(function() {
setupFakeGetTime(20);
});
onChooseStreams.and.callFake(function(period) {
expect(period).toBe(manifest.periods[1]);
// Start with Streams from the first Period even though the playhead is
// in the second Period. onChooseStreams() should be called again for the
// second Period.
onChooseStreams.and.callFake(function(period) {
expect(period).toBe(manifest.periods[1]);
// No further onChooseStreams() calls are expected.
onChooseStreams.and.throwError(new Error());
return defaultOnChooseStreams(period);
});
// Deliberately return the *first* Period's Streams on the first call.
return {variant: variant1, text: textStream1};
});
streamingEngine.start();
runTest();
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [false, true],
video: [false, true],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [false, false, true, true],
video: [false, false, true, true],
text: [false, false, true, true],
});
});
it('plays when a small gap is present at the beginning', () => {
  const drift = 0.050;  // 50 ms
  setupVod();
  mediaSourceEngine =
      new shaka.test.FakeMediaSourceEngine(segmentData, drift);
  createStreamingEngine();
  // Here we go!
  onChooseStreams.and.callFake((period) => defaultOnChooseStreams(period));
  streamingEngine.start();
  runTest();
  // Startup must complete despite the small gap before the first segment.
  expect(onStartupComplete).toHaveBeenCalled();
});
it('plays when 1st Period doesn\'t have text streams', () => {
  setupVod();
  manifest.periods[0].textStreams = [];
  mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
  createStreamingEngine();
  onStartupComplete.and.callFake(() => setupFakeGetTime(0));
  onChooseStreams.and.callFake((period) => {
    const chosen = defaultOnChooseStreams(period);
    // Choose no text for the first Period only.
    if (period == manifest.periods[0]) {
      chosen.text = null;
    }
    return chosen;
  });
  // Here we go!
  streamingEngine.start();
  runTest();
  // Text should only be buffered for the second Period.
  expect(mediaSourceEngine.segments).toEqual({
    audio: [true, true, true, true],
    video: [true, true, true, true],
    text: [false, false, true, true],
  });
});
it('doesn\'t get stuck when 2nd Period isn\'t available yet', function() {
// See: https://github.com/google/shaka-player/pull/839
setupVod();
manifest.periods[0].textStreams = [];
// For the first update, indicate the segment isn't available. This should
// not cause us to fallback to the Playhead time to determine which segment
// to start streaming.
// Monkey-patch getSegmentReference: fail only the very first lookup
// (position 1), then restore the original implementation.
let oldGet = textStream2.getSegmentReference;
textStream2.getSegmentReference = function(idx) {
if (idx == 1) {
textStream2.getSegmentReference = oldGet;
return null;
}
return oldGet(idx);
};
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
onStartupComplete.and.callFake(setupFakeGetTime.bind(null, 0));
onChooseStreams.and.callFake(function(period) {
let chosen = defaultOnChooseStreams(period);
if (period == manifest.periods[0]) {
chosen.text = null;
}
return chosen;
});
// Here we go!
streamingEngine.start();
runTest();
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, true, true],
video: [true, true, true, true],
text: [false, false, true, true],
});
});
it('only reinitializes text when switching streams', function() {
// See: https://github.com/google/shaka-player/issues/910
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
createStreamingEngine();
onStartupComplete.and.callFake(setupFakeGetTime.bind(null, 0));
onChooseStreams.and.callFake(defaultOnChooseStreams);
// When we can switch in the second Period, switch to the playing stream.
onCanSwitch.and.callFake(function() {
// First call (first Period): install the check for the second Period.
onCanSwitch.and.callFake(function() {
expect(streamingEngine.getBufferingText()).toBe(textStream2);
mediaSourceEngine.reinitText.calls.reset();
// Switching to the stream that is already playing must be a no-op.
streamingEngine.switchTextStream(textStream2);
});
});
// Here we go!
streamingEngine.start();
runTest();
expect(mediaSourceEngine.reinitText).not.toHaveBeenCalled();
});
it('plays when 2nd Period doesn\'t have text streams', () => {
  setupVod();
  manifest.periods[1].textStreams = [];
  mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
  createStreamingEngine();
  onStartupComplete.and.callFake(() => setupFakeGetTime(0));
  onChooseStreams.and.callFake((period) => {
    const chosen = defaultOnChooseStreams(period);
    // Choose no text for the second Period only.
    if (period == manifest.periods[1]) {
      chosen.text = null;
    }
    return chosen;
  });
  // Here we go!
  streamingEngine.start();
  runTest();
  // Text should only be buffered for the first Period.
  expect(mediaSourceEngine.segments).toEqual({
    audio: [true, true, true, true],
    video: [true, true, true, true],
    text: [true, true, false, false],
  });
});
it('updates the timeline duration to match media duration', () => {
  setupVod();
  mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
  createStreamingEngine();
  onStartupComplete.and.callFake(() => setupFakeGetTime(0));
  onChooseStreams.and.callFake(defaultOnChooseStreams);
  const manifestDuration = 40;
  const mediaDuration = 35;
  mediaSourceEngine.endOfStream.and.callFake(() => {
    expect(mediaSourceEngine.setDuration)
        .toHaveBeenCalledWith(manifestDuration);
    expect(mediaSourceEngine.setDuration).toHaveBeenCalledTimes(1);
    mediaSourceEngine.setDuration.calls.reset();
    // Simulate the media ending BEFORE the expected (manifest) duration.
    mediaSourceEngine.getDuration.and.returnValue(mediaDuration);
    return Promise.resolve();
  });
  // Here we go!
  streamingEngine.start();
  runTest();
  expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
  // The timeline should shrink to the (shorter) media duration.
  expect(timeline.setDuration).toHaveBeenCalledWith(mediaDuration);
});
// https://github.com/google/shaka-player/issues/979
it('does not expand the timeline duration', () => {
  setupVod();
  mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
  createStreamingEngine();
  onStartupComplete.and.callFake(() => setupFakeGetTime(0));
  onChooseStreams.and.callFake(defaultOnChooseStreams);
  const manifestDuration = 40;
  mediaSourceEngine.endOfStream.and.callFake(() => {
    expect(mediaSourceEngine.setDuration)
        .toHaveBeenCalledWith(manifestDuration);
    expect(mediaSourceEngine.setDuration).toHaveBeenCalledTimes(1);
    mediaSourceEngine.setDuration.calls.reset();
    // Simulate the media ending AFTER the expected (manifest) duration.
    mediaSourceEngine.getDuration.and.returnValue(41);
    return Promise.resolve();
  });
  // Here we go!
  streamingEngine.start();
  runTest();
  expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
  // A longer media duration must NOT be written back to the timeline.
  expect(timeline.setDuration).not.toHaveBeenCalled();
});
// https://github.com/google/shaka-player/issues/1967
it('does not change duration when 0', () => {
  setupVod();
  mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
  createStreamingEngine();
  onStartupComplete.and.callFake(() => setupFakeGetTime(0));
  onChooseStreams.and.callFake(defaultOnChooseStreams);
  // A media duration of 0 can be reported spuriously; it must be ignored
  // rather than written back to the timeline.
  mediaSourceEngine.getDuration.and.returnValue(0);
  // Here we go!
  streamingEngine.start();
  runTest();
  expect(mediaSourceEngine.endOfStream).toHaveBeenCalled();
  expect(timeline.setDuration).not.toHaveBeenCalled();
});
it('applies fudge factor for appendWindowStart', () => {
  setupVod();
  mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
  createStreamingEngine();
  onStartupComplete.and.callFake(() => setupFakeGetTime(0));
  onChooseStreams.and.callFake(defaultOnChooseStreams);
  // Here we go!
  streamingEngine.start();
  runTest();
  // The second Period starts at 20, so the appendWindowStart should be 20
  // reduced by a small fudge factor.
  const justUnder20 = {
    asymmetricMatch: (val) => val >= 19.9 && val < 20,
  };
  expect(mediaSourceEngine.setStreamProperties)
      .toHaveBeenCalledWith('video', 20, justUnder20, 40);
});
it('does not buffer one media type ahead of another', function() {
setupVod();
mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
const config = shaka.util.PlayerConfiguration.createDefault().streaming;
config.bufferingGoal = 60;
config.failureCallback = () => streamingEngine.retry();
createStreamingEngine(config);
// Make requests for different types take different amounts of time.
// This would let some media types buffer faster than others if unchecked.
netEngine.delays.text = 0.1;
netEngine.delays.audio = 1.0;
netEngine.delays.video = 10.0;
// Wrap appendBuffer so the buffering invariant is asserted on every append.
mediaSourceEngine.appendBuffer.and.callFake((type, data, start, end) => {
// Call to the underlying implementation.
const p = mediaSourceEngine.appendBufferImpl(type, data, start, end);
// Validate that no one media type got ahead of any other.
let minBuffered = Infinity;
let maxBuffered = 0;
['audio', 'video', 'text'].forEach((t) => {
const buffered = mediaSourceEngine.bufferedAheadOfImpl(t, 0);
minBuffered = Math.min(minBuffered, buffered);
maxBuffered = Math.max(maxBuffered, buffered);
});
// Sanity check.
expect(maxBuffered).not.toBeLessThan(minBuffered);
// Proof that we didn't get too far ahead (10s == 1 segment).
expect(maxBuffered - minBuffered).not.toBeGreaterThan(10);
return p;
});
// Here we go!
onStartupComplete.and.callFake(setupFakeGetTime.bind(null, 0));
onChooseStreams.and.callFake(defaultOnChooseStreams);
streamingEngine.start();
runTest();
// Make sure appendBuffer was called, so that we know that we executed the
// checks in our fake above.
expect(mediaSourceEngine.appendBuffer).toHaveBeenCalled();
});
describe('switchVariant/switchTextStream', function() {
  let initialVariant;
  let sameAudioVariant;  // shares audio stream 10 with initialVariant
  let sameVideoVariant;  // shares video stream 12 with sameAudioVariant
  let initialTextStream;

  beforeEach(function() {
    // Set up a manifest with multiple variants and a text stream.
    manifest = new shaka.test.ManifestGenerator()
        .addPeriod(0)
          .addVariant(0)
            .addAudio(10).useSegmentTemplate('audio-10-%d.mp4', 10)
            .addVideo(11).useSegmentTemplate('video-11-%d.mp4', 10)
          .addVariant(1)
            .addExistingStream(10)  // audio
            .addVideo(12).useSegmentTemplate('video-12-%d.mp4', 10)
          .addVariant(2)
            .addAudio(13).useSegmentTemplate('audio-13-%d.mp4', 10)
            .addExistingStream(12)  // video
          .addTextStream(20).useSegmentTemplate('text-20-%d.mp4', 10)
        .build();

    initialVariant = manifest.periods[0].variants[0];
    sameAudioVariant = manifest.periods[0].variants[1];
    sameVideoVariant = manifest.periods[0].variants[2];
    initialTextStream = manifest.periods[0].textStreams[0];

    // For these tests, we don't care about specific data appended.
    // Just return any old ArrayBuffer for any requested segment.
    netEngine = {
      request: function(requestType, request) {
        let buffer = new ArrayBuffer(0);
        let response = {uri: request.uris[0], data: buffer, headers: {}};
        return shaka.util.AbortableOperation.completed(response);
      },
    };

    // For these tests, we also don't need FakeMediaSourceEngine to verify
    // its input data.
    mediaSourceEngine = new shaka.test.FakeMediaSourceEngine({});
    mediaSourceEngine.clear.and.returnValue(Promise.resolve());
    mediaSourceEngine.bufferedAheadOf.and.returnValue(0);
    mediaSourceEngine.bufferStart.and.returnValue(0);
    mediaSourceEngine.setStreamProperties.and.returnValue(Promise.resolve());
    mediaSourceEngine.remove.and.returnValue(Promise.resolve());

    // Track the furthest appended end time per type so the buffered-range
    // fakes below can answer queries consistently.
    let bufferEnd = {audio: 0, video: 0, text: 0};
    mediaSourceEngine.appendBuffer.and.callFake(
        function(type, data, start, end) {
          bufferEnd[type] = end;
          return Promise.resolve();
        });
    mediaSourceEngine.bufferEnd.and.callFake(function(type) {
      return bufferEnd[type];
    });
    // NOTE: this callFake supersedes the returnValue(0) stub set above.
    mediaSourceEngine.bufferedAheadOf.and.callFake(function(type, start) {
      return Math.max(0, bufferEnd[type] - start);
    });
    mediaSourceEngine.isBuffered.and.callFake(function(type, time) {
      return time >= 0 && time < bufferEnd[type];
    });

    playing = false;
    presentationTimeInSeconds = 0;
    createStreamingEngine();

    onStartupComplete.and.callFake(setupFakeGetTime.bind(null, 0));
    onChooseStreams.and.callFake(function() {
      return {variant: initialVariant, text: initialTextStream};
    });
  });

  it('will not clear buffers if streams have not changed', function() {
    onCanSwitch.and.callFake(function() {
      // Same audio stream, different video: only video should be cleared.
      mediaSourceEngine.clear.calls.reset();
      streamingEngine.switchVariant(
          sameAudioVariant, /* clearBuffer */ true, /* safeMargin */ 0);
      Util.fakeEventLoop(1);
      expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('audio');
      expect(mediaSourceEngine.clear).toHaveBeenCalledWith('video');
      expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('text');

      // Same video stream, different audio: only audio should be cleared.
      mediaSourceEngine.clear.calls.reset();
      streamingEngine.switchVariant(
          sameVideoVariant, /* clearBuffer */ true, /* safeMargin */ 0);
      Util.fakeEventLoop(1);
      expect(mediaSourceEngine.clear).toHaveBeenCalledWith('audio');
      expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('video');
      expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('text');

      // "Switching" to the text stream already in use: nothing is cleared.
      mediaSourceEngine.clear.calls.reset();
      streamingEngine.switchTextStream(initialTextStream);
      Util.fakeEventLoop(1);
      expect(mediaSourceEngine.clear).not.toHaveBeenCalled();
    });

    streamingEngine.start().catch(fail);
    Util.fakeEventLoop(1);
    expect(onCanSwitch).toHaveBeenCalled();
  });
});
describe('handles seeks (VOD)', function() {
/** @type {!jasmine.Spy} */
let onTick;
beforeEach(() => {
  setupVod();
  mediaSourceEngine = new shaka.test.FakeMediaSourceEngine(segmentData);
  createStreamingEngine();
  onStartupComplete.and.callFake(() => setupFakeGetTime(0));

  // Per-test hook; individual tests replace this stub to run checks on
  // each simulated tick.
  onTick = jasmine.createSpy('onTick');
  onTick.and.stub();
});
it('into buffered regions', () => {
  onChooseStreams.and.callFake((period) => {
    expect(period).toBe(manifest.periods[0]);

    onChooseStreams.and.callFake((secondPeriod) => {
      expect(secondPeriod).toBe(manifest.periods[1]);

      // Seek backwards to a buffered region in the first Period.  With a
      // 5-second buffering goal and 10-second segments, the second segment
      // of this Period is required at t=6.  The engine then loads the next
      // Period without yet requiring its segments.
      expect(presentationTimeInSeconds).toBe(6);
      presentationTimeInSeconds -= 5;
      streamingEngine.seeked();

      // Even though the seek is backwards, Streams from the second Period
      // must still be returned here.
      return defaultOnChooseStreams(secondPeriod);
    });

    // Init the first Period.
    return defaultOnChooseStreams(period);
  });

  // Here we go!
  streamingEngine.start();
  runTest();

  // Verify buffers.
  expect(mediaSourceEngine.initSegments).toEqual({
    audio: [false, true],
    video: [false, true],
    text: [],
  });
  expect(mediaSourceEngine.segments).toEqual({
    audio: [true, true, true, true],
    video: [true, true, true, true],
    text: [true, true, true, true],
  });
});
it('into partially buffered regions in the same period', function() {
  // When seeking into a region within the same period, or changing
  // resolution, and after the seek some states are buffered and some
  // are unbuffered, StreamingEngine should only clear the unbuffered
  // states.
  onChooseStreams.and.callFake(function(period) {
    expect(period).toBe(manifest.periods[0]);
    onChooseStreams.and.callFake(function(period) {
      expect(period).toBe(manifest.periods[1]);
      mediaSourceEngine.endOfStream.and.callFake(function() {
        // Should have the first Period entirely buffered.
        expect(mediaSourceEngine.initSegments).toEqual({
          audio: [false, true],
          video: [false, true],
          text: [],
        });
        expect(mediaSourceEngine.segments).toEqual({
          audio: [true, true, true, true],
          video: [true, true, true, true],
          text: [true, true, true, true],
        });

        // Fake the audio buffer being removed.
        mediaSourceEngine.segments[ContentType.AUDIO] =
            [true, true, false, false];

        // Seek back into the second Period.
        expect(presentationTimeInSeconds).toBe(26);
        presentationTimeInSeconds -= 5;
        streamingEngine.seeked();

        // Re-stub endOfStream so any later call resolves without
        // re-running the checks above.
        mediaSourceEngine.endOfStream.and.returnValue(Promise.resolve());
        return Promise.resolve();
      });
      return defaultOnChooseStreams(period);
    });
    return defaultOnChooseStreams(period);
  });
  onStartupComplete.and.callFake(setupFakeGetTime.bind(null, 0));

  // Here we go!
  streamingEngine.start();
  runTest();

  // When seeking within the same period, clear the buffer of the
  // unbuffered streams.
  expect(mediaSourceEngine.clear).toHaveBeenCalledWith('audio');
  expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('video');
  expect(mediaSourceEngine.clear).not.toHaveBeenCalledWith('text');

  // Verify buffers.
  expect(mediaSourceEngine.initSegments).toEqual({
    audio: [false, true],
    video: [false, true],
    text: [],
  });
  expect(mediaSourceEngine.segments).toEqual({
    audio: [false, false, true, true],
    video: [true, true, true, true],
    text: [true, true, true, true],
  });
});
it('into buffered regions across Periods', () => {
  onChooseStreams.and.callFake((period) => {
    expect(period).toBe(manifest.periods[0]);

    onChooseStreams.and.callFake((secondPeriod) => {
      expect(secondPeriod).toBe(manifest.periods[1]);
      // There is no third Period, so any further call is a test failure.
      onChooseStreams.and.throwError(new Error());
      // Switch to the second Period.
      return defaultOnChooseStreams(secondPeriod);
    });

    mediaSourceEngine.endOfStream.and.callFake(() => {
      // With a 5-second buffering goal and 10-second segments, the last
      // segment is required at t=26, after which endOfStream() is called.
      // Seek backwards to a buffered region in the first Period.
      expect(presentationTimeInSeconds).toBe(26);
      presentationTimeInSeconds -= 20;
      streamingEngine.seeked();

      // Everything is already buffered, so nothing should be cleared.
      expect(mediaSourceEngine.clear).not.toHaveBeenCalled();
      return Promise.resolve();
    });

    // Init the first Period.
    return defaultOnChooseStreams(period);
  });

  // Here we go!
  streamingEngine.start();
  runTest();

  // Verify buffers.
  expect(mediaSourceEngine.initSegments).toEqual({
    audio: [false, true],
    video: [false, true],
    text: [],
  });
  expect(mediaSourceEngine.segments).toEqual({
    audio: [true, true, true, true],
    video: [true, true, true, true],
    text: [true, true, true, true],
  });
});
it('into unbuffered regions', function() {
onChooseStreams.and.callFake(function(period) {
expect(period).toBe(manifest.periods[0]);
onChooseStreams.and.throwError(new Error());
// Init the first Period.
return defaultOnChooseStreams(period);
});
onStartupComplete.and.callFake(function() {
setupFakeGetTime(0);
// Seek forward to an unbuffered region in the first Period.
expect(presentationTimeInSeconds).toBe(0);
presentationTimeInSeconds += 15;
streamingEngine.seeked();
onTick.and.callFake(function() {
// Verify that all buffers have been cleared.
expect(mediaSourceEngine.clear)
.toHaveBeenCalledWith(ContentType.AUDIO);
expect(mediaSourceEngine.clear)
.toHaveBeenCalledWith(ContentType.VIDEO);
expect(mediaSourceEngine.clear)
.toHaveBeenCalledWith(ContentType.TEXT);
onTick.and.stub();
});
onChooseStreams.and.callFake(function(period) {
expect(period).toBe(manifest.periods[1]);
// Verify buffers.
expect(mediaSourceEngine.initSegments).toEqual({
audio: [true, false],
video: [true, false],
text: [],
});
expect(mediaSourceEngine.segments).toEqual({
audio: [true, true, false, false],
video: [true, true, false, false],