// videocontext
// Version: (unspecified)
// A WebGL & HTML5 graph based video composition library
// 1,144 lines (1,027 loc) • 250 kB
// JavaScript (bundled webpack build)
// Webpack (v1-era) bundle bootstrap: a module cache plus __webpack_require__,
// which lazily evaluates modules from the array that follows. The exports of
// entry module 0 become the global `VideoContext`.
var VideoContext =
/******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ (function(module, exports, __webpack_require__) {
//Matthew Shotton, R&D User Experience,© BBC 2015
// Module 0 preamble: flag the exports object as an ES module so that
// _interopRequireDefault-style consumers return it unwrapped.
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
// Babel helper: installs `protoProps` on Constructor.prototype and
// `staticProps` on the constructor itself. Descriptors are forced
// non-enumerable and configurable; plain data descriptors are made writable.
var _createClass = (function () {
    function defineProperties(target, props) {
        props.forEach(function (descriptor) {
            descriptor.enumerable = descriptor.enumerable || false;
            descriptor.configurable = true;
            if ("value" in descriptor) descriptor.writable = true;
            Object.defineProperty(target, descriptor.key, descriptor);
        });
    }
    return function (Constructor, protoProps, staticProps) {
        if (protoProps) defineProperties(Constructor.prototype, protoProps);
        if (staticProps) defineProperties(Constructor, staticProps);
        return Constructor;
    };
})();
// Babel helper: adapt a CommonJS export so it can be consumed like an ES
// module. Real ES-module namespaces pass through untouched; anything else
// (including null/undefined) is wrapped as a synthetic default export.
function _interopRequireDefault(obj) {
    if (obj && obj.__esModule) {
        return obj;
    }
    return { "default": obj };
}
// Babel helper: guard that a transpiled class constructor was invoked with
// `new` (i.e. `instance` really is an instance of `Constructor`).
function _classCallCheck(instance, Constructor) {
    if (instance instanceof Constructor) {
        return;
    }
    throw new TypeError("Cannot call a class as a function");
}
// Bundled-module wiring: pull in the source nodes (video/image/canvas), the
// processing nodes (compositing/effect/transition), the destination node, the
// render graph, the video element cache, utils and the built-in definitions.
// The module ids are fixed by the webpack build.
var _SourceNodesVideonodeJs = __webpack_require__(1);
var _SourceNodesVideonodeJs2 = _interopRequireDefault(_SourceNodesVideonodeJs);
var _SourceNodesImagenodeJs = __webpack_require__(26);
var _SourceNodesImagenodeJs2 = _interopRequireDefault(_SourceNodesImagenodeJs);
var _SourceNodesCanvasnodeJs = __webpack_require__(27);
var _SourceNodesCanvasnodeJs2 = _interopRequireDefault(_SourceNodesCanvasnodeJs);
// Used directly for its SOURCENODESTATE enum (no default-interop wrapper).
var _SourceNodesSourcenodeJs = __webpack_require__(2);
var _ProcessingNodesCompositingnodeJs = __webpack_require__(28);
var _ProcessingNodesCompositingnodeJs2 = _interopRequireDefault(_ProcessingNodesCompositingnodeJs);
var _DestinationNodeDestinationnodeJs = __webpack_require__(31);
var _DestinationNodeDestinationnodeJs2 = _interopRequireDefault(_DestinationNodeDestinationnodeJs);
var _ProcessingNodesEffectnodeJs = __webpack_require__(32);
var _ProcessingNodesEffectnodeJs2 = _interopRequireDefault(_ProcessingNodesEffectnodeJs);
var _ProcessingNodesTransitionnodeJs = __webpack_require__(33);
var _ProcessingNodesTransitionnodeJs2 = _interopRequireDefault(_ProcessingNodesTransitionnodeJs);
var _rendergraphJs = __webpack_require__(34);
var _rendergraphJs2 = _interopRequireDefault(_rendergraphJs);
var _videoelementcacheJs = __webpack_require__(35);
var _videoelementcacheJs2 = _interopRequireDefault(_videoelementcacheJs);
var _utilsJs = __webpack_require__(3);
var _DefinitionsDefinitionsJs = __webpack_require__(4);
var _DefinitionsDefinitionsJs2 = _interopRequireDefault(_DefinitionsDefinitionsJs);
// Shared manager; contexts not in "manualUpdate" mode register with it in the
// constructor so they get ticked automatically.
var updateablesManager = new _utilsJs.UpdateablesManager();
/**
* VideoContext.
* @module VideoContext
*/
var VideoContext = (function () {
/**
* Initialise the VideoContext and render to the specific canvas. A 2nd parameter can be passed to the constructor which is a function that gets called if the VideoContext fails to initialise.
*
* @param {Canvas} canvas - the canvas element to render the output to.
* @param {function} initErrorCallback - a callback for if initialising the canvas failed.
* @param {Object} options - a number of custom options which can be set on the VideoContext, generally best left as default.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement, function(){console.error("Sorry, your browser doesn\'t support WebGL");});
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(10);
* ctx.play();
*
*/
function VideoContext(canvas, initErrorCallback) {
    // Optional third argument: options bag. NOTE(review): when the caller
    // supplies an options object it is mutated below (useVideoElementCache /
    // videoElementCacheSize defaults are written back into it).
    var options = arguments.length <= 2 || arguments[2] === undefined ? { "preserveDrawingBuffer": true, "manualUpdate": false, "endOnLastSourceEnd": true, useVideoElementCache: true, videoElementCacheSize: 6, webglContextAttributes: { preserveDrawingBuffer: true, alpha: false } } : arguments[2];
    _classCallCheck(this, VideoContext);
    this._canvas = canvas;
    var manualUpdate = false;
    // BUGFIX: the default must live on the private _endOnLastSourceEnd field,
    // which is what the _update() loop reads when deciding to fire "ended";
    // previously only the (otherwise unused) public `endOnLastSourceEnd` was
    // initialised, so the default was lost unless the option was passed
    // explicitly. The public property is kept for backwards compatibility.
    this._endOnLastSourceEnd = true;
    this.endOnLastSourceEnd = true;
    var webglContextAttributes = { preserveDrawingBuffer: true, alpha: false };
    if ("manualUpdate" in options) manualUpdate = options.manualUpdate;
    if ("endOnLastSourceEnd" in options) this._endOnLastSourceEnd = options.endOnLastSourceEnd;
    if ("webglContextAttributes" in options) webglContextAttributes = options.webglContextAttributes;
    // An alpha channel on the drawing buffer breaks compositing of the output.
    if (webglContextAttributes.alpha === undefined) webglContextAttributes.alpha = false;
    if (webglContextAttributes.alpha === true) {
        console.error("webglContextAttributes.alpha must be false for correct operation");
    }
    this._gl = canvas.getContext("experimental-webgl", webglContextAttributes);
    if (this._gl === null) {
        // WebGL unavailable: report, notify the caller, and leave the context
        // un-initialised.
        console.error("Failed to initialise WebGL.");
        if (initErrorCallback) initErrorCallback();
        return;
    }
    // Initialise the video element cache.
    // BUGFIX: use an explicit undefined check so a caller can actually disable
    // the cache with `useVideoElementCache: false` (the old truthiness check
    // overwrote an explicit false back to true).
    if (options.useVideoElementCache === undefined) options.useVideoElementCache = true;
    this._useVideoElementCache = options.useVideoElementCache;
    if (this._useVideoElementCache) {
        // NOTE(review): the default options object above uses a cache size of
        // 6, while this fallback uses 5 — preserved as-is; confirm intended.
        if (!options.videoElementCacheSize) options.videoElementCacheSize = 5;
        this._videoElementCache = new _videoelementcacheJs2["default"](options.videoElementCacheSize);
    }
    this._renderGraph = new _rendergraphJs2["default"]();
    this._sourceNodes = [];
    this._processingNodes = [];
    this._timeline = [];
    this._currentTime = 0;
    this._state = VideoContext.STATE.PAUSED;
    this._playbackRate = 1.0;
    this._sourcesPlaying = undefined;
    this._destinationNode = new _DestinationNodeDestinationnodeJs2["default"](this._gl, this._renderGraph);
    // Event callback registries; see registerCallback for event semantics.
    this._callbacks = new Map();
    this._callbacks.set("stalled", []);
    this._callbacks.set("update", []);
    this._callbacks.set("ended", []);
    this._callbacks.set("content", []);
    this._callbacks.set("nocontent", []);
    this._timelineCallbacks = [];
    // Unless the caller drives update() manually, register with the shared
    // update manager so this context is ticked automatically.
    if (!manualUpdate) {
        updateablesManager.register(this);
    }
}
//playing - all sources are active
//paused - all sources are paused
//stalled - one or more sources is unable to play
//ended - all sources have finished playing
//broken - the render graph is in a broken state
/**
* Register a callback to happen at a specific point in time.
* @param {number} time - the time at which to trigger the callback.
* @param {Function} func - the callback to register.
* @param {number} ordering - the order in which to call the callback if more than one is registered for the same time.
*/
_createClass(VideoContext, [{
key: "registerTimelineCallback",
value: function registerTimelineCallback(time, func) {
var ordering = arguments.length <= 2 || arguments[2] === undefined ? 0 : arguments[2];
this._timelineCallbacks.push({ "time": time, "func": func, "ordering": ordering });
}
/**
* Unregister a callback which happens at a specific point in time.
* @param {Function} func - the callback to unregister.
*/
}, {
key: "unregisterTimelineCallback",
value: function unregisterTimelineCallback(func) {
var toRemove = [];
var _iteratorNormalCompletion = true;
var _didIteratorError = false;
var _iteratorError = undefined;
try {
for (var _iterator = this._timelineCallbacks[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
var callback = _step.value;
if (callback.func === func) {
toRemove.push(callback);
}
}
} catch (err) {
_didIteratorError = true;
_iteratorError = err;
} finally {
try {
if (!_iteratorNormalCompletion && _iterator["return"]) {
_iterator["return"]();
}
} finally {
if (_didIteratorError) {
throw _iteratorError;
}
}
}
var _iteratorNormalCompletion2 = true;
var _didIteratorError2 = false;
var _iteratorError2 = undefined;
try {
for (var _iterator2 = toRemove[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
var callback = _step2.value;
var index = this._timelineCallbacks.indexOf(callback);
this._timelineCallbacks.splice(index, 1);
}
} catch (err) {
_didIteratorError2 = true;
_iteratorError2 = err;
} finally {
try {
if (!_iteratorNormalCompletion2 && _iterator2["return"]) {
_iterator2["return"]();
}
} finally {
if (_didIteratorError2) {
throw _iteratorError2;
}
}
}
}
/**
* Register a callback to listen to one of the following events: "stalled", "update", "ended", "content", "nocontent"
*
* "stalled" happens any time playback is stopped due to unavailable data for playing assets (i.e. video still loading).
* "update" is called any time a frame is rendered to the screen. "ended" is called once playback has finished
* (i.e. ctx.currentTime == ctx.duration). "content" is called at the start of a time region where there is content
* playing out of one or more sourceNodes. "nocontent" is called at the start of any time region where the
* VideoContext is still playing, but there are currently no actively playing sources.
*
* @param {String} type - the event to register against ("stalled", "update", or "ended").
* @param {Function} func - the callback to register.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* ctx.registerCallback("stalled", function(){console.log("Playback stalled");});
* ctx.registerCallback("update", function(){console.log("new frame");});
* ctx.registerCallback("ended", function(){console.log("Playback ended");});
*/
}, {
key: "registerCallback",
value: function registerCallback(type, func) {
if (!this._callbacks.has(type)) return false;
this._callbacks.get(type).push(func);
}
/**
* Remove a previously registered callback
*
* @param {Function} func - the callback to remove.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
*
* //the callback
* var updateCallback = function(){console.log("new frame")};
*
* //register the callback
* ctx.registerCallback("update", updateCallback);
* //then unregister it
* ctx.unregisterCallback(updateCallback);
*
*/
}, {
key: "unregisterCallback",
value: function unregisterCallback(func) {
var _iteratorNormalCompletion3 = true;
var _didIteratorError3 = false;
var _iteratorError3 = undefined;
try {
for (var _iterator3 = this._callbacks.values()[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
var funcArray = _step3.value;
var index = funcArray.indexOf(func);
if (index !== -1) {
funcArray.splice(index, 1);
return true;
}
}
} catch (err) {
_didIteratorError3 = true;
_iteratorError3 = err;
} finally {
try {
if (!_iteratorNormalCompletion3 && _iterator3["return"]) {
_iterator3["return"]();
}
} finally {
if (_didIteratorError3) {
throw _iteratorError3;
}
}
}
return false;
}
}, {
key: "_callCallbacks",
value: function _callCallbacks(type) {
var funcArray = this._callbacks.get(type);
var _iteratorNormalCompletion4 = true;
var _didIteratorError4 = false;
var _iteratorError4 = undefined;
try {
for (var _iterator4 = funcArray[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) {
var func = _step4.value;
func(this._currentTime);
}
} catch (err) {
_didIteratorError4 = true;
_iteratorError4 = err;
} finally {
try {
if (!_iteratorNormalCompletion4 && _iterator4["return"]) {
_iterator4["return"]();
}
} finally {
if (_didIteratorError4) {
throw _iteratorError4;
}
}
}
}
/**
* Get the canvas that the VideoContext is using.
*
* @return {HTMLElement} The canvas that the VideoContext is using.
* (NOTE: this doc block describes a canvas accessor that does not appear at
* this point in this build; kept for reference.)
*/
}, {
key: "play",
/**
* Start the VideoContext playing
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(10);
* ctx.play();
*/
value: function play() {
console.debug("VideoContext - playing");
//Initialise the video elemnt cache
if (this._videoElementCache) this._videoElementCache.init();
// set the state.
this._state = VideoContext.STATE.PLAYING;
return true;
}
/**
* Pause playback of the VideoContext
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(20);
* ctx.currentTime = 10; // seek 10 seconds in
* ctx.play();
* setTimeout(function(){ctx.pause();}, 1000); //pause playback after roughly one second.
*/
}, {
key: "pause",
value: function pause() {
console.debug("VideoContext - pausing");
this._state = VideoContext.STATE.PAUSED;
return true;
}
/**
* Create a new node representing a video source
*
* @param {string|Video} src - The URL or video element to create the video from.
* @param {number} [sourceOffset=0] - Offset into the start of the source video to start playing from.
* @param {number} [preloadTime=4] - How many seconds before the video is to be played to start loading it.
* @param {Object} [videoElementAttributes] - A dictionary of attributes to map onto the underlying video element.
* @return {VideoNode} A new video node.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
*
* @example
* var canvasElement = document.getElementById("canvas");
* var videoElement = document.getElementById("video");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video(videoElement);
*/
}, {
key: "video",
value: function video(src) {
var sourceOffset = arguments.length <= 1 || arguments[1] === undefined ? 0 : arguments[1];
var preloadTime = arguments.length <= 2 || arguments[2] === undefined ? 4 : arguments[2];
var videoElementAttributes = arguments.length <= 3 || arguments[3] === undefined ? {} : arguments[3];
var videoNode = new _SourceNodesVideonodeJs2["default"](src, this._gl, this._renderGraph, this._currentTime, this._playbackRate, sourceOffset, preloadTime, this._videoElementCache, videoElementAttributes);
this._sourceNodes.push(videoNode);
return videoNode;
}
/**
* @deprecated Use VideoContext.video() instead.
*/
}, {
key: "createVideoSourceNode",
value: function createVideoSourceNode(src) {
var sourceOffset = arguments.length <= 1 || arguments[1] === undefined ? 0 : arguments[1];
var preloadTime = arguments.length <= 2 || arguments[2] === undefined ? 4 : arguments[2];
var videoElementAttributes = arguments.length <= 3 || arguments[3] === undefined ? {} : arguments[3];
this._depricate("Warning: createVideoSourceNode will be depricated in v1.0, please switch to using VideoContext.video()");
return this.video(src, sourceOffset, preloadTime, videoElementAttributes);
}
/**
* Create a new node representing an image source
* @param {string|Image} src - The url or image element to create the image node from.
* @param {number} [preloadTime] - How long before a node is to be displayed to attempt to load it.
* @param {Object} [imageElementAttributes] - Any attributes to be given to the underlying image element.
* @return {ImageNode} A new image node.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var imageNode = ctx.image("image.png");
*
* @example
* var canvasElement = document.getElementById("canvas");
* var imageElement = document.getElementById("image");
* var ctx = new VideoContext(canvasElement);
* var imageNode = ctx.image(imageElement);
*/
}, {
key: "image",
value: function image(src) {
var preloadTime = arguments.length <= 1 || arguments[1] === undefined ? 4 : arguments[1];
var imageElementAttributes = arguments.length <= 2 || arguments[2] === undefined ? {} : arguments[2];
var imageNode = new _SourceNodesImagenodeJs2["default"](src, this._gl, this._renderGraph, this._currentTime, preloadTime, imageElementAttributes);
this._sourceNodes.push(imageNode);
return imageNode;
}
/**
* @deprecated Use VideoContext.image() instead.
*/
}, {
key: "createImageSourceNode",
value: function createImageSourceNode(src) {
var sourceOffset = arguments.length <= 1 || arguments[1] === undefined ? 0 : arguments[1];
var preloadTime = arguments.length <= 2 || arguments[2] === undefined ? 4 : arguments[2];
var imageElementAttributes = arguments.length <= 3 || arguments[3] === undefined ? {} : arguments[3];
this._depricate("Warning: createImageSourceNode will be depricated in v1.0, please switch to using VideoContext.image()");
return this.image(src, sourceOffset, preloadTime, imageElementAttributes);
}
/**
* Create a new node representing a canvas source
* @param {Canvas} src - The canvas element to create the canvas node from.
* @return {CanvasNode} A new canvas node.
*/
}, {
key: "canvas",
value: function canvas(_canvas) {
var canvasNode = new _SourceNodesCanvasnodeJs2["default"](_canvas, this._gl, this._renderGraph, this._currentTime);
this._sourceNodes.push(canvasNode);
return canvasNode;
}
/**
* @deprecated Use VideoContext.canvas() instead.
*/
}, {
key: "createCanvasSourceNode",
value: function createCanvasSourceNode(canvas) {
var sourceOffset = arguments.length <= 1 || arguments[1] === undefined ? 0 : arguments[1];
var preloadTime = arguments.length <= 2 || arguments[2] === undefined ? 4 : arguments[2];
this._depricate("Warning: createCanvasSourceNode will be depricated in v1.0, please switch to using VideoContext.canvas()");
return this.canvas(canvas, sourceOffset, preloadTime);
}
/**
* Create a new effect node.
* @param {Object} definition - this is an object defining the shaders, inputs, and properties of the compositing node to create. Builtin definitions can be found by accessing VideoContext.DEFINITIONS.
* @return {EffectNode} A new effect node created from the passed definition
*/
}, {
key: "effect",
value: function effect(definition) {
var effectNode = new _ProcessingNodesEffectnodeJs2["default"](this._gl, this._renderGraph, definition);
this._processingNodes.push(effectNode);
return effectNode;
}
/**
* @deprecated Use VideoContext.effect() instead.
*/
}, {
key: "createEffectNode",
value: function createEffectNode(definition) {
this._depricate("Warning: createEffectNode will be depricated in v1.0, please switch to using VideoContext.effect()");
return this.effect(definition);
}
/**
* Create a new compositing node.
*
* Compositing nodes are used for operations such as combining multiple video sources into a single track/connection for further processing in the graph.
*
* A compositing node is slightly different to other processing nodes in that it only has one input in its definition but can have unlimited connections made to it.
* The shader in the definition is run for each input in turn, drawing them to the output buffer. This means there can be no interaction between the separate inputs to a compositing node, as they are individually processed in separate shader passes.
*
* @param {Object} definition - this is an object defining the shaders, inputs, and properties of the compositing node to create. Builtin definitions can be found by accessing VideoContext.DEFINITIONS
*
* @return {CompositingNode} A new compositing node created from the passed definition.
*
* @example
*
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
*
* //A simple compositing node definition which just renders all the inputs to the output buffer.
* var combineDefinition = {
* vertexShader : "\
* attribute vec2 a_position;\
* attribute vec2 a_texCoord;\
* varying vec2 v_texCoord;\
* void main() {\
* gl_Position = vec4(vec2(2.0,2.0)*vec2(1.0, 1.0), 0.0, 1.0);\
* v_texCoord = a_texCoord;\
* }",
* fragmentShader : "\
* precision mediump float;\
* uniform sampler2D u_image;\
* uniform float a;\
* varying vec2 v_texCoord;\
* varying float v_progress;\
* void main(){\
* vec4 color = texture2D(u_image, v_texCoord);\
* gl_FragColor = color;\
* }",
* properties:{
* "a":{type:"uniform", value:0.0},
* },
* inputs:["u_image"]
* };
* //Create the node, passing in the definition.
* var trackNode = videoCtx.compositor(combineDefinition);
*
* //create two videos which will play at back to back
* var videoNode1 = ctx.video("video1.mp4");
* videoNode1.play(0);
* videoNode1.stop(10);
* var videoNode2 = ctx.video("video2.mp4");
* videoNode2.play(10);
* videoNode2.stop(20);
*
* //Connect the nodes to the combine node. This will give a single connection representing the two videos which can
* //be connected to other effects such as LUTs, chromakeyers, etc.
* videoNode1.connect(trackNode);
* videoNode2.connect(trackNode);
*
* //Don't do anything exciting, just connect it to the output.
* trackNode.connect(ctx.destination);
*
*/
}, {
key: "compositor",
value: function compositor(definition) {
var compositingNode = new _ProcessingNodesCompositingnodeJs2["default"](this._gl, this._renderGraph, definition);
this._processingNodes.push(compositingNode);
return compositingNode;
}
/**
* @deprecated Use VideoContext.compositor() instead.
*/
}, {
key: "createCompositingNode",
value: function createCompositingNode(definition) {
this._depricate("Warning: createCompositingNode will be depricated in v1.0, please switch to using VideoContext.compositor()");
return this.compositor(definition);
}
/**
* Create a new transition node.
*
* Transition nodes are a type of effect node which have parameters which can be changed as events on the timeline.
*
* For example a transition node which cross-fades between two videos could have a "mix" property which sets the
* progress through the transition. Rather than having to write your own code to adjust this property at specific
* points in time a transition node has a "transition" function which takes a startTime, stopTime, targetValue, and a
* propertyName (which will be "mix"). This will linearly interpolate the property from the current value to
* targetValue between the startTime and stopTime.
*
* @param {Object} definition - this is an object defining the shaders, inputs, and properties of the transition node to create.
* @return {TransitionNode} A new transition node created from the passed definition.
* @example
*
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
*
* //A simple cross-fade node definition which cross-fades between two videos based on the mix property.
* var crossfadeDefinition = {
* vertexShader : "\
* attribute vec2 a_position;\
* attribute vec2 a_texCoord;\
* varying vec2 v_texCoord;\
* void main() {\
* gl_Position = vec4(vec2(2.0,2.0)*a_position-vec2(1.0, 1.0), 0.0, 1.0);\
* v_texCoord = a_texCoord;\
* }",
* fragmentShader : "\
* precision mediump float;\
* uniform sampler2D u_image_a;\
* uniform sampler2D u_image_b;\
* uniform float mix;\
* varying vec2 v_texCoord;\
* varying float v_mix;\
* void main(){\
* vec4 color_a = texture2D(u_image_a, v_texCoord);\
* vec4 color_b = texture2D(u_image_b, v_texCoord);\
* color_a[0] *= mix;\
* color_a[1] *= mix;\
* color_a[2] *= mix;\
* color_a[3] *= mix;\
* color_b[0] *= (1.0 - mix);\
* color_b[1] *= (1.0 - mix);\
* color_b[2] *= (1.0 - mix);\
* color_b[3] *= (1.0 - mix);\
* gl_FragColor = color_a + color_b;\
* }",
* properties:{
* "mix":{type:"uniform", value:0.0},
* },
* inputs:["u_image_a","u_image_b"]
* };
*
* //Create the node, passing in the definition.
* var transitionNode = videoCtx.transition(crossfadeDefinition);
*
* //create two videos which will overlap by two seconds
* var videoNode1 = ctx.video("video1.mp4");
* videoNode1.play(0);
* videoNode1.stop(10);
* var videoNode2 = ctx.video("video2.mp4");
* videoNode2.play(8);
* videoNode2.stop(18);
*
* //Connect the nodes to the transistion node.
* videoNode1.connect(transitionNode);
* videoNode2.connect(transitionNode);
*
* //Set-up a transition which happens at the crossover point of the playback of the two videos
* transitionNode.transition(8,10,1.0,"mix");
*
* //Connect the transition node to the output
* transitionNode.connect(ctx.destination);
*
* //start playback
* ctx.play();
*/
}, {
key: "transition",
value: function transition(definition) {
var transitionNode = new _ProcessingNodesTransitionnodeJs2["default"](this._gl, this._renderGraph, definition);
this._processingNodes.push(transitionNode);
return transitionNode;
}
/**
* @deprecated Use VideoContext.transition() instead.
*/
}, {
key: "createTransitionNode",
value: function createTransitionNode(definition) {
this._depricate("Warning: createTransitionNode will be depricated in v1.0, please switch to using VideoContext.transition()");
return this.transition(definition);
}
}, {
key: "_isStalled",
value: function _isStalled() {
for (var i = 0; i < this._sourceNodes.length; i++) {
var sourceNode = this._sourceNodes[i];
if (!sourceNode._isReady()) {
return true;
}
}
return false;
}
/**
* This allows manual calling of the update loop of the videoContext.
*
* @param {Number} dt - The difference in seconds between this and the previous calling of update.
* @example
*
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement, undefined, {"manualUpdate" : true});
*
* var previousTime;
* function update(time){
* if (previousTime === undefined) previousTime = time;
* var dt = (time - previousTime)/1000;
* ctx.update(dt);
* previousTime = time;
* requestAnimationFrame(update);
* }
* requestAnimationFrame(update);
*
*/
}, {
key: "update",
// Public entry point for manual updating (see the "manualUpdate" option);
// simply forwards the elapsed-seconds delta to the internal tick.
value: function update(dt) {
this._update(dt);
}
}, {
key: "_update",
// Core per-frame tick. `dt` is elapsed wall-clock seconds since the previous
// call; playback time advances by dt * playbackRate. In order: dead-node
// cleanup, event/timeline callbacks, state transitions, source-node playback
// sync, then a topological render pass over the processing graph.
value: function _update(dt) {
//Remove any destroyed nodes
// (the filter callbacks return the node itself — truthy — rather than an
// explicit boolean; destroyed nodes fall through to undefined and are dropped)
this._sourceNodes = this._sourceNodes.filter(function (sourceNode) {
if (!sourceNode.destroyed) return sourceNode;
});
this._processingNodes = this._processingNodes.filter(function (processingNode) {
if (!processingNode.destroyed) return processingNode;
});
if (this._state === VideoContext.STATE.PLAYING || this._state === VideoContext.STATE.STALLED || this._state === VideoContext.STATE.PAUSED) {
this._callCallbacks("update");
// While not paused, re-derive PLAYING vs STALLED from source readiness.
if (this._state !== VideoContext.STATE.PAUSED) {
if (this._isStalled()) {
this._callCallbacks("stalled");
this._state = VideoContext.STATE.STALLED;
} else {
this._state = VideoContext.STATE.PLAYING;
}
}
if (this._state === VideoContext.STATE.PLAYING) {
//Handle timeline callbacks.
// Collect callbacks whose trigger time falls within this tick's interval,
// grouped by trigger time. NOTE(review): the lower bound reads the public
// `this.currentTime` getter while the upper bound uses `this._currentTime`
// — presumably equivalent; confirm the getter just returns _currentTime.
var activeCallbacks = new Map();
var _iteratorNormalCompletion5 = true;
var _didIteratorError5 = false;
var _iteratorError5 = undefined;
try {
for (var _iterator5 = this._timelineCallbacks[Symbol.iterator](), _step5; !(_iteratorNormalCompletion5 = (_step5 = _iterator5.next()).done); _iteratorNormalCompletion5 = true) {
var callback = _step5.value;
if (callback.time >= this.currentTime && callback.time < this._currentTime + dt * this._playbackRate) {
//group the callbacks by time
if (!activeCallbacks.has(callback.time)) activeCallbacks.set(callback.time, []);
activeCallbacks.get(callback.time).push(callback);
}
}
//Sort the groups of callbacks by the times of the groups
} catch (err) {
_didIteratorError5 = true;
_iteratorError5 = err;
} finally {
try {
if (!_iteratorNormalCompletion5 && _iterator5["return"]) {
_iterator5["return"]();
}
} finally {
if (_didIteratorError5) {
throw _iteratorError5;
}
}
}
var timeIntervals = Array.from(activeCallbacks.keys());
timeIntervals.sort(function (a, b) {
return a - b;
});
// Fire each group in time order; within a group, in ascending `ordering`.
var _iteratorNormalCompletion6 = true;
var _didIteratorError6 = false;
var _iteratorError6 = undefined;
try {
for (var _iterator6 = timeIntervals[Symbol.iterator](), _step6; !(_iteratorNormalCompletion6 = (_step6 = _iterator6.next()).done); _iteratorNormalCompletion6 = true) {
var t = _step6.value;
var callbacks = activeCallbacks.get(t);
callbacks.sort(function (a, b) {
return a.ordering - b.ordering;
});
var _iteratorNormalCompletion7 = true;
var _didIteratorError7 = false;
var _iteratorError7 = undefined;
try {
for (var _iterator7 = callbacks[Symbol.iterator](), _step7; !(_iteratorNormalCompletion7 = (_step7 = _iterator7.next()).done); _iteratorNormalCompletion7 = true) {
var callback = _step7.value;
callback.func();
}
} catch (err) {
_didIteratorError7 = true;
_iteratorError7 = err;
} finally {
try {
if (!_iteratorNormalCompletion7 && _iterator7["return"]) {
_iterator7["return"]();
}
} finally {
if (_didIteratorError7) {
throw _iteratorError7;
}
}
}
}
} catch (err) {
_didIteratorError6 = true;
_iteratorError6 = err;
} finally {
try {
if (!_iteratorNormalCompletion6 && _iterator6["return"]) {
_iterator6["return"]();
}
} finally {
if (_didIteratorError6) {
throw _iteratorError6;
}
}
}
// Advance the playback clock and detect the end of the timeline.
this._currentTime += dt * this._playbackRate;
if (this._currentTime > this.duration && this._endOnLastSourceEnd) {
//Do an update of the sourcenodes in case anything in the "ended" callbacks modifies currentTime and sources haven't had a chance to stop.
for (var i = 0; i < this._sourceNodes.length; i++) {
this._sourceNodes[i]._update(this._currentTime);
}
this._state = VideoContext.STATE.ENDED;
this._callCallbacks("ended");
}
}
// Synchronise each source node's play/pause state with the context state and
// record whether anything is currently supplying content.
var sourcesPlaying = false;
for (var i = 0; i < this._sourceNodes.length; i++) {
var sourceNode = this._sourceNodes[i];
if (this._state === VideoContext.STATE.STALLED) {
// While stalled, pause the sources that ARE ready so they stay in sync.
if (sourceNode._isReady() && sourceNode._state === _SourceNodesSourcenodeJs.SOURCENODESTATE.playing) sourceNode._pause();
}
if (this._state === VideoContext.STATE.PAUSED) {
sourceNode._pause();
}
if (this._state === VideoContext.STATE.PLAYING) {
sourceNode._play();
}
sourceNode._update(this._currentTime);
if (sourceNode._state === _SourceNodesSourcenodeJs.SOURCENODESTATE.paused || sourceNode._state === _SourceNodesSourcenodeJs.SOURCENODESTATE.playing) {
sourcesPlaying = true;
}
}
// Fire "content"/"nocontent" on transitions of the sources-playing flag.
if (sourcesPlaying !== this._sourcesPlaying && this._state === VideoContext.STATE.PLAYING) {
if (sourcesPlaying === true) {
this._callCallbacks("content");
} else {
this._callCallbacks("nocontent");
}
this._sourcesPlaying = sourcesPlaying;
}
/*
* Iterate the directed acyclic graph using Kahn's algorithm (KHAAAAAN!).
*
* This has highlighted a bunch of inefficiencies in the rendergraph class about how it stores connections.
* Mainly the fact that to get inputs for a node you have to iterate the full list of connections rather than
* a node owning its connections.
* The trade off with changing this is making/removing connections becomes more costly performance wise, but
* this is definitely worthwhile because getting the connections is a much more common operation.
*
* TL;DR Future matt - refactor this.
*
*/
var sortedNodes = [];
var connections = this._renderGraph.connections.slice();
var nodes = _rendergraphJs2["default"].getInputlessNodes(connections);
while (nodes.length > 0) {
var node = nodes.pop();
sortedNodes.push(node);
// Remove this node's outgoing edges from the working copy; any destination
// left with no remaining inputs becomes ready to visit.
var _iteratorNormalCompletion8 = true;
var _didIteratorError8 = false;
var _iteratorError8 = undefined;
try {
for (var _iterator8 = _rendergraphJs2["default"].outputEdgesFor(node, connections)[Symbol.iterator](), _step8; !(_iteratorNormalCompletion8 = (_step8 = _iterator8.next()).done); _iteratorNormalCompletion8 = true) {
var edge = _step8.value;
var index = connections.indexOf(edge);
if (index > -1) connections.splice(index, 1);
if (_rendergraphJs2["default"].inputEdgesFor(edge.destination, connections).length === 0) {
nodes.push(edge.destination);
}
}
} catch (err) {
_didIteratorError8 = true;
_iteratorError8 = err;
} finally {
try {
if (!_iteratorNormalCompletion8 && _iterator8["return"]) {
_iterator8["return"]();
}
} finally {
if (_didIteratorError8) {
throw _iteratorError8;
}
}
}
}
// Update and render every non-source node in topological order (source nodes
// were already updated in the sync loop above).
var _iteratorNormalCompletion9 = true;
var _didIteratorError9 = false;
var _iteratorError9 = undefined;
try {
for (var _iterator9 = sortedNodes[Symbol.iterator](), _step9; !(_iteratorNormalCompletion9 = (_step9 = _iterator9.next()).done); _iteratorNormalCompletion9 = true) {
var node = _step9.value;
if (this._sourceNodes.indexOf(node) === -1) {
node._update(this._currentTime);
node._render();
}
}
} catch (err) {
_didIteratorError9 = true;
_iteratorError9 = err;
} finally {
try {
if (!_iteratorNormalCompletion9 && _iterator9["return"]) {
_iterator9["return"]();
}
} finally {
if (_didIteratorError9) {
throw _iteratorError9;
}
}
}
}
}
/**
* Destroy all nodes in the graph and reset the timeline. After calling this any created nodes will be unusable.
*/
}, {
key: "reset",
value: function reset() {
var _iteratorNormalCompletion10 = true;
var _didIteratorError10 = false;
var _iteratorError10 = undefined;
try {
for (var _iterator10 = this._callbacks[Symbol.iterator](), _step10; !(_iteratorNormalCompletion10 = (_step10 = _iterator10.next()).done); _iteratorNormalCompletion10 = true) {
var callback = _step10.value;
this.unregisterCallback(callback);
}
} catch (err) {
_didIteratorError10 = true;
_iteratorError10 = err;
} finally {
try {
if (!_iteratorNormalCompletion10 && _iterator10["return"]) {
_iterator10["return"]();
}
} finally {
if (_didIteratorError10) {
throw _iteratorError10;
}
}
}
var _iteratorNormalCompletion11 = true;
var _didIteratorError11 = false;
var _iteratorError11 = undefined;
try {
for (var _iterator11 = this._sourceNodes[Symbol.iterator](), _step11; !(_iteratorNormalCompletion11 = (_step11 = _iterator11.next()).done); _iteratorNormalCompletion11 = true) {
var node = _step11.value;
node.destroy();
}
} catch (err) {
_didIteratorError11 = true;
_iteratorError11 = err;
} finally {
try {
if (!_iteratorNormalCompletion11 && _iterator11["return"]) {
_iterator11["return"]();
}
} finally {
if (_didIteratorError11) {
throw _iteratorError11;
}
}
}
var _iteratorNormalCompletion12 = true;
var _didIteratorError12 = false;
var _iteratorError12 = undefined;
try {
for (var _iterator12 = this._processingNodes[Symbol.iterator](), _step12; !(_iteratorNormalCompletion12 = (_step12 = _iterator12.next()).done); _iteratorNormalCompletion12 = true) {
var node = _step12.value;
node.destroy();
}
} catch (err) {
_didIteratorError12 = true;
_iteratorError12 = err;
} finally {
try {
if (!_iteratorNormalCompletion12 && _iterator12["return"]) {
_iterator12["return"]();
}
} finally {
if (_didIteratorError12) {
throw _iteratorError12;
}
}
}
this._update(0);
this._sourceNodes = [];
this._processingNodes = [];
this._timeline = [];
this._currentTime = 0;
this._state = VideoContext.STATE.PAUSED;
this._playbackRate = 1.0;
this._sourcesPlaying = undefined;
this._callbacks.set("stalled", []);
this._callbacks.set("update", []);
this._callbacks.set("ended", []);
this._callbacks.set("content", []);
this._callbacks.set("nocontent", []);
this._timelineCallbacks = [];
}
}, {
key: "_depricate",
value: function _depricate(msg) {
    // Internal helper: emit a notice when a deprecated API entry point is
    // used. (The name is a misspelling of "deprecate", but it is referenced
    // by this name elsewhere, so it must not be renamed here.)
    // NOTE(review): logs via console.log rather than console.warn — confirm
    // this is intentional before changing, as it affects console filtering.
    console.log(msg);
}
}, {
key: "element",
get: function get() {
    // Return the canvas element this VideoContext renders into.
    return this._canvas;
}
/**
* Get the current state.
*
* This will be either
* - VideoContext.STATE.PLAYING: current sources on timeline are active
* - VideoContext.STATE.PAUSED: all sources are paused
* - VideoContext.STATE.STALLED: one or more sources is unable to play
* - VideoContext.STATE.ENDED: all sources have finished playing
* - VideoContext.STATE.BROKEN: the render graph is in a broken state
* @return {number} The number representing the state.
*
*/
}, {
key: "state",
get: function get() {
    // Return the internal state flag (one of the VideoContext.STATE.* values).
    return this._state;
}
/**
* Set the progress through the internal timeline.
* Setting this can be used as a way to implement a scrubbable timeline.
*
* @param {number} currentTime - this is the