/** [p5.sound] Version: 0.3.12 - 2020-01-06 */

/**
 *  <p>p5.sound extends p5 with <a href="http://caniuse.com/audio-api"
 *  target="_blank">Web Audio</a> functionality including audio input,
 *  playback, analysis and synthesis.
 *  </p>
 *  <ul>
 *  <li><a href="#/p5.SoundFile"><b>p5.SoundFile</b></a>: Load and play sound files.</li>
 *  <li><a href="#/p5.Amplitude"><b>p5.Amplitude</b></a>: Get the current volume of a sound.</li>
 *  <li><a href="#/p5.AudioIn"><b>p5.AudioIn</b></a>: Get sound from an input source, typically
 *    a computer microphone.</li>
 *  <li><a href="#/p5.FFT"><b>p5.FFT</b></a>: Analyze the frequency of sound. Returns
 *    results from the frequency spectrum or time domain (waveform).</li>
 *  <li><a href="#/p5.Oscillator"><b>p5.Oscillator</b></a>: Generate Sine,
 *    Triangle, Square and Sawtooth waveforms. Base class of
 *    <a href="#/p5.Noise">p5.Noise</a> and <a href="#/p5.Pulse">p5.Pulse</a>.
 *  </li>
 *  <li>
 *    <a href="#/p5.MonoSynth">p5.MonoSynth</a> and <a href="#/p5.PolySynth">p5.PolySynth</a>: Play musical notes
 *  </li>
 *  <li><a href="#/p5.Envelope"><b>p5.Envelope</b></a>: An Envelope is a series
 *    of fades over time. Often used to control an object's
 *    output gain level as an "ADSR Envelope" (Attack, Decay,
 *    Sustain, Release). Can also modulate other parameters.</li>
 *  <li><a href="#/p5.Delay"><b>p5.Delay</b></a>: A delay effect with
 *    parameters for feedback, delayTime, and lowpass filter.</li>
 *  <li><a href="#/p5.Filter"><b>p5.Filter</b></a>: Filter the frequency range of a
 *    sound.
 *  </li>
 *  <li><a href="#/p5.Reverb"><b>p5.Reverb</b></a>: Add reverb to a sound by specifying
 *    duration and decay.</li>
 *  <li><b><a href="#/p5.Convolver">p5.Convolver</a>:</b> Extends
 *    <a href="#/p5.Reverb">p5.Reverb</a> to simulate the sound of real
 *    physical spaces through convolution.</li>
 *  <li><b><a href="#/p5.SoundRecorder">p5.SoundRecorder</a></b>: Record sound for playback
 *    / save the .wav file.</li>
 *  <li><b><a href="#/p5.SoundLoop">p5.SoundLoop</a>, <a href="#/p5.Phrase">p5.Phrase</a></b>, <b><a href="#/p5.Part">p5.Part</a></b> and
 *    <b><a href="#/p5.Score">p5.Score</a></b>: Compose musical sequences.
 *  </li>
 *  <li><a href="#/p5/userStartAudio">userStartAudio</a>: Enable audio in a
 *    browser- and user-friendly way.</li>
 *  </ul>
 *  <p>p5.sound is on <a href="https://github.com/therewasaguy/p5.sound/">GitHub</a>.
 *  Download the latest version
 *  <a href="https://github.com/therewasaguy/p5.sound/blob/master/lib/p5.sound.js">here</a>.</p>
 *
 *  @module p5.sound
 *  @submodule p5.sound
 *  @for p5.sound
 *  @main
 */

/**
 *  p5.sound
 *  https://p5js.org/reference/#/libraries/p5.sound
 *
 *  From the Processing Foundation and contributors
 *  https://github.com/processing/p5.js-sound/graphs/contributors
 *
 *  MIT License (MIT)
 *  https://github.com/processing/p5.js-sound/blob/master/LICENSE
 *
 *  Some of the many audio libraries & resources that inspire p5.sound:
 *   - TONE.js (c) Yotam Mann. Licensed under The MIT License (MIT). https://github.com/TONEnoTONE/Tone.js
 *   - buzz.js (c) Jay Salvat. Licensed under The MIT License (MIT). http://buzz.jaysalvat.com/
 *   - Boris Smus Web Audio API book, 2013.
Licensed under the Apache License http://www.apache.org/licenses/LICENSE-2.0 * - wavesurfer.js https://github.com/katspaugh/wavesurfer.js * - Web Audio Components by Jordan Santell https://github.com/web-audio-components * - Wilm Thoben's Sound library for Processing https://github.com/processing/processing/tree/master/java/libraries/sound * * Web Audio API: http://w3.org/TR/webaudio/ */ (function(modules) { var installedModules = {}; function __webpack_require__(moduleId) { if(installedModules[moduleId]) { return installedModules[moduleId].exports; } var module = installedModules[moduleId] = { i: moduleId, l: false, exports: {} }; modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); module.l = true; return module.exports; } __webpack_require__.m = modules; __webpack_require__.c = installedModules; __webpack_require__.d = function(exports, name, getter) { if(!__webpack_require__.o(exports, name)) { Object.defineProperty(exports, name, { enumerable: true, get: getter }); } }; __webpack_require__.r = function(exports) { if(typeof Symbol !== 'undefined' && Symbol.toStringTag) { Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' }); } Object.defineProperty(exports, '__esModule', { value: true }); }; __webpack_require__.t = function(value, mode) { if(mode & 1) value = __webpack_require__(value); if(mode & 8) return value; if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value; var ns = Object.create(null); __webpack_require__.r(ns); Object.defineProperty(ns, 'default', { enumerable: true, value: value }); if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key)); return ns; }; __webpack_require__.n = function(module) { var getter = module && module.__esModule ? 
function getDefault() { return module['default']; } : function getModuleExports() { return module; }; __webpack_require__.d(getter, 'a', getter); return getter; }; __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; __webpack_require__.p = ""; return __webpack_require__(__webpack_require__.s = 31); }) ([ (function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_RESULT__ = (function(){"use strict";function a(t,e){this.isUndef(t)||1===t?this.input=this.context.createGain():1<t&&(this.input=new Array(t)),this.isUndef(e)||1===e?this.output=this.context.createGain():1<e&&(this.output=new Array(t))}var e;return a.prototype.set=function(t,e,n){if(this.isObject(t))n=e;else if(this.isString(t)){var o={};o[t]=e,t=o}t:for(var i in t){e=t[i];var r=this;if(-1!==i.indexOf(".")){for(var s=i.split("."),u=0;u<s.length-1;u++)if((r=r[s[u]])instanceof a){s.splice(0,u+1);var p=s.join(".");r.set(p,e);continue t}i=s[s.length-1]}var c=r[i];this.isUndef(c)||(a.Signal&&c instanceof a.Signal||a.Param&&c instanceof a.Param?c.value!==e&&(this.isUndef(n)?c.value=e:c.rampTo(e,n)):c instanceof AudioParam?c.value!==e&&(c.value=e):c instanceof a?c.set(e):c!==e&&(r[i]=e))}return this},a.prototype.get=function(t){this.isUndef(t)?t=this._collectDefaults(this.constructor):this.isString(t)&&(t=[t]);for(var e={},n=0;n<t.length;n++){var o=t[n],i=this,r=e;if(-1!==o.indexOf(".")){for(var s=o.split("."),u=0;u<s.length-1;u++){var p=s[u];r[p]=r[p]||{},r=r[p],i=i[p]}o=s[s.length-1]}var c=i[o];this.isObject(t[o])?r[o]=c.get():a.Signal&&c instanceof a.Signal?r[o]=c.value:a.Param&&c instanceof a.Param?r[o]=c.value:c instanceof AudioParam?r[o]=c.value:c instanceof a?r[o]=c.get():this.isFunction(c)||this.isUndef(c)||(r[o]=c)}return e},a.prototype._collectDefaults=function(t){var e=[];if(this.isUndef(t.defaults)||(e=Object.keys(t.defaults)),!this.isUndef(t._super))for(var n=this._collectDefaults(t._super),o=0;o<n.length;o++)-1===e.indexOf(n[o])&&e.push(n[o]);return e},a.prototype.toString=function(){for(var t in a){var e=t[0].match(/^[A-Z]$/),n=a[t]===this.constructor;if(this.isFunction(a[t])&&e&&n)return t}return"Tone"},Object.defineProperty(a.prototype,"numberOfInputs",{get:function(){return this.input?this.isArray(this.input)?this.input.length:1:0}}),Object.defineProperty(a.prototype,"numberOfOutputs",{get:function(){return this.output?this.isArray(this.output)?this.output.length:1:0}}),a.prototype.dispose=function(){return this.isUndef(this.input)||(this.input instanceof AudioNode&&this.input.disconnect(),this.input=null),this.isUndef(this.output)||(this.output instanceof AudioNode&&this.output.disconnect(),this.output=null),this},a.prototype.connect=function(t,e,n){return Array.isArray(this.output)?(e=this.defaultArg(e,0),this.output[e].connect(t,0,n)):this.output.connect(t,e,n),this},a.prototype.disconnect=function(t,e,n){this.isArray(this.output)?this.isNumber(t)?this.output[t].disconnect():(e=this.defaultArg(e,0),this.output[e].disconnect(t,0,n)):this.output.disconnect.apply(this.output,arguments)},a.prototype.connectSeries=function(){if(1<arguments.length)for(var t=arguments[0],e=1;e<arguments.length;e++){var n=arguments[e];t.connect(n),t=n}return this},a.prototype.chain=function(){if(0<arguments.length)for(var t=this,e=0;e<arguments.length;e++){var n=arguments[e];t.connect(n),t=n}return this},a.prototype.fan=function(){if(0<arguments.length)for(var t=0;t<arguments.length;t++)this.connect(arguments[t]);return 
this},AudioNode.prototype.chain=a.prototype.chain,AudioNode.prototype.fan=a.prototype.fan,a.prototype.defaultArg=function(t,e){if(this.isObject(t)&&this.isObject(e)){var n={};for(var o in t)n[o]=this.defaultArg(e[o],t[o]);for(var i in e)n[i]=this.defaultArg(t[i],e[i]);return n}return this.isUndef(t)?e:t},a.prototype.optionsObject=function(t,e,n){var o={};if(1===t.length&&this.isObject(t[0]))o=t[0];else for(var i=0;i<e.length;i++)o[e[i]]=t[i];return this.isUndef(n)?o:this.defaultArg(o,n)},a.prototype.isUndef=function(t){return void 0===t},a.prototype.isFunction=function(t){return"function"==typeof t},a.prototype.isNumber=function(t){return"number"==typeof t},a.prototype.isObject=function(t){return"[object Object]"===Object.prototype.toString.call(t)&&t.constructor===Object},a.prototype.isBoolean=function(t){return"boolean"==typeof t},a.prototype.isArray=function(t){return Array.isArray(t)},a.prototype.isString=function(t){return"string"==typeof t},a.noOp=function(){},a.prototype._readOnly=function(t){if(Array.isArray(t))for(var e=0;e<t.length;e++)this._readOnly(t[e]);else Object.defineProperty(this,t,{writable:!1,enumerable:!0})},a.prototype._writable=function(t){if(Array.isArray(t))for(var e=0;e<t.length;e++)this._writable(t[e]);else Object.defineProperty(this,t,{writable:!0})},a.State={Started:"started",Stopped:"stopped",Paused:"paused"},a.prototype.equalPowerScale=function(t){var e=.5*Math.PI;return Math.sin(t*e)},a.prototype.dbToGain=function(t){return Math.pow(2,t/6)},a.prototype.gainToDb=function(t){return Math.log(t)/Math.LN10*20},a.prototype.intervalToFrequencyRatio=function(t){return Math.pow(2,t/12)},a.prototype.now=function(){return a.context.now()},a.now=function(){return a.context.now()},a.extend=function(t,e){function n(){}a.prototype.isUndef(e)&&(e=a),n.prototype=e.prototype,t.prototype=new n,(t.prototype.constructor=t)._super=e},Object.defineProperty(a,"context",{get:function(){return e},set:function(t){e=a.Context&&t instanceof a.Context?t:new a.Context(t),a.Context&&a.Context.emit("init",e)}}),Object.defineProperty(a.prototype,"context",{get:function(){return a.context}}),a.setContext=function(t){a.context=t},Object.defineProperty(a.prototype,"blockTime",{get:function(){return 128/this.context.sampleRate}}),Object.defineProperty(a.prototype,"sampleTime",{get:function(){return 1/this.context.sampleRate}}),Object.defineProperty(a,"supported",{get:function(){var t=window.hasOwnProperty("AudioContext")||window.hasOwnProperty("webkitAudioContext"),e=window.hasOwnProperty("Promise"),n=window.hasOwnProperty("Worker");return t&&e&&n}}),a.version="r10",window.TONE_SILENCE_VERSION_LOGGING||console.log("%c * Tone.js "+a.version+" * ","background: #000; color: #fff"),a}).call(exports, __webpack_require__, exports, module), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { "use strict"; var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__; !(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(17)], __WEBPACK_AMD_DEFINE_RESULT__ = (function (audiocontext) { var Master = function Master() { this.input = audiocontext.createGain(); this.output = audiocontext.createGain(); this.limiter = audiocontext.createDynamicsCompressor(); this.limiter.threshold.value = -3; this.limiter.ratio.value = 20; this.limiter.knee.value = 1; this.audiocontext = audiocontext; this.output.disconnect(); this.input.connect(this.limiter); this.limiter.connect(this.output); this.meter = 
audiocontext.createGain(); this.fftMeter = audiocontext.createGain(); this.output.connect(this.meter); this.output.connect(this.fftMeter); this.output.connect(this.audiocontext.destination); this.soundArray = []; this.parts = []; this.extensions = []; }; var p5sound = new Master(); /** * Returns a number representing the master amplitude (volume) for sound * in this sketch. * * @method getMasterVolume * @return {Number} Master amplitude (volume) for sound in this sketch. * Should be between 0.0 (silence) and 1.0. */ p5.prototype.getMasterVolume = function () { return p5sound.output.gain.value; }; /** * <p>Scale the output of all sound in this sketch</p> * Scaled between 0.0 (silence) and 1.0 (full volume). * 1.0 is the maximum amplitude of a digital sound, so multiplying * by greater than 1.0 may cause digital distortion. To * fade, provide a <code>rampTime</code> parameter. For more * complex fades, see the Envelope class. * * Alternately, you can pass in a signal source such as an * oscillator to modulate the amplitude with an audio signal. * * <p><b>How This Works</b>: When you load the p5.sound module, it * creates a single instance of p5sound. All sound objects in this * module output to p5sound before reaching your computer's output. * So if you change the amplitude of p5sound, it impacts all of the * sound in this module.</p> * * <p>If no value is provided, returns a Web Audio API Gain Node</p> * * @method masterVolume * @param {Number|Object} volume Volume (amplitude) between 0.0 * and 1.0 or modulating signal/oscillator * @param {Number} [rampTime] Fade for t seconds * @param {Number} [timeFromNow] Schedule this event to happen at * t seconds in the future */ p5.prototype.masterVolume = function (vol, rampTime, tFromNow) { if (typeof vol === 'number') { var rampTime = rampTime || 0; var tFromNow = tFromNow || 0; var now = p5sound.audiocontext.currentTime; var currentVol = p5sound.output.gain.value; p5sound.output.gain.cancelScheduledValues(now + tFromNow); p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow); p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime); } else if (vol) { vol.connect(p5sound.output.gain); } else { return p5sound.output.gain; } }; /** * `p5.soundOut` is the p5.sound master output. It sends output to * the destination of this window's web audio context. It contains * Web Audio API nodes including a dyanmicsCompressor (<code>.limiter</code>), * and Gain Nodes for <code>.input</code> and <code>.output</code>. 
* * @property {Object} soundOut */ p5.prototype.soundOut = p5.soundOut = p5sound; p5.soundOut._silentNode = p5sound.audiocontext.createGain(); p5.soundOut._silentNode.gain.value = 0; p5.soundOut._silentNode.connect(p5sound.audiocontext.destination); return p5sound; }).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(5),__webpack_require__(8),__webpack_require__(22),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(n){"use strict";return n.Signal=function(){var t=this.optionsObject(arguments,["value","units"],n.Signal.defaults);this.output=this._gain=this.context.createGain(),t.param=this._gain.gain,n.Param.call(this,t),this.input=this._param=this._gain.gain,this.context.getConstant(1).chain(this._gain)},n.extend(n.Signal,n.Param),n.Signal.defaults={value:0,units:n.Type.Default,convert:!0},n.Signal.prototype.connect=n.SignalBase.prototype.connect,n.Signal.prototype.dispose=function(){return n.Param.prototype.dispose.call(this),this._param=null,this._gain.disconnect(),this._gain=null,this},n.Signal}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Multiply=function(t){this.createInsOuts(2,0),this._mult=this.input[0]=this.output=new i.Gain,this._param=this.input[1]=this.output.gain,this._param.value=this.defaultArg(t,0)},i.extend(i.Multiply,i.Signal),i.Multiply.prototype.dispose=function(){return i.prototype.dispose.call(this),this._mult.dispose(),this._mult=null,this._param=null,this},i.Multiply}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { "use strict"; var __WEBPACK_AMD_DEFINE_RESULT__; !(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) { var p5sound = __webpack_require__(1); var CrossFade = __webpack_require__(51); /** * Effect is a base class for audio effects in p5. <br> * This module handles the nodes and methods that are * common and useful for current and future effects. * * * This class is extended by <a href="/reference/#/p5.Distortion">p5.Distortion</a>, * <a href="/reference/#/p5.Compressor">p5.Compressor</a>, * <a href="/reference/#/p5.Delay">p5.Delay</a>, * <a href="/reference/#/p5.Filter">p5.Filter</a>, * <a href="/reference/#/p5.Reverb">p5.Reverb</a>. * * @class p5.Effect * @constructor * * @param {Object} [ac] Reference to the audio context of the p5 object * @param {AudioNode} [input] Gain Node effect wrapper * @param {AudioNode} [output] Gain Node effect wrapper * @param {Object} [_drywet] Tone.JS CrossFade node (defaults to value: 1) * @param {AudioNode} [wet] Effects that extend this class should connect * to the wet signal to this gain node, so that dry and wet * signals are mixed properly. 
*/ p5.Effect = function () { this.ac = p5sound.audiocontext; this.input = this.ac.createGain(); this.output = this.ac.createGain(); /** * The p5.Effect class is built * using Tone.js CrossFade * @private */ this._drywet = new CrossFade(1); /** * In classes that extend * p5.Effect, connect effect nodes * to the wet parameter */ this.wet = this.ac.createGain(); this.input.connect(this._drywet.a); this.wet.connect(this._drywet.b); this._drywet.connect(this.output); this.connect(); p5sound.soundArray.push(this); }; /** * Set the output volume of the filter. * * @method amp * @for p5.Effect * @param {Number} [vol] amplitude between 0 and 1.0 * @param {Number} [rampTime] create a fade that lasts until rampTime * @param {Number} [tFromNow] schedule this event to happen in tFromNow seconds */ p5.Effect.prototype.amp = function (vol, rampTime, tFromNow) { var rampTime = rampTime || 0; var tFromNow = tFromNow || 0; var now = p5sound.audiocontext.currentTime; var currentVol = this.output.gain.value; this.output.gain.cancelScheduledValues(now); this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow + .001); this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime + .001); }; /** * Link effects together in a chain * Example usage: filter.chain(reverb, delay, panner); * May be used with an open-ended number of arguments * * @method chain * @for p5.Effect * @param {Object} [arguments] Chain together multiple sound objects */ p5.Effect.prototype.chain = function () { if (arguments.length > 0) { this.connect(arguments[0]); for (var i = 1; i < arguments.length; i += 1) { arguments[i - 1].connect(arguments[i]); } } return this; }; /** * Adjust the dry/wet value. * * @method drywet * @for p5.Effect * @param {Number} [fade] The desired drywet value (0 - 1.0) */ p5.Effect.prototype.drywet = function (fade) { if (typeof fade !== "undefined") { this._drywet.fade.value = fade; } return this._drywet.fade.value; }; /** * Send output to a p5.js-sound, Web Audio Node, or use signal to * control an AudioParam * * @method connect * @for p5.Effect * @param {Object} unit */ p5.Effect.prototype.connect = function (unit) { var u = unit || p5.soundOut.input; this.output.connect(u.input ? u.input : u); }; /** * Disconnect all output. 
* @method disconnect * @for p5.Effect */ p5.Effect.prototype.disconnect = function () { if (this.output) { this.output.disconnect(); } }; p5.Effect.prototype.dispose = function () { var index = p5sound.soundArray.indexOf(this); p5sound.soundArray.splice(index, 1); if (this.input) { this.input.disconnect(); delete this.input; } if (this.output) { this.output.disconnect(); delete this.output; } if (this._drywet) { this._drywet.disconnect(); delete this._drywet; } if (this.wet) { this.wet.disconnect(); delete this.wet; } this.ac = undefined; }; return p5.Effect; }).call(exports, __webpack_require__, exports, module), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(19)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(e){"use strict";return e.WaveShaper=function(e,t){this._shaper=this.input=this.output=this.context.createWaveShaper(),this._curve=null,Array.isArray(e)?this.curve=e:isFinite(e)||this.isUndef(e)?this._curve=new Float32Array(this.defaultArg(e,1024)):this.isFunction(e)&&(this._curve=new Float32Array(this.defaultArg(t,1024)),this.setMap(e))},e.extend(e.WaveShaper,e.SignalBase),e.WaveShaper.prototype.setMap=function(e){for(var t=0,r=this._curve.length;t<r;t++){var s=t/(r-1)*2-1;this._curve[t]=e(s,t)}return this._shaper.curve=this._curve,this},Object.defineProperty(e.WaveShaper.prototype,"curve",{get:function(){return this._shaper.curve},set:function(e){this._curve=new Float32Array(e),this._shaper.curve=this._curve}}),Object.defineProperty(e.WaveShaper.prototype,"oversample",{get:function(){return this._shaper.oversample},set:function(e){if(-1===["none","2x","4x"].indexOf(e))throw new RangeError("Tone.WaveShaper: oversampling must be either 'none', '2x', or '4x'");this._shaper.oversample=e}}),e.WaveShaper.prototype.dispose=function(){return e.prototype.dispose.call(this),this._shaper.disconnect(),this._shaper=null,this._curve=null,this},e.WaveShaper}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { "use strict"; var __WEBPACK_AMD_DEFINE_RESULT__; function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } !(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) { var p5sound = __webpack_require__(1); var processorNames = __webpack_require__(10); /** * @for p5 */ /** * Returns a number representing the sample rate, in samples per second, * of all sound objects in this audio context. It is determined by the * sampling rate of your operating system's sound card, and it is not * currently possile to change. * It is often 44100, or twice the range of human hearing. * * @method sampleRate * @return {Number} samplerate samples per second */ p5.prototype.sampleRate = function () { return p5sound.audiocontext.sampleRate; }; /** * Returns the closest MIDI note value for * a given frequency. 
* * @method freqToMidi * @param {Number} frequency A freqeuncy, for example, the "A" * above Middle C is 440Hz * @return {Number} MIDI note value */ p5.prototype.freqToMidi = function (f) { var mathlog2 = Math.log(f / 440) / Math.log(2); var m = Math.round(12 * mathlog2) + 69; return m; }; /** * Returns the frequency value of a MIDI note value. * General MIDI treats notes as integers where middle C * is 60, C# is 61, D is 62 etc. Useful for generating * musical frequencies with oscillators. * * @method midiToFreq * @param {Number} midiNote The number of a MIDI note * @return {Number} Frequency value of the given MIDI note * @example * <div><code> * let midiNotes = [60, 64, 67, 72]; * let noteIndex = 0; * let midiVal, freq; * * function setup() { * let cnv = createCanvas(100, 100); * cnv.mousePressed(startSound); * osc = new p5.TriOsc(); * env = new p5.Envelope(); * } * * function draw() { * background(220); * text('tap to play', 10, 20); * if (midiVal) { * text('MIDI: ' + midiVal, 10, 40); * text('Freq: ' + freq, 10, 60); * } * } * * function startSound() { * // see also: userStartAudio(); * osc.start(); * * midiVal = midiNotes[noteIndex % midiNotes.length]; * freq = midiToFreq(midiVal); * osc.freq(freq); * env.ramp(osc, 0, 1.0, 0); * * noteIndex++; * } * </code></div> */ var midiToFreq = p5.prototype.midiToFreq = function (m) { return 440 * Math.pow(2, (m - 69) / 12.0); }; var noteToFreq = function noteToFreq(note) { if (typeof note !== 'string') { return note; } var wholeNotes = { A: 21, B: 23, C: 24, D: 26, E: 28, F: 29, G: 31 }; var value = wholeNotes[note[0].toUpperCase()]; var octave = ~~note.slice(-1); value += 12 * (octave - 1); switch (note[1]) { case '#': value += 1; break; case 'b': value -= 1; break; default: break; } return midiToFreq(value); }; /** * List the SoundFile formats that you will include. LoadSound * will search your directory for these extensions, and will pick * a format that is compatable with the client's web browser. * <a href="http://media.io/">Here</a> is a free online file * converter. * * @method soundFormats * @param {String} [...formats] i.e. 'mp3', 'wav', 'ogg' * @example * <div><code> * function preload() { * // set the global sound formats * soundFormats('mp3', 'ogg'); * * // load either beatbox.mp3, or .ogg, depending on browser * mySound = loadSound('assets/beatbox.mp3'); * } * * function setup() { * let cnv = createCanvas(100, 100); * background(220); * text('sound loaded! 
tap to play', 10, 20, width - 20); * cnv.mousePressed(function() { * mySound.play(); * }); * } * </code></div> */ p5.prototype.soundFormats = function () { p5sound.extensions = []; for (var i = 0; i < arguments.length; i++) { arguments[i] = arguments[i].toLowerCase(); if (['mp3', 'wav', 'ogg', 'm4a', 'aac'].indexOf(arguments[i]) > -1) { p5sound.extensions.push(arguments[i]); } else { throw arguments[i] + ' is not a valid sound format!'; } } }; p5.prototype.disposeSound = function () { for (var i = 0; i < p5sound.soundArray.length; i++) { p5sound.soundArray[i].dispose(); } }; p5.prototype.registerMethod('remove', p5.prototype.disposeSound); p5.prototype._checkFileFormats = function (paths) { var path; if (typeof paths === 'string') { path = paths; var extTest = path.split('.').pop(); if (['mp3', 'wav', 'ogg', 'm4a', 'aac'].indexOf(extTest) > -1) { if (p5.prototype.isFileSupported(extTest)) { path = path; } else { var pathSplit = path.split('.'); var pathCore = pathSplit[pathSplit.length - 1]; for (var i = 0; i < p5sound.extensions.length; i++) { var extension = p5sound.extensions[i]; var supported = p5.prototype.isFileSupported(extension); if (supported) { pathCore = ''; if (pathSplit.length === 2) { pathCore += pathSplit[0]; } for (var i = 1; i <= pathSplit.length - 2; i++) { var p = pathSplit[i]; pathCore += '.' + p; } path = pathCore += '.'; path = path += extension; break; } } } } else { for (var i = 0; i < p5sound.extensions.length; i++) { var extension = p5sound.extensions[i]; var supported = p5.prototype.isFileSupported(extension); if (supported) { path = path + '.' + extension; break; } } } } else if (_typeof(paths) === 'object') { for (var i = 0; i < paths.length; i++) { var extension = paths[i].split('.').pop(); var supported = p5.prototype.isFileSupported(extension); if (supported) { path = paths[i]; break; } } } return path; }; /** * Used by Osc and Envelope to chain signal math */ p5.prototype._mathChain = function (o, math, thisChain, nextChain, type) { for (var i in o.mathOps) { if (o.mathOps[i] instanceof type) { o.mathOps[i].dispose(); thisChain = i; if (thisChain < o.mathOps.length - 1) { nextChain = o.mathOps[i + 1]; } } } o.mathOps[thisChain - 1].disconnect(); o.mathOps[thisChain - 1].connect(math); math.connect(nextChain); o.mathOps[thisChain] = math; return o; }; function convertToWav(audioBuffer) { var leftChannel, rightChannel; leftChannel = audioBuffer.getChannelData(0); if (audioBuffer.numberOfChannels > 1) { rightChannel = audioBuffer.getChannelData(1); } else { rightChannel = leftChannel; } var interleaved = interleave(leftChannel, rightChannel); var buffer = new window.ArrayBuffer(44 + interleaved.length * 2); var view = new window.DataView(buffer); writeUTFBytes(view, 0, 'RIFF'); view.setUint32(4, 36 + interleaved.length * 2, true); writeUTFBytes(view, 8, 'WAVE'); writeUTFBytes(view, 12, 'fmt '); view.setUint32(16, 16, true); view.setUint16(20, 1, true); view.setUint16(22, 2, true); view.setUint32(24, p5sound.audiocontext.sampleRate, true); view.setUint32(28, p5sound.audiocontext.sampleRate * 4, true); view.setUint16(32, 4, true); view.setUint16(34, 16, true); writeUTFBytes(view, 36, 'data'); view.setUint32(40, interleaved.length * 2, true); var lng = interleaved.length; var index = 44; var volume = 1; for (var i = 0; i < lng; i++) { view.setInt16(index, interleaved[i] * (0x7FFF * volume), true); index += 2; } return view; } function interleave(leftChannel, rightChannel) { var length = leftChannel.length + rightChannel.length; var result = new 
Float32Array(length); var inputIndex = 0; for (var index = 0; index < length;) { result[index++] = leftChannel[inputIndex]; result[index++] = rightChannel[inputIndex]; inputIndex++; } return result; } function writeUTFBytes(view, offset, string) { var lng = string.length; for (var i = 0; i < lng; i++) { view.setUint8(offset + i, string.charCodeAt(i)); } } function safeBufferSize(idealBufferSize) { var bufferSize = idealBufferSize; var tempAudioWorkletNode = new AudioWorkletNode(p5sound.audiocontext, processorNames.soundFileProcessor); if (tempAudioWorkletNode instanceof ScriptProcessorNode) { bufferSize = tempAudioWorkletNode.bufferSize; } tempAudioWorkletNode.disconnect(); tempAudioWorkletNode = null; return bufferSize; } return { convertToWav: convertToWav, midiToFreq: midiToFreq, noteToFreq: noteToFreq, safeBufferSize: safeBufferSize }; }).call(exports, __webpack_require__, exports, module), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Add=function(t){this.createInsOuts(2,0),this._sum=this.input[0]=this.input[1]=this.output=new i.Gain,this._param=this.input[1]=new i.Signal(t),this._param.connect(this._sum)},i.extend(i.Add,i.Signal),i.Add.prototype.dispose=function(){return i.prototype.dispose.call(this),this._sum.dispose(),this._sum=null,this._param.dispose(),this._param=null,this},i.Add}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(20),__webpack_require__(45),__webpack_require__(46),__webpack_require__(12)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(t){return t.Type={Default:"number",Time:"time",Frequency:"frequency",TransportTime:"transportTime",Ticks:"ticks",NormalRange:"normalRange",AudioRange:"audioRange",Decibels:"db",Interval:"interval",BPM:"bpm",Positive:"positive",Cents:"cents",Degrees:"degrees",MIDI:"midi",BarsBeatsSixteenths:"barsBeatsSixteenths",Samples:"samples",Hertz:"hertz",Note:"note",Milliseconds:"milliseconds",Seconds:"seconds",Notation:"notation"},t.prototype.toSeconds=function(e){return this.isNumber(e)?e:this.isUndef(e)?this.now():this.isString(e)?new t.Time(e).toSeconds():e instanceof t.TimeBase?e.toSeconds():void 0},t.prototype.toFrequency=function(e){return this.isNumber(e)?e:this.isString(e)||this.isUndef(e)?new t.Frequency(e).valueOf():e instanceof t.TimeBase?e.toFrequency():void 0},t.prototype.toTicks=function(e){return this.isNumber(e)||this.isString(e)?new t.TransportTime(e).toTicks():this.isUndef(e)?t.Transport.ticks:e instanceof t.TimeBase?e.toTicks():void 0},t}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(22),__webpack_require__(8)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return 
window.GainNode&&!AudioContext.prototype.createGain&&(AudioContext.prototype.createGain=AudioContext.prototype.createGainNode),i.Gain=function(){var t=this.optionsObject(arguments,["gain","units"],i.Gain.defaults);this.input=this.output=this._gainNode=this.context.createGain(),this.gain=new i.Param({param:this._gainNode.gain,units:t.units,value:t.gain,convert:t.convert}),this._readOnly("gain")},i.extend(i.Gain),i.Gain.defaults={gain:1,convert:!0},i.Gain.prototype.dispose=function(){i.Param.prototype.dispose.call(this),this._gainNode.disconnect(),this._gainNode=null,this._writable("gain"),this.gain.dispose(),this.gain=null},i.prototype.createInsOuts=function(t,n){1===t?this.input=new i.Gain:1<t&&(this.input=new Array(t)),1===n?this.output=new i.Gain:1<n&&(this.output=new Array(t))},i.Gain}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports) { module.exports = { recorderProcessor: 'recorder-processor', soundFileProcessor: 'sound-file-processor', amplitudeProcessor: 'amplitude-processor' }; }), (function(module, exports, __webpack_require__) { "use strict"; var __WEBPACK_AMD_DEFINE_RESULT__; !(__WEBPACK_AMD_DEFINE_RESULT__ = (function () { var CustomError = function CustomError(name, errorTrace, failedPath) { var err = new Error(); var tempStack, splitStack; err.name = name; err.originalStack = err.stack + errorTrace; tempStack = err.stack + errorTrace; err.failedPath = failedPath; var splitStack = tempStack.split('\n'); splitStack = splitStack.filter(function (ln) { return !ln.match(/(p5.|native code|globalInit)/g); }); err.stack = splitStack.join('\n'); return err; }; return CustomError; }).call(exports, __webpack_require__, exports, module), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(18)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(o){function t(e,t,n){if(e.input)Array.isArray(e.input)?(o.prototype.isUndef(n)&&(n=0),this.connect(e.input[n])):this.connect(e.input,t,n);else try{e instanceof AudioNode?i.call(this,e,t,n):i.call(this,e,t)}catch(t){throw new Error("error connecting to node: "+e+"\n"+t)}}var i,r;return!window.hasOwnProperty("AudioContext")&&window.hasOwnProperty("webkitAudioContext")&&(window.AudioContext=window.webkitAudioContext),o.Context=function(t){for(var e in o.Emitter.call(this),t=t||new window.AudioContext,this._context=t,this._context)this._defineProperty(this._context,e);this._latencyHint="interactive",this._lookAhead=.1,this._updateInterval=this._lookAhead/3,this._computedUpdateInterval=0,this._worker=this._createWorker(),this._constants={}},o.extend(o.Context,o.Emitter),o.Emitter.mixin(o.Context),o.Context.prototype._defineProperty=function(e,n){this.isUndef(this[n])&&Object.defineProperty(this,n,{get:function(){return"function"==typeof e[n]?e[n].bind(e):e[n]},set:function(t){e[n]=t}})},o.Context.prototype.now=function(){return this._context.currentTime},o.Context.prototype._createWorker=function(){window.URL=window.URL||window.webkitURL;var t=new Blob(["var timeoutTime = "+(1e3*this._updateInterval).toFixed(1)+";self.onmessage = function(msg){\ttimeoutTime = parseInt(msg.data);};function tick(){\tsetTimeout(tick, timeoutTime);\tself.postMessage('tick');}tick();"]),e=URL.createObjectURL(t),n=new 
Worker(e);return n.addEventListener("message",function(){this.emit("tick")}.bind(this)),n.addEventListener("message",function(){var t=this.now();if(this.isNumber(this._lastUpdate)){var e=t-this._lastUpdate;this._computedUpdateInterval=Math.max(e,.97*this._computedUpdateInterval)}this._lastUpdate=t}.bind(this)),n},o.Context.prototype.getConstant=function(t){if(this._constants[t])return this._constants[t];for(var e=this._context.createBuffer(1,128,this._context.sampleRate),n=e.getChannelData(0),o=0;o<n.length;o++)n[o]=t;var i=this._context.createBufferSource();return i.channelCount=1,i.channelCountMode="explicit",i.buffer=e,i.loop=!0,i.start(0),this._constants[t]=i},Object.defineProperty(o.Context.prototype,"lag",{get:function(){var t=this._computedUpdateInterval-this._updateInterval;return t=Math.max(t,0)}}),Object.defineProperty(o.Context.prototype,"lookAhead",{get:function(){return this._lookAhead},set:function(t){this._lookAhead=t}}),Object.defineProperty(o.Context.prototype,"updateInterval",{get:function(){return this._updateInterval},set:function(t){this._updateInterval=Math.max(t,o.prototype.blockTime),this._worker.postMessage(Math.max(1e3*t,1))}}),Object.defineProperty(o.Context.prototype,"latencyHint",{get:function(){return this._latencyHint},set:function(t){var e=t;if(this._latencyHint=t,this.isString(t))switch(t){case"interactive":e=.1,this._context.latencyHint=t;break;case"playback":e=.8,this._context.latencyHint=t;break;case"balanced":e=.25,this._context.latencyHint=t;break;case"fastest":e=.01}this.lookAhead=e,this.updateInterval=e/3}}),o.supported?(i=AudioNode.prototype.connect,r=AudioNode.prototype.disconnect,AudioNode.prototype.connect!==t&&(AudioNode.prototype.connect=t,AudioNode.prototype.disconnect=function(e,t,n){if(e&&e.input&&Array.isArray(e.input))o.prototype.isUndef(n)&&(n=0),this.disconnect(e.input[n],t,n);else if(e&&e.input)this.disconnect(e.input,t,n);else try{r.apply(this,arguments)}catch(t){throw new Error("error disconnecting node: "+e+"\n"+t)}}),o.context=new o.Context):console.warn("This browser does not support Tone.js"),o.Context}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(7),__webpack_require__(3),__webpack_require__(2)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Scale=function(t,e){this._outputMin=this.defaultArg(t,0),this._outputMax=this.defaultArg(e,1),this._scale=this.input=new i.Multiply(1),this._add=this.output=new i.Add(0),this._scale.connect(this._add),this._setRange()},i.extend(i.Scale,i.SignalBase),Object.defineProperty(i.Scale.prototype,"min",{get:function(){return this._outputMin},set:function(t){this._outputMin=t,this._setRange()}}),Object.defineProperty(i.Scale.prototype,"max",{get:function(){return this._outputMax},set:function(t){this._outputMax=t,this._setRange()}}),i.Scale.prototype._setRange=function(){this._add.value=this._outputMin,this._scale.value=this._outputMax-this._outputMin},i.Scale.prototype.dispose=function(){return i.prototype.dispose.call(this),this._add.dispose(),this._add=null,this._scale.dispose(),this._scale=null,this},i.Scale}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, 
__webpack_require__) { var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(24)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(o){"use strict";return o.TimelineSignal=function(){var e=this.optionsObject(arguments,["value","units"],o.Signal.defaults);this._events=new o.Timeline(10),o.Signal.apply(this,e),e.param=this._param,o.Param.call(this,e),this._initial=this._fromUnits(this._param.value)},o.extend(o.TimelineSignal,o.Param),o.TimelineSignal.Type={Linear:"linear",Exponential:"exponential",Target:"target",Curve:"curve",Set:"set"},Object.defineProperty(o.TimelineSignal.prototype,"value",{get:function(){var e=this.now(),t=this.getValueAtTime(e);return this._toUnits(t)},set:function(e){var t=this._fromUnits(e);this._initial=t,this.cancelScheduledValues(),this._param.value=t}}),o.TimelineSignal.prototype.setValueAtTime=function(e,t){return e=this._fromUnits(e),t=this.toSeconds(t),this._events.add({type:o.TimelineSignal.Type.Set,value:e,time:t}),this._param.setValueAtTime(e,t),this},o.TimelineSignal.prototype.linearRampToValueAtTime=function(e,t){return e=this._fromUnits(e),t=this.toSeconds(t),this._events.add({type:o.TimelineSignal.Type.Linear,value:e,time:t}),this._param.linearRampToValueAtTime(e,t),this},o.TimelineSignal.prototype.exponentialRampToValueAtTime=function(e,t){t=this.toSeconds(t);var i=this._searchBefore(t);i&&0===i.value&&this.setValueAtTime(this._minOutput,i.time),e=this._fromUnits(e);var n=Math.max(e,this._minOutput);return this._events.add({type:o.TimelineSignal.Type.Exponential,value:n,time:t}),e<this._minOutput?(this._param.exponentialRampToValueAtTime(this._minOutput,t-this.sampleTime),this.setValueAtTime(0,t)):this._param.exponentialRampToValueAtTime(e,t),this},o.TimelineSignal.prototype.setTargetAtTime=function(e,t,i){return e=this._fromUnits(e),e=Math.max(this._minOutput,e),i=Math.max(this._minOutput,i),t=this.toSeconds(t),this._events.add({type:o.TimelineSignal.Type.Target,value:e,time:t,constant:i}),this._param.setTargetAtTime(e,t,i),this},o.TimelineSignal.prototype.setValueCurveAtTime=function(e,t,i,n){n=this.defaultArg(n,1);for(var a=new Array(e.length),l=0;l<a.length;l++)a[l]=this._fromUnits(e[l])*n;t=this.toSeconds(t),i=this.toSeconds(i),this._events.add({type:o.TimelineSignal.Type.Curve,value:a,time:t,duration:i}),this._param.setValueAtTime(a[0],t);for(var s=1;s<a.length;s++){var r=t+s/(a.length-1)*i;this._param.linearRampToValueAtTime(a[s],r)}return this},o.TimelineSignal.prototype.cancelScheduledValues=function(e){return e=this.toSeconds(e),this._events.cancel(e),this._param.cancelScheduledValues(e),this},o.TimelineSignal.prototype.setRampPoint=function(e){e=this.toSeconds(e);var t=this._toUnits(this.getValueAtTime(e)),i=this._searchBefore(e);if(i&&i.time===e)this.cancelScheduledValues(e+this.sampleTime);else if(i&&i.type===o.TimelineSignal.Type.Curve&&i.time+i.duration>e)this.cancelScheduledValues(e),this.linearRampToValueAtTime(t,e);else{var n=this._searchAfter(e);n&&(this.cancelScheduledValues(e),n.type===o.TimelineSignal.Type.Linear?this.linearRampToValueAtTime(t,e):n.type===o.TimelineSignal.Type.Exponential&&this.exponentialRampToValueAtTime(t,e)),this.setValueAtTime(t,e)}return this},o.TimelineSignal.prototype.linearRampToValueBetween=function(e,t,i){return this.setRampPoint(t),this.linearRampToValueAtTime(e,i),this},o.TimelineSignal.prototype.exponentialRampToValueBetween=function(e,t,i){return 
this.setRampPoint(t),this.exponentialRampToValueAtTime(e,i),this},o.TimelineSignal.prototype._searchBefore=function(e){return this._events.get(e)},o.TimelineSignal.prototype._searchAfter=function(e){return this._events.getAfter(e)},o.TimelineSignal.prototype.getValueAtTime=function(e){e=this.toSeconds(e);var t=this._searchAfter(e),i=this._searchBefore(e),n=this._initial;if(null===i)n=this._initial;else if(i.type===o.TimelineSignal.Type.Target){var a,l=this._events.getBefore(i.time);a=null===l?this._initial:l.value,n=this._exponentialApproach(i.time,a,i.value,i.constant,e)}else n=i.type===o.TimelineSignal.Type.Curve?this._curveInterpolate(i.time,i.value,i.duration,e):null===t?i.value:t.type===o.TimelineSignal.Type.Linear?this._linearInterpolate(i.time,i.value,t.time,t.value,e):t.type===o.TimelineSignal.Type.Exponential?this._exponentialInterpolate(i.time,i.value,t.time,t.value,e):i.value;return n},o.TimelineSignal.prototype.connect=o.SignalBase.prototype.connect,o.TimelineSignal.prototype._exponentialApproach=function(e,t,i,n,a){return i+(t-i)*Math.exp(-(a-e)/n)},o.TimelineSignal.prototype._linearInterpolate=function(e,t,i,n,a){return t+(a-e)/(i-e)*(n-t)},o.TimelineSignal.prototype._exponentialInterpolate=function(e,t,i,n,a){return(t=Math.max(this._minOutput,t))*Math.pow(n/t,(a-e)/(i-e))},o.TimelineSignal.prototype._curveInterpolate=function(e,t,i,n){var a=t.length;if(e+i<=n)return t[a-1];if(n<=e)return t[0];var l=(n-e)/i,s=Math.floor((a-1)*l),r=Math.ceil((a-1)*l),o=t[s],p=t[r];return r===s?o:this._linearInterpolate(s,o,r,p,l*(a-1))},o.TimelineSignal.prototype.dispose=function(){o.Signal.prototype.dispose.call(this),o.Param.prototype.dispose.call(this),this._events.dispose(),this._events=null},o.TimelineSignal}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); }), (function(module, exports, __webpack_require__) { "use strict"; var __WEBPACK_AMD_DEFINE_RESULT__; !(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) { var Effect = __webpack_require__(4); /** * <p>A p5.Filter uses a Web Audio Biquad Filter to filter * the frequency response of an input source. Subclasses * include:</p> * <a href="/reference/#/p5.LowPass"><code>p5.LowPass</code></a>: * Allows frequencies below the cutoff frequency to pass through, * and attenuates frequencies above the cutoff.<br/> * <a href="/reference/#/p5.HighPass"><code>p5.HighPass</code></a>: * The opposite of a lowpass filter. <br/> * <a href="/reference/#/p5.BandPass"><code>p5.BandPass</code></a>: * Allows a range of frequencies to pass through and attenuates * the frequencies below and above this frequency range.<br/> * * The <code>.res()</code> method controls either width of the * bandpass, or resonance of the low/highpass cutoff frequency. * * This class extends <a href = "/reference/#/p5.Effect">p5.Effect</a>. * Methods <a href = "/reference/#/p5.Effec