@types/p5
TypeScript definitions for p5
// This file was auto-generated. Please do not edit it.
import * as p5 from '../../index';
declare module '../../index' {
class SoundFile {
/**
* SoundFile object with a path to a file. The
* p5.SoundFile may not be available immediately
* because it loads the file information
* asynchronously.
*
* To do something with the sound as soon as it loads,
* pass the name of a function as the second
* parameter.
*
* Only one file path is required. However, audio
* file formats (i.e. mp3, ogg, wav and m4a/aac) are
* not supported by all web browsers. If you want to
* ensure compatibility, instead of a single file
* path, you may include an Array of filepaths, and
* the browser will choose a format that works.
*
* @param path path to a sound file (String).
* Optionally, you may include multiple file formats
* in an array. Alternately, accepts an object from
* the HTML5 File API, or a p5.File.
* @param [successCallback] Name of a function to
* call once file loads
* @param [errorCallback] Name of a function to call
* if file fails to load. This function will receive
* an error or XMLHttpRequest object with information
* about what went wrong.
* @param [whileLoadingCallback] Name of a function
* to call while file is loading. That function will
* receive progress of the request to load the sound
* file (between 0 and 1) as its first parameter.
* This progress does not account for the additional
* time needed to decode the audio data.
*/
constructor(
path: string | any[],
successCallback?: (...args: any[]) => any,
errorCallback?: (...args: any[]) => any,
whileLoadingCallback?: (...args: any[]) => any
);
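// Illustrative usage sketch (not part of the generated declarations), showing the
// constructor with fallback formats and load callbacks. The file paths are
// hypothetical; in a sketch you would typically call loadSound() inside preload(),
// which wraps this constructor.
//
//   let song: p5.SoundFile;
//   function setup() {
//     song = new p5.SoundFile(
//       ['assets/beat.mp3', 'assets/beat.ogg'],      // browser picks a supported format
//       () => song.play(),                            // successCallback: play once loaded
//       (err) => console.error('load failed', err),   // errorCallback
//       (progress) => console.log(progress)           // whileLoadingCallback: 0 to 1
//     );
//   }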
/**
* Returns true if the sound file finished loading
* successfully.
*/
isLoaded(): boolean;
/**
* Play the p5.SoundFile
* @param [startTime] (optional) schedule playback to
* start (in seconds from now).
* @param [rate] (optional) playback rate
* @param [amp] (optional) amplitude (volume) of
* playback
* @param [cueStart] (optional) cue start time in
* seconds
* @param [duration] (optional) duration of playback
* in seconds
*/
play(startTime?: number, rate?: number, amp?: number, cueStart?: number, duration?: number): void;
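// Illustrative sketch: the optional arguments schedule and shape playback. Assumes
// `song` is a loaded SoundFile (e.g. created via loadSound() in preload()).
//
//   function mousePressed() {
//     // start 0.5 s from now, normal rate, half volume, from the 2 s cue, for 3 s
//     song.play(0.5, 1, 0.5, 2, 3);
//   }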
/**
* p5.SoundFile has two play modes: restart and
* sustain. Play Mode determines what happens to a
* p5.SoundFile if it is triggered while in the
* middle of playback. In sustain mode, playback will
* continue simultaneous to the new playback. In
* restart mode, play() will stop playback and start
* over. With untilDone, a sound will play only if
* it's not already playing. Sustain is the default
* mode.
* @param str 'restart' or 'sustain' or 'untilDone'
*/
playMode(str: string): void;
/**
* Pauses a file that is currently playing. If the
* file is not playing, then nothing will happen.
* After pausing, .play() will resume from the paused
* position. If p5.SoundFile had been set to loop
* before it was paused, it will continue to loop
* after it is unpaused with .play().
* @param [startTime] (optional) schedule event to
* occur seconds from now
*/
pause(startTime?: number): void;
/**
* Loop the p5.SoundFile. Accepts optional parameters
* to set the playback rate, playback volume,
* loopStart, loopEnd.
* @param [startTime] (optional) schedule event to
* occur seconds from now
* @param [rate] (optional) playback rate
* @param [amp] (optional) playback volume
* @param [cueLoopStart] (optional) startTime in
* seconds
* @param [duration] (optional) loop duration in
* seconds
*/
loop(startTime?: number, rate?: number, amp?: number, cueLoopStart?: number, duration?: number): void;
/**
* Set a p5.SoundFile's looping flag to true or
* false. If the sound is currently playing, this
* change will take effect when it reaches the end of
* the current playback.
* @param Boolean set looping to true or false
*/
setLoop(Boolean: boolean): void;
/**
* Returns 'true' if a p5.SoundFile is currently
* looping and playing, 'false' if not.
*/
isLooping(): boolean;
/**
* Returns true if a p5.SoundFile is playing, false
* if not (i.e. paused or stopped).
*/
isPlaying(): boolean;
/**
* Returns true if a p5.SoundFile is paused, false if
* not (i.e. playing or stopped).
*/
isPaused(): boolean;
/**
* Stop soundfile playback.
* @param [startTime] (optional) schedule event to
* occur in seconds from now
*/
stop(startTime?: number): void;
/**
* Set the stereo panning of a p5.sound object to a
* floating point number between -1.0 (left) and 1.0
* (right). Default is 0.0 (center).
* @param [panValue] Set the stereo panner
* @param [timeFromNow] schedule this event to happen
* seconds from now
*/
pan(panValue?: number, timeFromNow?: number): void;
/**
* Returns the current stereo pan position (-1.0 to
* 1.0)
* @return Returns the stereo pan setting of this
* SoundFile as a number between -1.0 (left) and 1.0
* (right). 0.0 is center and default.
*/
getPan(): number;
/**
* Set the playback rate of a sound file. Will change
* the speed and the pitch. Values less than zero
* will reverse the audio buffer.
* @param [playbackRate] Set the playback rate. 1.0
* is normal, .5 is half-speed, 2.0 is twice as fast.
* Values less than zero play backwards.
*/
rate(playbackRate?: number): void;
/**
* Multiply the output volume (amplitude) of a sound
* file between 0.0 (silence) and 1.0 (full volume).
* 1.0 is the maximum amplitude of a digital sound,
* so multiplying by greater than 1.0 may cause
* digital distortion. To fade, provide a rampTime
* parameter. For more complex fades, see the
* Envelope class. Alternately, you can pass in a
* signal source such as an oscillator to modulate
* the amplitude with an audio signal.
* @param volume Volume (amplitude) between 0.0 and
* 1.0 or modulating signal/oscillator
* @param [rampTime] Fade for t seconds
* @param [timeFromNow] Schedule this event to happen
* at t seconds in the future
*/
setVolume(volume: number | object, rampTime?: number, timeFromNow?: number): void;
/**
* Returns the duration of a sound file in seconds.
* @return The duration of the soundFile in seconds.
*/
duration(): number;
/**
* Return the current position of the p5.SoundFile
* playhead, in seconds. Time is relative to the
* normal buffer direction, so if reverseBuffer has
* been called, currentTime will count backwards.
* @return currentTime of the soundFile in seconds.
*/
currentTime(): number;
/**
* Move the playhead of a soundfile that is currently
* playing to a new position and a new duration, in
* seconds. If none are given, will reset the file to
* play entire duration from start to finish. To set
* the position of a soundfile that is not currently
* playing, use the play or loop methods.
* @param cueTime cueTime of the soundFile in
* seconds.
* @param duration duration in seconds.
*/
jump(cueTime: number, duration: number): void;
/**
* Return the number of channels in a sound file. For
* example, Mono = 1, Stereo = 2.
* @return [channels]
*/
channels(): number;
/**
* Return the sample rate of the sound file.
* @return [sampleRate]
*/
sampleRate(): number;
/**
* Return the number of samples in a sound file.
* Equal to sampleRate * duration.
* @return [sampleCount]
*/
frames(): number;
/**
* Returns an array of amplitude peaks in a
* p5.SoundFile that can be used to draw a static
* waveform. Scans through the p5.SoundFile's audio
* buffer to find the greatest amplitudes. Accepts
* one parameter, 'length', which determines size of
* the array. Larger arrays result in more precise
* waveform visualizations. Inspired by
* Wavesurfer.js.
* @param [length] length is the size of the returned
* array. Larger length results in more precision.
* Defaults to 5*width of the browser window.
* @return Array of peaks.
*/
getPeaks(length?: number): Float32Array;
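// Illustrative sketch: drawing a static waveform from getPeaks(). Assumes `song`
// is a loaded SoundFile and a canvas was created in setup().
//
//   function draw() {
//     background(220);
//     const peaks = song.getPeaks(width);   // one peak value per pixel column
//     noFill();
//     beginShape();
//     for (let i = 0; i < peaks.length; i++) {
//       vertex(i, map(peaks[i], -1, 1, height, 0));
//     }
//     endShape();
//   }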
/**
* Reverses the p5.SoundFile's buffer source.
* Playback must be handled separately (see example).
*/
reverseBuffer(): void;
/**
* Schedule an event to be called when the soundfile
* reaches the end of a buffer. If the soundfile is
* playing through once, this will be called when it
* ends. If it is looping, it will be called when
* stop is called.
* @param callback function to call when the
* soundfile has ended.
*/
onended(callback: (...args: any[]) => any): void;
/**
* Connects the output of a p5sound object to the
* input of another p5.sound object. For example, you may
* connect a p5.SoundFile to an FFT or an Effect. If
* no parameter is given, it will connect to the main
* output. Most p5sound objects connect to the master
* output when they are created.
* @param [object] Audio object that accepts an input
*/
connect(object?: object): void;
/**
* Disconnects the output of this p5sound object.
*/
disconnect(): void;
/**
* Reset the source for this SoundFile to a new path
* (URL).
* @param path path to audio file
* @param callback Callback
*/
setPath(path: string, callback: (...args: any[]) => any): void;
/**
* Replace the current Audio Buffer with a new
* Buffer.
* @param buf Array of Float32 Array(s). 2 Float32
* Arrays will create a stereo source. 1 will create
* a mono source.
*/
setBuffer(buf: any[]): void;
/**
* Schedule events to trigger every time a
* MediaElement (audio/video) reaches a playback cue
* point. Accepts a callback function, a time (in
* seconds) at which to trigger the callback, and an
* optional parameter for the callback.
*
* Time will be passed as the first parameter to the
* callback function, and param will be the second
* parameter.
* @param time Time in seconds, relative to this
* media element's playback. For example, to trigger
* an event every time playback reaches two seconds,
* pass in the number 2. This will be passed as the
* first parameter to the callback function.
* @param callback Name of a function that will be
* called at the given time. The callback will
* receive time and (optionally) param as its two
* parameters.
* @param [value] An object to be passed as the
* second parameter to the callback function.
* @return id ID of this cue, useful for
* removeCue(id)
*/
addCue(time: number, callback: (...args: any[]) => any, value?: object): number;
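// Illustrative sketch: trigger a callback when playback reaches the 2 s mark.
// Assumes `song` is a loaded SoundFile; the returned id can later be passed to
// removeCue().
//
//   const cueId = song.addCue(2, (time, data) => {
//     console.log(`reached ${time}s`, data);
//   }, { section: 'verse' });
//   song.play();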
/**
* Remove a callback based on its ID. The ID is
* returned by the addCue method.
* @param id ID of the cue, as returned by addCue
*/
removeCue(id: number): void;
/**
* Remove all of the callbacks that had originally
* been scheduled via the addCue method.
*/
clearCues(): void;
/**
* Save a p5.SoundFile as a .wav file. The browser
* will prompt the user to download the file to their
* device. To upload a file to a server, see getBlob
* @param [fileName] name of the resulting .wav file.
*/
save(fileName?: string): void;
/**
* This method is useful for sending a SoundFile to a
* server. It returns the .wav-encoded audio data as
* a "Blob". A Blob is a file-like data object that
* can be uploaded to a server with an http request.
* We'll use the httpDo options object to send a POST
* request with some specific options: we encode the
* request as multipart/form-data, and attach the
* blob as one of the form values using FormData.
* @return A file-like data object
*/
getBlob(): Blob;
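// Illustrative sketch: uploading the wav-encoded data. The description above
// mentions p5's httpDo(); this sketch uses the standard fetch() API instead, and
// the '/upload' endpoint is hypothetical.
//
//   const blob = song.getBlob();
//   const formData = new FormData();
//   formData.append('soundBlob', blob, 'recording.wav');
//   fetch('/upload', { method: 'POST', body: formData })
//     .then((res) => console.log('uploaded', res.status))
//     .catch((err) => console.error(err));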
}
class Amplitude {
/**
* Amplitude measures volume between 0.0 and 1.0.
* Listens to all p5sound by default, or use
* setInput() to listen to a specific sound source.
* Accepts an optional smoothing value, which
* defaults to 0.
*
* @param [smoothing] between 0.0 and .999 to smooth
* amplitude readings (defaults to 0)
*/
constructor(smoothing?: number);
/**
* Connects to the p5sound instance (main output) by
* default. Optionally, you can pass in a specific
* source (i.e. a soundfile).
* @param [snd] set the sound source (optional,
* defaults to main output)
* @param [smoothing] a range between 0.0 and 1.0 to
* smooth amplitude readings
*/
setInput(snd?: any, smoothing?: number): void;
/**
* Returns a single Amplitude reading at the moment
* it is called. For continuous readings, run in the
* draw loop.
* @param [channel] Optionally return only channel 0
* (left) or 1 (right)
* @return Amplitude as a number between 0.0 and 1.0
*/
getLevel(channel?: number): number;
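// Illustrative sketch: continuously read the level of a playing SoundFile.
// Assumes `song` is a loaded SoundFile.
//
//   let amplitude: p5.Amplitude;
//   function setup() {
//     createCanvas(200, 200);
//     amplitude = new p5.Amplitude(0.9);   // with some smoothing
//     amplitude.setInput(song);            // otherwise it listens to all p5.sound output
//     song.loop();
//   }
//   function draw() {
//     background(220);
//     const level = amplitude.getLevel();  // 0.0 to 1.0
//     ellipse(width / 2, height / 2, level * 200, level * 200);
//   }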
/**
* Determines whether the results of
* Amplitude.process() will be Normalized. To
* normalize, Amplitude finds the difference between the
* loudest reading it has processed and the maximum
* amplitude of 1.0. Amplitude adds this difference
* to all values to produce results that will
* reliably map between 0.0 and 1.0. However, if a
* louder moment occurs, the amount that Normalize
* adds to all the values will change. Accepts an
* optional boolean parameter (true or false).
* Normalizing is off by default.
* @param [boolean] set normalize to true (1) or
* false (0)
*/
toggleNormalize(boolean?: boolean): void;
/**
* Smooth Amplitude analysis by averaging with the
* last analysis frame. Off by default.
* @param set smoothing amount between 0.0 and 1.0
*/
smooth(set: number): void;
}
class FFT {
/**
* FFT (Fast Fourier Transform) is an analysis
* algorithm that isolates individual audio
* frequencies within a waveform. Once instantiated,
* a p5.FFT object can return an array based on two
* types of analyses:
*
* • FFT.waveform() computes amplitude values along
* the time domain. The array indices correspond to
* samples across a brief moment in time. Each value
* represents amplitude of the waveform at that
* sample of time.
*
* • FFT.analyze() computes amplitude values along
* the frequency domain. The array indices correspond
* to frequencies (i.e. pitches), from the lowest to
* the highest that humans can hear. Each value
* represents amplitude at that slice of the
* frequency spectrum. Use with getEnergy() to
* measure amplitude at specific frequencies, or
* within a range of frequencies.
*
* FFT analyzes a very short snapshot of sound called
* a sample buffer. It returns an array of amplitude
* measurements, referred to as bins. The array is
* 1024 bins long by default. You can change the bin
* array length, but it must be a power of 2 between
* 16 and 1024 in order for the FFT algorithm to
* function correctly. The actual size of the FFT
* buffer is twice the number of bins, so given a
* standard sample rate, the buffer is 2048/44100
* seconds long.
*
* @param [smoothing] Smooth results of Freq
* Spectrum. 0.0 < smoothing < 1.0. Defaults to 0.8.
* @param [bins] Length of resulting array. Must be a
* power of two between 16 and 1024. Defaults to
* 1024.
*/
constructor(smoothing?: number, bins?: number);
/**
* Set the input source for the FFT analysis. If no
* source is provided, FFT will analyze all sound in
* the sketch.
* @param [source] p5.sound object (or web audio API
* source node)
*/
setInput(source?: object): void;
/**
* Returns an array of amplitude values (between -1.0
* and +1.0) that represent a snapshot of amplitude
* readings in a single buffer. Length will be equal
* to bins (defaults to 1024). Can be used to draw
* the waveform of a sound.
* @param [bins] Must be a power of two between 16
* and 1024. Defaults to 1024.
* @param [precision] If any value is provided, will
* return results in a Float32 Array which is more
* precise than a regular array.
* @return Array Array of amplitude values (-1 to 1)
* over time. Array length = bins.
*/
waveform(bins?: number, precision?: string): any[];
/**
* Returns an array of amplitude values (between 0
* and 255) across the frequency spectrum. Length is
* equal to FFT bins (1024 by default). The array
* indices correspond to frequencies (i.e. pitches),
* from the lowest to the highest that humans can
* hear. Each value represents amplitude at that
* slice of the frequency spectrum. Must be called
* prior to using getEnergy().
* @param [bins] Must be a power of two between 16
* and 1024. Defaults to 1024.
* @param [scale] If "dB," returns decibel float
* measurements between -140 and 0 (max). Otherwise
* returns integers from 0-255.
* @return spectrum Array of energy
* (amplitude/volume) values across the frequency
* spectrum. Lowest energy (silence) = 0, highest
* possible is 255.
*/
analyze(bins?: number, scale?: number): any[];
/**
* Returns the amount of energy (volume) at a
* specific frequency, or the average amount of
* energy between two frequencies. Accepts Number(s)
* corresponding to frequency (in Hz), or a "string"
* corresponding to predefined frequency ranges
* ("bass", "lowMid", "mid", "highMid", "treble").
* Returns a range between 0 (no energy/volume at
* that frequency) and 255 (maximum energy). NOTE:
* analyze() must be called prior to getEnergy().
* analyze() tells the FFT to analyze frequency data,
* and getEnergy() uses the results to determine the
* value at a specific frequency or range of
* frequencies.
* @param frequency1 Will return a value representing
* energy at this frequency. Alternately, the strings
* "bass", "lowMid" "mid", "highMid", and "treble"
* will return predefined frequency ranges.
* @param [frequency2] If a second frequency is
* given, will return average amount of energy that
* exists between the two frequencies.
* @return Energy Energy (volume/amplitude) from 0
* and 255.
*/
getEnergy(frequency1: number | string, frequency2?: number): number;
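// Illustrative sketch: measure bass and treble energy each frame. Assumes some
// sound source (e.g. a looping SoundFile) is playing in the sketch.
//
//   let fft: p5.FFT;
//   function setup() {
//     createCanvas(200, 200);
//     fft = new p5.FFT(0.8, 1024);
//   }
//   function draw() {
//     background(220);
//     fft.analyze();                           // must run before getEnergy()
//     const bass = fft.getEnergy('bass');      // 0 to 255
//     const treble = fft.getEnergy(4000, 12000);
//     ellipse(50, height / 2, bass, bass);
//     ellipse(150, height / 2, treble, treble);
//   }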
/**
* Returns the spectral centroid of the input
* signal. NOTE: analyze() must be called prior to
* getCentroid(). Analyze() tells the FFT to analyze
* frequency data, and getCentroid() uses the results
* to determine the spectral centroid.
* @return Spectral Centroid Frequency of the
* spectral centroid in Hz.
*/
getCentroid(): number;
/**
* Smooth FFT analysis by averaging with the last
* analysis frame.
* @param smoothing 0.0 < smoothing < 1.0. Defaults
* to 0.8.
*/
smooth(smoothing: number): void;
/**
* Returns an array of average amplitude values for a
* given number of frequency bands split equally. N
* defaults to 16. NOTE: analyze() must be called
* prior to linAverages(). Analyze() tells the FFT to
* analyze frequency data, and linAverages() uses the
* results to group them into a smaller set of
* averages.
* @param N Number of returned frequency groups
* @return linearAverages Array of average amplitude
* values for each group
*/
linAverages(N: number): any[];
/**
* Returns an array of average amplitude values of
* the spectrum, for a given set of Octave Bands.
* NOTE: analyze() must be called prior to
* logAverages(). Analyze() tells the FFT to analyze
* frequency data, and logAverages() uses the results
* to group them into a smaller set of averages.
* @param octaveBands Array of Octave Bands objects
* for grouping
* @return logAverages Array of average amplitude
* values for each group
*/
logAverages(octaveBands: any[]): any[];
/**
* Calculates and returns the 1/N Octave Bands. N
* defaults to 3 and the minimum central frequency to
* 15.625 Hz (1/3 Octave Bands ~= 31 frequency bands).
* Setting fCtr0 to a central value of a higher
* octave will ignore the lower bands and produce
* fewer frequency groups.
* @param N Specifies the 1/N type of generated
* octave bands
* @param fCtr0 Minimum central frequency for the
* lowest band
* @return octaveBands Array of octave band objects
* with their bounds
*/
getOctaveBands(N: number, fCtr0: number): any[];
}
class Oscillator {
/**
* Creates a signal that oscillates between -1.0 and
* 1.0. By default, the oscillation takes the form of
* a sinusoidal shape ('sine'). Additional types
* include 'triangle', 'sawtooth' and 'square'. The
* frequency defaults to 440 oscillations per second
* (440Hz, equal to the pitch of an 'A' note). Set
* the type of oscillation with setType(), or by
* instantiating a specific oscillator: p5.SinOsc,
* p5.TriOsc, p5.SqrOsc, or p5.SawOsc.
*
* @param [freq] frequency defaults to 440Hz
* @param [type] type of oscillator. Options: 'sine'
* (default), 'triangle', 'sawtooth', 'square'
*/
constructor(freq?: number, type?: string);
/**
* Start an oscillator. Starting an oscillator on a
* user gesture will enable audio in browsers that
* have a strict autoplay policy, including Chrome
* and most mobile devices. See also:
* userStartAudio().
* @param [time] startTime in seconds from now.
* @param [frequency] frequency in Hz.
*/
start(time?: number, frequency?: number): void;
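// Illustrative sketch: create an oscillator and start it on a user gesture so that
// browsers with strict autoplay policies will allow audio.
//
//   let osc: p5.Oscillator;
//   function setup() {
//     osc = new p5.Oscillator(440, 'sine');
//     osc.amp(0);           // start silent
//   }
//   function mousePressed() {
//     osc.start();
//     osc.amp(0.5, 0.1);    // fade in over 0.1 s
//   }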
/**
* Stop an oscillator. Accepts an optional parameter
* to determine how long (in seconds from now) until
* the oscillator stops.
* @param secondsFromNow Time, in seconds from now.
*/
stop(secondsFromNow: number): void;
/**
* Set the amplitude between 0 and 1.0. Or, pass in
* an object such as an oscillator to modulate
* amplitude with an audio signal.
* @param vol between 0 and 1.0 or a modulating
* signal/oscillator
* @param [rampTime] create a fade that lasts
* rampTime
* @param [timeFromNow] schedule this event to happen
* seconds from now
* @return gain If no value is provided, returns the
* Web Audio API AudioParam that controls this
* oscillator's gain/amplitude/volume
*/
amp(vol: number | object, rampTime?: number, timeFromNow?: number): AudioParam;
/**
* Returns the value of output gain
* @return Amplitude value between 0.0 and 1.0
*/
getAmp(): number;
/**
* Set frequency of an oscillator to a value. Or,
* pass in an object such as an oscillator to
* modulate the frequency with an audio signal.
* @param Frequency Frequency in Hz or modulating
* signal/oscillator
* @param [rampTime] Ramp time (in seconds)
* @param [timeFromNow] Schedule this event to happen
* at x seconds from now
* @return Frequency If no value is provided, returns
* the Web Audio API AudioParam that controls this
* oscillator's frequency
*/
freq(Frequency: number | object, rampTime?: number, timeFromNow?: number): AudioParam;
/**
* Returns the current frequency of the oscillator.
* @return Frequency of oscillator in Hertz
*/
getFreq(): number;
/**
* Set type to 'sine', 'triangle', 'sawtooth' or
* 'square'.
* @param type 'sine', 'triangle', 'sawtooth' or
* 'square'.
*/
setType(type: string): void;
/**
* Returns the current type of the oscillator, e.g.
* 'sine', 'triangle', 'sawtooth' or 'square'.
* @return type of oscillator, e.g. 'sine',
* 'triangle', 'sawtooth' or 'square'.
*/
getType(): string;
/**
* Connect to a p5.sound / Web Audio object.
* @param unit A p5.sound or Web Audio object
*/
connect(unit: object): void;
/**
* Disconnect all outputs
*/
disconnect(): void;
/**
* Pan between Left (-1) and Right (1)
* @param panning Number between -1 and 1
* @param timeFromNow schedule this event to happen
* seconds from now
*/
pan(panning: number, timeFromNow: number): void;
/**
* Returns the current value of panPosition, between
* Left (-1) and Right (1).
* @return panPosition of oscillator, between Left
* (-1) and Right (1)
*/
getPan(): number;
/**
* Set the phase of an oscillator between 0.0 and
* 1.0. In this implementation, phase is a delay time
* based on the oscillator's current frequency.
* @param phase float between 0.0 and 1.0
*/
phase(phase: number): void;
/**
* Add a value to the p5.Oscillator's output
* amplitude, and return the oscillator. Calling this
* method again will override the initial add() with
* a new value.
* @param number Constant number to add
* @return Oscillator Returns this oscillator with
* scaled output
*/
add(number: number): Oscillator;
/**
* Multiply the p5.Oscillator's output amplitude by a
* fixed value (i.e. turn it up!). Calling this
* method again will override the initial mult() with
* a new value.
* @param number Constant number to multiply
* @return Oscillator Returns this oscillator with
* multiplied output
*/
mult(number: number): Oscillator;
/**
* Scale this oscillator's amplitude values to a
* given range, and return the oscillator. Calling
* this method again will override the initial
* scale() with new values.
* @param inMin input range minimum
* @param inMax input range maximum
* @param outMin output range minimum
* @param outMax output range maximum
* @return Oscillator Returns this oscillator with
* scaled output
*/
scale(inMin: number, inMax: number, outMin: number, outMax: number): Oscillator;
}
class Envelope {
/**
* Envelopes are pre-defined amplitude distributions
* over time. Typically, envelopes are used to
* control the output volume of an object, a series
* of fades referred to as Attack, Decay, Sustain and
* Release ( ADSR ). Envelopes can also control other
* Web Audio Parameters—for example, a p5.Envelope
* can control an Oscillator's frequency like this:
* osc.freq(env). Use setRange to change the
* attack/release level. Use setADSR to change
* attackTime, decayTime, sustainPercent and
* releaseTime.
*
* Use the play method to play the entire envelope,
* the ramp method for a pingable trigger, or
* triggerAttack/ triggerRelease to trigger
* noteOn/noteOff.
*
*/
constructor();
/**
* Reset the envelope with a series of time/value
* pairs.
* @param attackTime Time (in seconds) before level
* reaches attackLevel
* @param attackLevel Typically an amplitude between
* 0.0 and 1.0
* @param decayTime Time
* @param decayLevel Amplitude (In a standard ADSR
* envelope, decayLevel = sustainLevel)
* @param releaseTime Release Time (in seconds)
* @param releaseLevel Amplitude
*/
set(
attackTime: number,
attackLevel: number,
decayTime: number,
decayLevel: number,
releaseTime: number,
releaseLevel: number
): void;
/**
* Set values like a traditional ADSR envelope.
* @param attackTime Time (in seconds) before envelope
* reaches Attack Level
* @param [decayTime] Time (in seconds) before
* envelope reaches Decay/Sustain Level
* @param [susRatio] Ratio between attackLevel and
* releaseLevel, on a scale from 0 to 1, where 1.0 =
* attackLevel, 0.0 = releaseLevel. The susRatio
* determines the decayLevel and the level at which
* the sustain portion of the envelope will sustain.
* For example, if attackLevel is 0.4, releaseLevel
* is 0, and susRatio is 0.5, the decayLevel would be
* 0.2. If attackLevel is increased to 1.0 (using
* setRange), then decayLevel would increase
* proportionally, to become 0.5.
* @param [releaseTime] Time in seconds from now
* (defaults to 0)
*/
setADSR(attackTime: number, decayTime?: number, susRatio?: number, releaseTime?: number): void;
/**
* Set max (attackLevel) and min (releaseLevel) of
* envelope.
* @param aLevel attack level (defaults to 1)
* @param rLevel release level (defaults to 0)
*/
setRange(aLevel: number, rLevel: number): void;
/**
* Assign a parameter to be controlled by this
* envelope. If a p5.Sound object is given, then the
* p5.Envelope will control its output gain. If
* multiple inputs are provided, the env will control
* all of them.
* @param [inputs] A p5.sound object or Web Audio
* Param.
*/
setInput(inputs?: object): void;
/**
* Set whether the envelope ramp is linear (default)
* or exponential. Exponential ramps can be useful
* because we perceive amplitude and frequency
* logarithmically.
* @param isExp true is exponential, false is linear
*/
setExp(isExp: boolean): void;
/**
* Play tells the envelope to start acting on a given
* input. If the input is a p5.sound object (i.e.
* AudioIn, Oscillator, SoundFile), then Envelope
* will control its output volume. Envelopes can also
* be used to control any Web Audio Audio Param.
* @param unit A p5.sound object or Web Audio Param.
* @param [startTime] time from now (in seconds) at
* which to play
* @param [sustainTime] time to sustain before
* releasing the envelope
*/
play(unit: object, startTime?: number, sustainTime?: number): void;
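// Illustrative sketch: shape an oscillator's volume with an ADSR envelope and
// retrigger it on each mouse press.
//
//   let osc: p5.Oscillator;
//   let env: p5.Envelope;
//   function setup() {
//     osc = new p5.Oscillator(220, 'triangle');
//     env = new p5.Envelope();
//     env.setADSR(0.01, 0.2, 0.3, 0.5);   // attackTime, decayTime, susRatio, releaseTime
//     env.setRange(0.8, 0);               // attackLevel, releaseLevel
//     osc.amp(0);                         // the envelope drives the volume on each trigger
//     osc.start();
//   }
//   function mousePressed() {
//     env.play(osc);
//   }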
/**
* Trigger the Attack, and Decay portion of the
* Envelope. Similar to holding down a key on a
* piano, but it will hold the sustain level until
* you let go. Input can be any p5.sound object, or a
* Web Audio Param.
* @param unit p5.sound Object or Web Audio Param
* @param secondsFromNow time from now (in seconds)
*/
triggerAttack(unit: object, secondsFromNow: number): void;
/**
* Trigger the Release of the Envelope. This is
* similar to releasing the key on a piano and
* letting the sound fade according to the release
* level and release time.
* @param unit p5.sound Object or Web Audio Param
* @param secondsFromNow time to trigger the release
*/
triggerRelease(unit: object, secondsFromNow: number): void;
/**
* Exponentially ramp to a value using the first two
* values from setADSR(attackTime, decayTime) as
* time constants for simple exponential ramps. If
* the value is higher than current value, it uses
* attackTime, while a decrease uses decayTime.
* @param unit p5.sound Object or Web Audio Param
* @param secondsFromNow When to trigger the ramp
* @param v Target value
* @param [v2] Second target value
*/
ramp(unit: object, secondsFromNow: number, v: number, v2?: number): void;
/**
* Add a value to the p5.Envelope's output
* amplitude, and return the envelope. Calling this
* method again will override the initial add() with
* new values.
* @param number Constant number to add
* @return Envelope Returns this envelope with scaled
* output
*/
add(number: number): Envelope;
/**
* Multiply the p5.Envelope's output amplitude by a
* fixed value. Calling this method again will
* override the initial mult() with new values.
* @param number Constant number to multiply
* @return Envelope Returns this envelope with scaled
* output
*/
mult(number: number): Envelope;
/**
* Scale this envelope's amplitude values to a given
* range, and return the envelope. Calling this
* method again will override the initial scale()
* with new values.
* @param inMin input range minimum
* @param inMax input range maximum
* @param outMin output range minimum
* @param outMax output range maximum
* @return Envelope Returns this envelope with scaled
* output
*/
scale(inMin: number, inMax: number, outMin: number, outMax: number): Envelope;
/**
* Time until envelope reaches attackLevel
*/
attackTime: any;
/**
* Level once attack is complete.
*/
attackLevel: any;
/**
* Time until envelope reaches decayLevel.
*/
decayTime: any;
/**
* Level after decay. The envelope will sustain here
* until it is released.
*/
decayLevel: any;
/**
* Duration of the release portion of the envelope.
*/
releaseTime: any;
/**
* Level at the end of the release.
*/
releaseLevel: any;
}
class Noise extends Oscillator {
/**
* Noise is a type of oscillator that generates a
* buffer with random values.
*
* @param type Type of noise can be 'white'
* (default), 'brown' or 'pink'.
*/
constructor(type: string);
/**
* Set type of noise to 'white', 'pink' or 'brown'.
* White is the default.
* @param [type] 'white', 'pink' or 'brown'
*/
setType(type?: string): void;
}
class Pulse extends Oscillator {
/**
* Creates a Pulse object, an oscillator that
* implements Pulse Width Modulation. The pulse is
* created with two oscillators. Accepts parameters
* to set the frequency and the width between the
* pulses. See p5.Oscillator for a full list of
* methods.
*
* @param [freq] Frequency in oscillations per second
* (Hz)
* @param [w] Width between the pulses (0 to 1.0,
* defaults to 0)
*/
constructor(freq?: number, w?: number);
/**
* Set the width of a Pulse object (an oscillator
* that implements Pulse Width Modulation).
* @param [width] Width between the pulses (0 to 1.0,
* defaults to 0)
*/
width(width?: number): void;
}
class AudioIn {
/**
* Get audio from an input, i.e. your computer's
* microphone. Turn the mic on/off with the start()
* and stop() methods. When the mic is on, its volume
* can be measured with getLevel or by connecting an
* FFT object.
*
* If you want to hear the AudioIn, use the
* .connect() method. AudioIn does not connect to
* p5.sound output by default to prevent feedback.
*
* Note: This uses the getUserMedia/Stream API,
* which is not supported by certain browsers. In the
* Chrome browser, access is limited to localhost and
* https, and access over http may be limited.
*
* @param [errorCallback] A function to call if there
* is an error accessing the AudioIn. For example,
* Safari and iOS devices do not currently allow
* microphone access.
*/
constructor(errorCallback?: (...args: any[]) => any);
/**
* Start processing audio input. This enables the use
* of other AudioIn methods like getLevel(). Note
* that by default, AudioIn is not connected to
* p5.sound's output. So you won't hear anything
* unless you use the connect() method.
*
* Certain browsers limit access to the user's
* microphone. For example, Chrome only allows access
* from localhost and over https. For this reason,
* you may want to include an errorCallback—a
* function that is called in case the browser won't
* provide mic access.
* @param [successCallback] Name of a function to
* call on success.
* @param [errorCallback] Name of a function to call
* if there was an error. For example, some browsers
* do not support getUserMedia.
*/
start(successCallback?: (...args: any[]) => any, errorCallback?: (...args: any[]) => any): void;
/**
* Turn the AudioIn off. If the AudioIn is stopped,
* it cannot getLevel(). If re-starting, the user may
* be prompted for permission access.
*/
stop(): void;
/**
* Connect to an audio unit. If no parameter is
* provided, will connect to the main output (i.e.
* your speakers).
* @param [unit] An object that accepts audio input,
* such as an FFT
*/
connect(unit?: object): void;
/**
* Disconnect the AudioIn from all audio units. For
* example, if connect() had been called,
* disconnect() will stop sending signal to your
* speakers.
*/
disconnect(): void;
/**
* Read the Amplitude (volume level) of an AudioIn.
* The AudioIn class contains its own instance of the
* Amplitude class to help make it easy to get a
* microphone's volume level. Accepts an optional
* smoothing value (between 0.0 and 1.0). NOTE: AudioIn must
* .start() before using .getLevel().
* @param [smoothing] Smoothing is 0.0 by default.
* Smooths values based on previous values.
* @return Volume level (between 0.0 and 1.0)
*/
getLevel(smoothing?: number): number;
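// Illustrative sketch: read the microphone level. Browsers typically require a
// user gesture (see userStartAudio()) and permission before mic input is enabled.
//
//   let mic: p5.AudioIn;
//   function setup() {
//     createCanvas(200, 200);
//     mic = new p5.AudioIn();
//     mic.start();                   // may prompt the user for microphone access
//   }
//   function draw() {
//     background(220);
//     const level = mic.getLevel();  // 0.0 to 1.0
//     ellipse(width / 2, height / 2, level * 200, level * 200);
//   }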
/**
* Set amplitude (volume) of a mic input between 0
* and 1.0.
* @param vol between 0 and 1.0
* @param [time] ramp time (optional)
*/
amp(vol: number, time?: number): void;
/**
* Returns a list of available input sources. This is
* a wrapper for MediaDevices.enumerateDevices(),
* and it returns a Promise.
* @param [successCallback] This callback function
* handles the sources when they have been
* enumerated. The callback function receives the
* deviceList array as its only argument
* @param [errorCallback] This optional callback
* receives the error message as its argument.
* @return Returns a Promise that can be used in
* place of the callbacks, similar to the
* enumerateDevices() method
*/
getSources(successCallback?: (...args: any[]) => any, errorCallback?: (...args: any[]) => any): Promise<any>;
/**
* Set the input source. Accepts a number
* representing a position in the array returned by
* getSources(). This is only available in browsers
* that support
* navigator.mediaDevices.enumerateDevices()
* @param num position of input source in the array
*/
setSource(num: number): void;
input: GainNode;
output: GainNode;
stream: MediaStream | null;
mediaStream: MediaStreamAudioSourceNode | null;
currentSource: number | null;
/**
* The client must allow the browser to access their
* microphone / audioin source. Default: false. Will
* become true when the client enables access.
*/
enabled: boolean;
/**
* Amplitude object for the input. Connected to the
* input by default, but not to the master output.
*/
amplitude: Amplitude;
}
class Effect {
/**
* Effect is a base class for audio effects in p5.
* This module handles the nodes and methods that are
* common and useful for current and future effects.
*
* This class is extended by p5.Distortion,
* p5.Compressor, p5.Delay, p5.Filter, p5.Reverb.
*
* @param [ac] Reference to the audio context of the
* p5 object
* @param [input] Gain Node effect wrapper
* @param [output] Gain Node effect wrapper
* @param [_drywet] Tone.JS CrossFade node (defaults
* to value: 1)
* @param [wet] Effects that extend this class should
* connect their wet signal to this gain node, so
* that dry and wet signals are mixed properly.
*/
constructor(ac?: object, input?: AudioNode, output?: AudioNode, _drywet?: object, wet?: AudioNode);
/**
* Set the output volume of the filter.
* @param [vol] amplitude between 0 and 1.0
* @param [rampTime] create a fade that lasts until
* rampTime
* @param [tFromNow] schedule this event to happen in
* tFromNow seconds
*/
amp(vol?: number, rampTime?: number, tFromNow?: number): void;
/**
* Link effects together in a chain. Example usage:
* filter.chain(reverb, delay, panner). May be used
* with an open-ended number of arguments.
* @param [arguments] Chain together multiple sound
* objects
*/
chain(arguments?: object): void;
/**
* Adjust the dry/wet value.
* @param [fade] The desired drywet value (0 - 1.0)
*/
drywet(fade?: number): void;
/**
* Send output to a p5.js-sound object or a Web Audio
* Node, or use the signal to control an AudioParam.
* @param unit A p5.sound or Web Audio object
*/
connect(unit: object): void;
/**
* Disconnect all output.
*/
disconnect(): void;
}
class Filter extends Effect {
/**
* A p5.Filter uses a Web Audio Biquad Filter to
* filter the frequency response of an input source.
* Subclasses include:
*
* • p5.LowPass: Allows frequencies below the cutoff
* frequency to pass through, and attenuates
* frequencies above the cutoff.
*
* • p5.HighPass: The opposite of a lowpass filter.
*
* • p5.BandPass: Allows a range of frequencies to
* pass through and attenuates the frequencies below
* and above this frequency range.
*
* The .res() method controls either the width of the
* bandpass, or the resonance of the low/highpass
* cutoff frequency.
*
* This class extends p5.Effect. Methods amp(),
* chain(), drywet(), connect(), and disconnect() are
* available.
*
* @param [type] 'lowpass' (default), 'highpass',
* 'bandpass'
*/
constructor(type?: string);
/**
* Filter an audio signal according to a set of
* filter parameters.
* @param Signal An object that outputs audio
* @param [freq] Frequency in Hz, from 10 to 22050
* @param [res] Resonance/Width of the filter
* frequency from 0.001 to 1000
*/
process(Signal: object, freq?: number, res?: number): void;
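// Illustrative sketch: run white noise through a lowpass filter and sweep the
// cutoff with the mouse. p5.LowPass is the Filter subclass described above.
//
//   let noise: p5.Noise;
//   let filter: p5.Filter;
//   function setup() {
//     createCanvas(200, 200);
//     filter = new p5.LowPass();
//     noise = new p5.Noise('white');
//     noise.disconnect();            // route the noise only through the filter
//     filter.process(noise);
//     noise.start();
//   }
//   function draw() {
//     const cutoff = map(mouseX, 0, width, 10, 22050);
//     filter.set(cutoff, 10);        // frequency in Hz, resonance
//   }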
/**
* Set the frequency and the resonance of the filter.
* @param [freq] Frequency in Hz, from 10 to 22050
* @param [res] Resonance (Q) from 0.001 to 1000
* @param [timeFromNow] schedule this event to happen
* seconds from now
*/
set(freq?: number, res?: number, timeFromNow?: number): void;
/**
* Set the filter frequency, in Hz, from 10 to 22050
* (the range of human hearing, although in reality
* most people hear in a narrower range).
* @param freq Filter Frequency
* @param [timeFromNow] schedule this event to happen
* seconds from now