/*!
 * wavesurfer.js — WebAudio backend
 * Version: (unspecified)
 * Interactive navigable audio visualization using Web Audio and Canvas
 */
'use strict';
// WebAudio backend: plays decoded audio through a node graph
// (source -> analyser -> [optional filters] -> gain -> destination).
WaveSurfer.WebAudio = {
    // Buffer size (in sample-frames) for the script processor that
    // acts as the playback clock.
    scriptBufferSize: 256,
    // FFT size for the analyser node; frequencyBinCount is derived
    // from it (fftSize / 2).
    fftSize: 128,

    // Indices into `this.states` (see init/setState).
    PLAYING_STATE: 0,
    PAUSED_STATE: 1,
    FINISHED_STATE: 2,

    /**
     * Lazily create and cache a shared AudioContext.
     *
     * @returns {AudioContext} The shared context.
     * @throws {Error} If neither AudioContext nor webkitAudioContext
     * is available in this browser.
     */
    getAudioContext: function () {
        if (!(window.AudioContext || window.webkitAudioContext)) {
            throw new Error("Your browser doesn't support Web Audio");
        }
        if (!WaveSurfer.WebAudio.audioContext) {
            // Cached on the shared object so every instance reuses one
            // context (browsers limit concurrent AudioContexts).
            WaveSurfer.WebAudio.audioContext = new (
                window.AudioContext || window.webkitAudioContext
            );
        }
        return WaveSurfer.WebAudio.audioContext;
    },
    /**
     * Initialize the backend: record clock state, build the node
     * graph, and start out paused.
     *
     * @param {Object} params Backend options; may provide
     * `audioContext` (an existing context to reuse) and `audioRate`
     * (initial playback rate).
     */
    init: function (params) {
        this.params = params;
        this.ac = params.audioContext || this.getAudioContext();
        // AudioContext timestamp of the last play/seek; getPlayedTime()
        // measures elapsed playback from here.
        this.lastPlay = this.ac.currentTime;
        // Offset (seconds into the clip) playback last started from.
        this.startPosition = 0;
        this.states = [
            Object.create(WaveSurfer.WebAudio.state.playing),
            Object.create(WaveSurfer.WebAudio.state.paused),
            Object.create(WaveSurfer.WebAudio.state.finished)
        ];
        this.setState(this.PAUSED_STATE);
        // Order matters: the gain node must exist before
        // createAnalyserNode() connects the analyser to it.
        this.createVolumeNode();
        this.createScriptNode();
        this.createAnalyserNode();
        this.setPlaybackRate(this.params.audioRate);
    },
disconnectFilters: function () {
if (this.filters) {
this.filters.forEach(function (filter) {
filter && filter.disconnect();
});
this.filters = null;
}
},
setState: function (state) {
if (this.state !== this.states[state]) {
this.state = this.states[state];
this.state.init.call(this);
}
},
// Unpacked filters
setFilter: function () {
this.setFilters([].slice.call(arguments));
},
/**
* @param {Array} filters Packed ilters array
*/
setFilters: function (filters) {
this.disconnectFilters();
if (filters && filters.length) {
this.filters = filters;
// Connect each filter in turn
filters.reduce(function (prev, curr) {
prev.connect(curr);
return curr;
}, this.analyser).connect(this.gainNode);
} else {
this.analyser.connect(this.gainNode);
}
},
createScriptNode: function () {
var my = this;
var bufferSize = this.scriptBufferSize;
if (this.ac.createScriptProcessor) {
this.scriptNode = this.ac.createScriptProcessor(bufferSize);
} else {
this.scriptNode = this.ac.createJavaScriptNode(bufferSize);
}
this.scriptNode.connect(this.ac.destination);
this.scriptNode.onaudioprocess = function () {
var time = my.getCurrentTime();
if (my.state === my.states[my.PLAYING_STATE]) {
my.fireEvent('audioprocess', time);
}
if (my.buffer && time > my.getDuration()) {
my.setState(my.FINISHED_STATE);
}
};
},
    /**
     * Create the analyser node backing waveform().
     * fftSize must be assigned before sizing the data array, because
     * frequencyBinCount is derived from it (fftSize / 2).
     */
    createAnalyserNode: function () {
        this.analyser = this.ac.createAnalyser();
        this.analyser.fftSize = this.fftSize;
        this.analyserData = new Uint8Array(this.analyser.frequencyBinCount);
        this.analyser.connect(this.gainNode);
    },
/**
* Create the gain node needed to control the playback volume.
*/
createVolumeNode: function () {
// Create gain node using the AudioContext
if (this.ac.createGain) {
this.gainNode = this.ac.createGain();
} else {
this.gainNode = this.ac.createGainNode();
}
// Add the gain node to the graph
this.gainNode.connect(this.ac.destination);
},
    /**
     * Set the gain to a new value.
     *
     * Assigned directly to the gain AudioParam's value (takes effect
     * immediately, no ramping).
     *
     * @param {Number} newGain The new gain, a floating point value
     * between 0 and 1. 0 being no gain and 1 being maximum gain.
     */
    setVolume: function (newGain) {
        this.gainNode.gain.value = newGain;
    },
    /**
     * Get the current gain.
     *
     * Reads straight from the gain AudioParam.
     *
     * @returns {Number} The current gain, a floating point value
     * between 0 and 1. 0 being no gain and 1 being maximum gain.
     */
    getVolume: function () {
        return this.gainNode.gain.value;
    },
decodeArrayBuffer: function (arraybuffer, callback, errback) {
var my = this;
this.ac.decodeAudioData(arraybuffer, function (data) {
my.buffer = data;
callback(data);
}, errback);
},
/**
* @returns {Float32Array} Array of peaks.
*/
getPeaks: function (length) {
var buffer = this.buffer;
var sampleSize = buffer.length / length;
var sampleStep = ~~(sampleSize / 10) || 1;
var channels = buffer.numberOfChannels;
var peaks = new Float32Array(length);
for (var c = 0; c < channels; c++) {
var chan = buffer.getChannelData(c);
for (var i = 0; i < length; i++) {
var start = ~~(i * sampleSize);
var end = ~~(start + sampleSize);
var max = 0;
for (var j = start; j < end; j += sampleStep) {
var value = chan[j];
if (value > max) {
max = value;
// faster than Math.abs
} else if (-value > max) {
max = -value;
}
}
if (c == 0 || max > peaks[i]) {
peaks[i] = max;
}
}
}
return peaks;
},
    /**
     * @returns {Number} Fraction of the clip played (delegated to the
     * current playback state object).
     */
    getPlayedPercents: function () {
        return this.state.getPlayedPercents.call(this);
    },
disconnectSource: function () {
if (this.source) {
this.source.disconnect();
}
},
    /**
     * Returns the real-time waveform data.
     *
     * NOTE: despite the original doc claiming "frequency data", this
     * reads getByteTimeDomainData — i.e. time-domain samples, with
     * 128 representing the zero line.
     *
     * @return {Uint8Array} Time-domain sample data.
     * Values range from 0 to 255.
     */
    waveform: function () {
        this.analyser.getByteTimeDomainData(this.analyserData);
        return this.analyserData;
    },
    /**
     * Tear down the backend: stop playback, drop all event listeners,
     * release the decoded buffer, and disconnect every node created
     * by this backend.
     */
    destroy: function () {
        this.pause();
        this.unAll();
        this.buffer = null;
        this.disconnectFilters();
        this.disconnectSource();
        this.gainNode.disconnect();
        this.scriptNode.disconnect();
        this.analyser.disconnect();
    },
    /**
     * Adopt a decoded AudioBuffer for playback, rewinding the playhead
     * to the start and resetting the elapsed-time clock.
     *
     * @param {AudioBuffer} buffer The decoded audio.
     */
    load: function (buffer) {
        this.startPosition = 0;
        this.lastPlay = this.ac.currentTime;
        this.buffer = buffer;
        this.createSource();
    },
createSource: function () {
this.disconnectSource();
this.source = this.ac.createBufferSource();
//adjust for old browsers.
this.source.start = this.source.start || this.source.noteGrainOn;
this.source.stop = this.source.stop || this.source.noteOff;
this.source.playbackRate.value = this.playbackRate;
this.source.buffer = this.buffer;
this.source.connect(this.analyser);
},
    /**
     * @returns {Boolean} True unless currently in the playing state
     * (both the paused and finished states count as paused).
     */
    isPaused: function () {
        return this.state !== this.states[this.PLAYING_STATE];
    },
getDuration: function () {
return this.buffer.duration || 0;
},
    /**
     * Update the logical playhead without touching the source node.
     *
     * @param {?Number} start Seek target in seconds; when null/undefined,
     * keeps the current time (wrapping to 0 once past the clip's end).
     * @param {?Number} end Scheduled stop point; defaults to the clip's
     * duration.
     * @returns {{start: Number, end: Number}} The resolved positions.
     */
    seekTo: function (start, end) {
        // `== null` deliberately matches both null and undefined.
        if (start == null) {
            start = this.getCurrentTime();
            if (start >= this.getDuration()) {
                start = 0;
            }
        }
        if (end == null) {
            end = this.getDuration();
        }
        this.startPosition = start;
        // Restart the elapsed-time clock from now.
        this.lastPlay = this.ac.currentTime;
        // Seeking out of the finished state resumes a pausable state.
        if (this.state === this.states[this.FINISHED_STATE]) {
            this.setState(this.PAUSED_STATE);
        }
        return { start: start, end: end };
    },
    /**
     * @returns {Number} Seconds of audio played since the last
     * play/seek, scaled by the playback rate.
     */
    getPlayedTime: function () {
        return (this.ac.currentTime - this.lastPlay) * this.playbackRate;
    },
    /**
     * Plays the loaded audio region.
     *
     * @param {Number} start Start offset in seconds,
     * relative to the beginning of a clip.
     * @param {Number} end When to stop
     * relative to the beginning of a clip.
     */
    play: function (start, end) {
        // need to re-create source on each playback
        this.createSource();
        var adjustedTime = this.seekTo(start, end);
        start = adjustedTime.start;
        end = adjustedTime.end;
        // AudioBufferSourceNode.start(when, offset, duration)
        this.source.start(0, start, end - start);
        this.setState(this.PLAYING_STATE);
    },
/**
* Pauses the loaded audio.
*/
pause: function () {
this.startPosition += this.getPlayedTime();
this.source && this.source.stop(0);
this.setState(this.PAUSED_STATE);
},
    /**
     * Returns the current time in seconds relative to the audioclip's duration.
     * Delegates to the current playback state object.
     */
    getCurrentTime: function () {
        return this.state.getCurrentTime.call(this);
    },
/**
* Set the audio source playback rate.
*/
setPlaybackRate: function (value) {
value = value || 1;
if (this.isPaused()) {
this.playbackRate = value;
} else {
this.pause();
this.playbackRate = value;
this.play();
}
}
};
// Per-state strategy objects; init() runs on entering the state
// (invoked with the backend as `this` — see setState).
WaveSurfer.WebAudio.state = {};

WaveSurfer.WebAudio.state.playing = {
    init: function () {
    },

    // Fraction played; `|| 0` maps a NaN (zero duration) to 0.
    getPlayedPercents: function () {
        var percent = this.getCurrentTime() / this.getDuration();
        return percent || 0;
    },

    // Playhead = where playback started + time elapsed since.
    getCurrentTime: function () {
        return this.startPosition + this.getPlayedTime();
    }
};
WaveSurfer.WebAudio.state.paused = {
    init: function () {
    },

    // Fraction played; `|| 0` maps a NaN (zero duration) to 0.
    getPlayedPercents: function () {
        var percent = this.getCurrentTime() / this.getDuration();
        return percent || 0;
    },

    // While paused the playhead rests at the banked start position.
    getCurrentTime: function () {
        return this.startPosition;
    }
};
WaveSurfer.WebAudio.state.finished = {
    // Entering the finished state announces completion.
    init: function () {
        this.fireEvent('finish');
    },

    getPlayedPercents: function () {
        return 1;
    },

    getCurrentTime: function () {
        return this.getDuration();
    }
};
WaveSurfer.util.extend(WaveSurfer.WebAudio, WaveSurfer.Observer);