wavesurfer
Interactive navigable audio visualization using Web Audio and Canvas
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as a named module ('wavesurfer')
define('wavesurfer', [], function () {
return (root['WaveSurfer'] = factory());
});
} else if (typeof exports === 'object') {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like environments that support module.exports,
// like Node.
module.exports = factory();
} else {
root['WaveSurfer'] = factory();
}
}(this, function () {
'use strict';
var WaveSurfer = {
defaultParams: {
audioContext : null,
audioRate : 1,
autoCenter : true,
backend : 'WebAudio',
container : null,
cursorColor : '#333',
cursorWidth : 1,
dragSelection : true,
fillParent : true,
forceDecode : false,
height : 128,
hideScrollbar : false,
interact : true,
loopSelection : true,
mediaContainer: null,
mediaControls : false,
mediaType : 'audio',
minPxPerSec : 20,
partialRender : false,
pixelRatio : window.devicePixelRatio || screen.deviceXDPI / screen.logicalXDPI,
progressColor : '#555',
normalize : false,
renderer : 'MultiCanvas',
scrollParent : false,
skipLength : 2,
splitChannels : false,
waveColor : '#999',
},
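// Any of these defaults can be overridden by the params object passed
// to init()/create(). A minimal sketch (values are illustrative and
// '#waveform' is an assumed container selector):
//
//   WaveSurfer.create({
//       container: '#waveform',
//       height: 64,
//       minPxPerSec: 50,
//       scrollParent: true
//   });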
init: function (params) {
// Extract relevant parameters (or defaults)
this.params = WaveSurfer.util.extend({}, this.defaultParams, params);
this.container = 'string' == typeof params.container ?
document.querySelector(this.params.container) :
this.params.container;
if (!this.container) {
throw new Error('Container element not found');
}
if (this.params.mediaContainer == null) {
this.mediaContainer = this.container;
} else if (typeof this.params.mediaContainer == 'string') {
this.mediaContainer = document.querySelector(this.params.mediaContainer);
} else {
this.mediaContainer = this.params.mediaContainer;
}
if (!this.mediaContainer) {
throw new Error('Media Container element not found');
}
// Used to save the current volume when muting so we can
// restore once unmuted
this.savedVolume = 0;
// The current muted state
this.isMuted = false;
// Will hold a list of event descriptors that need to be
// cancelled on subsequent loads of audio
this.tmpEvents = [];
// Holds any running audio downloads
this.currentAjax = null;
this.createDrawer();
this.createBackend();
this.createPeakCache();
this.isDestroyed = false;
},
createDrawer: function () {
var my = this;
this.drawer = Object.create(WaveSurfer.Drawer[this.params.renderer]);
this.drawer.init(this.container, this.params);
this.drawer.on('redraw', function () {
my.drawBuffer();
my.drawer.progress(my.backend.getPlayedPercents());
});
// Click-to-seek
this.drawer.on('click', function (e, progress) {
setTimeout(function () {
my.seekTo(progress);
}, 0);
});
// Relay the scroll event from the drawer
this.drawer.on('scroll', function (e) {
if (my.params.partialRender) {
my.drawBuffer();
}
my.fireEvent('scroll', e);
});
},
createBackend: function () {
var my = this;
if (this.backend) {
this.backend.destroy();
}
// Back compat
if (this.params.backend == 'AudioElement') {
this.params.backend = 'MediaElement';
}
if (this.params.backend == 'WebAudio' && !WaveSurfer.WebAudio.supportsWebAudio()) {
this.params.backend = 'MediaElement';
}
this.backend = Object.create(WaveSurfer[this.params.backend]);
this.backend.init(this.params);
this.backend.on('finish', function () { my.fireEvent('finish'); });
this.backend.on('play', function () { my.fireEvent('play'); });
this.backend.on('pause', function () { my.fireEvent('pause'); });
this.backend.on('audioprocess', function (time) {
my.drawer.progress(my.backend.getPlayedPercents());
my.fireEvent('audioprocess', time);
});
},
createPeakCache: function() {
if (this.params.partialRender) {
this.peakCache = Object.create(WaveSurfer.PeakCache);
this.peakCache.init();
}
},
getDuration: function () {
return this.backend.getDuration();
},
getCurrentTime: function () {
return this.backend.getCurrentTime();
},
play: function (start, end) {
this.fireEvent('interaction', this.play.bind(this, start, end));
this.backend.play(start, end);
},
pause: function () {
this.backend.isPaused() || this.backend.pause();
},
playPause: function () {
this.backend.isPaused() ? this.play() : this.pause();
},
isPlaying: function () {
return !this.backend.isPaused();
},
skipBackward: function (seconds) {
this.skip(-seconds || -this.params.skipLength);
},
skipForward: function (seconds) {
this.skip(seconds || this.params.skipLength);
},
skip: function (offset) {
var position = this.getCurrentTime() || 0;
var duration = this.getDuration() || 1;
position = Math.max(0, Math.min(duration, position + (offset || 0)));
this.seekAndCenter(position / duration);
},
seekAndCenter: function (progress) {
this.seekTo(progress);
this.drawer.recenter(progress);
},
seekTo: function (progress) {
this.fireEvent('interaction', this.seekTo.bind(this, progress));
var paused = this.backend.isPaused();
// pause during the seek so the drawer does not draw a stale
// position (e.g. when seeking backward while playing)
if (!paused) {
this.backend.pause();
}
// temporarily disable scrollParent to avoid small scroll jumps when seeking while paused
var oldScrollParent = this.params.scrollParent;
this.params.scrollParent = false;
this.backend.seekTo(progress * this.getDuration());
this.drawer.progress(this.backend.getPlayedPercents());
if (!paused) {
this.backend.play();
}
this.params.scrollParent = oldScrollParent;
this.fireEvent('seek', progress);
},
stop: function () {
this.pause();
this.seekTo(0);
this.drawer.progress(0);
},
/**
* Set the playback volume.
*
* @param {Number} newVolume A value between 0 and 1, 0 being no
* volume and 1 being full volume.
*/
setVolume: function (newVolume) {
this.backend.setVolume(newVolume);
},
/**
* Get the playback volume.
*/
getVolume: function () {
return this.backend.getVolume();
},
/**
* Set the playback rate.
*
* @param {Number} rate A positive number. E.g. 0.5 means half the
* normal speed, 2 means double speed and so on.
*/
setPlaybackRate: function (rate) {
this.backend.setPlaybackRate(rate);
},
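// Example (sketch): adjust playback speed on an already-initialized
// instance ('wavesurfer' is assumed to exist):
//
//   wavesurfer.setPlaybackRate(0.5); // half speed
//   wavesurfer.setPlaybackRate(2);   // double speed
//   wavesurfer.getPlaybackRate();    // -> 2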
/**
* Get the playback rate.
*/
getPlaybackRate: function () {
return this.backend.getPlaybackRate();
},
/**
* Toggle the volume on and off. If not currently muted, it will
* save the current volume value and turn the volume off.
* If currently muted, it will restore the volume to the saved
* value, and then reset the saved value.
*/
toggleMute: function () {
this.setMute(!this.isMuted);
},
setMute: function (mute) {
// ignore all muting requests if the audio is already in that state
if (mute === this.isMuted) {
return;
}
if (mute) {
// If currently not muted then save current volume,
// turn off the volume and update the mute properties
this.savedVolume = this.backend.getVolume();
this.backend.setVolume(0);
this.isMuted = true;
} else {
// If currently muted then restore to the saved volume
// and update the mute properties
this.backend.setVolume(this.savedVolume);
this.isMuted = false;
}
},
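// Example (sketch): wire a mute button to the mute API
// ('muteButton' and 'wavesurfer' are assumed to exist):
//
//   muteButton.addEventListener('click', function () {
//       wavesurfer.toggleMute();
//       muteButton.textContent = wavesurfer.getMute() ? 'Unmute' : 'Mute';
//   });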
/**
* Get the current mute status.
*/
getMute: function () {
return this.isMuted;
},
/**
* Get the list of current set filters as an array.
*
* Filters must be set with setFilters method first
*/
getFilters: function() {
return this.backend.filters || [];
},
toggleScroll: function () {
this.params.scrollParent = !this.params.scrollParent;
this.drawBuffer();
},
toggleInteraction: function () {
this.params.interact = !this.params.interact;
},
drawBuffer: function () {
var nominalWidth = Math.round(
this.getDuration() * this.params.minPxPerSec * this.params.pixelRatio
);
var parentWidth = this.drawer.getWidth();
var width = nominalWidth;
var start = this.drawer.getScrollX();
var end = Math.min(start + parentWidth, width);
// Fill container
if (this.params.fillParent && (!this.params.scrollParent || nominalWidth < parentWidth)) {
width = parentWidth;
start = 0;
end = width;
}
if (this.params.partialRender) {
var newRanges = this.peakCache.addRangeToPeakCache(width, start, end);
for (var i = 0; i < newRanges.length; i++) {
var peaks = this.backend.getPeaks(width, newRanges[i][0], newRanges[i][1]);
this.drawer.drawPeaks(peaks, width, newRanges[i][0], newRanges[i][1]);
}
} else {
start = 0;
end = width;
var peaks = this.backend.getPeaks(width, start, end);
this.drawer.drawPeaks(peaks, width, start, end);
}
this.fireEvent('redraw', peaks, width);
},
zoom: function (pxPerSec) {
this.params.minPxPerSec = pxPerSec;
this.params.scrollParent = true;
this.drawBuffer();
this.drawer.progress(this.backend.getPlayedPercents());
this.drawer.recenter(
this.getCurrentTime() / this.getDuration()
);
this.fireEvent('zoom', pxPerSec);
},
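// Example (sketch): bind zoom to a range input ('slider' is assumed
// to be an <input type="range"> with suitable min/max pixels-per-second
// values):
//
//   slider.addEventListener('input', function (e) {
//       wavesurfer.zoom(Number(e.target.value));
//   });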
/**
* Internal method.
*/
loadArrayBuffer: function (arraybuffer) {
this.decodeArrayBuffer(arraybuffer, function (data) {
if (!this.isDestroyed) {
this.loadDecodedBuffer(data);
}
}.bind(this));
},
/**
* Directly load an externally decoded AudioBuffer.
*/
loadDecodedBuffer: function (buffer) {
this.backend.load(buffer);
this.drawBuffer();
this.fireEvent('ready');
},
/**
* Loads audio data from a Blob or File object.
*
* @param {Blob|File} blob Audio data.
*/
loadBlob: function (blob) {
var my = this;
// Create file reader
var reader = new FileReader();
reader.addEventListener('progress', function (e) {
my.onProgress(e);
});
reader.addEventListener('load', function (e) {
my.loadArrayBuffer(e.target.result);
});
reader.addEventListener('error', function () {
my.fireEvent('error', 'Error reading file');
});
reader.readAsArrayBuffer(blob);
this.empty();
},
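// Example (sketch): render a locally selected file ('fileInput' is
// assumed to be an <input type="file"> element):
//
//   fileInput.addEventListener('change', function (e) {
//       var file = e.target.files[0];
//       if (file) { wavesurfer.loadBlob(file); }
//   });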
/**
* Loads audio and re-renders the waveform.
*/
load: function (url, peaks, preload) {
this.empty();
switch (this.params.backend) {
case 'WebAudio': return this.loadBuffer(url, peaks);
case 'MediaElement': return this.loadMediaElement(url, peaks, preload);
}
},
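// Example (sketch): 'audio.mp3' and 'peaksArray' are placeholders:
//
//   wavesurfer.load('audio.mp3');
//
//   // With pre-computed peaks (the WebAudio backend then defers the
//   // download/decode until the first interaction) and a 'none'
//   // preload hint for the MediaElement backend:
//   wavesurfer.load('audio.mp3', peaksArray, 'none');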
/**
* Loads audio using Web Audio buffer backend.
*/
loadBuffer: function (url, peaks) {
var load = (function (action) {
if (action) {
this.tmpEvents.push(this.once('ready', action));
}
return this.getArrayBuffer(url, this.loadArrayBuffer.bind(this));
}).bind(this);
if (peaks) {
this.backend.setPeaks(peaks);
this.drawBuffer();
this.tmpEvents.push(this.once('interaction', load));
} else {
return load();
}
},
/**
* Either create a media element, or load
* an existing media element.
* @param {String|HTMLElement} urlOrElt Either a path to a media file,
* or an existing HTML5 Audio/Video
* Element
* @param {Array} [peaks] Array of peaks. Required to bypass
* web audio dependency
*/
loadMediaElement: function (urlOrElt, peaks, preload) {
var url = urlOrElt;
if (typeof urlOrElt === 'string') {
this.backend.load(url, this.mediaContainer, peaks, preload);
} else {
var elt = urlOrElt;
this.backend.loadElt(elt, peaks);
// If peaks are not provided,
// url = element.src so we can get peaks with web audio
url = elt.src;
}
this.tmpEvents.push(
this.backend.once('canplay', (function () {
this.drawBuffer();
this.fireEvent('ready');
}).bind(this)),
this.backend.once('error', (function (err) {
this.fireEvent('error', err);
}).bind(this))
);
// If no pre-decoded peaks provided or pre-decoded peaks are
// provided with forceDecode flag, attempt to download the
// audio file and decode it with Web Audio.
if (peaks) { this.backend.setPeaks(peaks); }
if ((!peaks || this.params.forceDecode) && this.backend.supportsWebAudio()) {
this.getArrayBuffer(url, (function (arraybuffer) {
this.decodeArrayBuffer(arraybuffer, (function (buffer) {
this.backend.buffer = buffer;
this.backend.setPeaks(null);
this.drawBuffer();
this.fireEvent('waveform-ready');
}).bind(this));
}).bind(this));
}
},
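// Example (sketch): reuse an existing <audio> element instead of a URL.
// This requires the MediaElement backend; '#waveform' and 'peaksArray'
// are placeholders:
//
//   var ws = WaveSurfer.create({
//       container: '#waveform',
//       backend: 'MediaElement'
//   });
//   ws.load(document.querySelector('audio'), peaksArray);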
decodeArrayBuffer: function (arraybuffer, callback) {
this.arraybuffer = arraybuffer;
this.backend.decodeArrayBuffer(
arraybuffer,
(function (data) {
// Only use the decoded data if we haven't been destroyed or another decode started in the meantime
if (!this.isDestroyed && this.arraybuffer == arraybuffer) {
callback(data);
this.arraybuffer = null;
}
}).bind(this),
this.fireEvent.bind(this, 'error', 'Error decoding audiobuffer')
);
},
getArrayBuffer: function (url, callback) {
var my = this;
var ajax = WaveSurfer.util.ajax({
url: url,
responseType: 'arraybuffer'
});
this.currentAjax = ajax;
this.tmpEvents.push(
ajax.on('progress', function (e) {
my.onProgress(e);
}),
ajax.on('success', function (data, e) {
callback(data);
my.currentAjax = null;
}),
ajax.on('error', function (e) {
my.fireEvent('error', 'XHR error: ' + e.target.statusText);
my.currentAjax = null;
})
);
return ajax;
},
onProgress: function (e) {
var percentComplete;
if (e.lengthComputable) {
percentComplete = e.loaded / e.total;
} else {
// Approximate progress with an asymptotic
// function, and assume downloads in the 1-3 MB range.
percentComplete = e.loaded / (e.loaded + 1000000);
}
this.fireEvent('loading', Math.round(percentComplete * 100), e.target);
},
/**
* Exports PCM data into a JSON array and opens in a new window.
*/
exportPCM: function (length, accuracy, noWindow) {
length = length || 1024;
accuracy = accuracy || 10000;
noWindow = noWindow || false;
var peaks = this.backend.getPeaks(length, accuracy);
var arr = [].map.call(peaks, function (val) {
return Math.round(val * accuracy) / accuracy;
});
var json = JSON.stringify(arr);
if (!noWindow) {
window.open('data:application/json;charset=utf-8,' +
encodeURIComponent(json));
}
return json;
},
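// Example (sketch): get the peaks as a JSON string without opening
// a new window:
//
//   var json = wavesurfer.exportPCM(1024, 10000, true);
//   var peaks = JSON.parse(json);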
/**
* Save waveform image as data URI.
*
* The default format is 'image/png'. Other supported types are
* 'image/jpeg' and 'image/webp'.
*/
exportImage: function(format, quality) {
if (!format) {
format = 'image/png';
}
if (!quality) {
quality = 1;
}
return this.drawer.getImage(format, quality);
},
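// Example (sketch): offer the rendered waveform as a PNG download
// ('link' is an assumed <a> element):
//
//   link.href = wavesurfer.exportImage('image/png', 1);
//   link.download = 'waveform.png';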
cancelAjax: function () {
if (this.currentAjax) {
this.currentAjax.xhr.abort();
this.currentAjax = null;
}
},
clearTmpEvents: function () {
this.tmpEvents.forEach(function (e) { e.un(); });
},
/**
* Display empty waveform.
*/
empty: function () {
if (!this.backend.isPaused()) {
this.stop();
this.backend.disconnectSource();
}
this.cancelAjax();
this.clearTmpEvents();
this.drawer.progress(0);
this.drawer.setWidth(0);
this.drawer.drawPeaks({ length: this.drawer.getWidth() }, 0);
},
/**
* Remove events, elements and disconnect WebAudio nodes.
*/
destroy: function () {
this.fireEvent('destroy');
this.cancelAjax();
this.clearTmpEvents();
this.unAll();
this.backend.destroy();
this.drawer.destroy();
this.isDestroyed = true;
}
};
WaveSurfer.create = function (params) {
var wavesurfer = Object.create(WaveSurfer);
wavesurfer.init(params);
return wavesurfer;
};
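// Typical usage (a sketch; assumes an element matching '#waveform'
// exists and 'audio.mp3' is a reachable audio file):
//
//   var wavesurfer = WaveSurfer.create({
//       container: '#waveform',
//       waveColor: '#999',
//       progressColor: '#555'
//   });
//   wavesurfer.on('ready', function () {
//       wavesurfer.play();
//   });
//   wavesurfer.load('audio.mp3');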
WaveSurfer.util = {
extend: function (dest) {
var sources = Array.prototype.slice.call(arguments, 1);
sources.forEach(function (source) {
Object.keys(source).forEach(function (key) {
dest[key] = source[key];
});
});
return dest;
},
debounce: function (func, wait, immediate) {
var args, context, timeout;
var later = function() {
timeout = null;
if (!immediate) {
func.apply(context, args);
}
};
return function() {
context = this;
args = arguments;
var callNow = immediate && !timeout;
clearTimeout(timeout);
timeout = setTimeout(later, wait);
if (callNow) {
func.apply(context, args);
}
};
},
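// Example (sketch): debounce a redraw on window resize ('wavesurfer'
// is an assumed existing instance):
//
//   var redraw = WaveSurfer.util.debounce(function () {
//       wavesurfer.drawBuffer();
//   }, 150);
//   window.addEventListener('resize', redraw);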
min: function (values) {
var min = +Infinity;
for (var i in values) {
if (values[i] < min) {
min = values[i];
}
}
return min;
},
max: function (values) {
var max = -Infinity;
for (var i in values) {
if (values[i] > max) {
max = values[i];
}
}
return max;
},
getId: function () {
return 'wavesurfer_' + Math.random().toString(32).substring(2);
},
ajax: function (options) {
var ajax = Object.create(WaveSurfer.Observer);
var xhr = new XMLHttpRequest();
var fired100 = false;
xhr.open(options.method || 'GET', options.url, true);
xhr.responseType = options.responseType || 'json';
xhr.addEventListener('progress', function (e) {
ajax.fireEvent('progress', e);
if (e.lengthComputable && e.loaded == e.total) {
fired100 = true;
}
});
xhr.addEventListener('load', function (e) {
if (!fired100) {
ajax.fireEvent('progress', e);
}
ajax.fireEvent('load', e);
if (200 == xhr.status || 206 == xhr.status) {
ajax.fireEvent('success', xhr.response, e);
} else {
ajax.fireEvent('error', e);
}
});
xhr.addEventListener('error', function (e) {
ajax.fireEvent('error', e);
});
xhr.send();
ajax.xhr = xhr;
return ajax;
}
};
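// Example (sketch): WaveSurfer.util.ajax returns an observer, so
// progress/success/error are consumed as events ('audio.mp3' is a
// placeholder URL):
//
//   var request = WaveSurfer.util.ajax({
//       url: 'audio.mp3',
//       responseType: 'arraybuffer'
//   });
//   request.on('success', function (data) { /* data is an ArrayBuffer */ });
//   request.on('error', function (e) { /* XHR error event */ });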
/* Observer */
WaveSurfer.Observer = {
/**
* Attach a handler function for an event.
*/
on: function (event, fn) {
if (!this.handlers) { this.handlers = {}; }
var handlers = this.handlers[event];
if (!handlers) {
handlers = this.handlers[event] = [];
}
handlers.push(fn);
// Return an event descriptor
return {
name: event,
callback: fn,
un: this.un.bind(this, event, fn)
};
},
/**
* Remove an event handler.
*/
un: function (event, fn) {
if (!this.handlers) { return; }
var handlers = this.handlers[event];
if (handlers) {
if (fn) {
for (var i = handlers.length - 1; i >= 0; i--) {
if (handlers[i] == fn) {
handlers.splice(i, 1);
}
}
} else {
handlers.length = 0;
}
}
},
/**
* Remove all event handlers.
*/
unAll: function () {
this.handlers = null;
},
/**
* Attach a handler to an event. The handler is executed at most once per
* event type.
*/
once: function (event, handler) {
var my = this;
var fn = function () {
handler.apply(this, arguments);
setTimeout(function () {
my.un(event, fn);
}, 0);
};
return this.on(event, fn);
},
fireEvent: function (event) {
if (!this.handlers) { return; }
var handlers = this.handlers[event];
var args = Array.prototype.slice.call(arguments, 1);
handlers && handlers.forEach(function (fn) {
fn.apply(null, args);
});
}
};
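// Example (sketch): every WaveSurfer instance (and its backend) is an
// observer, so handlers can be attached, fired once, or detached via
// the returned descriptor:
//
//   var descriptor = wavesurfer.on('audioprocess', function (time) {
//       console.log('position: ' + time);
//   });
//   wavesurfer.once('ready', function () { wavesurfer.play(); });
//   descriptor.un(); // detach the 'audioprocess' handler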
/* Make the main WaveSurfer object an observer */
WaveSurfer.util.extend(WaveSurfer, WaveSurfer.Observer);
'use strict';
WaveSurfer.WebAudio = {
scriptBufferSize: 256,
PLAYING_STATE: 0,
PAUSED_STATE: 1,
FINISHED_STATE: 2,
supportsWebAudio: function () {
return !!(window.AudioContext || window.webkitAudioContext);
},
getAudioContext: function () {
if (!this.ac) {
this.ac = new (
window.AudioContext || window.webkitAudioContext
);
}
return this.ac;
},
getOfflineAudioContext: function (sampleRate) {
if (!WaveSurfer.WebAudio.offlineAudioContext) {
WaveSurfer.WebAudio.offlineAudioContext = new (
window.OfflineAudioContext || window.webkitOfflineAudioContext
)(1, 2, sampleRate);
}
return WaveSurfer.WebAudio.offlineAudioContext;
},
init: function (params) {
this.params = params;
this.ac = params.audioContext || this.getAudioContext();
this.lastPlay = this.ac.currentTime;
this.startPosition = 0;
this.scheduledPause = null;
this.states = [
Object.create(WaveSurfer.WebAudio.state.playing),
Object.create(WaveSurfer.WebAudio.state.paused),
Object.create(WaveSurfer.WebAudio.state.finished)
];
this.createVolumeNode();
this.createScriptNode();
this.createAnalyserNode();
this.setState(this.PAUSED_STATE);
this.setPlaybackRate(this.params.audioRate);
this.setLength(0);
},
disconnectFilters: function () {
if (this.filters) {
this.filters.forEach(function (filter) {
filter && filter.disconnect();
});
this.filters = null;
// Reconnect direct path
this.analyser.connect(this.gainNode);
}
},
setState: function (state) {
if (this.state !== this.states[state]) {
this.state = this.states[state];
this.state.init.call(this);
}
},
// Unpacked filters
setFilter: function () {
this.setFilters([].slice.call(arguments));
},
/**
* @param {Array} filters Packed filters array
*/
setFilters: function (filters) {
// Remove existing filters
this.disconnectFilters();
// Insert filters if filter array not empty
if (filters && filters.length) {
this.filters = filters;
// Disconnect direct path before inserting filters
this.analyser.disconnect();
// Connect each filter in turn
filters.reduce(function (prev, curr) {
prev.connect(curr);
return curr;
}, this.analyser).connect(this.gainNode);
}
},
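// Example (sketch): insert a lowpass filter between the analyser and
// the gain node (WebAudio backend only):
//
//   var lowpass = wavesurfer.backend.ac.createBiquadFilter();
//   lowpass.type = 'lowpass';
//   lowpass.frequency.value = 1000;
//   wavesurfer.backend.setFilter(lowpass);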
createScriptNode: function () {
if (this.ac.createScriptProcessor) {
this.scriptNode = this.ac.createScriptProcessor(this.scriptBufferSize);
} else {
this.scriptNode = this.ac.createJavaScriptNode(this.scriptBufferSize);
}
this.scriptNode.connect(this.ac.destination);
},
addOnAudioProcess: function () {
var my = this;
this.scriptNode.onaudioprocess = function () {
var time = my.getCurrentTime();
if (time >= my.getDuration()) {
my.setState(my.FINISHED_STATE);
my.fireEvent('pause');
} else if (time >= my.scheduledPause) {
my.pause();
} else if (my.state === my.states[my.PLAYING_STATE]) {
my.fireEvent('audioprocess', time);
}
};
},
removeOnAudioProcess: function () {
this.scriptNode.onaudioprocess = null;
},
createAnalyserNode: function () {
this.analyser = this.ac.createAnalyser();
this.analyser.connect(this.gainNode);
},
/**
* Create the gain node needed to control the playback volume.
*/
createVolumeNode: function () {
// Create gain node using the AudioContext
if (this.ac.createGain) {
this.gainNode = this.ac.createGain();
} else {
this.gainNode = this.ac.createGainNode();
}
// Add the gain node to the graph
this.gainNode.connect(this.ac.destination);
},
/**
* Set the gain to a new value.
*
* @param {Number} newGain The new gain, a floating point value
* between 0 and 1. 0 being no gain and 1 being maximum gain.
*/
setVolume: function (newGain) {
this.gainNode.gain.value = newGain;
},
/**
* Get the current gain.
*
* @returns {Number} The current gain, a floating point value
* between 0 and 1. 0 being no gain and 1 being maximum gain.
*/
getVolume: function () {
return this.gainNode.gain.value;
},
decodeArrayBuffer: function (arraybuffer, callback, errback) {
if (!this.offlineAc) {
this.offlineAc = this.getOfflineAudioContext(this.ac ? this.ac.sampleRate : 44100);
}
this.offlineAc.decodeAudioData(arraybuffer, (function (data) {
callback(data);
}).bind(this), errback);
},
/**
* Set pre-decoded peaks.
*/
setPeaks: function (peaks) {
this.peaks = peaks;
},
/**
* Set the rendered length (different from the length of the audio).
*/
setLength: function (length) {
// No resize, we can preserve the cached peaks.
if (this.mergedPeaks && length == ((2 * this.mergedPeaks.length - 1) + 2)) {
return;
}
this.splitPeaks = [];
this.mergedPeaks = [];
// Set the last element of the sparse array so the peak arrays are
// appropriately sized for other calculations.
var channels = this.buffer ? this.buffer.numberOfChannels : 1;
for (var c = 0; c < channels; c++) {
this.splitPeaks[c] = [];
this.splitPeaks[c][2 * (length - 1)] = 0;
this.splitPeaks[c][2 * (length - 1) + 1] = 0;
}
this.mergedPeaks[2 * (length - 1)] = 0;
this.mergedPeaks[2 * (length - 1) + 1] = 0;
},
/**
* Compute the max and min value of the waveform when broken into
* <length> subranges.
* @param {Number} length How many subranges to break the waveform into.
* @param {Number} first First sample in the required range.
* @param {Number} last Last sample in the required range.
* @returns {Array} Array of 2*<length> peaks or array of arrays
* of peaks consisting of (max, min) values for each subrange.
*/
getPeaks: function (length, first, last) {
if (this.peaks) { return this.peaks; }
this.setLength(length);
var sampleSize = this.buffer.length / length;
var sampleStep = ~~(sampleSize / 10) || 1;
var channels = this.buffer.numberOfChannels;
for (var c = 0; c < channels; c++) {
var peaks = this.splitPeaks[c];
var chan = this.buffer.getChannelData(c);
for (var i = first; i <= last; i++) {
var start = ~~(i * sampleSize);
var end = ~~(start + sampleSize);
var min = 0;
var max = 0;
for (var j = start; j < end; j += sampleStep) {
var value = chan[j];
if (value > max) {
max = value;
}
if (value < min) {
min = value;
}
}
peaks[2 * i] = max;
peaks[2 * i + 1] = min;
if (c == 0 || max > this.mergedPeaks[2 * i]) {
this.mergedPeaks[2 * i] = max;
}
if (c == 0 || min < this.mergedPeaks[2 * i + 1]) {
this.mergedPeaks[2 * i + 1] = min;
}
}
}
return this.params.splitChannels ? this.splitPeaks : this.mergedPeaks;
},
getPlayedPercents: function () {
return this.state.getPlayedPercents.call(this);
},
disconnectSource: function () {
if (this.source) {
this.source.disconnect();
}
},
destroy: function () {
if (!this.isPaused()) {
this.pause();
}
this.unAll();
this.buffer = null;
this.disconnectFilters();
this.disconnectSource();
this.gainNode.disconnect();
this.scriptNode.disconnect();
this.analyser.disconnect();
// Close the audioContext only if it was created by wavesurfer
// itself, i.e. not passed in as a parameter.
if (!this.params.audioContext) {
// check if browser supports AudioContext.close()
if (typeof this.ac.close === 'function') {
this.ac.close();
}
}
},
load: function (buffer) {
this.startPosition = 0;
this.lastPlay = this.ac.currentTime;
this.buffer = buffer;
this.createSource();
},
createSource: function () {
this.disconnectSource();
this.source = this.ac.createBufferSource();
// Adjust for old browsers that only expose noteGrainOn/noteOff.
this.source.start = this.source.start || this.source.noteGrainOn;
this.source.stop = this.source.stop || this.source.noteOff;
this.source.playbackRate.value = this.playbackRate;
this.source.buffer = this.buffer;
this.source.connect(this.analyser);
},
isPaused: function () {
return this.state !== this.states[this.PLAYING_STATE];
},
getDuration: function () {
if (!this.buffer) {
return 0;
}
return this.buffer.duration;
},
seekTo: function (start, end) {
if (!this.buffer) { return; }
this.scheduledPause = null;
if (start == null) {
start = this.getCurrentTime();
if (start >= this.getDuration()) {
start = 0;
}
}
if (end == null) {
end = this.getDuration();
}
this.startPosition = start;
this.lastPlay = this.ac.currentTime;
if (this.state === this.states[this.FINISHED_STATE]) {
this.setState(this.PAUSED_STATE);
}
return { start: start, end: end };
},
getPlayedTime: function () {
return (this.ac.currentTime - this.lastPlay) * this.playbackRate;
},
/**
* Plays the loaded audio region.
*
* @param {Number} start Start offset in seconds,
* relative to the beginning of a clip.
* @param {Number} end When to stop
* relative to the beginning of a clip.
*/
play: function (start, end) {
if (!this.buffer) { return; }
// need to re-create source on each playback
this.createSource();
var adjustedTime = this.seekTo(start, end);
start = adjustedTime.start;
end = adjustedTime.end;
this.scheduledPause = end;
this.source.start(0, start, end - start);
if (this.ac.state == 'suspended') {
this.ac.resume && this.ac.resume();
}
this.setState(this.PLAYING_STATE);
this.fireEvent('play');
},
/**
* Pauses the loaded audio.
*/
pause: function () {
this.scheduledPause = null;
this.startPosition += this.getPlayedTime();
this.source && this.source.stop(0);
this.setState(this.PAUSED_STATE);
this.fireEvent('pause');
},
/**
* Returns the current playback position in seconds, relative to the start of the clip.
*/
getCurrentTime: function () {
return this.state.getCurrentTime.call(this);
},
/**
* Returns the current playback rate.
*/
getPlaybackRate: function () {
return this.playbackRate;
},
/**
* Set the audio source playback rate.
*/
setPlaybackRate: function (value) {
value = value || 1;
if (this.isPaused()) {
this.playbackRate = value;
} else {
this.pause();
this.playbackRate = value;
this.play();
}
}
};
WaveSurfer.WebAudio.state = {};
WaveSurfer.WebAudio.state.playing = {
init: function () {
this.addOnAudioProcess();
},
getPlayedPercents: function () {
var duration = this.getDuration();
return (this.getCurrentTime() / duration) || 0;
},
getCurrentTime: function () {
return this.startPosition + this.getPlayedTime();
}
};
WaveSurfer.WebAudio.state.paused = {
init: function () {
this.removeOnAudioProcess();
},
getPlayedPercents: function () {
var duration = this.getDuration();
return (this.getCurrentTime() / duration) || 0;
},
getCurrentTime: function () {
return this.startPosition;
}
};
WaveSurfer.WebAudio.state.finished = {
init: function () {
this.removeOnAudioProcess();
this.fireEvent('finish');
},
getPlayedPercents: function () {
return 1;
},
getCurrentTime: function () {
return this.getDuration();
}
};
WaveSurfer.util.extend(WaveSurfer.WebAudio, WaveSurfer.Observer);
'use strict';
WaveSurfer.MediaElement = Object.create(WaveSurfer.WebAudio);
WaveSurfer.util.extend(WaveSurfer.MediaElement, {
init: function (params) {
this.params = params;
// Dummy media to catch errors
this.media = {
currentTime: 0,
duration: 0,
paused: true,
playbackRate: 1,
play: function () {},
pause: function () {}
};
this.mediaType = params.mediaType.toLowerCase();
this.elementPosition = params.elementPosition;
this.setPlaybackRate(this.params.audioRate);
this.createTimer();
},
/**
* Create a timer to provide a more precise 'audioprocess' event.
*/
createTimer: function () {
var my = this;
var onAudioProcess = function () {
if (my.isPaused()) { return; }
my.fireEvent('audioprocess', my.getCurrentTime());
// Call again in the next frame
var requestAnimationFrame = window.requestAnimationFrame || window.webkitRequestAnimationFrame;
requestAnimationFrame(onAudioProcess);
};
this.on('play', onAudioProcess);
},
/**
* Create media element with url as its source,
* and append to container element.
* @param {String} url path to media file
* @param {HTMLElement} container HTML element
* @param {Array} peaks array of peak data
* @param {String} preload HTML 5 preload attribute value
*/
load: function (url, container, peaks, preload) {
var my = this;
var media = document.createElement(this.mediaType);
media.controls = this.params.mediaControls;
media.autoplay = this.params.autoplay || false;
media.preload = preload == null ? 'auto' : preload;
media.src = url;
media.style.width = '100%';
var prevMedia = container.querySelector(this.mediaType);
if (prevMedia) {
container.removeChild(prevMedia);
}
container.appendChild(media);
this._load(media, peaks);
},
/**
* Load existing media element.
* @param {MediaElement} elt HTML5 Audio or Video element
* @param {Array} peaks array of peak data
*/
loadElt: function (elt, peaks) {
var my = this;
var media = elt;
media.controls = this.params.mediaControls;
media.autoplay = this.params.autoplay || false;
this._load(media, peaks);
},
/**
* Private method called by both load (from url)
* and loadElt (existing media element).
* @param {MediaElement} media HTML5 Audio or Video element
* @param {Array} peaks array of peak data
* @private
*/
_load: function (media, peaks) {
var my = this;
// load must be called manually on iOS, otherwise peaks won't draw
// until a user interaction triggers load --> 'ready' event
if (typeof media.load == 'function') {
media.load();
}
media.addEventListener('error', function () {
my.fireEvent('error', 'Error loading media element');
});
media.addEventListener('canplay', function () {
my.fireEvent('canplay');
});
media.addEventListener('ended', function () {
my.fireEvent('finish');
});
this.media = media;
this.peaks = peaks;
this.onPlayEnd = null;
this.buffer = null;
this.setPlaybackRate(this.playbackRate);
},
isPaused: function () {
return !this.media || this.media.paused;
},
getDuration: function () {
var duration = (this.buffer || this.media).duration;
if (duration >= Infinity) { // streaming audio
duration = this.media.seekable.end(0);
}
return duration;
},
getCurrentTime: function () {
return this.media && this.media.currentTime;
},
getPlayedPercents: function () {
return (this.getCurrentTime() / this.getDuration()) || 0;
},
/**
* Set the audio source playback rate.
*/
setPlaybackRate: function (value) {
this.playbackRate = value || 1;
this.media.playbackRate = this.playbackRate;
},
seekTo: function (start) {
if (start != null) {
this.media.currentTime = start;
}
this.clearPlayEnd();
},
/**
* Plays the loaded audio region.
*
* @param {Number} start Start offset in seconds,
* relative to the beginning of a clip.
* @param {Number} end End offset in seconds,
* relative to the beginning of a clip.
*/
play: function (start, end) {
this.seekTo(start);
this.media.play();
end && this.setPlayEnd(end);
this.fireEvent('play');
},
/**
* Pauses the loaded audio.
*/
pause: function () {
this.media && this.media.pause();
this.clearPlayEnd();
this.fireEvent('pause');
},
setPlayEnd: function (end) {
var my = this;
this.onPlayEnd = function (time) {
if (time >= end) {
my.pause();
my.seekTo(end);
}
};
this.on('audioprocess', this.onPlayEnd);
},
clearPlayEnd: function () {
if (this.onPlayEnd) {
this.un('audioprocess', this.onPlayEnd);
this.onPlayEnd = null;
}
},
getPeaks: function (length, start, end) {
if (this.buffer) {
return WaveSurfer.WebAudio.getPeaks.call(this, length, start, end);
}
return this.peaks || [];
},
getVolume: function () {
return this.media.volume;
},
setVolume: function (val) {
this.media.volume = val;
},
destroy: function () {
this.pause();
this.unAll();
this.media && this.media.parentNode && this.media.parentNode.removeChild(this.media);
this.media = null;
}
});
// For backwards compatibility
WaveSurfer.AudioElement = WaveSurfer.MediaElement;
'use strict';
WaveSurfer.Drawer = {
init: function (container, params) {
this.container = container;
this.params = params;
this.width = 0;
this.height = params.height * this.params.pixelRatio;
this.lastPos = 0;
this.initDrawer(params);
this.createWrapper();
this.createElements();
},
createWrapper: function () {
this.wrapper = this.container.appendChild(
document.createElement('wave')
);
this.style(this.wrapper, {
display: 'block',
position: 'relative',
userSelect: 'none',
webkitUserSelect: 'none',
height: this.params.height + 'px'
});
if (this.params.fillParent || this.params.scrollParent) {
this.style(this.wrapper, {
width: '100%',
overflowX: this.params.hideScrollbar ? 'hidden' : 'auto',
overflowY: 'hidden'
});
}
this.setupWrapperEvents();
},
handleEvent: function (e, noPrevent) {
!noPrevent && e.preventDefault();
var clientX = e.targetTouches ? e.targetTouches[0].clientX : e.clientX;
var bbox = this.wrapper.getBoundingClientRect();
var nominalWidth = this.width;
var parentWidth = this.getWidth();
var progress;
if (!this.params.fillParent && nominalWidth < parentWidth) {
progress = ((clientX - bbox.left) * this.params.pixelRatio / nominalWidth) || 0;
if (progress > 1) {
progress = 1;
}
} else {
progress = ((clientX - bbox.left + this.wrapper.scrollLeft) / this.wrapper.scrollWidth) || 0;
}
return progress;
},
setupWrapperEvents: function () {
var my = this;
this.wrapper.addEventListener('click', function (e) {
var scrollbarHeight = my.wrapper.offsetHeight - my.wrapper.clientHeight;
if (scrollbarHeight != 0) {
// scrollbar is visible. Check if click was on it
var bbox = my.wrapper.getBoundingClientRect();
if (e.clientY >= bbox.bottom - scrollbarHeight) {
// ignore the click since it landed on the scrollbar
return;
}
}
if (my.params.interact) {
my.fireEvent('click', e, my.handleEvent(e));
}
});
this.wrapper.addEventListener('scroll', function (e) {
my.fireEvent('scroll', e);
});
},
drawPeaks: function (peaks, length, start, end) {
this.setWidth(length);
this.params.barWidth ?
this.drawBars(peaks, 0, start, end) :
this.drawWave(peaks, 0, start, end);
},
style: function (el, styles) {
Object.keys(styles).forEach(function (prop) {
if (el.style[prop] !== styles[prop]) {
el.style[prop] = styles[prop];
}
});
return el;
},
resetScroll: function () {
if (this.wrapper !== null) {
this.wrapper.scrollLeft = 0;
}
},
recenter: function (percent) {
var position = this.wrapper.scrollWidth * percent;
this.recenterOnPosition(position, true);
},
recenterOnPosition: function (position, immediate) {
var scrollLeft = this.wrapper.scrollLeft;
var half = ~~(this.wrapper.clientWidth / 2);
var target = position - half;
var offset = target - scrollLeft;
var maxScroll = this.wrapper.scrollWidth - this.wrapper.clientWidth;
if (maxScroll == 0) {
// no need to continue if scrollbar is not there
return;
}
// if the cursor is currently visible...
if (!immediate && -half <= offset && offset < half) {
// we'll limit the "re-center" rate.
var rate = 5;
offset = Math.max(-rate, Math.min(rate, offset));
target = scrollLeft + offset;
}
// limit target to valid range (0 to maxScroll)
target = Math.max(0, Math.min(maxScroll, target));
// no use attempting to scroll if we're not moving
if (target != scrollLeft) {
this.wrapper.scrollLeft = target;
}
},
getScrollX: function() {
return Math.round(this.wrapper.scrollLeft * this.params.pixelRatio);
},
getWidth: function () {
return Math.round(this.container.clientWidth * this.params.pixelRatio);
},
setWidth: function (width) {
if (this.width == width) {
return;
}
this.width = width;
if (this.params.fillParent || this.params.scrollParent) {
this.style(this.wrapper, {
width: ''
});
} else {
this.style(this.wrapper, {
width: ~~(this.width / this.params.pixelRatio) + 'px'
});
}
this.updateSize();
},
setHeight: function (height) {
if (height == this.height) { return; }
this.height = height;
this.style(this.wrapper, {
height: ~~(this.height / this.params.pixelRatio) + 'px'
});
this.updateSize();
},
progress: function (progress) {
var minPxDelta = 1 / this.params.pixelRatio;
var pos = Math.round(progress * this.width) * minPxDelta;
if (pos < this.lastPos || pos - this.lastPos >= minPxDelta) {
this.lastPos = pos;
if (this.params.scrollParent && this.params.autoCenter) {
var newPos = ~~(this.wrapper.scrollWidth * progress);
this.recenterOnPosition(newPos);
}
this.updateProgress(pos);
}
},
destroy: function () {
this.unAll();
if (this.wrapper) {
this.container.removeChild(this.wrapper);
this.wrapper = null;
}
},
/* Renderer-specific methods */
initDrawer: function () {},
createElements: function () {},
updateSize: function () {},
drawWave: function (peaks, max) {},
clearWave: function () {},
updateProgress: function (position) {}
};
WaveSurfer.util.extend(WaveSurfer.Drawer, WaveSurfer.Observer);
'use strict';
WaveSurfer.Drawer.Canvas = Object.create(WaveSurfer.Drawer);
WaveSurfer.util.extend(WaveSurfer.Drawer.Canvas, {
createElements: function () {
var waveCanvas = this.wrapper.appendChild(
this.style(document.createElement('canvas'), {
position: 'absolute',
zIndex: 1,
left: 0,
top: 0,
bottom: 0
})
);
this.waveCc = waveCanvas.getContext('2d');
this.progressWave = this.wrapper.appendChild(
this.style(document.createElement('wave'), {
position: 'absolute',
zIndex: 2,
left: 0,
top: 0,
bottom: 0,
overflow: 'hidden',
width: '0',
display: 'none',
boxSizing: 'border-box',
borderRightStyle: 'solid',
borderR