react-audio-wave
visualize a waveform of an audio file
var ReactAudioWave = (function (exports, _extends, react, classNames, ResizeObserver, _defineProperty, extractPeaks) {
'use strict';
const useSize = (target, onSizeChange) => {
const [size, setSize] = react.useState({
width: undefined,
height: undefined
});
react.useLayoutEffect(() => {
let targetElement;
if (typeof target === "function") {
targetElement = target();
} else if ("current" in target) {
targetElement = target.current;
} else {
targetElement = target;
}
if (!targetElement) {
return;
}
const resizeObserver = new ResizeObserver(entries => {
entries.forEach(entry => {
const {
clientWidth,
clientHeight
} = entry.target;
const newSize = {
width: clientWidth,
height: clientHeight
};
onSizeChange === null || onSizeChange === void 0 || onSizeChange(newSize);
setSize(newSize);
});
});
resizeObserver.observe(targetElement);
return () => {
resizeObserver.disconnect();
};
}, [onSizeChange, target]);
return size;
};
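// Usage sketch for useSize (illustrative, not part of the bundle): the target may
// be a ref object, a getter function, or a DOM node; the optional callback fires
// on every resize with the element's client box.
//
//   const containerRef = react.useRef(null);
//   const { width, height } = useSize(containerRef, (size) => console.log(size));
//   return React.createElement("div", { ref: containerRef });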
const WaveCanvas = _ref => {
let {
width,
height,
peaks,
color,
bits,
barGap,
barWidth,
className,
pixelRatio: scale,
offset = 0
} = _ref;
const canvasRef = react.useRef(null);
react.useEffect(() => {
const len = width / scale;
const ctx = canvasRef.current.getContext("2d");
const h = height / scale;
const h2 = h / 2;
const maxValue = 2 ** (bits - 1);
const barStart = barWidth + barGap;
ctx.clearRect(0, 0, width, height);
ctx.save();
ctx.fillStyle = color;
ctx.scale(scale, scale);
for (let pixel = 0; pixel < len; pixel += barStart) {
const minPeak = peaks[(pixel + offset) * 2] / maxValue;
const maxPeak = peaks[(pixel + offset) * 2 + 1] / maxValue;
const min = Math.abs(minPeak * h2);
const max = Math.abs(maxPeak * h2);
ctx.fillRect(pixel, h2 - max, barWidth, max + min);
}
ctx.restore();
}, [barGap, barWidth, bits, color, height, offset, peaks, scale, width]);
return /*#__PURE__*/React.createElement("canvas", {
width: width,
height: height,
ref: canvasRef,
className: classNames(className)
}, "Your browser does not support HTML5 canvas.");
};
var WaveCanvas$1 = /*#__PURE__*/react.memo(WaveCanvas);
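// Illustrative props for WaveCanvas (values are examples only): `peaks` holds
// interleaved min/max pairs per pixel, so peaks[i * 2] and peaks[i * 2 + 1] are the
// extremes drawn as one vertical bar for pixel i, and `bits` fixes the peak range
// (e.g. 8 bits -> -128..127). `peakData` here stands for the object returned by
// WebAudio#getWebaudioPeaks further below.
//
//   React.createElement(WaveCanvas$1, {
//     width: 800, height: 100, pixelRatio: 1,
//     peaks: peakData.data[0], bits: peakData.bits,
//     color: "#ccc", barGap: 0, barWidth: 1
//   });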
const WaveProgress = /*#__PURE__*/react.forwardRef((_ref, ref) => {
let {
className,
progressColor,
progressStyle,
children,
cursorColor,
progressCursorVisible
} = _ref;
const [offsetPixels, setOffsetPixels] = react.useState(0);
react.useImperativeHandle(ref, () => ({
changeOffsetPixels: currentPixels => {
if (currentPixels !== undefined) {
setOffsetPixels(currentPixels);
}
}
}));
return /*#__PURE__*/React.createElement("div", {
className: classNames(className),
style: {
...progressStyle,
width: offsetPixels,
borderRight: progressCursorVisible ? "1px solid ".concat(cursorColor || progressColor) : "none"
}
}, children);
});
var WaveProgress$1 = /*#__PURE__*/react.memo(WaveProgress);
const LoadedPercent = /*#__PURE__*/react.forwardRef((_, ref) => {
const [percent, setPercent] = react.useState();
react.useImperativeHandle(ref, () => ({
changeLoadedPercent: currentPercent => {
if (currentPercent !== undefined) {
setPercent(currentPercent);
}
}
}));
return /*#__PURE__*/React.createElement("span", null, percent);
});
var LoadedPercent$1 = /*#__PURE__*/react.memo(LoadedPercent);
function styleInject(css, ref) {
if ( ref === void 0 ) ref = {};
var insertAt = ref.insertAt;
if (!css || typeof document === 'undefined') { return; }
var head = document.head || document.getElementsByTagName('head')[0];
var style = document.createElement('style');
style.type = 'text/css';
if (insertAt === 'top') {
if (head.firstChild) {
head.insertBefore(style, head.firstChild);
} else {
head.appendChild(style);
}
} else {
head.appendChild(style);
}
if (style.styleSheet) {
style.styleSheet.cssText = css;
} else {
style.appendChild(document.createTextNode(css));
}
}
var css_248z$1 = ".cursor-time{align-items:center;bottom:0;box-sizing:border-box;display:flex;left:0;position:absolute;top:0;width:auto}.cursor-time,.cursor-time .time{pointer-events:none}";
styleInject(css_248z$1);
const CursorTime = /*#__PURE__*/react.forwardRef((_ref, ref) => {
let {
config,
cursorColor,
cursorVisible,
timeFormat
} = _ref;
const {
zIndex = 4,
customShowTimeStyle
} = config || {};
const {
backgroundColor = cursorColor,
color = "#fff",
padding = "2px",
fontSize = "10px",
...restStyles
} = customShowTimeStyle || {};
const [left, setLeft] = react.useState(0);
const [opacity, setOpacity] = react.useState(0);
const [flip, setFlip] = react.useState(false);
const [cursorTime, setCursorTime] = react.useState(() => (timeFormat === null || timeFormat === void 0 ? void 0 : timeFormat(0)) || "00:00:00");
const timeContentRef = react.useRef(null);
const {
width: timeContentWidth
} = useSize(timeContentRef);
react.useImperativeHandle(ref, () => ({
updateCursorPosition: (timeValue, x, clientX, right) => {
const formatValue = timeFormat === null || timeFormat === void 0 ? void 0 : timeFormat(timeValue);
setCursorTime(formatValue);
setOpacity(1);
setFlip(right < clientX + timeContentWidth);
setLeft(x);
},
hideCursor: () => {
setOpacity(0);
},
showCursor: () => {
setOpacity(1);
}
}));
if (!cursorVisible) {
return null;
}
return /*#__PURE__*/React.createElement("div", {
className: "cursor-time",
style: {
opacity,
zIndex,
left,
borderLeftStyle: "solid",
borderLeftWidth: 1,
borderLeftColor: backgroundColor
}
}, /*#__PURE__*/React.createElement("div", {
ref: timeContentRef,
className: "time",
style: {
backgroundColor,
color,
padding,
fontSize,
marginLeft: flip ? -timeContentWidth : 0,
...restStyles
}
}, cursorTime));
});
var CursorTime$1 = /*#__PURE__*/react.memo(CursorTime);
/**
 * Convert a time in seconds to a pixel offset
 * @param seconds
 * @param samplesPerPixel
 * @param sampleRate
 * @returns number
 */
function secondsToPixels(seconds, samplesPerPixel, sampleRate) {
return Math.ceil(seconds * sampleRate / samplesPerPixel);
}
/**
 * Convert a pixel offset to a time in seconds
 * @param pixels
 * @param samplesPerPixel
 * @param sampleRate
 * @returns number
 */
function pixelsToSeconds(pixels, samplesPerPixel, sampleRate) {
return pixels * samplesPerPixel / sampleRate;
}
/**
 * Format a decimal as a percentage, keeping 2 decimal places by default
 * @param rate
 * @param fixed
 * @returns string
 */
function formatPercent(rate) {
let fixed = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 2;
if ([Number.isNaN(rate), rate === undefined, rate === null].includes(true)) {
return "";
}
if ([0, "0"].includes(rate)) {
return "0";
}
return "".concat(Number.parseFloat(String((Number(rate) * 100).toFixed(fixed))), "%");
}
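// Worked example for the three helpers above (sampleRate 44100, samplesPerPixel 1000):
//   secondsToPixels(2, 1000, 44100)  -> Math.ceil(88200 / 1000) = 89
//   pixelsToSeconds(89, 1000, 44100) -> 89000 / 44100 ≈ 2.018
//   formatPercent(0.5)   -> "50%"
//   formatPercent(1 / 3) -> "33.33%"
//   formatPercent(0)     -> "0"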
class WebAudio {
constructor(supportPlaybackRate) {
_defineProperty(this, "audioContext", void 0);
_defineProperty(this, "buffer", void 0);
_defineProperty(this, "source", void 0);
_defineProperty(this, "gain", void 0);
_defineProperty(this, "peakData", void 0);
_defineProperty(this, "duration", void 0);
_defineProperty(this, "lastOffsetPixels", void 0);
_defineProperty(this, "pausedAtOffsetTime", void 0);
_defineProperty(this, "currentOffsetTime", void 0);
_defineProperty(this, "startTime", void 0);
_defineProperty(this, "samplesPerPixel", void 0);
_defineProperty(this, "volume", void 0);
_defineProperty(this, "playing", void 0);
_defineProperty(this, "supportPlaybackRate", void 0);
_defineProperty(this, "hasEndedListener", void 0);
_defineProperty(this, "audioMedia", void 0);
_defineProperty(this, "pixelRatio", void 0);
// Whether to support playback-rate changes. This flag exists because waveform
// visualization requires the AudioBuffer obtained through Web Audio, yet changing
// the playback rate of an AudioBufferSourceNode also shifts the pitch; see
// https://github.com/WebAudio/web-audio-api/issues/2487 and https://developer.mozilla.org/en-US/docs/Web/API/AudioBufferSourceNode
// The <audio> element handles playback rate correctly, but it exposes no AudioBuffer, so it cannot drive the visualization.
// This flag therefore picks the strategy: without rate support, the AudioBuffer drives everything; with it, the AudioBuffer is used only for visualization while a native <audio> element handles play/pause and rate control.
this.supportPlaybackRate = supportPlaybackRate;
this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
this.source = null;
// When playing via the AudioBuffer, this gain node serves as the intermediate node for volume control
this.gain = null;
this.peakData = {
length: 0,
data: [],
bits: 8
};
// Peak sampling rate: how many buffer samples one pixel represents
this.samplesPerPixel = 1000;
// Audio duration; computing it from the AudioBuffer is more accurate than reading it off the <audio> element
this.duration = 0;
// Audio volume
this.volume = 1;
// Time offset the audio had reached when playback was paused/stopped
this.pausedAtOffsetTime = 0;
// Time offset the audio is currently playing at
this.currentOffsetTime = 0;
// Records the AudioContext time at the moment playback starts
this.startTime = 0;
// Last computed pixel offset, compared with the next computed offset to decide whether the progress needs updating
this.lastOffsetPixels = 0;
// Whether audio is currently playing
this.playing = false;
// With playback-rate support, the <audio> element must control play/pause etc.
this.audioMedia = null;
// Whether an "ended" listener has been registered
this.hasEndedListener = false;
// Device pixel ratio
this.pixelRatio = Math.max(1, Math.floor(window.devicePixelRatio)); // some devices report 0
}
initWebAudio(audioData, successCallback, errorCallback) {
try {
this.audioContext.decodeAudioData(audioData, decodeData => {
this.buffer = decodeData;
this.duration = decodeData.duration;
successCallback(this.duration);
});
} catch (error) {
errorCallback === null || errorCallback === void 0 || errorCallback(error);
}
}
initAudioElement(audioSrc, container) {
if (!this.supportPlaybackRate) {
return;
}
if (this.audioMedia) {
this.audioMedia.remove();
this.audioMedia = null;
}
this.audioMedia = document.createElement("audio");
this.audioMedia.preload = "auto";
this.audioMedia.controls = false;
this.audioMedia.src = audioSrc;
container.append(this.audioMedia);
}
getWebaudioPeaks(width, mono) {
if (!width || !this.buffer) {
return;
}
this.samplesPerPixel = Math.floor(this.buffer.length / width);
// Derive the peak sampling rate (buffer samples per pixel) from the configured canvas width; in principle the resulting peakData length equals the canvas width
this.peakData = extractPeaks(this.buffer, this.samplesPerPixel, mono);
return this.peakData;
}
getCurrentOffsetTime() {
if (this.supportPlaybackRate) {
this.currentOffsetTime = this.audioMedia.currentTime;
return this.currentOffsetTime;
}
this.currentOffsetTime = this.playing ? this.audioContext.currentTime - this.startTime + this.pausedAtOffsetTime : this.pausedAtOffsetTime;
return this.currentOffsetTime;
}
getCurrentOffsetPixels() {
const calcOffsetPixels = secondsToPixels(this.currentOffsetTime, this.samplesPerPixel, this.buffer.sampleRate);
if (calcOffsetPixels !== this.lastOffsetPixels) {
this.lastOffsetPixels = calcOffsetPixels;
return calcOffsetPixels;
}
}
getPixelsToSeconds(x) {
return pixelsToSeconds(x, this.samplesPerPixel, this.buffer.sampleRate);
}
updateCurrentOffsetPositon(time) {
if (this.supportPlaybackRate) {
this.audioMedia.currentTime = time;
return;
}
this.currentOffsetTime = time;
this.pausedAtOffsetTime = time;
}
pause() {
var _this$source;
this.playing = false;
if (this.supportPlaybackRate) {
this.audioMedia.pause();
return;
}
// this.audioContext.currentTime - this.startTime gives how long this run of playback
// lasted; adding it to the previous paused offset yields the position within the
// whole track, i.e. where the next play() should resume from.
this.pausedAtOffsetTime += this.audioContext.currentTime - this.startTime;
(_this$source = this.source) === null || _this$source === void 0 || _this$source.stop();
(_this$source = this.source) === null || _this$source === void 0 || _this$source.disconnect();
}
ended() {
var _this$source2, _this$source3, _this$gain;
this.playing = false;
this.currentOffsetTime = 0;
this.lastOffsetPixels = 0;
this.hasEndedListener = false; // This flag prevents repeated addEventListener calls across pause/resume cycles, which would otherwise fire the handler multiple times when playback ends
if (this.supportPlaybackRate) {
this.audioMedia.currentTime = 0;
this.audioMedia.pause();
return;
}
(_this$source2 = this.source) === null || _this$source2 === void 0 || _this$source2.stop(0);
(_this$source3 = this.source) === null || _this$source3 === void 0 || _this$source3.disconnect();
(_this$gain = this.gain) === null || _this$gain === void 0 || _this$gain.disconnect();
this.pausedAtOffsetTime = 0;
this.startTime = 0;
this.source = null;
this.gain = null;
}
updateAudioSource() {
return new Promise(resolve => {
const sendEnded = () => {
var _this$source4, _this$source4$removeE;
// Note: with AudioBuffer-driven playback this event also fires on pause, whereas with the <audio> element it only fires when playback actually finishes
resolve(true);
if (this.supportPlaybackRate) {
var _this$audioMedia, _this$audioMedia$remo;
(_this$audioMedia = this.audioMedia) === null || _this$audioMedia === void 0 || (_this$audioMedia$remo = _this$audioMedia.removeEventListener) === null || _this$audioMedia$remo === void 0 || _this$audioMedia$remo.call(_this$audioMedia, "ended", sendEnded);
return;
}
(_this$source4 = this.source) === null || _this$source4 === void 0 || (_this$source4$removeE = _this$source4.removeEventListener) === null || _this$source4$removeE === void 0 || _this$source4$removeE.call(_this$source4, "ended", sendEnded);
};
// With playback-rate support, play/pause and related operations go through the <audio> element
if (this.supportPlaybackRate) {
if (!this.hasEndedListener) {
this.audioMedia.addEventListener("ended", sendEnded);
this.hasEndedListener = true;
}
return;
}
this.gain = this.audioContext.createGain();
this.source = this.audioContext.createBufferSource();
this.source.buffer = this.buffer;
this.gain.gain.value = this.volume;
// this.source.playbackRate.value = 1.5; // would adjust playback speed, but the pitch shifts along with it; the Web Audio API does not yet support preservePitch, see https://github.com/WebAudio/web-audio-api/issues/2487
this.source.connect(this.gain);
this.gain.connect(this.audioContext.destination);
this.source.loop = false;
// This event also fires on pause (unlike the <audio> element's "ended"), so it cannot by itself signal completion; it has to be combined with the current playing state
this.source.addEventListener("ended", sendEnded);
});
}
play() {
var _this$source5;
this.playing = true;
if (this.supportPlaybackRate) {
this.audioMedia.play();
return;
}
// this.audioContext.currentTime is an absolute clock value; on first playback (audio loaded but never played) it can be treated as the origin, i.e. 0.
// The clock keeps running no matter what happens afterwards (e.g. pausing); it stops for nobody.
this.startTime = this.audioContext.currentTime;
// start(when, offset, duration): play immediately, from the paused offset, for up to the full duration
(_this$source5 = this.source) === null || _this$source5 === void 0 || _this$source5.start(this.startTime, this.pausedAtOffsetTime, this.duration); // this.startOffset % this.duration
}
changeVolume(volume) {
this.volume = volume;
if (this.supportPlaybackRate) {
this.audioMedia.volume = volume;
return;
}
if (this.gain) {
this.gain.gain.value = volume;
}
}
changePlaybackRate(playbackRate) {
if (!this.supportPlaybackRate) {
return;
}
this.audioMedia.playbackRate = playbackRate;
}
}
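// Usage sketch for WebAudio (illustrative; `arrayBuffer` stands for the fetched
// audio bytes). updateAudioSource() must run before each play() so that a fresh
// AudioBufferSourceNode is wired up:
//
//   const engine = new WebAudio(false); // AudioBuffer-driven, no playback-rate support
//   engine.initWebAudio(arrayBuffer, (duration) => {
//     const peakData = engine.getWebaudioPeaks(800, true); // mono peaks for an 800px canvas
//     engine.updateAudioSource().then((ended) => { /* resolves on pause or finish */ });
//     engine.play();
//   }, (error) => console.error(error));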
function _classPrivateFieldInitSpec(obj, privateMap, value) { _checkPrivateRedeclaration(obj, privateMap); privateMap.set(obj, value); }
function _checkPrivateRedeclaration(obj, privateCollection) { if (privateCollection.has(obj)) { throw new TypeError("Cannot initialize the same private elements twice on an object"); } }
function _classPrivateFieldGet(s, a) { return s.get(_assertClassBrand(s, a)); }
function _assertClassBrand(e, t, n) { if ("function" == typeof e ? e === t : e.has(t)) return arguments.length < 3 ? t : n; throw new TypeError("Private element is not present on this object"); }
var _emiter = /*#__PURE__*/new WeakMap();
class EventEmitter {
constructor() {
_classPrivateFieldInitSpec(this, _emiter, new Map());
}
on(topic) {
let topics = _classPrivateFieldGet(_emiter, this).get(topic);
if (!topics) {
_classPrivateFieldGet(_emiter, this).set(topic, topics = []);
}
for (var _len = arguments.length, handlers = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
handlers[_key - 1] = arguments[_key];
}
topics.push(...handlers);
}
off(topic, handler) {
if (!handler) {
return _classPrivateFieldGet(_emiter, this).delete(topic);
}
const topics = _classPrivateFieldGet(_emiter, this).get(topic);
if (!topics) {
return false;
}
const index = topics.indexOf(handler);
if (index < 0) {
return false;
}
topics.splice(index, 1);
if (topics.length === 0) {
_classPrivateFieldGet(_emiter, this).delete(topic);
}
return true;
}
emit(topic) {
for (var _len2 = arguments.length, args = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) {
args[_key2 - 1] = arguments[_key2];
}
const topics = _classPrivateFieldGet(_emiter, this).get(topic);
if (!topics) {
return;
}
topics.forEach(handler => {
try {
handler(...args);
} catch (error) {
console.log(error);
}
});
}
}
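// Usage sketch for EventEmitter (illustrative topic and handler):
//
//   const bus = new EventEmitter();
//   const onProgress = (info) => console.log(info.loaded, info.total);
//   bus.on("progress", onProgress);
//   bus.emit("progress", { loaded: 10, total: 100 }); // logs: 10 100
//   bus.off("progress", onProgress); // -> true; the topic is dropped once empty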
function fetchFile(url, options) {
if (!url) {
throw new Error("fetch url missing");
}
let total = 0;
let loaded = 0;
// eslint-disable-next-line unicorn/prefer-event-target
const instance = new EventEmitter();
const fetchHeaders = new Headers();
const fetchRequest = new Request(url);
const {
headers,
responseType = "arraybuffer",
method = "GET",
mode = "cors",
credentials = "same-origin",
cache = "default",
redirect = "follow",
referrer = "client"
} = options || {};
// add ability to abort
instance.controller = new AbortController();
// check if headers have to be added
if (headers) {
// add custom request headers
Object.entries(headers).forEach(_ref => {
let [key, value] = _ref;
fetchHeaders.append(key, value);
});
}
// parse fetch options
const fetchOptions = {
method,
mode,
credentials,
cache,
redirect,
referrer,
headers: fetchHeaders,
signal: instance.controller.signal
};
fetch(fetchRequest, fetchOptions).then(response => {
let progressAvailable = true;
if (!response.body) {
// ReadableStream is not yet supported in this browser
// see https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream
progressAvailable = false;
}
// Server must send CORS header "Access-Control-Expose-Headers: content-length"
const contentLength = response.headers.get("content-length");
total = Number.parseInt(contentLength, 10);
if (contentLength === null) {
// Content-Length server response header missing.
// Don't evaluate download progress if we can't compare against a total size
// see https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#Access-Control-Expose-Headers
progressAvailable = false;
}
if (!progressAvailable) {
// not able to check download progress so skip it
return response;
}
const reader = response.body.getReader();
return new Response(new ReadableStream({
start(controller) {
// The function below handles each chunk of data
const push = () => {
reader.read().then(_ref2 => {
let {
done,
value
} = _ref2;
// done - true if the stream has already given you all its data.
// value - some data. Always undefined when done is true.
if (done) {
instance.emit("progress", {
loaded,
total,
lengthComputable: false
});
// no more data needs to be consumed, close the stream
controller.close();
return;
}
loaded += value.byteLength;
instance.emit("progress", {
loaded,
total,
lengthComputable: !(total === 0)
});
// enqueue this chunk and read the next one
controller.enqueue(value);
push();
}).catch(error => {
controller.error(error);
});
};
push();
}
}), fetchOptions);
}).then(response => {
let errMsg;
if (response.ok) {
switch (responseType) {
case "arraybuffer":
{
return response.arrayBuffer();
}
case "json":
{
return response.json();
}
case "blob":
{
return response.blob();
}
case "text":
{
return response.text();
}
default:
{
errMsg = "Unknown responseType: ".concat(responseType);
break;
}
}
}
if (!errMsg) {
errMsg = "HTTP error status: ".concat(response.status);
}
throw new Error(errMsg);
}).then(response => {
instance.emit("success", response);
}).catch(error => {
instance.emit("error", error);
});
// return the fetch request
instance.fetchRequest = fetchRequest;
return instance;
}
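// Usage sketch for fetchFile (the URL is illustrative). The returned emitter fires
// "progress" while the body streams in, then "success" with the parsed body, or
// "error" on failure; instance.controller.abort() cancels the request:
//
//   const request = fetchFile("/audio/demo.mp3", { responseType: "arraybuffer" });
//   request.on("progress", ({ loaded, total }) => console.log(loaded, total));
//   request.on("success", (arrayBuffer) => console.log(arrayBuffer.byteLength));
//   request.on("error", (error) => console.error(error));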
var css_248z = ".wave-container{box-sizing:border-box;overflow:hidden;position:relative}.wave-container .channel{position:relative;z-index:1}.wave-container .channel .progress{bottom:0;box-sizing:border-box;left:0;overflow:hidden;position:absolute;top:0;z-index:4}.wave-container .channel .wave-canvas{margin:0;padding:0}.error-container{align-items:center;display:flex;height:100%}.error-container .wave-error-text{color:#ff4d4f}";
styleInject(css_248z);
let LoadStateEnum = /*#__PURE__*/function (LoadStateEnum) {
LoadStateEnum[LoadStateEnum["EMPTY"] = -1] = "EMPTY";
LoadStateEnum[LoadStateEnum["INIT"] = 0] = "INIT";
LoadStateEnum[LoadStateEnum["LOADING"] = 1] = "LOADING";
LoadStateEnum[LoadStateEnum["SUCCESS"] = 2] = "SUCCESS";
LoadStateEnum[LoadStateEnum["ERROR"] = 3] = "ERROR";
return LoadStateEnum;
}({});
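// The transpiled enum maps both ways: LoadStateEnum.SUCCESS === 2 and
// LoadStateEnum[2] === "SUCCESS". States: EMPTY(-1), INIT(0), LOADING(1),
// SUCCESS(2), ERROR(3).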
const renderErrorElementFunc = error => {
return /*#__PURE__*/React.createElement("div", {
className: "error-container"
}, /*#__PURE__*/React.createElement("span", {
className: "wave-error-text"
}, "Decoding failed: ", error));
};
const ReactAudioWave = /*#__PURE__*/react.forwardRef((props, ref) => {
const {
audioSrc,
waveHeight,
colors,
progressStyle,
onChangeLoadState,
onCurrentTimeChange,
onPlayEnded,
cursorTimeConfig,
className,
children,
timeFormat,
onWaveSizeChange,
placeholder: Placeholder,
barGap = 0,
barWidth = 1,
mono = true,
cursorVisible = true,
progressCursorVisible = true,
supportPlaybackRate = false,
emptyElement = /*#__PURE__*/React.createElement("span", null, "no audio content"),
renderErrorElement = renderErrorElementFunc
} = props;
const [loadState, setLoadState] = react.useState(LoadStateEnum.INIT);
const webAudioRef = react.useRef(null);
const containerRef = react.useRef(null);
const animationRef = react.useRef(null);
const waveProgressRef = react.useRef(null);
const cursorTimeRef = react.useRef(null);
const loadedPercentRef = react.useRef(null);
const loadedErrorRef = react.useRef(null);
const audioSourcePromiseRef = react.useRef(null);
const {
width
} = useSize(containerRef, onWaveSizeChange);
const containerWidthRef = react.useRef(width);
containerWidthRef.current = width;
if (!webAudioRef.current) {
webAudioRef.current = new WebAudio(supportPlaybackRate);
}
const {
progressColor,
waveColor,
cursorColor,
waveBackground = "transparent"
} = colors;
const onAudioError = react.useCallback(error => {
loadedErrorRef.current = error;
setLoadState(LoadStateEnum.ERROR);
onChangeLoadState === null || onChangeLoadState === void 0 || onChangeLoadState(LoadStateEnum.ERROR);
}, [onChangeLoadState]);
// Load the audio resource
const requestAudioFile = react.useCallback(async () => {
try {
setLoadState(LoadStateEnum.LOADING);
onChangeLoadState === null || onChangeLoadState === void 0 || onChangeLoadState(LoadStateEnum.LOADING);
const request = fetchFile(audioSrc); // fetchFile returns an EventEmitter synchronously
// Audio fetched successfully
request.on("success", data => {
if (data.byteLength === 0) {
setLoadState(LoadStateEnum.EMPTY);
onChangeLoadState === null || onChangeLoadState === void 0 || onChangeLoadState(LoadStateEnum.EMPTY);
return;
}
webAudioRef.current.initWebAudio(data, duration => {
// Audio decoded successfully
loadedErrorRef.current = null;
setLoadState(LoadStateEnum.SUCCESS);
onChangeLoadState === null || onChangeLoadState === void 0 || onChangeLoadState(LoadStateEnum.SUCCESS, duration);
}, onAudioError // audio decoding failed
);
});
// Audio download progress
request.on("progress", loadedState => {
if (loadedState === undefined) {
return;
}
loadedPercentRef.current.changeLoadedPercent(formatPercent(loadedState.loaded / loadedState.total));
});
// Audio fetch failed
request.on("error", onAudioError);
} catch (error) {
onAudioError(error);
}
}, [audioSrc, onAudioError, onChangeLoadState]);
// Compute the real-time progress
const calcRealTimeProgress = react.useCallback(() => {
var _waveProgressRef$curr;
const currentOffsetTime = webAudioRef.current.getCurrentOffsetTime();
const currentOffsetPixels = webAudioRef.current.getCurrentOffsetPixels();
(_waveProgressRef$curr = waveProgressRef.current) === null || _waveProgressRef$curr === void 0 || _waveProgressRef$curr.changeOffsetPixels(currentOffsetPixels);
onCurrentTimeChange === null || onCurrentTimeChange === void 0 || onCurrentTimeChange(currentOffsetTime);
}, [onCurrentTimeChange]);
// Recompute playback progress on each animation frame while playing
const startAnimation = react.useCallback(() => {
if (!webAudioRef.current.playing) {
// This guard prevents the animation from outliving playback when it ends partway through
window.cancelAnimationFrame(animationRef.current);
return;
}
calcRealTimeProgress();
animationRef.current = window.requestAnimationFrame(startAnimation);
}, [calcRealTimeProgress]);
// Pause the audio
const pauseAudio = react.useCallback(() => {
webAudioRef.current.pause();
window.cancelAnimationFrame(animationRef.current);
}, []);
// Play the audio
const playAudio = react.useCallback(() => {
audioSourcePromiseRef.current = webAudioRef.current.updateAudioSource(); // with <audio>-element playback (playback-rate support), this step merely registers the "ended" listener
webAudioRef.current.play();
startAnimation();
audioSourcePromiseRef.current.then(ended => {
if (ended && webAudioRef.current.playing) {
// Playback finished; combining the resolved promise with the playing flag to detect completion only matters for AudioBuffer playback; with the <audio> element the flag is effectively unused
webAudioRef.current.ended();
waveProgressRef.current.changeOffsetPixels(0);
onPlayEnded === null || onPlayEnded === void 0 || onPlayEnded();
onCurrentTimeChange === null || onCurrentTimeChange === void 0 || onCurrentTimeChange(0);
window.cancelAnimationFrame(animationRef.current);
}
});
}, [onCurrentTimeChange, onPlayEnded, startAnimation]);
// Seek to a time offset; the optional second argument controls whether playback continues after the seek
const seekTo = react.useCallback((offsetTime, play) => {
const {
playing: currentPlaying
} = webAudioRef.current;
// If the audio is currently paused
if (currentPlaying === false) {
webAudioRef.current.updateCurrentOffsetPositon(offsetTime);
calcRealTimeProgress();
if (play === true) {
playAudio();
}
// If the seek should end paused, nothing more to do: it is already paused
return;
}
// If the audio is currently playing
if (currentPlaying === true) {
if (supportPlaybackRate) {
// With playback-rate support, the <audio> element controls playback
webAudioRef.current.updateCurrentOffsetPositon(offsetTime);
// To keep playing after the seek, updating the audio's currentTime is enough
if (play === false) {
pauseAudio();
calcRealTimeProgress();
}
return;
}
if (play === true || play === undefined) {
// With AudioBuffer-driven playback, a playing source must be paused first
pauseAudio();
webAudioRef.current.updateCurrentOffsetPositon(offsetTime);
audioSourcePromiseRef.current.then(() => {
playAudio();
});
return;
}
// If the seek should end paused
pauseAudio();
webAudioRef.current.updateCurrentOffsetPositon(offsetTime);
calcRealTimeProgress();
}
}, [calcRealTimeProgress, pauseAudio, playAudio, supportPlaybackRate]);
// Set the volume, 0 to 1
const changeVolume = react.useCallback(volume => {
webAudioRef.current.changeVolume(volume);
}, []);
const changePlaybackRate = react.useCallback(playbackRate => {
webAudioRef.current.changePlaybackRate(playbackRate);
}, []);
const destroy = react.useCallback(() => {
if (webAudioRef.current.playing) {
pauseAudio();
}
webAudioRef.current = null;
setLoadState(LoadStateEnum.INIT);
}, [pauseAudio]);
// Compute the seek time from the mouse click position
const calcCurrentPosition = event => {
const containerRectInfo = containerRef.current.getBoundingClientRect();
const x = event.clientX - containerRectInfo.left;
const timeValue = Math.max(0, webAudioRef.current.getPixelsToSeconds(x));
return {
x,
time: timeValue,
clientX: event.clientX,
right: containerRectInfo.right
};
};
const onWaveSeekClick = event => {
const {
time: offsetTime
} = calcCurrentPosition(event);
seekTo(offsetTime);
};
// While the live cursor is enabled, compute on every mouse move which time point the pointer is over
const onMouseMove = event => {
var _cursorTimeRef$curren;
if (!cursorVisible || loadState !== LoadStateEnum.SUCCESS) {
return;
}
const {
time,
x,
clientX,
right
} = calcCurrentPosition(event);
(_cursorTimeRef$curren = cursorTimeRef.current) === null || _cursorTimeRef$curren === void 0 || _cursorTimeRef$curren.updateCursorPosition(time, x, clientX, right);
};
// Hide the live cursor when the mouse leaves
const onMouseleave = () => {
var _cursorTimeRef$curren2;
if (!cursorVisible || loadState !== LoadStateEnum.SUCCESS) {
return;
}
(_cursorTimeRef$curren2 = cursorTimeRef.current) === null || _cursorTimeRef$curren2 === void 0 || _cursorTimeRef$curren2.hideCursor();
};
// Show the live cursor when the mouse enters
const onMouseEnter = () => {
var _cursorTimeRef$curren3;
if (!cursorVisible || loadState !== LoadStateEnum.SUCCESS) {
return;
}
(_cursorTimeRef$curren3 = cursorTimeRef.current) === null || _cursorTimeRef$curren3 === void 0 || _cursorTimeRef$curren3.showCursor();
};
react.useImperativeHandle(ref, () => ({
seekTo,
destroy,
play: playAudio,
pause: pauseAudio,
volume: changeVolume,
playbackRate: changePlaybackRate
}));
react.useEffect(() => {
requestAudioFile();
}, [requestAudioFile]);
react.useEffect(() => {
webAudioRef.current.initAudioElement(audioSrc, containerRef.current);
}, [audioSrc]);
const waveNode = react.useMemo(() => {
if (width && loadState === LoadStateEnum.SUCCESS) {
// A successful load guarantees the AudioBuffer is available
const peakData = webAudioRef.current.getWebaudioPeaks(width, mono);
return peakData.data.map((data, index) => {
var _peakData$length;
const canvasWidth = (_peakData$length = peakData === null || peakData === void 0 ? void 0 : peakData.length) !== null && _peakData$length !== void 0 ? _peakData$length : width;
const waveCanvasProps = {
pixelRatio: webAudioRef.current.pixelRatio,
color: waveColor,
peaks: data,
bits: peakData.bits,
width: canvasWidth,
height: waveHeight,
className: "wave-canvas",
barGap,
barWidth
};
return /*#__PURE__*/React.createElement("div", {
key: index,
className: "channel",
style: {
height: waveHeight,
width: canvasWidth,
backgroundColor: waveBackground
}
}, /*#__PURE__*/React.createElement(WaveProgress$1, {
ref: waveProgressRef,
progressStyle: progressStyle,
progressCursorVisible: progressCursorVisible,
progressColor: progressColor,
cursorColor: cursorColor,
className: "progress"
}, /*#__PURE__*/React.createElement(WaveCanvas$1, _extends({}, waveCanvasProps, {
color: progressColor
}))), /*#__PURE__*/React.createElement(CursorTime$1, {
ref: cursorTimeRef,
timeFormat: timeFormat,
config: cursorTimeConfig,
cursorColor: cursorColor,
cursorVisible: cursorVisible
}), /*#__PURE__*/React.createElement(WaveCanvas$1, waveCanvasProps));
});
}
}, [barGap, barWidth, cursorColor, cursorTimeConfig, cursorVisible, loadState, mono, progressColor, progressCursorVisible, progressStyle, waveBackground, waveColor, waveHeight, width, timeFormat]);
const renderContent = () => {
if (loadState === LoadStateEnum.EMPTY) {
return emptyElement;
}
if (loadState === LoadStateEnum.LOADING) {
return /*#__PURE__*/React.createElement(Placeholder, null, /*#__PURE__*/React.createElement(LoadedPercent$1, {
ref: loadedPercentRef
}));
}
if (loadState === LoadStateEnum.ERROR) {
var _loadedErrorRef$curre;
return renderErrorElement === null || renderErrorElement === void 0 ? void 0 : renderErrorElement((_loadedErrorRef$curre = loadedErrorRef.current) === null || _loadedErrorRef$curre === void 0 ? void 0 : _loadedErrorRef$curre.toString());
}
return /*#__PURE__*/React.createElement(React.Fragment, null, children, waveNode);
};
return /*#__PURE__*/React.createElement("div", {
className: classNames("wave-container", {
[className]: !!className
}),
style: {
height: waveHeight
},
ref: containerRef,
onMouseMove: onMouseMove,
onMouseLeave: onMouseleave,
onMouseEnter: onMouseEnter,
onClick: onWaveSeekClick
}, renderContent());
});
var wave = /*#__PURE__*/react.memo(ReactAudioWave);
exports.LoadStateEnum = LoadStateEnum;
exports.ReactAudioWave = wave;
return exports;
// External dependencies resolved from globals: the @babel/runtime helpers, react,
// classnames, resize-observer-polyfill, and webaudio-peaks (global identifiers
// reconstructed here to match the IIFE parameter list above).
})({}, _extends, React, classNames, ResizeObserver, _defineProperty, extractPeaks);
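// Usage sketch for the bundle (assumptions: React and ReactDOM are loaded as
// globals, this file exposes window.ReactAudioWave, and the audio URL, colors,
// and element id are illustrative):
//
//   const { ReactAudioWave: Wave, LoadStateEnum } = window.ReactAudioWave;
//   const waveRef = React.createRef();
//   ReactDOM.render(
//     React.createElement(Wave, {
//       ref: waveRef,
//       audioSrc: "/audio/demo.mp3",
//       waveHeight: 100,
//       colors: { waveColor: "#d9d9d9", progressColor: "#1890ff", cursorColor: "#333" },
//       placeholder: ({ children }) => React.createElement("div", null, "loading ", children),
//       onChangeLoadState: (state, duration) => {
//         if (state === LoadStateEnum.SUCCESS) console.log("duration:", duration);
//       }
//     }),
//     document.getElementById("root")
//   );
//
// The imperative handle exposes seekTo(time, play?), play(), pause(), volume(v),
// playbackRate(r) (needs supportPlaybackRate: true), and destroy().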