@trap_stevo/legendarybuilderproreact-ui
The legendary UI & utility API that makes your application a legendary application. ~ Created by Steven Compton
420 lines • 15.6 kB
JavaScript
import _asyncToGenerator from "@babel/runtime/helpers/asyncToGenerator";
import _slicedToArray from "@babel/runtime/helpers/slicedToArray";
import _regeneratorRuntime from "@babel/runtime/regenerator";
import React, { createContext, useContext, useState, useEffect, useRef } from "react";
import { audioBufferToWav, audioBufferToMP3, detectBitrate } from "../HUDManagers/HUDAudioManager.js";
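// Context that carries the HUD audio API; consumers read it through the useHUDAudio hook exported below.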
var HUDAudioContext = /*#__PURE__*/createContext(null);
export var HUDAudioProvider = function HUDAudioProvider(_ref) {
var children = _ref.children;
var audioContextRef = useRef(new (window.AudioContext || window.webkitAudioContext)());
var _useState = useState(null),
_useState2 = _slicedToArray(_useState, 2),
audioBuffer = _useState2[0],
setAudioBuffer = _useState2[1];
var analyserRef = useRef(audioContextRef.current.createAnalyser());
var gainNodeRef = useRef(audioContextRef.current.createGain());
var animationFrameRef = useRef(null);
var videoElementRef = useRef(null);
var sourceRef = useRef(null);
analyserRef.current.fftSize = 2048;
gainNodeRef.current.connect(audioContextRef.current.destination);
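// Builds a detached <video> element for the given source and resolves with a MediaElementAudioSourceNode once the video can play through.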
var loadVideoAudio = /*#__PURE__*/function () {
var _ref2 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee(videoSRC) {
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) switch (_context.prev = _context.next) {
case 0:
return _context.abrupt("return", new Promise(function (resolve, reject) {
var video = document.createElement("video");
video.src = videoSRC;
video.crossOrigin = "anonymous";
video.preload = "auto";
video.oncanplaythrough = function () {
var track = audioContextRef.current.createMediaElementSource(video);
videoElementRef.current = video;
resolve(track);
};
video.onerror = function (error) {
reject(error);
};
}));
case 1:
case "end":
return _context.stop();
}
}, _callee);
}));
return function loadVideoAudio(_x) {
return _ref2.apply(this, arguments);
};
}();
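// Fetches an audio source, decodes it into an AudioBuffer, stores it in state for playAudio, and returns the buffer.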
var loadAudio = /*#__PURE__*/function () {
var _ref3 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee2(audioSRC) {
var response, arrayBuffer, buffer;
return _regeneratorRuntime.wrap(function _callee2$(_context2) {
while (1) switch (_context2.prev = _context2.next) {
case 0:
_context2.next = 2;
return fetch(audioSRC);
case 2:
response = _context2.sent;
_context2.next = 5;
return response.arrayBuffer();
case 5:
arrayBuffer = _context2.sent;
_context2.next = 8;
return audioContextRef.current.decodeAudioData(arrayBuffer);
case 8:
buffer = _context2.sent;
setAudioBuffer(buffer);
return _context2.abrupt("return", buffer);
case 11:
case "end":
return _context2.stop();
}
}, _callee2);
}));
return function loadAudio(_x2) {
return _ref3.apply(this, arguments);
};
}();
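// Plays a source through a plain HTMLAudioElement (outside the Web Audio graph), wiring the optional lifecycle callbacks, and returns the element.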
var playAudioFile = function playAudioFile(audioSRC) {
var _ref4 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
_ref4$onLoadedData = _ref4.onLoadedData,
onLoadedData = _ref4$onLoadedData === void 0 ? null : _ref4$onLoadedData,
_ref4$onEnded = _ref4.onEnded,
onEnded = _ref4$onEnded === void 0 ? null : _ref4$onEnded,
_ref4$onError = _ref4.onError,
onError = _ref4$onError === void 0 ? null : _ref4$onError,
_ref4$onPause = _ref4.onPause,
onPause = _ref4$onPause === void 0 ? null : _ref4$onPause,
_ref4$onPlay = _ref4.onPlay,
onPlay = _ref4$onPlay === void 0 ? null : _ref4$onPlay,
_ref4$playbackRate = _ref4.playbackRate,
playbackRate = _ref4$playbackRate === void 0 ? 1.0 : _ref4$playbackRate,
_ref4$volume = _ref4.volume,
volume = _ref4$volume === void 0 ? 1.0 : _ref4$volume,
_ref4$loop = _ref4.loop,
loop = _ref4$loop === void 0 ? false : _ref4$loop;
try {
var audio = new Audio(audioSRC);
audio.volume = volume;
audio.loop = loop;
audio.playbackRate = playbackRate;
audio.onended = function () {
if (onEnded) {
onEnded();
}
};
audio.onerror = function (error) {
if (onError) {
onError(error);
}
console.error("Did not load audio from file: ", error);
};
audio.onplay = function () {
if (onPlay) {
onPlay();
}
};
audio.onpause = function () {
if (onPause) {
onPause();
}
};
audio.onloadeddata = function () {
if (onLoadedData) {
onLoadedData();
}
};
audio.play();
return audio;
} catch (error) {
console.error("Did not play audio from file: ", error);
if (onError) {
onError(error);
}
}
};
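// Plays the decoded audioBuffer through the analyser and gain nodes, starting at startTime and driving the progress/waveform callbacks from a requestAnimationFrame loop.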
var playAudio = function playAudio() {
var startTime = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
var volume = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0.5;
var drawCurrentWaveform = arguments.length > 2 ? arguments[2] : undefined;
var drawWaveform = arguments.length > 3 ? arguments[3] : undefined;
var setProgress = arguments.length > 4 ? arguments[4] : undefined;
var stopAudioCallback = arguments.length > 5 ? arguments[5] : undefined;
var dragging = arguments.length > 6 ? arguments[6] : undefined;
var previewStart = arguments.length > 7 ? arguments[7] : undefined;
if (sourceRef.current) {
stopAudio(drawWaveform, setProgress, audioBuffer, previewStart);
}
var source = audioContextRef.current.createBufferSource();
source.buffer = audioBuffer;
gainNodeRef.current.gain.value = volume;
source.connect(analyserRef.current);
analyserRef.current.connect(gainNodeRef.current);
source.start(0, startTime);
sourceRef.current = source;
var startTimeOffset = audioContextRef.current.currentTime - startTime;
var updateProgress = function updateProgress() {
if (dragging) {
stopAudioCallback();
return;
}
var currentTime = audioContextRef.current.currentTime - startTimeOffset;
var duration = audioBuffer.duration;
var progress = currentTime / duration;
if (setProgress) {
progress = Math.min(progress, 1);
setProgress(progress);
}
if (drawCurrentWaveform) {
drawCurrentWaveform(progress);
}
if (drawWaveform) {
drawWaveform(progress);
}
animationFrameRef.current = requestAnimationFrame(updateProgress);
};
animationFrameRef.current = requestAnimationFrame(updateProgress);
source.onended = function () {
if (stopAudioCallback) {
stopAudioCallback();
return;
}
if (setProgress) {
setProgress(previewStart / 1000);
}
};
};
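// Stops and disconnects the active buffer source, resets progress to the preview start, redraws the full waveform, and cancels the progress loop.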
var stopAudio = function stopAudio(drawFullWaveform, setProgress, audioBuffer, previewStart) {
if (sourceRef.current) {
sourceRef.current.stop();
sourceRef.current.disconnect();
sourceRef.current = null;
if (setProgress) {
setProgress(previewStart / 1000);
}
if (drawFullWaveform && audioBuffer) {
drawFullWaveform(audioBuffer);
}
if (animationFrameRef.current) {
cancelAnimationFrame(animationFrameRef.current);
animationFrameRef.current = null;
}
}
};
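// Maps a 0-100 volume value onto the gain node used for buffer playback.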
var adjustVolume = function adjustVolume(volume) {
if (gainNodeRef.current) {
gainNodeRef.current.gain.value = volume / 100;
}
};
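// Fetches and decodes an audio source, then converts it with the HUDAudioManager helpers to WAV or MP3, or returns the raw AudioBuffer, depending on returnInFormat.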
var extractAudioFromSRC = /*#__PURE__*/function () {
var _ref5 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee3(audioSRC) {
var returnInFormat,
response,
arrayBuffer,
audioContext,
audioBuffer,
_args3 = arguments;
return _regeneratorRuntime.wrap(function _callee3$(_context3) {
while (1) switch (_context3.prev = _context3.next) {
case 0:
returnInFormat = _args3.length > 1 && _args3[1] !== undefined ? _args3[1] : "wav";
_context3.next = 3;
return fetch(audioSRC);
case 3:
response = _context3.sent;
_context3.next = 6;
return response.arrayBuffer();
case 6:
arrayBuffer = _context3.sent;
audioContext = new AudioContext();
_context3.next = 10;
return audioContext.decodeAudioData(arrayBuffer);
case 10:
audioBuffer = _context3.sent;
_context3.t0 = returnInFormat.toLowerCase();
_context3.next = _context3.t0 === "wav" ? 14 : _context3.t0 === "mp3" ? 15 : _context3.t0 === "audiobuffer" ? 16 : 17;
break;
case 14:
return _context3.abrupt("return", audioBufferToWav(audioBuffer));
case 15:
return _context3.abrupt("return", audioBufferToMP3(audioBuffer).mp3Blob);
case 16:
return _context3.abrupt("return", audioBuffer);
case 17:
return _context3.abrupt("return", audioBuffer);
case 18:
case "end":
return _context3.stop();
}
}, _callee3);
}));
return function extractAudioFromSRC(_x3) {
return _ref5.apply(this, arguments);
};
}();
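// Decodes an audio Blob into an AudioBuffer using a fresh AudioContext.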
var convertBlobToAudioBuffer = /*#__PURE__*/function () {
var _ref6 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee4(audioBlob) {
var audioContext, arrayBuffer, audioBuffer;
return _regeneratorRuntime.wrap(function _callee4$(_context4) {
while (1) switch (_context4.prev = _context4.next) {
case 0:
audioContext = new (window.AudioContext || window.webkitAudioContext)();
_context4.next = 3;
return audioBlob.arrayBuffer();
case 3:
arrayBuffer = _context4.sent;
_context4.next = 6;
return audioContext.decodeAudioData(arrayBuffer);
case 6:
audioBuffer = _context4.sent;
return _context4.abrupt("return", audioBuffer);
case 8:
case "end":
return _context4.stop();
}
}, _callee4);
}));
return function convertBlobToAudioBuffer(_x4) {
return _ref6.apply(this, arguments);
};
}();
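// Creates an object URL for the audio: external sources are fetched as-is, local ones are run through extractAudioFromSRC before being wrapped in a Blob.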
var createAudioURLFromSRC = /*#__PURE__*/function () {
var _ref7 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee5(audioSRC) {
var audioType,
external,
audioBuffer,
audioBlob,
_args5 = arguments;
return _regeneratorRuntime.wrap(function _callee5$(_context5) {
while (1) switch (_context5.prev = _context5.next) {
case 0:
audioType = _args5.length > 1 && _args5[1] !== undefined ? _args5[1] : "wav";
external = _args5.length > 2 && _args5[2] !== undefined ? _args5[2] : false;
if (!external) {
_context5.next = 8;
break;
}
_context5.next = 5;
return fetch(audioSRC);
case 5:
_context5.t0 = _context5.sent;
_context5.next = 11;
break;
case 8:
_context5.next = 10;
return extractAudioFromSRC(audioSRC, audioType);
case 10:
_context5.t0 = _context5.sent;
case 11:
audioBuffer = _context5.t0;
_context5.t1 = Blob;
if (!external) {
_context5.next = 19;
break;
}
_context5.next = 16;
return audioBuffer.arrayBuffer();
case 16:
_context5.t2 = _context5.sent;
_context5.next = 20;
break;
case 19:
_context5.t2 = audioBuffer;
case 20:
_context5.t3 = _context5.t2;
_context5.t4 = [_context5.t3];
_context5.t5 = {
type: "audio/".concat(audioType)
};
audioBlob = new _context5.t1(_context5.t4, _context5.t5);
return _context5.abrupt("return", URL.createObjectURL(audioBlob));
case 25:
case "end":
return _context5.stop();
}
}, _callee5);
}));
return function createAudioURLFromSRC(_x5) {
return _ref7.apply(this, arguments);
};
}();
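// Triggers a browser download of the audio by creating an object URL and clicking a temporary anchor; returns the URL.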
var downloadAudio = /*#__PURE__*/function () {
var _ref8 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee6(audioSRC) {
var videoAudio,
audioName,
audioType,
filename,
currentAudioType,
url,
a,
_args6 = arguments;
return _regeneratorRuntime.wrap(function _callee6$(_context6) {
while (1) switch (_context6.prev = _context6.next) {
case 0:
videoAudio = _args6.length > 1 && _args6[1] !== undefined ? _args6[1] : false;
audioName = _args6.length > 2 && _args6[2] !== undefined ? _args6[2] : null;
audioType = _args6.length > 3 && _args6[3] !== undefined ? _args6[3] : "wav";
filename = audioName || audioSRC.split("/").pop().split(".")[0];
currentAudioType = audioType || "wav";
if (!videoAudio) {
_context6.next = 11;
break;
}
_context6.next = 8;
return createAudioURLFromSRC(audioSRC, currentAudioType);
case 8:
_context6.t0 = _context6.sent;
_context6.next = 14;
break;
case 11:
_context6.next = 13;
return createAudioURLFromSRC(audioSRC, currentAudioType, true);
case 13:
_context6.t0 = _context6.sent;
case 14:
url = _context6.t0;
a = document.createElement("a");
a.href = url;
a.download = "".concat(filename, ".").concat(currentAudioType);
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
return _context6.abrupt("return", url);
case 22:
case "end":
return _context6.stop();
}
}, _callee6);
}));
return function downloadAudio(_x6) {
return _ref8.apply(this, arguments);
};
}();
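// Expose the full audio API on the context provider.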
return /*#__PURE__*/React.createElement(HUDAudioContext.Provider, {
value: {
loadVideoAudio: loadVideoAudio,
loadAudio: loadAudio,
downloadAudio: downloadAudio,
detectBitrate: detectBitrate,
audioBufferToWav: audioBufferToWav,
audioBufferToMP3: audioBufferToMP3,
convertBlobToAudioBuffer: convertBlobToAudioBuffer,
createAudioURLFromSRC: createAudioURLFromSRC,
extractAudioFromSRC: extractAudioFromSRC,
playAudioFile: playAudioFile,
playAudio: playAudio,
stopAudio: stopAudio,
adjustVolume: adjustVolume,
analyser: analyserRef
}
}, children);
};
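// Hook for consuming the audio API; throws when called outside of a HUDAudioProvider.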
export var useHUDAudio = function useHUDAudio() {
var context = useContext(HUDAudioContext);
if (!context) {
throw new Error("useHUDAudio must be used within a HUDAudioProvider");
}
return context;
};
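A minimal usage sketch (assuming HUDAudioProvider and useHUDAudio are re-exported from the package root; the component name and audio path below are hypothetical):
import React from "react";
import { HUDAudioProvider, useHUDAudio } from "@trap_stevo/legendarybuilderproreact-ui";

// Hypothetical consumer: plays a clip through playAudioFile with a few lifecycle callbacks.
function ChimeButton() {
  const { playAudioFile } = useHUDAudio();
  return (
    <button
      onClick={() =>
        playAudioFile("/sounds/chime.mp3", {
          volume: 0.8,
          onPlay: () => console.log("Chime started."),
          onEnded: () => console.log("Chime finished."),
          onError: (error) => console.error("Chime failed to play.", error)
        })
      }
    >
      Play chime
    </button>
  );
}

// useHUDAudio throws unless a HUDAudioProvider is mounted above the consumer.
export default function App() {
  return (
    <HUDAudioProvider>
      <ChimeButton />
    </HUDAudioProvider>
  );
}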