@yoliani/react-native-audio-waveform
Version:
A React Native component for displaying audio waveforms with ease in React Native applications
546 lines • 22.9 kB
JavaScript
import clamp from 'lodash/clamp';
import floor from 'lodash/floor';
import head from 'lodash/head';
import isEmpty from 'lodash/isEmpty';
import isNil from 'lodash/isNil';
import React, { forwardRef, useEffect, useImperativeHandle, useRef, useState, } from 'react';
import { PanResponder, ScrollView, View, } from 'react-native';
import { DurationType, FinishMode, PermissionStatus, playbackSpeedThreshold, PlayerState, RecorderState, UpdateFrequency, } from '../../constants';
import { useAudioPermission, useAudioPlayer, useAudioRecorder, } from '../../hooks';
import { WaveformCandle } from '../WaveformCandle';
import styles from './WaveformStyles';
import {} from './WaveformTypes';
/**
 * Waveform
 *
 * Renders an audio waveform as a row of `WaveformCandle`s and exposes playback
 * and recording controls through an imperative ref handle (`useImperativeHandle`).
 *
 * Two mutually exclusive modes (driven by the `mode` prop):
 *  - 'static': plays back an existing audio file (`path` or `source.uri`) and
 *    supports scrubbing via a PanResponder over the waveform.
 *  - 'live': records audio and appends incoming decibel samples to the waveform,
 *    capped at `maxCandlesToRender` (oldest sample dropped first).
 *
 * Audio work is delegated to the `useAudioPlayer` / `useAudioRecorder` /
 * `useAudioPermission` hooks; this component orchestrates their calls and
 * mirrors their results into local state for rendering.
 */
export const Waveform = forwardRef((props, ref) => {
const {
// The maximum number of candles set in the waveform. Once this limit is reached, the oldest candle will be removed as a new one is added to the waveform.
maxCandlesToRender = 300, mode, volume = 3,
// The playback speed of the audio player. A value of 1.0 represents normal playback speed.
playbackSpeed = 1.0, candleSpace = 2, candleWidth = 5, containerStyle = {}, waveColor, scrubColor, onPlayerStateChange, onRecorderStateChange, onPanStateChange = () => { }, onError = (_error) => { }, onRecordingProgressChange = (_currentProgress) => { }, onCurrentProgressChange = () => { }, candleHeightScale = 3, onChangeWaveformLoadState = (_state) => { }, showsHorizontalScrollIndicator = false, } = props;
// Determine the audio source to use: `source.uri` takes precedence over `path`;
// null means no static-mode source was provided.
const audioSource = React.useMemo(() => {
const staticProps = props;
if (staticProps.source?.uri) {
return staticProps.source.uri;
}
if (staticProps.path) {
return staticProps.path;
}
return null;
}, [props]);
// Create player key based on the audio source.
// NOTE(review): when audioSource is null this yields the literal key
// "PlayerFornull" — presumably harmless because static mode errors out
// below, but verify against the native player registry.
const playerKey = React.useMemo(() => `PlayerFor${audioSource}`, [audioSource]);
// Validate that either path or source is provided for static mode
React.useEffect(() => {
if (mode === 'static' && !audioSource) {
onError(new Error('Either path or source must be provided for static mode'));
}
}, [mode, audioSource, onError]);
// Refs: view/scroll handles plus mutable flags that must not trigger re-renders.
const viewRef = useRef(null);
const scrollRef = useRef(null);
// True once measureInWindow returned a non-origin position for the waveform view.
const isLayoutCalculated = useRef(false);
// True when playback was paused automatically by a pan gesture (so it can auto-resume).
const isAutoPaused = useRef(false);
// Tracks intent to play, independent of async playerState updates.
const isAudioPlaying = useRef(false);
const [waveform, setWaveform] = useState([]);
const [viewLayout, setViewLayout] = useState(null);
const [seekPosition, setSeekPosition] = useState(null);
const [songDuration, setSongDuration] = useState(0);
const [noOfSamples, setNoOfSamples] = useState(0);
const [currentProgress, setCurrentProgress] = useState(0);
const [panMoving, setPanMoving] = useState(false);
const [playerState, setPlayerState] = useState(PlayerState.stopped);
const [recorderState, setRecorderState] = useState(RecorderState.stopped);
const [isWaveformExtracted, setWaveformExtracted] = useState(false);
// Clamp speeds above the supported threshold back to normal speed.
const audioSpeed = playbackSpeed > playbackSpeedThreshold ? 1.0 : playbackSpeed;
const { extractWaveformData, preparePlayer, getDuration, seekToPlayer, playPlayer, stopPlayer, pausePlayer, onCurrentDuration, onDidFinishPlayingAudio, onCurrentRecordingWaveformData, setPlaybackSpeed, markPlayerAsUnmounted, } = useAudioPlayer();
const { startRecording, stopRecording, pauseRecording, resumeRecording } = useAudioRecorder();
const { checkHasAudioRecorderPermission } = useAudioPermission();
/**
 * Updates the playback speed of the audio player.
 *
 * @param speed - The new playback speed to set.
 * @returns A Promise that resolves when the playback speed has been updated.
 * @throws An error if there was a problem updating the playback speed.
 */
const updatePlaybackSpeed = async (speed) => {
try {
await setPlaybackSpeed({ speed, playerKey });
}
catch (error) {
console.error('Error updating playback speed', error);
}
};
// Push the (clamped) speed to the player whenever it changes.
useEffect(() => {
updatePlaybackSpeed(audioSpeed);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [audioSpeed]);
/**
 * Prepares the underlying player for the configured source.
 *
 * @param progress - Optional initial progress to restore after preparing.
 * @returns The preparePlayer result; rejects if no source is configured
 *          or preparation fails.
 */
const prepareAudioPlayer = async (progress) => {
if (!audioSource) {
return Promise.reject(new Error('Can not start player: no audio source provided (path or source)'));
}
try {
const staticProps = props;
const prepare = await preparePlayer({
...(staticProps.path ? { path: staticProps.path } : {}),
...(staticProps.source ? { source: staticProps.source } : {}),
playerKey,
updateFrequency: UpdateFrequency.medium,
volume: volume,
progress,
});
return Promise.resolve(prepare);
}
catch (err) {
return Promise.reject(err);
}
};
/**
 * Fetches the audio duration and stores it in state.
 *
 * @returns The duration (may be negative if the player reports one; state is
 *          clamped to 0 in that case); rejects when no duration is available.
 */
const getAudioDuration = async () => {
try {
const duration = await getDuration({
playerKey,
durationType: DurationType.max,
});
if (!isNil(duration)) {
const audioDuration = Number(duration);
setSongDuration(audioDuration > 0 ? audioDuration : 0);
return Promise.resolve(audioDuration);
}
else {
return Promise.reject(new Error(`Could not get duration for source: ${audioSource}`));
}
}
catch (err) {
return Promise.reject(err);
}
};
// Prepare the player, then resolve its duration; a negative first reading is
// retried once (the second call's result only updates state). Errors are
// surfaced through the onError prop rather than rethrown.
const preparePlayerAndGetDuration = async () => {
try {
const prepare = await prepareAudioPlayer();
if (prepare) {
const duration = await getAudioDuration();
if (duration < 0) {
await getAudioDuration();
}
}
}
catch (err) {
onError(err);
}
};
/**
 * Extracts waveform samples for the configured source, stores them in state,
 * then prepares the player and resolves its duration.
 *
 * @param noOfSample - Number of samples to extract (floored at 1).
 * Loading state is reported via onChangeWaveformLoadState; failures via onError.
 */
const getAudioWaveForm = async (noOfSample) => {
if (!isNil(audioSource) && !isEmpty(audioSource)) {
try {
onChangeWaveformLoadState(true);
const staticProps = props;
const result = await extractWaveformData({
...(staticProps.path ? { path: staticProps.path } : {}),
...(staticProps.source ? { source: staticProps.source } : {}),
playerKey,
noOfSamples: Math.max(noOfSample, 1),
});
onChangeWaveformLoadState(false);
if (!isNil(result) && !isEmpty(result)) {
// extractWaveformData returns an array of channels; only the first is used.
const waveforms = head(result);
if (!isNil(waveforms) && !isEmpty(waveforms)) {
setWaveform(waveforms);
await preparePlayerAndGetDuration();
setWaveformExtracted(true);
}
}
}
catch (err) {
onChangeWaveformLoadState(false);
onError(err);
}
}
else {
onError(new Error(`Can not find waveform for mode ${mode} source: ${audioSource}`));
}
};
/**
 * Stops playback (static mode only).
 *
 * @param resetProgress - When true (default) the progress indicator is reset to 0.
 * @returns The stopPlayer result; rejects in live mode or on failure.
 */
const stopPlayerAction = async (resetProgress = true) => {
if (mode === 'static') {
try {
const result = await stopPlayer({
playerKey,
});
isAudioPlaying.current = false;
if (!isNil(result) && result) {
if (resetProgress) {
setCurrentProgress(0);
}
setPlayerState(PlayerState.stopped);
return Promise.resolve(result);
}
else {
return Promise.reject(new Error(`error in stopping player for source: ${audioSource}`));
}
}
catch (err) {
return Promise.reject(err);
}
}
else {
return Promise.reject(new Error('error in stopping player: mode is not static'));
}
};
/**
 * Starts (or resumes) playback in static mode.
 *
 * If stopped, the player is first re-prepared — either from the already
 * extracted waveform (restoring currentProgress) or by running the full
 * waveform extraction. Args are forwarded to playPlayer.
 *
 * @returns true on success; rejects in live mode or on failure.
 */
const startPlayerAction = async (args) => {
if (mode === 'static') {
try {
isAudioPlaying.current = true;
if (playerState === PlayerState.stopped) {
if (isWaveformExtracted) {
await prepareAudioPlayer(currentProgress);
}
else {
await getAudioWaveForm(noOfSamples);
}
}
const play = await playPlayer({
finishMode: FinishMode.stop,
playerKey,
speed: audioSpeed,
...args,
});
if (play) {
setPlayerState(PlayerState.playing);
return Promise.resolve(true);
}
else {
return Promise.reject(new Error(`error in starting player for source: ${audioSource}`));
}
}
catch (error) {
if (playerState === PlayerState.paused) {
// If the player is not prepared, triggering the stop will reset the player for next click. Fix blocked paused player after a call to `stopAllPlayers`
await stopPlayerAction();
}
return Promise.reject(error);
}
}
else {
return Promise.reject(new Error('error in starting player: mode is not static'));
}
};
/**
 * Pauses playback in static mode.
 *
 * @param changePlayerState - When false, pauses without flipping playerState
 *        (used for the temporary pause while scrubbing).
 * @returns true on success; rejects in live mode or on failure.
 */
const pausePlayerAction = async (changePlayerState = true) => {
if (mode === 'static') {
try {
isAudioPlaying.current = false;
const pause = await pausePlayer({
playerKey,
});
if (pause) {
if (changePlayerState) {
setPlayerState(PlayerState.paused);
}
return Promise.resolve(true);
}
else {
return Promise.reject(new Error(`error in pause player for source: ${audioSource}`));
}
}
catch (error) {
return Promise.reject(error);
}
}
else {
return Promise.reject(new Error('error in pausing player: mode is not static'));
}
};
/**
 * Starts recording (live mode only) after checking the microphone permission.
 *
 * @param args - Forwarded to the recorder's startRecording.
 * @returns true on success; rejects in static mode, when permission is
 *          denied, or on failure.
 */
const startRecordingAction = async (args) => {
if (mode === 'live') {
try {
const hasPermission = await checkHasAudioRecorderPermission();
if (hasPermission === PermissionStatus.granted) {
const start = await startRecording(args);
if (!isNil(start) && start) {
setRecorderState(RecorderState.recording);
return Promise.resolve(true);
}
else {
return Promise.reject(new Error('error in start recording action'));
}
}
else {
return Promise.reject(new Error('error in start recording: audio recording permission is not granted'));
}
}
catch (err) {
return Promise.reject(err);
}
}
else {
return Promise.reject(new Error('error in start recording: mode is not live'));
}
};
/**
 * Stops recording (live mode only) and clears the live waveform.
 *
 * @returns The recorded file path (first element of stopRecording's result);
 *          rejects in static mode or when no path is returned.
 */
const stopRecordingAction = async () => {
if (mode === 'live') {
try {
const data = await stopRecording();
if (!isNil(data) && !isEmpty(data)) {
setWaveform([]);
const pathData = head(data);
if (!isNil(pathData)) {
setRecorderState(RecorderState.stopped);
return Promise.resolve(pathData);
}
else {
return Promise.reject(new Error('error in stopping recording. can not get path of recording'));
}
}
else {
return Promise.reject(new Error('error in stopping recording. can not get path of recording'));
}
}
catch (err) {
return Promise.reject(err);
}
}
else {
return Promise.reject(new Error('error in stop recording: mode is not live'));
}
};
/**
 * Pauses an active recording (live mode only).
 *
 * @returns The pauseRecording result; rejects in static mode or on failure.
 */
const pauseRecordingAction = async () => {
if (mode === 'live') {
try {
const pause = await pauseRecording();
if (!isNil(pause) && pause) {
setRecorderState(RecorderState.paused);
return Promise.resolve(pause);
}
else {
return Promise.reject(new Error('Error in pausing recording audio'));
}
}
catch (err) {
return Promise.reject(err);
}
}
else {
return Promise.reject(new Error('error in pause recording: mode is not live'));
}
};
/**
 * Resumes a paused recording (live mode only), re-checking permission first.
 *
 * @returns The resumeRecording result; rejects in static mode, when
 *          permission is denied, or on failure.
 */
const resumeRecordingAction = async () => {
if (mode === 'live') {
try {
const hasPermission = await checkHasAudioRecorderPermission();
if (hasPermission === PermissionStatus.granted) {
const resume = await resumeRecording();
if (!isNil(resume)) {
setRecorderState(RecorderState.recording);
return Promise.resolve(resume);
}
else {
return Promise.reject(new Error('Error in resume recording'));
}
}
else {
return Promise.reject(new Error('error in resume recording: audio recording permission is not granted'));
}
}
catch (err) {
return Promise.reject(err);
}
}
else {
return Promise.reject(new Error('error in resume recording: mode is not live'));
}
};
// Derive the candle count from the measured width, then (static mode) extract
// the waveform at that resolution.
useEffect(() => {
if (!isNil(viewLayout?.width)) {
const getNumberOfSamples = floor((viewLayout?.width ?? 0) / (candleWidth + candleSpace));
// when orientation changes, the layout needs to be recalculated
if (viewLayout?.x === 0 && viewLayout?.y === 0) {
isLayoutCalculated.current = false;
}
setNoOfSamples(getNumberOfSamples);
if (mode === 'static') {
getAudioWaveForm(getNumberOfSamples);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [viewLayout?.width, mode, candleWidth, candleSpace]);
/**
 * Translates the latest touch position (seekPosition) into a 0..1 fraction of
 * the waveform width and seeks the player there. While a pan is in progress
 * only the visual progress is updated; the actual seek happens when the pan
 * settles. If the player was playing, playback is restarted after seeking.
 */
const seekToPlayerAction = async () => {
if (!isNil(seekPosition)) {
if (mode === 'static') {
const seekAmount = (seekPosition?.pageX - (viewLayout?.x ?? 0)) /
(viewLayout?.width ?? 1);
const clampedSeekAmount = clamp(seekAmount, 0, 1);
if (!panMoving) {
try {
await seekToPlayer({
playerKey,
progress: clampedSeekAmount * songDuration,
});
}
catch (e) {
if (playerState === PlayerState.paused) {
// If the player is not prepared, triggering the stop will reset the player for next click. Fix blocked paused player after a call to `stopAllPlayers`
await stopPlayerAction(false);
}
}
if (playerState === PlayerState.playing) {
// NOTE(review): floating promise — a rejection here is unhandled; confirm intended.
startPlayerAction();
}
}
setCurrentProgress(clampedSeekAmount * songDuration);
}
}
};
// Re-run the seek whenever the touch position or pan state changes.
useEffect(() => {
seekToPlayerAction();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [seekPosition, panMoving, mode, songDuration]);
// Subscribe once (mount) to the player/recorder event streams; unsubscribe and
// mark the player unmounted on cleanup. Handlers filter by playerKey so
// multiple Waveform instances don't cross-talk.
useEffect(() => {
const tracePlayerState = onDidFinishPlayingAudio(async (data) => {
if (data.playerKey === playerKey) {
if (data.finishType === FinishMode.stop) {
stopPlayerAction();
}
else if (data.finishType === FinishMode.pause) {
setPlayerState(PlayerState.paused);
}
}
});
const tracePlaybackValue = onCurrentDuration(data => {
if (data.playerKey === playerKey) {
const currentAudioDuration = Number(data.currentDuration);
if (!isNaN(currentAudioDuration)) {
setCurrentProgress(currentAudioDuration);
}
else {
setCurrentProgress(0);
}
}
});
const traceRecorderWaveformValue = onCurrentRecordingWaveformData(result => {
if (mode === 'live') {
if (!isNil(onRecordingProgressChange)) {
onRecordingProgressChange(result.progress);
}
if (!isNil(result.currentDecibel)) {
setWaveform((previousWaveform) => {
// Add the new decibel to the waveform
const updatedWaveform = [
...previousWaveform,
result.currentDecibel,
];
// Limit the size of the waveform array to 'maxCandlesToRender'
return updatedWaveform.length > maxCandlesToRender
? updatedWaveform.slice(1)
: updatedWaveform;
});
if (scrollRef.current) {
scrollRef.current.scrollToEnd({ animated: true });
}
}
}
});
return () => {
tracePlayerState.remove();
tracePlaybackValue.remove();
traceRecorderWaveformValue.remove();
markPlayerAsUnmounted();
};
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
// Notify the consumer of player-state transitions.
useEffect(() => {
if (!isNil(onPlayerStateChange)) {
onPlayerStateChange(playerState);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [playerState]);
// Notify the consumer of recorder-state transitions.
useEffect(() => {
if (!isNil(onRecorderStateChange)) {
onRecorderStateChange(recorderState);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [recorderState]);
// Auto-pause while the user is scrubbing and auto-resume afterwards, but only
// when the pause was ours (isAutoPaused) — a user-initiated pause stays paused.
useEffect(() => {
if (panMoving) {
if (playerState === PlayerState.playing) {
pausePlayerAction(false);
isAutoPaused.current = true;
}
}
else {
if (playerState === PlayerState.paused && isAutoPaused.current) {
startPlayerAction();
}
isAutoPaused.current = false;
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [panMoving]);
/**
 * Measures the waveform view's window position/size (needed to convert
 * absolute touch coordinates into a seek fraction). A (0,0) origin is treated
 * as "not yet measured" so the next touch re-triggers measurement.
 */
const calculateLayout = () => {
viewRef.current?.measureInWindow((x, y, width, height) => {
setViewLayout({ x, y, width, height });
if (x !== 0 || y !== 0) {
// found the position of view in window
isLayoutCalculated.current = true;
}
});
};
// Scrub gesture handling (attached only in static mode, see render below).
// Created once and kept in a ref so the handlers are stable across renders.
const panResponder = useRef(PanResponder.create({
onStartShouldSetPanResponder: () => {
if (!isLayoutCalculated.current) {
calculateLayout();
}
return true;
},
onMoveShouldSetPanResponder: () => true,
onPanResponderGrant: () => {
setPanMoving(true);
onPanStateChange(true);
},
onPanResponderStart: () => { },
onPanResponderMove: event => {
setSeekPosition(event.nativeEvent);
},
onPanResponderEnd: () => {
onPanStateChange(false);
setPanMoving(false);
},
onPanResponderRelease: e => {
setSeekPosition(e.nativeEvent);
onPanStateChange(false);
setPanMoving(false);
},
})).current;
// Report playback progress (and duration) to the consumer.
useEffect(() => {
if (!isNil(onCurrentProgressChange)) {
onCurrentProgressChange(currentProgress, songDuration);
}
}, [currentProgress, songDuration, onCurrentProgressChange]);
/* Ensure that the audio player is released (or stopped) once the song's duration is determined,
especially if the audio is not playing immediately after loading */
useEffect(() => {
if (songDuration !== 0 &&
mode === 'static' &&
isAudioPlaying.current !== true) {
// NOTE(review): this assignment is redundant — the guard above already
// ensures isAudioPlaying.current is not true.
isAudioPlaying.current = false;
stopPlayerAction(false);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [songDuration]);
// Public imperative API. resumePlayer is an alias of startPlayer; currentState
// reflects whichever state machine matches the active mode.
useImperativeHandle(ref, () => ({
startPlayer: startPlayerAction,
stopPlayer: stopPlayerAction,
pausePlayer: pausePlayerAction,
resumePlayer: startPlayerAction,
startRecord: startRecordingAction,
pauseRecord: pauseRecordingAction,
stopRecord: stopRecordingAction,
resumeRecord: resumeRecordingAction,
currentState: mode === 'static' ? playerState : recorderState,
playerKey: audioSource || '',
}));
// Render: outer container > measured inner view (pan handlers in static mode)
// > horizontal ScrollView (scroll enabled only in live mode) > one candle per sample.
return (React.createElement(View, { style: [styles.waveformContainer, containerStyle] },
React.createElement(View, { ref: viewRef, style: styles.waveformInnerContainer, onLayout: calculateLayout, ...(mode === 'static' ? panResponder.panHandlers : {}) },
React.createElement(ScrollView, { horizontal: true, showsHorizontalScrollIndicator: showsHorizontalScrollIndicator, ref: scrollRef, style: styles.scrollContainer, scrollEnabled: mode === 'live' }, waveform?.map?.((amplitude, indexCandle) => (React.createElement(WaveformCandle, { key: indexCandle, index: indexCandle, amplitude: amplitude, parentViewLayout: viewLayout, candleWidth,
candleSpace,
noOfSamples,
songDuration,
currentProgress,
waveColor,
scrubColor,
candleHeightScale })))))));
});
//# sourceMappingURL=Waveform.js.map