// @revrag-ai/embed-react-native (EmbedAudioWave.js, compiled output)
// A React Native library for integrating AI-powered voice agents into mobile
// applications, featuring real-time voice communication, intelligent speech
// processing, customizable UI components, and comprehensive event handling.
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.WaveformVisualizer = void 0;
var _react = require("react");
var _reactNative = require("react-native");
var _jsxRuntime = require("react/jsx-runtime");
// This module defines a custom hook that generates and animates a waveform
// visualization from the audio activity in a LiveKit room. The hook takes a
// room ref as input, uses the LiveKit SDK to poll the audio level of each
// remote participant, and exposes a set of animated values that drive the
// bar heights of the waveform in a React Native component.
// React Native compatible waveform simulator
const useReactNativeAudioWaveform = roomRef => {
const [isAudioActive, setIsAudioActive] = (0, _react.useState)(false);
const intervalRef = (0, _react.useRef)(null);
const [currentHeights, setCurrentHeights] = (0, _react.useState)(Array(10).fill(0));
// Create animated values for each bar
const barCount = 10;
const animatedBars = (0, _react.useRef)(Array(barCount).fill(0).map(() => new _reactNative.Animated.Value(0))).current;
(0, _react.useEffect)(() => {
// Check if there's an active room connection AND if agent is talking
const checkAudioActivity = () => {
const room = roomRef.current;
if (room?.state !== 'connected') {
setIsAudioActive(false);
return;
}
// Check if any remote participant is currently speaking
let isAgentSpeaking = false;
// Loop through all remote participants
room.remoteParticipants.forEach(participant => {
const trackPublications = Array.from(participant.getTrackPublications().values());
const audioPublication = trackPublications.find(pub => pub.track?.kind === 'audio');
// Only consider participants with a subscribed, unmuted audio track
if (audioPublication?.track && !audioPublication?.isMuted) {
// Use the audio level to detect actual speech, not just track presence
const audioLevel = participant.audioLevel || 0;
if (audioLevel > 0.05) {
// Threshold for detecting speech
isAgentSpeaking = true;
}
}
});
setIsAudioActive(isAgentSpeaking);
};
// Initial check
checkAudioActivity();
// Set up periodic checking for room state changes
intervalRef.current = setInterval(checkAudioActivity, 500);
// Clean up on unmount
return () => {
if (intervalRef.current) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
setIsAudioActive(false);
};
}, [roomRef]);
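// Polling every 500 ms keeps the detection simple and React Native friendly.
// An event-driven alternative (a sketch, assuming RoomEvent is imported from
// livekit-client) would be:
//   room.on(RoomEvent.ActiveSpeakersChanged, speakers => setIsAudioActive(speakers.length > 0));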
// Continuous smooth animation
(0, _react.useEffect)(() => {
const animateWaveform = () => {
// Generate smooth waveform data - stop animation completely when not active
const targetHeights = isAudioActive ? Array(barCount).fill(0).map((_, index) => {
const timeOffset = Date.now() / 800 + index * 0.3;
const baseHeight = 0.5;
const amplitude = 0.5;
const height = baseHeight + amplitude * Math.abs(Math.sin(timeOffset));
return Math.max(0.1, Math.min(1.0, height));
}) : Array(barCount).fill(0); // Freeze the waveform entirely while the agent is silent
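// Each active height is baseHeight + amplitude * |sin(Date.now() / 800 + 0.3 * index)|,
// clamped to [0.1, 1.0]; the per-bar phase offset produces a rolling wave effect.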
// Update current heights for conditional logic
setCurrentHeights(targetHeights);
const animations = animatedBars.map((animatedValue, index) => {
const targetHeight = targetHeights[index] || 0;
return _reactNative.Animated.timing(animatedValue, {
toValue: targetHeight,
duration: isAudioActive ? 400 : 600,
// Slower fade out when going inactive
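// height is a layout property, which the native driver cannot animate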
useNativeDriver: false
});
});
_reactNative.Animated.parallel(animations).start();
};
// Start animation immediately and repeat
animateWaveform();
const animationInterval = setInterval(animateWaveform, 300);
return () => {
clearInterval(animationInterval);
};
}, [isAudioActive, animatedBars]);
return {
animatedBars,
currentHeights,
isActive: isAudioActive
};
};
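// The hook can also drive a custom bar layout directly, e.g. (sketch):
//   const { animatedBars, currentHeights, isActive } = useReactNativeAudioWaveform(roomRef);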
const WaveformVisualizer = ({
roomRef
}) => {
const {
animatedBars,
currentHeights
} = useReactNativeAudioWaveform(roomRef);
return /*#__PURE__*/(0, _jsxRuntime.jsx)(_reactNative.View, {
style: styles.container,
children: animatedBars.map((animatedHeight, idx) => {
// Use the tracked height values instead of trying to access animated value directly
const currentHeightValue = currentHeights[idx] || 0.1;
// Map the tracked height into a discrete band; bars in the 0.4-0.5 range
// select the "wide" style below (the band value could also drive width,
// color, or other properties)
let conditionalValue;
if (currentHeightValue >= 0.4 && currentHeightValue <= 0.5) {
conditionalValue = 5;
} else {
conditionalValue = 1;
}
// The two bars at each edge are rendered semi-transparent
const isEdgeBar = idx <= 1 || idx >= 8;
const barStyle = [styles.bar, conditionalValue === 5 ? styles.barWide : styles.barNormal, isEdgeBar ? styles.barEdge : styles.barCenter];
return /*#__PURE__*/(0, _jsxRuntime.jsx)(_reactNative.Animated.View, {
style: [barStyle, {
height: animatedHeight.interpolate({
inputRange: [0, 1],
outputRange: [0, 25]
})
}]
}, idx);
})
});
};
exports.WaveformVisualizer = WaveformVisualizer;
const styles = _reactNative.StyleSheet.create({
container: {
flexDirection: 'row',
alignItems: 'center',
height: '100%',
alignSelf: 'center',
justifyContent: 'center',
zIndex: 1000
},
bar: {
borderRadius: 100,
alignSelf: 'center',
margin: 1.5
},
barNormal: {
width: 4
},
barWide: {
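// Currently identical to barNormal; adjust to visually emphasize mid-height bars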
width: 4
},
barEdge: {
backgroundColor: 'rgba(255, 255, 255, 0.5)'
},
barCenter: {
backgroundColor: 'white'
}
});
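// Usage sketch, assuming the package re-exports WaveformVisualizer from its
// root and that the LiveKit room is created and connected elsewhere (`wsUrl`
// and `token` below are placeholders):
//
//   import { useRef } from 'react';
//   import { Room } from 'livekit-client';
//   import { WaveformVisualizer } from '@revrag-ai/embed-react-native';
//
//   const CallScreen = () => {
//     const roomRef = useRef(new Room());
//     // elsewhere: await roomRef.current.connect(wsUrl, token);
//     return <WaveformVisualizer roomRef={roomRef} />;
//   };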
//# sourceMappingURL=EmbedAudioWave.js.map