// @mustafakarali/react-native-audio-stream
// React Native Audio Stream - High-performance audio streaming for React Native
// (compiled JavaScript output, 771 lines / 24.6 kB)
;
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.AudioStream = void 0;
var _reactNative = require("react-native");
var _types = require("./types");
var _logger = require("./logger");
// Support both old and new architecture
const RNAudioStream = _reactNative.NativeModules.RNAudioStream;
/**
 * High-level JavaScript facade over the RNAudioStream native module.
 *
 * Implemented as a process-wide singleton (see {@link AudioStream.getInstance}).
 * Bridges native events (onStreamStart, onStreamProgress, ...) to JS callbacks
 * registered via addEventListener(), and exposes playback, buffering,
 * equalizer, cache, audio-focus and realtime-streaming APIs.
 */
class AudioStream {
  // Process-wide singleton instance, created lazily by getInstance().
  static instance = null;
  // callbackKey -> Set<listener>, populated via addEventListener().
  eventListeners = new Map();
  // Native EventSubscription handles, kept so they can be removed on destroy().
  nativeEventSubscriptions = [];
  // Effective configuration; merged with user config in initialize().
  config = _types.DEFAULT_CONFIG;
  // URL of the currently playing stream, or null when idle.
  currentUrl = null;
  isInitialized = false;

  /** @throws {Error} when the native module is not linked. */
  constructor() {
    if (!RNAudioStream) {
      throw new Error('RNAudioStream native module is not available. Please ensure the library is properly linked.');
    }
    this.eventEmitter = new _reactNative.NativeEventEmitter(RNAudioStream);
    this.setupNativeEventListeners();
  }

  /** Returns the singleton instance, creating it on first use. */
  static getInstance() {
    if (!AudioStream.instance) {
      AudioStream.instance = new AudioStream();
    }
    return AudioStream.instance;
  }

  /**
   * Encodes raw bytes to base64 without spreading the whole array as call
   * arguments. The previous inline `btoa(String.fromCharCode(...bytes))`
   * pattern throws a RangeError ("Maximum call stack size exceeded") once a
   * chunk exceeds the engine's argument limit; converting in 32 KiB slices
   * avoids that while producing identical output.
   *
   * @param {Uint8Array} bytes - Raw audio bytes.
   * @returns {string} Base64-encoded string.
   */
  static bytesToBase64(bytes) {
    const SLICE = 0x8000; // 32 KiB keeps apply() well under the argument limit
    let binary = '';
    for (let i = 0; i < bytes.length; i += SLICE) {
      binary += String.fromCharCode.apply(null, bytes.subarray(i, i + SLICE));
    }
    // @ts-ignore - btoa exists in React Native runtime
    return btoa(binary);
  }

  /**
   * Subscribes to every native event and fans each one out to the JS
   * listeners registered under the corresponding callback key.
   *
   * Idempotent: any previously registered native subscriptions are removed
   * first. This fixes duplicate event delivery — playFromData() and
   * startRealtimeStream() invoke this method again after the constructor
   * already did, and without the teardown each call stacked a second set of
   * subscriptions, making every callback fire multiple times.
   */
  setupNativeEventListeners() {
    this.nativeEventSubscriptions.forEach(subscription => subscription.remove());
    this.nativeEventSubscriptions = [];
    // [nativeEventName, callbackKey, payloadTransformer]
    const eventMappings = [['onStreamStart', 'onStart', () => undefined], ['onStreamBuffer', 'onBuffer', data => data.isBuffering], ['onStreamProgress', 'onProgress', data => ({
      currentTime: data.currentTime,
      duration: data.duration,
      percentage: data.percentage
    })], ['onStreamError', 'onError', data => ({
      code: data.code,
      message: data.message,
      details: data.details,
      recoverable: data.recoverable
    })], ['onStreamEnd', 'onEnd', () => undefined], ['onStreamStateChange', 'onStateChange', data => data.state], ['onStreamMetadata', 'onMetadata', data => data.metadata], ['onStreamStats', 'onStats', data => data.stats], ['onNetworkStateChange', 'onNetworkStateChange', data => ({
      isConnected: data.isConnected,
      type: data.type || undefined
    })]];
    eventMappings.forEach(([nativeEvent, callbackKey, transformer]) => {
      const subscription = this.eventEmitter.addListener(nativeEvent, data => {
        _logger.logger.verbose(`Native event received: ${nativeEvent}`, data);
        const listeners = this.eventListeners.get(callbackKey);
        if (listeners) {
          const transformedData = transformer(data);
          listeners.forEach(listener => {
            // One faulty listener must not prevent the others from running.
            try {
              listener(transformedData);
            } catch (error) {
              _logger.logger.error(`Error in event listener ${callbackKey}:`, error);
            }
          });
        }
      });
      this.nativeEventSubscriptions.push(subscription);
    });
  }

  /**
   * Initializes the native player. Safe to call more than once; subsequent
   * calls are no-ops (with a warning).
   *
   * @param {object} [config] - Partial config merged over DEFAULT_CONFIG.
   * @throws AudioStreamError with code INITIALIZATION_ERROR on failure.
   */
  async initialize(config) {
    if (this.isInitialized) {
      _logger.logger.warn('AudioStream is already initialized, skipping...');
      return;
    }
    try {
      _logger.logger.info('Initializing AudioStream');
      this.config = {
        ..._types.DEFAULT_CONFIG,
        ...config
      };
      if (config?.logLevel !== undefined) {
        _logger.logger.setLogLevel(config.logLevel);
      }
      await RNAudioStream.initialize(this.config);
      this.isInitialized = true;
      _logger.logger.info('AudioStream initialized successfully');
    } catch (error) {
      _logger.logger.error('Failed to initialize AudioStream:', error);
      throw this.createError(_types.ErrorCodes.INITIALIZATION_ERROR, 'Failed to initialize audio stream', error);
    }
  }

  /**
   * Tears down the native player, all native subscriptions and all JS
   * listeners. initialize() must be called again before further use.
   */
  async destroy() {
    try {
      _logger.logger.info('Destroying AudioStream');
      // Clean up event subscriptions
      this.nativeEventSubscriptions.forEach(subscription => subscription.remove());
      this.nativeEventSubscriptions = [];
      this.eventListeners.clear();
      await RNAudioStream.destroy();
      this.isInitialized = false;
      this.currentUrl = null;
      _logger.logger.info('AudioStream destroyed successfully');
    } catch (error) {
      _logger.logger.error('Failed to destroy AudioStream:', error);
      throw error;
    }
  }

  /**
   * Starts streaming the given URL.
   * @param {string} url - Stream URL.
   * @param {object} [config] - Per-stream overrides merged over this.config.
   * @throws AudioStreamError with code NETWORK_ERROR on failure.
   */
  async startStream(url, config) {
    this.ensureInitialized();
    try {
      _logger.logger.info(`Starting stream: ${url}`);
      const streamConfig = {
        ...this.config,
        ...config
      };
      _logger.logger.time('startStream');
      await RNAudioStream.startStream(url, streamConfig);
      _logger.logger.timeEnd('startStream');
      this.currentUrl = url;
      _logger.logger.info('Stream started successfully');
    } catch (error) {
      _logger.logger.error('Failed to start stream:', error);
      throw this.createError(_types.ErrorCodes.NETWORK_ERROR, 'Failed to start stream', error);
    }
  }

  /** Stops the current stream and clears all registered JS listeners. */
  async stopStream() {
    this.ensureInitialized();
    try {
      _logger.logger.info('Stopping stream');
      await RNAudioStream.stopStream();
      this.currentUrl = null;
      this.removeAllEventListeners();
    } catch (error) {
      _logger.logger.error('Failed to stop stream:', error);
      throw error;
    }
  }

  /** Resumes/starts playback. @throws AudioStreamError (INVALID_STATE). */
  async play() {
    this.ensureInitialized();
    try {
      _logger.logger.debug('Playing');
      await RNAudioStream.play();
      _logger.logger.logPlaybackEvent('play');
    } catch (error) {
      _logger.logger.error('Failed to play:', error);
      throw this.createError(_types.ErrorCodes.INVALID_STATE, 'Failed to play', error);
    }
  }

  /** Pauses playback. @throws AudioStreamError (INVALID_STATE). */
  async pause() {
    this.ensureInitialized();
    try {
      _logger.logger.debug('Pausing');
      await RNAudioStream.pause();
      _logger.logger.logPlaybackEvent('pause');
    } catch (error) {
      _logger.logger.error('Failed to pause:', error);
      throw this.createError(_types.ErrorCodes.INVALID_STATE, 'Failed to pause', error);
    }
  }

  /** Stops playback (keeps listeners registered, unlike stopStream()). */
  async stop() {
    this.ensureInitialized();
    try {
      _logger.logger.debug('Stopping');
      await RNAudioStream.stop();
      _logger.logger.logPlaybackEvent('stop');
    } catch (error) {
      _logger.logger.error('Failed to stop:', error);
      throw error;
    }
  }

  /** Cancels an in-flight stream and clears currentUrl. */
  async cancelStream() {
    this.ensureInitialized();
    try {
      _logger.logger.info('Cancelling stream');
      await RNAudioStream.cancelStream();
      this.currentUrl = null;
      _logger.logger.info('Stream cancelled successfully');
    } catch (error) {
      _logger.logger.error('Failed to cancel stream:', error);
      throw error;
    }
  }

  /**
   * Seeks to an absolute position.
   * @param {number} position - Target position in seconds (>= 0).
   * @throws {Error} when position is negative.
   * @throws AudioStreamError (INVALID_STATE) on native failure.
   */
  async seek(position) {
    this.ensureInitialized();
    if (position < 0) {
      throw new Error('Seek position cannot be negative');
    }
    try {
      _logger.logger.debug(`Seeking to ${position}s`);
      await RNAudioStream.seek(position);
      _logger.logger.logPlaybackEvent('seek', {
        position
      });
    } catch (error) {
      _logger.logger.error('Failed to seek:', error);
      throw this.createError(_types.ErrorCodes.INVALID_STATE, 'Failed to seek', error);
    }
  }

  /**
   * Sets the playback volume.
   * @param {number} volume - Linear gain in [0.0, 1.0].
   * @throws {Error} when volume is out of range.
   */
  async setVolume(volume) {
    this.ensureInitialized();
    if (volume < 0 || volume > 1) {
      throw new Error('Volume must be between 0.0 and 1.0');
    }
    try {
      _logger.logger.debug(`Setting volume to ${volume}`);
      await RNAudioStream.setVolume(volume);
    } catch (error) {
      _logger.logger.error('Failed to set volume:', error);
      throw error;
    }
  }

  /** @returns {Promise<number>} Current volume reported by the native side. */
  async getVolume() {
    this.ensureInitialized();
    try {
      return await RNAudioStream.getVolume();
    } catch (error) {
      _logger.logger.error('Failed to get volume:', error);
      throw error;
    }
  }

  /**
   * Sets the playback rate.
   * @param {number} rate - Speed multiplier in [0.5, 2.0].
   * @throws {Error} when rate is out of range.
   */
  async setPlaybackRate(rate) {
    this.ensureInitialized();
    if (rate < 0.5 || rate > 2.0) {
      throw new Error('Playback rate must be between 0.5 and 2.0');
    }
    try {
      _logger.logger.debug(`Setting playback rate to ${rate}`);
      await RNAudioStream.setPlaybackRate(rate);
      _logger.logger.logPlaybackEvent('playbackRateChange', {
        rate
      });
    } catch (error) {
      _logger.logger.error('Failed to set playback rate:', error);
      throw error;
    }
  }

  /** @returns {Promise<number>} Current playback rate. */
  async getPlaybackRate() {
    this.ensureInitialized();
    try {
      return await RNAudioStream.getPlaybackRate();
    } catch (error) {
      _logger.logger.error('Failed to get playback rate:', error);
      throw error;
    }
  }

  /** @returns {Promise<*>} Current native playback state. */
  async getState() {
    this.ensureInitialized();
    try {
      return await RNAudioStream.getState();
    } catch (error) {
      _logger.logger.error('Failed to get state:', error);
      throw error;
    }
  }

  /** @returns {Promise<number>} Current playback position in seconds. */
  async getCurrentTime() {
    this.ensureInitialized();
    try {
      return await RNAudioStream.getCurrentTime();
    } catch (error) {
      _logger.logger.error('Failed to get current time:', error);
      throw error;
    }
  }

  /** @returns {Promise<number>} Total duration in seconds. */
  async getDuration() {
    this.ensureInitialized();
    try {
      return await RNAudioStream.getDuration();
    } catch (error) {
      _logger.logger.error('Failed to get duration:', error);
      throw error;
    }
  }

  /** @returns {Promise<number>} Buffered percentage of the stream. */
  async getBufferedPercentage() {
    this.ensureInitialized();
    try {
      return await RNAudioStream.getBufferedPercentage();
    } catch (error) {
      _logger.logger.error('Failed to get buffered percentage:', error);
      throw error;
    }
  }

  /** @returns {Promise<*>} Playback/network statistics from the native side. */
  async getStats() {
    this.ensureInitialized();
    try {
      return await RNAudioStream.getStats();
    } catch (error) {
      _logger.logger.error('Failed to get stats:', error);
      throw error;
    }
  }

  /** @returns {Promise<*>} Current stream metadata (title, artist, ...). */
  async getMetadata() {
    this.ensureInitialized();
    try {
      return await RNAudioStream.getMetadata();
    } catch (error) {
      _logger.logger.error('Failed to get metadata:', error);
      throw error;
    }
  }

  /** @param {Array} bands - Equalizer band gains to apply. */
  async setEqualizer(bands) {
    this.ensureInitialized();
    try {
      _logger.logger.debug('Setting equalizer', bands);
      await RNAudioStream.setEqualizer(bands);
    } catch (error) {
      _logger.logger.error('Failed to set equalizer:', error);
      throw error;
    }
  }

  /** @returns {Promise<*>} Current equalizer settings. */
  async getEqualizer() {
    this.ensureInitialized();
    try {
      return await RNAudioStream.getEqualizer();
    } catch (error) {
      _logger.logger.error('Failed to get equalizer:', error);
      throw error;
    }
  }

  /**
   * Applies an equalizer preset.
   * @param {number|object} preset - Index into EQUALIZER_PRESETS, or a
   *   preset object with `name` and `bands`.
   * @throws {Error} when a numeric index is out of range.
   */
  async applyEqualizerPreset(preset) {
    this.ensureInitialized();
    try {
      let equalizerPreset;
      if (typeof preset === 'number') {
        // If preset is a number (index), get the preset from the list
        if (preset < 0 || preset >= _types.EQUALIZER_PRESETS.length) {
          throw new Error(`Invalid preset index: ${preset}. Must be between 0 and ${_types.EQUALIZER_PRESETS.length - 1}`);
        }
        equalizerPreset = _types.EQUALIZER_PRESETS[preset];
        _logger.logger.info(`Applying equalizer preset by index: ${preset} (${equalizerPreset.name})`);
      } else {
        // If preset is an object
        equalizerPreset = preset;
        _logger.logger.info(`Applying equalizer preset: ${equalizerPreset.name}`);
      }
      await this.setEqualizer(equalizerPreset.bands);
    } catch (error) {
      _logger.logger.error('Failed to apply equalizer preset:', error);
      throw error;
    }
  }

  /** @returns {Promise<Array>} The static list of built-in presets. */
  async getEqualizerPresets() {
    return _types.EQUALIZER_PRESETS;
  }

  /**
   * Registers a JS callback for a mapped native event.
   * @param {string} event - Callback key, e.g. 'onProgress', 'onError'.
   * @param {Function} callback - Listener; silently ignored when falsy.
   */
  addEventListener(event, callback) {
    if (!callback) return;
    if (!this.eventListeners.has(event)) {
      this.eventListeners.set(event, new Set());
    }
    this.eventListeners.get(event).add(callback);
    _logger.logger.verbose(`Added event listener for ${event}`);
  }

  /** Unregisters a previously added callback for the given event. */
  removeEventListener(event, callback) {
    if (!callback) return;
    const listeners = this.eventListeners.get(event);
    if (listeners) {
      listeners.delete(callback);
      _logger.logger.verbose(`Removed event listener for ${event}`);
    }
  }

  /** Drops every registered JS listener (native subscriptions stay alive). */
  removeAllEventListeners() {
    this.eventListeners.clear();
    _logger.logger.verbose('Removed all event listeners');
  }

  /** Clears the native cache. @throws AudioStreamError (CACHE_ERROR). */
  async clearCache() {
    this.ensureInitialized();
    try {
      _logger.logger.info('Clearing cache');
      await RNAudioStream.clearCache();
      _logger.logger.info('Cache cleared successfully');
    } catch (error) {
      _logger.logger.error('Failed to clear cache:', error);
      throw this.createError(_types.ErrorCodes.CACHE_ERROR, 'Failed to clear cache', error);
    }
  }

  /** @returns {Promise<number>} Current cache size. */
  async getCacheSize() {
    this.ensureInitialized();
    try {
      return await RNAudioStream.getCacheSize();
    } catch (error) {
      _logger.logger.error('Failed to get cache size:', error);
      throw error;
    }
  }

  /**
   * Preloads part of a stream into the cache.
   * @param {string} url - Stream URL.
   * @param {number} duration - Seconds of audio to prefetch.
   * @throws AudioStreamError (NETWORK_ERROR) on failure.
   */
  async preloadStream(url, duration) {
    this.ensureInitialized();
    try {
      _logger.logger.info(`Preloading stream: ${url}, duration: ${duration}s`);
      await RNAudioStream.preloadStream(url, duration);
      _logger.logger.info('Stream preloaded successfully');
    } catch (error) {
      _logger.logger.error('Failed to preload stream:', error);
      throw this.createError(_types.ErrorCodes.NETWORK_ERROR, 'Failed to preload stream', error);
    }
  }

  /** @param {*} priority - Network priority to forward to the native side. */
  async setNetworkPriority(priority) {
    this.ensureInitialized();
    try {
      _logger.logger.debug(`Setting network priority to ${priority}`);
      await RNAudioStream.setNetworkPriority(priority);
    } catch (error) {
      _logger.logger.error('Failed to set network priority:', error);
      throw error;
    }
  }

  /** @returns {Promise<boolean>} Whether audio focus was granted. */
  async requestAudioFocus() {
    this.ensureInitialized();
    try {
      _logger.logger.debug('Requesting audio focus');
      const granted = await RNAudioStream.requestAudioFocus();
      _logger.logger.debug(`Audio focus ${granted ? 'granted' : 'denied'}`);
      return granted;
    } catch (error) {
      _logger.logger.error('Failed to request audio focus:', error);
      throw error;
    }
  }

  /** Releases previously acquired audio focus. */
  async abandonAudioFocus() {
    this.ensureInitialized();
    try {
      _logger.logger.debug('Abandoning audio focus');
      await RNAudioStream.abandonAudioFocus();
    } catch (error) {
      _logger.logger.error('Failed to abandon audio focus:', error);
      throw error;
    }
  }

  /** @param {*} category - Audio session category (iOS concept). */
  async setAudioSessionCategory(category) {
    this.ensureInitialized();
    try {
      _logger.logger.debug(`Setting audio session category to ${category}`);
      await RNAudioStream.setAudioSessionCategory(category);
    } catch (error) {
      _logger.logger.error('Failed to set audio session category:', error);
      throw error;
    }
  }

  // iOS 26 Features

  /** Shows the native input picker. @throws AudioStreamError (UNSUPPORTED_FORMAT). */
  async showInputPicker() {
    this.ensureInitialized();
    try {
      _logger.logger.info('Showing input picker');
      await RNAudioStream.showInputPicker();
      _logger.logger.info('Input picker shown successfully');
    } catch (error) {
      _logger.logger.error('Failed to show input picker:', error);
      throw this.createError(_types.ErrorCodes.UNSUPPORTED_FORMAT, 'Failed to show input picker', error);
    }
  }

  /** @returns {Promise<Array>} Available audio input devices. */
  async getAvailableInputs() {
    this.ensureInitialized();
    try {
      _logger.logger.debug('Getting available inputs');
      const inputs = await RNAudioStream.getAvailableInputs();
      _logger.logger.debug(`Found ${inputs.length} available inputs`);
      return inputs;
    } catch (error) {
      _logger.logger.error('Failed to get available inputs:', error);
      throw error;
    }
  }

  /** @param {boolean} enable - Toggles enhanced buffering. */
  async enableEnhancedBuffering(enable) {
    this.ensureInitialized();
    try {
      _logger.logger.info(`${enable ? 'Enabling' : 'Disabling'} enhanced buffering`);
      await RNAudioStream.enableEnhancedBuffering(enable);
      _logger.logger.info(`Enhanced buffering ${enable ? 'enabled' : 'disabled'} successfully`);
    } catch (error) {
      _logger.logger.error('Failed to set enhanced buffering:', error);
      throw error;
    }
  }

  /** @param {boolean} enable - Toggles spatial audio. */
  async enableSpatialAudio(enable) {
    this.ensureInitialized();
    try {
      _logger.logger.info(`${enable ? 'Enabling' : 'Disabling'} spatial audio`);
      await RNAudioStream.enableSpatialAudio(enable);
      _logger.logger.info(`Spatial audio ${enable ? 'enabled' : 'disabled'} successfully`);
    } catch (error) {
      _logger.logger.error('Failed to set spatial audio:', error);
      throw error;
    }
  }

  /** @param {boolean} enable - Toggles the queue-based player backend. */
  async useQueuePlayer(enable) {
    this.ensureInitialized();
    try {
      _logger.logger.info(`${enable ? 'Using' : 'Not using'} queue player`);
      await RNAudioStream.useQueuePlayer(enable);
      _logger.logger.info(`Queue player ${enable ? 'enabled' : 'disabled'} successfully`);
    } catch (error) {
      _logger.logger.error('Failed to set queue player:', error);
      throw error;
    }
  }

  /** @returns {Promise<number>} Native view tag for the route picker view. */
  async createRoutePickerView() {
    this.ensureInitialized();
    try {
      _logger.logger.info('Creating route picker view');
      const viewTag = await RNAudioStream.createRoutePickerView();
      _logger.logger.info(`Route picker view created with tag: ${viewTag}`);
      return viewTag;
    } catch (error) {
      _logger.logger.error('Failed to create route picker view:', error);
      throw error;
    }
  }

  /**
   * Plays audio from an in-memory base64 payload.
   * @param {string} base64Data - Base64-encoded audio bytes.
   * @param {object} [config] - Overrides merged over this.config.
   */
  async playFromData(base64Data, config) {
    this.ensureInitialized();
    try {
      const mergedConfig = {
        ...this.config,
        ...config
      };
      _logger.logger.info('Playing from binary data');
      _logger.logger.debug('Data size:', base64Data.length, 'characters');
      await RNAudioStream.playFromData(base64Data, mergedConfig);
      // Re-arm event listeners; safe now that setup is idempotent (the
      // constructor-registered subscriptions are replaced, not duplicated).
      this.setupNativeEventListeners();
    } catch (error) {
      _logger.logger.error('Failed to play from data:', error);
      throw error;
    }
  }

  /** Appends a base64 chunk to the current playback buffer. */
  async appendToBuffer(base64Data) {
    this.ensureInitialized();
    try {
      _logger.logger.debug('Appending to buffer, size:', base64Data.length, 'characters');
      await RNAudioStream.appendToBuffer(base64Data);
    } catch (error) {
      _logger.logger.error('Failed to append to buffer:', error);
      throw error;
    }
  }

  // Real-time streaming methods (Android only)

  /**
   * Starts a realtime (push-based) stream. Android only.
   * @throws {Error} on non-Android platforms.
   */
  async startRealtimeStream(config) {
    this.ensureInitialized();
    if (_reactNative.Platform.OS !== 'android') {
      throw new Error('Real-time streaming is only available on Android');
    }
    try {
      const mergedConfig = {
        ...this.config,
        ...config
      };
      _logger.logger.info('Starting real-time stream');
      await RNAudioStream.startRealtimeStream(mergedConfig);
      // Re-arm event listeners; idempotent, so no duplicate subscriptions.
      this.setupNativeEventListeners();
    } catch (error) {
      _logger.logger.error('Failed to start real-time stream:', error);
      throw error;
    }
  }

  /** Pushes a base64 chunk into the realtime stream. Android only. */
  async appendRealtimeData(base64Data) {
    this.ensureInitialized();
    if (_reactNative.Platform.OS !== 'android') {
      throw new Error('Real-time streaming is only available on Android');
    }
    try {
      _logger.logger.debug('Appending real-time data, size:', base64Data.length, 'characters');
      await RNAudioStream.appendRealtimeData(base64Data);
    } catch (error) {
      _logger.logger.error('Failed to append real-time data:', error);
      throw error;
    }
  }

  /** Signals end-of-stream for the realtime stream. Android only. */
  async completeRealtimeStream() {
    this.ensureInitialized();
    if (_reactNative.Platform.OS !== 'android') {
      throw new Error('Real-time streaming is only available on Android');
    }
    try {
      _logger.logger.info('Completing real-time stream');
      await RNAudioStream.completeRealtimeStream();
    } catch (error) {
      _logger.logger.error('Failed to complete real-time stream:', error);
      throw error;
    }
  }

  /** @returns {Promise<*>} Realtime streaming statistics. Android only. */
  async getStreamingStats() {
    this.ensureInitialized();
    if (_reactNative.Platform.OS !== 'android') {
      throw new Error('Real-time streaming is only available on Android');
    }
    try {
      return await RNAudioStream.getStreamingStats();
    } catch (error) {
      _logger.logger.error('Failed to get streaming stats:', error);
      throw error;
    }
  }

  /** @throws {Error} when initialize() has not completed successfully. */
  ensureInitialized() {
    if (!this.isInitialized) {
      throw new Error('AudioStream is not initialized. Call initialize() first.');
    }
  }

  /**
   * Builds a structured AudioStreamError object.
   * NETWORK_ERROR and BUFFER_TIMEOUT are the only codes marked recoverable.
   */
  createError(code, message, originalError) {
    return {
      code,
      message,
      details: originalError,
      recoverable: code === _types.ErrorCodes.NETWORK_ERROR || code === _types.ErrorCodes.BUFFER_TIMEOUT
    };
  }

  /**
   * Start memory-based streaming (Android only)
   * @platform android
   */
  static async startMemoryStream(config) {
    if (_reactNative.Platform.OS !== 'android') {
      throw new Error('Memory streaming is only available on Android');
    }
    return RNAudioStream.startMemoryStream(config || {
      autoPlay: true
    });
  }

  /**
   * Append audio chunk to memory stream (Android only)
   * @platform android
   */
  static async appendToMemoryStream(base64Data) {
    if (_reactNative.Platform.OS !== 'android') {
      throw new Error('Memory streaming is only available on Android');
    }
    return RNAudioStream.appendToMemoryStream(base64Data);
  }

  /**
   * Complete memory stream (Android only)
   * @platform android
   */
  static async completeMemoryStream() {
    if (_reactNative.Platform.OS !== 'android') {
      throw new Error('Memory streaming is only available on Android');
    }
    return RNAudioStream.completeMemoryStream();
  }

  /**
   * Stream audio from URL with optimal settings
   * Automatically handles Android/iOS differences
   * @param {string} url - Audio URL to fetch and stream.
   * @param {Function} [onChunk] - Called with each raw Uint8Array chunk.
   */
  static async streamFromURL(url, onChunk) {
    const response = await fetch(url);
    // @ts-ignore - response.body exists in React Native runtime
    if (!response.body) {
      throw new Error('Response body is empty');
    }
    // @ts-ignore - ReadableStream exists in React Native runtime
    const reader = response.body.getReader();
    const instance = AudioStream.getInstance();
    await instance.initialize();
    if (_reactNative.Platform.OS === 'android') {
      // Android: Use memory streaming for better performance
      await this.startMemoryStream({
        autoPlay: true
      });
      while (true) {
        const {
          done,
          value
        } = await reader.read();
        if (done) break;
        if (onChunk) onChunk(value);
        // Chunk-safe base64 conversion (large chunks would overflow the
        // argument list with the naive fromCharCode spread).
        const base64 = AudioStream.bytesToBase64(value);
        await this.appendToMemoryStream(base64);
      }
      await this.completeMemoryStream();
    } else {
      // iOS: Use appendToBuffer
      while (true) {
        const {
          done,
          value
        } = await reader.read();
        if (done) break;
        if (onChunk) onChunk(value);
        const base64 = AudioStream.bytesToBase64(value);
        await instance.appendToBuffer(base64);
      }
    }
  }

  /**
   * Simple Text-to-Speech streaming with ElevenLabs
   * @param {string} text - Text to synthesize.
   * @param {object} config - { apiKey, voiceId, model?, onProgress? }.
   */
  static async streamTTS(text, config) {
    const {
      apiKey,
      voiceId,
      model = 'eleven_multilingual_v2',
      onProgress
    } = config;
    onProgress?.('Connecting...');
    const response = await fetch(`https://api.elevenlabs.io/v1/text-to-speech/${voiceId}/stream`, {
      method: 'POST',
      headers: {
        'xi-api-key': apiKey,
        'Content-Type': 'application/json'
      },
      body: JSON.stringify({
        text,
        model_id: model,
        voice_settings: {
          stability: 0.5,
          similarity_boost: 0.5
        },
        optimize_streaming_latency: 4
      })
    });
    if (!response.ok) {
      throw new Error(`TTS API error: ${response.status}`);
    }
    onProgress?.('Streaming...');
    // @ts-ignore - response.body exists in React Native runtime
    if (!response.body) {
      throw new Error('Response body is empty');
    }
    // @ts-ignore - ReadableStream exists in React Native runtime
    const reader = response.body.getReader();
    const instance = AudioStream.getInstance();
    await instance.initialize();
    if (_reactNative.Platform.OS === 'android') {
      await this.startMemoryStream({
        autoPlay: true
      });
      while (true) {
        const {
          done,
          value
        } = await reader.read();
        if (done) break;
        onProgress?.(`Received ${value.length} bytes`);
        // Chunk-safe base64 conversion.
        const base64 = AudioStream.bytesToBase64(value);
        await this.appendToMemoryStream(base64);
      }
      await this.completeMemoryStream();
    } else {
      while (true) {
        const {
          done,
          value
        } = await reader.read();
        if (done) break;
        onProgress?.(`Received ${value.length} bytes`);
        const base64 = AudioStream.bytesToBase64(value);
        await instance.appendToBuffer(base64);
      }
    }
    onProgress?.('Completed');
  }
}
exports.AudioStream = AudioStream;
//# sourceMappingURL=AudioStream.js.map