UNPKG

win-stream-audio

Version: (not shown in this extract)

🎧 Stream Windows system audio to Android devices over WiFi with professional audio controls, EQ, pitch shifting, and effects

463 lines (369 loc) 16.5 kB
/**
 * AudioPilot - Audio Processing Module
 * Handles EQ, pitch shifting, effects, and audio playback.
 */
class AudioProcessor {
  constructor() {
    this.sharedAudioContext = null;
    this.analyser = null;
    this.dataArray = null;

    // Audio settings
    this.eqSettings = { bass: 0, mid: 0, treble: 0 }; // dB gains for the EQ filters
    this.pitchShift = 0; // in cents (1200 cents = 1 octave)
    this.masterVolume = 0.5;
    this.audioFilterMode = true; // true = full processing chain, false = raw playback
    this.activeEffects = { reverb: false, echo: false, distortion: false, chorus: false };

    // Playback state
    this.isPlaying = false;
    this.audioBuffer = [];
    this.audioQueue = [];
    this.consecutiveErrors = 0;

    // LFO oscillators started by the chorus effect for the current playback.
    // FIX: these were previously started with lfo.start() and never stopped,
    // leaking two running oscillators per processed chunk.
    this._activeLfos = [];
  }

  /**
   * Lazily create the shared AudioContext and analyser node.
   * @returns {boolean} true on success, false if Web Audio is unavailable.
   */
  initializeAudioContext() {
    try {
      if (!this.sharedAudioContext) {
        this.sharedAudioContext = new (window.AudioContext || window.webkitAudioContext)();
        console.log('🔊 Shared Audio Context Created');
      }
      if (!this.analyser) {
        this.analyser = this.sharedAudioContext.createAnalyser();
        this.analyser.fftSize = 256;
        this.dataArray = new Uint8Array(this.analyser.frequencyBinCount);
        console.log('🔊 Audio Analysis System Online');
      }
      // Autoplay policies may leave the context suspended until a user gesture.
      if (this.sharedAudioContext.state === 'suspended') {
        this.sharedAudioContext.resume().then(() => {
          console.log('🔊 Audio Context Resumed');
        });
      }
      return true;
    } catch (error) {
      console.error('❌ Audio Analysis Failed:', error.message);
      return false;
    }
  }

  /**
   * Decode and play raw PCM chunks.
   * Chunks are assumed to be interleaved stereo 32-bit float PCM at 48 kHz
   * (matching the 2-channel / 48000 Hz buffer created below) — TODO confirm
   * against the capture side.
   * @param {(ArrayBuffer|Blob)[]} rawChunks - incoming audio chunks.
   * @param {Function} [onComplete] - invoked when playback ends or on error.
   */
  playRawAudio(rawChunks, onComplete) {
    try {
      if (!this.sharedAudioContext) {
        this.initializeAudioContext();
      }
      if (this.sharedAudioContext.state === 'suspended') {
        this.sharedAudioContext.resume();
      }

      // Normalize every chunk to an ArrayBuffer.
      Promise.all(rawChunks.map((chunk) => {
        if (chunk instanceof ArrayBuffer) {
          return Promise.resolve(chunk);
        }
        if (chunk instanceof Blob) {
          return chunk.arrayBuffer();
        }
        // FIX: unknown chunk types used to map to `undefined`, which then
        // crashed the length calculation below; skip them instead.
        return Promise.resolve(new ArrayBuffer(0));
      })).then((arrayBuffers) => {
        // 8 bytes per stereo frame (2 channels x 4-byte float).
        // FIX: floor per chunk — createBuffer() throws on fractional lengths
        // when a chunk is not a whole number of frames.
        let totalLength = 0;
        arrayBuffers.forEach((buffer) => {
          totalLength += Math.floor(buffer.byteLength / 8);
        });
        // FIX: createBuffer(2, 0, ...) throws; treat an empty batch as done.
        if (totalLength === 0) {
          if (onComplete) onComplete();
          return;
        }

        const audioBuffer = this.sharedAudioContext.createBuffer(2, totalLength, 48000);
        const leftChannel = audioBuffer.getChannelData(0);
        const rightChannel = audioBuffer.getChannelData(1);

        // De-interleave into the buffer with a noise gate and soft clipping.
        let offset = 0;
        arrayBuffers.forEach((buffer) => {
          // FIX: explicit element count so a trailing partial frame cannot
          // make the Float32Array constructor throw.
          const audioData = new Float32Array(buffer, 0, Math.floor(buffer.byteLength / 4));
          for (let i = 0; i + 1 < audioData.length && offset < totalLength; i += 2) {
            let leftSample = audioData[i];
            let rightSample = audioData[i + 1];

            // Noise gate: zero out near-silent samples.
            const noiseThreshold = 0.001;
            if (Math.abs(leftSample) < noiseThreshold) leftSample = 0;
            if (Math.abs(rightSample) < noiseThreshold) rightSample = 0;

            leftChannel[offset] = this.softClip(leftSample);
            rightChannel[offset] = this.softClip(rightSample);
            offset++;
          }
        });

        if (this.audioFilterMode) {
          this.applySmoothingFilter(leftChannel);
          this.applySmoothingFilter(rightChannel);
          if (this.pitchShift !== 0) {
            this.applyPitchShift(audioBuffer, this.pitchShift);
          }
          this.playWithFullProcessing(audioBuffer, onComplete);
        } else {
          this.playRawOnly(audioBuffer, onComplete);
        }
      }).catch((error) => {
        console.error('❌ Audio Processing Error:', error.message);
        this.consecutiveErrors++;
        if (onComplete) onComplete();
      });
    } catch (error) {
      console.error('❌ Raw Audio Error:', error.message);
      this.consecutiveErrors++;
      if (onComplete) onComplete();
    }
  }

  /**
   * Play a buffer through the full chain:
   * source -> high-pass -> low-pass -> bass/mid/treble EQ -> compressor
   *        -> effects -> master gain -> (analyser) -> destination.
   * @param {AudioBuffer} audioBuffer
   * @param {Function} [onComplete]
   */
  playWithFullProcessing(audioBuffer, onComplete) {
    const source = this.sharedAudioContext.createBufferSource();
    const gainNode = this.sharedAudioContext.createGain();

    // Three-band EQ driven by this.eqSettings (gains in dB).
    const bassFilter = this.sharedAudioContext.createBiquadFilter();
    bassFilter.type = 'lowshelf';
    bassFilter.frequency.value = 320;
    bassFilter.gain.value = this.eqSettings.bass;

    const midFilter = this.sharedAudioContext.createBiquadFilter();
    midFilter.type = 'peaking';
    midFilter.frequency.value = 1000;
    midFilter.Q.value = 1;
    midFilter.gain.value = this.eqSettings.mid;

    const trebleFilter = this.sharedAudioContext.createBiquadFilter();
    trebleFilter.type = 'highshelf';
    trebleFilter.frequency.value = 3200;
    trebleFilter.gain.value = this.eqSettings.treble;

    // Band-limit to 80 Hz – 15 kHz to cut rumble and hiss.
    const highPassFilter = this.sharedAudioContext.createBiquadFilter();
    highPassFilter.type = 'highpass';
    highPassFilter.frequency.value = 80;
    highPassFilter.Q.value = 0.7;

    const lowPassFilter = this.sharedAudioContext.createBiquadFilter();
    lowPassFilter.type = 'lowpass';
    lowPassFilter.frequency.value = 15000;
    lowPassFilter.Q.value = 0.7;

    // Gentle compression to even out levels.
    const compressor = this.sharedAudioContext.createDynamicsCompressor();
    compressor.threshold.value = -24;
    compressor.knee.value = 30;
    compressor.ratio.value = 12;
    compressor.attack.value = 0.003;
    compressor.release.value = 0.25;

    source.buffer = audioBuffer;
    gainNode.gain.value = this.masterVolume;

    // Wire the chain in order.
    let currentNode = source;
    currentNode.connect(highPassFilter);
    currentNode = highPassFilter;
    currentNode.connect(lowPassFilter);
    currentNode = lowPassFilter;
    currentNode.connect(bassFilter);
    currentNode = bassFilter;
    currentNode.connect(midFilter);
    currentNode = midFilter;
    currentNode.connect(trebleFilter);
    currentNode = trebleFilter;
    currentNode.connect(compressor);
    currentNode = compressor;

    // Optional effects (reverb/echo/distortion/chorus).
    currentNode = this.applyAudioEffects(currentNode);

    // Final gain and output.
    currentNode.connect(gainNode);

    // Route through the analyser when available so visualizers get data.
    if (this.analyser) {
      gainNode.connect(this.analyser);
      this.analyser.connect(this.sharedAudioContext.destination);
    } else {
      gainNode.connect(this.sharedAudioContext.destination);
    }

    source.start();
    console.log('🎵 Audio Stream Active (Full Processing)');

    source.onended = () => {
      this.consecutiveErrors = 0;
      // FIX: release any chorus LFOs started for this playback.
      this._stopActiveLfos();
      if (onComplete) onComplete();
    };
  }

  /** Stop and forget any effect LFOs started by applyAudioEffects(). */
  _stopActiveLfos() {
    this._activeLfos.forEach((osc) => {
      try {
        osc.stop();
      } catch (e) {
        // Already stopped — nothing to do.
      }
    });
    this._activeLfos = [];
  }

  /**
   * Play a buffer with no processing — just master volume and the analyser.
   * @param {AudioBuffer} audioBuffer
   * @param {Function} [onComplete]
   */
  playRawOnly(audioBuffer, onComplete) {
    const source = this.sharedAudioContext.createBufferSource();
    const gainNode = this.sharedAudioContext.createGain();
    source.buffer = audioBuffer;
    gainNode.gain.value = this.masterVolume;
    source.connect(gainNode);
    if (this.analyser) {
      gainNode.connect(this.analyser);
      this.analyser.connect(this.sharedAudioContext.destination);
    } else {
      gainNode.connect(this.sharedAudioContext.destination);
    }
    source.start();
    console.log('🎵 Audio Stream Active (Raw)');
    source.onended = () => {
      this.consecutiveErrors = 0;
      if (onComplete) onComplete();
    };
  }

  /**
   * Soft clipper: linear below |0.7|, compressed knee (slope 0.3) up to
   * +/-0.95, hard limit beyond.
   * @param {number} sample
   * @returns {number} clipped sample
   */
  softClip(sample) {
    if (sample > 0.95) return 0.95;
    if (sample < -0.95) return -0.95;
    if (Math.abs(sample) > 0.7) {
      const sign = sample > 0 ? 1 : -1;
      const abs = Math.abs(sample);
      return sign * (0.7 + (abs - 0.7) * 0.3);
    }
    return sample;
  }

  /**
   * In-place one-pole (exponential) low-pass smoothing, alpha = 0.1.
   * @param {Float32Array} channelData - modified in place.
   */
  applySmoothingFilter(channelData) {
    const alpha = 0.1;
    for (let i = 1; i < channelData.length; i++) {
      channelData[i] = alpha * channelData[i] + (1 - alpha) * channelData[i - 1];
    }
  }

  /**
   * Naive in-place pitch shift by linear-interpolation resampling.
   * NOTE: this is a resample, so shifting up leaves silence at the tail;
   * it is an approximation, not a time-preserving pitch shifter.
   * @param {AudioBuffer} audioBuffer - modified in place.
   * @param {number} cents - shift amount (1200 cents = 1 octave).
   */
  applyPitchShift(audioBuffer, cents) {
    const pitchRatio = Math.pow(2, cents / 1200);
    const channels = audioBuffer.numberOfChannels;
    for (let channel = 0; channel < channels; channel++) {
      const channelData = audioBuffer.getChannelData(channel);
      const newChannelData = new Float32Array(channelData.length);
      for (let i = 0; i < newChannelData.length; i++) {
        const sourceIndex = i * pitchRatio;
        const index1 = Math.floor(sourceIndex);
        const index2 = Math.min(index1 + 1, channelData.length - 1);
        const fraction = sourceIndex - index1;
        if (index1 < channelData.length) {
          newChannelData[i] = channelData[index1] * (1 - fraction) + channelData[index2] * fraction;
        }
      }
      channelData.set(newChannelData);
    }
  }

  /**
   * Insert the enabled effects after inputNode and return the new chain tail.
   * FIX: wet/dry mixing previously used ChannelMergerNode, which routes each
   * input to a *separate* output channel (wet hard-left, dry hard-right)
   * instead of summing them; GainNodes are now used as mix buses, since
   * multiple connections into one AudioNode input are summed.
   * @param {AudioNode} inputNode
   * @returns {AudioNode} last node of the effect chain.
   */
  applyAudioEffects(inputNode) {
    let currentNode = inputNode;

    // Reverb: convolution with a generated impulse, 30% wet / 70% dry.
    if (this.activeEffects.reverb) {
      const convolver = this.sharedAudioContext.createConvolver();
      convolver.buffer = this.createReverbImpulse();
      const reverbGain = this.sharedAudioContext.createGain();
      const dryGain = this.sharedAudioContext.createGain();
      reverbGain.gain.value = 0.3;
      dryGain.gain.value = 0.7;
      const reverbMix = this.sharedAudioContext.createGain();
      currentNode.connect(convolver);
      convolver.connect(reverbGain);
      reverbGain.connect(reverbMix);
      currentNode.connect(dryGain);
      dryGain.connect(reverbMix);
      currentNode = reverbMix;
    }

    // Echo: 300 ms delay with 40% feedback, mixed at 30%.
    if (this.activeEffects.echo) {
      const delay = this.sharedAudioContext.createDelay(1.0);
      const feedback = this.sharedAudioContext.createGain();
      const echoGain = this.sharedAudioContext.createGain();
      delay.delayTime.value = 0.3;
      feedback.gain.value = 0.4;
      echoGain.gain.value = 0.3;
      currentNode.connect(delay);
      delay.connect(feedback);
      feedback.connect(delay); // feedback loop
      delay.connect(echoGain);
      const echoMix = this.sharedAudioContext.createGain();
      currentNode.connect(echoMix);
      echoGain.connect(echoMix);
      currentNode = echoMix;
    }

    // Distortion: waveshaper with a fixed drive of 50.
    if (this.activeEffects.distortion) {
      const waveshaper = this.sharedAudioContext.createWaveShaper();
      waveshaper.curve = this.createDistortionCurve(50);
      waveshaper.oversample = '4x';
      currentNode.connect(waveshaper);
      currentNode = waveshaper;
    }

    // Chorus: two LFO-modulated delay taps mixed with the dry signal.
    if (this.activeEffects.chorus) {
      const delay1 = this.sharedAudioContext.createDelay(0.1);
      const delay2 = this.sharedAudioContext.createDelay(0.1);
      const lfo1 = this.sharedAudioContext.createOscillator();
      const lfo2 = this.sharedAudioContext.createOscillator();
      const lfoGain1 = this.sharedAudioContext.createGain();
      const lfoGain2 = this.sharedAudioContext.createGain();
      lfo1.frequency.value = 0.5;
      lfo2.frequency.value = 0.7;
      lfoGain1.gain.value = 0.005;
      lfoGain2.gain.value = 0.007;
      delay1.delayTime.value = 0.02;
      delay2.delayTime.value = 0.03;
      lfo1.connect(lfoGain1);
      lfoGain1.connect(delay1.delayTime);
      lfo2.connect(lfoGain2);
      lfoGain2.connect(delay2.delayTime);
      const chorusMix = this.sharedAudioContext.createGain();
      // FIX: include the dry path — without it the effect was just two
      // detuned copies with nothing to beat against.
      currentNode.connect(chorusMix);
      currentNode.connect(delay1);
      currentNode.connect(delay2);
      delay1.connect(chorusMix);
      delay2.connect(chorusMix);
      lfo1.start();
      lfo2.start();
      // Track so playWithFullProcessing can stop them when playback ends.
      this._activeLfos.push(lfo1, lfo2);
      currentNode = chorusMix;
    }

    return currentNode;
  }

  /**
   * Build a 2-second stereo noise impulse with quadratic decay for the
   * reverb convolver.
   * @returns {AudioBuffer}
   */
  createReverbImpulse() {
    const length = this.sharedAudioContext.sampleRate * 2;
    const impulse = this.sharedAudioContext.createBuffer(2, length, this.sharedAudioContext.sampleRate);
    for (let channel = 0; channel < 2; channel++) {
      const channelData = impulse.getChannelData(channel);
      for (let i = 0; i < length; i++) {
        channelData[i] = (Math.random() * 2 - 1) * Math.pow(1 - i / length, 2);
      }
    }
    return impulse;
  }

  /**
   * Standard waveshaper distortion curve (44100 points, zero at center).
   * @param {number} amount - drive; higher = harder distortion.
   * @returns {Float32Array}
   */
  createDistortionCurve(amount) {
    const samples = 44100;
    const curve = new Float32Array(samples);
    const deg = Math.PI / 180;
    for (let i = 0; i < samples; i++) {
      const x = (i * 2) / samples - 1;
      curve[i] = ((3 + amount) * x * 20 * deg) / (Math.PI + amount * Math.abs(x));
    }
    return curve;
  }

  /**
   * Play a 0.5 s 440 Hz test tone at 30% of master volume.
   * @returns {boolean} true if the tone was scheduled.
   */
  playTestTone() {
    try {
      if (!this.sharedAudioContext) {
        this.initializeAudioContext();
      }
      if (this.sharedAudioContext.state === 'suspended') {
        this.sharedAudioContext.resume();
      }
      const oscillator = this.sharedAudioContext.createOscillator();
      const gainNode = this.sharedAudioContext.createGain();
      oscillator.connect(gainNode);
      gainNode.connect(this.sharedAudioContext.destination);
      oscillator.frequency.value = 440;
      gainNode.gain.value = this.masterVolume * 0.3;
      oscillator.start();
      oscillator.stop(this.sharedAudioContext.currentTime + 0.5);
      console.log('✅ Audio Test Complete - 440Hz Tone');
      return true;
    } catch (error) {
      console.error('❌ Audio Test Failed:', error.message);
      return false;
    }
  }
}

// Export for use in other modules.
// FIX: guarded so the module can also load outside a browser (e.g. tests).
if (typeof window !== 'undefined') {
  window.AudioProcessor = AudioProcessor;
}