/*
 * audio-mixer-engine
 * Version: (unspecified in source listing)
 * Audio engine library for audio mixer applications with MIDI parsing,
 * playback, and synthesis.
 * (Package listing metadata: 527 lines (440 loc) • 18.3 kB, JavaScript)
 */
import AudioEngine from './audio-engine.js';
import SpessaSynthChannelHandle from './spessasynth-channel-handle.js';
import workletUrl from 'spessasynth_lib/dist/spessasynth_processor.min.js?url';
/**
* SpessaSynth implementation of AudioEngine
* Uses spessasynth_lib for MIDI synthesis with part-centric channels
*/
export default class SpessaSynthAudioEngine extends AudioEngine {
  /**
   * @param {AudioContext} audioContext - Web Audio context used to create all nodes.
   * @param {Object} [options={}] - Engine options forwarded to the AudioEngine base class.
   */
  constructor(audioContext, options = {}) {
    super(audioContext, options);
    this.synthesizer = null; // WorkletSynthesizer instance, created in initialize()
    this.soundfont = null;
    this.channelCounter = 0; // Next MIDI channel to auto-assign to a part (0-14; 15 reserved)
    this.partToMidiChannel = new Map(); // partId -> MIDI channel mapping
    this.midiChannelToPart = new Map(); // MIDI channel -> partId mapping
    this.individualOutputs = []; // Per-MIDI-channel GainNodes (16 entries after initialize())
    this.metronomeAnalyser = null; // AnalyserNode tapped off channel 15 for latency measurement
  }

  /**
   * Load the soundfont, the AudioWorklet processor and the synthesizer, then
   * wire up per-channel outputs. Must complete before channels are created.
   * @param {string|ArrayBuffer} soundfontData - URL/path to a soundfont file, or its raw bytes.
   * @throws {Error} If soundfontData has an unsupported type, the soundfont
   *                 download fails, or the AudioWorklet cannot be loaded.
   */
  async initialize(soundfontData) {
    this._emitProgress('importing', 'Loading SpessaSynth library...');
    // Lazy dynamic import so the heavy synth library is only fetched on demand.
    const { WorkletSynthesizer } = await import('spessasynth_lib');

    // Accept either a URL/path (streamed with progress) or raw bytes.
    let soundfontBuffer;
    if (typeof soundfontData === 'string') {
      this._emitProgress('loading-soundfont', 'Downloading soundfont...');
      soundfontBuffer = await this._loadSoundfontWithProgress(soundfontData);
    } else if (soundfontData instanceof ArrayBuffer) {
      this._emitProgress('loading-soundfont', 'Soundfont provided');
      soundfontBuffer = soundfontData;
    } else {
      throw new Error('Invalid soundfont data type. Expected string path or ArrayBuffer.');
    }

    // Add the AudioWorklet module (required by SpessaSynth).
    this._emitProgress('loading-worklet', 'Loading audio worklet processor...');
    await this._loadAudioWorkletSafely();
    console.log('AudioWorklet loaded. Note: Worklet processor may persist across page reloads.');

    // Create individual output nodes for each of the 16 MIDI channels.
    this._emitProgress('creating-synth', 'Setting up audio channels...');
    this._setupIndividualOutputs();

    // SpessaSynth's constructor requires a valid AudioNode target, but routing
    // goes through individualOutputs — so this gain node stays unconnected.
    this.dummyTarget = this.audioContext.createGain();

    // Short delay before synthesizer creation to prevent Firefox timing issues.
    await new Promise((resolve) => setTimeout(resolve, 50));
    this._emitProgress('creating-synth', 'Initializing synthesizer...');
    // Create synthesizer with the spessasynth_lib v4 API.
    this.synthesizer = new WorkletSynthesizer(this.audioContext);

    // Load the soundbank and wait for the synth to finish setting up.
    this._emitProgress('loading-soundbank', 'Loading soundbank into synthesizer...');
    await this.synthesizer.soundBankManager.addSoundBank(soundfontBuffer, 'main');
    await this.synthesizer.isReady;

    // Connect individual outputs for per-channel routing.
    this._emitProgress('finalizing', 'Finalizing audio setup...');
    this._connectIndividualOutputs();

    // Reserve channel 15 for the metronome (woodblock instrument).
    this._initializeMetronomeChannel();

    this._emitProgress('ready', 'Audio engine ready');
    this.isInitialized = true;
  }

  /**
   * Create a channel handle for a musical part, auto-assigning the next free
   * MIDI channel (0-14; channel 15 is reserved for the metronome).
   * @param {string} partId - Unique part identifier.
   * @param {Object} [options={}] - Handle options; options.instrument sets the initial program.
   * @returns {SpessaSynthChannelHandle} The newly registered channel handle.
   * @throws {Error} If the part already has a channel or all 15 musical channels are in use.
   */
  createChannel(partId, options = {}) {
    this._validateInitialized();
    if (this.partToMidiChannel.has(partId)) {
      throw new Error(`Channel for part '${partId}' already exists`);
    }
    const midiChannel = this.channelCounter;
    if (midiChannel >= 15) {
      throw new Error('Maximum number of musical part channels (15) exceeded. Channel 15 is reserved for metronome.');
    }
    this.channelCounter++;
    this.partToMidiChannel.set(partId, midiChannel);
    this.midiChannelToPart.set(midiChannel, partId);

    const handle = new SpessaSynthChannelHandle(
      this,
      partId,
      midiChannel,
      options
    );
    // Register with the parent engine's channel bookkeeping.
    this._registerChannel(handle);
    // Apply the initial instrument if one was requested.
    if (options.instrument) {
      handle.setInstrument(options.instrument);
    }
    return handle;
  }

  /**
   * Silence all 16 MIDI channels (including the metronome on 15) by sending
   * All Sounds Off, All Notes Off, and Reset All Controllers.
   */
  allSoundsOff() {
    if (!this.synthesizer) return;
    // Hoisted capability check — controllerChange may be absent on some builds.
    if (this.synthesizer.controllerChange) {
      for (let midiChannel = 0; midiChannel < 16; midiChannel++) {
        // All Sounds Off (CC 120) - immediate sound termination
        this.synthesizer.controllerChange(midiChannel, 120, 0);
        // All Notes Off (CC 123) - stop all notes gracefully
        this.synthesizer.controllerChange(midiChannel, 123, 0);
        // Reset All Controllers (CC 121) - reset pitch bend, modulation, etc.
        this.synthesizer.controllerChange(midiChannel, 121, 0);
      }
    }
    console.log('Sent comprehensive reset to all 16 MIDI channels');
  }

  /**
   * Clear all part-to-channel mappings (for loading new scores).
   * Keeps the individual output nodes alive — they are reused across scores.
   */
  clearAllChannels() {
    this.allSoundsOff();
    this.partToMidiChannel.clear();
    this.midiChannelToPart.clear();
    this.channelCounter = 0;
    // NOTE: individualOutputs is intentionally NOT cleared here; SpessaSynth
    // depends on it for routing, and the nodes are only destroyed in destroy().
  }

  /**
   * Tear down the engine: silence everything, disconnect all audio nodes,
   * clear mappings, and release the synthesizer.
   */
  destroy() {
    this.allSoundsOff();
    // Disconnect synthesizer output(s).
    if (this.synthesizer && typeof this.synthesizer.disconnect === 'function') {
      this.synthesizer.disconnect();
    }
    // Disconnect every per-channel gain node.
    this.individualOutputs.forEach((outputNode) => {
      if (outputNode && outputNode.disconnect) {
        outputNode.disconnect();
      }
    });
    this.individualOutputs = [];
    // Fix: release the metronome analyser too (was previously leaked).
    if (this.metronomeAnalyser) {
      this.metronomeAnalyser.disconnect();
      this.metronomeAnalyser = null;
    }
    // Disconnect dummy target (if it exists).
    if (this.dummyTarget) {
      this.dummyTarget.disconnect();
      this.dummyTarget = null;
    }
    // Clear mappings.
    this.partToMidiChannel.clear();
    this.midiChannelToPart.clear();
    this.channelCounter = 0;
    // Parent cleanup last, then drop references for GC.
    super.destroy();
    this.synthesizer = null;
    this.soundfont = null;
  }

  /**
   * Get the MIDI channel number for a part.
   * @param {string} partId - Part identifier.
   * @returns {number|null} MIDI channel number or null if not found.
   */
  getMidiChannelForPart(partId) {
    // ?? handles channel 0 correctly (falsy but valid).
    return this.partToMidiChannel.get(partId) ?? null;
  }

  /**
   * Get access to the underlying synthesizer for channel handles.
   * @returns {Object} SpessaSynth synthesizer instance.
   * @protected
   */
  _getSynthesizer() {
    return this.synthesizer;
  }

  /**
   * Get the individual output node for a specific MIDI channel.
   * @param {number} midiChannel - MIDI channel number (0-15).
   * @returns {AudioNode|null} Individual output node or null if not available.
   * @protected
   */
  _getIndividualOutput(midiChannel) {
    if (midiChannel >= 0 && midiChannel < this.individualOutputs.length) {
      return this.individualOutputs[midiChannel];
    }
    return null;
  }

  /**
   * Get the metronome channel (channel 15) output node, logging availability.
   * @returns {AudioNode|null} Metronome channel output node or null if not available.
   */
  getMetronomeChannel() {
    const channel15 = this._getIndividualOutput(15);
    console.log(`Metronome channel 15 output:`, channel15 ? 'Available' : 'NULL', `(total outputs: ${this.individualOutputs.length})`);
    return channel15;
  }

  /**
   * Set up individual output gain nodes for each of the 16 MIDI channels.
   * Outputs are left unconnected — external code (demo, mixer, ...) routes them.
   * @private
   */
  _setupIndividualOutputs() {
    this.individualOutputs = [];
    for (let i = 0; i < 16; i++) {
      const outputGain = this.audioContext.createGain();
      outputGain.gain.value = 1.0;
      this.individualOutputs.push(outputGain);
    }
    // Tap channel 15 with an analyser for latency measurement.
    this._setupMetronomeAnalyser();
  }

  /**
   * Set up an AnalyserNode on the metronome output for latency measurement.
   * The analyser is a side tap; it does not alter the audible routing.
   * @private
   */
  _setupMetronomeAnalyser() {
    const metronomeOutput = this.individualOutputs[15];
    if (!metronomeOutput) {
      console.warn('Cannot setup metronome analyser: channel 15 not available');
      return;
    }
    this.metronomeAnalyser = this.audioContext.createAnalyser();
    this.metronomeAnalyser.fftSize = 2048; // Good balance for time resolution
    this.metronomeAnalyser.smoothingTimeConstant = 0; // No smoothing for accurate onset detection
    metronomeOutput.connect(this.metronomeAnalyser);
    console.log('Metronome analyser initialized for latency measurement');
  }

  /**
   * Connect the synthesizer's per-channel outputs to the individual gain nodes,
   * falling back to master output when per-channel routing is unavailable.
   * @private
   */
  _connectIndividualOutputs() {
    // Shared fallback: route the synth straight to the context destination.
    const connectMasterFallback = () => {
      if (this.synthesizer && this.synthesizer.connect && this.audioContext.destination) {
        this.synthesizer.connect(this.audioContext.destination);
      }
    };
    try {
      if (this.synthesizer && this.synthesizer.connectIndividualOutputs) {
        // SpessaSynth outputs 2-17 are individual MIDI channels (0-15).
        this.synthesizer.connectIndividualOutputs(this.individualOutputs);
      } else {
        console.warn('Synthesizer does not support individual outputs, using master output only');
        connectMasterFallback();
      }
    } catch (error) {
      console.warn('Failed to connect individual outputs:', error.message);
      console.warn('Falling back to master output routing');
      // Fix: previously this branch only logged the fallback without doing it.
      try {
        connectMasterFallback();
      } catch (fallbackError) {
        console.warn('Master output fallback also failed:', fallbackError.message);
      }
    }
  }

  /**
   * Load a soundfont from a file path or URL with progress tracking.
   * Streams the response body when Content-Length is known, emitting
   * 'loading-soundfont' progress events (0.0-1.0).
   * @param {string} path - Path to soundfont file.
   * @returns {Promise<ArrayBuffer>} Soundfont data.
   * @throws {Error} If the HTTP request does not succeed.
   * @private
   */
  async _loadSoundfontWithProgress(path) {
    const response = await fetch(path);
    if (!response.ok) {
      throw new Error(`Failed to load soundfont: ${response.status} ${response.statusText}`);
    }
    const contentLength = response.headers.get('content-length');
    const total = contentLength ? parseInt(contentLength, 10) : null;
    // Without a known total (or a streamable body) there is no way to report
    // progress, so fall back to a single buffered read.
    if (!total || !response.body) {
      return await response.arrayBuffer();
    }
    const reader = response.body.getReader();
    let received = 0;
    const chunks = [];
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      chunks.push(value);
      received += value.length;
      const progress = received / total;
      const percentText = Math.round(progress * 100);
      this._emitProgress(
        'loading-soundfont',
        `Downloading soundfont: ${percentText}% (${Math.round(received / 1024)} KB / ${Math.round(total / 1024)} KB)`,
        progress
      );
    }
    // Stitch the streamed chunks back into one contiguous buffer.
    const arrayBuffer = new Uint8Array(received);
    let position = 0;
    for (const chunk of chunks) {
      arrayBuffer.set(chunk, position);
      position += chunk.length;
    }
    return arrayBuffer.buffer;
  }

  /**
   * Load the AudioWorklet module with retries and escalating backoff
   * (worklet loading is flaky on some browsers right after page load).
   * @throws {Error} If all retry attempts fail.
   * @private
   */
  async _loadAudioWorkletSafely() {
    const maxRetries = 5;
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      try {
        await this.audioContext.audioWorklet.addModule(workletUrl);
        return;
      } catch (error) {
        console.warn(`AudioWorklet loading failed (attempt ${attempt}/${maxRetries}):`, error.message);
        if (attempt === maxRetries) {
          throw new Error(`AudioWorklet failed after ${maxRetries} attempts: ${error.message}`);
        }
        // Escalating delay between retries: 500ms, 1000ms, ...
        const retryDelay = attempt * 500;
        await new Promise((resolve) => setTimeout(resolve, retryDelay));
      }
    }
  }

  /**
   * Initialize the reserved metronome channel (15) with the woodblock
   * instrument and full channel volume. Failures are logged, never thrown —
   * a broken metronome must not break the engine.
   * @private
   */
  _initializeMetronomeChannel() {
    try {
      const synthesizer = this._getSynthesizer();
      if (!synthesizer) {
        console.warn('Cannot initialize metronome channel: synthesizer not available');
        return;
      }
      const metronomeChannel = 15;
      // GM program 115 is Woodblock.
      if (synthesizer.programChange) {
        synthesizer.programChange(metronomeChannel, 115); // Woodblock
        console.log('Metronome channel 15 initialized with woodblock instrument (115)');
      }
      // Full channel volume (CC 7) in case it was muted elsewhere.
      if (synthesizer.controllerChange) {
        synthesizer.controllerChange(metronomeChannel, 7, 127); // Volume control
      }
    } catch (error) {
      console.warn('Failed to initialize metronome channel:', error);
    }
  }

  /**
   * Play a metronome tick on MIDI channel 15, falling back to the parent
   * (buffer-based) implementation when MIDI metronome is unavailable.
   * @param {number} audioTime - Absolute audio context time for the tick.
   * @param {boolean} isAccent - Whether this is an accent beat (downbeat).
   * @param {number} volume - Volume level (0.0-1.0).
   * @returns {Promise<void>}
   */
  async playMetronomeTick(audioTime, isAccent, volume) {
    try {
      const metronomeChannel = this.getMetronomeChannel();
      const synthesizer = this._getSynthesizer();
      if (!metronomeChannel || !synthesizer) {
        // Fallback to parent class implementation (buffer-based).
        return super.playMetronomeTick(audioTime, isAccent, volume);
      }
      const midiChannel = 15;
      // Two woodblock pitches: D6 (note 86) for accent, C4 (note 60) regular.
      // (Fix: earlier comments incorrectly described 86 as E4/G4.)
      const note = isAccent ? 86 : 60;
      const baseVol = isAccent ? 127 : 100;
      // Scale volume to MIDI velocity (0.0-1.0 -> 0-127), clamped.
      const velocity = Math.round(Math.min(127, Math.max(0, volume * baseVol)));
      // Timing — same logic as the base AudioEngine class.
      const currentTime = this.audioContext.currentTime;
      const startTime = Math.max(audioTime, currentTime);
      const delay = startTime - currentTime;
      // Note: the woodblock instrument is already set during initialization.
      const tickDuration = 0.1; // 100ms tick duration
      if (synthesizer.post) {
        // Preferred path: sample-accurate scheduling via options.time (no setTimeout).
        synthesizer.post({
          channelNumber: midiChannel,
          type: "midiMessage",
          data: {
            messageData: [0x90 | midiChannel, note, velocity], // note-on
            channelOffset: 0,
            force: false,
            options: {
              time: startTime
            }
          }
        });
        synthesizer.post({
          channelNumber: midiChannel,
          type: "midiMessage",
          data: {
            messageData: [0x80 | midiChannel, note, 0], // note-off
            channelOffset: 0,
            force: false,
            options: {
              time: startTime + tickDuration
            }
          }
        });
      } else {
        // Fallback: direct noteOn/noteOff with setTimeout-based timing
        // (less accurate — only used when post() is unavailable).
        if (delay <= 0.010) {
          if (synthesizer.noteOn) {
            synthesizer.noteOn(midiChannel, note, velocity);
          }
          setTimeout(() => {
            if (synthesizer.noteOff) {
              synthesizer.noteOff(midiChannel, note);
            }
          }, 100);
        } else {
          setTimeout(() => {
            if (synthesizer.noteOn) {
              synthesizer.noteOn(midiChannel, note, velocity);
            }
            setTimeout(() => {
              if (synthesizer.noteOff) {
                synthesizer.noteOff(midiChannel, note);
              }
            }, 100);
          }, delay * 1000);
        }
      }
    } catch (error) {
      console.warn('MIDI metronome failed, falling back to buffers:', error);
      return super.playMetronomeTick(audioTime, isAccent, volume);
    }
  }

  /**
   * Get the metronome output node (channel 15) without logging.
   * @returns {AudioNode|null} Metronome output node or null if not available.
   */
  getMetronomeOutput() {
    // Consistency fix: delegate to the shared accessor instead of duplicating
    // the bounds logic (null when fewer than 16 outputs exist, as before).
    if (!this.individualOutputs) {
      return null;
    }
    return this._getIndividualOutput(15);
  }

  /**
   * Get the metronome analyser node for latency measurement.
   * @returns {AnalyserNode|null} Analyser node or null if not available.
   */
  getMetronomeAnalyser() {
    return this.metronomeAnalyser;
  }
}