audio-mixer-engine
Audio engine library for audio mixer applications with MIDI parsing, playback, and synthesis
920 lines (795 loc) • 29.5 kB
JavaScript
/**
* MidiParser - A class for parsing MIDI files
* Extracts parts, bar structure, and metadata
*/
class MidiParser {
constructor() {
// Common part names to detect
this.partNames = [
'soprano', 'alto', 'tenor', 'bass',
'treble', 'mezzo', 'baritone',
's', 'a', 't', 'b', 'satb'
];
// Store the parsed data
this.parsedData = {
parts: {}, // Will contain separate arrays for each vocal part
barStructure: [], // Will contain bar timing information
metadata: {} // Will contain title, composer, etc.
};
}
/**
* Main method to parse a MIDI file
* @param {ArrayBuffer} midiFileBuffer - The MIDI file as an ArrayBuffer
* @param {Object} metadataOverrides - Optional metadata overrides (title, composer, parts mapping, etc.)
* @returns {Object} Parsed data with parts, barStructure and metadata
*/
async parse(midiFileBuffer, metadataOverrides = null) {
try {
const midi = await this._parseMidiBuffer(midiFileBuffer);
// Store metadata overrides for use in extraction methods
this.metadataOverrides = metadataOverrides || {};
// Normalize legacy metadata format if detected
this._normalizeLegacyMetadata();
// Extract metadata
this._extractMetadata(midi);
// Extract bar structure (time signatures, tempo changes)
this._extractBarStructure(midi);
// Extract parts
this._extractParts(midi);
// Include normalized metadata in the result (for beat mapping)
this.parsedData.structureMetadata = this.metadataOverrides;
return this.parsedData;
} catch (error) {
console.error('Error parsing MIDI file:', error);
throw error;
}
}
/**
* Normalize legacy metadata format (v1) to current format (v2)
* Detects and converts:
* - scores[] array wrapper -> unwraps scores[0]
* - parts array with URLs -> parts object with trackIndex/instrument
* - bars array -> preserved for beat mapping
* @private
*/
_normalizeLegacyMetadata() {
if (!this.metadataOverrides || Object.keys(this.metadataOverrides).length === 0) {
return;
}
const metadata = this.metadataOverrides;
// Detect legacy format: has 'scores' array wrapper
if (metadata.scores && Array.isArray(metadata.scores) && metadata.scores.length > 0) {
const score = metadata.scores[0];
// Unwrap scores[0] - merge it with top-level metadata
if (score.parts) {
// Convert legacy parts array to current format object
metadata.parts = this._convertLegacyParts(score.parts);
}
// Preserve bars array if present
if (score.bars) {
metadata.bars = score.bars;
}
// Remove the scores wrapper
delete metadata.scores;
}
// If no 'scores' wrapper but has legacy parts array (array instead of object)
else if (metadata.parts && Array.isArray(metadata.parts)) {
metadata.parts = this._convertLegacyParts(metadata.parts);
}
// Remove legacy fields that should be ignored
delete metadata.type;
delete metadata.version;
delete metadata.subtitle;
}
/**
* Convert legacy parts array to current format object
* Parses URL query parameters (track, prog) from legacy format
* @private
* @param {Array} legacyParts - Array of part objects with url, name, volume
* @returns {Object} Parts object with trackIndex and instrument
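* @example
* // Illustrative legacy entry (name and URL are hypothetical):
* // { name: 'Soprano', url: 'soprano.mid?track=1&prog=53', volume: 80 }
* // becomes { soprano: { trackIndex: 1, instrument: 53 } }; volume is not carried over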
*/
_convertLegacyParts(legacyParts) {
const parts = {};
for (const legacyPart of legacyParts) {
if (!legacyPart.name || !legacyPart.url) {
continue;
}
// Convert part name to lowercase for consistency
const partKey = legacyPart.name.toLowerCase();
// Parse URL query parameters
const urlParams = this._parseUrlParams(legacyPart.url);
// Create part config
const partConfig = {};
// Extract trackIndex from 'track' parameter
if (urlParams.track !== undefined) {
partConfig.trackIndex = parseInt(urlParams.track, 10);
}
// Extract instrument from 'prog' parameter
if (urlParams.prog !== undefined) {
const prog = parseInt(urlParams.prog, 10);
// Only set instrument if prog is not 0 (0 means use MIDI default)
if (prog !== 0) {
partConfig.instrument = prog;
}
}
// Only add part if it has a trackIndex
if (partConfig.trackIndex !== undefined) {
parts[partKey] = partConfig;
}
}
return parts;
}
/**
* Parse URL query parameters
* @private
* @param {string} url - URL string with query parameters
* @returns {Object} Object with parameter key-value pairs
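* @example
* // e.g. _parseUrlParams('part.mid?track=2&prog=0') -> { track: '2', prog: '0' } (values stay strings)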
*/
_parseUrlParams(url) {
const params = {};
// Find query string (everything after '?')
const queryStart = url.indexOf('?');
if (queryStart === -1) {
return params;
}
const queryString = url.substring(queryStart + 1);
// Parse parameters
const pairs = queryString.split('&');
for (const pair of pairs) {
const [key, value] = pair.split('=');
if (key && value !== undefined) {
params[key] = value;
}
}
return params;
}
/**
* Parse the MIDI buffer into a workable format
* @private
*/
async _parseMidiBuffer(midiFileBuffer) {
// Convert ArrayBuffer to Uint8Array
const data = new Uint8Array(midiFileBuffer);
// Check if it's a valid MIDI file
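// A Standard MIDI File starts with the ASCII bytes 'MThd' (0x4D 0x54 0x68 0x64)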
if (!(data[0] === 0x4D && data[1] === 0x54 && data[2] === 0x68 && data[3] === 0x64)) {
throw new Error('Not a valid MIDI file');
}
// Parse header chunk
const headerLength = this._bytesToNumber(data.slice(4, 8));
const format = this._bytesToNumber(data.slice(8, 10));
const tracksCount = this._bytesToNumber(data.slice(10, 12));
const division = this._bytesToNumber(data.slice(12, 14));
// MIDI time division (ticks per quarter note or SMPTE)
const ticksPerBeat = division & 0x8000 ? null : division;
const midi = {
format,
ticksPerBeat,
tracks: [],
duration: 0
};
// Parse each track
let currentPosition = 8 + headerLength;
for (let i = 0; i < tracksCount; i++) {
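// Each track chunk starts with the ASCII bytes 'MTrk' (0x4D 0x54 0x72 0x6B)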
if (data[currentPosition] === 0x4D && data[currentPosition + 1] === 0x54 &&
data[currentPosition + 2] === 0x72 && data[currentPosition + 3] === 0x6B) {
const trackLength = this._bytesToNumber(data.slice(currentPosition + 4, currentPosition + 8));
const trackData = data.slice(currentPosition + 8, currentPosition + 8 + trackLength);
const track = this._parseTrack(trackData);
midi.tracks.push(track);
currentPosition += 8 + trackLength;
} else {
throw new Error(`Invalid track header at position ${currentPosition}`);
}
}
return midi;
}
/**
* Parse a single MIDI track
* @private
*/
_parseTrack(data) {
const track = {
notes: [],
name: null,
lyrics: [],
events: [],
duration: 0
};
let currentPosition = 0;
let currentTick = 0;
let runningStatus = null;
while (currentPosition < data.length) {
// Parse delta time
let deltaTime = 0;
let byte = 0;
// noinspection JSBitwiseOperatorUsage
do {
byte = data[currentPosition++];
deltaTime = (deltaTime << 7) | (byte & 0x7F);
} while (byte & 0x80);
currentTick += deltaTime;
// Get event type
byte = data[currentPosition++];
let eventType = byte;
// Handle running status
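// A data byte (< 0x80) here means the previous status byte is reused (running status),
// e.g. 0x90 0x3C 0x64 0x3E 0x64 encodes two Note On events on the same channel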
if ((byte & 0x80) === 0) {
if (runningStatus === null) {
throw new Error("Running status byte encountered before status byte");
}
eventType = runningStatus;
currentPosition--; // We need to reread the current byte as a data byte
} else {
// Channel voice messages set running status; meta and SysEx events cancel it
runningStatus = eventType < 0xF0 ? eventType : null;
}
// Handle different event types
if (eventType === 0xFF) { // Meta event
const metaType = data[currentPosition++];
const metaLength = this._readVariableLengthValue(data, currentPosition);
currentPosition += metaLength.bytesRead;
const metaData = data.slice(currentPosition, currentPosition + metaLength.value);
currentPosition += metaLength.value;
// Handle meta events
switch (metaType) {
case 0x03: // Track name
track.name = this._bytesToString(metaData);
break;
case 0x01: // Text event
track.events.push({
type: 'text',
text: this._bytesToString(metaData),
tick: currentTick
});
break;
case 0x05: // Lyrics
track.lyrics.push({
text: this._bytesToString(metaData),
tick: currentTick
});
break;
case 0x51: // Tempo
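// The meta data holds microseconds per quarter note, e.g. 500000 -> 120 BPM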
const microsecondsPerBeat = this._bytesToNumber(metaData);
const tempo = Math.round(60000000 / microsecondsPerBeat);
track.events.push({
type: 'tempo',
bpm: tempo,
tick: currentTick
});
break;
case 0x58: // Time signature
track.events.push({
type: 'timeSignature',
numerator: metaData[0],
denominator: Math.pow(2, metaData[1]),
tick: currentTick
});
break;
case 0x2F: // End of track
track.duration = currentTick;
break;
}
}
else if ((eventType & 0xF0) === 0x90) { // Note on
const channel = eventType & 0x0F;
const noteNumber = data[currentPosition++];
const velocity = data[currentPosition++];
if (velocity > 0) { // Note on with velocity > 0
track.notes.push({
type: 'noteOn',
noteNumber,
velocity,
tick: currentTick,
channel
});
} else { // Note on with velocity = 0 is equivalent to note off
track.notes.push({
type: 'noteOff',
noteNumber,
tick: currentTick,
channel
});
}
}
else if ((eventType & 0xF0) === 0x80) { // Note off
const channel = eventType & 0x0F;
const noteNumber = data[currentPosition++];
// noinspection JSUnusedLocalSymbols
const velocity = data[currentPosition++]; // Release velocity, read to advance the position but otherwise ignored
track.notes.push({
type: 'noteOff',
noteNumber,
tick: currentTick,
channel
});
}
else if (eventType === 0xF0 || eventType === 0xF7) { // SysEx events
const length = this._readVariableLengthValue(data, currentPosition);
currentPosition += length.bytesRead + length.value;
}
else if ((eventType & 0xF0) === 0xB0) { // Controller change
const channel = eventType & 0x0F;
const controllerNumber = data[currentPosition++];
const value = data[currentPosition++];
track.events.push({
type: 'controller',
controllerNumber,
value,
channel,
tick: currentTick
});
}
else if ((eventType & 0xF0) === 0xC0) { // Program change
const channel = eventType & 0x0F;
const programNumber = data[currentPosition++];
track.events.push({
type: 'programChange',
programNumber,
channel,
tick: currentTick
});
}
else if ((eventType & 0xF0) === 0xD0) { // Channel aftertouch
const channel = eventType & 0x0F;
const pressure = data[currentPosition++];
track.events.push({
type: 'channelAftertouch',
pressure,
channel,
tick: currentTick
});
}
else if ((eventType & 0xF0) === 0xE0) { // Pitch bend
const channel = eventType & 0x0F;
const lsb = data[currentPosition++];
const msb = data[currentPosition++];
const value = ((msb << 7) | lsb) - 8192; // Center value is 8192 (0x2000)
track.events.push({
type: 'pitchBend',
value,
channel,
tick: currentTick
});
}
else if ((eventType & 0xF0) === 0xA0) { // Note aftertouch (poly pressure)
const channel = eventType & 0x0F;
const noteNumber = data[currentPosition++];
const pressure = data[currentPosition++];
track.events.push({
type: 'noteAftertouch',
noteNumber,
pressure,
channel,
tick: currentTick
});
}
else {
// Unknown event type: warn and skip a single byte, hoping to resynchronize with the next event
console.warn(`Unknown event type: ${eventType.toString(16)} at position ${currentPosition - 1}`);
currentPosition++;
}
}
return track;
}
/**
* Extract metadata from the MIDI object
* @private
*/
_extractMetadata(midi) {
const metadata = {
title: null,
composer: null,
partNames: [],
format: midi.format,
ticksPerBeat: midi.ticksPerBeat
};
// Look for title, composer and part names in track names and text events
midi.tracks.forEach((track, index) => {
// First track with name is often the title
if (track.name && !metadata.title) {
metadata.title = track.name;
}
// Check for common metadata in text events
track.events.filter(e => e.type === 'text').forEach(event => {
const text = event.text.toLowerCase();
if ((text.includes('compos') || text.includes('by')) && !metadata.composer) {
metadata.composer = event.text;
}
});
// Store track name as potential part name
if (track.name) {
// Check if this track name matches a common choir part name
const trackNameLower = track.name.toLowerCase();
for (const partName of this.partNames) {
if (trackNameLower.includes(partName)) {
metadata.partNames.push({
index,
name: track.name
});
break;
}
}
}
});
// Apply metadata overrides
if (this.metadataOverrides.title !== undefined) {
metadata.title = this.metadataOverrides.title;
}
if (this.metadataOverrides.composer !== undefined) {
metadata.composer = this.metadataOverrides.composer;
}
if (this.metadataOverrides.arranger !== undefined) {
metadata.arranger = this.metadataOverrides.arranger;
}
if (this.metadataOverrides.copyright !== undefined) {
metadata.copyright = this.metadataOverrides.copyright;
}
this.parsedData.metadata = metadata;
}
/**
* Extract bar structure from MIDI data
* Generates bars with time signature and precise beat timing
* Format: { sig: [numerator, denominator], beats: [time1, time2, ...] }
* where beat times are in seconds, calculated from MIDI ticks and tempo changes
* @private
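* @example
* // One 4/4 bar at a steady 120 BPM: { sig: [4, 4], beats: [0, 0.5, 1.0, 1.5] }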
*/
_extractBarStructure(midi) {
// Get ticks per beat for calculations
const ticksPerBeat = midi.ticksPerBeat || 480;
// Collect all time signature and tempo changes
const allEvents = [];
midi.tracks.forEach(track => {
track.events.forEach(event => {
if (event.type === 'timeSignature' || event.type === 'tempo') {
allEvents.push(event);
}
});
});
// Sort events by tick
allEvents.sort((a, b) => a.tick - b.tick);
// Find total duration of the MIDI file (latest note end tick)
let totalDurationTicks = 0;
midi.tracks.forEach(track => {
if (track.notes) {
track.notes.forEach(note => {
if (note.type === 'noteOff' && note.tick > totalDurationTicks) {
totalDurationTicks = note.tick;
}
});
}
});
// If no notes found, use a reasonable default
if (totalDurationTicks === 0) {
totalDurationTicks = ticksPerBeat * 8; // Default to 8 beats
}
// Build bar structure based on time signature changes
const barStructure = [];
// Get all time signature changes with their positions
const timeSignatureChanges = allEvents
.filter(event => event.type === 'timeSignature')
.sort((a, b) => a.tick - b.tick);
// Start with the default (will be overridden if there is a time signature at tick 0)
let currentTimeSignature = { numerator: 4, denominator: 4 };
let currentTick = 0;
let timeSignatureIndex = 0;
while (currentTick < totalDurationTicks) {
// Update time signature for the current position
while (timeSignatureIndex < timeSignatureChanges.length &&
timeSignatureChanges[timeSignatureIndex].tick <= currentTick) {
currentTimeSignature = timeSignatureChanges[timeSignatureIndex];
timeSignatureIndex++;
}
// Anacrusis/pickup bars are not treated specially: every bar uses the standard
// length for the current time signature (numerator beats of 4/denominator quarter notes)
const barEndTick = currentTick + (ticksPerBeat * 4 * currentTimeSignature.numerator / currentTimeSignature.denominator);
// Generate precise beat times for each beat in the bar
const beatsInBar = currentTimeSignature.numerator;
const beats = [];
// Calculate ticks per beat based on the time signature denominator
// ticksPerBeat is always ticks per quarter note
// For 2/2 time: beat = half note = 2 * quarter notes = 2 * ticksPerBeat
// For 4/4 time: beat = quarter note = 1 * ticksPerBeat
// For 3/8 time: beat = eighth note = 0.5 * ticksPerBeat
const ticksPerBeatInThisTimeSignature = ticksPerBeat * (4 / currentTimeSignature.denominator);
for (let beat = 0; beat < beatsInBar; beat++) {
const beatStartTick = currentTick + (beat * ticksPerBeatInThisTimeSignature);
const beatTime = this._ticksToTime(beatStartTick, midi);
beats.push(beatTime);
}
barStructure.push({
sig: [currentTimeSignature.numerator, currentTimeSignature.denominator],
beats: beats
});
// Move to next bar
currentTick = barEndTick;
}
this.parsedData.barStructure = barStructure;
}
/**
* Extract notes for each voice part
* @private
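* @example
* // Illustrative parts override shape (track indices are hypothetical):
* // this.metadataOverrides.parts = {
* //   soprano: { trackIndex: 1, instrument: 52 },
* //   alto: { trackName: 'Alto' }
* // }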
*/
_extractParts(midi) {
const parts = {};
const ticksPerBeat = midi.ticksPerBeat;
// Check if parts override is provided
if (this.metadataOverrides.parts) {
// Use parts override mapping
for (const [partName, partConfig] of Object.entries(this.metadataOverrides.parts)) {
// Validate part configuration
if (!partConfig.trackIndex && partConfig.trackIndex !== 0 && !partConfig.trackName) {
// Skip parts without track identification
continue;
}
// Get track by trackIndex (priority) or trackName
let track = null;
let trackIndex = null;
if (partConfig.trackIndex !== undefined && partConfig.trackIndex !== null) {
// Use trackIndex
trackIndex = partConfig.trackIndex;
if (trackIndex >= 0 && trackIndex < midi.tracks.length) {
track = midi.tracks[trackIndex];
}
} else if (partConfig.trackName) {
// Fall back to trackName
const trackIdx = midi.tracks.findIndex(t => t.name === partConfig.trackName);
if (trackIdx !== -1) {
track = midi.tracks[trackIdx];
trackIndex = trackIdx;
}
}
if (!track) {
// Skip if track not found
continue;
}
// Extract part data from track
const partData = this._extractPartDataFromTrack(track, trackIndex, midi, ticksPerBeat);
// Apply instrument override if provided
if (partConfig.instrument !== undefined && partConfig.instrument !== null) {
partData.defaultInstrument = this._resolveInstrument(partConfig.instrument);
}
parts[partName] = partData;
}
} else {
// No parts override provided - auto-detect parts from track names
midi.tracks.forEach((track, trackIndex) => {
if (!track.notes.length) return; // Skip tracks with no notes
let partName = null;
// Try to identify part name from track name
if (track.name) {
const lowerName = track.name.toLowerCase();
for (const name of this.partNames) {
// For single letters, require exact match; for words, allow includes
if (name.length === 1) {
if (lowerName === name) {
partName = name;
break;
}
} else {
if (lowerName.includes(name)) {
partName = name;
break;
}
}
}
}
// If no recognized part name, use track name or track number
if (!partName) {
partName = track.name || `Track ${trackIndex + 1}`;
}
// Normalize part name
if (partName === 's') partName = 'soprano';
if (partName === 'a') partName = 'alto';
if (partName === 't') partName = 'tenor';
if (partName === 'b') partName = 'bass';
// Ensure unique part names by appending counter if name already exists
let uniquePartName = partName;
let counter = 2; // Start at 2 for first duplicate
while (parts[uniquePartName]) {
uniquePartName = `${partName} ${counter}`;
counter++;
}
partName = uniquePartName;
parts[partName] = this._extractPartDataFromTrack(track, trackIndex, midi, ticksPerBeat);
});
}
this.parsedData.parts = parts;
}
/**
* Extract part data from a MIDI track
* @private
*/
_extractPartDataFromTrack(track, trackIndex, midi, ticksPerBeat) {
// Process notes
const notes = [];
const noteOns = {};
track.notes.forEach(noteEvent => {
if (noteEvent.type === 'noteOn') {
// Store note start information
noteOns[noteEvent.noteNumber] = {
tick: noteEvent.tick,
velocity: noteEvent.velocity
};
} else if (noteEvent.type === 'noteOff') {
// If we have a matching note on, create a complete note
if (noteOns[noteEvent.noteNumber]) {
const start = noteOns[noteEvent.noteNumber];
const duration = noteEvent.tick - start.tick;
notes.push({
pitch: noteEvent.noteNumber,
name: this._midiNoteToName(noteEvent.noteNumber),
startTick: start.tick,
endTick: noteEvent.tick,
duration,
// Convert ticks to actual time considering tempo changes
startTime: this._ticksToTime(start.tick, midi),
endTime: this._ticksToTime(noteEvent.tick, midi),
velocity: start.velocity
});
// Remove from active notes
delete noteOns[noteEvent.noteNumber];
}
}
});
// Add any lyrics associated with this track
const lyrics = track.lyrics.map(lyric => ({
text: lyric.text,
tick: lyric.tick,
time: lyric.tick / ticksPerBeat // Time in beats (quarter notes), not seconds
}));
// Sort notes by start time
notes.sort((a, b) => a.startTick - b.startTick);
// Extract program changes for this track
const programChanges = track.events
.filter(event => event.type === 'programChange')
.map(event => ({
programNumber: event.programNumber,
tick: event.tick,
time: this._ticksToTime(event.tick, midi)
}))
.sort((a, b) => a.tick - b.tick);
// Determine default instrument (first program change or 0 for piano)
const defaultInstrument = programChanges.length > 0 ? programChanges[0].programNumber : 0;
return {
notes,
lyrics,
trackIndex,
programChanges,
defaultInstrument
};
}
/**
* Resolve instrument to MIDI program number
* @private
*/
_resolveInstrument(instrument) {
if (typeof instrument === 'number') {
return instrument;
}
// String instrument name - map it to a General MIDI program number
// using a small built-in table (no external lookup is performed here)
if (typeof instrument === 'string') {
// Simple mapping for common instruments (can be expanded)
const instrumentMap = {
'choir_aahs': 52,
'piano': 0,
'acoustic_grand_piano': 0,
'bright_acoustic_piano': 1,
'electric_grand_piano': 2,
'strings': 48,
'string_ensemble_1': 48,
'violin': 40,
'viola': 41,
'cello': 42,
'contrabass': 43
};
const normalized = instrument.toLowerCase().replace(/ /g, '_');
return instrumentMap[normalized] !== undefined ? instrumentMap[normalized] : 0;
}
return 0; // Default to piano
}
/**
* Convert a MIDI note number to note name
* @private
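* @example
* // 60 -> 'C4', 69 -> 'A4'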
*/
_midiNoteToName(noteNumber) {
const notes = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'];
const octave = Math.floor(noteNumber / 12) - 1;
const note = notes[noteNumber % 12];
return `${note}${octave}`;
}
/**
* Convert bytes to number
* @private
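* @example
* // Big-endian: [0x01, 0xE0] -> 480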
*/
_bytesToNumber(bytes) {
let value = 0;
for (let i = 0; i < bytes.length; i++) {
value = (value << 8) | bytes[i];
}
return value;
}
/**
* Convert bytes to string
* @private
*/
_bytesToString(bytes) {
return new TextDecoder().decode(bytes);
}
/**
* Read variable-length value from MIDI data
* @private
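* @example
* // [0x81, 0x48] -> { value: 200, bytesRead: 2 }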
*/
_readVariableLengthValue(data, position) {
let value = 0;
let byte;
let bytesRead = 0;
// noinspection JSBitwiseOperatorUsage
do {
byte = data[position + bytesRead++];
value = (value << 7) | (byte & 0x7F);
} while (byte & 0x80);
return { value, bytesRead };
}
/**
* Convert ticks to time in seconds considering tempo changes within bars
* @private
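* @example
* // With ticksPerBeat = 480 and a constant 120 BPM, tick 960 -> (960 / 480) * (60 / 120) = 1 second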
*/
_ticksToTime(targetTick, midi) {
const ticksPerBeat = midi.ticksPerBeat || 480;
// Get all tempo events sorted by tick
const tempoEvents = [];
midi.tracks.forEach(track => {
track.events.forEach(event => {
if (event.type === 'tempo') {
tempoEvents.push(event);
}
});
});
tempoEvents.sort((a, b) => a.tick - b.tick);
let totalTime = 0;
let currentTick = 0;
let currentTempo = 120; // Default tempo
// Process tempo changes up to the target tick
for (const tempoEvent of tempoEvents) {
if (tempoEvent.tick > targetTick) break;
// Add time for the segment from currentTick to this tempo change
if (tempoEvent.tick > currentTick) {
const segmentTicks = tempoEvent.tick - currentTick;
const segmentTime = (segmentTicks / ticksPerBeat) * (60 / currentTempo);
totalTime += segmentTime;
currentTick = tempoEvent.tick;
}
// Update current tempo
currentTempo = tempoEvent.bpm;
}
// Add time for remaining ticks at current tempo
if (targetTick > currentTick) {
const remainingTicks = targetTick - currentTick;
const remainingTime = (remainingTicks / ticksPerBeat) * (60 / currentTempo);
totalTime += remainingTime;
}
return totalTime;
}
}
// Export the class
export default MidiParser;
// Example usage:
// const parser = new MidiParser();
// const fileInput = document.getElementById('midiFileInput');
//
// fileInput.addEventListener('change', async (event) => {
// const file = event.target.files[0];
// const arrayBuffer = await file.arrayBuffer();
//
// try {
// const parsedData = await parser.parse(arrayBuffer);
// console.log('Parsed MIDI data:', parsedData);
// console.log('Parts:', parsedData.parts);
// console.log('Bar structure:', parsedData.barStructure);
// console.log('Metadata:', parsedData.metadata);
// } catch (error) {
// console.error('Error parsing MIDI file:', error);
// }
// });
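//
// Metadata overrides are optional. A minimal sketch (track indices and the
// instrument number below are illustrative, not taken from a real file):
//
// const parsedData = await parser.parse(arrayBuffer, {
//   title: 'My Piece',
//   composer: 'A. Composer',
//   parts: {
//     soprano: { trackIndex: 1, instrument: 52 }, // 52 = Choir Aahs in General MIDI
//     bass: { trackIndex: 4 }
//   }
// });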