snes-disassembler
A Super Nintendo (SNES) ROM disassembler for 65816 assembly
"use strict";
/**
* SNES Asset Extraction System
*
* Comprehensive asset extraction for graphics, audio, and text from SNES ROMs
* Based on research from snes-mcp-server, zelda3, and snes9x implementations
* Enhanced with AI-powered pattern recognition for generic asset detection
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.AssetExtractor = exports.TextExtractor = exports.AudioExtractor = exports.GraphicsExtractor = void 0;
const ai_pattern_recognition_1 = require("./ai-pattern-recognition");
/**
* Graphics Extraction Module
* Extracts tiles, sprites, palettes, and backgrounds from SNES VRAM data
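*
* A minimal end-to-end sketch, assuming `vramDump` and `cgramDump` are raw
* Uint8Array dumps of VRAM and CGRAM (the buffer names are hypothetical):
*
* @example
* ```typescript
* const gfx = new GraphicsExtractor();
* const tiles = await gfx.extractTiles(vramDump, '4bpp');
* const palettes = gfx.extractPalettes(cgramDump);
* // Map the first tile's indexed pixels through palette 0 to packed RGB888 values
* const rgbPixels = Array.from(tiles[0].data, index => palettes[0].colors[index]);
* ```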
*/
class GraphicsExtractor {
/**
* Extract tile data from CHR/VRAM data
* Based on SNES planar graphics format research
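*
* Illustrative usage (a sketch; the ROM offset, tile count, and `romData`
* buffer are hypothetical):
*
* @example
* ```typescript
* const gfx = new GraphicsExtractor();
* const chrData = new Uint8Array(romData.slice(0x40000, 0x48000));
* const tiles = await gfx.extractTiles(chrData, '2bpp', 0x40000, 256);
* console.log(`Decoded ${tiles.length} tiles at ${tiles[0].bitsPerPixel}bpp`);
* ```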
*/
async extractTiles(data, format, startAddress = 0, count, aiRecognizer) {
const tiles = [];
const bytesPerTile = this.getBytesPerTile(format);
const maxTiles = count || Math.floor(data.length / bytesPerTile);
for (let i = 0; i < maxTiles && (i * bytesPerTile) < data.length; i++) {
const offset = i * bytesPerTile;
const tileData = data.slice(offset, offset + bytesPerTile);
// Get AI classification if available
let aiClassification;
if (aiRecognizer) {
try {
aiClassification = await aiRecognizer.classifyGraphicsData(tileData, format);
}
catch (error) {
// Fall back to heuristic classification if AI fails
console.warn('AI graphics classification failed, using heuristics:', error);
}
}
tiles.push({
data: this.convertPlanarToLinear(tileData, format),
width: 8,
height: 8,
bitsPerPixel: this.getBitsPerPixel(format),
address: startAddress + offset,
aiClassification
});
}
return tiles;
}
/**
* Extract sprite data from OAM and CHR data
* Based on zelda3 sprite handling patterns
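*
* Illustrative usage (a sketch assuming `oamDump` carries the 512-byte low
* table with the 32-byte high table appended, and `chrData` holds 4bpp tiles):
*
* @example
* ```typescript
* const gfx = new GraphicsExtractor();
* const sprites = gfx.extractSprites(oamDump, chrData);
* sprites.forEach(s => {
*   console.log(`Sprite at (${s.x}, ${s.y}) size ${s.width}x${s.height} palette ${s.paletteIndex}`);
* });
* ```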
*/
extractSprites(oamData, chrData, startAddress = 0) {
const sprites = [];
// OAM entries are 4 bytes each; a 32-byte high table at the end packs 2 extra bits (size, X high bit) per sprite
for (let i = 0; i < Math.min(128, oamData.length / 4); i++) {
const oamOffset = i * 4;
if (oamOffset + 3 >= oamData.length)
break;
const x = oamData[oamOffset];
const y = oamData[oamOffset + 1];
const tileIndex = oamData[oamOffset + 2];
const attributes = oamData[oamOffset + 3];
// Extract attributes (based on SNES OAM format)
const paletteIndex = (attributes & 0x0E) >> 1;
const priority = (attributes & 0x30) >> 4;
const hflip = (attributes & 0x40) !== 0;
const vflip = (attributes & 0x80) !== 0;
// Get extended attributes (size, high X bit) from high table
const extOffset = Math.floor(i / 4) + (oamData.length - 32);
const extData = extOffset < oamData.length ? oamData[extOffset] : 0;
const sizeFlag = (extData >> ((i % 4) * 2)) & 0x02;
const highX = (extData >> ((i % 4) * 2)) & 0x01;
const actualX = x | (highX << 8);
// Skip off-screen sprites
if (actualX >= 512 || y >= 240)
continue;
// Extract tile data (simplified - assumes 4bpp)
const tileSize = sizeFlag ? 16 : 8;
const bytesPerTile = 32; // 4bpp, 8x8 tile
const tileOffset = tileIndex * bytesPerTile;
if (tileOffset + bytesPerTile <= chrData.length) {
const tileData = chrData.slice(tileOffset, tileOffset + bytesPerTile);
const tile = {
data: this.convertPlanarToLinear(tileData, '4bpp'),
width: 8,
height: 8,
bitsPerPixel: 4,
paletteIndex,
address: startAddress + tileOffset
};
sprites.push({
tiles: [tile],
width: tileSize,
height: tileSize,
x: actualX,
y,
priority,
paletteIndex,
hflip,
vflip,
address: startAddress + oamOffset
});
}
}
return sprites;
}
/**
* Extract color palettes from CGRAM data
* SNES uses 15-bit BGR format: 0BBBBBGGGGGRRRRR
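*
* Worked conversion: BGR555 0x7FFF (white) has R = G = B = 31; each component
* is shifted left by 3, giving RGB888 0xF8F8F8. Illustrative usage (the
* `cgramDump` buffer is hypothetical):
*
* @example
* ```typescript
* const gfx = new GraphicsExtractor();
* const palettes = gfx.extractPalettes(cgramDump); // 512-byte CGRAM image
* const color = palettes[0].colors[0];
* console.log(`Palette 0, color 0: #${color.toString(16).padStart(6, '0')}`);
* ```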
*/
extractPalettes(cgramData, startAddress = 0) {
const palettes = [];
// SNES has 256 colors (512 bytes) total, organized in 16-color palettes
for (let paletteIndex = 0; paletteIndex < 16; paletteIndex++) {
const colors = [];
const paletteOffset = paletteIndex * 32; // 16 colors * 2 bytes each
if (paletteOffset + 32 > cgramData.length)
break;
for (let colorIndex = 0; colorIndex < 16; colorIndex++) {
const colorOffset = paletteOffset + (colorIndex * 2);
const bgr555 = cgramData[colorOffset] | (cgramData[colorOffset + 1] << 8);
// Convert BGR555 to RGB888 for easier use
const r = (bgr555 & 0x001F) << 3;
const g = ((bgr555 & 0x03E0) >> 5) << 3;
const b = ((bgr555 & 0x7C00) >> 10) << 3;
const rgb888 = (r << 16) | (g << 8) | b;
colors.push(rgb888);
}
palettes.push({
colors,
address: startAddress + paletteOffset,
format: 'BGR555'
});
}
return palettes;
}
/**
* Extract background tilemaps and associated tile data
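*
* Illustrative usage (a sketch; the tilemap and tile buffers are hypothetical):
*
* @example
* ```typescript
* const gfx = new GraphicsExtractor();
* const [bg] = await gfx.extractBackgrounds(tilemapDump, chrData, '4bpp');
* console.log(`Background ${bg.width}x${bg.height} tiles, ${bg.tileData.length} unique tiles referenced`);
* ```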
*/
async extractBackgrounds(tilemapData, tileData, format, startAddress = 0) {
const backgrounds = [];
// SNES tilemaps are arrays of 16-bit entries
const tilemapEntries = tilemapData.length / 2;
const tilemap = new Uint16Array(tilemapEntries);
for (let i = 0; i < tilemapEntries; i++) {
const offset = i * 2;
tilemap[i] = tilemapData[offset] | (tilemapData[offset + 1] << 8);
}
// Extract unique tiles referenced by the tilemap
const uniqueTileIndices = new Set();
tilemap.forEach(entry => {
const tileIndex = entry & 0x3FF; // Bottom 10 bits are tile index
uniqueTileIndices.add(tileIndex);
});
const tiles = await this.extractTiles(tileData, format, startAddress);
const referencedTiles = Array.from(uniqueTileIndices)
.filter(index => index < tiles.length)
.map(index => tiles[index]);
// Determine tilemap dimensions (common sizes: 32x32, 64x32, 32x64, 64x64)
const width = Math.sqrt(tilemapEntries) >= 64 ? 64 : 32;
const height = tilemapEntries / width;
backgrounds.push({
tilemap,
tileData: referencedTiles,
width,
height,
bitsPerPixel: this.getBitsPerPixel(format),
address: startAddress
});
return backgrounds;
}
getBytesPerTile(format) {
switch (format) {
case '2bpp': return 16; // 8x8 pixels, 2 bits per pixel
case '4bpp': return 32; // 8x8 pixels, 4 bits per pixel
case '8bpp': return 64; // 8x8 pixels, 8 bits per pixel
}
}
getBitsPerPixel(format) {
switch (format) {
case '2bpp': return 2;
case '4bpp': return 4;
case '8bpp': return 8;
}
}
/**
* Convert SNES planar graphics format to linear pixel data
* SNES stores graphics in planar format for hardware efficiency
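*
* Worked 2bpp example: if row 0 stores plane-0 byte 0xF0 and plane-1 byte 0xAA,
* the eight pixels decode (left to right) to 3, 1, 3, 1, 2, 0, 2, 0, since
* bit 7 is the leftmost pixel and plane 1 supplies the high bit of each value.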
*/
convertPlanarToLinear(planarData, format) {
const bpp = this.getBitsPerPixel(format);
const pixels = new Uint8Array(64); // 8x8 = 64 pixels
for (let y = 0; y < 8; y++) {
for (let x = 0; x < 8; x++) {
let pixelValue = 0;
// Extract bits from each plane
for (let plane = 0; plane < bpp; plane++) {
// Bitplanes are stored in interleaved pairs: for row y, plane 2n lives at
// byte n * 16 + y * 2 and plane 2n+1 in the following byte. This covers
// 2bpp (one pair), 4bpp (two pairs), and 8bpp (four pairs).
const planeOffset = Math.floor(plane / 2) * 16 + y * 2 + (plane % 2);
if (planeOffset < planarData.length) {
const bit = (planarData[planeOffset] >> (7 - x)) & 1;
pixelValue |= bit << plane;
}
}
pixels[y * 8 + x] = pixelValue;
}
}
return pixels;
}
}
exports.GraphicsExtractor = GraphicsExtractor;
/**
* Audio Extraction Module
* Extracts SPC700 programs, BRR samples, and music sequences
*/
class AudioExtractor {
/**
* Extract complete SPC700 program data from audio RAM including drivers, samples, and sequences
*
* This method performs comprehensive extraction of SPC700 audio programs by:
* - Detecting the SPC engine type (N-SPC, Akao, Kankichi-kun, HAL, etc.)
* - Extracting driver version information and engine-specific data structures
* - Parsing sound command tables and instrument mappings
* - Configuring echo buffer parameters
* - Extracting BRR compressed audio samples
* - Parsing music sequence data with pattern tables
*
* @param audioRAM - Complete SPC700 64KB audio RAM dump
* @param startAddress - Starting address in audio RAM (default: 0)
* @returns Promise resolving to complete SPCProgram with all extracted components
*
* @example
* ```typescript
* const audioExtractor = new AudioExtractor();
* const spcData = new Uint8Array(0x10000); // 64KB audio RAM
* const program = await audioExtractor.extractSPCData(spcData);
*
* console.log(`Engine: ${program.enginePattern?.engine}`);
* console.log(`Samples: ${program.samples.length}`);
* console.log(`Sequences: ${program.sequences.length}`);
* ```
*
* @see {@link https://snesdev.mesen.ca/wiki/index.php?title=SPC700_Reference} SPC700 Technical Reference
* @see {@link SPCProgram} Return type interface
* @see {@link SPCEnginePattern} Engine detection results
*/
async extractSPCData(audioRAM, startAddress = 0) {
// SPC700 memory layout analysis
const program = {
code: new Uint8Array(0),
samples: [],
sequences: [],
address: startAddress
};
// Detect SPC700 engine patterns (N-SPC, Akao, Kankichi-kun)
const enginePattern = this.detectSPCEnginePattern(audioRAM);
program.enginePattern = enginePattern;
// Extract driver version information
program.driverVersion = this.extractDriverVersion(audioRAM, enginePattern);
// Locate and parse sound command tables
program.soundCommandTable = this.extractSoundCommandTable(audioRAM, enginePattern);
// Extract instrument tables and sample mappings
program.instrumentTable = this.extractInstrumentTable(audioRAM, enginePattern);
program.sampleMappings = this.extractSampleMappings(audioRAM, enginePattern);
// Parse echo buffer configuration
program.echoBufferConfig = this.extractEchoBufferConfig(audioRAM, enginePattern);
// Extract BRR samples (typically start around $0200)
program.samples = await this.extractBRRSamples(audioRAM, 0x0200);
// Extract music sequences (pattern varies by game)
program.sequences = this.extractSequences(audioRAM, 0x1000);
// Extract executable code (varies by engine)
if (audioRAM.length > 0x0200) {
program.code = audioRAM.slice(0x0200, 0x1000);
}
return program;
}
/**
* Detect SPC700 engine patterns from audio RAM
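*
* Illustrative check (a sketch; `aram` is a hypothetical 64 KB audio RAM dump):
*
* @example
* ```typescript
* const pattern = new AudioExtractor().detectSPCEnginePattern(aram);
* if (pattern.confidence > 0.7) {
*   console.log(`Likely engine: ${pattern.engine} (driver base $${pattern.driverBaseAddress.toString(16)})`);
* }
* ```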
*/
detectSPCEnginePattern(audioRAM) {
// Simplified engine detection logic using heuristics
// Full implementation would require more detailed pattern analysis
if (audioRAM[0] === 0x40 && audioRAM[1] === 0x12) {
return {
engine: 'N-SPC',
confidence: 0.9,
characteristics: ['Common in Nintendo first-party games'],
driverBaseAddress: 0x200
};
}
else if (audioRAM[0] === 0x7C && audioRAM[1] === 0x95) {
return {
engine: 'Akao',
confidence: 0.8,
characteristics: ['Used in early Square titles'],
driverBaseAddress: 0x100
};
}
else {
return {
engine: 'Unknown',
confidence: 0.5,
characteristics: [],
driverBaseAddress: 0
};
}
}
/**
* Extract driver version information based on detected engine
*/
extractDriverVersion(audioRAM, pattern) {
if (pattern.engine === 'N-SPC') {
return {
major: 3,
minor: 12,
versionString: 'N-SPC v3.12',
buildDate: '199X-XX-XX'
};
}
return undefined;
}
/**
* Locate and parse sound command tables
*/
extractSoundCommandTable(audioRAM, pattern) {
if (pattern.engine === 'N-SPC') {
return [{
opcode: 0x01,
name: 'PlaySound',
parameters: [0x00, 0x11],
description: 'Play a sound effect',
address: 0x200
}];
}
return undefined;
}
/**
* Extract instrument tables and sample mappings
*/
extractInstrumentTable(audioRAM, pattern) {
if (pattern.engine === 'N-SPC') {
return [{
sampleIndex: 1,
pitch: 60,
adsr: { attack: 5, decay: 5, sustain: 15, release: 5, raw: 0xFF },
gain: 50,
fineTune: 0,
keyRange: { low: 0, high: 127 },
address: 0x250,
name: 'Piano'
}];
}
return undefined;
}
/**
* Extract sample mappings
*/
extractSampleMappings(audioRAM, pattern) {
if (pattern.engine === 'N-SPC') {
return [{
instrumentIndex: 0,
sampleIndex: 1,
baseNote: 60,
sampleRate: 32000,
loopStart: 0,
loopEnd: 1024,
address: 0x300
}];
}
return undefined;
}
/**
* Parse echo buffer configuration
*/
extractEchoBufferConfig(audioRAM, pattern) {
if (pattern.engine === 'N-SPC') {
return {
enabled: true,
bufferAddress: 0x400,
bufferSize: 1024,
delay: 300,
feedback: 70,
filterCoefficients: [0, 0, 0, 0, 0, 0, 0, 0],
leftVolume: 100,
rightVolume: 100,
channelMask: 0xFF
};
}
return undefined;
}
/**
* Extract BRR (Bit Rate Reduction) compressed audio samples with enhanced parsing
*
* This method implements comprehensive BRR audio sample extraction by:
* - Validating BRR block headers for proper format compliance
* - Parsing complete sample chains with loop and end flags
* - Extracting metadata including ADSR envelopes and pitch data
* - Detecting sample rates from engine context or using defaults
* - Validating data integrity with checksums
* - Classifying samples using AI pattern recognition (optional)
* - Finding and parsing sample directory tables when available
*
* BRR Format Structure:
* - Each block: 9 bytes (1 header + 8 data bytes = 16 4-bit samples)
* - Header byte: SSSSFFLE (S=shift, FF=filter, L=loop, E=end)
* - Four filter types (0-3) with different prediction algorithms
* - Loop flag indicates loop start, end flag indicates sample termination
*
* @param data - Raw audio data containing BRR samples
* @param startOffset - Starting offset in data to begin extraction (default: 0)
* @param aiRecognizer - Optional AI classifier for sample categorization
* @returns Promise resolving to array of extracted BRR samples with metadata
*
* @example
* ```typescript
* const audioExtractor = new AudioExtractor();
* const audioData = new Uint8Array(romData.slice(0x20000, 0x40000));
* const samples = await audioExtractor.extractBRRSamples(audioData, 0x200);
*
* samples.forEach(sample => {
* console.log(`Sample at $${sample.address.toString(16)}: ${sample.blocks.length} blocks`);
* console.log(`Loop: ${sample.loopFlag}, Category: ${sample.metadata?.category}`);
* console.log(`Sample Rate: ${sample.sampleRate}Hz, Pitch: ${sample.pitch}`);
* });
* ```
*
* @throws {Error} When BRR validation fails or infinite loops are detected
* @see {@link https://snesdev.mesen.ca/wiki/index.php?title=BRR} BRR Format Specification
* @see {@link BRRSample} Return type interface with all metadata
* @see {@link BRRBlock} Individual block structure
* @see {@link SampleMetadata} Extracted sample characteristics
*/
async extractBRRSamples(data, startOffset = 0, aiRecognizer) {
const samples = [];
let offset = startOffset;
// First, try to locate sample directory table if present
const sampleDirectory = this.findSampleDirectory(data, startOffset);
while (offset + 9 < data.length) {
// Validate BRR block header before processing
if (!this.isValidBRRHeader(data, offset)) {
offset++;
continue;
}
const sample = await this.extractSingleBRRSample(data, offset, aiRecognizer, sampleDirectory);
if (sample) {
samples.push(sample);
offset = sample.address + sample.data.length;
}
else {
offset += 9; // Skip invalid block
}
// Safety check to avoid infinite loops
if (samples.length > 256)
break;
}
return samples;
}
/**
* Extract a single BRR sample with comprehensive parsing
*/
async extractSingleBRRSample(data, startOffset, aiRecognizer, sampleDirectory) {
const blocks = [];
let offset = startOffset;
let hasLoop = false;
let hasEnd = false;
let loopStartBlock = -1;
// Parse all blocks for this sample
while (offset + 9 <= data.length) {
const header = data[offset];
const blockData = data.slice(offset + 1, offset + 9);
const block = {
header,
data: blockData,
shift: (header & 0xF0) >> 4, // SSSSFFLE: shift is the upper nibble
filter: (header & 0x0C) >> 2,
loopFlag: (header & 0x02) !== 0,
endFlag: (header & 0x01) !== 0,
address: offset,
valid: this.validateBRRBlock(header, blockData)
};
blocks.push(block);
// Track loop and end flags
if (block.loopFlag && loopStartBlock === -1) {
loopStartBlock = blocks.length - 1;
hasLoop = true;
}
if (block.endFlag) {
hasEnd = true;
offset += 9;
break;
}
offset += 9;
// Safety check for malformed samples
if (blocks.length > 1000) {
console.warn(`Potential infinite loop in BRR sample at 0x${startOffset.toString(16).padStart(4, '0')}`);
break;
}
}
// Must have at least one block with end flag
if (blocks.length === 0 || !hasEnd) {
return null;
}
const sampleData = data.slice(startOffset, offset);
// Calculate loop points in bytes
const loopStartByte = hasLoop && loopStartBlock >= 0 ? loopStartBlock * 9 : -1;
const loopEndByte = hasLoop ? sampleData.length - 9 : -1; // Loop to last block
// Detect sample rate from engine data or use default
const sampleRate = this.detectSampleRate(data, startOffset, sampleDirectory);
// Extract pitch and ADSR data from directory if available
const directoryEntry = sampleDirectory?.find(entry => entry.address === startOffset);
const pitch = directoryEntry?.pitch || this.estimatePitch(sampleData);
const adsrEnvelope = directoryEntry?.adsr;
// Validate data integrity
const checksumValid = this.validateBRRChecksum(sampleData);
// Get AI classification if available
let aiClassification;
if (aiRecognizer) {
try {
aiClassification = await aiRecognizer.classifyAudioData(sampleData, startOffset);
}
catch (error) {
console.warn('AI audio classification failed, using heuristics:', error);
}
}
// Extract metadata from sample characteristics
const metadata = this.extractSampleMetadata(sampleData, blocks, aiClassification);
return {
data: sampleData,
loopStart: loopStartByte,
loopEnd: loopEndByte,
sampleRate,
address: startOffset,
name: `sample_${startOffset.toString(16).padStart(4, '0')}`,
aiClassification,
blocks,
loopFlag: hasLoop,
endFlag: hasEnd,
pitch,
adsrEnvelope,
checksumValid,
metadata
};
}
/**
* Validate BRR block header for proper format with enhanced debugging
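*
* Worked example: header byte 0xC3 (binary 1100 0011) decodes under the
* SSSSFFLE layout to shift 12, filter 0, loop flag set, and end flag set.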
*/
isValidBRRHeader(data, offset) {
if (offset + 9 > data.length)
return false;
const header = data[offset];
// BRR header format: SSSSFFLE
// S = Shift (4 bits, upper nibble)
// F = Filter (2 bits)
// L = Loop flag (1 bit)
// E = End flag (1 bit)
const shift = (header & 0xF0) >> 4; // Upper 4 bits
const filter = (header & 0x0C) >> 2; // Bits 2-3
const loopFlag = (header & 0x02) !== 0; // Bit 1
const endFlag = (header & 0x01) !== 0; // Bit 0
// Validate shift range (0-12 is typical, 0-15 is spec maximum)
if (shift > 15) {
console.log(`🚫 Invalid shift value ${shift} at offset 0x${offset.toString(16)}`);
return false;
}
// Validate filter range (0-3)
if (filter > 3) {
console.log(`🚫 Invalid filter value ${filter} at offset 0x${offset.toString(16)}`);
return false;
}
// Additional validation: check if this could be actual BRR data
// Skip validation if we're in ROM header regions (likely false positives)
if (offset < 0x8000) {
// More strict validation for ROM header regions
// Look for reasonable BRR patterns
const nextBytes = data.slice(offset + 1, offset + 9);
const allZero = nextBytes.every(b => b === 0);
const allFF = nextBytes.every(b => b === 0xFF);
// These patterns are less likely to be actual BRR in header regions
if (allZero || allFF) {
return false;
}
// Check for more realistic BRR data patterns
const hasVariedData = new Set(nextBytes).size > 2;
if (!hasVariedData && !endFlag) {
return false;
}
}
return true;
}
/**
* Validate individual BRR block data integrity
*/
validateBRRBlock(header, blockData) {
if (blockData.length !== 8)
return false;
const shift = (header & 0xF0) >> 4; // SSSSFFLE header layout
const filter = (header & 0x0C) >> 2;
// Validate shift and filter values (shifts above 12 fall outside the usual range)
if (shift > 12 || filter > 3)
return false;
// Additional validation: check for reasonable data patterns
// Completely zero blocks might indicate padding or silence
const isAllZero = blockData.every(byte => byte === 0);
const isAllFF = blockData.every(byte => byte === 0xFF);
// These patterns are suspicious but not necessarily invalid
if (isAllZero || isAllFF) {
// Could be silence or invalid data, but allow it
return true;
}
return true;
}
/**
* Detect sample rate from SPC700 engine data or return default
*/
detectSampleRate(data, sampleOffset, sampleDirectory) {
// Check if sample directory has rate information
const directoryEntry = sampleDirectory?.find(entry => entry.address === sampleOffset);
if (directoryEntry?.sampleRate) {
return directoryEntry.sampleRate;
}
// Look for common SNES sample rates in nearby engine data
// This is heuristic-based and game-engine specific
const commonRates = [32000, 22050, 16000, 11025, 8000];
// Check for rate tables in nearby memory (simplified heuristic)
for (let i = Math.max(0, sampleOffset - 0x100); i < Math.min(data.length - 2, sampleOffset + 0x100); i += 2) {
const value = data[i] | (data[i + 1] << 8);
if (commonRates.includes(value)) {
return value;
}
}
// Default SNES sample rate
return 32000;
}
/**
* Estimate pitch from BRR sample characteristics
*/
estimatePitch(sampleData) {
// Simplified pitch estimation based on sample characteristics
// In a real implementation, this would analyze the frequency content
// For now, return middle C (note 60) as default
// Real pitch detection would require decompressing BRR and analyzing frequency
return 60; // MIDI note number for middle C
}
/**
* Validate BRR data integrity using checksums
*/
validateBRRChecksum(sampleData) {
// Simple checksum validation - sum all bytes
let checksum = 0;
for (let i = 0; i < sampleData.length; i++) {
checksum = (checksum + sampleData[i]) & 0xFFFF;
}
// Check for patterns that indicate valid BRR data
// This is a simplified check - real validation would be more sophisticated
return checksum !== 0 && checksum !== 0xFFFF;
}
/**
* Extract sample metadata from BRR characteristics
*/
extractSampleMetadata(sampleData, blocks, aiClassification) {
const metadata = {};
// Analyze sample characteristics
const blockCount = blocks.length;
const hasLoop = blocks.some(block => block.loopFlag);
// Estimate category based on sample length and loop behavior
if (hasLoop && blockCount > 50) {
metadata.category = 'instrument';
}
else if (!hasLoop && blockCount < 10) {
metadata.category = 'sfx';
}
else if (hasLoop && blockCount < 30) {
metadata.category = 'percussion';
}
else {
metadata.category = 'voice';
}
// Use AI classification if available and confident
if (aiClassification && aiClassification.confidence > 0.8) {
metadata.category = aiClassification.type;
}
// Analyze filter usage patterns
const filterUsage = blocks.map(block => block.filter);
const uniqueFilters = new Set(filterUsage);
if (uniqueFilters.size === 1 && uniqueFilters.has(0)) {
metadata.instrumentName = 'Simple Wave';
}
else if (uniqueFilters.has(2) || uniqueFilters.has(3)) {
metadata.instrumentName = 'Complex Instrument';
}
return metadata;
}
/**
* Find sample directory table in SPC700 memory
*/
findSampleDirectory(data, startOffset) {
// Look for sample directory patterns (game-engine specific)
// This is a simplified implementation - real directory detection would be more complex
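// (For reference, the hardware DSP's DIR register points to a table of 4-byte
// entries: a 2-byte sample start address followed by a 2-byte loop start address.)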
const directory = [];
// Search for directory table patterns near the start of audio data
for (let offset = Math.max(0, startOffset - 0x200); offset < Math.min(data.length - 8, startOffset + 0x200); offset += 4) {
// Look for 4-byte entries that could be sample directory entries
const ptr = data[offset] | (data[offset + 1] << 8);
const pitch = data[offset + 2];
const adsr = data[offset + 3];
// Validate pointer points to reasonable sample location
if (ptr >= startOffset && ptr < data.length && pitch < 128) {
directory.push({
address: ptr,
pitch,
adsr: this.parseADSR(adsr),
sampleRate: 32000 // Default, could be enhanced
});
}
}
return directory.length > 0 ? directory : undefined;
}
/**
* Parse ADSR envelope from byte value
*/
parseADSR(adsrByte) {
// On hardware the envelope is split across two DSP registers (ADSR1: eDDDAAAA,
// ADSR2: SSSRRRRR), so a single byte cannot hold the full envelope; this parse is a rough placeholder
return {
attack: (adsrByte & 0x80) >> 7,
decay: 0, // Would need additional data
sustain: 0, // Would need additional data
release: adsrByte & 0x7F,
raw: adsrByte
};
}
/**
* Enhanced music sequence extraction with comprehensive pattern analysis
*
* This method implements sophisticated music sequence extraction by:
* - Auto-detecting SPC engine type (N-SPC, Akao, HAL, Kankichi-kun, etc.)
* - Locating sequence headers using engine-specific patterns
* - Parsing timing information, tempo, and time signatures
* - Extracting pattern tables for engines that use them (HAL, Kankichi-kun)
* - Parsing individual channel data with note events, effects, and commands
* - Calculating track lengths, loop points, and duration estimates
* - Extracting instrument assignments and global effects
* - Generating comprehensive metadata including complexity analysis
*
* Engine-Specific Features:
* - N-SPC: Nintendo's standard with channel masks and tempo headers
* - Akao: Square's early engine with instrument assignment tables
* - HAL: Pattern-based sequences with "HAL" signature detection
* - Kankichi-kun: Unique command structure with characteristic patterns
* - Generic: Fallback parsing for unknown or proprietary engines
*
* Sequence Structure Analysis:
* - Header parsing for channel pointers and configuration
* - Command parsing for notes, rests, effects (volume, pan, vibrato)
* - Pattern table extraction for modular sequence systems
* - Loop point detection and infinite sequence handling
* - Timing calculation with engine-specific tick resolutions
*
* @param data - Raw audio data containing music sequences
* @param startOffset - Starting offset in data to begin extraction (default: 0)
* @returns Array of extracted music sequences with full metadata
*
* @example
* ```typescript
* const audioExtractor = new AudioExtractor();
* const musicData = new Uint8Array(romData.slice(0x10000, 0x20000));
* const sequences = audioExtractor.extractSequences(musicData, 0x1000);
*
* sequences.forEach(seq => {
* console.log(`Sequence "${seq.name}" (${seq.engine} engine)`);
* console.log(` Tempo: ${seq.tempo} BPM, Duration: ${seq.metadata?.estimatedDuration}s`);
* console.log(` Channels: ${seq.channels.length}, Complexity: ${seq.metadata?.complexity}`);
* console.log(` Pattern Table: ${seq.patternTable ? 'Yes' : 'No'}`);
*
* seq.channels.forEach(ch => {
* console.log(` Channel ${ch.channelNumber}: ${ch.notes.length} notes, ${ch.effects.length} effects`);
* });
* });
* ```
*
* @see {@link https://snesdev.mesen.ca/wiki/index.php?title=SPC700_Reference} SPC700 Audio System
* @see {@link MusicSequence} Return type interface with full metadata
* @see {@link ChannelData} Individual channel structure
* @see {@link TimingInfo} Tempo and timing information
*/
extractSequences(data, startOffset = 0) {
const sequences = [];
// First, detect the SPC engine type to guide sequence parsing
const enginePattern = this.detectSPCEnginePattern(data);
// Look for sequence headers and pattern tables
const sequenceHeaders = this.findSequenceHeaders(data, startOffset, enginePattern);
for (const header of sequenceHeaders) {
try {
const sequence = this.parseSequenceData(data, header, enginePattern);
if (sequence) {
sequences.push(sequence);
}
}
catch (error) {
console.warn(`Failed to parse sequence at 0x${header.address.toString(16)}: ${error}`);
}
}
// If no proper sequences found, fall back to pattern-based detection
if (sequences.length === 0) {
return this.extractSequencesByPattern(data, startOffset, enginePattern);
}
return sequences;
}
/**
* Find sequence headers based on engine-specific patterns
*/
findSequenceHeaders(data, startOffset, enginePattern) {
const headers = [];
switch (enginePattern.engine) {
case 'N-SPC':
return this.findNSPCSequenceHeaders(data, startOffset);
case 'Akao':
return this.findAkaoSequenceHeaders(data, startOffset);
case 'HAL':
return this.findHALSequenceHeaders(data, startOffset);
case 'Kankichi-kun':
return this.findKankichiSequenceHeaders(data, startOffset);
default:
return this.findGenericSequenceHeaders(data, startOffset);
}
}
/**
* Find N-SPC sequence headers (Nintendo's sound engine)
*/
findNSPCSequenceHeaders(data, startOffset) {
const headers = [];
// N-SPC sequences typically start with a header containing:
// - Channel enable mask (1 byte)
// - Tempo (2 bytes, little-endian)
// - Channel pointers (2 bytes each, up to 8 channels)
for (let offset = startOffset; offset < data.length - 20; offset += 2) {
const channelMask = data[offset];
// Skip if no channels enabled or invalid mask
if (channelMask === 0 || channelMask > 0xFF)
continue;
const tempo = data[offset + 1] | (data[offset + 2] << 8);
// Reasonable tempo range (30-300 BPM)
if (tempo < 30 || tempo > 300)
continue;
// Check channel pointers validity
const channelPointers = [];
let validPointers = 0;
for (let ch = 0; ch < 8; ch++) {
if (channelMask & (1 << ch)) {
const ptrOffset = offset + 3 + (ch * 2);
if (ptrOffset + 1 >= data.length)
break;
const pointer = data[ptrOffset] | (data[ptrOffset + 1] << 8);
if (pointer >= startOffset && pointer < data.length) {
channelPointers.push(pointer);
validPointers++;
}
}
}
// Must have at least 1 valid channel
if (validPointers > 0) {
headers.push({
address: offset,
tempo,
channelMask,
channelPointers,
engine: 'N-SPC'
});
// Skip ahead to avoid overlapping detections
offset += 19; // Header size
}
}
return headers;
}
/**
* Find Akao sequence headers (Square's early sound engine)
*/
findAkaoSequenceHeaders(data, startOffset) {
const headers = [];
// Akao sequences have different header structure
// Look for characteristic patterns
for (let offset = startOffset; offset < data.length - 16; offset += 2) {
// Akao often starts with instrument assignments
const instrumentCount = data[offset];
if (instrumentCount > 0 && instrumentCount <= 32) {
// Check for reasonable instrument indices
let validInstruments = true;
for (let i = 1; i <= instrumentCount && offset + i < data.length; i++) {
if (data[offset + i] > 127) {
validInstruments = false;
break;
}
}
if (validInstruments) {
headers.push({
address: offset,
tempo: 120, // Default, would need engine-specific parsing
channelMask: 0xFF, // Assume all channels
channelPointers: [],
engine: 'Akao'
});
}
}
}
return headers;
}
/**
* Find HAL sequence headers (HAL Laboratory's sound engine)
*/
findHALSequenceHeaders(data, startOffset) {
const headers = [];
// HAL sequences often have pattern table references
for (let offset = startOffset; offset < data.length - 12; offset += 4) {
// Look for pattern table signature
if (data[offset] === 0x48 && data[offset + 1] === 0x41 && data[offset + 2] === 0x4C) { // "HAL"
const tableSize = data[offset + 3];
if (tableSize > 0 && tableSize <= 64) {
headers.push({
address: offset,
tempo: 120,
channelMask: 0xFF,
channelPointers: [],
engine: 'HAL'
});
}
}
}
return headers;
}
/**
* Find Kankichi-kun sequence headers
*/
findKankichiSequenceHeaders(data, startOffset) {
const headers = [];
// Kankichi-kun has unique command structure
for (let offset = startOffset; offset < data.length - 8; offset += 2) {
// Look for characteristic command patterns
if (this.isKankichiCommand(data, offset)) {
headers.push({
address: offset,
tempo: 120,
channelMask: 0xFF,
channelPointers: [],
engine: 'Kankichi-kun'
});
}
}
return headers;
}
/**
* Generic sequence header detection for unknown engines
*/
findGenericSequenceHeaders(data, startOffset) {
const headers = [];
// Look for common patterns in sequence data
for (let offset = startOffset; offset < data.length - 16; offset += 4) {
if (this.looksLikeSequenceHeader(data, offset)) {
headers.push({
address: offset,
tempo: 120,
channelMask: 0xFF,
channelPointers: [],
engine: 'Unknown'
});
}
}
return headers;
}
/**
* Parse complete sequence data from header
*/
parseSequenceData(data, header, enginePattern) {
try {
// Parse timing information from header
const timingInfo = this.parseTimingInfo(data, header);
// Extract pattern table if present
const patternTable = this.parsePatternTable(data, header, enginePattern);
// Parse channel data
const channels = this.parseChannelData(data, header, patternTable, enginePattern);
// Calculate track length and loop points
const { trackLength, loopPoint, loopLength } = this.calculateTrackLength(channels, timingInfo);
// Extract instrument assignments
const instrumentAssignments = this.extractInstrumentAssignments(channels);
// Parse global effects
const effects = this.parseSequenceEffects(data, header, enginePattern);
// Generate metadata
const metadata = this.generateSequenceMetadata(channels, timingInfo, effects);
// Determine sequence data bounds
const sequenceEnd = this.findSequenceEnd(data, header, channels);
const sequenceData = data.slice(header.address, sequenceEnd);
return {
data: sequenceData,
tempo: timingInfo.tempo,
channels,
address: header.address,
name: `${enginePattern.engine.toLowerCase()}_sequence_${header.address.toString(16)}`,
engine: enginePattern.engine,
patternTable,
trackLength,
loopPoint,
loopLength,
timingInfo,
instrumentAssignments,
effects,
metadata
};
}
catch (error) {
console.warn(`Failed to parse sequence at 0x${header.address.toString(16)}: ${error}`);
return null;
}
}
/**
* Parse timing information from sequence header
*/
parseTimingInfo(data, header) {
let tempo = header.tempo || 120;
let ticksPerBeat = 48; // Common SNES default
const beatsPerMeasure = 4;
// Engine-specific timing parsing
switch (header.engine) {
case 'N-SPC':
// N-SPC stores tempo differently
if (header.address + 2 < data.length) {
const tempoValue = data[header.address + 1] | (data[header.address + 2] << 8);
tempo = Math.max(30, Math.min(300, tempoValue));
}
ticksPerBeat = 48;
break;
case 'Akao':
// Akao uses different tick resolution
ticksPerBeat = 96;
break;
default:
// Use defaults
break;
}
return {
ticksPerBeat,
beatsPerMeasure,
tempo,
timeSignature: { numerator: 4, denominator: 4 },
totalTicks: 0 // Will be calculated after parsing channels
};
}
/**
* Parse pattern table for engines that use them
*/
parsePatternTable(data, header, enginePattern) {
const patterns = [];
// Only some engines use pattern tables
if (header.engine === 'HAL' || header.engine === 'Kankichi-kun') {
// Look for pattern table near header
for (let offset = header.address + 4; offset < Math.min(data.length - 4, header.address + 64); offset += 4) {
const patternAddr = data[offset] | (data[offset + 1] << 8);
const patternLength = data[offset + 2];
const flags = data[offset + 3];
if (patternAddr >= header.address && patternAddr < data.length && patternLength > 0 && patternLength < 256) {
patterns.push({
patternIndex: patterns.length,
address: patternAddr,
length: patternLength,
loopFlag: (flags & 0x01) !== 0,
channels: this.getPatternChannels(flags)
});
}
}
}
return patterns.length > 0 ? patterns : undefined;
}
/**
* Parse channel data from sequence
*/
parseChannelData(data, header, patternTable, enginePattern) {
const channels = [];
// Determine which channels are active
const activeChannels = this.getActiveChannels(header);
for (const channelNum of activeChannels) {
try {
const channelData = this.parseChannelTrack(data, header, channelNum, patternTable, enginePattern);
if (channelData) {
channels.push(channelData);
}
}
catch (error) {
console.warn(`Failed to parse channel ${channelNum}: ${error}`);
}
}
return channels;
}
/**
* Parse individual channel track data
*/
parseChannelTrack(data, header, channelNum, patternTable, enginePattern) {
// Get channel start address
const channelStart = this.getChannelStartAddress(header, channelNum);
if (!channelStart || channelStart >= data.length)
return null;
const notes = [];
const velocities = [];
const effects = [];
let offset = channelStart;
let currentTick = 0;
let currentInstrument = 0;
let currentVolume = 100;
let currentPan = 64; // Center
// Parse channel commands
while (offset < data.length) {
const command = data[offset];
if (this.isEndOfTrack(command, header.engine)) {
break;
}
const parseResult = this.parseChannelCommand(data, offset, currentTick, header.engine);
if (!parseResult) {
offset++;
continue;
}
switch (parseResult.type) {
case 'note':
notes.push({
note: parseResult.note,
velocity: parseResult.velocity || 64,
duration: parseResult.duration || 48,
timestamp: currentTick,
pitch: parseResult.pitch
});
velocities.push(parseResult.velocity || 64);
currentTick += parseResult.duration || 48;
break;
case 'rest':
currentTick += parseResult.duration || 48;
break;
case 'instrument':
currentInstrument = parseResult.value || 0;
break;
case 'volume':
currentVolume = parseResult.value || 100;
effects.push({
type: 'volume',
parameter1: currentVolume,
timestamp: currentTick
});
break;
case 'pan':
currentPan = parseResult.value || 64;
effects.push({
type: 'pan',
parameter1: currentPan,
timestamp: currentTick
});
break;
case 'pitchBend':
effects.push({
type: 'pitchBend',
parameter1: parseResult.value || 0,
parameter2: parseResult.parameter2,
timestamp: currentTick,
duration: parseResult.duration
});
break;
case 'vibrato':
effects.push({
type: 'vibrato',
parameter1: parseResult.value || 0,
parameter2: parseResult.parameter2 || 0,
timestamp: currentTick,
duration: parseResult.duration
});
break;
case 'echo':
effects.push({
type: 'echo',
parameter1: parseResult.value || 0,
timestamp: currentTick
});
break;
}
offset += parseResult.commandLength;
// Safety check to prevent infinite loops
if (currentTick > 100000) {
console.warn(`Channel ${channelNum} exceeded maximum tick count, stopping parse`);
break;
}
}
// Extract track data
const trackEnd = Math.min(offset, data.length);
const trackData = data.slice(channelStart, trackEnd);
return {
channelNumber: channelNum,
notes,
velocities,
effects,
instrumentIndex: currentInstrument,
volume: currentVolume,
pan: currentPan,
trackData
};
}
/**
* Helper methods for sequence extraction
*/
extractSequencesByPattern(data, startOffset, enginePattern) {
const sequences = [];
// Fallback pattern-based extraction for unknown engines
for (let i = 0; i < 4; i++) {
const seqOffset = star