UNPKG

taglib-wasm

Version:

TagLib for TypeScript platforms: Deno, Node.js, Bun, Electron, browsers, and Cloudflare Workers

389 lines (388 loc) 11.8 kB
import { TagLib } from "./taglib.js";
import { updateTags } from "./simple.js";
import { getGlobalWorkerPool } from "./worker-pool.js";

/** Join path segments with "/" and collapse repeated separators. */
function join(...paths) {
  return paths.filter((p) => p).join("/").replace(/\/+/g, "/");
}

/**
 * Return the extension of `path` including the leading dot, or "" when the
 * path has no extension or ends with a dot.
 */
function extname(path) {
  const lastDot = path.lastIndexOf(".");
  if (lastDot === -1 || lastDot === path.length - 1) return "";
  return path.slice(lastDot);
}

/** Extensions scanned when the caller does not supply `options.extensions`. */
const DEFAULT_AUDIO_EXTENSIONS = [
  ".mp3",
  ".m4a",
  ".mp4",
  ".flac",
  ".ogg",
  ".oga",
  ".opus",
  ".wav",
  ".wv",
  ".ape",
  ".mpc",
  ".tta",
  ".wma",
];

/**
 * Yield the full path of a directory entry when it is an audio file, or
 * recurse into it when it is a directory (and `options.recursive` is not
 * explicitly false).
 */
async function* processDirectoryEntry(path, entryName, isDirectory, isFile, options) {
  const { recursive = true, extensions = DEFAULT_AUDIO_EXTENSIONS } = options;
  const fullPath = join(path, entryName);
  if (isDirectory && recursive) {
    yield* walkDirectory(fullPath, options);
  } else if (isFile) {
    const ext = extname(entryName).toLowerCase();
    if (extensions.includes(ext)) {
      yield fullPath;
    }
  }
}

/**
 * Resolve a runtime-appropriate directory reader (Deno, Node.js, or Bun).
 *
 * @returns {{readDir: (path: string) => AsyncGenerator<{name: string, isDirectory: boolean, isFile: boolean}>}}
 * @throws {Error} When no supported runtime is detected (e.g. browsers).
 */
async function getDirectoryReader() {
  if (typeof Deno !== "undefined") {
    return {
      readDir: async function* (path) {
        for await (const entry of Deno.readDir(path)) {
          yield {
            name: entry.name,
            isDirectory: entry.isDirectory,
            isFile: entry.isFile,
          };
        }
      },
    };
  }
  const isNode = typeof globalThis.process !== "undefined" && globalThis.process.versions?.node;
  const isBun = typeof globalThis.process !== "undefined" && globalThis.process.versions?.bun;
  if (isNode || isBun) {
    const fs = await import("fs/promises");
    return {
      readDir: async function* (path) {
        const entries = await fs.readdir(path, { withFileTypes: true });
        for (const entry of entries) {
          yield {
            name: entry.name,
            isDirectory: entry.isDirectory(),
            isFile: entry.isFile(),
          };
        }
      },
    };
  }
  throw new Error("Directory scanning not supported in this runtime");
}

/** Recursively yield audio-file paths under `path`, honoring `options`. */
async function* walkDirectory(path, options = {}) {
  const reader = await getDirectoryReader();
  for await (const entry of reader.readDir(path)) {
    yield* processDirectoryEntry(
      path,
      entry.name,
      entry.isDirectory,
      entry.isFile,
      options,
    );
  }
}

/**
 * Run `processor` over `files` with at most `concurrency` tasks in flight,
 * preserving input order in the returned results.
 */
async function processBatch(files, processor, concurrency) {
  const results = [];
  for (let i = 0; i < files.length; i += concurrency) {
    const chunk = files.slice(i, i + concurrency);
    const chunkResults = await Promise.all(chunk.map((file) => processor(file)));
    results.push(...chunkResults);
  }
  return results;
}

/**
 * Pull ReplayGain / Apple Sound Check fields out of a tag map.
 *
 * @returns {object | undefined} The dynamics fields present, or `undefined`
 *   when none are set (so the field can be omitted from results).
 */
function extractDynamicsData(tags) {
  const dynamics = {};
  if (tags.REPLAYGAIN_TRACK_GAIN) dynamics.replayGainTrackGain = tags.REPLAYGAIN_TRACK_GAIN;
  if (tags.REPLAYGAIN_TRACK_PEAK) dynamics.replayGainTrackPeak = tags.REPLAYGAIN_TRACK_PEAK;
  if (tags.REPLAYGAIN_ALBUM_GAIN) dynamics.replayGainAlbumGain = tags.REPLAYGAIN_ALBUM_GAIN;
  if (tags.REPLAYGAIN_ALBUM_PEAK) dynamics.replayGainAlbumPeak = tags.REPLAYGAIN_ALBUM_PEAK;
  if (tags.ITUNNORM) dynamics.appleSoundCheck = tags.ITUNNORM;
  return Object.keys(dynamics).length > 0 ? dynamics : void 0;
}

/**
 * Read one file's metadata via the worker pool.
 *
 * `progress` is a shared `{ count }` accumulator mutated here so that
 * concurrent tasks report a single monotonically increasing count to
 * `onProgress` (the previous implementation seeded a fresh counter per file,
 * producing duplicate/stale progress values).
 */
async function processFileWithWorker(filePath, pool, includeProperties, onProgress, progress, totalFound) {
  const [tags, properties, pictures] = await Promise.all([
    pool.readTags(filePath),
    includeProperties ? pool.readProperties(filePath) : Promise.resolve(null),
    pool.readPictures(filePath),
  ]);
  const dynamics = extractDynamicsData(tags);
  progress.count++;
  onProgress?.(progress.count, totalFound, filePath);
  return {
    path: filePath,
    tags,
    properties: properties ?? void 0,
    hasCoverArt: pictures.length > 0,
    dynamics,
  };
}

/**
 * Read one file's metadata with a direct TagLib instance (no worker pool).
 * The file handle is always disposed, even on error. See
 * `processFileWithWorker` for the `progress` contract.
 */
async function processFileWithTagLib(filePath, taglib, includeProperties, onProgress, progress, totalFound) {
  const audioFile = await taglib.open(filePath);
  try {
    const tags = audioFile.tag();
    let properties;
    if (includeProperties) {
      const props = audioFile.audioProperties();
      if (props) {
        properties = props;
      }
    }
    const hasCoverArt = audioFile.getPictures().length > 0;
    const dynamics = {};
    const fields = [
      { key: "REPLAYGAIN_TRACK_GAIN", prop: "replayGainTrackGain" },
      { key: "REPLAYGAIN_TRACK_PEAK", prop: "replayGainTrackPeak" },
      { key: "REPLAYGAIN_ALBUM_GAIN", prop: "replayGainAlbumGain" },
      { key: "REPLAYGAIN_ALBUM_PEAK", prop: "replayGainAlbumPeak" },
    ];
    for (const { key, prop } of fields) {
      const value = audioFile.getProperty(key);
      if (value) {
        dynamics[prop] = value;
      }
    }
    // Sound Check may live in the generic property map or, for MP4, in the
    // iTunes freeform atom.
    let appleSoundCheck = audioFile.getProperty("ITUNNORM");
    if (!appleSoundCheck && audioFile.isMP4()) {
      appleSoundCheck = audioFile.getMP4Item("----:com.apple.iTunes:iTunNORM");
    }
    if (appleSoundCheck) dynamics.appleSoundCheck = appleSoundCheck;
    progress.count++;
    onProgress?.(progress.count, totalFound, filePath);
    return {
      path: filePath,
      tags,
      properties,
      hasCoverArt,
      dynamics: Object.keys(dynamics).length > 0 ? dynamics : void 0,
    };
  } finally {
    audioFile.dispose();
  }
}

/**
 * Scan a folder for audio files and read their metadata.
 *
 * Fixes over the previous implementation: each file is now read exactly once
 * (the old code issued a throwaway "warm-up" round of reads — and, in the
 * non-worker path, a second `taglib.open` — per file), and progress counting
 * uses one shared counter so `onProgress` values are consistent under
 * concurrency.
 *
 * @param {string} folderPath Root directory to scan.
 * @param {object} [options] maxFiles, includeProperties, continueOnError,
 *   useWorkerPool, workerPool, onProgress, plus walk options
 *   (recursive, extensions).
 * @returns {Promise<{files, errors, totalFound, totalProcessed, duration}>}
 */
async function scanFolder(folderPath, options = {}) {
  const startTime = Date.now();
  const {
    maxFiles = Infinity,
    includeProperties = true,
    continueOnError = true,
    useWorkerPool = true,
    workerPool,
    onProgress,
  } = options;

  // Enumerate first so progress callbacks can report an accurate total.
  const filePaths = [];
  for await (const filePath of walkDirectory(folderPath, options)) {
    filePaths.push(filePath);
    if (filePaths.length >= maxFiles) break;
  }
  const totalFound = filePaths.length;

  const files = [];
  const errors = [];
  // Shared across all concurrent tasks; counts successes and failures alike.
  const progress = { count: 0 };

  const shouldUseWorkerPool = useWorkerPool && (workerPool ?? typeof Worker !== "undefined");
  const pool = shouldUseWorkerPool ? (workerPool ?? getGlobalWorkerPool()) : null;
  const taglib = shouldUseWorkerPool ? null : await TagLib.initialize();

  // Record (or rethrow) a per-file failure and return an error placeholder
  // that is filtered out of `files` below.
  const handleError = (filePath, error) => {
    const err = error instanceof Error ? error : new Error(String(error));
    if (!continueOnError) throw err;
    errors.push({ path: filePath, error: err });
    progress.count++;
    onProgress?.(progress.count, totalFound, filePath);
    return { path: filePath, tags: {}, error: err };
  };

  if (pool) {
    const batchSize = Math.min(50, filePaths.length);
    for (let i = 0; i < filePaths.length; i += batchSize) {
      const batch = filePaths.slice(i, i + batchSize);
      const batchResults = await Promise.all(
        batch.map(async (filePath) => {
          try {
            return await processFileWithWorker(
              filePath,
              pool,
              includeProperties,
              onProgress,
              progress,
              totalFound,
            );
          } catch (error) {
            return handleError(filePath, error);
          }
        }),
      );
      files.push(...batchResults.filter((r) => !r.error));
    }
  } else {
    const processor = async (filePath) => {
      try {
        return await processFileWithTagLib(
          filePath,
          taglib,
          includeProperties,
          onProgress,
          progress,
          totalFound,
        );
      } catch (error) {
        return handleError(filePath, error);
      }
    };
    const concurrency = 4;
    const batchSize = concurrency * 10;
    for (let i = 0; i < filePaths.length; i += batchSize) {
      const batch = filePaths.slice(i, i + batchSize);
      const batchResults = await processBatch(batch, processor, concurrency);
      files.push(...batchResults.filter((r) => !r.error));
    }
  }
  // NOTE: the global worker pool is deliberately left running for reuse by
  // later scans; a caller-supplied pool is owned (and terminated) by the
  // caller.

  return {
    files,
    errors,
    totalFound,
    totalProcessed: progress.count,
    duration: Date.now() - startTime,
  };
}

/**
 * Apply tag updates to many files with bounded concurrency.
 *
 * Processes the update objects directly; the previous implementation mapped
 * to bare paths and recovered each update with `batch.find(...)`, which was
 * O(n²) and applied the wrong update when two entries shared a path.
 *
 * @param {Array<{path: string, tags: object}>} updates
 * @param {object} [options] continueOnError (default true), concurrency
 *   (default 4).
 * @returns {Promise<{successful: number, failed: Array, duration: number}>}
 */
async function updateFolderTags(updates, options = {}) {
  const startTime = Date.now();
  const { continueOnError = true, concurrency = 4 } = options;
  let successful = 0;
  const failed = [];
  const processor = async (update) => {
    try {
      await updateTags(update.path, update.tags);
      successful++;
    } catch (error) {
      const err = error instanceof Error ? error : new Error(String(error));
      if (!continueOnError) throw err;
      failed.push({ path: update.path, error: err });
    }
  };
  const batchSize = concurrency * 10;
  for (let i = 0; i < updates.length; i += batchSize) {
    const batch = updates.slice(i, i + batchSize);
    await processBatch(batch, processor, concurrency);
  }
  return { successful, failed, duration: Date.now() - startTime };
}

/**
 * Group scanned files whose `criteria` tag fields all match.
 *
 * @param {string} folderPath Folder to scan.
 * @param {string[]} [criteria] Tag fields forming the duplicate key.
 * @returns {Promise<Map<string, Array>>} Key → files, only groups of 2+.
 */
async function findDuplicates(folderPath, criteria = ["artist", "title"]) {
  const result = await scanFolder(folderPath);
  const duplicates = /* @__PURE__ */ new Map();
  for (const file of result.files) {
    const key = criteria
      .map((field) => file.tags[field] ?? "")
      .filter((v) => v !== "")
      .join("|");
    if (key) {
      const group = duplicates.get(key) ?? [];
      group.push(file);
      duplicates.set(key, group);
    }
  }
  // Map iterators tolerate deletion during iteration, so this prune is safe.
  for (const [key, group] of duplicates.entries()) {
    if (group.length < 2) {
      duplicates.delete(key);
    }
  }
  return duplicates;
}

/**
 * Scan `folderPath` and write the results as pretty-printed JSON to
 * `outputPath` (Deno or Node/Bun runtimes only; silently a no-op elsewhere).
 */
async function exportFolderMetadata(folderPath, outputPath, options) {
  const result = await scanFolder(folderPath, options);
  const data = {
    folder: folderPath,
    scanDate: /* @__PURE__ */ new Date().toISOString(),
    summary: {
      totalFiles: result.totalFound,
      processedFiles: result.totalProcessed,
      errors: result.errors.length,
      duration: result.duration,
    },
    files: result.files,
    errors: result.errors,
  };
  if (typeof Deno !== "undefined") {
    await Deno.writeTextFile(outputPath, JSON.stringify(data, null, 2));
  } else if (typeof globalThis.process !== "undefined") {
    const fs = await import("fs/promises");
    await fs.writeFile(outputPath, JSON.stringify(data, null, 2));
  }
}

export { exportFolderMetadata, findDuplicates, scanFolder, updateFolderTags };