@lenml/char-card-reader

SillyTavern character card info reader

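For orientation before the raw source map below, here is a minimal usage sketch of the reader API that is visible in the bundled sources (CharacterCard.from_file, toSpecV3, get_book). This is an assumption-laden sketch, not documentation: the import path and entry-point export are assumed (the package index is not part of this file), and "./card.png" is a placeholder input.

// Sketch only: assumes the package entry point re-exports the
// CharacterCard class seen in the bundled sources of this source map.
import { readFile } from "node:fs/promises";
import { CharacterCard } from "@lenml/char-card-reader";

async function main() {
  // from_file accepts ArrayBuffer | Uint8Array; PNG, JPEG and WebP cards are supported.
  const buffer = await readFile("./card.png"); // placeholder path
  const card = await CharacterCard.from_file(new Uint8Array(buffer));

  console.log(card.spec, card.spec_version); // e.g. "chara_card_v2", "2.0"
  console.log(card.name, card.description);

  // Normalize the card to the chara_card_v3 shape.
  const v3 = card.toSpecV3();
  console.log(v3.data.name);

  // Lorebook: scan() returns enabled entries whose keys match the given context.
  const book = card.get_book();
  const entries = book.scan("some chat context text");
  console.log(entries.length);
}

main().catch(console.error);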
{"version":3,"sources":["../src/utils.ts","../src/CharacterBook.ts","../src/metadata/jpeg.ts","../src/metadata/png.ts","../src/metadata/webp.ts","../src/metadata/parser.ts","../src/types.ts","../src/CharacterCard.ts"],"sourcesContent":["export function toBase64(\r\n input: Blob | Buffer | ArrayBuffer | Uint8Array\r\n): Promise<string> {\r\n if (typeof Buffer !== \"undefined\" && Buffer.isBuffer(input)) {\r\n // Node.js Buffer\r\n return Promise.resolve(input.toString(\"base64\"));\r\n }\r\n\r\n if (typeof Blob !== \"undefined\" && input instanceof Blob) {\r\n // Browser Blob\r\n return new Promise((resolve, reject) => {\r\n const reader = new FileReader();\r\n reader.onload = () => {\r\n const result = reader.result as string;\r\n const base64 = result.split(\",\")[1]; // data:*/*;base64,...\r\n resolve(base64);\r\n };\r\n reader.onerror = reject;\r\n reader.readAsDataURL(input);\r\n });\r\n }\r\n\r\n // ArrayBuffer or Uint8Array\r\n let uint8: Uint8Array;\r\n if (input instanceof ArrayBuffer) {\r\n uint8 = new Uint8Array(input);\r\n } else if (input instanceof Uint8Array) {\r\n uint8 = input;\r\n } else {\r\n throw new TypeError(\"Unsupported input type\");\r\n }\r\n\r\n if (typeof Buffer !== \"undefined\") {\r\n // Node.js environment\r\n return Promise.resolve(Buffer.from(uint8).toString(\"base64\"));\r\n } else {\r\n // Browser: use btoa\r\n let binary = \"\";\r\n for (let i = 0; i < uint8.length; i++) {\r\n binary += String.fromCharCode(uint8[i]);\r\n }\r\n return Promise.resolve(btoa(binary));\r\n }\r\n}\r\n\r\nexport function isValidImageUrl(url: any) {\r\n if (typeof url !== \"string\") {\r\n return false;\r\n }\r\n // http 或者 data:\r\n if (\r\n url.startsWith(\"http://\") ||\r\n url.startsWith(\"https://\") ||\r\n url.startsWith(\"data:\")\r\n ) {\r\n return true;\r\n }\r\n\r\n return false;\r\n}\r\n\r\nexport const deepClone: <T>(x: T) => T = globalThis.structuredClone\r\n ? 
globalThis.structuredClone\r\n : <T>(x: T): T => JSON.parse(JSON.stringify(x));\r\n\r\nexport class Base64 {\r\n static encode(input: string): string {\r\n if (typeof window !== \"undefined\" && typeof window.btoa === \"function\") {\r\n return window.btoa(unescape(encodeURIComponent(input)));\r\n } else if (typeof Buffer !== \"undefined\") {\r\n return Buffer.from(input, \"utf-8\").toString(\"base64\");\r\n } else {\r\n throw new Error(\"Base64.encode: Environment not supported.\");\r\n }\r\n }\r\n\r\n static decode(base64: string): string {\r\n if (typeof window !== \"undefined\" && typeof window.atob === \"function\") {\r\n return decodeURIComponent(escape(window.atob(base64)));\r\n } else if (typeof Buffer !== \"undefined\") {\r\n return Buffer.from(base64, \"base64\").toString(\"utf-8\");\r\n } else {\r\n throw new Error(\"Base64.decode: Environment not supported.\");\r\n }\r\n }\r\n}\r\n\r\nfunction isDeepEqual(a: any, b: any) {\r\n if (a === b) {\r\n return true;\r\n }\r\n if (\r\n a === null ||\r\n b === null ||\r\n typeof a !== \"object\" ||\r\n typeof b !== \"object\"\r\n ) {\r\n return false;\r\n }\r\n if (Array.isArray(a) && Array.isArray(b)) {\r\n if (a.length !== b.length) {\r\n return false;\r\n }\r\n for (let i = 0; i < a.length; i++) {\r\n if (!isDeepEqual(a[i], b[i])) {\r\n return false;\r\n }\r\n }\r\n return true;\r\n }\r\n const keys = Object.keys(a);\r\n if (keys.length !== Object.keys(b).length) {\r\n return false;\r\n }\r\n for (const key of keys) {\r\n if (!isDeepEqual(a[key], b[key])) {\r\n return false;\r\n }\r\n }\r\n return true;\r\n}\r\n\r\nfunction isObject(x: any): x is object {\r\n return typeof x === \"object\" && x !== null;\r\n}\r\nfunction isNoNone(x: any): x is any {\r\n return x !== undefined && x !== null;\r\n}\r\n/**\r\n * Recursively merge objects into a single object.\r\n *\r\n * If all values for a key are objects, merge them recursively.\r\n * If any value for a key is not an object, take the last non-null value.\r\n * @param {...any[]} objects objects to merge\r\n * @returns merged object\r\n */\r\nexport function mergeObjects(...objects: any[]): any {\r\n const keys = new Set(objects.flatMap(Object.keys));\r\n return Object.fromEntries(\r\n Array.from(keys).map((key) => {\r\n const vals = objects.map((x) => x[key]);\r\n if (vals.some(Array.isArray)) {\r\n // NOTE: 直接用最后一个非空,数组没有合并逻辑\r\n return [key, [...vals].reverse().filter(isNoNone).filter(Boolean)[0]];\r\n }\r\n if (vals.some(isObject)) {\r\n return [key, mergeObjects(...vals.filter(isObject))];\r\n }\r\n return [key, [...vals].reverse().filter(isNoNone)[0]];\r\n })\r\n );\r\n}\r\nexport function uniq<T>(arr: T[]): T[] {\r\n return Array.from(new Set(arr));\r\n}\r\n","import { SpecV3 } from \"./spec_types/spec_v3\";\r\nimport { deepClone, uniq } from \"./utils\";\r\n\r\n// lorebook\r\nexport class CharacterBook implements SpecV3.Lorebook {\r\n static from_json(data: any) {\r\n if (typeof data !== \"object\" || data === null) {\r\n throw new Error(\"data must be an object\");\r\n }\r\n let entries: any[] = [];\r\n if (Array.isArray(data)) {\r\n entries = data;\r\n } else {\r\n entries =\r\n // from lorabook.json\r\n Array.isArray(data?.entries)\r\n ? data.entries\r\n : // from character card\r\n Array.isArray(data?.data?.character_book?.entries)\r\n ? data.data.character_book.entries\r\n : [];\r\n }\r\n const book = new CharacterBook(entries);\r\n const character_book =\r\n data?.character_book ?? data?.data?.character_book ?? 
data;\r\n book.name = character_book?.name;\r\n book.description = character_book?.description;\r\n book.recursive_scanning = character_book?.recursive_scanning ?? true;\r\n book.scan_depth = character_book?.scan_depth ?? 10;\r\n book.extensions = character_book?.extensions ?? {};\r\n return book;\r\n }\r\n\r\n name: string = \"unknown\";\r\n description: string = \"\";\r\n // TODO\r\n token_budget?: number;\r\n recursive_scanning: boolean = true;\r\n extensions: SpecV3.Lorebook[\"extensions\"] = {};\r\n entries: SpecV3.Lorebook[\"entries\"] = [];\r\n scan_depth?: number | undefined = 10;\r\n\r\n constructor(entries: SpecV3.Lorebook[\"entries\"] = []) {\r\n this.entries = deepClone(entries);\r\n this._keys_fix();\r\n }\r\n\r\n public _keys_fix() {\r\n const pattern = /[,|;,;]/g;\r\n // 修复 keys ,有部分情况 keys 错误可能导致搜索出错\r\n for (const entry of this.entries) {\r\n const { keys } = entry;\r\n const fixed_keys: string[] = [];\r\n for (const k of keys) {\r\n if (pattern.test(k)) {\r\n fixed_keys.push(\r\n ...k\r\n .split(pattern)\r\n .map((x) => x.trim())\r\n .filter(Boolean)\r\n );\r\n } else {\r\n fixed_keys.push(k);\r\n }\r\n }\r\n entry.keys = fixed_keys;\r\n }\r\n }\r\n\r\n private _scan(\r\n context: string,\r\n matched: SpecV3.Lorebook[\"entries\"] = [],\r\n current_depth = 1\r\n ): SpecV3.Lorebook[\"entries\"] {\r\n if (current_depth >= (this.scan_depth ?? 10)) {\r\n return uniq(matched);\r\n }\r\n const current_context = [\r\n context,\r\n ...uniq(matched).map((x) => x.content),\r\n ].join(\"\\n\");\r\n const pending_entries = this.entries\r\n .filter((x) => x.content?.trim())\r\n .filter((x) => x.enabled && !matched.includes(x));\r\n if (pending_entries.length === 0) {\r\n return uniq(matched);\r\n }\r\n for (const entry of pending_entries) {\r\n const is_matched = entry.keys.some((k) => current_context.includes(k));\r\n if (is_matched) {\r\n matched.push(entry);\r\n }\r\n }\r\n if (this.recursive_scanning) {\r\n return this._scan(context, matched, current_depth + 1);\r\n }\r\n return uniq(matched);\r\n }\r\n public scan(context: string): SpecV3.Lorebook[\"entries\"] {\r\n const matched = this._scan(context);\r\n const constant = this.entries.filter(\r\n (x) => x.constant && x.enabled && x.content?.trim()\r\n );\r\n return uniq([...matched, ...constant]).sort(\r\n (a, b) => a.insertion_order - b.insertion_order\r\n );\r\n }\r\n}\r\n","import { JpegSegment } from \"./types\";\r\n\r\nexport namespace JPEG {\r\n export function parse_chunks(data: Uint8Array): JpegSegment[] {\r\n const segments: JpegSegment[] = [];\r\n\r\n let offset = 2;\r\n while (offset < data.length) {\r\n if (data[offset] !== 0xff)\r\n throw new Error(`Invalid marker at offset ${offset}`);\r\n\r\n let marker = data[offset + 1];\r\n while (marker === 0xff) {\r\n offset++;\r\n marker = data[offset + 1];\r\n }\r\n\r\n const markerOffset = offset;\r\n offset += 2;\r\n\r\n if (marker === 0xd9 || marker === 0xda) break;\r\n\r\n const length = (data[offset] << 8) | data[offset + 1];\r\n const payloadStart = offset + 2;\r\n const payloadEnd = payloadStart + length - 2;\r\n const segmentData = data.slice(payloadStart, payloadEnd);\r\n\r\n const info: JpegSegment = {\r\n marker: `FF ${marker.toString(16).toUpperCase().padStart(2, \"0\")}`,\r\n offset: markerOffset,\r\n length,\r\n type: \"Other\",\r\n preview: Array.from(segmentData.slice(0, 16))\r\n .map((b) => b.toString(16).padStart(2, \"0\"))\r\n .join(\" \"),\r\n };\r\n\r\n if (\r\n marker === 0xe0 &&\r\n String.fromCharCode(...segmentData.slice(0, 5)) === \"JFIF\\0\"\r\n ) 
{\r\n info.type = \"JFIF\";\r\n } else if (marker === 0xe1) {\r\n const id = String.fromCharCode(...segmentData.slice(0, 6));\r\n if (id.startsWith(\"Exif\")) info.type = \"EXIF\";\r\n else if (\r\n String.fromCharCode(...segmentData.slice(0, 29)).includes(\r\n \"http://ns.adobe.com/xap/1.0/\"\r\n )\r\n ) {\r\n info.type = \"XMP\";\r\n } else info.type = \"APP1\";\r\n } else if (marker === 0xfe) {\r\n info.type = \"Comment\";\r\n info.comment = new TextDecoder().decode(segmentData);\r\n }\r\n\r\n segments.push(info);\r\n offset = payloadEnd;\r\n }\r\n\r\n return segments;\r\n }\r\n\r\n /**\r\n * 核心:解析 TIFF 结构并提取 UserComment\r\n * 兼容 JPEG Exif 和 WebP Exif\r\n * @param buffer 完整文件的二进制数据\r\n * @param tiffStart TIFF 头 (II/MM) 在 buffer 中的绝对偏移量\r\n */\r\n function parseTiffUserComment(\r\n buffer: Uint8Array,\r\n tiffStart: number\r\n ): string | undefined {\r\n // 1. 边界检查\r\n if (tiffStart >= buffer.length) return undefined;\r\n\r\n // 2. 确定字节序 (Endianness)\r\n // II = 0x4949 (Little Endian), MM = 0x4d4d (Big Endian)\r\n const byteOrderMarker = (buffer[tiffStart] << 8) | buffer[tiffStart + 1];\r\n let isLittleEndian = false;\r\n\r\n if (byteOrderMarker === 0x4949) isLittleEndian = true;\r\n else if (byteOrderMarker === 0x4d4d) isLittleEndian = false;\r\n else return undefined; // 不是合法的 TIFF 结构\r\n\r\n // 定义读取器 (闭包捕获配置)\r\n const readU16 = (offset: number) => {\r\n const p = tiffStart + offset;\r\n if (p + 2 > buffer.length) return 0;\r\n return isLittleEndian\r\n ? buffer[p] | (buffer[p + 1] << 8)\r\n : (buffer[p] << 8) | buffer[p + 1];\r\n };\r\n\r\n const readU32 = (offset: number) => {\r\n const p = tiffStart + offset;\r\n if (p + 4 > buffer.length) return 0;\r\n // 使用 >>> 0 转换为无符号整数\r\n return isLittleEndian\r\n ? (buffer[p] |\r\n (buffer[p + 1] << 8) |\r\n (buffer[p + 2] << 16) |\r\n (buffer[p + 3] << 24)) >>>\r\n 0\r\n : ((buffer[p] << 24) |\r\n (buffer[p + 1] << 16) |\r\n (buffer[p + 2] << 8) |\r\n buffer[p + 3]) >>>\r\n 0;\r\n };\r\n\r\n // 3. 查找 IFD Tag 的通用函数\r\n const findTag = (ifdOffset: number, targetId: number) => {\r\n const count = readU16(ifdOffset);\r\n for (let i = 0; i < count; i++) {\r\n const entryOff = ifdOffset + 2 + i * 12;\r\n if (readU16(entryOff) === targetId) {\r\n return {\r\n type: readU16(entryOff + 2),\r\n count: readU32(entryOff + 4),\r\n // 注意:这里返回的是 Value 字段的偏移量\r\n valueOffsetField: entryOff + 8,\r\n };\r\n }\r\n }\r\n return null;\r\n };\r\n\r\n // 4. 执行查找路径:0th IFD -> Exif SubIFD (0x8769) -> UserComment (0x9286)\r\n\r\n // 获取 0th IFD 偏移量 (TIFF头后的4字节)\r\n const offset0th = readU32(4);\r\n if (offset0th === 0) return undefined;\r\n\r\n // 步骤 A: 找 ExifOffset (0x8769)\r\n let targetIfd = offset0th;\r\n const exifTag = findTag(offset0th, 0x8769);\r\n if (exifTag) {\r\n targetIfd = readU32(exifTag.valueOffsetField);\r\n }\r\n\r\n // 步骤 B: 找 UserComment (0x9286)\r\n const commentTag = findTag(targetIfd, 0x9286);\r\n if (!commentTag) return undefined;\r\n\r\n // 5. 提取数据\r\n const { count, valueOffsetField } = commentTag;\r\n\r\n // 获取数据指针\r\n // UserComment 长度通常远大于4字节,所以 Value Offset 字段存的是数据的指针\r\n let dataPtr = readU32(valueOffsetField);\r\n\r\n // 防御性处理:极少数情况数据 <= 4字节会直接存如果不存指针\r\n if (count <= 4) dataPtr = valueOffsetField; // (虽然 UserComment 基本不可能)\r\n\r\n const absStart = tiffStart + dataPtr;\r\n const rawData = buffer.slice(absStart, absStart + count);\r\n\r\n // 6. 
解码 (处理 ASCII\\0\\0\\0 等前缀)\r\n const header = String.fromCharCode(...rawData.slice(0, 8));\r\n if (header.startsWith(\"ASCII\\0\\0\\0\")) {\r\n return new TextDecoder(\"utf-8\").decode(rawData.slice(8));\r\n } else if (header.startsWith(\"UNICODE\\0\")) {\r\n return new TextDecoder(\"utf-16\").decode(rawData.slice(8));\r\n } else {\r\n // 某些非标写法没有头,直接尝试解码\r\n return new TextDecoder(\"utf-8\").decode(rawData);\r\n }\r\n }\r\n\r\n /**\r\n * extract jpeg user comment\r\n */\r\n export function extract_user_comment(\r\n fullFileBuffer: Uint8Array,\r\n segment: any\r\n ): string | undefined {\r\n if (segment.type !== \"EXIF\") return undefined;\r\n\r\n // segment.offset 指向 FF (Marker Start)\r\n // +2 bytes = Length\r\n // +2 bytes = Length Value\r\n // = +4 bytes 到达 Payload (Exif Header)\r\n // +6 bytes (Exif\\0\\0) 到达 TIFF Header\r\n const tiffStart = segment.offset + 10;\r\n\r\n return parseTiffUserComment(fullFileBuffer, tiffStart);\r\n }\r\n}\r\n","import { PngChunk } from \"./types\";\r\n\r\nexport namespace PNG {\r\n export function parse_chunks(data: Uint8Array): PngChunk[] {\r\n const chunks: PngChunk[] = [];\r\n let offset = 8;\r\n\r\n while (offset < data.length) {\r\n if (offset + 8 > data.length) break;\r\n\r\n const length =\r\n ((data[offset] << 24) |\r\n (data[offset + 1] << 16) |\r\n (data[offset + 2] << 8) |\r\n data[offset + 3]) >>>\r\n 0;\r\n\r\n const type = String.fromCharCode(\r\n data[offset + 4],\r\n data[offset + 5],\r\n data[offset + 6],\r\n data[offset + 7]\r\n );\r\n\r\n const chunkStart = offset + 8;\r\n const chunkEnd = chunkStart + length;\r\n if (chunkEnd + 4 > data.length) break;\r\n\r\n const chunkData = data.slice(chunkStart, chunkEnd);\r\n const crc =\r\n ((data[chunkEnd] << 24) |\r\n (data[chunkEnd + 1] << 16) |\r\n (data[chunkEnd + 2] << 8) |\r\n data[chunkEnd + 3]) >>>\r\n 0;\r\n\r\n const chunk: PngChunk = { type, length, crc };\r\n\r\n if (type === \"IHDR\") {\r\n chunk.width =\r\n ((chunkData[0] << 24) |\r\n (chunkData[1] << 16) |\r\n (chunkData[2] << 8) |\r\n chunkData[3]) >>>\r\n 0;\r\n chunk.height =\r\n ((chunkData[4] << 24) |\r\n (chunkData[5] << 16) |\r\n (chunkData[6] << 8) |\r\n chunkData[7]) >>>\r\n 0;\r\n chunk.bitDepth = chunkData[8];\r\n chunk.colorType = chunkData[9];\r\n } else if (type === \"tEXt\") {\r\n const text = new TextDecoder().decode(chunkData);\r\n const sep = text.indexOf(\"\\0\");\r\n if (sep >= 0) {\r\n chunk.keyword = text.slice(0, sep);\r\n chunk.text = text.slice(sep + 1);\r\n } else {\r\n chunk.rawText = text;\r\n }\r\n }\r\n\r\n chunks.push(chunk);\r\n offset = chunkEnd + 4;\r\n }\r\n\r\n return chunks;\r\n }\r\n}\r\n","import { WebPChunk } from \"./types\";\r\n\r\nexport namespace WebP {\r\n export function extract_user_comment(\r\n fullFileBuffer: Uint8Array,\r\n chunk: { offset: number; length: number }\r\n ): string | undefined {\r\n // 1. 
确定数据起始位置\r\n // 假设 chunk.offset 指向的是 \"EXIF\" 标签的起始位置\r\n // 结构: [TAG: 4bytes][SIZE: 4bytes][DATA...]\r\n let tiffStart = chunk.offset + 8;\r\n\r\n // 边界检查\r\n if (tiffStart >= fullFileBuffer.length) return undefined;\r\n\r\n // --- 检测并跳过 \"Exif\\0\\0\" 前缀 ---\r\n // 0x45('E'), 0x78('x'), 0x69('i'), 0x66('f')\r\n if (\r\n fullFileBuffer[tiffStart] === 0x45 &&\r\n fullFileBuffer[tiffStart + 1] === 0x78 &&\r\n fullFileBuffer[tiffStart + 2] === 0x69 &&\r\n fullFileBuffer[tiffStart + 3] === 0x66\r\n ) {\r\n // 这是一个脏数据(JPEG APP1 header),WebP 中应当跳过这 6 字节\r\n tiffStart += 6;\r\n }\r\n\r\n // --- 确定字节序 (Endianness) ---\r\n // MM (0x4D4D) = Big Endian\r\n // II (0x4949) = Little Endian\r\n const byteOrderMarker =\r\n (fullFileBuffer[tiffStart] << 8) | fullFileBuffer[tiffStart + 1];\r\n let isLittleEndian = false;\r\n\r\n if (byteOrderMarker === 0x4949) {\r\n isLittleEndian = true;\r\n } else if (byteOrderMarker === 0x4d4d) {\r\n isLittleEndian = false;\r\n } else {\r\n console.error(\"无法识别的 TIFF 头:\", byteOrderMarker.toString(16));\r\n return undefined;\r\n }\r\n\r\n // 定义读取器\r\n const readU16 = (offset: number) => {\r\n const p = tiffStart + offset;\r\n if (p + 2 > fullFileBuffer.length) return 0;\r\n if (isLittleEndian) {\r\n return fullFileBuffer[p] | (fullFileBuffer[p + 1] << 8);\r\n } else {\r\n return (fullFileBuffer[p] << 8) | fullFileBuffer[p + 1];\r\n }\r\n };\r\n\r\n const readU32 = (offset: number) => {\r\n const p = tiffStart + offset;\r\n if (p + 4 > fullFileBuffer.length) return 0;\r\n // 使用 >>> 0 确保是无符号整数\r\n if (isLittleEndian) {\r\n return (\r\n (fullFileBuffer[p] |\r\n (fullFileBuffer[p + 1] << 8) |\r\n (fullFileBuffer[p + 2] << 16) |\r\n (fullFileBuffer[p + 3] << 24)) >>>\r\n 0\r\n );\r\n } else {\r\n return (\r\n ((fullFileBuffer[p] << 24) |\r\n (fullFileBuffer[p + 1] << 16) |\r\n (fullFileBuffer[p + 2] << 8) |\r\n fullFileBuffer[p + 3]) >>>\r\n 0\r\n );\r\n }\r\n };\r\n\r\n // 2. 读取第一个 IFD 的偏移量 (Header 之后的 4 字节)\r\n const firstIFDOffset = readU32(4);\r\n if (firstIFDOffset === 0) return undefined;\r\n\r\n // --- 查找 Tag 的通用函数 ---\r\n const findTagInIFD = (ifdOffset: number, targetTagId: number) => {\r\n const numEntries = readU16(ifdOffset);\r\n\r\n for (let i = 0; i < numEntries; i++) {\r\n // 每个 Entry 12 字节\r\n const entryOffset = ifdOffset + 2 + i * 12;\r\n const tag = readU16(entryOffset);\r\n\r\n if (tag === targetTagId) {\r\n return {\r\n type: readU16(entryOffset + 2),\r\n count: readU32(entryOffset + 4),\r\n // 注意:valueOffset 是存储“数据指针”或者“直接数据”的位置\r\n valueOffsetOrData: entryOffset + 8,\r\n };\r\n }\r\n }\r\n return null;\r\n };\r\n\r\n // 3. 逻辑流程:0th IFD -> Exif SubIFD (0x8769) -> UserComment (0x9286)\r\n\r\n // A. 在 0th IFD 找 ExifOffset\r\n let targetIFDOffset = firstIFDOffset;\r\n const exifPtrEntry = findTagInIFD(firstIFDOffset, 0x8769); // 0x8769 = ExifOffset\r\n\r\n if (exifPtrEntry) {\r\n // 读取指针指向的位置\r\n targetIFDOffset = readU32(exifPtrEntry.valueOffsetOrData);\r\n }\r\n\r\n // B. 
在目标 IFD (Exif IFD) 找 UserComment\r\n const userCommentEntry = findTagInIFD(targetIFDOffset, 0x9286); // 0x9286 = UserComment\r\n\r\n if (userCommentEntry) {\r\n const { count, valueOffsetOrData } = userCommentEntry;\r\n\r\n // 获取实际数据位置\r\n // 规则:如果 count * unit_size > 4,则 valueOffsetOrData 存的是指针\r\n // UserComment 类型是 7 (Undefined),单位 1 字节。角色卡通常很大,所以一定是指针。\r\n let rawDataOffset = readU32(valueOffsetOrData);\r\n\r\n // 防御性编程:万一数据极短(<=4字节),虽然不可能存下有效角色卡\r\n if (count <= 4) {\r\n // 数据直接存在这 4 个字节里\r\n // 但对于 UserComment 来说,这几乎不可能发生\r\n rawDataOffset = valueOffsetOrData;\r\n }\r\n\r\n const absDataStart = tiffStart + rawDataOffset;\r\n const rawData = fullFileBuffer.slice(absDataStart, absDataStart + count);\r\n\r\n // 4. 解码数据 (处理 ASCII\\0\\0\\0 头)\r\n const header = String.fromCharCode(...rawData.slice(0, 8));\r\n\r\n if (header.startsWith(\"ASCII\\0\\0\\0\")) {\r\n return new TextDecoder(\"utf-8\").decode(rawData.slice(8));\r\n } else if (header.startsWith(\"UNICODE\\0\")) {\r\n return new TextDecoder(\"utf-16\").decode(rawData.slice(8));\r\n } else {\r\n // 尝试直接解码(兼容没有 Header 的情况)\r\n return new TextDecoder(\"utf-8\").decode(rawData);\r\n }\r\n }\r\n\r\n return undefined;\r\n }\r\n export function parse_chunks(data: Uint8Array): WebPChunk[] {\r\n const chunks: WebPChunk[] = [];\r\n\r\n let offset = 12; // skip RIFF header (12 bytes)\r\n const len = data.length;\r\n\r\n while (offset + 8 <= len) {\r\n const type = String.fromCharCode(\r\n data[offset],\r\n data[offset + 1],\r\n data[offset + 2],\r\n data[offset + 3]\r\n );\r\n\r\n const chunkLength =\r\n data[offset + 4] |\r\n (data[offset + 5] << 8) |\r\n (data[offset + 6] << 16) |\r\n (data[offset + 7] << 24);\r\n\r\n const payloadStart = offset + 8;\r\n const payloadEnd = payloadStart + chunkLength;\r\n\r\n if (payloadEnd > len) break;\r\n\r\n chunks.push({\r\n type,\r\n offset,\r\n length: chunkLength,\r\n preview: Array.from(data.slice(payloadStart, payloadStart + 16))\r\n .map((b) => b.toString(16).padStart(2, \"0\"))\r\n .join(\" \"),\r\n });\r\n\r\n // chunks are padded to even sizes\r\n offset = payloadEnd + (chunkLength % 2);\r\n }\r\n\r\n return chunks;\r\n }\r\n}\r\n","import { JPEG } from \"./jpeg\";\r\nimport { PNG } from \"./png\";\r\nimport { ParsedMetadata } from \"./types\";\r\nimport { WebP } from \"./webp\";\r\n\r\nexport function parseImageMetadata(\r\n buffer: ArrayBuffer | Uint8Array\r\n): ParsedMetadata {\r\n const data = buffer instanceof Uint8Array ? 
buffer : new Uint8Array(buffer);\r\n\r\n const isPng = data\r\n .slice(0, 8)\r\n .every((b, i) => b === [137, 80, 78, 71, 13, 10, 26, 10][i]);\r\n const isJpeg = data[0] === 0xff && data[1] === 0xd8;\r\n const isWebP =\r\n String.fromCharCode(...data.slice(0, 4)) === \"RIFF\" &&\r\n String.fromCharCode(...data.slice(8, 12)) === \"WEBP\";\r\n\r\n if (isPng) {\r\n return {\r\n format: \"png\",\r\n chunks: PNG.parse_chunks(data),\r\n };\r\n }\r\n\r\n if (isJpeg) {\r\n return {\r\n format: \"jpeg\",\r\n segments: JPEG.parse_chunks(data),\r\n };\r\n }\r\n\r\n if (isWebP) {\r\n return {\r\n format: \"webp\",\r\n chunks: WebP.parse_chunks(data),\r\n };\r\n }\r\n\r\n throw new Error(\"Unsupported image format\");\r\n}\r\n","export namespace CharacterSpec {\r\n export interface Root {\r\n spec: string;\r\n spec_version: string;\r\n data: Data;\r\n name: string;\r\n description: string;\r\n personality: string;\r\n scenario: string;\r\n first_mes: string;\r\n mes_example: string;\r\n creatorcomment: string;\r\n avatar: string;\r\n talkativeness: string;\r\n fav: boolean;\r\n tags: any[];\r\n create_date: string;\r\n }\r\n\r\n export interface Data {\r\n name: string;\r\n description: string;\r\n personality: string;\r\n scenario: string;\r\n first_mes: string;\r\n mes_example: string;\r\n creator_notes: string;\r\n system_prompt: string;\r\n post_history_instructions: string;\r\n tags: string[];\r\n creator: string;\r\n character_version: string;\r\n alternate_greetings: string[];\r\n extensions: Extensions;\r\n group_only_greetings: any[];\r\n character_book: CharacterBook;\r\n }\r\n\r\n export interface Extensions {\r\n talkativeness: string;\r\n fav: boolean;\r\n world: string;\r\n depth_prompt: DepthPrompt;\r\n }\r\n\r\n export interface DepthPrompt {\r\n prompt: string;\r\n depth: number;\r\n role: string;\r\n }\r\n\r\n export interface CharacterBook {\r\n entries: Entry[];\r\n name: string;\r\n extensions: Record<string, any>;\r\n }\r\n\r\n export interface Entry {\r\n id: number;\r\n keys: string[];\r\n secondary_keys: any[];\r\n comment: string;\r\n content: string;\r\n constant: boolean;\r\n selective: boolean;\r\n insertion_order: number;\r\n enabled: boolean;\r\n position: \"before_char\" | \"after_char\";\r\n use_regex: boolean;\r\n extensions: Extensions2;\r\n }\r\n\r\n export interface Extensions2 {\r\n position: number;\r\n exclude_recursion: boolean;\r\n display_index: number;\r\n probability: number;\r\n useProbability: boolean;\r\n depth: number;\r\n selectiveLogic: number;\r\n group: string;\r\n group_override: boolean;\r\n group_weight: number;\r\n prevent_recursion: boolean;\r\n delay_until_recursion: boolean;\r\n scan_depth: any;\r\n match_whole_words: any;\r\n use_group_scoring: boolean;\r\n case_sensitive: any;\r\n automation_id: string;\r\n role: number;\r\n vectorized: boolean;\r\n sticky: number;\r\n cooldown: number;\r\n delay: number;\r\n match_persona_description: boolean;\r\n match_character_description: boolean;\r\n match_character_personality: boolean;\r\n match_character_depth_prompt: boolean;\r\n match_scenario: boolean;\r\n match_creator_notes: boolean;\r\n }\r\n}\r\n\r\nexport type CharRawData = {\r\n spec: string;\r\n spec_version: string;\r\n data: any;\r\n [key: string]: any;\r\n};\r\n\r\nexport class CharacterCardParserError extends Error {}\r\n","import { CharacterBook } from \"./CharacterBook\";\nimport { parseImageMetadata } from \"./metadata/parser\";\nimport { SpecV1 } from \"./spec_types/spec_v1\";\nimport { SpecV2 } from 
\"./spec_types/spec_v2\";\nimport { SpecV3 } from \"./spec_types/spec_v3\";\nimport { CharacterCardParserError, CharRawData } from \"./types\";\nimport { ParsedMetadata } from \"./metadata/types\";\nimport { CharacterSpec } from \"./types\";\nimport {\n Base64,\n deepClone,\n isValidImageUrl,\n mergeObjects,\n toBase64,\n} from \"./utils\";\nimport { WebP } from \"./metadata/webp\";\nimport { JPEG } from \"./metadata/jpeg\";\n\nexport class CharacterCard {\n static async from_file(file: ArrayBuffer | Uint8Array) {\n const exif_data = parseImageMetadata(file);\n const image_b64: string = await toBase64(file);\n const fallback_avatar = `data:image/${exif_data.format};base64,${image_b64}`;\n const raw_data = this.parse_char_info(file, exif_data);\n return new CharacterCard(\n {\n // default as v1\n spec: \"chara_card_v1\",\n spec_version: \"1.0\",\n data: {},\n ...raw_data,\n },\n fallback_avatar\n );\n }\n\n static from_json(raw_data: CharRawData, fallback_avatar = \"\") {\n return new CharacterCard(raw_data, fallback_avatar);\n }\n\n static parse_char_info(file: ArrayBuffer | Uint8Array, exif: ParsedMetadata) {\n let encoded_text: string | undefined;\n\n // NOTE: About CCV3 keyword checkout this\n // https://github.com/kwaroran/character-card-spec-v3/blob/f3a86af019fbd99f788f7a1155f399655b34ab35/SPEC_V3.md?plain=1#L22-L30\n if (exif.format === \"png\") {\n const v1or2_keys = [\"chara\", \"character_card\"];\n const encoded_ccv_1_2 = exif.chunks.find((x) =>\n v1or2_keys.some((y) => x.keyword?.toLowerCase() === y.toLowerCase())\n );\n const encoded_ccv_3 = exif.chunks.find((x) => x.keyword === \"ccv3\");\n\n encoded_text = encoded_ccv_3?.text ?? encoded_ccv_1_2?.text;\n } else if (exif.format === \"jpeg\") {\n const exif_chunk = exif.segments.find((x) => x.type === \"EXIF\");\n const exif_text = JPEG.extract_user_comment(\n file instanceof Uint8Array ? file : new Uint8Array(file),\n exif_chunk\n );\n encoded_text = exif_text;\n } else if (exif.format === \"webp\") {\n const exif_chunk = exif.chunks.find((x) => x.type === \"EXIF\");\n if (exif_chunk) {\n const exif_data = WebP.extract_user_comment(\n file instanceof Uint8Array ? file : new Uint8Array(file),\n exif_chunk\n );\n encoded_text = exif_data;\n }\n }\n\n if (!encoded_text) {\n throw new CharacterCardParserError(\n \"Failed to extract chara card data from image\"\n );\n }\n\n const json_str = Base64.decode(encoded_text);\n const json = JSON.parse(json_str);\n return json;\n }\n\n constructor(readonly raw_data: CharRawData, readonly fallback_avatar = \"\") {}\n\n async get_avatar(without_fallback = false): Promise<string> {\n const fallback = without_fallback ? \"\" : this.fallback_avatar;\n return [this.raw_data.avatar, this.raw_data.data?.avatar, fallback].filter(\n isValidImageUrl\n )[0];\n }\n\n get avatar(): CharacterSpec.Root[\"avatar\"] {\n return [\n this.raw_data.avatar,\n this.raw_data.data?.avatar,\n this.fallback_avatar,\n ].filter(isValidImageUrl)[0];\n }\n\n get spec(): CharacterSpec.Root[\"spec\"] {\n return this.raw_data.spec || \"unknown\";\n }\n\n get spec_version(): CharacterSpec.Root[\"spec_version\"] {\n return this.raw_data.spec_version || \"unknown\";\n }\n\n get name(): CharacterSpec.Data[\"name\"] {\n switch (this.spec) {\n case \"chara_card_v2\":\n return this.raw_data.data?.name ?? this.raw_data.name;\n case \"chara_card_v3\":\n return this.raw_data.data?.name ?? this.raw_data.name;\n default:\n return this.raw_data.char_name ?? this.raw_data.name ?? 
\"unknown\";\n }\n }\n\n get description(): CharacterSpec.Data[\"description\"] {\n switch (this.spec) {\n case \"chara_card_v2\":\n return this.raw_data.data?.description ?? this.raw_data.description;\n case \"chara_card_v3\":\n return this.raw_data.data?.description ?? this.raw_data.description;\n default:\n return this.raw_data.description ?? \"unknown\";\n }\n }\n\n get first_message(): CharacterSpec.Data[\"first_mes\"] {\n switch (this.spec) {\n case \"chara_card_v2\":\n return this.raw_data.data?.first_mes ?? this.raw_data.first_mes;\n case \"chara_card_v3\":\n return this.raw_data.data?.first_mes ?? this.raw_data.first_mes;\n default:\n return this.raw_data.first_mes ?? \"unknown\";\n }\n }\n\n get message_example(): CharacterSpec.Root[\"mes_example\"] {\n switch (this.spec) {\n case \"chara_card_v2\":\n return this.raw_data.data?.mes_example ?? this.raw_data.mes_example;\n case \"chara_card_v3\":\n return this.raw_data.data?.mes_example ?? this.raw_data.mes_example;\n default:\n return this.raw_data.mes_example ?? \"unknown\";\n }\n }\n\n get create_date(): CharacterSpec.Root[\"create_date\"] {\n switch (this.spec) {\n case \"chara_card_v2\":\n return this.raw_data.data?.create_date ?? this.raw_data.create_date;\n case \"chara_card_v3\":\n return this.raw_data.data?.create_date ?? this.raw_data.create_date;\n default:\n return this.raw_data.create_date ?? \"unknown\";\n }\n }\n\n get personality(): CharacterSpec.Data[\"personality\"] {\n switch (this.spec) {\n case \"chara_card_v2\":\n return this.raw_data.data?.personality ?? this.raw_data.personality;\n case \"chara_card_v3\":\n return this.raw_data.data?.personality ?? this.raw_data.personality;\n default:\n return this.raw_data.personality ?? \"unknown\";\n }\n }\n\n get scenario(): CharacterSpec.Data[\"scenario\"] {\n switch (this.spec) {\n case \"chara_card_v2\":\n return this.raw_data.data?.scenario ?? this.raw_data.scenario;\n case \"chara_card_v3\":\n return this.raw_data.data?.scenario ?? this.raw_data.scenario;\n default:\n return this.raw_data.scenario ?? \"unknown\";\n }\n }\n\n get alternate_greetings(): CharacterSpec.Data[\"alternate_greetings\"] {\n switch (this.spec) {\n case \"chara_card_v2\":\n return this.raw_data.data?.alternate_greetings;\n case \"chara_card_v3\":\n return this.raw_data.data?.alternate_greetings;\n default:\n return [];\n }\n }\n\n get character_book(): CharacterSpec.CharacterBook {\n switch (this.spec) {\n case \"chara_card_v2\":\n return this.raw_data.data?.character_book;\n case \"chara_card_v3\":\n return this.raw_data.data?.character_book;\n default:\n return {\n entries: [],\n name: this.name,\n extensions: {},\n };\n }\n }\n\n get tags(): CharacterSpec.Data[\"tags\"] {\n switch (this.spec) {\n case \"chara_card_v2\":\n return this.raw_data.data?.tags;\n case \"chara_card_v3\":\n return this.raw_data.data?.tags;\n default:\n return [];\n }\n }\n\n /**\n * Converts the current character card data to the SpecV1 format.\n *\n * This method constructs a SpecV1.TavernCard object by extracting the necessary\n * fields from the current instance's raw data using a getter function. The function\n * retrieves data from multiple sources, including instance properties, the raw data\n * object, and its nested data object. 
The resulting object contains fields defined\n * in the chara_card_v1 specification, such as name, description, personality, scenario,\n * first message, and example messages.\n *\n * @returns A SpecV1.TavernCard object representing the character card data in SpecV1 format.\n */\n\n public toSpecV1(): SpecV1.TavernCard {\n const getter = (key: string) =>\n (this as any)[key] ?? this.raw_data[key] ?? this.raw_data.data?.[key];\n return {\n name: getter(\"name\") ?? getter(\"char_name\"),\n description: getter(\"description\"),\n personality: getter(\"personality\"),\n scenario: getter(\"scenario\"),\n first_mes: getter(\"first_mes\"),\n mes_example: getter(\"mes_example\"),\n };\n }\n\n /**\n * Converts the current character card data to the SpecV2 format.\n *\n * This method constructs a SpecV2.TavernCardV2 object by extracting the necessary\n * fields from the current instance's raw data using a getter function. The function\n * retrieves data from multiple sources, including instance properties, the raw data\n * object, and its nested data object. The resulting object contains fields defined\n * in the chara_card_v2 specification, including additional fields introduced in\n * later updates.\n *\n * @returns A deep-cloned SpecV2.TavernCardV2 object representing the character card\n * data in SpecV2 format.\n */\n\n public toSpecV2(): SpecV2.TavernCardV2 {\n const getter = (key: string) =>\n (this as any)[key] ?? this.raw_data[key] ?? this.raw_data.data?.[key];\n return deepClone({\n spec: \"chara_card_v2\",\n spec_version: \"2.0\",\n data: {\n // fields from CCV2\n name: getter(\"name\") ?? getter(\"char_name\"),\n description: getter(\"description\"),\n mes_example: getter(\"mes_example\"),\n first_mes: getter(\"first_mes\"),\n personality: getter(\"personality\"),\n scenario: getter(\"scenario\"),\n // New fields start here\n creator_notes: getter(\"creator_notes\"),\n system_prompt: getter(\"system_prompt\"),\n post_history_instructions: getter(\"post_history_instructions\"),\n alternate_greetings: getter(\"alternate_greetings\"),\n character_book: getter(\"character_book\"),\n // May 8th additions\n tags: getter(\"tags\"),\n creator: getter(\"creator\"),\n character_version: getter(\"character_version\"),\n extensions: getter(\"extensions\"),\n },\n });\n }\n\n /**\n * Converts the current character card data to the SpecV3 format.\n *\n * This function utilizes a getter to retrieve properties from the\n * character card's raw data and returns a deep-cloned object\n * conforming to the SpecV3.CharacterCardV3 structure. It includes\n * fields from the CCV2 specification, changes specific to CCV3,\n * and new fields introduced in CCV3.\n *\n * @returns A deep-cloned object representing the character card data\n * in SpecV3 format.\n */\n\n public toSpecV3(): SpecV3.CharacterCardV3 {\n const getter = (key: string) =>\n (this as any)[key] ?? this.raw_data[key] ?? this.raw_data.data?.[key];\n return deepClone({\n spec: \"chara_card_v3\",\n spec_version: \"3.0\",\n data: {\n // fields from CCV2\n name: getter(\"name\") ?? 
getter(\"char_name\"),\n description: getter(\"description\"),\n tags: getter(\"tags\"),\n creator: getter(\"creator\"),\n character_version: getter(\"character_version\"),\n mes_example: getter(\"mes_example\"),\n extensions: getter(\"extensions\"),\n system_prompt: getter(\"system_prompt\"),\n post_history_instructions: getter(\"post_history_instructions\"),\n first_mes: getter(\"first_mes\"),\n alternate_greetings: getter(\"alternate_greetings\"),\n personality: getter(\"personality\"),\n scenario: getter(\"scenario\"),\n //Changes from CCV2\n creator_notes: getter(\"creator_notes\"),\n character_book: getter(\"character_book\"),\n //New fields in CCV3\n assets: getter(\"assets\"),\n nickname: getter(\"nickname\"),\n creator_notes_multilingual: getter(\"creator_notes_multilingual\"),\n source: getter(\"source\"),\n group_only_greetings: getter(\"group_only_greetings\"),\n creation_date: getter(\"create_date\") ?? getter(\"creation_date\"),\n modification_date: getter(\"modify_date\") ?? getter(\"modification_date\"),\n },\n });\n }\n\n /**\n * Returns the maximum compatible version of the character card\n *\n * this card => merge(v1,v2,v3);\n */\n public toMaxCompatibleSpec():\n | SpecV3.CharacterCardV3\n | SpecV2.TavernCardV2\n | SpecV1.TavernCard {\n return mergeObjects(this.toSpecV1(), this.toSpecV2(), this.toSpecV3());\n }\n\n /**\n * Creates a clone of the current CharacterCard instance in the specified version format.\n *\n * This method generates a new CharacterCard object with the data formatted to match\n * the specified version's specification. It supports conversion to SpecV1, SpecV2, and\n * SpecV3 formats by utilizing the respective `toSpecV1`, `toSpecV2`, and `toSpecV3` methods.\n *\n * @param version - The specification version (\"v1\", \"v2\", or \"v3\") to clone the character card into.\n * Defaults to \"v3\" if not specified.\n *\n * @returns A new CharacterCard instance formatted according to the specified version.\n *\n * @throws Will throw an error if the specified version is unsupported.\n */\n\n public clone(version = \"v3\" as \"v1\" | \"v2\" | \"v3\") {\n let new_raw_data: any = null;\n switch (version) {\n case \"v1\": {\n new_raw_data = this.toSpecV1();\n break;\n }\n case \"v2\": {\n new_raw_data = this.toSpecV2();\n break;\n }\n case \"v3\": {\n new_raw_data = this.toSpecV3();\n break;\n }\n default: {\n throw new Error(`Unsupported version ${version}`);\n }\n }\n return CharacterCard.from_json({\n spec: \"chara_card_v1\",\n spec_version: \"1.0\",\n data: {},\n ...new_raw_data,\n });\n }\n\n public get_book() {\n return CharacterBook.from_json(this.raw_data);\n 
}\n}\n"],"mappings":"AAAO,SAASA,EACdC,EACiB,CACjB,GAAI,OAAO,OAAW,KAAe,OAAO,SAASA,CAAK,EAExD,OAAO,QAAQ,QAAQA,EAAM,SAAS,QAAQ,CAAC,EAGjD,GAAI,OAAO,KAAS,KAAeA,aAAiB,KAElD,OAAO,IAAI,QAAQ,CAACC,EAASC,IAAW,CACtC,IAAMC,EAAS,IAAI,WACnBA,EAAO,OAAS,IAAM,CAEpB,IAAMC,EADSD,EAAO,OACA,MAAM,GAAG,EAAE,CAAC,EAClCF,EAAQG,CAAM,CAChB,EACAD,EAAO,QAAUD,EACjBC,EAAO,cAAcH,CAAK,CAC5B,CAAC,EAIH,IAAIK,EACJ,GAAIL,aAAiB,YACnBK,EAAQ,IAAI,WAAWL,CAAK,UACnBA,aAAiB,WAC1BK,EAAQL,MAER,OAAM,IAAI,UAAU,wBAAwB,EAG9C,GAAI,OAAO,OAAW,IAEpB,OAAO,QAAQ,QAAQ,OAAO,KAAKK,CAAK,EAAE,SAAS,QAAQ,CAAC,EACvD,CAEL,IAAIC,EAAS,GACb,QAASC,EAAI,EAAGA,EAAIF,EAAM,OAAQE,IAChCD,GAAU,OAAO,aAAaD,EAAME,CAAC,CAAC,EAExC,OAAO,QAAQ,QAAQ,KAAKD,CAAM,CAAC,CACrC,CACF,CAEO,SAASE,EAAgBC,EAAU,CACxC,OAAI,OAAOA,GAAQ,SACV,GAIP,GAAAA,EAAI,WAAW,SAAS,GACxBA,EAAI,WAAW,UAAU,GACzBA,EAAI,WAAW,OAAO,EAM1B,CAEO,IAAMC,EAA4B,WAAW,gBAChD,WAAW,gBACPC,GAAY,KAAK,MAAM,KAAK,UAAUA,CAAC,CAAC,EAEnCC,EAAN,KAAa,CAClB,OAAO,OAAOZ,EAAuB,CACnC,GAAI,OAAO,OAAW,KAAe,OAAO,OAAO,MAAS,WAC1D,OAAO,OAAO,KAAK,SAAS,mBAAmBA,CAAK,CAAC,CAAC,EACjD,GAAI,OAAO,OAAW,IAC3B,OAAO,OAAO,KAAKA,EAAO,OAAO,EAAE,SAAS,QAAQ,EAEpD,MAAM,IAAI,MAAM,2CAA2C,CAE/D,CAEA,OAAO,OAAOI,EAAwB,CACpC,GAAI,OAAO,OAAW,KAAe,OAAO,OAAO,MAAS,WAC1D,OAAO,mBAAmB,OAAO,OAAO,KAAKA,CAAM,CAAC,CAAC,EAChD,GAAI,OAAO,OAAW,IAC3B,OAAO,OAAO,KAAKA,EAAQ,QAAQ,EAAE,SAAS,OAAO,EAErD,MAAM,IAAI,MAAM,2CAA2C,CAE/D,CACF,EAqCA,SAASS,EAASC,EAAqB,CACrC,OAAO,OAAOA,GAAM,UAAYA,IAAM,IACxC,CACA,SAASC,EAASD,EAAkB,CAClC,OAA0BA,GAAM,IAClC,CASO,SAASE,KAAgBC,EAAqB,CACnD,IAAMC,EAAO,IAAI,IAAID,EAAQ,QAAQ,OAAO,IAAI,CAAC,EACjD,OAAO,OAAO,YACZ,MAAM,KAAKC,CAAI,EAAE,IAAKC,GAAQ,CAC5B,IAAMC,EAAOH,EAAQ,IAAKH,GAAMA,EAAEK,CAAG,CAAC,EACtC,OAAIC,EAAK,KAAK,MAAM,OAAO,EAElB,CAACD,EAAK,CAAC,GAAGC,CAAI,EAAE,QAAQ,EAAE,OAAOL,CAAQ,EAAE,OAAO,OAAO,EAAE,CAAC,CAAC,EAElEK,EAAK,KAAKP,CAAQ,EACb,CAACM,EAAKH,EAAa,GAAGI,EAAK,OAAOP,CAAQ,CAAC,CAAC,EAE9C,CAACM,EAAK,CAAC,GAAGC,CAAI,EAAE,QAAQ,EAAE,OAAOL,CAAQ,EAAE,CAAC,CAAC,CACtD,CAAC,CACH,CACF,CACO,SAASM,EAAQC,EAAe,CACrC,OAAO,MAAM,KAAK,IAAI,IAAIA,CAAG,CAAC,CAChC,CCtJO,IAAMC,EAAN,MAAMC,CAAyC,CACpD,OAAO,UAAUC,EAAW,CAC1B,GAAI,OAAOA,GAAS,UAAYA,IAAS,KACvC,MAAM,IAAI,MAAM,wBAAwB,EAE1C,IAAIC,EAAiB,CAAC,EAClB,MAAM,QAAQD,CAAI,EACpBC,EAAUD,EAEVC,EAEE,MAAM,QAAQD,GAAM,OAAO,EACvBA,EAAK,QAEP,MAAM,QAAQA,GAAM,MAAM,gBAAgB,OAAO,EAC/CA,EAAK,KAAK,eAAe,QACzB,CAAC,EAET,IAAME,EAAO,IAAIH,EAAcE,CAAO,EAChCE,EACJH,GAAM,gBAAkBA,GAAM,MAAM,gBAAkBA,EACxD,OAAAE,EAAK,KAAOC,GAAgB,KAC5BD,EAAK,YAAcC,GAAgB,YACnCD,EAAK,mBAAqBC,GAAgB,oBAAsB,GAChED,EAAK,WAAaC,GAAgB,YAAc,GAChDD,EAAK,WAAaC,GAAgB,YAAc,CAAC,EAC1CD,CACT,CAEA,KAAe,UACf,YAAsB,GAEtB,aACA,mBAA8B,GAC9B,WAA4C,CAAC,EAC7C,QAAsC,CAAC,EACvC,WAAkC,GAElC,YAAYD,EAAsC,CAAC,EAAG,CACpD,KAAK,QAAUG,EAAUH,CAAO,EAChC,KAAK,UAAU,CACjB,CAEO,WAAY,CACjB,IAAMI,EAAU,WAEhB,QAAWC,KAAS,KAAK,QAAS,CAChC,GAAM,CAAE,KAAAC,CAAK,EAAID,EACXE,EAAuB,CAAC,EAC9B,QAAWC,KAAKF,EACVF,EAAQ,KAAKI,CAAC,EAChBD,EAAW,KACT,GAAGC,EACA,MAAMJ,CAAO,EACb,IAAKK,GAAMA,EAAE,KAAK,CAAC,EACnB,OAAO,OAAO,CACnB,EAEAF,EAAW,KAAKC,CAAC,EAGrBH,EAAM,KAAOE,CACf,CACF,CAEQ,MACNG,EACAC,EAAsC,CAAC,EACvCC,EAAgB,EACY,CAC5B,GAAIA,IAAkB,KAAK,YAAc,IACvC,OAAOC,EAAKF,CAAO,EAErB,IAAMG,EAAkB,CACtBJ,EACA,GAAGG,EAAKF,CAAO,EAAE,IAAKF,GAAMA,EAAE,OAAO,CACvC,EAAE,KAAK;AAAA,CAAI,EACLM,EAAkB,KAAK,QAC1B,OAAQN,GAAMA,EAAE,SAAS,KAAK,CAAC,EAC/B,OAAQA,GAAMA,EAAE,SAAW,CAACE,EAAQ,SAASF,CAAC,CAAC,EAClD,GAAIM,EAAgB,SAAW,EAC7B,OAAOF,EAAKF,CAAO,EAErB,QAAWN,KAASU,EACCV,EAAM,KAAK,KAAMG,GAAMM,EAAgB,SAASN,CAAC,CAAC,GAEnEG,EAAQ,KAAKN,CAAK,EAGtB,OAAI,KAAK,mBACA,KAAK,MAAMK,EAASC,EAASC,EAAgB,CAAC,EAEhDC,EAAKF,CAAO,CACrB,CACO,KAAKD,EAA6C,CACvD,IAAMC,EAAU,KAAK,MAAMD,CAAO
,EAC5BM,EAAW,KAAK,QAAQ,OAC3BP,GAAMA,EAAE,UAAYA,EAAE,SAAWA,EAAE,SAAS,KAAK,CACpD,EACA,OAAOI,EAAK,CAAC,GAAGF,EAAS,GAAGK,CAAQ,CAAC,EAAE,KACrC,CAACC,EAAGC,IAAMD,EAAE,gBAAkBC,EAAE,eAClC,CACF,CACF,ECzGO,IAAUC,MAAV,CACE,SAASC,EAAaC,EAAiC,CAC5D,IAAMC,EAA0B,CAAC,EAE7BC,EAAS,EACb,KAAOA,EAASF,EAAK,QAAQ,CAC3B,GAAIA,EAAKE,CAAM,IAAM,IACnB,MAAM,IAAI,MAAM,4BAA4BA,CAAM,EAAE,EAEtD,IAAIC,EAASH,EAAKE,EAAS,CAAC,EAC5B,KAAOC,IAAW,KAChBD,IACAC,EAASH,EAAKE,EAAS,CAAC,EAG1B,IAAME,EAAeF,EAGrB,GAFAA,GAAU,EAENC,IAAW,KAAQA,IAAW,IAAM,MAExC,IAAME,EAAUL,EAAKE,CAAM,GAAK,EAAKF,EAAKE,EAAS,CAAC,EAC9CI,EAAeJ,EAAS,EACxBK,EAAaD,EAAeD,EAAS,EACrCG,EAAcR,EAAK,MAAMM,EAAcC,CAAU,EAEjDE,EAAoB,CACxB,OAAQ,MAAMN,EAAO,SAAS,EAAE,EAAE,YAAY,EAAE,SAAS,EAAG,GAAG,CAAC,GAChE,OAAQC,EACR,OAAAC,EACA,KAAM,QACN,QAAS,MAAM,KAAKG,EAAY,MAAM,EAAG,EAAE,CAAC,EACzC,IAAKE,GAAMA,EAAE,SAAS,EAAE,EAAE,SAAS,EAAG,GAAG,CAAC,EAC1C,KAAK,GAAG,CACb,EAGEP,IAAW,KACX,OAAO,aAAa,GAAGK,EAAY,MAAM,EAAG,CAAC,CAAC,IAAM,SAEpDC,EAAK,KAAO,OACHN,IAAW,IACT,OAAO,aAAa,GAAGK,EAAY,MAAM,EAAG,CAAC,CAAC,EAClD,WAAW,MAAM,EAAGC,EAAK,KAAO,OAErC,OAAO,aAAa,GAAGD,EAAY,MAAM,EAAG,EAAE,CAAC,EAAE,SAC/C,8BACF,EAEAC,EAAK,KAAO,MACPA,EAAK,KAAO,OACVN,IAAW,MACpBM,EAAK,KAAO,UACZA,EAAK,QAAU,IAAI,YAAY,EAAE,OAAOD,CAAW,GAGrDP,EAAS,KAAKQ,CAAI,EAClBP,EAASK,CACX,CAEA,OAAON,CACT,CA3DOH,EAAS,aAAAC,EAmEhB,SAASY,EACPC,EACAC,EACoB,CAEpB,GAAIA,GAAaD,EAAO,OAAQ,OAIhC,IAAME,EAAmBF,EAAOC,CAAS,GAAK,EAAKD,EAAOC,EAAY,CAAC,EACnEE,EAAiB,GAErB,GAAID,IAAoB,MAAQC,EAAiB,WACxCD,IAAoB,MAAQC,EAAiB,OACjD,QAGL,IAAMC,EAAWd,GAAmB,CAClC,IAAMe,EAAIJ,EAAYX,EACtB,OAAIe,EAAI,EAAIL,EAAO,OAAe,EAC3BG,EACHH,EAAOK,CAAC,EAAKL,EAAOK,EAAI,CAAC,GAAK,EAC7BL,EAAOK,CAAC,GAAK,EAAKL,EAAOK,EAAI,CAAC,CACrC,EAEMC,EAAWhB,GAAmB,CAClC,IAAMe,EAAIJ,EAAYX,EACtB,OAAIe,EAAI,EAAIL,EAAO,OAAe,EAE3BG,GACFH,EAAOK,CAAC,EACNL,EAAOK,EAAI,CAAC,GAAK,EACjBL,EAAOK,EAAI,CAAC,GAAK,GACjBL,EAAOK,EAAI,CAAC,GAAK,MAClB,GACAL,EAAOK,CAAC,GAAK,GACZL,EAAOK,EAAI,CAAC,GAAK,GACjBL,EAAOK,EAAI,CAAC,GAAK,EAClBL,EAAOK,EAAI,CAAC,KACZ,CACR,EAGME,EAAU,CAACC,EAAmBC,IAAqB,CACvD,IAAMC,EAAQN,EAAQI,CAAS,EAC/B,QAASG,EAAI,EAAGA,EAAID,EAAOC,IAAK,CAC9B,IAAMC,EAAWJ,EAAY,EAAIG,EAAI,GACrC,GAAIP,EAAQQ,CAAQ,IAAMH,EACxB,MAAO,CACL,KAAML,EAAQQ,EAAW,CAAC,EAC1B,MAAON,EAAQM,EAAW,CAAC,EAE3B,iBAAkBA,EAAW,CAC/B,CAEJ,CACA,OAAO,IACT,EAKMC,EAAYP,EAAQ,CAAC,EAC3B,GAAIO,IAAc,EAAG,OAGrB,IAAIC,EAAYD,EACVE,EAAUR,EAAQM,EAAW,KAAM,EACrCE,IACFD,EAAYR,EAAQS,EAAQ,gBAAgB,GAI9C,IAAMC,EAAaT,EAAQO,EAAW,KAAM,EAC5C,GAAI,CAACE,EAAY,OAGjB,GAAM,CAAE,MAAAN,EAAO,iBAAAO,CAAiB,EAAID,EAIhCE,EAAUZ,EAAQW,CAAgB,EAGlCP,GAAS,IAAGQ,EAAUD,GAE1B,IAAME,EAAWlB,EAAYiB,EACvBE,EAAUpB,EAAO,MAAMmB,EAAUA,EAAWT,CAAK,EAGjDW,EAAS,OAAO,aAAa,GAAGD,EAAQ,MAAM,EAAG,CAAC,CAAC,EACzD,OAAIC,EAAO,WAAW,aAAa,EAC1B,IAAI,YAAY,OAAO,EAAE,OAAOD,EAAQ,MAAM,CAAC,CAAC,EAC9CC,EAAO,WAAW,WAAW,EAC/B,IAAI,YAAY,QAAQ,EAAE,OAAOD,EAAQ,MAAM,CAAC,CAAC,EAGjD,IAAI,YAAY,OAAO,EAAE,OAAOA,CAAO,CAElD,CAKO,SAASE,EACdC,EACAC,EACoB,CACpB,GAAIA,EAAQ,OAAS,OAAQ,OAO7B,IAAMvB,EAAYuB,EAAQ,OAAS,GAEnC,OAAOzB,EAAqBwB,EAAgBtB,CAAS,CACvD,CAdOf,EAAS,qBAAAoC,IA5KDpC,IAAA,ICAV,IAAUuC,MAAV,CACE,SAASC,EAAaC,EAA8B,CACzD,IAAMC,EAAqB,CAAC,EACxBC,EAAS,EAEb,KAAOA,EAASF,EAAK,QACf,EAAAE,EAAS,EAAIF,EAAK,SADK,CAG3B,IAAMG,GACFH,EAAKE,CAAM,GAAK,GACfF,EAAKE,EAAS,CAAC,GAAK,GACpBF,EAAKE,EAAS,CAAC,GAAK,EACrBF,EAAKE,EAAS,CAAC,KACjB,EAEIE,EAAO,OAAO,aAClBJ,EAAKE,EAAS,CAAC,EACfF,EAAKE,EAAS,CAAC,EACfF,EAAKE,EAAS,CAAC,EACfF,EAAKE,EAAS,CAAC,CACjB,EAEMG,EAAaH,EAAS,EACtBI,EAAWD,EAAaF,EAC9B,GAAIG,EAAW,EAAIN,EAAK,OAAQ,MAEhC,IAAMO,EAAYP,EAAK,MAAMK,EAAYC,CAAQ,EAC3CE,GACFR,EAAKM,CAAQ,GAAK,GACjBN,EAAKM,EAAW,CAAC,GAAK,GACtBN,EAAKM,EAAW,CAAC,GAAK,EACvBN,EAAKM,EAAW,
CAAC,KACnB,EAEIG,EAAkB,CAAE,KAAAL,EAAM,OAAAD,EAAQ,IAAAK,CAAI,EAE5C,GAAIJ,IAAS,OACXK,EAAM,OACFF,EAAU,CAAC,GAAK,GACfA,EAAU,CAAC,GAAK,GAChBA,EAAU,CAAC,GAAK,EACjBA,EAAU,CAAC,KACb,EACFE,EAAM,QACFF,EAAU,CAAC,GAAK,GACfA,EAAU,CAAC,GAAK,GAChBA,EAAU,CAAC,GAAK,EACjBA,EAAU,CAAC,KACb,EACFE,EAAM,SAAWF,EAAU,CAAC,EAC5BE,EAAM,UAAYF,EAAU,CAAC,UACpBH,IAAS,OAAQ,CAC1B,IAAMM,EAAO,IAAI,YAAY,EAAE,OAAOH,CAAS,EACzCI,EAAMD,EAAK,QAAQ,IAAI,EACzBC,GAAO,GACTF,EAAM,QAAUC,EAAK,MAAM,EAAGC,CAAG,EACjCF,EAAM,KAAOC,EAAK,MAAMC,EAAM,CAAC,GAE/BF,EAAM,QAAUC,CAEpB,CAEAT,EAAO,KAAKQ,CAAK,EACjBP,EAASI,EAAW,CACtB,CAEA,OAAOL,CACT,CAlEOH,EAAS,aAAAC,IADDD,IAAA,ICAV,IAAUc,MAAV,CACE,SAASC,EACdC,EACAC,EACoB,CAIpB,IAAIC,EAAYD,EAAM,OAAS,EAG/B,GAAIC,GAAaF,EAAe,OAAQ,OAKtCA,EAAeE,CAAS,IAAM,IAC9BF,EAAeE,EAAY,CAAC,IAAM,KAClCF,EAAeE,EAAY,CAAC,IAAM,KAClCF,EAAeE,EAAY,CAAC,IAAM,MAGlCA,GAAa,GAMf,IAAMC,EACHH,EAAeE,CAAS,GAAK,EAAKF,EAAeE,EAAY,CAAC,EAC7DE,EAAiB,GAErB,GAAID,IAAoB,MACtBC,EAAiB,WACRD,IAAoB,MAC7BC,EAAiB,OACZ,CACL,QAAQ,MAAM,8CAAiBD,EAAgB,SAAS,EAAE,CAAC,EAC3D,MACF,CAGA,IAAME,EAAWC,GAAmB,CAClC,IAAMC,EAAIL,EAAYI,EACtB,OAAIC,EAAI,EAAIP,EAAe,OAAe,EACtCI,EACKJ,EAAeO,CAAC,EAAKP,EAAeO,EAAI,CAAC,GAAK,EAE7CP,EAAeO,CAAC,GAAK,EAAKP,EAAeO,EAAI,CAAC,CAE1D,EAEMC,EAAWF,GAAmB,CAClC,IAAMC,EAAIL,EAAYI,EACtB,OAAIC,EAAI,EAAIP,EAAe,OAAe,EAEtCI,GAECJ,EAAeO,CAAC,EACdP,EAAeO,EAAI,CAAC,GAAK,EACzBP,EAAeO,EAAI,CAAC,GAAK,GACzBP,EAAeO,EAAI,CAAC,GAAK,MAC5B,GAIEP,EAAeO,CAAC,GAAK,GACpBP,EAAeO,EAAI,CAAC,GAAK,GACzBP,EAAeO,EAAI,CAAC,GAAK,EAC1BP,EAAeO,EAAI,CAAC,KACtB,CAGN,EAGME,EAAiBD,EAAQ,CAAC,EAChC,GAAIC,IAAmB,EAAG,OAG1B,IAAMC,EAAe,CAACC,EAAmBC,IAAwB,CAC/D,IAAMC,EAAaR,EAAQM,CAAS,EAEpC,QAASG,EAAI,EAAGA,EAAID,EAAYC,IAAK,CAEnC,IAAMC,EAAcJ,EAAY,EAAIG,EAAI,GAGxC,GAFYT,EAAQU,CAAW,IAEnBH,EACV,MAAO,CACL,KA