UNPKG

@autobe/agent

Version:

AI backend server code generator

690 lines (667 loc) • 1.21 MB
/*
 * NOTE(review): this file is a bundled/minified distribution artifact of
 * @autobe/agent (served via UNPKG). The wrapped lines below break
 * mid-expression and mid-string-literal, so the code is left byte-identical;
 * only this comment block is added at a statement boundary. Edit the
 * package's original sources, not this bundle.
 *
 * Contents of this segment:
 *
 * - getAutoBeGenerated(compiler, state, histories, tokenUsage, options):
 *   assembles a { "path": content } map of all generated project files from
 *   the pipeline state. Returns {} when state.analyze is null. Always adds
 *   docs/analysis/* from state.analyze.files; then, for each stage whose
 *   `step` equals state.analyze.step, adds: prisma schemas (regenerated via
 *   compiler.prisma.write when options.dbms is not "postgres") plus
 *   docs/ERD.md on compile success or prisma/compile-error-reason.log on
 *   failure; interface files (test/features/* filtered out when a test stage
 *   exists for the same interface step); test files plus the test template;
 *   and realize functions/authorizations plus template and controllers.
 *   Finishes by adding autobe/histories.json and autobe/tokenUsage.json.
 *   NOTE(review): the realize branch reads state.interface.document without a
 *   null check — presumably a completed realize step implies a completed
 *   interface step; confirm against the orchestrator.
 *
 * - emplaceMap(dict, key, generator): get-or-create helper for a Map —
 *   returns the existing value, otherwise stores and returns generator().
 *
 * - class AutoBeAgentBase: base agent holding `asset` and a
 *   Map<type, Set<listener>> registry. getFiles() delegates to
 *   getAutoBeGenerated; on()/off() manage listeners (off() drops the Set when
 *   emptied); dispatch() awaits every listener for event.type in parallel and
 *   deliberately swallows individual listener errors (empty catch).
 *
 * - class AutoBeTokenUsage: aggregates AgenticaTokenUsage per stage
 *   (facade, analyze, prisma, interface, test, realize). record() increments
 *   facade plus any additional stages; increment() adds another usage
 *   key-by-key; static plus() merges two instances; toJSON()/keys() support
 *   serialization.
 *
 * - createAgenticaHistory(props): adapts an AutoBe history entry to an
 *   Agentica-style history object. user/assistant messages pass through with
 *   a toJSON; otherwise looks up the operation whose function name equals
 *   history.type (null when absent) and builds an "execute" record whose
 *   value.success is true for analyze/interface and otherwise reflects
 *   history.compiled.type === "success".
 *
 * - assertSchemaModel(model): throws for "gemini" — its JSON-schema dialect
 *   bans the union types (oneOf/anyOf) that @autobe requires.
 *
 * - enforceToolCall(agent): on each outgoing request forces
 *   tool_choice = "required" when tools are present and deletes
 *   parallel_tool_calls.
 *
 * - orchestrateAnalyzeComposer (definition continues past this segment):
 *   builds a MicroAgentica agent with the Compose controller, prior
 *   user/assistant history, and an injected system prompt.
 */
import { AgenticaTokenUsage, MicroAgentica } from "@agentica/core"; import { hash, HashSet, HashMap, Pair, Semaphore, Singleton, sleep_for, randint } from "tstl"; import { v4 } from "uuid"; import * as __typia_transform__validateReport from "typia/lib/internal/_validateReport.js"; import { StringUtil, transformOpenApiDocument, AutoBeEndpointComparator } from "@autobe/utils"; import "typia"; import * as __typia_transform__accessExpressionAsString from "typia/lib/internal/_accessExpressionAsString.js"; import { OpenApiTypeChecker, HttpMigration } from "@samchon/openapi"; import { OpenApiV3_1Emender } from "@samchon/openapi/lib/converters/OpenApiV3_1Emender"; import { NamingConvention } from "typia/lib/utils/NamingConvention"; import * as __typia_transform__isUniqueItems from "typia/lib/internal/_isUniqueItems.js"; async function getAutoBeGenerated(compiler, state, histories, tokenUsage, options) { const ret = {}; if (state.analyze === null) return {}; Object.assign(ret, Object.fromEntries(Object.entries(state.analyze.files).map((([key, value]) => [ `docs/analysis/${key.split("/").at(-1)}`, value ])))); if (state.prisma?.step === state.analyze.step) { const schemaFiles = (options?.dbms ?? "postgres") === "postgres" ? state.prisma.schemas : await compiler.prisma.write(state.prisma.result.data, options.dbms); Object.assign(ret, Object.fromEntries(Object.entries(schemaFiles).map((([key, value]) => [ `prisma/schema/${key.split("/").at(-1)}`, value ]))), { "autobe/prisma.json": JSON.stringify(state.prisma.result.data) }); if (state.prisma.compiled.type === "success") ret["docs/ERD.md"] = state.prisma.compiled.document; else if (state.prisma.compiled.type === "failure") ret["prisma/compile-error-reason.log"] = state.prisma.compiled.reason; } if (state.interface?.step === state.analyze.step) { const files = await compiler.interface.write(state.interface.document); Object.assign(ret, state.test?.step === state.interface.step ? 
Object.fromEntries(Object.entries(files).filter((([key]) => key.startsWith("test/features/") === false))) : files, { "autobe/document.json": JSON.stringify(state.interface.document) }); } if (state.test?.step === state.analyze.step) Object.assign(ret, Object.fromEntries(state.test.files.map((f => [ f.location, f.content ]))), await compiler.test.getTemplate()); if (state.realize?.step === state.analyze.step) { Object.assign(ret, { ...Object.fromEntries(state.realize.functions.map((f => [ f.location, f.content ]))), ...Object.fromEntries(state.realize.authorizations.map((auth => [ [ auth.decorator.location, auth.decorator.content ], [ auth.provider.location, auth.provider.content ], [ auth.payload.location, auth.payload.content ] ])).flat()), ...await compiler.realize.getTemplate(), ...await compiler.realize.controller({ document: state.interface.document, functions: state.realize.functions, authorizations: state.realize.authorizations }) }); } Object.assign(ret, { "autobe/histories.json": JSON.stringify(histories), "autobe/tokenUsage.json": JSON.stringify(tokenUsage) }); return ret; } function emplaceMap(dict, key, generator) { const oldbie = dict.get(key); if (oldbie !== undefined) { return oldbie; } const value = generator(); dict.set(key, value); return value; } class AutoBeAgentBase { constructor(asset) { this.asset = asset; this.listeners_ = new Map; } async getFiles(options) { return getAutoBeGenerated(await this.asset.compiler(), this.asset.state(), this.getHistories(), this.getTokenUsage(), options); } on(type, listener) { emplaceMap(this.listeners_, type, (() => new Set)).add(listener); return this; } off(type, listener) { const set = this.listeners_.get(type); if (set === undefined) return this; set.delete(listener); if (set.size === 0) this.listeners_.delete(type); return this; } async dispatch(event) { const set = this.listeners_.get(event.type); if (set === undefined) return; await Promise.all(Array.from(set).map((async listener => { try { await 
listener(event); } catch {} }))); } } class AutoBeTokenUsage { constructor(props) { if (props === undefined) { this.facade = new AgenticaTokenUsage; this.analyze = new AgenticaTokenUsage; this.prisma = new AgenticaTokenUsage; this.interface = new AgenticaTokenUsage; this.test = new AgenticaTokenUsage; this.realize = new AgenticaTokenUsage; return; } this.facade = new AgenticaTokenUsage(props.facade); this.analyze = new AgenticaTokenUsage(props.analyze); this.prisma = new AgenticaTokenUsage(props.prisma); this.interface = new AgenticaTokenUsage(props.interface); this.test = new AgenticaTokenUsage(props.test); this.realize = new AgenticaTokenUsage(props.realize); } record(usage, additionalStages = []) { this.facade.increment(usage); additionalStages.forEach((stage => { this[stage].increment(usage); })); } increment(usage) { AutoBeTokenUsage.keys().forEach((key => { this[key].increment(usage[key]); })); return this; } static plus(usageA, usageB) { return new AutoBeTokenUsage({ facade: AgenticaTokenUsage.plus(usageA.facade, usageB.facade), analyze: AgenticaTokenUsage.plus(usageA.analyze, usageB.analyze), prisma: AgenticaTokenUsage.plus(usageA.prisma, usageB.prisma), interface: AgenticaTokenUsage.plus(usageA.interface, usageB.interface), test: AgenticaTokenUsage.plus(usageA.test, usageB.test), realize: AgenticaTokenUsage.plus(usageA.realize, usageB.realize) }); } toJSON() { return { facade: this.facade.toJSON(), analyze: this.analyze.toJSON(), prisma: this.prisma.toJSON(), interface: this.interface.toJSON(), test: this.test.toJSON(), realize: this.realize.toJSON() }; } static keys() { return [ "facade", "analyze", "prisma", "interface", "test", "realize" ]; } } function createAgenticaHistory(props) { if (props.history.type === "userMessage") return { ...props.history, toJSON: () => props.history }; else if (props.history.type === "assistantMessage") return { ...props.history, toJSON: () => props.history }; const operation = props.operations.find((op => op.function.name 
=== props.history.type)); if (operation === undefined) return null; const partial = { id: props.history.id, created_at: props.history.created_at, type: "execute", arguments: { reason: props.history.reason }, value: { success: props.history.type === "analyze" || props.history.type === "interface" ? true : props.history.compiled.type === "success" } }; return { ...partial, protocol: operation.protocol, operation, toJSON: () => ({ ...partial, protocol: operation.protocol, operation: operation.toJSON() }) }; } function assertSchemaModel(model) { if (model === "gemini") throw new Error([ "Error on AutoBeAgent.constructor(): gemini is not supported", "because it does not follow standard JSON schema specification.", "@autobe requires union type (`oneOf` or `anyOf`) for backend code generation,", "but gemini has banned them. Please wait until when `@agentica`", "supports prompt based function calling which can detour gemini's", "restriction of JSON schema specification." ].join(" ")); } function enforceToolCall(agent) { agent.on("request", (event => { if (event.body.tools) event.body.tool_choice = "required"; if (event.body.parallel_tool_calls !== undefined) delete event.body.parallel_tool_calls; })); return agent; } const orchestrateAnalyzeComposer = (ctx, setComposeInput) => { const controller = createController$2({ model: ctx.model, execute: new AutoBeAnalyzeComposerApplication, preExecute: setComposeInput }); const agent = new MicroAgentica({ model: ctx.model, vendor: ctx.vendor, controllers: [ controller ], config: { locale: ctx.config?.locale, executor: { describe: null } }, histories: [ ...ctx.histories().filter((h => h.type === "userMessage" || h.type === "assistantMessage")), { id: v4(), type: "systemMessage", text: "# Overview\n\n- You are the agent that determines the form of the entire document.\n- Because the tool you have has a function to determine all file names, use this function to determine the names of all files.\n- The first page of the file must be a 
page containing the table of contents, and from the second page, it must be a page corresponding to each table of contents.\n- Please clarify that the name of the table of contents page is the table of contents, such as `toc` or `table of content`.\n- Each document must begin with a number in turn, such as `00`, `01`, `02`, `03`.\n- Do not include database schema document.\n\n\n# 📄 Page Count System Prompt\n\nYou are responsible for determining the appropriate number of pages (documents) to generate.\n\n## Rules:\n\n1. **If the user explicitly requests a number of pages**, use that number *exactly*.\n2. **If the user does not specify a number**, determine a reasonable number of pages that satisfies the user's intent and scope.\n3. The final number of pages **must always match** the length of the `files` array.\n4. The total number of pages **must be greater than 1**.\n5. Always include a **Table of Contents** as one of the pages.\n6. ✅ Example:\n\n * If the user asks for **3 pages**, then the total should be **4 pages**, including the Table of Contents.\n\n## Summary:\n\n> Total pages = (user-specified page count OR inferred appropriate count) + 1 (Table of Contents)\n\nDo **not** forget to include the Table of Contents when calculating the total number of documents.", created_at: (new Date).toISOString() } ] }); enforceToolCall(agent); return agent; }; class AutoBeAnalyzeComposerApplication { compose(input) { return input; } } function createController$2(props) { assertSchemaModel(props.model); const application = collection$h[props.model]; return { protocol: "class", name: "Compose", application, execute: { compose: input => { props.preExecute(input); return props.execute.compose(input); } } }; } const claude$h = { model: "claude", options: { reference: true, separate: null }, functions: [ { name: "compose", parameters: { description: " Prefix, roles, and files\n\n------------------------------\n\nCurrent Type: {@link IComposeInput}", type: "object", properties: 
{ reason: { description: "Reason for the analysis and composition of the project structure.", type: "string" }, prefix: { description: "Prefix for file names and all prisma schema files, table, interface, and\nvariable names.", type: "string" }, roles: { description: "Roles to be assigned for the project", type: "array", items: { $ref: "#/$defs/AutoBeAnalyzeRole" } }, page: { description: "If the user has requested a specific number of pages, enter that number.\nOtherwise, provide an appropriate number of documents needed to meet the\nuser's requirements. This number must always match the length of the files\nproperty, must be greater than 1, and must include the table of contents.\nFor example, if the user requests 3 pages, the total should be 4, including\nthe table of contents.", type: "number" }, files: { description: "# Document files to be generated\n\nFile name must be English and it must contain the numbering and prefix.\n\nThese files represent business documentation that may include:\n\n- Business requirements and functional specifications\n- User journey mapping and use case scenarios\n- Business rules and workflow definitions\n- Service architecture and system design overview\n- Data flow and integration requirements\n- User roles and permission matrix\n- API endpoint specifications and contracts\n- Business logic and validation rules\n\nGenerate files based on actual requirements gathered from conversation. Do\nnot create unnecessary documentation - only generate what is needed to\nproperly define the business requirements and system specifications.\n\n# Page Length Rules\n\nThe number of documents must match the user's request, excluding the table\nof contents. For example, if the user requests 3 pages, a total of 4\ndocuments should be generated, including the table of contents. 
If the user\ndoes not specify a number, generate a sufficient number of documents to\nadequately support the service.", type: "array", items: { $ref: "#/$defs/PickIFilereasonfilename" } } }, required: [ "reason", "prefix", "roles", "page", "files" ], additionalProperties: false, $defs: { AutoBeAnalyzeRole: { description: "Interface representing a user role definition in the requirements analysis\nphase.\n\nThis interface defines authenticated user roles that will be used throughout\nthe application's authentication and authorization system. Each role\nrepresents a distinct type of user who can register, authenticate, and\ninteract with the system based on their specific permissions and\ncapabilities.\n\nThe roles defined here serve as the foundation for generating:\n\n- Prisma schema models for user authentication tables\n- API endpoint access control decorators\n- Role-based authorization logic in the business layer\n- Test scenarios for different user permission levels", type: "object", properties: { name: { description: 'Unique identifier for the user role.\n\nThis name will be used as a reference throughout the generated codebase,\nincluding Prisma schema model names, authorization decorator parameters,\nand API documentation. 
Examples: "customer", "admin", "seller",\n"moderator".', type: "string" }, description: { description: "Human-readable description of the role's permissions and capabilities.\n\nThis description helps the AI agents understand the business context and\naccess requirements for each role, guiding the generation of appropriate\nauthorization rules and API endpoint restrictions.", type: "string" } }, required: [ "name", "description" ] }, PickIFilereasonfilename: { description: "From T, pick a set of properties whose keys are in the union K", type: "object", properties: { reason: { description: "Describe briefly why you made this document, and if you have any plans for\nthe next one.", type: "string" }, filename: { description: "Filename to generate or overwrite.", type: "string", pattern: "((.*)\\.md)$" } }, required: [ "reason", "filename" ] }, IComposeInput: { type: "object", properties: { reason: { description: "Reason for the analysis and composition of the project structure.", type: "string" }, prefix: { description: "Prefix for file names and all prisma schema files, table, interface, and\nvariable names.", type: "string" }, roles: { description: "Roles to be assigned for the project", type: "array", items: { $ref: "#/$defs/AutoBeAnalyzeRole" } }, page: { description: "If the user has requested a specific number of pages, enter that number.\nOtherwise, provide an appropriate number of documents needed to meet the\nuser's requirements. 
This number must always match the length of the files\nproperty, must be greater than 1, and must include the table of contents.\nFor example, if the user requests 3 pages, the total should be 4, including\nthe table of contents.", type: "number" }, files: { description: "# Document files to be generated\n\nFile name must be English and it must contain the numbering and prefix.\n\nThese files represent business documentation that may include:\n\n- Business requirements and functional specifications\n- User journey mapping and use case scenarios\n- Business rules and workflow definitions\n- Service architecture and system design overview\n- Data flow and integration requirements\n- User roles and permission matrix\n- API endpoint specifications and contracts\n- Business logic and validation rules\n\nGenerate files based on actual requirements gathered from conversation. Do\nnot create unnecessary documentation - only generate what is needed to\nproperly define the business requirements and system specifications.\n\n# Page Length Rules\n\nThe number of documents must match the user's request, excluding the table\nof contents. For example, if the user requests 3 pages, a total of 4\ndocuments should be generated, including the table of contents. If the user\ndoes not specify a number, generate a sufficient number of documents to\nadequately support the service.", type: "array", items: { $ref: "#/$defs/PickIFilereasonfilename" } } }, required: [ "reason", "prefix", "roles", "page", "files" ] } } }, output: { $ref: "#/$defs/IComposeInput" }, description: "Compose project structure with roles and files.\n\nDesign a list of roles and initial documents that you need to create for\nthat requirement. Roles define team member responsibilities, while files\ndefine the documentation structure. These are managed separately. 
If you\ndetermine from the conversation that the user's requirements have not been\nfully gathered, you must stop the analysis and continue collecting the\nremaining requirements. In this case, you do not need to generate any files\nor roles. Simply pass an empty array to `input.files` and `input.roles`.", validate: (() => { const _io0 = input => "string" === typeof input.reason && "string" === typeof input.prefix && (Array.isArray(input.roles) && input.roles.every((elem => "object" === typeof elem && null !== elem && _io1(elem)))) && "number" === typeof input.page && (Array.isArray(input.files) && input.files.every((elem => "object" === typeof elem && null !== elem && _io2(elem)))); const _io1 = input => "string" === typeof input.name && "string" === typeof input.description; const _io2 = input => "string" === typeof input.reason && ("string" === typeof input.filename && RegExp(/(.*)\.md$/).test(input.filename)); const _vo0 = (input, _path, _exceptionable = true) => [ "string" === typeof input.reason || _report(_exceptionable, { path: _path + ".reason", expected: "string", value: input.reason }), "string" === typeof input.prefix || _report(_exceptionable, { path: _path + ".prefix", expected: "string", value: input.prefix }), (Array.isArray(input.roles) || _report(_exceptionable, { path: _path + ".roles", expected: "Array<AutoBeAnalyzeRole>", value: input.roles })) && input.roles.map(((elem, _index3) => ("object" === typeof elem && null !== elem || _report(_exceptionable, { path: _path + ".roles[" + _index3 + "]", expected: "AutoBeAnalyzeRole", value: elem })) && _vo1(elem, _path + ".roles[" + _index3 + "]", _exceptionable) || _report(_exceptionable, { path: _path + ".roles[" + _index3 + "]", expected: "AutoBeAnalyzeRole", value: elem }))).every((flag => flag)) || _report(_exceptionable, { path: _path + ".roles", expected: "Array<AutoBeAnalyzeRole>", value: input.roles }), "number" === typeof input.page || _report(_exceptionable, { path: _path + ".page", expected: 
"number", value: input.page }), (Array.isArray(input.files) || _report(_exceptionable, { path: _path + ".files", expected: 'Array<Pick<IFile, "reason" | "filename">>', value: input.files })) && input.files.map(((elem, _index4) => ("object" === typeof elem && null !== elem || _report(_exceptionable, { path: _path + ".files[" + _index4 + "]", expected: 'Pick<IFile, "reason" | "filename">', value: elem })) && _vo2(elem, _path + ".files[" + _index4 + "]", _exceptionable) || _report(_exceptionable, { path: _path + ".files[" + _index4 + "]", expected: 'Pick<IFile, "reason" | "filename">', value: elem }))).every((flag => flag)) || _report(_exceptionable, { path: _path + ".files", expected: 'Array<Pick<IFile, "reason" | "filename">>', value: input.files }) ].every((flag => flag)); const _vo1 = (input, _path, _exceptionable = true) => [ "string" === typeof input.name || _report(_exceptionable, { path: _path + ".name", expected: "string", value: input.name }), "string" === typeof input.description || _report(_exceptionable, { path: _path + ".description", expected: "string", value: input.description }) ].every((flag => flag)); const _vo2 = (input, _path, _exceptionable = true) => [ "string" === typeof input.reason || _report(_exceptionable, { path: _path + ".reason", expected: "string", value: input.reason }), "string" === typeof input.filename && RegExp(/(.*)\.md$/).test(input.filename) || _report(_exceptionable, { path: _path + ".filename", expected: "`${string}.md`", value: input.filename }) ].every((flag => flag)); const __is = input => "object" === typeof input && null !== input && _io0(input); let errors; let _report; return input => { if (false === __is(input)) { errors = []; _report = __typia_transform__validateReport._validateReport(errors); ((input, _path, _exceptionable = true) => ("object" === typeof input && null !== input || _report(true, { path: _path + "", expected: "IComposeInput", value: input })) && _vo0(input, _path + "", true) || _report(true, { path: 
_path + "", expected: "IComposeInput", value: input }))(input, "$input", true); const success = 0 === errors.length; return success ? { success, data: input } : { success, errors, data: input }; } return { success: true, data: input }; }; })() } ] }; const collection$h = { chatgpt: { model: "chatgpt", options: { reference: true, strict: false, separate: null }, functions: [ { name: "compose", parameters: { description: " Prefix, roles, and files\n\n------------------------------\n\nCurrent Type: {@link IComposeInput}", type: "object", properties: { reason: { description: "Reason for the analysis and composition of the project structure.", type: "string" }, prefix: { description: "Prefix for file names and all prisma schema files, table, interface, and\nvariable names.", type: "string" }, roles: { description: "Roles to be assigned for the project", type: "array", items: { $ref: "#/$defs/AutoBeAnalyzeRole" } }, page: { description: "If the user has requested a specific number of pages, enter that number.\nOtherwise, provide an appropriate number of documents needed to meet the\nuser's requirements. This number must always match the length of the files\nproperty, must be greater than 1, and must include the table of contents.\nFor example, if the user requests 3 pages, the total should be 4, including\nthe table of contents.", type: "number" }, files: { description: "# Document files to be generated\n\nFile name must be English and it must contain the numbering and prefix.\n\nThese files represent business documentation that may include:\n\n- Business requirements and functional specifications\n- User journey mapping and use case scenarios\n- Business rules and workflow definitions\n- Service architecture and system design overview\n- Data flow and integration requirements\n- User roles and permission matrix\n- API endpoint specifications and contracts\n- Business logic and validation rules\n\nGenerate files based on actual requirements gathered from conversation. 
Do\nnot create unnecessary documentation - only generate what is needed to\nproperly define the business requirements and system specifications.\n\n# Page Length Rules\n\nThe number of documents must match the user's request, excluding the table\nof contents. For example, if the user requests 3 pages, a total of 4\ndocuments should be generated, including the table of contents. If the user\ndoes not specify a number, generate a sufficient number of documents to\nadequately support the service.", type: "array", items: { $ref: "#/$defs/PickIFilereasonfilename" } } }, required: [ "reason", "prefix", "roles", "page", "files" ], additionalProperties: false, $defs: { AutoBeAnalyzeRole: { description: "Interface representing a user role definition in the requirements analysis\nphase.\n\nThis interface defines authenticated user roles that will be used throughout\nthe application's authentication and authorization system. Each role\nrepresents a distinct type of user who can register, authenticate, and\ninteract with the system based on their specific permissions and\ncapabilities.\n\nThe roles defined here serve as the foundation for generating:\n\n- Prisma schema models for user authentication tables\n- API endpoint access control decorators\n- Role-based authorization logic in the business layer\n- Test scenarios for different user permission levels", type: "object", properties: { name: { description: 'Unique identifier for the user role.\n\nThis name will be used as a reference throughout the generated codebase,\nincluding Prisma schema model names, authorization decorator parameters,\nand API documentation. 
Examples: "customer", "admin", "seller",\n"moderator".', type: "string" }, description: { description: "Human-readable description of the role's permissions and capabilities.\n\nThis description helps the AI agents understand the business context and\naccess requirements for each role, guiding the generation of appropriate\nauthorization rules and API endpoint restrictions.", type: "string" } }, required: [ "name", "description" ] }, PickIFilereasonfilename: { description: "From T, pick a set of properties whose keys are in the union K", type: "object", properties: { reason: { description: "Describe briefly why you made this document, and if you have any plans for\nthe next one.", type: "string" }, filename: { description: "Filename to generate or overwrite.\n\n\n@pattern ((.*)\\.md)$", type: "string" } }, required: [ "reason", "filename" ] }, IComposeInput: { type: "object", properties: { reason: { description: "Reason for the analysis and composition of the project structure.", type: "string" }, prefix: { description: "Prefix for file names and all prisma schema files, table, interface, and\nvariable names.", type: "string" }, roles: { description: "Roles to be assigned for the project", type: "array", items: { $ref: "#/$defs/AutoBeAnalyzeRole" } }, page: { description: "If the user has requested a specific number of pages, enter that number.\nOtherwise, provide an appropriate number of documents needed to meet the\nuser's requirements. 
This number must always match the length of the files\nproperty, must be greater than 1, and must include the table of contents.\nFor example, if the user requests 3 pages, the total should be 4, including\nthe table of contents.", type: "number" }, files: { description: "# Document files to be generated\n\nFile name must be English and it must contain the numbering and prefix.\n\nThese files represent business documentation that may include:\n\n- Business requirements and functional specifications\n- User journey mapping and use case scenarios\n- Business rules and workflow definitions\n- Service architecture and system design overview\n- Data flow and integration requirements\n- User roles and permission matrix\n- API endpoint specifications and contracts\n- Business logic and validation rules\n\nGenerate files based on actual requirements gathered from conversation. Do\nnot create unnecessary documentation - only generate what is needed to\nproperly define the business requirements and system specifications.\n\n# Page Length Rules\n\nThe number of documents must match the user's request, excluding the table\nof contents. For example, if the user requests 3 pages, a total of 4\ndocuments should be generated, including the table of contents. If the user\ndoes not specify a number, generate a sufficient number of documents to\nadequately support the service.", type: "array", items: { $ref: "#/$defs/PickIFilereasonfilename" } } }, required: [ "reason", "prefix", "roles", "page", "files" ] } } }, output: { $ref: "#/$defs/IComposeInput" }, description: "Compose project structure with roles and files.\n\nDesign a list of roles and initial documents that you need to create for\nthat requirement. Roles define team member responsibilities, while files\ndefine the documentation structure. These are managed separately. 
If you\ndetermine from the conversation that the user's requirements have not been\nfully gathered, you must stop the analysis and continue collecting the\nremaining requirements. In this case, you do not need to generate any files\nor roles. Simply pass an empty array to `input.files` and `input.roles`.", validate: (() => { const _io0 = input => "string" === typeof input.reason && "string" === typeof input.prefix && (Array.isArray(input.roles) && input.roles.every((elem => "object" === typeof elem && null !== elem && _io1(elem)))) && "number" === typeof input.page && (Array.isArray(input.files) && input.files.every((elem => "object" === typeof elem && null !== elem && _io2(elem)))); const _io1 = input => "string" === typeof input.name && "string" === typeof input.description; const _io2 = input => "string" === typeof input.reason && ("string" === typeof input.filename && RegExp(/(.*)\.md$/).test(input.filename)); const _vo0 = (input, _path, _exceptionable = true) => [ "string" === typeof input.reason || _report(_exceptionable, { path: _path + ".reason", expected: "string", value: input.reason }), "string" === typeof input.prefix || _report(_exceptionable, { path: _path + ".prefix", expected: "string", value: input.prefix }), (Array.isArray(input.roles) || _report(_exceptionable, { path: _path + ".roles", expected: "Array<AutoBeAnalyzeRole>", value: input.roles })) && input.roles.map(((elem, _index3) => ("object" === typeof elem && null !== elem || _report(_exceptionable, { path: _path + ".roles[" + _index3 + "]", expected: "AutoBeAnalyzeRole", value: elem })) && _vo1(elem, _path + ".roles[" + _index3 + "]", _exceptionable) || _report(_exceptionable, { path: _path + ".roles[" + _index3 + "]", expected: "AutoBeAnalyzeRole", value: elem }))).every((flag => flag)) || _report(_exceptionable, { path: _path + ".roles", expected: "Array<AutoBeAnalyzeRole>", value: input.roles }), "number" === typeof input.page || _report(_exceptionable, { path: _path + ".page", expected: 
"number", value: input.page }), (Array.isArray(input.files) || _report(_exceptionable, { path: _path + ".files", expected: 'Array<Pick<IFile, "reason" | "filename">>', value: input.files })) && input.files.map(((elem, _index4) => ("object" === typeof elem && null !== elem || _report(_exceptionable, { path: _path + ".files[" + _index4 + "]", expected: 'Pick<IFile, "reason" | "filename">', value: elem })) && _vo2(elem, _path + ".files[" + _index4 + "]", _exceptionable) || _report(_exceptionable, { path: _path + ".files[" + _index4 + "]", expected: 'Pick<IFile, "reason" | "filename">', value: elem }))).every((flag => flag)) || _report(_exceptionable, { path: _path + ".files", expected: 'Array<Pick<IFile, "reason" | "filename">>', value: input.files }) ].every((flag => flag)); const _vo1 = (input, _path, _exceptionable = true) => [ "string" === typeof input.name || _report(_exceptionable, { path: _path + ".name", expected: "string", value: input.name }), "string" === typeof input.description || _report(_exceptionable, { path: _path + ".description", expected: "string", value: input.description }) ].every((flag => flag)); const _vo2 = (input, _path, _exceptionable = true) => [ "string" === typeof input.reason || _report(_exceptionable, { path: _path + ".reason", expected: "string", value: input.reason }), "string" === typeof input.filename && RegExp(/(.*)\.md$/).test(input.filename) || _report(_exceptionable, { path: _path + ".filename", expected: "`${string}.md`", value: input.filename }) ].every((flag => flag)); const __is = input => "object" === typeof input && null !== input && _io0(input); let errors; let _report; return input => { if (false === __is(input)) { errors = []; _report = __typia_transform__validateReport._validateReport(errors); ((input, _path, _exceptionable = true) => ("object" === typeof input && null !== input || _report(true, { path: _path + "", expected: "IComposeInput", value: input })) && _vo0(input, _path + "", true) || _report(true, { path: 
_path + "", expected: "IComposeInput", value: input }))(input, "$input", true); const success = 0 === errors.length; return success ? { success, data: input } : { success, errors, data: input }; } return { success: true, data: input }; }; })() } ] }, claude: claude$h, llama: claude$h, deepseek: claude$h, 3.1: claude$h }; const transformAnalyzeReviewerHistories = input => [ { id: v4(), created_at: (new Date).toISOString(), type: "assistantMessage", text: [ "Below are all of the files.", "```json", JSON.stringify(input.files), "```" ].join("\n") }, { id: v4(), created_at: (new Date).toISOString(), type: "systemMessage", text: "# Reviewer Agent Operating Guidelines\n\n## Core Principles\n- **Review only the document currently being viewed.** Ignore any references to other documents, even if implied.\n- If the current document is **not a table of contents page** (i.e., does not start with `00`) and references external documents, **instruct the planner to clear all content and rewrite the current document** to focus solely on its assigned scope.\n- **Do not request the creation of any other pages or documents.** Each agent must write and review **only the single page assigned to them.**\n- Attempts to write or request other pages are strictly prohibited. 
If such attempts occur, **command the agent to focus exclusively on the current page.**\n- The table of contents page (starting with `00`) is exempt from content rewriting rules unless it contains invalid references.\n- Other documents will be handled by other agents, so **do not request their creation** under any circumstances.\n\n## Role of the Reviewer\n- The Reviewer Agent’s role is to **ensure the document contains sufficient information** before it is delivered to developers.\n- **Review all hyperlinks** currently referenced in the markdown and ensure they are valid or appropriately handled:\n - Internal anchor links (e.g., `#section-title`) must point to existing headings within the document.\n - External document links are allowed only if they do not impact the core content of the current document (unless it’s a table of contents page).\n- **Do not create files** that are not specified in the table of contents.\n- If the user specifies an **exact number of pages**, that number **must be strictly followed.**\n- Reviewers are limited to reviewing **only their assigned single page** and must not engage with other pages or documents.\n- If an agent requests creation of other pages, **command them to stop** and enforce focus on the current page.\n\n## Conditions Requiring `reject`\nThe Reviewer Agent **must** call `reject` with a reason, feedback, and suggestions in the following cases:\n- Document length is **less than 2,000 characters** (excluding table of contents pages).\n- Any section listed in the table of contents is **missing or incomplete**.\n- Internal anchor links (e.g., `#section-title`) point to **non-existent headings** within the document.\n- External document references in a non-table-of-contents page impact the **core content** of the document.\n- Content is **insufficient relative to the number of headings** (e.g., average content per heading is less than 300 characters).\n- Any violation of the **page-based work division** rules (e.g., 
attempts to write or reference content outside the assigned page).\n\n## Conditions for `accept`\nThe Reviewer Agent **must** call `accept` only when **all** of the following conditions are met:\n- Document length is **between 2,000 and 6,000 characters** (excluding table of contents pages).\n- All sections listed in the table of contents are **fully written** with sufficient detail.\n- All internal anchor links point to **existing headings** within the document.\n- External document references (if any) do not impact the **core content** of the document, or the document is a table of contents page.\n- Content is **sufficient relative to the number of headings** (e.g., average content per heading is at least 300 characters).\n\n## Instructions for Revisions\n- If modifications are needed, **call `reject`** and provide:\n - A **clear reason** for rejection (e.g., “Document is 1,500 characters, below the 2,000-character minimum”).\n - **Detailed feedback** identifying the issue (e.g., “Section [Section Title] is missing”).\n - **Specific suggestions** for correction (e.g., “Add 500 characters to Section [Section Title] with details on [specific topic]”).\n- If the document is too short or lacks content:\n - Compare the number of headings to the text length.\n - Instruct the analyze agent to **expand content** within the current page (e.g., “With 5 headings and 1,500 characters, add 500 characters to Section [Section Title]”).\n- If an internal anchor link points to a non-existent heading:\n - Instruct the analyze agent to **create a new section** with the same title as the hyperlink and insert it under the appropriate heading.\n- If external document references are included in a non-table-of-contents page:\n - Instruct the analyze agent to **integrate the referenced content** into the current page or remove the reference if it’s not critical.\n- Requirements for revisions must follow the **EARS (Easy Approach to Requirements Syntax)** format.\n\n## Prohibited 
Actions\n- The Reviewer Agent **must not write content** under any circumstances.\n- Reviewers are **independent** and must not be instructed by other agents.\n- The Reviewer’s words are **commands**, not recommendations, and must be followed.\n\n## Guidelines for Document Volume\n- Documents (excluding table of contents) should be **2,000–6,000 characters** for sufficient utility.\n- If the document is too short:\n - Indicate the current character count and the additional characters needed (e.g., “Current length: 1,500 characters; add 500 characters”).\n - Compare the number of headings to the text length and instruct the analyze agent to expand content accordingly (e.g., “With 5 headings, aim for 400 characters per heading”).\n- The table of contents page is exempt from the volume limit.\n- When referencing the table of contents, **clearly state the section name**.\n\n## Guidelines for Hyperlinks\n- **Incomplete internal anchor links** (pointing to non-existent headings) trigger a `reject` call. Instruct the analyze agent to create the missing section.\n- **External document links** are allowed only if they do not impact the core content of the current document (unless it’s a table of contents page). If they do, trigger a `reject` call and instruct integration or removal.\n- If a hyperlink points to a heading within the same document, that heading **must exist**. 
If it does not, call `reject` and instruct the analyze agent to add the section.\n- External links in non-table-of-contents pages that are not critical to the content are allowed, assuming other agents will handle those documents.\n\n## Q&A Guidelines\n- If the analyze agent asks a question, the Reviewer Agent **must answer** on behalf of the user.\n- **Never ask questions.** Only issue commands.\n\n## Review Completion Conditions\n- Call `accept` only when:\n - All sections listed in the table of contents are **fully written**.\n - All internal hyperlinks are **resolved** (point to existing headings).\n - Document length is **2,000–6,000 characters** (excluding table of contents).\n - External references (if any) do not impact the core content, or the document is a table of contents page.\n- If any sections are incomplete or links unresolved:\n - Call `reject` and instruct the analyze agent to continue writing, specifying the **section title** and a **brief explanation** of the needed content (e.g., “Section [Section Title] lacks details on [topic]; add 300 characters”).\n\n## Iterative Review Workflow\n- If issues persist after revisions, **call `reject` again** with updated reasons, feedback, and suggestions.\n- Example: “Document is still 1,800 characters. 
Call `reject` and add 300 characters to Section [Section Title] with details on [specific topic].”\n- Continue this process until all conditions for `accept` are met.\n\n## Additional Requirements for Page-Based Work Division\n- Each agent must write and review **only their assigned single page** out of the total pages specified.\n- If an agent attempts to request or create content beyond their assigned page, **immediately command them to focus solely on the current page.**\n- All document length and content sufficiency checks must be confined to the assigned page.\n- If multiple pages exist, the **exact number of pages** must be adhered to, and no additional pages should be created.\n- Enforce strict page-level division to maintain clear boundaries of responsibility and simplify revi