@mariozechner/pi-agent

General-purpose agent with transport abstraction, state management, and attachment support

{"version":3,"file":"agent.d.ts","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAgB,OAAO,EAA8B,MAAM,qBAAqB,CAAC;AAE7F,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAC;AAC5D,OAAO,KAAK,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM,YAAY,CAAC;AAiDhG,MAAM,WAAW,YAAY;IAC5B,YAAY,CAAC,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;IACnC,SAAS,EAAE,cAAc,CAAC;IAE1B,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,UAAU,EAAE,KAAK,OAAO,EAAE,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;IAEhF,SAAS,CAAC,EAAE,KAAK,GAAG,eAAe,CAAC;CACpC;AAED,qBAAa,KAAK;IACjB,OAAO,CAAC,MAAM,CAUZ;IACF,OAAO,CAAC,SAAS,CAAsC;IACvD,OAAO,CAAC,eAAe,CAAC,CAAkB;IAC1C,OAAO,CAAC,SAAS,CAAiB;IAClC,OAAO,CAAC,kBAAkB,CAA6D;IACvF,OAAO,CAAC,YAAY,CAAwC;IAC5D,OAAO,CAAC,SAAS,CAA0B;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,oBAAoB,CAAC,CAAa;IAE1C,YAAY,IAAI,EAAE,YAAY,EAK7B;IAED,IAAI,KAAK,IAAI,UAAU,CAEtB;IAED,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,UAAU,KAAK,IAAI,GAAG,MAAM,IAAI,CAGjD;IAGD,eAAe,CAAC,CAAC,EAAE,MAAM,QAExB;IAED,QAAQ,CAAC,CAAC,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,QAEnC;IAED,gBAAgB,CAAC,CAAC,EAAE,aAAa,QAEhC;IAED,YAAY,CAAC,IAAI,EAAE,KAAK,GAAG,eAAe,QAEzC;IAED,YAAY,IAAI,KAAK,GAAG,eAAe,CAEtC;IAED,QAAQ,CAAC,CAAC,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,QAEnC;IAED,eAAe,CAAC,EAAE,EAAE,UAAU,EAAE,QAE/B;IAED,aAAa,CAAC,CAAC,EAAE,UAAU,QAE1B;IAEK,YAAY,CAAC,CAAC,EAAE,UAAU,iBAO/B;IAED,iBAAiB,SAEhB;IAED,aAAa,SAEZ;IAED,KAAK,SAEJ;IAED;;;OAGG;IACH,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC,CAE3B;IAED;;OAEG;IACH,KAAK,SAOJ;IAEK,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,WAAW,CAAC,EAAE,UAAU,EAAE,iBAuLrD;IAED,OAAO,CAAC,IAAI;CAKZ","sourcesContent":["import type { ImageContent, Message, QueuedMessage, TextContent } from \"@mariozechner/pi-ai\";\nimport { getModel } from \"@mariozechner/pi-ai\";\nimport type { AgentTransport } from \"./transports/types.js\";\nimport type { AgentEvent, AgentState, AppMessage, Attachment, ThinkingLevel } from \"./types.js\";\n\n/**\n * Default message transformer: Keep only LLM-compatible messages, strip app-specific fields.\n * Converts attachments to proper content blocks (images → ImageContent, documents → TextContent).\n */\nfunction defaultMessageTransformer(messages: AppMessage[]): Message[] {\n\treturn messages\n\t\t.filter((m) => {\n\t\t\t// Only keep standard LLM message roles\n\t\t\treturn m.role === \"user\" || m.role === \"assistant\" || m.role === \"toolResult\";\n\t\t})\n\t\t.map((m) => {\n\t\t\tif (m.role === \"user\") {\n\t\t\t\tconst { attachments, ...rest } = m as any;\n\n\t\t\t\t// If no attachments, return as-is\n\t\t\t\tif (!attachments || attachments.length === 0) {\n\t\t\t\t\treturn rest as Message;\n\t\t\t\t}\n\n\t\t\t\t// Convert attachments to content blocks\n\t\t\t\tconst content = Array.isArray(rest.content) ? 
[...rest.content] : [{ type: \"text\", text: rest.content }];\n\n\t\t\t\tfor (const attachment of attachments as Attachment[]) {\n\t\t\t\t\t// Add image blocks for image attachments\n\t\t\t\t\tif (attachment.type === \"image\") {\n\t\t\t\t\t\tcontent.push({\n\t\t\t\t\t\t\ttype: \"image\",\n\t\t\t\t\t\t\tdata: attachment.content,\n\t\t\t\t\t\t\tmimeType: attachment.mimeType,\n\t\t\t\t\t\t} as ImageContent);\n\t\t\t\t\t}\n\t\t\t\t\t// Add text blocks for documents with extracted text\n\t\t\t\t\telse if (attachment.type === \"document\" && attachment.extractedText) {\n\t\t\t\t\t\tcontent.push({\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: `\\n\\n[Document: ${attachment.fileName}]\\n${attachment.extractedText}`,\n\t\t\t\t\t\t\tisDocument: true,\n\t\t\t\t\t\t} as TextContent);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\treturn { ...rest, content } as Message;\n\t\t\t}\n\t\t\treturn m as Message;\n\t\t});\n}\n\nexport interface AgentOptions {\n\tinitialState?: Partial<AgentState>;\n\ttransport: AgentTransport;\n\t// Transform app messages to LLM-compatible messages before sending to transport\n\tmessageTransformer?: (messages: AppMessage[]) => Message[] | Promise<Message[]>;\n\t// Queue mode: \"all\" = send all queued messages at once, \"one-at-a-time\" = send one queued message per turn\n\tqueueMode?: \"all\" | \"one-at-a-time\";\n}\n\nexport class Agent {\n\tprivate _state: AgentState = {\n\t\tsystemPrompt: \"\",\n\t\tmodel: getModel(\"google\", \"gemini-2.5-flash-lite-preview-06-17\"),\n\t\tthinkingLevel: \"off\",\n\t\ttools: [],\n\t\tmessages: [],\n\t\tisStreaming: false,\n\t\tstreamMessage: null,\n\t\tpendingToolCalls: new Set<string>(),\n\t\terror: undefined,\n\t};\n\tprivate listeners = new Set<(e: AgentEvent) => void>();\n\tprivate abortController?: AbortController;\n\tprivate transport: AgentTransport;\n\tprivate messageTransformer: (messages: AppMessage[]) => Message[] | Promise<Message[]>;\n\tprivate messageQueue: Array<QueuedMessage<AppMessage>> = [];\n\tprivate queueMode: \"all\" | \"one-at-a-time\";\n\tprivate runningPrompt?: Promise<void>;\n\tprivate resolveRunningPrompt?: () => void;\n\n\tconstructor(opts: AgentOptions) {\n\t\tthis._state = { ...this._state, ...opts.initialState };\n\t\tthis.transport = opts.transport;\n\t\tthis.messageTransformer = opts.messageTransformer || defaultMessageTransformer;\n\t\tthis.queueMode = opts.queueMode || \"one-at-a-time\";\n\t}\n\n\tget state(): AgentState {\n\t\treturn this._state;\n\t}\n\n\tsubscribe(fn: (e: AgentEvent) => void): () => void {\n\t\tthis.listeners.add(fn);\n\t\treturn () => this.listeners.delete(fn);\n\t}\n\n\t// State mutators - update internal state without emitting events\n\tsetSystemPrompt(v: string) {\n\t\tthis._state.systemPrompt = v;\n\t}\n\n\tsetModel(m: typeof this._state.model) {\n\t\tthis._state.model = m;\n\t}\n\n\tsetThinkingLevel(l: ThinkingLevel) {\n\t\tthis._state.thinkingLevel = l;\n\t}\n\n\tsetQueueMode(mode: \"all\" | \"one-at-a-time\") {\n\t\tthis.queueMode = mode;\n\t}\n\n\tgetQueueMode(): \"all\" | \"one-at-a-time\" {\n\t\treturn this.queueMode;\n\t}\n\n\tsetTools(t: typeof this._state.tools) {\n\t\tthis._state.tools = t;\n\t}\n\n\treplaceMessages(ms: AppMessage[]) {\n\t\tthis._state.messages = ms.slice();\n\t}\n\n\tappendMessage(m: AppMessage) {\n\t\tthis._state.messages = [...this._state.messages, m];\n\t}\n\n\tasync queueMessage(m: AppMessage) {\n\t\t// Transform message and queue it for injection at next turn\n\t\tconst transformed = await 
this.messageTransformer([m]);\n\t\tthis.messageQueue.push({\n\t\t\toriginal: m,\n\t\t\tllm: transformed[0], // undefined if filtered out\n\t\t});\n\t}\n\n\tclearMessageQueue() {\n\t\tthis.messageQueue = [];\n\t}\n\n\tclearMessages() {\n\t\tthis._state.messages = [];\n\t}\n\n\tabort() {\n\t\tthis.abortController?.abort();\n\t}\n\n\t/**\n\t * Returns a promise that resolves when the current prompt completes.\n\t * Returns immediately resolved promise if no prompt is running.\n\t */\n\twaitForIdle(): Promise<void> {\n\t\treturn this.runningPrompt ?? Promise.resolve();\n\t}\n\n\t/**\n\t * Clear all messages and state. Call abort() first if a prompt is in flight.\n\t */\n\treset() {\n\t\tthis._state.messages = [];\n\t\tthis._state.isStreaming = false;\n\t\tthis._state.streamMessage = null;\n\t\tthis._state.pendingToolCalls = new Set<string>();\n\t\tthis._state.error = undefined;\n\t\tthis.messageQueue = [];\n\t}\n\n\tasync prompt(input: string, attachments?: Attachment[]) {\n\t\tconst model = this._state.model;\n\t\tif (!model) {\n\t\t\tthrow new Error(\"No model configured\");\n\t\t}\n\n\t\t// Set up running prompt tracking\n\t\tthis.runningPrompt = new Promise<void>((resolve) => {\n\t\t\tthis.resolveRunningPrompt = resolve;\n\t\t});\n\n\t\t// Build user message with attachments\n\t\tconst content: Array<TextContent | ImageContent> = [{ type: \"text\", text: input }];\n\t\tif (attachments?.length) {\n\t\t\tfor (const a of attachments) {\n\t\t\t\tif (a.type === \"image\") {\n\t\t\t\t\tcontent.push({ type: \"image\", data: a.content, mimeType: a.mimeType });\n\t\t\t\t} else if (a.type === \"document\" && a.extractedText) {\n\t\t\t\t\tcontent.push({\n\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\ttext: `\\n\\n[Document: ${a.fileName}]\\n${a.extractedText}`,\n\t\t\t\t\t\tisDocument: true,\n\t\t\t\t\t} as TextContent);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tconst userMessage: AppMessage = {\n\t\t\trole: \"user\",\n\t\t\tcontent,\n\t\t\tattachments: attachments?.length ? attachments : undefined,\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\tthis.abortController = new AbortController();\n\t\tthis._state.isStreaming = true;\n\t\tthis._state.streamMessage = null;\n\t\tthis._state.error = undefined;\n\n\t\tconst reasoning =\n\t\t\tthis._state.thinkingLevel === \"off\"\n\t\t\t\t? undefined\n\t\t\t\t: this._state.thinkingLevel === \"minimal\"\n\t\t\t\t\t? 
\"low\"\n\t\t\t\t\t: this._state.thinkingLevel;\n\n\t\tconst cfg = {\n\t\t\tsystemPrompt: this._state.systemPrompt,\n\t\t\ttools: this._state.tools,\n\t\t\tmodel,\n\t\t\treasoning,\n\t\t\tgetQueuedMessages: async <T>() => {\n\t\t\t\t// Return queued messages based on queue mode\n\t\t\t\tif (this.queueMode === \"one-at-a-time\") {\n\t\t\t\t\t// Return only first message\n\t\t\t\t\tif (this.messageQueue.length > 0) {\n\t\t\t\t\t\tconst first = this.messageQueue[0];\n\t\t\t\t\t\tthis.messageQueue = this.messageQueue.slice(1);\n\t\t\t\t\t\treturn [first] as QueuedMessage<T>[];\n\t\t\t\t\t}\n\t\t\t\t\treturn [];\n\t\t\t\t} else {\n\t\t\t\t\t// Return all queued messages at once\n\t\t\t\t\tconst queued = this.messageQueue.slice();\n\t\t\t\t\tthis.messageQueue = [];\n\t\t\t\t\treturn queued as QueuedMessage<T>[];\n\t\t\t\t}\n\t\t\t},\n\t\t};\n\n\t\t// Track all messages generated in this prompt\n\t\tconst generatedMessages: AppMessage[] = [];\n\n\t\ttry {\n\t\t\tlet partial: Message | null = null;\n\n\t\t\t// Transform app messages to LLM-compatible messages (initial set)\n\t\t\tconst llmMessages = await this.messageTransformer(this._state.messages);\n\n\t\t\tfor await (const ev of this.transport.run(\n\t\t\t\tllmMessages,\n\t\t\t\tuserMessage as Message,\n\t\t\t\tcfg,\n\t\t\t\tthis.abortController.signal,\n\t\t\t)) {\n\t\t\t\t// Pass through all events directly\n\t\t\t\tthis.emit(ev as AgentEvent);\n\n\t\t\t\t// Update internal state as needed\n\t\t\t\tswitch (ev.type) {\n\t\t\t\t\tcase \"message_start\": {\n\t\t\t\t\t\t// Track streaming message\n\t\t\t\t\t\tpartial = ev.message;\n\t\t\t\t\t\tthis._state.streamMessage = ev.message;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"message_update\": {\n\t\t\t\t\t\t// Update streaming message\n\t\t\t\t\t\tpartial = ev.message;\n\t\t\t\t\t\tthis._state.streamMessage = ev.message;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"message_end\": {\n\t\t\t\t\t\t// Add completed message to state\n\t\t\t\t\t\tpartial = null;\n\t\t\t\t\t\tthis._state.streamMessage = null;\n\t\t\t\t\t\tthis.appendMessage(ev.message as AppMessage);\n\t\t\t\t\t\tgeneratedMessages.push(ev.message as AppMessage);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"tool_execution_start\": {\n\t\t\t\t\t\tconst s = new Set(this._state.pendingToolCalls);\n\t\t\t\t\t\ts.add(ev.toolCallId);\n\t\t\t\t\t\tthis._state.pendingToolCalls = s;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"tool_execution_end\": {\n\t\t\t\t\t\tconst s = new Set(this._state.pendingToolCalls);\n\t\t\t\t\t\ts.delete(ev.toolCallId);\n\t\t\t\t\t\tthis._state.pendingToolCalls = s;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"turn_end\": {\n\t\t\t\t\t\t// Capture error from turn_end event\n\t\t\t\t\t\tif (ev.message.role === \"assistant\" && ev.message.errorMessage) {\n\t\t\t\t\t\t\tthis._state.error = ev.message.errorMessage;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"agent_end\": {\n\t\t\t\t\t\tthis._state.streamMessage = null;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Handle any remaining partial message\n\t\t\tif (partial && partial.role === \"assistant\" && partial.content.length > 0) {\n\t\t\t\tconst onlyEmpty = !partial.content.some(\n\t\t\t\t\t(c) =>\n\t\t\t\t\t\t(c.type === \"thinking\" && c.thinking.trim().length > 0) ||\n\t\t\t\t\t\t(c.type === \"text\" && c.text.trim().length > 0) ||\n\t\t\t\t\t\t(c.type === \"toolCall\" && c.name.trim().length > 0),\n\t\t\t\t);\n\t\t\t\tif (!onlyEmpty) {\n\t\t\t\t\tthis.appendMessage(partial as 
AppMessage);\n\t\t\t\t\tgeneratedMessages.push(partial as AppMessage);\n\t\t\t\t} else {\n\t\t\t\t\tif (this.abortController?.signal.aborted) {\n\t\t\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (err: any) {\n\t\t\tconst msg: Message = {\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: [{ type: \"text\", text: \"\" }],\n\t\t\t\tapi: model.api,\n\t\t\t\tprovider: model.provider,\n\t\t\t\tmodel: model.id,\n\t\t\t\tusage: {\n\t\t\t\t\tinput: 0,\n\t\t\t\t\toutput: 0,\n\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t\t},\n\t\t\t\tstopReason: this.abortController?.signal.aborted ? \"aborted\" : \"error\",\n\t\t\t\terrorMessage: err?.message || String(err),\n\t\t\t\ttimestamp: Date.now(),\n\t\t\t};\n\t\t\tthis.appendMessage(msg as AppMessage);\n\t\t\tgeneratedMessages.push(msg as AppMessage);\n\t\t\tthis._state.error = err?.message || String(err);\n\t\t} finally {\n\t\t\tthis._state.isStreaming = false;\n\t\t\tthis._state.streamMessage = null;\n\t\t\tthis._state.pendingToolCalls = new Set<string>();\n\t\t\tthis.abortController = undefined;\n\t\t\tthis.resolveRunningPrompt?.();\n\t\t\tthis.runningPrompt = undefined;\n\t\t\tthis.resolveRunningPrompt = undefined;\n\t\t}\n\t}\n\n\tprivate emit(e: AgentEvent) {\n\t\tfor (const listener of this.listeners) {\n\t\t\tlistener(e);\n\t\t}\n\t}\n}\n"]}
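The embedded source above describes the public surface of Agent. The sketch below is a minimal usage illustration inferred only from those types: the root import path, the transport value, and the exact AppMessage/Attachment field sets are assumptions, since AgentTransport, AgentEvent, and the app-level types live in modules not included in this map.

// Minimal usage sketch. Assumptions: Agent is re-exported from the package root,
// `transport` is some AgentTransport implementation obtained elsewhere, and the
// message/attachment literals are cast loosely because AppMessage/Attachment are
// defined in ./types.js, which is not part of this file.
import { Agent } from "@mariozechner/pi-agent";

async function demo(transport: any) {
	const agent = new Agent({
		transport,
		initialState: { systemPrompt: "You are a concise assistant." },
		queueMode: "one-at-a-time", // or "all": flush every queued message in a single turn
	});

	// Every event from the transport (message_start/update/end, tool_execution_start/end,
	// turn_end, agent_end) is passed through to subscribers while Agent mirrors it into state.
	const unsubscribe = agent.subscribe((ev: any) => {
		if (ev.type === "message_end") {
			console.log("completed message:", ev.message);
		}
	});

	// Messages queued here are injected at the next turn via cfg.getQueuedMessages.
	await agent.queueMessage({ role: "user", content: "Keep answers under 100 words.", timestamp: Date.now() } as any);

	// prompt() builds the user message (attachments become image/text content blocks),
	// runs the transport, and resolves when the turn completes or errors.
	await agent.prompt("Summarize the attached document.", [
		{ type: "document", fileName: "notes.txt", mimeType: "text/plain", content: "", extractedText: "..." } as any,
	]);

	await agent.waitForIdle();
	unsubscribe();
	console.log("messages:", agent.state.messages.length, "error:", agent.state.error);
}

In this flow the mutators (setModel, setTools, setSystemPrompt, setThinkingLevel) only update internal state without emitting events; the transport sees the new configuration on the next prompt() call.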