vscode-chat-langchain-bridge
Create VS Code Chat participants (agents) with LangChain/LangGraph: a tool-calling and streaming bridge for LanguageModelChat.
import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
import { BaseFunctionCallOptions, BaseLanguageModelInput } from '@langchain/core/language_models/base';
import { BaseChatModelParams, BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ToolCall, AIMessageChunk, BaseMessage } from '@langchain/core/messages';
import { ChatResult, ChatGenerationChunk } from '@langchain/core/outputs';
import { RunnableConfig, Runnable } from '@langchain/core/runnables';
import { LanguageModelChatTool, LanguageModelTool, LanguageModelToolInvocationOptions, CancellationToken, ProviderResult, LanguageModelToolResult, LanguageModelChatRequestOptions, LanguageModelChat, ChatResponseStream, ChatContext } from 'vscode';
import { DynamicStructuredTool, StructuredTool, DynamicTool, DynamicStructuredToolInput } from '@langchain/core/tools';
import { z } from 'zod/v3';
declare class ChatVSCodeTool<T extends ZodObjectAny | Record<string, any> = ZodObjectAny> extends DynamicStructuredTool<T extends ZodObjectAny ? T : ZodObjectAny> implements LanguageModelChatTool, LanguageModelTool<z.infer<T extends ZodObjectAny ? T : ZodObjectAny>> {
    inputSchema?: Record<string, unknown>;
    constructor(fields: ChatVSCodeToolInput<T>);
    static lc_name(): string;
    invoke(input: string | ToolCall | {
        [x: string]: any;
    }, config?: RunnableConfig): Promise<any>;
    invoke(options: LanguageModelToolInvocationOptions<z.infer<T extends ZodObjectAny ? T : ZodObjectAny>>, token: CancellationToken): ProviderResult<LanguageModelToolResult>;
}
interface ChatVscodeBaseInput {
    model: LanguageModelChat;
    token: CancellationToken;
}
interface ChatVSCodeCallOptions extends LanguageModelChatRequestOptions, BaseFunctionCallOptions {
}
interface ChatVSCodeFields extends BaseChatModelParams, ChatVscodeBaseInput {
    responseStream: ChatResponseStream;
}
type ChatVSCodeToolType = StructuredTool | DynamicStructuredTool | DynamicTool | ChatVSCodeTool;
type ZodObjectAny = z.ZodObject<any, any, any, any>;
interface ChatVSCodeToolInput<T extends ZodObjectAny | Record<string, any> = ZodObjectAny> extends DynamicStructuredToolInput<T extends ZodObjectAny ? T : ZodObjectAny> {
}
declare class ChatVSCode extends BaseChatModel<ChatVSCodeCallOptions, AIMessageChunk> {
    protected model: LanguageModelChat;
    token: CancellationToken;
    responseStream: ChatResponseStream;
    constructor(fields: ChatVSCodeFields);
    static lc_name(): string;
    _llmType(): string;
    _generate(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
    _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
    bindTools(tools: ChatVSCodeToolType[], kwargs?: Partial<ChatVSCodeCallOptions> | undefined): Runnable<BaseLanguageModelInput, AIMessageChunk, ChatVSCodeCallOptions>;
}
/**
 * Ensures every LangChain tool is wrapped in a ChatVSCodeTool instance that
 * exposes the schema and invoke function expected by the VS Code chat bridge.
 */
declare function toVSCodeChatTool(tool: ChatVSCodeToolType): ChatVSCodeTool;
/**
 * Utility function that converts a VS Code chat history into the LangChain
 * message format.
 */
declare function convertVscodeHistory(chatContext: ChatContext): BaseMessage[];
export { ChatVSCode, type ChatVSCodeCallOptions, type ChatVSCodeFields, type ChatVSCodeToolInput, type ChatVSCodeToolType, type ChatVscodeBaseInput, type ZodObjectAny, convertVscodeHistory, toVSCodeChatTool };
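
A minimal usage sketch follows, showing how the bridge could sit inside a chat request handler. Only ChatVSCode, toVSCodeChatTool, and convertVscodeHistory come from the declarations above; the participant id, the get_weather tool, and the final stream.markdown echo are illustrative assumptions, not part of the package.

import * as vscode from 'vscode';
import { HumanMessage } from '@langchain/core/messages';
import { tool } from '@langchain/core/tools';
import { z } from 'zod';
import { ChatVSCode, convertVscodeHistory, toVSCodeChatTool } from 'vscode-chat-langchain-bridge';

export function activate(context: vscode.ExtensionContext) {
    const participant = vscode.chat.createChatParticipant(
        'my-extension.langchain-agent', // hypothetical participant id
        async (request, chatContext, stream, token) => {
            // Bridge the model chosen in the chat UI into a LangChain chat model.
            // request.model assumes a recent VS Code that exposes the model picker.
            const llm = new ChatVSCode({
                model: request.model,
                token,
                responseStream: stream,
            });

            // An illustrative LangChain tool; any StructuredTool would do.
            const weather = tool(async ({ city }) => `Sunny in ${city}`, {
                name: 'get_weather',
                description: 'Look up the weather for a city',
                schema: z.object({ city: z.string() }),
            });
            const modelWithTools = llm.bindTools([toVSCodeChatTool(weather)]);

            // Replay earlier turns from the chat context, then add the new prompt.
            const messages = [
                ...convertVscodeHistory(chatContext),
                new HumanMessage(request.prompt),
            ];

            const result = await modelWithTools.invoke(messages);
            // The bridge holds the ChatResponseStream, so it may already surface
            // output there; echoing the final text here is a defensive assumption.
            if (typeof result.content === 'string') {
                stream.markdown(result.content);
            }
        }
    );
    context.subscriptions.push(participant);
}

Since ChatVSCodeToolType also covers plain StructuredTool, DynamicStructuredTool, and DynamicTool instances, the toVSCodeChatTool wrapper is shown mainly to make the conversion explicit; bindTools accepts unwrapped LangChain tools as well.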