@copilotkit/runtime
Version:
<div align="center"> <a href="https://copilotkit.ai" target="_blank"> <img src="https://github.com/copilotkit/copilotkit/raw/main/assets/banner.png" alt="CopilotKit Logo"> </a>
1,475 lines (1,461 loc) • 53.3 kB
JavaScript
import {
__name
} from "./chunk-FHD4JECV.mjs";
// src/service-adapters/langchain/langserve.ts
import { RemoteRunnable } from "langchain/runnables/remote";
// Exposes a remote LangServe chain as a CopilotKit action. When no explicit
// parameter list is supplied, the chain's /input_schema endpoint is fetched
// and converted into action parameters.
var RemoteChain = class {
  name;
  description;
  // Base URL of the LangServe chain (trailing slashes are tolerated).
  chainUrl;
  // Action parameter descriptors; inferred from the remote schema when omitted.
  parameters;
  // "single": the handler unwraps the lone argument value before invoking the
  // chain; "multi" (default): the whole args object is passed as chain input.
  parameterType;
  constructor(options) {
    this.name = options.name;
    this.description = options.description;
    this.chainUrl = options.chainUrl;
    this.parameters = options.parameters;
    this.parameterType = options.parameterType || "multi";
  }
  // Builds the action object consumed by the runtime; lazily infers
  // parameters from the remote schema on first use.
  async toAction() {
    if (!this.parameters) {
      await this.inferLangServeParameters();
    }
    return {
      name: this.name,
      description: this.description,
      parameters: this.parameters,
      handler: async (args) => {
        const runnable = new RemoteRunnable({
          url: this.chainUrl
        });
        let input;
        if (this.parameterType === "single") {
          // Unwrap the single argument's value (first key of the args object).
          input = args[Object.keys(args)[0]];
        } else {
          input = args;
        }
        return await runnable.invoke(input);
      }
    };
  }
  // Fetches <chainUrl>/input_schema and derives action parameters from it.
  // Supports a primitive top-level schema (string/number/boolean) or a flat
  // object whose properties are all primitives; anything else throws.
  async inferLangServeParameters() {
    const supportedTypes = [
      "string",
      "number",
      "boolean"
    ];
    let schemaUrl = this.chainUrl.replace(/\/+$/, "") + "/input_schema";
    let schema = await fetch(schemaUrl).then((res) => res.json()).catch(() => {
      throw new Error("Failed to fetch langserve schema at " + schemaUrl);
    });
    if (supportedTypes.includes(schema.type)) {
      // Primitive top-level schema: expose a single "input" parameter.
      this.parameterType = "single";
      this.parameters = [
        {
          name: "input",
          type: schema.type,
          description: "The input to the chain"
        }
      ];
    } else if (schema.type === "object") {
      this.parameterType = "multi";
      this.parameters = Object.keys(schema.properties).map((key) => {
        var _a;
        let property = schema.properties[key];
        if (!supportedTypes.includes(property.type)) {
          throw new Error("Unsupported schema type");
        }
        return {
          name: key,
          type: property.type,
          description: property.description || "",
          required: ((_a = schema.required) == null ? void 0 : _a.includes(key)) || false
        };
      });
    } else {
      throw new Error("Unsupported schema type");
    }
  }
};
// Tag the class with its original name via the bundler-provided __name helper.
__name(RemoteChain, "RemoteChain");
// src/service-adapters/openai/openai-adapter.ts
import OpenAI from "openai";
// src/service-adapters/openai/utils.ts
import { parseJson } from "@copilotkit/shared";
// Trims a message list so that its approximate token cost (plus the cost of
// the tool definitions) fits within the model's budget. System/developer
// messages are always kept and reserved for up front; remaining messages are
// kept newest-first until the budget is exhausted. Relative order of kept
// messages is preserved. Throws when tools or system messages alone exceed
// the budget.
function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
  // Fall back to the model's context window when no explicit budget is given.
  if (!maxTokens) {
    maxTokens = maxTokensForOpenAIModel(model);
  }
  const isSystemish = (msg) => msg.role === "system" || msg.role === "developer";
  const toolsNumTokens = countToolsTokens(model, tools);
  if (toolsNumTokens > maxTokens) {
    throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);
  }
  maxTokens -= toolsNumTokens;
  // Reserve room for every system/developer message before anything else.
  for (const message of messages) {
    if (isSystemish(message)) {
      maxTokens -= countMessageTokens(model, message);
      if (maxTokens < 0) {
        throw new Error("Not enough tokens for system message.");
      }
    }
  }
  const kept = [];
  let cutoff = false;
  // Walk newest-to-oldest; once a message doesn't fit, skip all older
  // non-system messages as well.
  for (let i = messages.length - 1; i >= 0; i--) {
    const message = messages[i];
    if (isSystemish(message)) {
      kept.unshift(message);
      continue;
    }
    if (cutoff) {
      continue;
    }
    const numTokens = countMessageTokens(model, message);
    if (maxTokens < numTokens) {
      cutoff = true;
      continue;
    }
    kept.unshift(message);
    maxTokens -= numTokens;
  }
  return kept;
}
__name(limitMessagesToTokenCount, "limitMessagesToTokenCount");
// Resolves the context-window size (in tokens) for a model id, falling back
// to DEFAULT_MAX_TOKENS for unknown models.
function maxTokensForOpenAIModel(model) {
  return maxTokensByModel[model] || DEFAULT_MAX_TOKENS;
}
__name(maxTokensForOpenAIModel, "maxTokensForOpenAIModel");
// Fallback context window (128k tokens) for models missing from the table below.
var DEFAULT_MAX_TOKENS = 128e3;
// Known context-window sizes per OpenAI model identifier.
var maxTokensByModel = {
  // o1
  o1: 2e5,
  "o1-2024-12-17": 2e5,
  "o1-mini": 128e3,
  "o1-mini-2024-09-12": 128e3,
  "o1-preview": 128e3,
  "o1-preview-2024-09-12": 128e3,
  // o3-mini
  "o3-mini": 2e5,
  "o3-mini-2025-01-31": 2e5,
  // GPT-4
  "gpt-4o": 128e3,
  "chatgpt-4o-latest": 128e3,
  "gpt-4o-2024-08-06": 128e3,
  "gpt-4o-2024-05-13": 128e3,
  "gpt-4o-mini": 128e3,
  "gpt-4o-mini-2024-07-18": 128e3,
  "gpt-4-turbo": 128e3,
  "gpt-4-turbo-2024-04-09": 128e3,
  "gpt-4-0125-preview": 128e3,
  "gpt-4-turbo-preview": 128e3,
  "gpt-4-1106-preview": 128e3,
  "gpt-4-vision-preview": 128e3,
  "gpt-4-1106-vision-preview": 128e3,
  "gpt-4-32k": 32768,
  "gpt-4-32k-0613": 32768,
  "gpt-4-32k-0314": 32768,
  "gpt-4": 8192,
  "gpt-4-0613": 8192,
  "gpt-4-0314": 8192,
  // GPT-3.5
  "gpt-3.5-turbo-0125": 16385,
  "gpt-3.5-turbo": 16385,
  "gpt-3.5-turbo-1106": 16385,
  "gpt-3.5-turbo-instruct": 4096,
  "gpt-3.5-turbo-16k": 16385,
  "gpt-3.5-turbo-0613": 4096,
  "gpt-3.5-turbo-16k-0613": 16385,
  "gpt-3.5-turbo-0301": 4097
};
// Approximate token cost of the serialized tool definitions (0 when no tools).
function countToolsTokens(model, tools) {
  if (tools.length === 0) {
    return 0;
  }
  const json = JSON.stringify(tools);
  return countTokens(model, json);
}
__name(countToolsTokens, "countToolsTokens");
// Approximate token cost of a single message's text content.
function countMessageTokens(model, message) {
  return countTokens(model, message.content || "");
}
__name(countMessageTokens, "countMessageTokens");
// Rough token estimate (~3 characters per token); avoids pulling in a real
// tokenizer. The model argument is accepted for interface symmetry but unused.
function countTokens(model, text) {
  const APPROX_CHARS_PER_TOKEN = 3;
  return text.length / APPROX_CHARS_PER_TOKEN;
}
__name(countTokens, "countTokens");
// Maps a CopilotKit action definition onto OpenAI's tool/function schema.
// Falls back to an empty parameters object when the JSON schema cannot be parsed.
function convertActionInputToOpenAITool(action) {
  const { name, description, jsonSchema } = action;
  return {
    type: "function",
    function: {
      name,
      description,
      parameters: parseJson(jsonSchema, {})
    }
  };
}
__name(convertActionInputToOpenAITool, "convertActionInputToOpenAITool");
// Converts a CopilotKit message into the OpenAI chat-completions wire format.
// By default a "system" role is rewritten to OpenAI's newer "developer" role;
// pass { keepSystemRole: true } to preserve it. Returns undefined for message
// kinds with no OpenAI equivalent.
function convertMessageToOpenAIMessage(message, options) {
  const keepSystemRole = (options || { keepSystemRole: false }).keepSystemRole;
  if (message.isTextMessage()) {
    const role = message.role === "system" && !keepSystemRole ? "developer" : message.role;
    return { role, content: message.content };
  }
  if (message.isImageMessage()) {
    // Images are sent as a data-URL content part on a user message.
    return {
      role: "user",
      content: [
        {
          type: "image_url",
          image_url: { url: `data:image/${message.format};base64,${message.bytes}` }
        }
      ]
    };
  }
  if (message.isActionExecutionMessage()) {
    // Action executions become assistant tool calls with JSON-encoded arguments.
    return {
      role: "assistant",
      tool_calls: [
        {
          id: message.id,
          type: "function",
          function: { name: message.name, arguments: JSON.stringify(message.arguments) }
        }
      ]
    };
  }
  if (message.isResultMessage()) {
    return {
      role: "tool",
      content: message.result,
      tool_call_id: message.actionExecutionId
    };
  }
}
__name(convertMessageToOpenAIMessage, "convertMessageToOpenAIMessage");
// Rewrites a system/developer message as an assistant message with an explicit
// marker prefix; other roles are passed through as a shallow copy.
function convertSystemMessageToAssistantAPI(message) {
  const converted = { ...message };
  if (message.role === "system" || message.role === "developer") {
    converted.role = "assistant";
    converted.content = "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content;
  }
  return converted;
}
__name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
// src/service-adapters/openai/openai-adapter.ts
import { randomUUID } from "@copilotkit/shared";
// Default model used when neither the adapter nor the request specifies one.
var DEFAULT_MODEL = "gpt-4o";
// Service adapter that streams chat completions from the OpenAI API and
// re-emits them as CopilotKit text-message / action-execution events.
var OpenAIAdapter = class {
  model = DEFAULT_MODEL;
  disableParallelToolCalls = false;
  _openai;
  // When false (default), "system" roles are demoted to "developer" on the wire.
  keepSystemRole = false;
  get openai() {
    return this._openai;
  }
  constructor(params) {
    this._openai = (params == null ? void 0 : params.openai) || new OpenAI({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
    this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
  }
  // Sends the conversation to OpenAI and streams the response into
  // eventSource. Returns the (possibly newly generated) thread id.
  async process(request) {
    const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToOpenAITool);
    const threadId = threadIdFromRequest ?? randomUUID();
    // Drop tool-result messages that have no matching action-execution message
    // (and deduplicate results per execution id) — OpenAI rejects orphans.
    const validToolUseIds = /* @__PURE__ */ new Set();
    for (const message of messages) {
      if (message.isActionExecutionMessage()) {
        validToolUseIds.add(message.id);
      }
    }
    const filteredMessages = messages.filter((message) => {
      if (message.isResultMessage()) {
        if (!validToolUseIds.has(message.actionExecutionId)) {
          return false;
        }
        validToolUseIds.delete(message.actionExecutionId);
        return true;
      }
      return true;
    });
    let openaiMessages = filteredMessages.map((m) => convertMessageToOpenAIMessage(m, {
      keepSystemRole: this.keepSystemRole
    }));
    openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      // "function" tool choice needs the expanded object form on the wire.
      toolChoice = {
        type: "function",
        function: {
          name: forwardedParameters.toolChoiceFunctionName
        }
      };
    }
    try {
      const stream = this.openai.beta.chat.completions.stream({
        model,
        stream: true,
        messages: openaiMessages,
        ...tools.length > 0 && {
          tools
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
          max_tokens: forwardedParameters.maxTokens
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
          stop: forwardedParameters.stop
        },
        ...toolChoice && {
          tool_choice: toolChoice
        },
        ...this.disableParallelToolCalls && {
          parallel_tool_calls: false
        },
        ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
          temperature: forwardedParameters.temperature
        }
      });
      eventSource.stream(async (eventStream$) => {
        var _a, _b;
        // Two-state machine: mode is null (idle), "message" (inside a text
        // message) or "function" (inside a tool call). Transitions close the
        // previous event before opening the next.
        let mode = null;
        let currentMessageId;
        let currentToolCallId;
        try {
          for await (const chunk of stream) {
            if (chunk.choices.length === 0) {
              continue;
            }
            const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
            const content = chunk.choices[0].delta.content;
            // A tool call with a fresh id (or end of tool deltas) terminates
            // whatever event is currently open.
            if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
              mode = null;
              eventStream$.sendTextMessageEnd({
                messageId: currentMessageId
              });
            } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
              mode = null;
              eventStream$.sendActionExecutionEnd({
                actionExecutionId: currentToolCallId
              });
            }
            if (mode === null) {
              if (toolCall == null ? void 0 : toolCall.id) {
                mode = "function";
                currentToolCallId = toolCall.id;
                eventStream$.sendActionExecutionStart({
                  actionExecutionId: currentToolCallId,
                  parentMessageId: chunk.id,
                  actionName: toolCall.function.name
                });
              } else if (content) {
                mode = "message";
                currentMessageId = chunk.id;
                eventStream$.sendTextMessageStart({
                  messageId: currentMessageId
                });
              }
            }
            // Forward the chunk's payload into the currently open event.
            if (mode === "message" && content) {
              eventStream$.sendTextMessageContent({
                messageId: currentMessageId,
                content
              });
            } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
              eventStream$.sendActionExecutionArgs({
                actionExecutionId: currentToolCallId,
                args: toolCall.function.arguments
              });
            }
          }
          // Close whichever event is still open when the stream ends.
          if (mode === "message") {
            eventStream$.sendTextMessageEnd({
              messageId: currentMessageId
            });
          } else if (mode === "function") {
            eventStream$.sendActionExecutionEnd({
              actionExecutionId: currentToolCallId
            });
          }
        } catch (error) {
          console.error("[OpenAI] Error processing stream:", error);
          throw error;
        }
        eventStream$.complete();
      });
    } catch (error) {
      console.error("[OpenAI] Error during API call:", error);
      throw error;
    }
    return {
      threadId
    };
  }
};
// Tag the class with its original name via the bundler-provided __name helper.
__name(OpenAIAdapter, "OpenAIAdapter");
// src/service-adapters/langchain/utils.ts
import { AIMessage, HumanMessage, SystemMessage, ToolMessage } from "@langchain/core/messages";
import { DynamicStructuredTool } from "@langchain/core/tools";
import { randomId, convertJsonSchemaToZodSchema } from "@copilotkit/shared";
// Translates a CopilotKit message into the corresponding LangChain message
// class. Returns undefined for message kinds (or text roles) that have no
// LangChain equivalent.
function convertMessageToLangChainMessage(message) {
  if (message.isTextMessage()) {
    switch (message.role) {
      case "user":
        return new HumanMessage(message.content);
      case "assistant":
        return new AIMessage(message.content);
      case "system":
        return new SystemMessage(message.content);
    }
    return;
  }
  if (message.isActionExecutionMessage()) {
    // Action executions map onto an AI message carrying a single tool call.
    return new AIMessage({
      content: "",
      tool_calls: [
        {
          id: message.id,
          args: message.arguments,
          name: message.name
        }
      ]
    });
  }
  if (message.isResultMessage()) {
    return new ToolMessage({
      content: message.result,
      tool_call_id: message.actionExecutionId
    });
  }
}
__name(convertMessageToLangChainMessage, "convertMessageToLangChainMessage");
// Wraps an action definition as a LangChain DynamicStructuredTool. The tool's
// func is a no-op returning "" — actual execution happens on the CopilotKit side.
function convertActionInputToLangChainTool(actionInput) {
  const schema = convertJsonSchemaToZodSchema(JSON.parse(actionInput.jsonSchema), true);
  return new DynamicStructuredTool({
    name: actionInput.name,
    description: actionInput.description,
    schema,
    func: async () => ""
  });
}
__name(convertActionInputToLangChainTool, "convertActionInputToLangChainTool");
// Duck-type check for a LangChain AIMessage via its toString tag; avoids
// instanceof, which breaks across duplicated langchain installs.
function isAIMessage(message) {
  const tag = Object.prototype.toString.call(message);
  return tag === "[object AIMessage]";
}
__name(isAIMessage, "isAIMessage");
// Duck-type check for a LangChain AIMessageChunk via its toString tag.
function isAIMessageChunk(message) {
  const tag = Object.prototype.toString.call(message);
  return tag === "[object AIMessageChunk]";
}
__name(isAIMessageChunk, "isAIMessageChunk");
// Duck-type check for a LangChain BaseMessageChunk via its toString tag.
function isBaseMessageChunk(message) {
  const tag = Object.prototype.toString.call(message);
  return tag === "[object BaseMessageChunk]";
}
__name(isBaseMessageChunk, "isBaseMessageChunk");
// If a pending action execution is being answered with a plain message, close
// it out by emitting a placeholder result event. No-op without an execution.
function maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution) {
  if (!actionExecution) {
    return;
  }
  eventStream$.sendActionExecutionResult({
    actionExecutionId: actionExecution.id,
    actionName: actionExecution.name,
    result: "Sending a message"
  });
}
__name(maybeSendActionExecutionResultIsMessage, "maybeSendActionExecutionResultIsMessage");
// Streams an arbitrary LangChain handler return value into CopilotKit events.
// Accepts: a plain string, an AIMessage, a BaseMessageChunk, a ReadableStream
// of message chunks, or (with an actionExecution context) any serializable
// value. Always completes the event stream before returning. Throws for
// unsupported return types without an action context.
async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
  if (typeof result === "string") {
    if (!actionExecution) {
      // Bare string => a single text message.
      eventStream$.sendTextMessage(randomId(), result);
    } else {
      // String inside an action context => the action's result.
      eventStream$.sendActionExecutionResult({
        actionExecutionId: actionExecution.id,
        actionName: actionExecution.name,
        result
      });
    }
  } else if (isAIMessage(result)) {
    maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
    if (result.content) {
      eventStream$.sendTextMessage(randomId(), result.content);
    }
    for (const toolCall of result.tool_calls) {
      eventStream$.sendActionExecution({
        actionExecutionId: toolCall.id || randomId(),
        actionName: toolCall.name,
        args: JSON.stringify(toolCall.args)
      });
    }
  } else if (isBaseMessageChunk(result)) {
    maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
    if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
      eventStream$.sendTextMessage(randomId(), result.content);
    }
    if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
      for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
        eventStream$.sendActionExecution({
          actionExecutionId: toolCall.id || randomId(),
          actionName: toolCall.name,
          args: JSON.stringify(toolCall.args)
        });
      }
    }
  } else if (result && "getReader" in result) {
    // Streaming case: consume the ReadableStream chunk by chunk, tracking
    // whether we are inside a text message or a tool call ("mode" machine).
    maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
    let reader = result.getReader();
    let mode = null;
    let currentMessageId;
    // Accumulated identity of the current tool call; index/prevIndex detect
    // when the provider switches to a different parallel tool call.
    const toolCallDetails = {
      name: null,
      id: null,
      index: null,
      prevIndex: null
    };
    while (true) {
      try {
        const { done, value } = await reader.read();
        let toolCallName = void 0;
        let toolCallId = void 0;
        let toolCallArgs = void 0;
        let hasToolCall = false;
        let content = "";
        if (value && value.content) {
          // Content may be a string or an array of content parts; use the
          // first part's text in the array case.
          content = Array.isArray(value.content) ? ((_d = value.content[0]) == null ? void 0 : _d.text) ?? "" : value.content;
        }
        if (isAIMessageChunk(value)) {
          let chunk = (_e = value.tool_call_chunks) == null ? void 0 : _e[0];
          toolCallArgs = chunk == null ? void 0 : chunk.args;
          hasToolCall = chunk != void 0;
          if (chunk == null ? void 0 : chunk.name)
            toolCallDetails.name = chunk.name;
          if ((chunk == null ? void 0 : chunk.index) != null) {
            toolCallDetails.index = chunk.index;
            if (toolCallDetails.prevIndex == null)
              toolCallDetails.prevIndex = chunk.index;
          }
          if (chunk == null ? void 0 : chunk.id)
            // Suffix the id with the call index so parallel calls stay distinct.
            toolCallDetails.id = chunk.index != null ? `${chunk.id}-idx-${chunk.index}` : chunk.id;
          toolCallName = toolCallDetails.name;
          toolCallId = toolCallDetails.id;
        } else if (isBaseMessageChunk(value)) {
          let chunk = (_g = (_f = value.additional_kwargs) == null ? void 0 : _f.tool_calls) == null ? void 0 : _g[0];
          toolCallName = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.name;
          toolCallId = chunk == null ? void 0 : chunk.id;
          toolCallArgs = (_i = chunk == null ? void 0 : chunk.function) == null ? void 0 : _i.arguments;
          hasToolCall = (chunk == null ? void 0 : chunk.function) != void 0;
        }
        // Close the open event when the stream ends or switches kind.
        if (mode === "message" && (toolCallId || done)) {
          mode = null;
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function" && (!hasToolCall || done)) {
          mode = null;
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: toolCallId
          });
        }
        if (done) {
          break;
        }
        if (mode === null) {
          if (hasToolCall && toolCallId && toolCallName) {
            mode = "function";
            eventStream$.sendActionExecutionStart({
              actionExecutionId: toolCallId,
              actionName: toolCallName,
              parentMessageId: (_j = value.lc_kwargs) == null ? void 0 : _j.id
            });
          } else if (content) {
            mode = "message";
            currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || randomId();
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
          }
        }
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content
          });
        } else if (mode === "function" && toolCallArgs) {
          // The provider moved on to another tool call index: end the current
          // execution and start a new one before forwarding args.
          if (toolCallDetails.index !== toolCallDetails.prevIndex) {
            eventStream$.sendActionExecutionEnd({
              actionExecutionId: toolCallId
            });
            eventStream$.sendActionExecutionStart({
              actionExecutionId: toolCallId,
              actionName: toolCallName,
              parentMessageId: (_l = value.lc_kwargs) == null ? void 0 : _l.id
            });
            toolCallDetails.prevIndex = toolCallDetails.index;
          }
          eventStream$.sendActionExecutionArgs({
            actionExecutionId: toolCallId,
            args: toolCallArgs
          });
        }
      } catch (error) {
        // Best-effort: stop streaming on a read error rather than crashing.
        console.error("Error reading from stream", error);
        break;
      }
    }
  } else if (actionExecution) {
    // Any other value with an action context becomes the serialized result.
    eventStream$.sendActionExecutionResult({
      actionExecutionId: actionExecution.id,
      actionName: actionExecution.name,
      result: encodeResult(result)
    });
  } else {
    throw new Error("Invalid return type from LangChain function.");
  }
  eventStream$.complete();
}
// Tag the function with its original name via the bundler-provided __name helper.
__name(streamLangChainResponse, "streamLangChainResponse");
// Normalizes an arbitrary handler return value into the string wire format:
// undefined => "", strings pass through, everything else is JSON-serialized.
function encodeResult(result) {
  if (result === void 0) {
    return "";
  }
  return typeof result === "string" ? result : JSON.stringify(result);
}
__name(encodeResult, "encodeResult");
// src/service-adapters/langchain/langchain-adapter.ts
import { randomUUID as randomUUID2 } from "@copilotkit/shared";
import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";
// Service adapter that delegates to a user-supplied LangChain chain function
// and streams whatever it returns through streamLangChainResponse.
var LangChainAdapter = class {
  options;
  /**
   * To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.
   */
  constructor(options) {
    this.options = options;
  }
  // Converts the request into LangChain messages/tools, invokes the chain
  // function, and streams its result. LangChain callbacks are flushed before
  // returning so pending tracing/handlers finish.
  async process(request) {
    try {
      const { eventSource, model, actions, messages, runId, threadId: threadIdFromRequest } = request;
      const threadId = threadIdFromRequest ?? randomUUID2();
      const result = await this.options.chainFn({
        messages: messages.map(convertMessageToLangChainMessage),
        tools: actions.map(convertActionInputToLangChainTool),
        model,
        threadId,
        runId
      });
      eventSource.stream(async (eventStream$) => {
        await streamLangChainResponse({
          result,
          eventStream$
        });
      });
      return {
        threadId
      };
    } finally {
      // Always flush LangChain callbacks, even when chainFn throws.
      await awaitAllCallbacks();
    }
  }
};
// Tag the class with its original name via the bundler-provided __name helper.
__name(LangChainAdapter, "LangChainAdapter");
// src/service-adapters/google/google-genai-adapter.ts
import { ChatGoogle } from "@langchain/google-gauth";
import { AIMessage as AIMessage2 } from "@langchain/core/messages";
// Google Gemini adapter built on top of LangChainAdapter: wires a ChatGoogle
// model (default "gemini-1.5-pro") into the generic LangChain streaming path.
var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
  constructor(options) {
    super({
      chainFn: async ({ messages, tools, threadId }) => {
        // Drop AI messages that carry neither text content nor tool calls —
        // empty AI turns are filtered out before calling the model.
        const filteredMessages = messages.filter((message) => {
          if (!(message instanceof AIMessage2)) {
            return true;
          }
          return message.content && String(message.content).trim().length > 0 || message.tool_calls && message.tool_calls.length > 0;
        });
        const model = new ChatGoogle({
          modelName: (options == null ? void 0 : options.model) ?? "gemini-1.5-pro",
          apiVersion: "v1beta"
        }).bindTools(tools);
        return model.stream(filteredMessages, {
          metadata: {
            conversation_id: threadId
          }
        });
      }
    });
  }
};
// Tag the class with its original name via the bundler-provided __name helper.
__name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
// src/service-adapters/openai/openai-assistant-adapter.ts
import OpenAI2 from "openai";
// Service adapter for the OpenAI Assistants API. Conversation state lives in
// an OpenAI thread; the thread id and run id are round-tripped through
// request.extensions.openaiAssistantAPI so follow-up requests resume the
// same thread/run.
var OpenAIAssistantAdapter = class {
  openai;
  // Whether the assistant may use the code_interpreter tool (default: true,
  // disabled only by passing an explicit `false`).
  codeInterpreterEnabled;
  assistantId;
  // Whether the assistant may use the file_search tool (default: true,
  // disabled only by passing an explicit `false`).
  fileSearchEnabled;
  disableParallelToolCalls;
  keepSystemRole = false;
  constructor(params) {
    this.openai = params.openai || new OpenAI2({});
    // BUGFIX: previously `params.codeInterpreterEnabled === false || true`,
    // which always evaluates to `true` (`(x === false) || true` is true for
    // every x), so these tools could never be turned off. The intent is
    // "enabled unless explicitly set to false".
    this.codeInterpreterEnabled = params.codeInterpreterEnabled !== false;
    this.fileSearchEnabled = params.fileSearchEnabled !== false;
    this.assistantId = params.assistantId;
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
    this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
  }
  // Routes the request: tool results resume an in-flight run via
  // submitToolOutputs; a trailing text message starts a new run. Returns the
  // run/thread ids, also mirrored into extensions for the next request.
  async process(request) {
    var _a, _b;
    const { messages, actions, eventSource, runId, forwardedParameters } = request;
    // Reuse the thread id from a previous round trip, or create a new thread.
    let threadId = (_b = (_a = request.extensions) == null ? void 0 : _a.openaiAssistantAPI) == null ? void 0 : _b.threadId;
    if (!threadId) {
      threadId = (await this.openai.beta.threads.create()).id;
    }
    const lastMessage = messages.at(-1);
    // Guard: an empty message list previously crashed with a TypeError here.
    if (!lastMessage) {
      throw new Error("No actionable message found in the messages");
    }
    let nextRunId = void 0;
    if (lastMessage.isResultMessage() && runId) {
      nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
    } else if (lastMessage.isTextMessage()) {
      nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters);
    } else {
      throw new Error("No actionable message found in the messages");
    }
    return {
      runId: nextRunId,
      threadId,
      extensions: {
        ...request.extensions,
        openaiAssistantAPI: {
          threadId,
          runId: nextRunId
        }
      }
    };
  }
  // Feeds tool results back into a run that is waiting on required_action.
  // Throws when the run expects no outputs or when the result count does not
  // match the pending tool calls.
  async submitToolOutputs(threadId, runId, messages, eventSource) {
    let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
    if (!run.required_action) {
      throw new Error("No tool outputs required");
    }
    const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map((toolCall) => toolCall.id);
    const resultMessages = messages.filter((message) => message.isResultMessage() && toolCallsIds.includes(message.actionExecutionId));
    if (toolCallsIds.length != resultMessages.length) {
      throw new Error("Number of function results does not match the number of tool calls");
    }
    const toolOutputs = resultMessages.map((message) => {
      return {
        tool_call_id: message.actionExecutionId,
        output: message.result
      };
    });
    const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
      tool_outputs: toolOutputs,
      ...this.disableParallelToolCalls && {
        parallel_tool_calls: false
      }
    });
    await this.streamResponse(stream, eventSource);
    return runId;
  }
  // Posts the latest user message to the thread and starts a streamed run.
  // The first message is treated as the run's instructions (if textual).
  async submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters) {
    messages = [
      ...messages
    ];
    const instructionsMessage = messages.shift();
    const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
    const userMessage = messages.map((m) => convertMessageToOpenAIMessage(m, {
      keepSystemRole: this.keepSystemRole
    })).map(convertSystemMessageToAssistantAPI).at(-1);
    if (userMessage.role !== "user") {
      throw new Error("No user message found");
    }
    await this.openai.beta.threads.messages.create(threadId, {
      role: "user",
      content: userMessage.content
    });
    const openaiTools = actions.map(convertActionInputToOpenAITool);
    // Built-in tools are appended after the action-derived function tools.
    const tools = [
      ...openaiTools,
      ...this.codeInterpreterEnabled ? [
        {
          type: "code_interpreter"
        }
      ] : [],
      ...this.fileSearchEnabled ? [
        {
          type: "file_search"
        }
      ] : []
    ];
    let stream = this.openai.beta.threads.runs.stream(threadId, {
      assistant_id: this.assistantId,
      instructions,
      tools,
      ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
        max_completion_tokens: forwardedParameters.maxTokens
      },
      ...this.disableParallelToolCalls && {
        parallel_tool_calls: false
      }
    });
    await this.streamResponse(stream, eventSource);
    return getRunIdFromStream(stream);
  }
  // Re-emits Assistants streaming events as CopilotKit text-message and
  // action-execution events, tracking whether a function call is open.
  async streamResponse(stream, eventSource) {
    eventSource.stream(async (eventStream$) => {
      var _a, _b, _c, _d, _e, _f;
      let inFunctionCall = false;
      let currentMessageId;
      let currentToolCallId;
      for await (const chunk of stream) {
        switch (chunk.event) {
          case "thread.message.created":
            // A new assistant message implicitly ends any open function call.
            if (inFunctionCall) {
              eventStream$.sendActionExecutionEnd({
                actionExecutionId: currentToolCallId
              });
            }
            currentMessageId = chunk.data.id;
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
            break;
          case "thread.message.delta":
            if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
              eventStream$.sendTextMessageContent({
                messageId: currentMessageId,
                content: (_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value
              });
            }
            break;
          case "thread.message.completed":
            eventStream$.sendTextMessageEnd({
              messageId: currentMessageId
            });
            break;
          case "thread.run.step.delta":
            let toolCallId;
            let toolCallName;
            let toolCallArgs;
            if (chunk.data.delta.step_details.type === "tool_calls" && ((_c = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _c[0].type) === "function") {
              toolCallId = (_d = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _d[0].id;
              toolCallName = (_e = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _e[0].function.name;
              toolCallArgs = (_f = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _f[0].function.arguments;
            }
            // Name+id present => a new function call starts (closing any open
            // one); args alone => continuation of the current call.
            if (toolCallName && toolCallId) {
              if (inFunctionCall) {
                eventStream$.sendActionExecutionEnd({
                  actionExecutionId: currentToolCallId
                });
              }
              inFunctionCall = true;
              currentToolCallId = toolCallId;
              eventStream$.sendActionExecutionStart({
                actionExecutionId: currentToolCallId,
                parentMessageId: chunk.data.id,
                actionName: toolCallName
              });
            } else if (toolCallArgs) {
              eventStream$.sendActionExecutionArgs({
                actionExecutionId: currentToolCallId,
                args: toolCallArgs
              });
            }
            break;
        }
      }
      // Close any function call still open when the stream ends.
      if (inFunctionCall) {
        eventStream$.sendActionExecutionEnd({
          actionExecutionId: currentToolCallId
        });
      }
      eventStream$.complete();
    });
  }
};
__name(OpenAIAssistantAdapter, "OpenAIAssistantAdapter");
// Resolves with the run id once the stream emits "thread.run.created"; the
// listener detaches itself after firing. NOTE(review): the promise never
// settles if that event is never emitted — callers should not await this
// without the stream having started.
function getRunIdFromStream(stream) {
  return new Promise((resolve, reject) => {
    let runIdGetter = /* @__PURE__ */ __name((event) => {
      if (event.event === "thread.run.created") {
        const runId = event.data.id;
        stream.off("event", runIdGetter);
        resolve(runId);
      }
    }, "runIdGetter");
    stream.on("event", runIdGetter);
  });
}
// Tag the function with its original name via the bundler-provided __name helper.
__name(getRunIdFromStream, "getRunIdFromStream");
// src/service-adapters/unify/unify-adapter.ts
import OpenAI3 from "openai";
import { randomId as randomId2, randomUUID as randomUUID3 } from "@copilotkit/shared";
// Service adapter for Unify's OpenAI-compatible endpoint. Prepends a one-off
// "Model used: ..." text message on the first streamed chunk, then relays the
// stream like the other OpenAI-style adapters.
var UnifyAdapter = class {
  apiKey;
  model;
  // One-shot flag for emitting the "Model used" banner. NOTE(review): this is
  // instance state, so the banner is only sent on the first request served by
  // this adapter instance.
  start;
  constructor(options) {
    if (options == null ? void 0 : options.apiKey) {
      this.apiKey = options.apiKey;
    } else {
      // Placeholder literal used when no key is configured.
      this.apiKey = "UNIFY_API_KEY";
    }
    this.model = options == null ? void 0 : options.model;
    this.start = true;
  }
  async process(request) {
    const tools = request.actions.map(convertActionInputToOpenAITool);
    const openai = new OpenAI3({
      apiKey: this.apiKey,
      baseURL: "https://api.unify.ai/v0/"
    });
    const forwardedParameters = request.forwardedParameters;
    const messages = request.messages.map((m) => convertMessageToOpenAIMessage(m));
    const stream = await openai.chat.completions.create({
      model: this.model,
      messages,
      stream: true,
      ...tools.length > 0 && {
        tools
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
        temperature: forwardedParameters.temperature
      }
    });
    let model = null;
    let currentMessageId;
    let currentToolCallId;
    request.eventSource.stream(async (eventStream$) => {
      var _a, _b;
      // Same null/"message"/"function" state machine as OpenAIAdapter.
      let mode = null;
      for await (const chunk of stream) {
        if (this.start) {
          // First chunk: announce which concrete model Unify routed to.
          model = chunk.model;
          currentMessageId = randomId2();
          eventStream$.sendTextMessageStart({
            messageId: currentMessageId
          });
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content: `Model used: ${model}
`
          });
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
          this.start = false;
        }
        const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
        const content = chunk.choices[0].delta.content;
        // A fresh tool-call id (or end of tool deltas) closes the open event.
        if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
          mode = null;
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
          mode = null;
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: currentToolCallId
          });
        }
        if (mode === null) {
          if (toolCall == null ? void 0 : toolCall.id) {
            mode = "function";
            currentToolCallId = toolCall.id;
            eventStream$.sendActionExecutionStart({
              actionExecutionId: currentToolCallId,
              actionName: toolCall.function.name
            });
          } else if (content) {
            mode = "message";
            currentMessageId = chunk.id;
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
          }
        }
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content
          });
        } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
          eventStream$.sendActionExecutionArgs({
            actionExecutionId: currentToolCallId,
            args: toolCall.function.arguments
          });
        }
      }
      // Close whichever event is still open when the stream ends.
      if (mode === "message") {
        eventStream$.sendTextMessageEnd({
          messageId: currentMessageId
        });
      } else if (mode === "function") {
        eventStream$.sendActionExecutionEnd({
          actionExecutionId: currentToolCallId
        });
      }
      eventStream$.complete();
    });
    return {
      threadId: request.threadId || randomUUID3()
    };
  }
};
// Tag the class with its original name via the bundler-provided __name helper.
__name(UnifyAdapter, "UnifyAdapter");
// src/service-adapters/groq/groq-adapter.ts
import { Groq } from "groq-sdk";
import { randomUUID as randomUUID4 } from "@copilotkit/shared";
// Default Groq model when none is configured.
var DEFAULT_MODEL2 = "llama-3.3-70b-versatile";
// Service adapter for Groq's OpenAI-compatible chat API; reuses the OpenAI
// message/tool converters and the same streaming state machine.
var GroqAdapter = class {
  model = DEFAULT_MODEL2;
  disableParallelToolCalls = false;
  _groq;
  get groq() {
    return this._groq;
  }
  constructor(params) {
    this._groq = (params == null ? void 0 : params.groq) || new Groq({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
  }
  async process(request) {
    const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToOpenAITool);
    // Groq keeps the "system" role as-is (no "developer" demotion).
    let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m, {
      keepSystemRole: true
    }));
    openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      toolChoice = {
        type: "function",
        function: {
          name: forwardedParameters.toolChoiceFunctionName
        }
      };
    }
    const stream = await this.groq.chat.completions.create({
      model,
      stream: true,
      messages: openaiMessages,
      ...tools.length > 0 && {
        tools
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
        max_tokens: forwardedParameters.maxTokens
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
        stop: forwardedParameters.stop
      },
      ...toolChoice && {
        tool_choice: toolChoice
      },
      ...this.disableParallelToolCalls && {
        parallel_tool_calls: false
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
        temperature: forwardedParameters.temperature
      }
    });
    eventSource.stream(async (eventStream$) => {
      var _a, _b;
      // Same null/"message"/"function" state machine as OpenAIAdapter.
      let mode = null;
      let currentMessageId;
      let currentToolCallId;
      for await (const chunk of stream) {
        const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
        const content = chunk.choices[0].delta.content;
        if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
          mode = null;
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
          mode = null;
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: currentToolCallId
          });
        }
        if (mode === null) {
          if (toolCall == null ? void 0 : toolCall.id) {
            mode = "function";
            currentToolCallId = toolCall.id;
            eventStream$.sendActionExecutionStart({
              actionExecutionId: currentToolCallId,
              actionName: toolCall.function.name,
              parentMessageId: chunk.id
            });
          } else if (content) {
            mode = "message";
            currentMessageId = chunk.id;
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
          }
        }
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content
          });
        } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
          eventStream$.sendActionExecutionArgs({
            actionExecutionId: currentToolCallId,
            args: toolCall.function.arguments
          });
        }
      }
      // Close whichever event is still open when the stream ends.
      if (mode === "message") {
        eventStream$.sendTextMessageEnd({
          messageId: currentMessageId
        });
      } else if (mode === "function") {
        eventStream$.sendActionExecutionEnd({
          actionExecutionId: currentToolCallId
        });
      }
      eventStream$.complete();
    });
    return {
      threadId: request.threadId || randomUUID4()
    };
  }
};
// Tag the class with its original name via the bundler-provided __name helper.
__name(GroqAdapter, "GroqAdapter");
// src/service-adapters/anthropic/anthropic-adapter.ts
import Anthropic from "@anthropic-ai/sdk";
// src/service-adapters/anthropic/utils.ts
/**
 * Trims a conversation so it fits within a token budget.
 * System messages are always retained; other messages are kept newest-first
 * until the budget runs out, after which all older non-system messages are
 * dropped. Relative message order is preserved in the returned array.
 *
 * @param messages  conversation messages (objects with `role`/`content`)
 * @param tools     tool definitions whose serialized size counts against the budget
 * @param model     model identifier forwarded to the token counters
 * @param maxTokens optional budget; falls back to MAX_TOKENS when falsy
 * @throws {Error} when the tool definitions alone, or the system messages,
 *   exceed the budget
 */
function limitMessagesToTokenCount2(messages, tools, model, maxTokens) {
  if (!maxTokens) {
    maxTokens = MAX_TOKENS;
  }
  const toolsNumTokens = countToolsTokens2(model, tools);
  if (toolsNumTokens > maxTokens) {
    throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);
  }
  maxTokens -= toolsNumTokens;
  // Reserve budget for all system messages up front — they are never dropped.
  for (const message of messages) {
    if (message.role !== "system") {
      continue;
    }
    maxTokens -= countMessageTokens2(model, message);
    if (maxTokens < 0) {
      throw new Error("Not enough tokens for system message.");
    }
  }
  // Walk from newest to oldest; once a message no longer fits, stop keeping
  // any older non-system messages.
  const kept = [];
  let exhausted = false;
  for (let i = messages.length - 1; i >= 0; i--) {
    const message = messages[i];
    if (message.role === "system") {
      kept.unshift(message);
      continue;
    }
    if (exhausted) {
      continue;
    }
    const cost = countMessageTokens2(model, message);
    if (cost > maxTokens) {
      exhausted = true;
      continue;
    }
    kept.unshift(message);
    maxTokens -= cost;
  }
  return kept;
}
__name(limitMessagesToTokenCount2, "limitMessagesToTokenCount");
var MAX_TOKENS = 128e3;
/**
 * Estimates the token cost of a tool/function definition list by serializing
 * it to JSON and counting tokens on the serialized form. Returns 0 for an
 * empty list.
 */
function countToolsTokens2(model, tools) {
  if (!tools.length) {
    return 0;
  }
  const serialized = JSON.stringify(tools);
  return countTokens2(model, serialized);
}
__name(countToolsTokens2, "countToolsTokens");
/**
 * Estimates the token cost of a single message from its JSON-serialized
 * content (an empty string when the content serializes to undefined).
 */
function countMessageTokens2(model, message) {
  const serialized = JSON.stringify(message.content) || "";
  return countTokens2(model, serialized);
}
__name(countMessageTokens2, "countMessageTokens");
/**
 * Rough token-count heuristic: roughly one token per three characters.
 * The model argument is accepted for interface symmetry but is not used.
 */
function countTokens2(model, text) {
  const APPROX_CHARS_PER_TOKEN = 3;
  return text.length / APPROX_CHARS_PER_TOKEN;
}
__name(countTokens2, "countTokens");
/**
 * Maps a CopilotKit action definition onto Anthropic's tool format,
 * parsing the action's JSON-schema string into the `input_schema` object.
 * @throws {SyntaxError} if `action.jsonSchema` is not valid JSON.
 */
function convertActionInputToAnthropicTool(action) {
  const { name, description, jsonSchema } = action;
  return {
    name,
    description,
    input_schema: JSON.parse(jsonSchema)
  };
}
__name(convertActionInputToAnthropicTool, "convertActionInputToAnthropicTool");
/**
 * Converts a CopilotKit message into Anthropic's message format.
 * Text, image, action-execution and result messages are handled; any other
 * message kind yields undefined (callers filter those out).
 * @throws {Error} for image formats other than jpeg/png/webp/gif.
 */
function convertMessageToAnthropicMessage(message) {
  if (message.isTextMessage()) {
    // Anthropic has no mid-conversation "system" role, so system text is
    // relayed as an assistant message with an explicit prefix.
    const isSystem = message.role === "system";
    const text = isSystem ? "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content : message.content;
    const role = isSystem || message.role !== "user" ? "assistant" : "user";
    return {
      role,
      content: [
        {
          type: "text",
          text
        }
      ]
    };
  }
  if (message.isImageMessage()) {
    const formatToMediaType = {
      jpeg: "image/jpeg",
      png: "image/png",
      webp: "image/webp",
      gif: "image/gif"
    };
    if (!Object.hasOwn(formatToMediaType, message.format)) {
      throw new Error(`Unsupported image format: ${message.format}`);
    }
    return {
      role: "user",
      content: [
        {
          type: "image",
          source: {
            type: "base64",
            media_type: formatToMediaType[message.format],
            data: message.bytes
          }
        }
      ]
    };
  }
  if (message.isActionExecutionMessage()) {
    return {
      role: "assistant",
      content: [
        {
          id: message.id,
          type: "tool_use",
          input: message.arguments,
          name: message.name
        }
      ]
    };
  }
  if (message.isResultMessage()) {
    return {
      role: "user",
      content: [
        {
          type: "tool_result",
          content: message.result,
          tool_use_id: message.actionExecutionId
        }
      ]
    };
  }
  // Unknown message kinds fall through and return undefined.
}
__name(convertMessageToAnthropicMessage, "convertMessageToAnthropicMessage");
// src/service-adapters/anthropic/anthropic-adapter.ts
import { randomId as randomId3, randomUUID as randomUUID5 } from "@copilotkit/shared";
var DEFAULT_MODEL3 = "claude-3-5-sonnet-latest";
var AnthropicAdapter = class {
  // Instance-level default model; a per-request `request.model` takes
  // precedence inside process().
  model = DEFAULT_MODEL3;
  _anthropic;
  /** The underlying Anthropic SDK client used for all API calls. */
  get anthropic() {
    return this._anthropic;
  }
  /**
   * @param params Optional settings: `anthropic` (a preconfigured SDK client)
   *   and `model` (overrides DEFAULT_MODEL3). When no client is supplied a
   *   default one is constructed with no explicit options.
   */
  constructor(params) {
    this._anthropic = (params == null ? void 0 : params.anthropic) || new Anthropic({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
  }
  /**
   * Runs one chat turn against the Anthropic Messages API (streaming) and
   * re-emits the response — text content and tool calls — on the request's
   * event source.
   *
   * @param request Runtime request: `threadId`, optional `model`, `messages`,
   *   `actions`, `eventSource`, `forwardedParameters`.
   * @returns `{ threadId }`, generating a fresh UUID when none was supplied.
   */
  async process(request) {
    const { threadId, model = this.model, messages: rawMessages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToAnthropicTool);
    const messages = [
      ...rawMessages
    ];
    // The first raw message is treated as the system prompt; Anthropic takes
    // it via the top-level `system` parameter, not in the messages array.
    const instructionsMessage = messages.shift();
    const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
    // Collect tool_use ids so tool_result messages without a matching action
    // execution can be dropped below.
    const validToolUseIds = /* @__PURE__ */ new Set();
    for (const message of messages) {
      if (message.isActionExecutionMessage()) {
        validToolUseIds.add(message.id);
      }
    }
    const anthropicMessages = messages.map((message) => {
      if (message.isResultMessage()) {
        // Skip orphaned results; each tool_use id is answered at most once.
        if (!validToolUseIds.has(message.actionExecutionId)) {
          return null;
        }
        validToolUseIds.delete(message.actionExecutionId);
        return {
          role: "user",
          content: [
            {
              type: "tool_result",
              content: message.result,
              tool_use_id: message.actionExecutionId
            }
          ]
        };
      }
      return convertMessageToAnthropicMessage(message);
    }).filter(Boolean).filter((msg) => {
      // Drop assistant messages whose only content is empty text.
      if (msg.role === "assistant" && Array.isArray(msg.content)) {
        const hasEmptyTextOnly = msg.content.length === 1 && msg.content[0].type === "text" && (!msg.content[0].text || msg.content[0].text.trim() === "");
        return !hasEmptyTextOnly;
      }
      return true;
    });
    const limitedMessages = limitMessagesToTokenCount2(anthropicMessages, tools, model);
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      // "function" maps to Anthropic's named-tool choice form.
      toolChoice = {
        type: "tool",
        name: forwardedParameters.toolChoiceFunctionName
      };
    }
    try {
      const createParams = {
        system: instructions,
        // Fix: honor a request-level model override. This was previously
        // hard-coded to `this.model`, ignoring `request.model` even though
        // the token limiting above already used the overridden value.
        model,
        messages: limitedMessages,
        max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
        ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
          temperature: forwardedParameters.temperature
        } : {},
        ...tools.length > 0 && {
          tools
        },
        ...toolChoice && {
          tool_choice: toolChoice
        },
        stream: true
      };
      const stream = await this.anthropic.messages.create(createParams);
      eventSource.stream(async (eventStream$) => {
        // mode tracks whether we are inside a text block ("message") or a
        // tool_use block ("function") of the streamed response.
        let mode = null;
        let didOutputText = false;
        let currentMessageId = randomId3();
        let currentToolCallId = randomId3();
        // Filters <thinking>...</thinking> spans out of streamed text deltas.
        let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
        try {
          for await (const chunk of stream) {
            if (chunk.type === "message_start") {
              currentMessageId = chunk.message.id;
            } else if (chunk.type === "content_block_start") {
              if (chunk.content_block.type === "text") {
                didOutputText = false;
                filterThinkingTextBuffer.reset();
                mode = "message";
              } else if (chunk.content_block.type === "tool_use") {
                currentToolCallId = chunk.content_block.id;
                eventStream$.sendActionExecutionStart({
                  actionExecutionId: currentToolCallId,
                  actionName: chunk.content_block.name,
                  parentMessageId: currentMessageId
                });
                mode = "function";
              }
            } else if (chunk.type === "content_block_delta") {
              if (chunk.delta.type === "text_delta") {
                const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
                if (text.length > 0) {
                  // Defer sendTextMessageStart until the first visible
                  // character so fully-filtered blocks emit nothing.
                  if (!didOutputText) {
                    eventStream$.sendTextMessageStart({
                      messageId: currentMessageId
                    });
                    didOutputText = true;
                  }
                  eventStream$.sendTextMessageContent({
                    messageId: currentMessageId,
                    content: text
                  });
                }
              } else if (chunk.delta.type === "input_json_delta") {
                eventStream$.sendActionExecutionArgs({
                  actionExecutionId: currentToolCallId,
                  args: chunk.delta.partial_json
                });
              }
            } else if (chunk.type === "content_block_stop") {
              if (mode === "message") {
                if (didOutputText) {
                  eventStream$.sendTextMessageEnd({
                    messageId: currentMessageId
                  });
                }
              } else if (mode === "function") {
                eventStream$.sendActionExecutionEnd({
                  actionExecutionId: currentToolCallId
                });
              }
            }
          }
        } catch (error) {
          console.error("[Anthropic] Error processing stream:", error);
          throw error;
        }
        eventStream$.complete();
      });
    } catch (error) {
      console.error("[Anthropic] Error during API call:", error);
      throw error;
    }
    return {
      threadId: threadId || randomUUID5()
    };
  }
};
__name(AnthropicAdapter, "AnthropicAdapter");
// Delimiters used by FilterThinkingTextBuffer below to detect and strip a
// leading <thinking>...</thinking> span from the streamed model output.
var THINKING_TAG = "<thinking>";
var THINKING_TAG_END = "</thinking>";
var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBuffer2 {
buffer;
didFilterThinkingTag = false;
constructor() {
this.buffer = "";
}
onTextChunk(text) {
this.buffer += text;
if (this.didFilterThinkingTag) {
return text;
}
const potentialTag = this.buffer.slice(0, THINKING_TAG.length);
if (THINKING_TAG.startsWith(potentialTag)) {