@copilotkit/runtime
Version:
<div align="center"> <a href="https://copilotkit.ai" target="_blank"> <img src="https://github.com/copilotkit/copilotkit/raw/main/assets/banner.png" alt="CopilotKit Logo"> </a>
1,386 lines (1,368 loc) • 266 kB
JavaScript
// ---------------------------------------------------------------------------
// esbuild CommonJS interop helpers (generated). Cached aliases of the Object
// intrinsics, plus the runtime shims the bundler emits for every module.
// ---------------------------------------------------------------------------
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Sets fn.name so minified functions keep readable names in stack traces.
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
// Wraps a CommonJS module factory: the factory runs once on first require()
// and the resulting module.exports is cached in `mod` for later calls.
var __commonJS = (cb, mod) => function __require() {
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
// Defines live getter re-exports on `target` for every key in `all`.
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
// Copies the enumerable own properties of `from3` onto `to` as live getters,
// skipping `except` and anything already present on `to`. Returns `to`.
var __copyProps = (to, from3, except, desc) => {
if (from3 && typeof from3 === "object" || typeof from3 === "function") {
for (let key of __getOwnPropNames(from3))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from3[key], enumerable: !(desc = __getOwnPropDesc(from3, key)) || desc.enumerable });
}
return to;
};
// Converts a required CommonJS module into an ESM-shaped namespace object.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
// Marks a namespace object as __esModule and mirrors its exports for CJS use.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// package.json
// Inlined copy of the package manifest, wrapped as a lazily-evaluated
// CommonJS module so runtime code can read e.g. the package name/version
// without a filesystem lookup.
var require_package = __commonJS({
"package.json"(exports, module2) {
module2.exports = {
name: "@copilotkit/runtime",
private: false,
homepage: "https://github.com/CopilotKit/CopilotKit",
repository: {
type: "git",
url: "https://github.com/CopilotKit/CopilotKit.git"
},
publishConfig: {
access: "public"
},
version: "1.9.1",
sideEffects: false,
main: "./dist/index.js",
module: "./dist/index.mjs",
exports: {
".": "./dist/index.js"
},
types: "./dist/index.d.ts",
license: "MIT",
scripts: {
build: 'tsup --onSuccess "pnpm run generate-graphql-schema"',
dev: 'tsup --watch --onSuccess "pnpm run generate-graphql-schema"',
test: "jest --passWithNoTests",
"check-types": "tsc --noEmit",
clean: "rm -rf .turbo && rm -rf node_modules && rm -rf dist && rm -rf .next && rm -rf __snapshots__",
"generate-graphql-schema": "rm -rf __snapshots__ && ts-node ./scripts/generate-gql-schema.ts",
"link:global": "pnpm link --global",
"unlink:global": "pnpm unlink --global"
},
devDependencies: {
"@jest/globals": "^29.7.0",
"@swc/core": "1.5.28",
"@types/express": "^4.17.21",
"@types/jest": "^29.5.12",
"@types/node": "^18.11.17",
"@whatwg-node/server": "^0.9.34",
eslint: "^8.56.0",
"eslint-config-custom": "workspace:*",
jest: "^29.6.4",
nodemon: "^3.1.3",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.2",
tsconfig: "workspace:*",
tsup: "^6.7.0",
typescript: "^5.2.3",
"zod-to-json-schema": "^3.23.5"
},
dependencies: {
"@ag-ui/client": "0.0.28",
"@ag-ui/core": "0.0.28",
"@ag-ui/encoder": "0.0.28",
"@ag-ui/langgraph": "0.0.3",
"@ag-ui/proto": "0.0.28",
"@anthropic-ai/sdk": "^0.27.3",
"@copilotkit/shared": "workspace:*",
"@graphql-yoga/plugin-defer-stream": "^3.3.1",
"@langchain/aws": "^0.1.9",
"@langchain/community": "^0.3.29",
"@langchain/core": "^0.3.38",
"@langchain/google-gauth": "^0.1.0",
"@langchain/langgraph-sdk": "^0.0.70",
"@langchain/openai": "^0.4.2",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.1",
express: "^4.19.2",
graphql: "^16.8.1",
"graphql-scalars": "^1.23.0",
"graphql-yoga": "^5.3.1",
"groq-sdk": "^0.5.0",
langchain: "^0.3.3",
openai: "^4.85.1",
"partial-json": "^0.1.7",
pino: "^9.2.0",
"pino-pretty": "^11.2.1",
"reflect-metadata": "^0.2.2",
rxjs: "^7.8.1",
"type-graphql": "2.0.0-rc.1",
zod: "^3.23.3"
},
peerDependencies: {
"@ag-ui/client": ">=0.0.28",
"@ag-ui/core": ">=0.0.28",
"@ag-ui/encoder": ">=0.0.28",
"@ag-ui/proto": ">=0.0.28"
},
keywords: [
"copilotkit",
"copilot",
"react",
"nextjs",
"nodejs",
"ai",
"assistant",
"javascript",
"automation",
"textarea"
]
};
}
});
// src/index.ts
// Public export surface of @copilotkit/runtime. Each entry is wired up as a
// live getter via __export so consumers always see the final bound value.
var src_exports = {};
__export(src_exports, {
AnthropicAdapter: () => AnthropicAdapter,
BedrockAdapter: () => BedrockAdapter,
CopilotRuntime: () => CopilotRuntime,
CustomEventNames: () => CustomEventNames2,
EmptyAdapter: () => EmptyAdapter,
ExperimentalEmptyAdapter: () => ExperimentalEmptyAdapter,
ExperimentalOllamaAdapter: () => ExperimentalOllamaAdapter,
GoogleGenerativeAIAdapter: () => GoogleGenerativeAIAdapter,
GroqAdapter: () => GroqAdapter,
GuardrailsValidationFailureResponse: () => GuardrailsValidationFailureResponse,
LangChainAdapter: () => LangChainAdapter,
LangGraphAgent: () => LangGraphAgent,
MessageStreamInterruptedResponse: () => MessageStreamInterruptedResponse,
OpenAIAdapter: () => OpenAIAdapter,
OpenAIAssistantAdapter: () => OpenAIAssistantAdapter,
RemoteChain: () => RemoteChain,
UnifyAdapter: () => UnifyAdapter,
UnknownErrorResponse: () => UnknownErrorResponse,
addCustomHeaderPlugin: () => addCustomHeaderPlugin,
buildSchema: () => buildSchema,
config: () => config,
convertMCPToolsToActions: () => convertMCPToolsToActions,
copilotKitEndpoint: () => copilotKitEndpoint,
copilotRuntimeNestEndpoint: () => copilotRuntimeNestEndpoint,
copilotRuntimeNextJSAppRouterEndpoint: () => copilotRuntimeNextJSAppRouterEndpoint,
copilotRuntimeNextJSPagesRouterEndpoint: () => copilotRuntimeNextJSPagesRouterEndpoint,
copilotRuntimeNodeExpressEndpoint: () => copilotRuntimeNodeExpressEndpoint,
copilotRuntimeNodeHttpEndpoint: () => copilotRuntimeNodeHttpEndpoint,
createContext: () => createContext,
createLogger: () => createLogger,
extractParametersFromSchema: () => extractParametersFromSchema,
flattenToolCallsNoDuplicates: () => flattenToolCallsNoDuplicates,
generateMcpToolInstructions: () => generateMcpToolInstructions,
getCommonConfig: () => getCommonConfig,
langGraphPlatformEndpoint: () => langGraphPlatformEndpoint,
resolveEndpointType: () => resolveEndpointType
});
module.exports = __toCommonJS(src_exports);
// Side-effect import: installs the Metadata Reflection API polyfill that the
// decorator-based dependencies (type-graphql, class-transformer) rely on.
var import_reflect_metadata = require("reflect-metadata");
// src/service-adapters/openai/openai-adapter.ts
var import_openai = __toESM(require("openai"));
// src/service-adapters/openai/utils.ts
var import_shared = require("@copilotkit/shared");
/**
 * Trim `messages` so the estimated token total fits within `maxTokens`
 * (defaulting to the model's known context window).
 *
 * System/developer messages are always kept and have their cost reserved up
 * front; the remaining budget is then spent on the most recent other
 * messages, walking the history from newest to oldest. The returned array
 * preserves the original message order.
 *
 * Throws when the tool definitions alone, or the system messages alone,
 * exceed the budget.
 */
function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
  // Fall back to the model's context window when no explicit budget is given.
  let remaining = maxTokens || maxTokensForOpenAIModel(model);
  const toolsNumTokens = countToolsTokens(model, tools);
  if (toolsNumTokens > remaining) {
    throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${remaining}`);
  }
  remaining -= toolsNumTokens;
  // Reserve room for every system/developer message before anything else.
  for (const message of messages) {
    if (message.role === "system" || message.role === "developer") {
      remaining -= countMessageTokens(model, message);
      if (remaining < 0) {
        throw new Error("Not enough tokens for system message.");
      }
    }
  }
  const kept = [];
  let truncated = false;
  // Walk newest-to-oldest; once one message doesn't fit, all older
  // non-system messages are dropped too.
  for (let i = messages.length - 1; i >= 0; i--) {
    const message = messages[i];
    if (message.role === "system" || message.role === "developer") {
      kept.unshift(message);
      continue;
    }
    if (truncated) {
      continue;
    }
    const cost = countMessageTokens(model, message);
    if (remaining < cost) {
      truncated = true;
      continue;
    }
    kept.unshift(message);
    remaining -= cost;
  }
  return kept;
}
__name(limitMessagesToTokenCount, "limitMessagesToTokenCount");
/**
 * Look up the context-window size (in tokens) for an OpenAI model name.
 * Unknown models fall back to DEFAULT_MAX_TOKENS.
 */
function maxTokensForOpenAIModel(model) {
  const knownLimit = maxTokensByModel[model];
  return knownLimit || DEFAULT_MAX_TOKENS;
}
__name(maxTokensForOpenAIModel, "maxTokensForOpenAIModel");
// Fallback context window (128k tokens) for models missing from the table below.
var DEFAULT_MAX_TOKENS = 128e3;
// Context-window sizes in tokens, keyed by OpenAI model name.
var maxTokensByModel = {
// o1
o1: 2e5,
"o1-2024-12-17": 2e5,
"o1-mini": 128e3,
"o1-mini-2024-09-12": 128e3,
"o1-preview": 128e3,
"o1-preview-2024-09-12": 128e3,
// o3-mini
"o3-mini": 2e5,
"o3-mini-2025-01-31": 2e5,
// GPT-4
"gpt-4o": 128e3,
"chatgpt-4o-latest": 128e3,
"gpt-4o-2024-08-06": 128e3,
"gpt-4o-2024-05-13": 128e3,
"gpt-4o-mini": 128e3,
"gpt-4o-mini-2024-07-18": 128e3,
"gpt-4-turbo": 128e3,
"gpt-4-turbo-2024-04-09": 128e3,
"gpt-4-0125-preview": 128e3,
"gpt-4-turbo-preview": 128e3,
"gpt-4-1106-preview": 128e3,
"gpt-4-vision-preview": 128e3,
"gpt-4-1106-vision-preview": 128e3,
"gpt-4-32k": 32768,
"gpt-4-32k-0613": 32768,
"gpt-4-32k-0314": 32768,
"gpt-4": 8192,
"gpt-4-0613": 8192,
"gpt-4-0314": 8192,
// GPT-3.5
"gpt-3.5-turbo-0125": 16385,
"gpt-3.5-turbo": 16385,
"gpt-3.5-turbo-1106": 16385,
"gpt-3.5-turbo-instruct": 4096,
"gpt-3.5-turbo-16k": 16385,
"gpt-3.5-turbo-0613": 4096,
"gpt-3.5-turbo-16k-0613": 16385,
"gpt-3.5-turbo-0301": 4097
};
/**
 * Estimate the token cost of the tool/function definitions by serializing
 * them to JSON and counting tokens on the serialized text.
 */
function countToolsTokens(model, tools) {
  // No tools means no serialization overhead at all.
  if (!tools.length) {
    return 0;
  }
  const serialized = JSON.stringify(tools);
  return countTokens(model, serialized);
}
__name(countToolsTokens, "countToolsTokens");
/** Estimate the token cost of a single chat message's text content. */
function countMessageTokens(model, message) {
  const text = message.content || "";
  return countTokens(model, text);
}
__name(countMessageTokens, "countMessageTokens");
/**
 * Crude token estimate: roughly one token per three characters of text.
 * The `model` parameter is accepted for interface symmetry but is unused.
 */
function countTokens(model, text) {
  const APPROX_CHARS_PER_TOKEN = 3;
  return text.length / APPROX_CHARS_PER_TOKEN;
}
// Preserve the function's name for stack traces (esbuild keep-names shim).
__name(countTokens, "countTokens");
/**
 * Convert a CopilotKit action input into OpenAI's tool (function) format.
 * The action's JSON-schema string is parsed into an object; parseJson is
 * called with {} as its second argument (presumably a parse-failure
 * fallback — verify against @copilotkit/shared).
 */
function convertActionInputToOpenAITool(action) {
  const parameters = (0, import_shared.parseJson)(action.jsonSchema, {});
  return {
    type: "function",
    function: {
      name: action.name,
      description: action.description,
      parameters
    }
  };
}
__name(convertActionInputToOpenAITool, "convertActionInputToOpenAITool");
/**
 * Convert a CopilotKit message into OpenAI's chat-completion message shape.
 *
 * - Text messages keep their role, except "system", which is downgraded to
 *   "developer" unless options.keepSystemRole is set.
 * - Image messages become user messages carrying a base64 data-URL part.
 * - Action executions become assistant messages with tool_calls.
 * - Action results become tool messages.
 * - Any other message kind yields undefined.
 */
function convertMessageToOpenAIMessage(message, options) {
  const keepSystemRole = (options || { keepSystemRole: false }).keepSystemRole;
  if (message.isTextMessage()) {
    const role = message.role === "system" && !keepSystemRole ? "developer" : message.role;
    return { role, content: message.content };
  }
  if (message.isImageMessage()) {
    const imagePart = {
      type: "image_url",
      image_url: {
        url: `data:image/${message.format};base64,${message.bytes}`
      }
    };
    return { role: "user", content: [imagePart] };
  }
  if (message.isActionExecutionMessage()) {
    const toolCall = {
      id: message.id,
      type: "function",
      function: {
        name: message.name,
        arguments: JSON.stringify(message.arguments)
      }
    };
    return { role: "assistant", tool_calls: [toolCall] };
  }
  if (message.isResultMessage()) {
    return {
      role: "tool",
      content: message.result,
      tool_call_id: message.actionExecutionId
    };
  }
  // Unknown message kinds fall through to undefined, matching prior behavior.
}
// Preserve the function's name for stack traces (esbuild keep-names shim).
__name(convertMessageToOpenAIMessage, "convertMessageToOpenAIMessage");
/**
 * The OpenAI Assistants API does not accept system/developer roles inside a
 * thread, so rewrite such messages as assistant messages with an explicit
 * marker prefix. Other messages pass through as a shallow copy.
 */
function convertSystemMessageToAssistantAPI(message) {
  const converted = { ...message };
  if (converted.role === "system" || converted.role === "developer") {
    converted.role = "assistant";
    converted.content = "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content;
  }
  return converted;
}
// Preserve the function's name for stack traces (esbuild keep-names shim).
__name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
// src/service-adapters/openai/openai-adapter.ts
var import_shared2 = require("@copilotkit/shared");
// Model used when neither the constructor params nor the request specify one.
var DEFAULT_MODEL = "gpt-4o";
/**
 * Service adapter for the OpenAI chat-completions API. Converts CopilotKit
 * messages/actions to OpenAI format, streams the completion, and replays the
 * deltas as text-message / action-execution events on the event source.
 */
var OpenAIAdapter = class {
model = DEFAULT_MODEL;
disableParallelToolCalls = false;
_openai;
keepSystemRole = false;
get openai() {
return this._openai;
}
constructor(params) {
this._openai = (params == null ? void 0 : params.openai) || new import_openai.default({});
if (params == null ? void 0 : params.model) {
this.model = params.model;
}
this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
}
async process(request) {
const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
const tools = actions.map(convertActionInputToOpenAITool);
const threadId = threadIdFromRequest ?? (0, import_shared2.randomUUID)();
// Collect the ids of action executions present in the history so orphaned
// tool results (results whose call is missing) can be dropped below.
const validToolUseIds = /* @__PURE__ */ new Set();
for (const message of messages) {
if (message.isActionExecutionMessage()) {
validToolUseIds.add(message.id);
}
}
const filteredMessages = messages.filter((message) => {
if (message.isResultMessage()) {
if (!validToolUseIds.has(message.actionExecutionId)) {
return false;
}
// Each call id may be consumed by at most one result message.
validToolUseIds.delete(message.actionExecutionId);
return true;
}
return true;
});
let openaiMessages = filteredMessages.map((m) => convertMessageToOpenAIMessage(m, {
keepSystemRole: this.keepSystemRole
}));
// Trim the history to the model's context window.
openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
// "function" is shorthand for forcing one specific tool by name.
toolChoice = {
type: "function",
function: {
name: forwardedParameters.toolChoiceFunctionName
}
};
}
try {
const stream = this.openai.beta.chat.completions.stream({
model,
stream: true,
messages: openaiMessages,
...tools.length > 0 && {
tools
},
...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
max_tokens: forwardedParameters.maxTokens
},
...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
stop: forwardedParameters.stop
},
...toolChoice && {
tool_choice: toolChoice
},
...this.disableParallelToolCalls && {
parallel_tool_calls: false
},
// NOTE(review): truthy spread drops temperature 0 (a valid value) —
// confirm whether a `!= null` check was intended.
...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
temperature: forwardedParameters.temperature
}
});
// Replay the OpenAI delta stream as CopilotKit events. `mode` tracks
// whether we are currently inside a text message or a tool call.
eventSource.stream(async (eventStream$) => {
var _a, _b;
let mode = null;
let currentMessageId;
let currentToolCallId;
try {
for await (const chunk of stream) {
if (chunk.choices.length === 0) {
continue;
}
const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
const content = chunk.choices[0].delta.content;
// Close the open message/tool call when the stream switches kind.
if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
mode = null;
eventStream$.sendTextMessageEnd({
messageId: currentMessageId
});
} else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
mode = null;
eventStream$.sendActionExecutionEnd({
actionExecutionId: currentToolCallId
});
}
// Open a new message or tool call as needed.
if (mode === null) {
if (toolCall == null ? void 0 : toolCall.id) {
mode = "function";
currentToolCallId = toolCall.id;
eventStream$.sendActionExecutionStart({
actionExecutionId: currentToolCallId,
parentMessageId: chunk.id,
actionName: toolCall.function.name
});
} else if (content) {
mode = "message";
currentMessageId = chunk.id;
eventStream$.sendTextMessageStart({
messageId: currentMessageId
});
}
}
// Forward the current chunk's payload.
if (mode === "message" && content) {
eventStream$.sendTextMessageContent({
messageId: currentMessageId,
content
});
} else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
eventStream$.sendActionExecutionArgs({
actionExecutionId: currentToolCallId,
args: toolCall.function.arguments
});
}
}
// Flush whatever is still open when the stream ends.
if (mode === "message") {
eventStream$.sendTextMessageEnd({
messageId: currentMessageId
});
} else if (mode === "function") {
eventStream$.sendActionExecutionEnd({
actionExecutionId: currentToolCallId
});
}
} catch (error) {
console.error("[OpenAI] Error processing stream:", error);
throw error;
}
eventStream$.complete();
});
} catch (error) {
console.error("[OpenAI] Error during API call:", error);
throw error;
}
return {
threadId
};
}
};
__name(OpenAIAdapter, "OpenAIAdapter");
// src/service-adapters/langchain/utils.ts
var import_messages = require("@langchain/core/messages");
var import_tools = require("@langchain/core/tools");
var import_shared3 = require("@copilotkit/shared");
/**
 * Map a CopilotKit message onto the corresponding LangChain message class.
 * Text messages map by role (user/assistant/system); action executions
 * become AIMessages carrying tool_calls; results become ToolMessages.
 * Any other message kind yields undefined.
 */
function convertMessageToLangChainMessage(message) {
  if (message.isTextMessage()) {
    switch (message.role) {
      case "user":
        return new import_messages.HumanMessage(message.content);
      case "assistant":
        return new import_messages.AIMessage(message.content);
      case "system":
        return new import_messages.SystemMessage(message.content);
    }
    return void 0;
  }
  if (message.isActionExecutionMessage()) {
    const toolCall = {
      id: message.id,
      args: message.arguments,
      name: message.name
    };
    return new import_messages.AIMessage({ content: "", tool_calls: [toolCall] });
  }
  if (message.isResultMessage()) {
    return new import_messages.ToolMessage({
      content: message.result,
      tool_call_id: message.actionExecutionId
    });
  }
}
__name(convertMessageToLangChainMessage, "convertMessageToLangChainMessage");
/**
 * Wrap a CopilotKit action input as a LangChain DynamicStructuredTool.
 * The tool's func is a no-op returning "" — the action itself is executed
 * elsewhere; the tool exists so the model can emit matching tool calls.
 */
function convertActionInputToLangChainTool(actionInput) {
  const zodSchema = (0, import_shared3.convertJsonSchemaToZodSchema)(JSON.parse(actionInput.jsonSchema), true);
  return new import_tools.DynamicStructuredTool({
    name: actionInput.name,
    description: actionInput.description,
    schema: zodSchema,
    func: async () => ""
  });
}
__name(convertActionInputToLangChainTool, "convertActionInputToLangChainTool");
// Tag-based type guards for LangChain message values: compares the
// Object.prototype.toString tag (presumably set by the classes via
// Symbol.toStringTag — verify in @langchain/core), which identifies them
// even across duplicated package copies where `instanceof` would fail.
var __lcTagOf = (value) => Object.prototype.toString.call(value);
function isAIMessage(message) {
  return __lcTagOf(message) === "[object AIMessage]";
}
__name(isAIMessage, "isAIMessage");
function isAIMessageChunk(message) {
  return __lcTagOf(message) === "[object AIMessageChunk]";
}
__name(isAIMessageChunk, "isAIMessageChunk");
function isBaseMessageChunk(message) {
  return __lcTagOf(message) === "[object BaseMessageChunk]";
}
__name(isBaseMessageChunk, "isBaseMessageChunk");
/**
 * When a message is produced while an action execution is still pending,
 * close out that execution with a placeholder "Sending a message" result.
 * No-op when there is no pending action execution.
 */
function maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution) {
  if (!actionExecution) {
    return;
  }
  eventStream$.sendActionExecutionResult({
    actionExecutionId: actionExecution.id,
    actionName: actionExecution.name,
    result: "Sending a message"
  });
}
// Preserve the function's name for stack traces (esbuild keep-names shim).
__name(maybeSendActionExecutionResultIsMessage, "maybeSendActionExecutionResultIsMessage");
/**
 * Bridge an arbitrary LangChain chainFn result onto the CopilotKit event
 * stream. Handles four result shapes: plain string, AIMessage,
 * BaseMessageChunk, and a ReadableStream of chunks. Anything else is sent as
 * the pending action's result when one exists, and is an error otherwise.
 * Always completes the event stream on normal exit.
 */
async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
if (typeof result === "string") {
// Plain string: a standalone text message, or the pending action's result.
if (!actionExecution) {
eventStream$.sendTextMessage((0, import_shared3.randomId)(), result);
} else {
eventStream$.sendActionExecutionResult({
actionExecutionId: actionExecution.id,
actionName: actionExecution.name,
result
});
}
} else if (isAIMessage(result)) {
// Full (non-streamed) AI message: emit its text and tool calls in one go.
maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
if (result.content) {
eventStream$.sendTextMessage((0, import_shared3.randomId)(), result.content);
}
for (const toolCall of result.tool_calls) {
eventStream$.sendActionExecution({
actionExecutionId: toolCall.id || (0, import_shared3.randomId)(),
actionName: toolCall.name,
args: JSON.stringify(toolCall.args)
});
}
} else if (isBaseMessageChunk(result)) {
// Single message chunk: content/tool_calls live under lc_kwargs.
maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
eventStream$.sendTextMessage((0, import_shared3.randomId)(), result.content);
}
if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
eventStream$.sendActionExecution({
actionExecutionId: toolCall.id || (0, import_shared3.randomId)(),
actionName: toolCall.name,
args: JSON.stringify(toolCall.args)
});
}
}
} else if (result && "getReader" in result) {
// Streamed result: replay chunk-by-chunk. `mode` tracks whether a text
// message or a tool call is currently open; `toolCallDetails` accumulates
// the tool call's identity across partial chunks, and an index change
// signals a new tool call within the same parent message.
maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
let reader = result.getReader();
let mode = null;
let currentMessageId;
const toolCallDetails = {
name: null,
id: null,
index: null,
prevIndex: null
};
while (true) {
try {
const { done, value } = await reader.read();
let toolCallName = void 0;
let toolCallId = void 0;
let toolCallArgs = void 0;
let hasToolCall = false;
let content = "";
if (value && value.content) {
content = Array.isArray(value.content) ? ((_d = value.content[0]) == null ? void 0 : _d.text) ?? "" : value.content;
}
if (isAIMessageChunk(value)) {
let chunk = (_e = value.tool_call_chunks) == null ? void 0 : _e[0];
toolCallArgs = chunk == null ? void 0 : chunk.args;
hasToolCall = chunk != void 0;
if (chunk == null ? void 0 : chunk.name)
toolCallDetails.name = chunk.name;
if ((chunk == null ? void 0 : chunk.index) != null) {
toolCallDetails.index = chunk.index;
if (toolCallDetails.prevIndex == null)
toolCallDetails.prevIndex = chunk.index;
}
if (chunk == null ? void 0 : chunk.id)
toolCallDetails.id = chunk.index != null ? `${chunk.id}-idx-${chunk.index}` : chunk.id;
toolCallName = toolCallDetails.name;
toolCallId = toolCallDetails.id;
} else if (isBaseMessageChunk(value)) {
let chunk = (_g = (_f = value.additional_kwargs) == null ? void 0 : _f.tool_calls) == null ? void 0 : _g[0];
toolCallName = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.name;
toolCallId = chunk == null ? void 0 : chunk.id;
toolCallArgs = (_i = chunk == null ? void 0 : chunk.function) == null ? void 0 : _i.arguments;
hasToolCall = (chunk == null ? void 0 : chunk.function) != void 0;
}
// Close the open message/tool call when the stream switches or ends.
if (mode === "message" && (toolCallId || done)) {
mode = null;
eventStream$.sendTextMessageEnd({
messageId: currentMessageId
});
} else if (mode === "function" && (!hasToolCall || done)) {
mode = null;
eventStream$.sendActionExecutionEnd({
actionExecutionId: toolCallId
});
}
if (done) {
break;
}
// Open a new message or tool call as needed.
if (mode === null) {
if (hasToolCall && toolCallId && toolCallName) {
mode = "function";
eventStream$.sendActionExecutionStart({
actionExecutionId: toolCallId,
actionName: toolCallName,
parentMessageId: (_j = value.lc_kwargs) == null ? void 0 : _j.id
});
} else if (content) {
mode = "message";
currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || (0, import_shared3.randomId)();
eventStream$.sendTextMessageStart({
messageId: currentMessageId
});
}
}
if (mode === "message" && content) {
eventStream$.sendTextMessageContent({
messageId: currentMessageId,
content
});
} else if (mode === "function" && toolCallArgs) {
// A changed index means a new tool call: end the previous one and
// start the next before forwarding its arguments.
if (toolCallDetails.index !== toolCallDetails.prevIndex) {
eventStream$.sendActionExecutionEnd({
actionExecutionId: toolCallId
});
eventStream$.sendActionExecutionStart({
actionExecutionId: toolCallId,
actionName: toolCallName,
parentMessageId: (_l = value.lc_kwargs) == null ? void 0 : _l.id
});
toolCallDetails.prevIndex = toolCallDetails.index;
}
eventStream$.sendActionExecutionArgs({
actionExecutionId: toolCallId,
args: toolCallArgs
});
}
} catch (error) {
// Best-effort: abandon the stream on read errors rather than throwing.
console.error("Error reading from stream", error);
break;
}
}
} else if (actionExecution) {
// Any other value: stringify it as the pending action's result.
eventStream$.sendActionExecutionResult({
actionExecutionId: actionExecution.id,
actionName: actionExecution.name,
result: encodeResult(result)
});
} else {
throw new Error("Invalid return type from LangChain function.");
}
eventStream$.complete();
}
__name(streamLangChainResponse, "streamLangChainResponse");
/**
 * Normalize an arbitrary LangChain return value to a string:
 * undefined → "", strings pass through, everything else is JSON-encoded.
 */
function encodeResult(result) {
  if (result === void 0) {
    return "";
  }
  return typeof result === "string" ? result : JSON.stringify(result);
}
// Preserve the function's name for stack traces (esbuild keep-names shim).
__name(encodeResult, "encodeResult");
// src/service-adapters/langchain/langchain-adapter.ts
var import_shared4 = require("@copilotkit/shared");
var import_promises = require("@langchain/core/callbacks/promises");
/**
 * Service adapter that delegates each request to a user-supplied LangChain
 * chain function (`options.chainFn`) and streams whatever it returns back to
 * the client via streamLangChainResponse.
 */
var LangChainAdapter = class {
  options;
  /**
   * To use LangChain as a backend, provide a handler function to the adapter
   * with your custom LangChain logic.
   */
  constructor(options) {
    this.options = options;
  }
  async process(request) {
    try {
      const { eventSource, model, actions, messages, runId, threadId: requestThreadId } = request;
      // Reuse the caller's thread id when present, otherwise mint one.
      const threadId = requestThreadId ?? (0, import_shared4.randomUUID)();
      const result = await this.options.chainFn({
        messages: messages.map(convertMessageToLangChainMessage),
        tools: actions.map(convertActionInputToLangChainTool),
        model,
        threadId,
        runId
      });
      eventSource.stream(async (eventStream$) => {
        await streamLangChainResponse({ result, eventStream$ });
      });
      return { threadId };
    } finally {
      // Let any pending LangChain callbacks flush before returning.
      await (0, import_promises.awaitAllCallbacks)();
    }
  }
};
__name(LangChainAdapter, "LangChainAdapter");
// src/service-adapters/google/google-genai-adapter.ts
var import_google_gauth = require("@langchain/google-gauth");
var import_messages2 = require("@langchain/core/messages");
/**
 * Gemini adapter built on LangChainAdapter: wires a ChatGoogle model
 * (default "gemini-1.5-pro", v1beta API) with the request's tools and
 * streams the result, tagging the stream with the thread id.
 */
var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
  constructor(options) {
    super({
      chainFn: async ({ messages, tools, threadId }) => {
        // Drop AIMessages that carry neither text nor tool calls
        // (presumably because the Gemini API rejects empty assistant
        // turns — verify against @langchain/google-gauth).
        const filteredMessages = messages.filter((message) => {
          if (!(message instanceof import_messages2.AIMessage)) {
            return true;
          }
          const hasText = message.content && String(message.content).trim().length > 0;
          const hasToolCalls = message.tool_calls && message.tool_calls.length > 0;
          return hasText || hasToolCalls;
        });
        const model = new import_google_gauth.ChatGoogle({
          modelName: (options == null ? void 0 : options.model) ?? "gemini-1.5-pro",
          apiVersion: "v1beta"
        }).bindTools(tools);
        return model.stream(filteredMessages, {
          metadata: { conversation_id: threadId }
        });
      }
    });
  }
};
__name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
// src/service-adapters/openai/openai-assistant-adapter.ts
var import_openai2 = __toESM(require("openai"));
/**
 * Service adapter for the OpenAI Assistants API (beta).
 *
 * Each request either submits tool outputs to a pending run (when the last
 * message is an action result) or posts the latest user message and starts a
 * new run, streaming run events back onto the CopilotKit event stream.
 */
var OpenAIAssistantAdapter = class {
  openai;
  codeInterpreterEnabled;
  assistantId;
  fileSearchEnabled;
  disableParallelToolCalls;
  keepSystemRole = false;
  /**
   * @param params.openai - optional preconfigured OpenAI client; a default one is created otherwise.
   * @param params.assistantId - id of the OpenAI Assistant to run.
   * @param params.codeInterpreterEnabled - defaults to true; pass false to disable.
   * @param params.fileSearchEnabled - defaults to true; pass false to disable.
   * @param params.disableParallelToolCalls - defaults to false.
   * @param params.keepSystemRole - defaults to false.
   */
  constructor(params) {
    this.openai = params.openai || new import_openai2.default({});
    // BUGFIX: the previous code read `params.x === false || true`, which is
    // always true (the comparison binds tighter than ||), so these features
    // could never be disabled. `!== false` keeps the default-on behavior
    // while honoring an explicit `false`.
    this.codeInterpreterEnabled = params.codeInterpreterEnabled !== false;
    this.fileSearchEnabled = params.fileSearchEnabled !== false;
    this.assistantId = params.assistantId;
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
    this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
  }
  /**
   * Route the request: submit tool outputs to an existing run when the last
   * message is an action result, otherwise post the user message and start a
   * new run. Returns the thread/run ids (also mirrored into extensions).
   */
  async process(request) {
    var _a, _b;
    const { messages, actions, eventSource, runId, forwardedParameters } = request;
    // Reuse the OpenAI thread carried through request extensions, if any.
    let threadId = (_b = (_a = request.extensions) == null ? void 0 : _a.openaiAssistantAPI) == null ? void 0 : _b.threadId;
    if (!threadId) {
      threadId = (await this.openai.beta.threads.create()).id;
    }
    const lastMessage = messages.at(-1);
    // Guard: an empty `messages` array previously crashed with a TypeError
    // on `lastMessage.isResultMessage()`.
    if (!lastMessage) {
      throw new Error("No actionable message found in the messages");
    }
    let nextRunId = void 0;
    if (lastMessage.isResultMessage() && runId) {
      nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
    } else if (lastMessage.isTextMessage()) {
      nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters);
    } else {
      throw new Error("No actionable message found in the messages");
    }
    return {
      runId: nextRunId,
      threadId,
      extensions: {
        ...request.extensions,
        openaiAssistantAPI: {
          threadId,
          runId: nextRunId
        }
      }
    };
  }
  /** Feed action results back into a run that is waiting on tool outputs. */
  async submitToolOutputs(threadId, runId, messages, eventSource) {
    let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
    if (!run.required_action) {
      throw new Error("No tool outputs required");
    }
    // Match result messages in the history to the tool calls the run expects.
    const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map((toolCall) => toolCall.id);
    const resultMessages = messages.filter((message) => message.isResultMessage() && toolCallsIds.includes(message.actionExecutionId));
    if (toolCallsIds.length != resultMessages.length) {
      throw new Error("Number of function results does not match the number of tool calls");
    }
    const toolOutputs = resultMessages.map((message) => {
      return {
        tool_call_id: message.actionExecutionId,
        output: message.result
      };
    });
    const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
      tool_outputs: toolOutputs,
      ...this.disableParallelToolCalls && {
        parallel_tool_calls: false
      }
    });
    await this.streamResponse(stream, eventSource);
    return runId;
  }
  /** Post the latest user message to the thread and start a streamed run. */
  async submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters) {
    messages = [...messages];
    // The first message in the history is treated as the run instructions.
    const instructionsMessage = messages.shift();
    const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
    const userMessage = messages.map((m) => convertMessageToOpenAIMessage(m, {
      keepSystemRole: this.keepSystemRole
    })).map(convertSystemMessageToAssistantAPI).at(-1);
    if (userMessage.role !== "user") {
      throw new Error("No user message found");
    }
    await this.openai.beta.threads.messages.create(threadId, {
      role: "user",
      content: userMessage.content
    });
    const openaiTools = actions.map(convertActionInputToOpenAITool);
    // Built-in tools are appended after the action-derived function tools.
    const tools = [
      ...openaiTools,
      ...this.codeInterpreterEnabled ? [{ type: "code_interpreter" }] : [],
      ...this.fileSearchEnabled ? [{ type: "file_search" }] : []
    ];
    let stream = this.openai.beta.threads.runs.stream(threadId, {
      assistant_id: this.assistantId,
      instructions,
      tools,
      ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
        max_completion_tokens: forwardedParameters.maxTokens
      },
      ...this.disableParallelToolCalls && {
        parallel_tool_calls: false
      }
    });
    await this.streamResponse(stream, eventSource);
    return getRunIdFromStream(stream);
  }
  /**
   * Translate Assistants run events into CopilotKit stream events: message
   * lifecycle events become text-message events, and tool-call step deltas
   * become action-execution events.
   */
  async streamResponse(stream, eventSource) {
    eventSource.stream(async (eventStream$) => {
      var _a, _b, _c, _d, _e, _f;
      let inFunctionCall = false;
      let currentMessageId;
      let currentToolCallId;
      for await (const chunk of stream) {
        switch (chunk.event) {
          case "thread.message.created":
            // A new assistant message implicitly ends any open tool call.
            if (inFunctionCall) {
              eventStream$.sendActionExecutionEnd({
                actionExecutionId: currentToolCallId
              });
            }
            currentMessageId = chunk.data.id;
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
            break;
          case "thread.message.delta":
            if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
              eventStream$.sendTextMessageContent({
                messageId: currentMessageId,
                content: (_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value
              });
            }
            break;
          case "thread.message.completed":
            eventStream$.sendTextMessageEnd({
              messageId: currentMessageId
            });
            break;
          case "thread.run.step.delta": {
            let toolCallId;
            let toolCallName;
            let toolCallArgs;
            if (chunk.data.delta.step_details.type === "tool_calls" && ((_c = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _c[0].type) === "function") {
              toolCallId = (_d = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _d[0].id;
              toolCallName = (_e = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _e[0].function.name;
              toolCallArgs = (_f = chunk.data.delta.step_details.tool_calls) == null ? void 0 : _f[0].function.arguments;
            }
            // A delta carrying both id and name marks the start of the next
            // tool call; a delta with only arguments extends the current one.
            if (toolCallName && toolCallId) {
              if (inFunctionCall) {
                eventStream$.sendActionExecutionEnd({
                  actionExecutionId: currentToolCallId
                });
              }
              inFunctionCall = true;
              currentToolCallId = toolCallId;
              eventStream$.sendActionExecutionStart({
                actionExecutionId: currentToolCallId,
                parentMessageId: chunk.data.id,
                actionName: toolCallName
              });
            } else if (toolCallArgs) {
              eventStream$.sendActionExecutionArgs({
                actionExecutionId: currentToolCallId,
                args: toolCallArgs
              });
            }
            break;
          }
        }
      }
      if (inFunctionCall) {
        eventStream$.sendActionExecutionEnd({
          actionExecutionId: currentToolCallId
        });
      }
      eventStream$.complete();
    });
  }
};
// Preserve the class's name for stack traces (esbuild keep-names shim).
__name(OpenAIAssistantAdapter, "OpenAIAssistantAdapter");
/**
 * Resolve with the run id as soon as the Assistants event stream reports
 * "thread.run.created", detaching the listener afterwards. The promise never
 * rejects; it simply stays pending if the event is never emitted.
 */
function getRunIdFromStream(stream) {
  return new Promise((resolve) => {
    const onEvent = /* @__PURE__ */ __name((event) => {
      if (event.event === "thread.run.created") {
        stream.off("event", onEvent);
        resolve(event.data.id);
      }
    }, "runIdGetter");
    stream.on("event", onEvent);
  });
}
__name(getRunIdFromStream, "getRunIdFromStream");
// src/service-adapters/unify/unify-adapter.ts
var import_openai3 = __toESM(require("openai"));
var import_shared5 = require("@copilotkit/shared");
/**
 * Service adapter for the Unify router (OpenAI-compatible API at
 * api.unify.ai). Before the first completion it emits an informational
 * "Model used: …" message, then replays the delta stream as CopilotKit
 * text-message / action-execution events.
 */
var UnifyAdapter = class {
apiKey;
model;
start;
constructor(options) {
if (options == null ? void 0 : options.apiKey) {
this.apiKey = options.apiKey;
} else {
// NOTE(review): literal placeholder string, not an env lookup — confirm
// this is intended (process.env.UNIFY_API_KEY may have been meant).
this.apiKey = "UNIFY_API_KEY";
}
this.model = options == null ? void 0 : options.model;
// NOTE(review): `start` is instance state, so the "Model used" preamble
// is only emitted for the first request served by this adapter instance.
this.start = true;
}
async process(request) {
const tools = request.actions.map(convertActionInputToOpenAITool);
const openai = new import_openai3.default({
apiKey: this.apiKey,
baseURL: "https://api.unify.ai/v0/"
});
const forwardedParameters = request.forwardedParameters;
const messages = request.messages.map((m) => convertMessageToOpenAIMessage(m));
const stream = await openai.chat.completions.create({
model: this.model,
messages,
stream: true,
...tools.length > 0 && {
tools
},
// NOTE(review): truthy spread drops temperature 0 — confirm intended.
...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
temperature: forwardedParameters.temperature
}
});
let model = null;
let currentMessageId;
let currentToolCallId;
// Same streaming state machine as OpenAIAdapter: `mode` tracks whether a
// text message or a tool call is currently open.
request.eventSource.stream(async (eventStream$) => {
var _a, _b;
let mode = null;
for await (const chunk of stream) {
if (this.start) {
model = chunk.model;
currentMessageId = (0, import_shared5.randomId)();
eventStream$.sendTextMessageStart({
messageId: currentMessageId
});
eventStream$.sendTextMessageContent({
messageId: currentMessageId,
content: `Model used: ${model}
`
});
eventStream$.sendTextMessageEnd({
messageId: currentMessageId
});
this.start = false;
}
const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
const content = chunk.choices[0].delta.content;
// Close the open message/tool call when the stream switches kind.
if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
mode = null;
eventStream$.sendTextMessageEnd({
messageId: currentMessageId
});
} else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
mode = null;
eventStream$.sendActionExecutionEnd({
actionExecutionId: currentToolCallId
});
}
// Open a new message or tool call as needed.
if (mode === null) {
if (toolCall == null ? void 0 : toolCall.id) {
mode = "function";
currentToolCallId = toolCall.id;
eventStream$.sendActionExecutionStart({
actionExecutionId: currentToolCallId,
actionName: toolCall.function.name
});
} else if (content) {
mode = "message";
currentMessageId = chunk.id;
eventStream$.sendTextMessageStart({
messageId: currentMessageId
});
}
}
if (mode === "message" && content) {
eventStream$.sendTextMessageContent({
messageId: currentMessageId,
content
});
} else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
eventStream$.sendActionExecutionArgs({
actionExecutionId: currentToolCallId,
args: toolCall.function.arguments
});
}
}
// Flush whatever is still open when the stream ends.
if (mode === "message") {
eventStream$.sendTextMessageEnd({
messageId: currentMessageId
});
} else if (mode === "function") {
eventStream$.sendActionExecutionEnd({
actionExecutionId: currentToolCallId
});
}
eventStream$.complete();
});
return {
threadId: request.threadId || (0, import_shared5.randomUUID)()
};
}
};
__name(UnifyAdapter, "UnifyAdapter");
// src/service-adapters/groq/groq-adapter.ts
var import_groq_sdk = require("groq-sdk");
var import_shared6 = require("@copilotkit/shared");
var DEFAULT_MODEL2 = "llama-3.3-70b-versatile";
var GroqAdapter = class {
  model = DEFAULT_MODEL2;
  disableParallelToolCalls = false;
  _groq;
  get groq() {
    return this._groq;
  }
  constructor(params) {
    // Use the injected client when provided (e.g. for testing), otherwise
    // construct one that reads its key from the environment.
    this._groq = (params == null ? void 0 : params.groq) || new import_groq_sdk.Groq({});
    if (params == null ? void 0 : params.model) {
      this.model = params.model;
    }
    this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
  }
  /**
   * Streams a chat completion from Groq's OpenAI-compatible API and
   * translates the stream chunks into CopilotKit text-message /
   * action-execution events, completing the event stream when done.
   */
  async process(request) {
    const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
    const tools = actions.map(convertActionInputToOpenAITool);
    let openaiMessages = messages.map((m) => convertMessageToOpenAIMessage(m, {
      keepSystemRole: true
    }));
    // Trim history so the request fits the model's context window.
    openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
    let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
    if ((forwardedParameters == null ? void 0 : forwardedParameters.toolChoice) === "function") {
      // "function" is shorthand for forcing one named tool.
      toolChoice = {
        type: "function",
        function: {
          name: forwardedParameters.toolChoiceFunctionName
        }
      };
    }
    const stream = await this.groq.chat.completions.create({
      model,
      stream: true,
      messages: openaiMessages,
      ...tools.length > 0 && {
        tools
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) && {
        max_tokens: forwardedParameters.maxTokens
      },
      ...(forwardedParameters == null ? void 0 : forwardedParameters.stop) && {
        stop: forwardedParameters.stop
      },
      ...toolChoice && {
        tool_choice: toolChoice
      },
      ...this.disableParallelToolCalls && {
        parallel_tool_calls: false
      },
      // Null-check (not truthiness) so an explicit temperature of 0 is
      // still forwarded to the API.
      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) != null && {
        temperature: forwardedParameters.temperature
      }
    });
    eventSource.stream(async (eventStream$) => {
      var _a, _b;
      // mode tracks which event sequence is currently open:
      // "message" (text), "function" (tool call), or null (none).
      let mode = null;
      let currentMessageId;
      let currentToolCallId;
      for await (const chunk of stream) {
        // Some OpenAI-compatible streams emit chunks with an empty
        // `choices` array (e.g. trailing usage chunks); skip them
        // instead of crashing on `.delta`.
        const choice = chunk.choices[0];
        if (!choice) {
          continue;
        }
        const toolCall = (_a = choice.delta.tool_calls) == null ? void 0 : _a[0];
        const content = choice.delta.content;
        // Close the open sequence when the chunk switches kinds.
        if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
          mode = null;
          eventStream$.sendTextMessageEnd({
            messageId: currentMessageId
          });
        } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
          mode = null;
          eventStream$.sendActionExecutionEnd({
            actionExecutionId: currentToolCallId
          });
        }
        // Open a new sequence if none is active.
        if (mode === null) {
          if (toolCall == null ? void 0 : toolCall.id) {
            mode = "function";
            currentToolCallId = toolCall.id;
            eventStream$.sendActionExecutionStart({
              actionExecutionId: currentToolCallId,
              actionName: toolCall.function.name,
              parentMessageId: chunk.id
            });
          } else if (content) {
            mode = "message";
            currentMessageId = chunk.id;
            eventStream$.sendTextMessageStart({
              messageId: currentMessageId
            });
          }
        }
        // Forward the chunk's payload into the open sequence.
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent({
            messageId: currentMessageId,
            content
          });
        } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
          eventStream$.sendActionExecutionArgs({
            actionExecutionId: currentToolCallId,
            args: toolCall.function.arguments
          });
        }
      }
      // Close whatever sequence is still open once the stream ends.
      if (mode === "message") {
        eventStream$.sendTextMessageEnd({
          messageId: currentMessageId
        });
      } else if (mode === "function") {
        eventStream$.sendActionExecutionEnd({
          actionExecutionId: currentToolCallId
        });
      }
      eventStream$.complete();
    });
    return {
      threadId: request.threadId || (0, import_shared6.randomUUID)()
    };
  }
};
__name(GroqAdapter, "GroqAdapter");
// src/lib/integrations/shared.ts
var import_type_graphql26 = require("type-graphql");
// src/graphql/resolvers/copilot.resolver.ts
var import_type_graphql20 = require("type-graphql");
var import_rxjs4 = require("rxjs");
// src/graphql/inputs/generate-copilot-response.input.ts
var import_type_graphql18 = require("type-graphql");
// src/graphql/inputs/message.input.ts
var import_type_graphql3 = require("type-graphql");
// src/graphql/types/enums.ts
var import_type_graphql = require("type-graphql");
// String enum of GraphQL message roles (downleveled TS string enum:
// identical key/value pairs, no reverse mapping).
var MessageRole;
(function(target) {
  for (const role of ["user", "assistant", "system", "tool", "developer"]) {
    target[role] = role;
  }
})(MessageRole || (MessageRole = {}));
// String enum of Copilot request kinds (downleveled TS string enum:
// identical key/value pairs, no reverse mapping).
var CopilotRequestType;
(function(target) {
  for (const kind of ["Chat", "Task", "TextareaCompletion", "TextareaPopover", "Suggestion"]) {
    target[kind] = kind;
  }
})(CopilotRequestType || (CopilotRequestType = {}));
// String enum of frontend-action availability states (downleveled TS
// string enum: identical key/value pairs, no reverse mapping).
var ActionInputAvailability;
(function(target) {
  for (const state of ["disabled", "enabled", "remote"]) {
    target[state] = state;
  }
})(ActionInputAvailability || (ActionInputAvailability = {}));
// Register the three enums with type-graphql so they can be used as
// GraphQL schema types; the name/description appear in the generated SDL.
(0, import_type_graphql.registerEnumType)(MessageRole, {
  name: "MessageRole",
  description: "The role of the message"
});
(0, import_type_graphql.registerEnumType)(CopilotRequestType, {
  name: "CopilotRequestType",
  description: "The type of Copilot request"
});
(0, import_type_graphql.registerEnumType)(ActionInputAvailability, {
  name: "ActionInputAvailability",
  description: "The availability of the frontend action"
});
// src/graphql/types/base/index.ts
var import_type_graphql2 = require("type-graphql");
// TypeScript's downlevel `__decorate` helper (compiler-emitted). Applies
// `decorators` right-to-left. Behavior switches on call arity `c`:
//  - c < 3: class decoration — each decorator gets the class and may
//    return a replacement constructor.
//  - c === 3: property decoration — each decorator gets (target, key).
//  - c > 3: method/accessor decoration — each decorator gets
//    (target, key, descriptor) and may return a replacement descriptor,
//    which is installed via Object.defineProperty at the end.
// Delegates to Reflect.decorate when a polyfill (reflect-metadata) exists.
function _ts_decorate(decorators, target, key, desc) {
  var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
  if (typeof Reflect === "object" && typeof Reflect.decorate === "function")
    r = Reflect.decorate(decorators, target, key, desc);
  else
    for (var i = decorators.length - 1; i >= 0; i--)
      if (d = decorators[i])
        r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
  return c > 3 && r && Object.defineProperty(target, key, r), r;
}
__name(_ts_decorate, "_ts_decorate");
// TypeScript's downlevel `__metadata` helper: emits design-time metadata
// via Reflect.metadata when the reflect-metadata polyfill is loaded;
// otherwise returns undefined (metadata is silently skipped).
function _ts_metadata(k, v) {
  const canEmitMetadata = typeof Reflect === "object" && typeof Reflect.metadata === "function";
  return canEmitMetadata ? Reflect.metadata(k, v) : void 0;
}
__name(_ts_metadata, "_ts_metadata");
// GraphQL @InputType base shared by message inputs: an `id` plus a
// `createdAt` timestamp. Decorators are applied after the class body via
// the downlevel _ts_decorate/_ts_metadata helpers.
var BaseMessageInput = class {
  id;
  createdAt;
};
__name(BaseMessageInput, "BaseMessageInput");
// Field(() => String) on `id`, with String design-type metadata.
_ts_decorate([
  (0, import_type_graphql2.Field)(() => String),
  _ts_metadata("design:type", String)
], BaseMessageInput.prototype, "id", void 0);
// Field(() => Date) on `createdAt`; falls back to Object if Date is
// somehow undefined at decoration time.
_ts_decorate([
  (0, import_type_graphql2.Field)(() => Date),
  _ts_metadata("design:type", typeof Date === "undefined" ? Object : Date)
], BaseMessageInput.prototype, "createdAt", void 0);
// Class-level @InputType() decoration (may replace the constructor).
BaseMessageInput = _ts_decorate([
  (0, import_type_graphql2.InputType)()
], BaseMessageInput);
// src/graphql/inputs/message.input.ts
// Duplicate of the TypeScript downlevel `__decorate` helper (the bundler
// emits one copy per source module). Applies `decorators` right-to-left;
// arity `c` selects class (c < 3), property (c === 3), or
// method/accessor (c > 3, descriptor reinstalled via defineProperty)
// decoration. Delegates to Reflect.decorate when polyfilled.
function _ts_decorate2(decorators, target, key, desc) {
  var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
  if (typeof Reflect === "object" && typeof Reflect.decorate === "function")
    r = Reflect.decorate(decorators, target, key, desc);
  else
    for (var i = decorators.length - 1; i >= 0; i--)
      if (d = decorators[i])
        r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
  return c > 3 && r && Object.defineProperty(target, key, r), r;
}
__name(_ts_decorate2, "_ts_decorate");
// Duplicate of the downlevel `__metadata` helper: uses Reflect.metadata
// when the reflect-metadata polyfill is loaded, otherwise returns
// undefined so metadata emission is silently skipped.
function _ts_metadata2(k, v) {
  const canEmitMetadata = typeof Reflect === "object" && typeof Reflect.metadata === "function";
  return canEmitMetadata ? Reflect.metadata(k, v) : void 0;
}
__name(_ts_metadata2, "_ts_metadata");
// GraphQL @InputType for an incoming message. All sub-fields are
// nullable; presumably exactly one is populated to discriminate the
// message kind (text / action execution / result / agent state / image)
// — verify against the resolver that consumes MessageInput.
var MessageInput = class extends BaseMessageInput {
  textMessage;
  actionExecutionMessage;
  resultMessage;
  agentStateMessage;
  imageMessage;
};
__name(MessageInput, "MessageInput");
// Per-field decoration; the `typeof X === "undefined" ? Object : X`
// pattern guards against the referenced input class being undefined at
// decoration time.
_ts_decorate2([
  (0, import_type_graphql3.Field)(() => TextMessageInput, {
    nullable: true
  }),
  _ts_metadata2("design:type", typeof TextMessageInput === "undefined" ? Object : TextMessageInput)
], MessageInput.prototype, "textMessage", void 0);
_ts_decorate2([
  (0, import_type_graphql3.Field)(() => ActionExecutionMessageInput, {
    nullable: true
  }),
  _ts_metadata2("design:type", typeof ActionExecutionMessageInput === "undefined" ? Object : ActionExecutionMessageInput)
], MessageInput.prototype, "actionExecutionMessage", void 0);
_ts_decorate2([
  (0, import_type_graphql3.Field)(() => ResultMessageInput, {
    nullable: true
  }),
  _ts_metadata2("design:type", typeof ResultMessageInput === "undefined" ? Object : ResultMessageInput)
], MessageInput.prototype, "resultMessage", void 0);
_ts_decorate2([
(0, import_type_graphql3.Field)(() => AgentStateMessageInput, {
nullable: true
}),
_ts_metadata2("design:type", typeof AgentStateMessageInput === "undefined" ? Object