@baseai/core

The Web AI Framework's core - BaseAI.dev

1 line · 60.3 kB
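
Before the raw source-map listing, here is a minimal usage sketch of the API defined in the embedded sources (`Pipe`, `streamText`, `getRunner`, `processChunk`, `isContent`). It is illustrative only: the import path assumes the package root re-exports these symbols, and the pipe config fields (`name`, `model`, `tools`, `apiKey`) and the `LANGBASE_API_KEY` variable are assumptions inferred from the bundled code, not documented values.

```ts
// Hedged usage sketch based on the bundled sources below.
import {Pipe, streamText, getRunner, processChunk, isContent} from '@baseai/core';

async function main() {
	const pipe = new Pipe({
		// Placeholder config; field names are inferred from what the Pipe class
		// reads (`name`, `model`, `tools`, `apiKey`) — values are hypothetical.
		name: 'summarizer',
		model: 'openai:gpt-4o-mini',
		tools: [],
		apiKey: process.env.LANGBASE_API_KEY!,
	} as any);

	// `streamText` calls `pipe.run({..., stream: true})` and resolves to
	// `{stream, threadId, rawResponse?}` per the `RunResponseStream` interface.
	const {stream} = await streamText({
		pipe,
		messages: [{role: 'user', content: 'Who founded BaseAI?'}],
	});

	// `getRunner` wraps the ReadableStream in a ChatCompletionStream runner;
	// `processChunk`/`isContent` follow the JSDoc example in the sources.
	const runner = getRunner(stream);
	for await (const chunk of runner) {
		const processed = processChunk({rawChunk: chunk});
		if (isContent(processed)) process.stdout.write(processed.content);
	}
}

main().catch(console.error);
```
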
{"version":3,"sources":["../src/helpers/stream.ts","../src/common/errors.ts","../src/common/request.ts","../src/data/models.ts","../src/utils/get-llm-api-key.ts","../src/utils/is-prod.ts","../src/utils/local-server-running.ts","../src/utils/get-provider.ts","../src/pipes/pipes.ts"],"sourcesContent":["import {ChatCompletionStream} from 'openai/lib/ChatCompletionStream';\nimport {ChunkStream} from 'src/pipes';\nimport {Stream} from 'openai/streaming';\nimport {ToolCallResult} from 'types/pipes';\n\nexport interface Runner extends ChatCompletionStream<null> {}\n\n/**\n * Converts a ReadableStream into a Runner.\n *\n * @param readableStream - The ReadableStream to convert.\n * @returns The converted Runner.\n */\nexport const fromReadableStream = (readableStream: ReadableStream): Runner => {\n\treturn ChatCompletionStream.fromReadableStream(readableStream);\n};\n\n/**\n * Returns a runner for the given readable stream.\n *\n * @param readableStream - The readable stream to create a runner for.\n * @returns A runner for the given readable stream.\n */\nexport const getRunner = (readableStream: ReadableStream) => {\n\treturn fromReadableStream(readableStream);\n};\n\n/**\n * Retrieves the text part from a given ChunkStream.\n *\n * @param chunk - The ChunkStream object.\n * @returns The text content of the first choice's delta, or an empty string if it doesn't exist.\n */\nexport const getTextPart = (chunk: ChunkStream) => {\n\treturn chunk.choices[0]?.delta?.content || '';\n};\n\n/**\n * Handles the response stream from a given `Response` object.\n *\n * @param {Object} params - The parameters for handling the response stream.\n * @param {Response} params.response - The API response to handle.\n * @param {boolean} params.rawResponse - Optional flag to include raw response headers.\n *\n * @returns {Object} An object containing the processed stream, thread ID, and optionally raw response headers.\n * @returns {ReadableStream<any>} return.stream - The readable stream created from the response.\n * @returns {string | null} return.threadId - The thread ID extracted from the response headers.\n * @returns {Object} [return.rawResponse] - Optional raw response headers.\n * @returns {Record<string, string>} return.rawResponse.headers - The headers from the raw response.\n */\nexport function handleResponseStream({\n\tresponse,\n\trawResponse,\n}: {\n\tresponse: Response;\n\trawResponse?: boolean;\n}): {\n\tstream: any;\n\tthreadId: string | null;\n\trawResponse?: {\n\t\theaders: Record<string, string>;\n\t};\n} {\n\tconst controller = new AbortController();\n\tconst streamSSE = Stream.fromSSEResponse(response, controller);\n\tconst stream = streamSSE.toReadableStream();\n\n\tconst result: {\n\t\tstream: ReadableStream<any>;\n\t\tthreadId: string | null;\n\t\trawResponse?: {\n\t\t\theaders: Record<string, string>;\n\t\t};\n\t} = {\n\t\tstream,\n\t\tthreadId: response.headers.get('lb-thread-id'),\n\t};\n\tif (rawResponse) {\n\t\tresult.rawResponse = {\n\t\t\theaders: Object.fromEntries(response.headers.entries()),\n\t\t};\n\t}\n\treturn result;\n}\n\n/**\n * Retrieves tool calls from a given readable stream.\n *\n * @param stream - The readable stream from which to extract tool calls.\n * @returns A promise that resolves to an array of `ToolCall` objects.\n */\nexport async function getToolsFromStream(\n\tstream: ReadableStream<any>,\n): Promise<ToolCallResult[]> {\n\tlet run = getRunner(stream);\n\tconst {choices} = await run.finalChatCompletion();\n\treturn choices[0].message.tool_calls;\n}\n","import 
type {Headers} from './../../types/index';\n\nexport class APIError extends Error {\n\treadonly status: number | undefined;\n\treadonly headers: Headers | undefined;\n\treadonly error: Object | undefined;\n\n\treadonly code: string | null | undefined;\n\treadonly param: string | null | undefined;\n\treadonly type: string | undefined;\n\n\treadonly request_id: string | null | undefined;\n\n\tconstructor(\n\t\tstatus: number | undefined,\n\t\terror: Object | undefined,\n\t\tmessage: string | undefined,\n\t\theaders: Headers | undefined,\n\t) {\n\t\tsuper(APIError.makeMessage(status, error, message));\n\t\tthis.status = status;\n\t\tthis.headers = headers;\n\t\tthis.request_id = headers?.['lb-request-id'];\n\n\t\tconst data = error as Record<string, any>;\n\t\tthis.error = data;\n\t\tthis.code = data?.['code'];\n\t\tthis.status = data?.['status'];\n\t\t// this.param = data?.['param'];\n\t\t// this.type = data?.['type'];\n\t}\n\n\tprivate static makeMessage(\n\t\tstatus: number | undefined,\n\t\terror: any,\n\t\tmessage: string | undefined,\n\t): string {\n\t\tconst msg = error?.message\n\t\t\t? typeof error.message === 'string'\n\t\t\t\t? error.message\n\t\t\t\t: JSON.stringify(error.message)\n\t\t\t: error\n\t\t\t\t? JSON.stringify(error)\n\t\t\t\t: message;\n\n\t\tif (status && msg) {\n\t\t\treturn `${status} ${msg}`;\n\t\t}\n\t\tif (status) {\n\t\t\treturn `${status} status code (no body)`;\n\t\t}\n\t\tif (msg) {\n\t\t\treturn msg;\n\t\t}\n\t\treturn '(no status code or body)';\n\t}\n\n\tstatic generate(\n\t\tstatus: number | undefined,\n\t\terrorResponse: Object | undefined,\n\t\tmessage: string | undefined,\n\t\theaders: Headers | undefined,\n\t): APIError {\n\t\tif (!status) {\n\t\t\treturn new APIConnectionError({\n\t\t\t\tcause:\n\t\t\t\t\terrorResponse instanceof Error ? errorResponse : undefined,\n\t\t\t});\n\t\t}\n\n\t\tconst error = (errorResponse as Record<string, any>)?.['error'];\n\n\t\tswitch (status) {\n\t\t\tcase 400:\n\t\t\t\treturn new BadRequestError(status, error, message, headers);\n\t\t\tcase 401:\n\t\t\t\treturn new AuthenticationError(status, error, message, headers);\n\t\t\tcase 403:\n\t\t\t\treturn new PermissionDeniedError(\n\t\t\t\t\tstatus,\n\t\t\t\t\terror,\n\t\t\t\t\tmessage,\n\t\t\t\t\theaders,\n\t\t\t\t);\n\t\t\tcase 404:\n\t\t\t\treturn new NotFoundError(status, error, message, headers);\n\t\t\tcase 409:\n\t\t\t\treturn new ConflictError(status, error, message, headers);\n\t\t\tcase 422:\n\t\t\t\treturn new UnprocessableEntityError(\n\t\t\t\t\tstatus,\n\t\t\t\t\terror,\n\t\t\t\t\tmessage,\n\t\t\t\t\theaders,\n\t\t\t\t);\n\t\t\tcase 429:\n\t\t\t\treturn new RateLimitError(status, error, message, headers);\n\t\t\tdefault:\n\t\t\t\treturn status >= 500\n\t\t\t\t\t? new InternalServerError(status, error, message, headers)\n\t\t\t\t\t: new APIError(status, error, message, headers);\n\t\t}\n\t}\n}\n\nexport class APIConnectionError extends APIError {\n\toverride readonly status: undefined = undefined;\n\n\tconstructor({message, cause}: {message?: string; cause?: Error}) {\n\t\tsuper(undefined, undefined, message || 'Connection error.', undefined);\n\t\tif (cause) (this as Error).cause = cause;\n\t}\n}\n\nexport class APIConnectionTimeoutError extends APIConnectionError {\n\tconstructor({message}: {message?: string} = {}) {\n\t\tsuper({message: message ?? 
'Request timed out.'});\n\t}\n}\n\nexport class BadRequestError extends APIError {\n\toverride readonly status: 400 = 400;\n}\n\nexport class AuthenticationError extends APIError {\n\toverride readonly status: 401 = 401;\n}\n\nexport class PermissionDeniedError extends APIError {\n\toverride readonly status: 403 = 403;\n}\n\nexport class NotFoundError extends APIError {\n\toverride readonly status: 404 = 404;\n}\n\nexport class ConflictError extends APIError {\n\toverride readonly status: 409 = 409;\n}\n\nexport class UnprocessableEntityError extends APIError {\n\toverride readonly status: 422 = 422;\n}\n\nexport class RateLimitError extends APIError {\n\toverride readonly status: 429 = 429;\n}\n\nexport class InternalServerError extends APIError {}\n","import {handleResponseStream} from 'src/helpers';\nimport {APIConnectionError, APIError} from './errors';\n\ninterface RequestOptions {\n\tendpoint: string;\n\tmethod: string;\n\theaders?: Record<string, string>;\n\tbody?: any;\n\tstream?: boolean;\n\trawResponse?: boolean;\n}\n\ninterface RequestConfig {\n\tapiKey?: string;\n\tbaseUrl: string;\n\ttimeout?: number;\n\tllmKey?: string;\n}\n\ninterface SendOptions extends RequestOptions {\n\tendpoint: string;\n}\n\ninterface MakeRequestParams {\n\turl: string;\n\toptions: RequestOptions;\n\theaders: Record<string, string>;\n}\n\ninterface HandleGenerateResponseParams {\n\tresponse: Response;\n\tisChat: boolean;\n\tthreadId: string | null;\n\trawResponse: boolean;\n}\n\nexport class Request {\n\tprivate config: RequestConfig;\n\n\tconstructor(config: RequestConfig) {\n\t\tthis.config = config;\n\t}\n\n\tprivate async send<T>({endpoint, ...options}: SendOptions): Promise<T> {\n\t\tconst url = this.buildUrl({endpoint});\n\t\tconst headers = this.buildHeaders({headers: options.headers});\n\n\t\tlet response: Response;\n\t\ttry {\n\t\t\tresponse = await this.makeRequest({\n\t\t\t\turl,\n\t\t\t\toptions: {...options, endpoint},\n\t\t\t\theaders,\n\t\t\t});\n\t\t} catch (error) {\n\t\t\tthrow new APIConnectionError({\n\t\t\t\tcause: error instanceof Error ? error : undefined,\n\t\t\t});\n\t\t}\n\n\t\tif (!response.ok) {\n\t\t\tawait this.handleErrorResponse({response});\n\t\t}\n\n\t\tconst threadId = response.headers.get('lb-thread-id');\n\n\t\tif (options.body?.stream) {\n\t\t\treturn handleResponseStream({\n\t\t\t\tresponse,\n\t\t\t\trawResponse: options.body.rawResponse,\n\t\t\t}) as T;\n\t\t}\n\n\t\treturn this.handleRunResponse({\n\t\t\tresponse,\n\t\t\tisChat: options.body?.chat,\n\t\t\tthreadId,\n\t\t\trawResponse: options.body?.rawResponse ?? false,\n\t\t});\n\t}\n\n\tprivate buildUrl({endpoint}: {endpoint: string}): string {\n\t\treturn `${this.config.baseUrl}${endpoint}`;\n\t}\n\n\tprivate buildHeaders({\n\t\theaders,\n\t}: {\n\t\theaders?: Record<string, string>;\n\t}): Record<string, string> {\n\t\treturn {\n\t\t\t'Content-Type': 'application/json',\n\t\t\tAuthorization: `Bearer ${this.config.apiKey}`,\n\t\t\t'LB-LLM-Key': this.config.llmKey ?? 
'',\n\t\t\t...headers,\n\t\t};\n\t}\n\n\tprivate async makeRequest({\n\t\turl,\n\t\toptions,\n\t\theaders,\n\t}: MakeRequestParams): Promise<Response> {\n\t\tconst resp = await fetch(url, {\n\t\t\tmethod: options.method,\n\t\t\theaders,\n\t\t\tbody: JSON.stringify(options.body),\n\t\t\t...(this.config.timeout && {\n\t\t\t\tsignal: AbortSignal.timeout(this.config.timeout),\n\t\t\t}),\n\t\t});\n\t\treturn resp;\n\t}\n\n\tprivate async handleErrorResponse({\n\t\tresponse,\n\t}: {\n\t\tresponse: Response;\n\t}): Promise<never> {\n\t\tlet errorBody;\n\t\ttry {\n\t\t\terrorBody = await response.json();\n\t\t} catch {\n\t\t\terrorBody = await response.text();\n\t\t}\n\t\tthrow APIError.generate(\n\t\t\tresponse.status,\n\t\t\terrorBody,\n\t\t\tresponse.statusText,\n\t\t\tObject.fromEntries(response.headers.entries()),\n\t\t);\n\t}\n\n\tprivate async handleRunResponse({\n\t\tresponse,\n\t\tisChat,\n\t\tthreadId,\n\t\trawResponse,\n\t}: HandleGenerateResponseParams): Promise<any> {\n\t\tconst generateResponse = await response.json();\n\t\tconst buildResponse = generateResponse.raw\n\t\t\t? {\n\t\t\t\t\tcompletion: generateResponse.completion,\n\t\t\t\t\t...generateResponse.raw,\n\t\t\t\t}\n\t\t\t: generateResponse;\n\n\t\tconst result: any = {\n\t\t\t...buildResponse,\n\t\t};\n\n\t\tresult.threadId = threadId;\n\n\t\tif (rawResponse) {\n\t\t\tresult.rawResponse = {\n\t\t\t\theaders: Object.fromEntries(response.headers.entries()),\n\t\t\t};\n\t\t}\n\n\t\treturn result;\n\t}\n\n\tasync post<T>(options: Omit<RequestOptions, 'method'>): Promise<T> {\n\t\t// logger('Request.post.options');\n\t\t// logger(options, {depth: null, colors: true});\n\n\t\treturn this.send<T>({...options, method: 'POST'});\n\t}\n\n\tasync get<T>(options: Omit<RequestOptions, 'method' | 'body'>): Promise<T> {\n\t\treturn this.send<T>({...options, method: 'GET'});\n\t}\n\n\tasync put<T>(options: Omit<RequestOptions, 'method'>): Promise<T> {\n\t\treturn this.send<T>({...options, method: 'PUT'});\n\t}\n\n\tasync delete<T>(\n\t\toptions: Omit<RequestOptions, 'method' | 'body'>,\n\t): Promise<T> {\n\t\treturn this.send<T>({...options, method: 'DELETE'});\n\t}\n}\n","export const OPEN_AI: string = 'OpenAI';\nexport const ANTHROPIC: string = 'Anthropic';\nexport const TOGETHER_AI: string = 'Together';\nexport const GOOGLE: string = 'Google';\nexport const GROQ: string = 'Groq';\nexport const COHERE: string = 'Cohere';\nexport const FIREWORKS_AI: string = 'Fireworks AI';\nexport const PERPLEXITY: string = 'Perplexity';\nexport const MISTRAL_AI: string = 'Mistral AI';\nexport const DEEPINFRA: string = 'deepinfra';\nexport const BEDROCK: string = 'bedrock';\nexport const AZURE_OPEN_AI: string = 'azure-openai';\nexport const OLLAMA: string = 'ollama';\nexport const X_AI: string = 'xAI';\n\ninterface Model {\n\tid: string;\n\tprovider: string;\n\tpromptCost: number;\n\tcompletionCost: number;\n\trequestCost?: number;\n}\n\ninterface ModelsByProviderInclCosts {\n\t[provider: string]: Model[];\n}\n\nexport const modelsByProvider: ModelsByProviderInclCosts = {\n\t[OPEN_AI]: [\n\t\t{\n\t\t\tid: 'gpt-4o',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 5.0,\n\t\t\tcompletionCost: 15.0,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-4o-2024-08-06',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 2.5,\n\t\t\tcompletionCost: 10.0,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-4o-mini',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 0.15,\n\t\t\tcompletionCost: 0.6,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-4-turbo',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 10.0,\n\t\t\tcompletionCost: 
30.0,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-4-turbo-preview',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 10.0,\n\t\t\tcompletionCost: 30.0,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-4-0125-preview',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 10.0,\n\t\t\tcompletionCost: 30.0,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-4-1106-preview',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 10.0,\n\t\t\tcompletionCost: 30.0,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-4',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 30.0,\n\t\t\tcompletionCost: 60.0,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-4-0613',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 30.0,\n\t\t\tcompletionCost: 60.0,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-4-32k',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 60.0,\n\t\t\tcompletionCost: 120.0,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-3.5-turbo',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 0.5,\n\t\t\tcompletionCost: 1.5,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-3.5-turbo-0125',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 0.5,\n\t\t\tcompletionCost: 1.5,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-3.5-turbo-1106',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 1.0,\n\t\t\tcompletionCost: 2.0,\n\t\t},\n\t\t{\n\t\t\tid: 'gpt-3.5-turbo-16k',\n\t\t\tprovider: OPEN_AI,\n\t\t\tpromptCost: 3.0,\n\t\t\tcompletionCost: 4.0,\n\t\t},\n\t],\n\t[TOGETHER_AI]: [\n\t\t{\n\t\t\tid: 'meta-llama/Llama-3.3-70B-Instruct-Turbo',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.88,\n\t\t\tcompletionCost: 0.88,\n\t\t},\n\t\t{\n\t\t\tid: 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 5,\n\t\t\tcompletionCost: 5,\n\t\t},\n\t\t{\n\t\t\tid: 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.88,\n\t\t\tcompletionCost: 0.88,\n\t\t},\n\t\t{\n\t\t\tid: 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.18,\n\t\t\tcompletionCost: 0.18,\n\t\t},\n\t\t{\n\t\t\tid: 'meta-llama/Llama-3-70b-chat-hf',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.9,\n\t\t\tcompletionCost: 0.9,\n\t\t},\n\t\t{\n\t\t\tid: 'meta-llama/Llama-3-8b-chat-hf',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.2,\n\t\t\tcompletionCost: 0.2,\n\t\t},\n\t\t{\n\t\t\tid: 'togethercomputer/Llama-2-7B-32K-Instruct',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.2,\n\t\t\tcompletionCost: 0.2,\n\t\t},\n\t\t{\n\t\t\tid: 'meta-llama/Llama-2-13b-chat-hf',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.225,\n\t\t\tcompletionCost: 0.225,\n\t\t},\n\t\t{\n\t\t\tid: 'meta-llama/Llama-2-70b-chat-hf',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.9,\n\t\t\tcompletionCost: 0.9,\n\t\t},\n\t\t{\n\t\t\tid: 'google/gemma-7b-it',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.2,\n\t\t\tcompletionCost: 0.2,\n\t\t},\n\t\t{\n\t\t\tid: 'google/gemma-2b-it',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.1,\n\t\t\tcompletionCost: 0.1,\n\t\t},\n\t\t{\n\t\t\tid: 'mistralai/Mistral-7B-Instruct-v0.1',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.2,\n\t\t\tcompletionCost: 0.2,\n\t\t},\n\t\t{\n\t\t\tid: 'mistralai/Mistral-7B-Instruct-v0.2',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.2,\n\t\t\tcompletionCost: 0.2,\n\t\t},\n\t\t{\n\t\t\tid: 'mistralai/Mixtral-8x7B-Instruct-v0.1',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 0.6,\n\t\t\tcompletionCost: 0.6,\n\t\t},\n\t\t{\n\t\t\tid: 'mistralai/Mixtral-8x22B-Instruct-v0.1',\n\t\t\tprovider: TOGETHER_AI,\n\t\t\tpromptCost: 1.2,\n\t\t\tcompletionCost: 1.2,\n\t\t},\n\t\t{\n\t\t\tid: 'databricks/dbrx-instruct',\n\t\t\tprovider: 
TOGETHER_AI,\n\t\t\tpromptCost: 1.2,\n\t\t\tcompletionCost: 1.2,\n\t\t},\n\t],\n\t[ANTHROPIC]: [\n\t\t{\n\t\t\tid: 'claude-3-5-sonnet-latest',\n\t\t\tprovider: ANTHROPIC,\n\t\t\tpromptCost: 3,\n\t\t\tcompletionCost: 15,\n\t\t},\n\t\t{\n\t\t\tid: 'claude-3-5-sonnet-20240620',\n\t\t\tprovider: ANTHROPIC,\n\t\t\tpromptCost: 3,\n\t\t\tcompletionCost: 15,\n\t\t},\n\t\t{\n\t\t\tid: 'claude-3-opus-20240229',\n\t\t\tprovider: ANTHROPIC,\n\t\t\tpromptCost: 15,\n\t\t\tcompletionCost: 75,\n\t\t},\n\t\t{\n\t\t\tid: 'claude-3-sonnet-20240229',\n\t\t\tprovider: ANTHROPIC,\n\t\t\tpromptCost: 3,\n\t\t\tcompletionCost: 15,\n\t\t},\n\t\t{\n\t\t\tid: 'claude-3-haiku-20240307',\n\t\t\tprovider: ANTHROPIC,\n\t\t\tpromptCost: 0.25,\n\t\t\tcompletionCost: 1.25,\n\t\t},\n\t\t{\n\t\t\tid: 'claude-3-5-haiku-20241022',\n\t\t\tprovider: ANTHROPIC,\n\t\t\tpromptCost: 1,\n\t\t\tcompletionCost: 5,\n\t\t},\n\t],\n\t[GROQ]: [\n\t\t{\n\t\t\tid: 'llama-3.3-70b-versatile',\n\t\t\tprovider: GROQ,\n\t\t\tpromptCost: 0.59,\n\t\t\tcompletionCost: 0.79,\n\t\t},\n\t\t{\n\t\t\tid: 'llama-3.1-70b-versatile',\n\t\t\tprovider: GROQ,\n\t\t\tpromptCost: 0.59,\n\t\t\tcompletionCost: 0.79,\n\t\t},\n\t\t{\n\t\t\tid: 'llama-3.1-8b-instant',\n\t\t\tprovider: GROQ,\n\t\t\tpromptCost: 0.59,\n\t\t\tcompletionCost: 0.79,\n\t\t},\n\t\t{\n\t\t\tid: 'llama3-70b-8192',\n\t\t\tprovider: GROQ,\n\t\t\tpromptCost: 0.59,\n\t\t\tcompletionCost: 0.79,\n\t\t},\n\t\t{\n\t\t\tid: 'llama3-8b-8192',\n\t\t\tprovider: GROQ,\n\t\t\tpromptCost: 0.05,\n\t\t\tcompletionCost: 0.1,\n\t\t},\n\t\t{\n\t\t\tid: 'mixtral-8x7b-32768',\n\t\t\tprovider: GROQ,\n\t\t\tpromptCost: 0.27,\n\t\t\tcompletionCost: 0.27,\n\t\t},\n\t\t{\n\t\t\tid: 'gemma2-9b-it',\n\t\t\tprovider: GROQ,\n\t\t\tpromptCost: 0.2,\n\t\t\tcompletionCost: 0.2,\n\t\t},\n\t\t{\n\t\t\tid: 'gemma-7b-it',\n\t\t\tprovider: GROQ,\n\t\t\tpromptCost: 0.07,\n\t\t\tcompletionCost: 0.07,\n\t\t},\n\t],\n\t[GOOGLE]: [\n\t\t{\n\t\t\tid: 'gemini-1.5-pro-latest',\n\t\t\tprovider: GOOGLE,\n\t\t\tpromptCost: 3.5,\n\t\t\tcompletionCost: 10.5,\n\t\t},\n\t\t{\n\t\t\tid: 'gemini-1.5-flash-latest',\n\t\t\tprovider: GOOGLE,\n\t\t\tpromptCost: 0.075,\n\t\t\tcompletionCost: 0.3,\n\t\t},\n\t\t{\n\t\t\tid: 'gemini-1.5-flash-8b-latest',\n\t\t\tprovider: GOOGLE,\n\t\t\tpromptCost: 0.0375,\n\t\t\tcompletionCost: 0.15,\n\t\t},\n\t\t{\n\t\t\tid: 'gemini-pro',\n\t\t\tprovider: GOOGLE,\n\t\t\tpromptCost: 0.5,\n\t\t\tcompletionCost: 1.5,\n\t\t},\n\t],\n\t[COHERE]: [\n\t\t{\n\t\t\tid: 'command-r',\n\t\t\tprovider: COHERE,\n\t\t\tpromptCost: 0.5,\n\t\t\tcompletionCost: 1.5,\n\t\t},\n\t\t{\n\t\t\tid: 'command-r-plus',\n\t\t\tprovider: COHERE,\n\t\t\tpromptCost: 3,\n\t\t\tcompletionCost: 15,\n\t\t},\n\t],\n\t[FIREWORKS_AI]: [\n\t\t{\n\t\t\tid: 'llama-v3p3-70b-instruct',\n\t\t\tprovider: FIREWORKS_AI,\n\t\t\tpromptCost: 0.88,\n\t\t\tcompletionCost: 0.88,\n\t\t},\n\t\t{\n\t\t\tid: 'llama-v3p1-405b-instruct',\n\t\t\tprovider: FIREWORKS_AI,\n\t\t\tpromptCost: 3,\n\t\t\tcompletionCost: 3,\n\t\t},\n\t\t{\n\t\t\tid: 'llama-v3p1-70b-instruct',\n\t\t\tprovider: FIREWORKS_AI,\n\t\t\tpromptCost: 0.9,\n\t\t\tcompletionCost: 0.9,\n\t\t},\n\t\t{\n\t\t\tid: 'llama-v3p1-8b-instruct',\n\t\t\tprovider: FIREWORKS_AI,\n\t\t\tpromptCost: 0.2,\n\t\t\tcompletionCost: 0.2,\n\t\t},\n\t\t{\n\t\t\tid: 'yi-large',\n\t\t\tprovider: FIREWORKS_AI,\n\t\t\tpromptCost: 3,\n\t\t\tcompletionCost: 3,\n\t\t},\n\t\t{\n\t\t\tid: 'llama-v3-70b-instruct',\n\t\t\tprovider: FIREWORKS_AI,\n\t\t\tpromptCost: 0.9,\n\t\t\tcompletionCost: 0.9,\n\t\t},\n\t],\n\t[PERPLEXITY]: [\n\t\t{\n\t\t\tid: 
'llama-3.1-sonar-huge-128k-online',\n\t\t\tprovider: PERPLEXITY,\n\t\t\tpromptCost: 5,\n\t\t\tcompletionCost: 5,\n\t\t\trequestCost: 0.005,\n\t\t},\n\t\t{\n\t\t\tid: 'llama-3.1-sonar-large-128k-online',\n\t\t\tprovider: PERPLEXITY,\n\t\t\tpromptCost: 1,\n\t\t\tcompletionCost: 1,\n\t\t\trequestCost: 0.005,\n\t\t},\n\t\t{\n\t\t\tid: 'llama-3.1-sonar-small-128k-online',\n\t\t\tprovider: PERPLEXITY,\n\t\t\tpromptCost: 0.2,\n\t\t\tcompletionCost: 0.2,\n\t\t\trequestCost: 0.005,\n\t\t},\n\t\t{\n\t\t\tid: 'llama-3.1-sonar-large-128k-chat',\n\t\t\tprovider: PERPLEXITY,\n\t\t\tpromptCost: 1,\n\t\t\tcompletionCost: 1,\n\t\t},\n\t\t{\n\t\t\tid: 'llama-3.1-sonar-small-128k-chat',\n\t\t\tprovider: PERPLEXITY,\n\t\t\tpromptCost: 0.2,\n\t\t\tcompletionCost: 0.2,\n\t\t},\n\t],\n\t[MISTRAL_AI]: [\n\t\t{\n\t\t\tid: 'mistral-large-latest',\n\t\t\tprovider: MISTRAL_AI,\n\t\t\tpromptCost: 3,\n\t\t\tcompletionCost: 9,\n\t\t},\n\t\t{\n\t\t\tid: 'open-mistral-nemo',\n\t\t\tprovider: MISTRAL_AI,\n\t\t\tpromptCost: 0.3,\n\t\t\tcompletionCost: 0.3,\n\t\t},\n\t\t{\n\t\t\tid: 'codestral-latest',\n\t\t\tprovider: MISTRAL_AI,\n\t\t\tpromptCost: 1,\n\t\t\tcompletionCost: 3,\n\t\t},\n\t],\n\t[X_AI]: [\n\t\t{\n\t\t\tid: 'grok-beta',\n\t\t\tprovider: X_AI,\n\t\t\tpromptCost: 5,\n\t\t\tcompletionCost: 15,\n\t\t},\n\t],\n};\n\nexport const jsonModeModels = [\n\t'gpt-4o',\n\t'gpt-4o-mini-2024-07-18-free',\n\t'gpt-4o-2024-08-06',\n\t'gpt-4o-mini',\n\t'gpt-4-turbo',\n\t'gpt-4-turbo-preview',\n\t'gpt-4-0125-preview',\n\t'gpt-3.5-turbo',\n\t'gpt-3.5-turbo-0125',\n\t'gpt-3.5-turbo-1106',\n\t'gpt-4-1106-preview',\n\t'mistralai/Mistral-7B-Instruct-v0.1',\n\t'mistralai/Mixtral-8x7B-Instruct-v0.1',\n\t'gemini-1.5-pro-latest',\n\t'gemini-1.5-flash-latest',\n\t'gemini-1.5-flash-8b-latest',\n];\n","import {\n\tANTHROPIC,\n\tCOHERE,\n\tFIREWORKS_AI,\n\tGOOGLE,\n\tGROQ,\n\tOLLAMA,\n\tOPEN_AI,\n\tPERPLEXITY,\n\tTOGETHER_AI,\n\tX_AI,\n} from '../data/models';\n\nexport function getLLMApiKey(modelProvider: string): string {\n\tswitch (true) {\n\t\tcase modelProvider.includes(OPEN_AI):\n\t\t\treturn process.env.OPENAI_API_KEY || '';\n\t\tcase modelProvider === ANTHROPIC:\n\t\t\treturn process.env.ANTHROPIC_API_KEY || '';\n\t\tcase modelProvider === TOGETHER_AI:\n\t\t\treturn process.env.TOGETHER_API_KEY || '';\n\t\tcase modelProvider === GROQ:\n\t\t\treturn process.env.GROQ_API_KEY || '';\n\t\tcase modelProvider === GOOGLE:\n\t\t\treturn process.env.GOOGLE_API_KEY || '';\n\t\tcase modelProvider.includes(COHERE):\n\t\t\treturn process.env.COHERE_API_KEY || '';\n\t\tcase modelProvider.includes(FIREWORKS_AI):\n\t\t\treturn process.env.FIREWORKS_API_KEY || '';\n\t\tcase modelProvider.includes(PERPLEXITY):\n\t\t\treturn process.env.PERPLEXITY_API_KEY || '';\n\t\tcase modelProvider.includes(OLLAMA):\n\t\t\treturn process.env.OLLAMA_API_KEY || '';\n\t\tcase modelProvider.includes(X_AI):\n\t\t\treturn process.env.XAI_API_KEY || '';\n\n\t\tdefault:\n\t\t\tthrow new Error(`Unsupported model provider: ${modelProvider}`);\n\t}\n}\n","const FORCE_PROD = false;\nconst TEST_PROD_LOCALLY = FORCE_PROD;\n\nexport function isProd() {\n\tif (TEST_PROD_LOCALLY) return true;\n\treturn process.env.NODE_ENV === 'production';\n}\n\nexport function isLocal() {\n\treturn process.env.NODE_ENV !== 'production';\n}\n\nexport function getApiUrl(prod?: boolean) {\n\tif (prod) return 'https://api.langbase.com';\n\telse return 'http://localhost:9000';\n\n\t// TODO: Make local port configurable.\n\t// return isProd() ? 
'https://api.langbase.com' : 'http://localhost:9000';\n\t// return isProd() ? 'http://localhost:8787' : 'http://localhost:9000';\n}\n","import {getApiUrl, isProd} from './is-prod';\n\nexport async function isLocalServerRunning(): Promise<Boolean> {\n\ttry {\n\t\tconst prod = isProd();\n\t\tconst endpoint = getApiUrl(prod);\n\n\t\tconst response = await fetch(endpoint, {\n\t\t\tmode: 'no-cors',\n\t\t\tcache: 'no-cache', // Prevents caching of the request\n\t\t});\n\n\t\tconst portUseError = `\\nPort 9000 is already in use. \\nTerminate the process running on it. \\nRun \"npx baseai@latest dev\" in an new terminal to start the dev server.\\n`;\n\n\t\tif (!response.ok) {\n\t\t\tconsole.error(portUseError);\n\t\t\treturn false;\n\t\t}\n\n\t\tconst res = (await response.json()) as unknown as {\n\t\t\tsuccess: boolean;\n\t\t};\n\n\t\tif (!res.success) {\n\t\t\tconsole.error(portUseError);\n\t\t\treturn false;\n\t\t}\n\n\t\treturn true;\n\t} catch (error) {\n\t\t// Port is not in use and BaseAI dev server is not running\n\t\tconsole.error(\n\t\t\t`\\nBaseAI dev server is not running. \\nPlease run \"npx baseai@latest dev\" in a new teriminal to start dev server.\\n`,\n\t\t);\n\t\treturn false;\n\t}\n}\n","import {\n\tANTHROPIC,\n\tCOHERE,\n\tFIREWORKS_AI,\n\tGOOGLE,\n\tGROQ,\n\tMISTRAL_AI,\n\tOLLAMA,\n\tOPEN_AI,\n\tPERPLEXITY,\n\tTOGETHER_AI,\n\tX_AI,\n} from '../data/models';\n\ntype Provider =\n\t| typeof OPEN_AI\n\t| typeof ANTHROPIC\n\t| typeof TOGETHER_AI\n\t| typeof GOOGLE\n\t| typeof GROQ\n\t| typeof COHERE\n\t| typeof FIREWORKS_AI\n\t| typeof PERPLEXITY;\n\n/**\n * Retrieves the provider based on the given provider string.\n *\n * @param providerString - The provider string.\n * @returns The corresponding provider object.\n * @throws Error if the provider is unknown.\n */\nexport function getProvider(providerString: string): Provider {\n\tconst providerMap: {[key: string]: Provider} = {\n\t\topenai: OPEN_AI,\n\t\tanthropic: ANTHROPIC,\n\t\ttogether: TOGETHER_AI,\n\t\tgoogle: GOOGLE,\n\t\tgroq: GROQ,\n\t\tcohere: COHERE,\n\t\tfireworks: FIREWORKS_AI,\n\t\tperplexity: PERPLEXITY,\n\t\tollama: OLLAMA,\n\t\txai: X_AI,\n\t\tmistral: MISTRAL_AI,\n\t};\n\n\tconst provider = providerMap[providerString.toLowerCase()];\n\tif (!provider) {\n\t\tthrow new Error(`Unknown provider: ${providerString}`);\n\t}\n\treturn provider;\n}\n","import type {Runner} from 'src/helpers';\nimport {\n\tMessage,\n\tMessageRole,\n\tPipe as PipeI,\n\tToolCallResult,\n\tTools,\n} from '../../types/pipes';\nimport {Request} from '../common/request';\nimport {getLLMApiKey} from '../utils/get-llm-api-key';\nimport {getApiUrl, isProd} from '../utils/is-prod';\nimport {isLocalServerRunning} from 'src/utils/local-server-running';\nimport {getToolsFromStream} from 'src/helpers';\nimport {ANTHROPIC} from 'src/data/models';\nimport {getProvider} from 'src/utils/get-provider';\n\nexport interface Variable {\n\tname: string;\n\tvalue: string;\n}\n\nexport interface RunOptions {\n\tmessages?: Message[];\n\tvariables?: Variable[];\n\tthreadId?: string;\n\trawResponse?: boolean;\n\trunTools?: boolean;\n\ttools?: Tools[];\n\tname?: string; // Pipe name for SDK,\n\tapiKey?: string; // pipe level key for SDK\n\tllmKey?: string; // LLM API key\n}\n\nexport interface RunOptionsStream extends RunOptions {\n\tstream: boolean;\n}\n\nexport interface Usage {\n\tprompt_tokens: number;\n\tcompletion_tokens: number;\n\ttotal_tokens: number;\n}\n\nexport interface RunResponse {\n\tcompletion: string;\n\tthreadId?: string;\n\tid: string;\n\tobject: 
string;\n\tcreated: number;\n\tmodel: string;\n\tchoices: ChoiceGenerate[];\n\tusage: Usage;\n\tsystem_fingerprint: string | null;\n\trawResponse?: {\n\t\theaders: Record<string, string>;\n\t};\n}\n\nexport interface RunResponseStream {\n\tstream: ReadableStream<any>;\n\tthreadId: string | null;\n\trawResponse?: {\n\t\theaders: Record<string, string>;\n\t};\n}\n\nexport interface PipeOptions extends PipeI {\n\tmaxCalls?: number;\n\tprod?: boolean;\n}\n\ninterface ChoiceGenerate {\n\tindex: number;\n\tmessage: Message;\n\tlogprobs: boolean | null;\n\tfinish_reason: string;\n}\n\ninterface Tool {\n\trun: (...args: any[]) => Promise<any>;\n\tfunction: {\n\t\tname: string;\n\t\tdescription: string;\n\t\tparameters: any;\n\t};\n}\n\nexport class Pipe {\n\tprivate request: Request;\n\tprivate pipe: any;\n\tprivate tools: Record<string, (...args: any[]) => Promise<any>>;\n\tprivate maxCalls: number;\n\tprivate hasTools: boolean;\n\tprivate prod: boolean;\n\tprivate baseUrl: string;\n\tprivate entityApiKey?: string;\n\n\tconstructor(options: PipeOptions) {\n\t\tthis.prod = options.prod ?? isProd();\n\t\tthis.baseUrl = getApiUrl(this.prod);\n\n\t\tthis.request = new Request({\n\t\t\tapiKey: options.apiKey,\n\t\t\tbaseUrl: this.baseUrl,\n\t\t});\n\t\tthis.pipe = options;\n\t\tthis.entityApiKey = options.apiKey;\n\n\t\tdelete this.pipe.prod;\n\t\tdelete this.pipe.apiKey;\n\n\t\tthis.tools = this.getToolsFromPipe(this.pipe);\n\t\tthis.maxCalls = options.maxCalls || 100; // TODO: Find a sane default.\n\t\tthis.hasTools = Object.keys(this.tools).length > 0;\n\t}\n\n\tprivate getToolsFromPipe(\n\t\tpipe: Pipe,\n\t): Record<string, (...args: any[]) => Promise<any>> {\n\t\tconst tools: Record<string, (...args: any[]) => Promise<any>> = {};\n\t\tif (pipe.tools && Array.isArray(pipe.tools)) {\n\t\t\tpipe.tools.forEach((tool: Tool) => {\n\t\t\t\ttools[tool.function.name] = tool.run;\n\t\t\t});\n\t\t}\n\t\treturn tools;\n\t}\n\n\tprivate async runTools(toolCalls: ToolCallResult[]): Promise<Message[]> {\n\t\tconst toolPromises = toolCalls.map(async (toolCall: ToolCallResult) => {\n\t\t\tconst toolName = toolCall.function.name;\n\t\t\tconst toolParameters = JSON.parse(toolCall.function.arguments);\n\t\t\tconst toolFunction = this.tools[toolName];\n\n\t\t\tif (!toolFunction) {\n\t\t\t\tthrow new Error(\n\t\t\t\t\t`Tool ${toolName} not found. If this is intentional, please set runTools to false to disable tool execution by default.`,\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst toolResponse = await toolFunction(toolParameters);\n\n\t\t\treturn {\n\t\t\t\ttool_call_id: toolCall.id,\n\t\t\t\trole: 'tool' as MessageRole,\n\t\t\t\tname: toolName,\n\t\t\t\tcontent: JSON.stringify(toolResponse),\n\t\t\t};\n\t\t});\n\n\t\treturn Promise.all(toolPromises);\n\t}\n\n\tprivate hasNoToolCalls(message: Message): boolean {\n\t\treturn !message.tool_calls || message.tool_calls.length === 0;\n\t}\n\n\tprivate getMessagesToSend(\n\t\tmessages: Message[],\n\t\tresponseMessage: Message,\n\t\ttoolResults: Message[],\n\t): Message[] {\n\t\treturn this.prod\n\t\t\t? toolResults\n\t\t\t: [...messages, responseMessage, ...toolResults];\n\t}\n\n\tprivate isStreamRequested(options: RunOptions | RunOptionsStream): boolean {\n\t\treturn 'stream' in options && options.stream === true;\n\t}\n\n\tprivate warnIfToolsWithStream(requestedStream: boolean): void {\n\t\tif (this.hasTools && requestedStream) {\n\t\t\tconsole.warn(\n\t\t\t\t'Warning: Streaming is not yet supported in Anthropic models when tools are present in the pipe. 
Falling back to non-streaming mode.',\n\t\t\t);\n\t\t}\n\t}\n\n\tprivate async handleStreamResponse(\n\t\toptions: RunOptionsStream,\n\t\tresponse: RunResponseStream,\n\t): Promise<RunResponseStream> {\n\t\tconst endpoint = '/v1/pipes/run';\n\t\tconst stream = this.isStreamRequested(options);\n\t\tconst body = {...options, stream};\n\n\t\tconst [streamForToolCall, streamForReturn] = response.stream.tee();\n\t\tconst tools = await getToolsFromStream(streamForToolCall);\n\n\t\tif (tools.length) {\n\t\t\tlet messages = options.messages || [];\n\n\t\t\tlet currentResponse: RunResponseStream = {\n\t\t\t\tstream: streamForReturn,\n\t\t\t\tthreadId: response.threadId,\n\t\t\t\trawResponse: response.rawResponse,\n\t\t\t};\n\n\t\t\tlet callCount = 0;\n\n\t\t\twhile (callCount < this.maxCalls) {\n\t\t\t\tconst [streamForToolCall, streamForReturn] =\n\t\t\t\t\tcurrentResponse.stream.tee();\n\n\t\t\t\tconst tools = await getToolsFromStream(streamForToolCall);\n\n\t\t\t\tif (tools.length === 0) {\n\t\t\t\t\treturn {\n\t\t\t\t\t\tstream: streamForReturn,\n\t\t\t\t\t\tthreadId: currentResponse.threadId,\n\t\t\t\t\t\trawResponse: response.rawResponse,\n\t\t\t\t\t};\n\t\t\t\t}\n\n\t\t\t\tconst toolResults = await this.runTools(tools);\n\n\t\t\t\tconst responseMessage = {\n\t\t\t\t\trole: 'assistant',\n\t\t\t\t\tcontent: null,\n\t\t\t\t\ttool_calls: tools,\n\t\t\t\t} as Message;\n\n\t\t\t\tmessages = this.getMessagesToSend(\n\t\t\t\t\tmessages,\n\t\t\t\t\tresponseMessage,\n\t\t\t\t\ttoolResults,\n\t\t\t\t);\n\n\t\t\t\tcurrentResponse = await this.createRequest<RunResponseStream>(\n\t\t\t\t\tendpoint,\n\t\t\t\t\t{\n\t\t\t\t\t\t...body,\n\t\t\t\t\t\tmessages,\n\t\t\t\t\t\tthreadId: currentResponse.threadId,\n\t\t\t\t\t},\n\t\t\t\t);\n\n\t\t\t\tcallCount++;\n\t\t\t}\n\t\t}\n\n\t\treturn {\n\t\t\t...response,\n\t\t\tstream: streamForReturn,\n\t\t} as RunResponseStream;\n\t}\n\n\tpublic async run(options: RunOptionsStream): Promise<RunResponseStream>;\n\tpublic async run(options: RunOptions): Promise<RunResponse>;\n\tpublic async run(\n\t\toptions: RunOptions | RunOptionsStream,\n\t): Promise<RunResponse | RunResponseStream> {\n\t\t// logger('pipe.run', this.pipe.name, 'RUN');\n\n\t\tconst endpoint = '/v1/pipes/run';\n\t\t// logger('pipe.run.baseUrl.endpoint', getApiUrl() + endpoint);\n\t\t// logger('pipe.run.options');\n\t\t// logger(options, {depth: null, colors: true});\n\n\t\tconst providerString = this.pipe.model.split(':')[0];\n\t\tconst modelProvider = getProvider(providerString);\n\t\tconst isAnthropic = modelProvider === ANTHROPIC;\n\t\tconst hasTools = this.pipe.tools.length > 0;\n\n\t\t// For SDK\n\t\t// Run the given pipe name\n\t\tif (options.name) {\n\t\t\tthis.pipe = {...this.pipe, name: options.name};\n\t\t}\n\n\t\t// For SDK\n\t\t// Run the pipe against the given Pipe API key\n\t\tif (options.apiKey) {\n\t\t\tthis.request = new Request({\n\t\t\t\tapiKey: options.apiKey,\n\t\t\t\tbaseUrl: this.baseUrl,\n\t\t\t\t...((options.llmKey && {llmKey: options.llmKey}) || {}),\n\t\t\t});\n\t\t}\n\n\t\tif (options.llmKey && !options.apiKey) {\n\t\t\tthis.request = new Request({\n\t\t\t\tapiKey: this.entityApiKey,\n\t\t\t\tbaseUrl: this.baseUrl,\n\t\t\t\tllmKey: options.llmKey,\n\t\t\t});\n\t\t}\n\n\t\tlet stream = this.isStreamRequested(options);\n\n\t\t// Anthropic models don't support streaming with tools.\n\t\tif (isAnthropic && hasTools && stream) {\n\t\t\tthis.warnIfToolsWithStream(stream);\n\t\t\tstream = false;\n\t\t}\n\n\t\tlet runTools = options.runTools ?? 
true;\n\n\t\t// Do not run tools if they are explicitly provided in the options.\n\t\tif (options.tools && options.tools?.length) {\n\t\t\trunTools = false;\n\t\t}\n\n\t\tdelete options.runTools;\n\n\t\tconst body = {...options, stream};\n\n\t\tlet response = await this.createRequest<\n\t\t\tRunResponse | RunResponseStream\n\t\t>(endpoint, body);\n\t\tif (Object.entries(response).length === 0) {\n\t\t\treturn {} as RunResponse | RunResponseStream;\n\t\t}\n\n\t\tif (!runTools) {\n\t\t\tif (!stream) {\n\t\t\t\treturn response as RunResponse;\n\t\t\t}\n\n\t\t\treturn response as RunResponseStream;\n\t\t}\n\n\t\tif (stream) {\n\t\t\treturn await this.handleStreamResponse(\n\t\t\t\toptions as RunOptionsStream,\n\t\t\t\tresponse as RunResponseStream,\n\t\t\t);\n\t\t}\n\n\t\t// STREAM IS OFF\n\t\tlet messages = options.messages || [];\n\t\tlet currentResponse = response as RunResponse;\n\t\tlet callCount = 0;\n\n\t\twhile (callCount < this.maxCalls) {\n\t\t\tconst responseMessage = currentResponse.choices[0].message;\n\n\t\t\tif (this.hasNoToolCalls(responseMessage)) {\n\t\t\t\t// logger('No more tool calls. Returning final response.');\n\t\t\t\treturn currentResponse;\n\t\t\t}\n\n\t\t\t// logger('\\npipe.run.response.toolCalls');\n\t\t\t// logger(responseMessage.tool_calls, {\n\t\t\t// \tdepth: null,\n\t\t\t// \tcolors: true,\n\t\t\t// });\n\n\t\t\tconst toolResults = await this.runTools(\n\t\t\t\tresponseMessage.tool_calls as ToolCallResult[],\n\t\t\t);\n\t\t\t// logger('\\npipe.run.toolResults');\n\t\t\t// logger(toolResults, {depth: null, colors: true});\n\n\t\t\tmessages = this.getMessagesToSend(\n\t\t\t\tmessages,\n\t\t\t\tresponseMessage,\n\t\t\t\ttoolResults,\n\t\t\t);\n\n\t\t\t// Simulate a delay\n\t\t\t// await new Promise(resolve => setTimeout(resolve, 1000));\n\n\t\t\tcurrentResponse = await this.createRequest<RunResponse>(endpoint, {\n\t\t\t\t...body,\n\t\t\t\tmessages,\n\t\t\t\tstream: false,\n\t\t\t\tthreadId: currentResponse.threadId,\n\t\t\t});\n\n\t\t\tcallCount++;\n\n\t\t\t// Explicitly check if the new response has no tool calls\n\t\t\tif (this.hasNoToolCalls(currentResponse.choices[0].message)) {\n\t\t\t\t// logger(\n\t\t\t\t// \t'New response has no tool calls. Returning final response.',\n\t\t\t\t// );\n\t\t\t\treturn currentResponse;\n\t\t\t}\n\t\t}\n\n\t\tconsole.warn(\n\t\t\t`Reached maximum number of calls (${this.maxCalls}). Returning last response.`,\n\t\t);\n\t\treturn currentResponse;\n\t}\n\n\tprivate async createRequest<T>(endpoint: string, body: any): Promise<T> {\n\t\tconst isProdEnv = this.prod;\n\t\tconst prodOptions = {\n\t\t\tendpoint,\n\t\t\tbody: {\n\t\t\t\t...body,\n\t\t\t\tname: this.pipe.name,\n\t\t\t},\n\t\t};\n\n\t\tlet localOptions = {} as any;\n\n\t\tif (!isProdEnv) {\n\t\t\tconst providerString = this.pipe.model.split(':')[0];\n\t\t\tconst modelProvider = getProvider(providerString);\n\t\t\tlocalOptions = {\n\t\t\t\tendpoint,\n\t\t\t\tbody: {\n\t\t\t\t\t...body,\n\t\t\t\t\tpipe: this.pipe,\n\t\t\t\t\tllmApiKey: getLLMApiKey(modelProvider),\n\t\t\t\t},\n\t\t\t};\n\n\t\t\tconst isServerRunning = await isLocalServerRunning();\n\t\t\tif (!isServerRunning) return {} as T;\n\t\t}\n\n\t\treturn this.request.post<T>(isProdEnv ? 
prodOptions : localOptions);\n\t}\n}\n\n/**\n * Generates text using the provided options.\n *\n * @param options - The options for generating text.\n * @returns A promise that resolves to the generated text.\n */\nexport const generateText = async (\n\toptions: RunOptions & {pipe: Pipe},\n): Promise<RunResponse> => {\n\treturn options.pipe.run(options);\n};\n\n/**\n * Streams text using the provided options.\n *\n * @param options - The options for streaming text.\n * @returns A promise that resolves to the response of the stream operation.\n */\nexport const streamText = async (\n\toptions: RunOptions & {pipe: Pipe},\n): Promise<RunResponseStream> => {\n\treturn options.pipe.run({...options, stream: true});\n};\n\ninterface ContentChunk {\n\ttype: 'content';\n\tcontent: string;\n}\n\ninterface ToolCallChunk {\n\ttype: 'toolCall';\n\ttoolCall: ToolCallResult;\n}\n\ninterface ChoiceStream {\n\tindex: number;\n\tdelta: Delta;\n\tlogprobs: boolean | null;\n\tfinish_reason: string;\n}\n\ninterface Delta {\n\trole?: MessageRole;\n\tcontent?: string;\n\ttool_calls?: ToolCallResult[];\n}\n\ninterface UnknownChunk {\n\ttype: 'unknown';\n\trawChunk: ChunkStream;\n}\n\nexport interface ChunkStream {\n\tid: string;\n\tobject: string;\n\tcreated: number;\n\tmodel: string;\n\tchoices: ChoiceStream[];\n}\n\nexport interface Chunk {\n\ttype: 'content' | 'toolCall' | 'unknown';\n\tcontent?: string;\n\ttoolCall?: ToolCallResult;\n\trawChunk?: ChunkStream;\n}\n\n/**\n * Processes a chunk and returns a Chunk object.\n *\n * ```ts\n * for await (const chunk of runner) {\n *\t\tconst processedChunk = processChunk({rawChunk: chunk});\n *\t\tif (isContent(processedChunk)) {\n *\t\t\tprocess.stdout.write(processedChunk.content);\n *\t\t}\n *\t}\n * ```\n *\n * @param rawChunk - The raw chunk to process.\n * @returns The processed Chunk object.\n */\nexport const processChunk = ({rawChunk}: {rawChunk: any}): Chunk => {\n\tif (rawChunk.choices[0]?.delta?.content) {\n\t\treturn {type: 'content', content: rawChunk.choices[0].delta.content};\n\t}\n\tif (\n\t\trawChunk.choices[0]?.delta?.tool_calls &&\n\t\trawChunk.choices[0].delta.tool_calls.length > 0\n\t) {\n\t\tconst toolCall = rawChunk.choices[0].delta.tool_calls[0];\n\t\treturn {type: 'toolCall', toolCall};\n\t}\n\treturn {type: 'unknown', rawChunk};\n};\n\n/**\n * Checks if the given chunk is a ContentChunk.\n *\n * @param chunk - The chunk to check.\n * @returns True if the chunk is a ContentChunk, false otherwise.\n */\nexport const isContent = (chunk: Chunk): chunk is ContentChunk =>\n\tchunk.type === 'content';\n\n/**\n * Determines if the given chunk is a ToolCallChunk.\n *\n * @param chunk - The chunk to be evaluated.\n * @returns True if the chunk is of type 'toolCall', otherwise false.\n */\nexport const isToolCall = (chunk: Chunk): chunk is ToolCallChunk =>\n\tchunk.type === 'toolCall';\n\n/**\n * Checks if the given chunk is of type 'unknown'.\n *\n * @param chunk - The chunk to be checked.\n * @returns True if the chunk is of type 'unknown', false otherwise.\n */\nexport const isUnknown = (chunk: Chunk): chunk is UnknownChunk =>\n\tchunk.type === 'unknown';\n\n/**\n * Retrieves the text content from a given ChunkStream.\n *\n * @param chunk - The ChunkStream object.\n * @returns The text content from the ChunkStream.\n */\nexport const getTextContent = (chunk: any): string => {\n\treturn chunk.choices[0]?.delta?.content || '';\n};\n\n/**\n * Retrieves the text delta from a given chunk.\n *\n * @param chunk - The chunk stream to extract the text delta 
from.\n * @returns The text delta content, or an empty string if it is not available.\n */\nexport const getTextDelta = (chunk: ChunkStream): string => {\n\treturn chunk.choices[0]?.delta?.content || '';\n};\n\n/**\n * Writes the content of a TextStream to the standard output.\n *\n * @param stream - The TextStream to be printed.\n * @returns A Promise that resolves when the printing is complete.\n */\nexport const printStreamToStdout = async (runner: Runner): Promise<void> => {\n\tfor await (const chunk of runner) {\n\t\tconst textPart = chunk.choices[0]?.delta?.content || '';\n\t\tprocess.stdout.write(textPart);\n\t}\n};\n"],"mappings":";AAAA,SAAQ,4BAA2B;AAEnC,SAAQ,cAAa;AAWd,IAAM,qBAAqB,CAAC,mBAA2C;AAC7E,SAAO,qBAAqB,mBAAmB,cAAc;AAC9D;AAQO,IAAM,YAAY,CAAC,mBAAmC;AAC5D,SAAO,mBAAmB,cAAc;AACzC;AAQO,IAAM,cAAc,CAAC,UAAuB;AAjCnD;AAkCC,WAAO,iBAAM,QAAQ,CAAC,MAAf,mBAAkB,UAAlB,mBAAyB,YAAW;AAC5C;AAeO,SAAS,qBAAqB;AAAA,EACpC;AAAA,EACA;AACD,GASE;AACD,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,YAAY,OAAO,gBAAgB,UAAU,UAAU;AAC7D,QAAM,SAAS,UAAU,iBAAiB;AAE1C,QAAM,SAMF;AAAA,IACH;AAAA,IACA,UAAU,SAAS,QAAQ,IAAI,cAAc;AAAA,EAC9C;AACA,MAAI,aAAa;AAChB,WAAO,cAAc;AAAA,MACpB,SAAS,OAAO,YAAY,SAAS,QAAQ,QAAQ,CAAC;AAAA,IACvD;AAAA,EACD;AACA,SAAO;AACR;AAQA,eAAsB,mBACrB,QAC4B;AAC5B,MAAI,MAAM,UAAU,MAAM;AAC1B,QAAM,EAAC,QAAO,IAAI,MAAM,IAAI,oBAAoB;AAChD,SAAO,QAAQ,CAAC,EAAE,QAAQ;AAC3B;;;AC/FO,IAAM,WAAN,MAAM,kBAAiB,MAAM;AAAA,EAWnC,YACC,QACA,OACA,SACA,SACC;AACD,UAAM,UAAS,YAAY,QAAQ,OAAO,OAAO,CAAC;AAClD,SAAK,SAAS;AACd,SAAK,UAAU;AACf,SAAK,aAAa,mCAAU;AAE5B,UAAM,OAAO;AACb,SAAK,QAAQ;AACb,SAAK,OAAO,6BAAO;AACnB,SAAK,SAAS,6BAAO;AAAA,EAGtB;AAAA,EAEA,OAAe,YACd,QACA,OACA,SACS;AACT,UAAM,OAAM,+BAAO,WAChB,OAAO,MAAM,YAAY,WACxB,MAAM,UACN,KAAK,UAAU,MAAM,OAAO,IAC7B,QACC,KAAK,UAAU,KAAK,IACpB;AAEJ,QAAI,UAAU,KAAK;AAClB,aAAO,GAAG,MAAM,IAAI,GAAG;AAAA,IACxB;AACA,QAAI,QAAQ;AACX,aAAO,GAAG,MAAM;AAAA,IACjB;AACA,QAAI,KAAK;AACR,aAAO;AAAA,IACR;AACA,WAAO;AAAA,EACR;AAAA,EAEA,OAAO,SACN,QACA,eACA,SACA,SACW;AACX,QAAI,CAAC,QAAQ;AACZ,aAAO,IAAI,mBAAmB;AAAA,QAC7B,OACC,yBAAyB,QAAQ,gBAAgB;AAAA,MACnD,CAAC;AAAA,IACF;AAEA,UAAM,QAAS,+CAAwC;AAEvD,YAAQ,QAAQ;AAAA,MACf,KAAK;AACJ,eAAO,IAAI,gBAAgB,QAAQ,OAAO,SAAS,OAAO;AAAA,MAC3D,KAAK;AACJ,eAAO,IAAI,oBAAoB,QAAQ,OAAO,SAAS,OAAO;AAAA,MAC/D,KAAK;AACJ,eAAO,IAAI;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,MACD,KAAK;AACJ,eAAO,IAAI,cAAc,QAAQ,OAAO,SAAS,OAAO;AAAA,MACzD,KAAK;AACJ,eAAO,IAAI,cAAc,QAAQ,OAAO,SAAS,OAAO;AAAA,MACzD,KAAK;AACJ,eAAO,IAAI;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,MACD,KAAK;AACJ,eAAO,IAAI,eAAe,QAAQ,OAAO,SAAS,OAAO;AAAA,MAC1D;AACC,eAAO,UAAU,MACd,IAAI,oBAAoB,QAAQ,OAAO,SAAS,OAAO,IACvD,IAAI,UAAS,QAAQ,OAAO,SAAS,OAAO;AAAA,IACjD;AAAA,EACD;AACD;AAEO,IAAM,qBAAN,cAAiC,SAAS;AAAA,EAGhD,YAAY,EAAC,SAAS,MAAK,GAAsC;AAChE,UAAM,QAAW,QAAW,WAAW,qBAAqB,MAAS;AAHtE,SAAkB,SAAoB;AAIrC,QAAI,MAAO,CAAC,KAAe,QAAQ;AAAA,EACpC;AACD;AAQO,IAAM,kBAAN,cAA8B,SAAS;AAAA,EAAvC;AAAA;AACN,SAAkB,SAAc;AAAA;AACjC;AAEO,IAAM,sBAAN,cAAkC,SAAS;AAAA,EAA3C;AAAA;AACN,SAAkB,SAAc;AAAA;AACjC;AAEO,IAAM,wBAAN,cAAoC,SAAS;AAAA,EAA7C;AAAA;AACN,SAAkB,SAAc;AAAA;AACjC;AAEO,IAAM,gBAAN,cAA4B,SAAS;AAAA,EAArC;AAAA;AACN,SAAkB,SAAc;AAAA;AACjC;AAEO,IAAM,gBAAN,cAA4B,SAAS;AAAA,EAArC;AAAA;AACN,SAAkB,SAAc;AAAA;AACjC;AAEO,IAAM,2BAAN,cAAuC,SAAS;AAAA,EAAhD;AAAA;AACN,SAAkB,SAAc;AAAA;AACjC;AAEO,IAAM,iBAAN,cAA6B,SAAS;AAAA,EAAtC;AAAA;AACN,SAAkB,SAAc;AAAA;AACjC;AAEO,IAAM,sBAAN,cAAkC,SAAS;AAAC;;;AChH5C,IAAM,UAAN,MAAc;AAAA,EAGpB,YAAY,QAAuB;AAClC,SAAK,SAAS;AAAA,EACf;AAAA,EAEA,MAAc,KAAQ,EAAC,UAAU,GAAG,QAAO,GAA4B;AA3CxE;AA4CE,UAAM,MAAM,KAAK,SAAS,EAAC,SAAQ,CAAC;AACpC,UAAM,UAAU,KAAK,aAAa,EAAC,SAAS,QAAQ,
QAAO,CAAC;AAE5D,QAAI;AACJ,QAAI;AACH,iBAAW,MAAM,KAAK,YAAY;AAAA,QACjC;AAAA,QACA,SAAS,EAAC,GAAG,SAAS,SAAQ;AAAA,QAC9B;AAAA,MACD,CAAC;AAAA,IACF,SAAS,OAAO;AACf,YAAM,IAAI,mBAAmB;AAAA,QAC5B,OAAO,iBAAiB,QAAQ,QAAQ;AAAA,MACzC,CAAC;AAAA,IACF;AAEA,QAAI,CAAC,SAAS,IAAI;AACjB,YAAM,KAAK,oBAAoB,EAAC,SAAQ,CAAC;AAAA,IAC1C;AAEA,UAAM,WAAW,SAAS,QAAQ,IAAI,cAAc;AAEpD,SAAI,aAAQ,SAAR,mBAAc,QAAQ;AACzB,aAAO,qBAAqB;AAAA,QAC3B;AAAA,QACA,aAAa,QAAQ,KAAK;AAAA,MAC3B,CAAC;AAAA,IACF;AAEA,WAAO,KAAK,kBAAkB;AAAA,MAC7B;AAAA,MACA,SAAQ,aAAQ,SAAR,mBAAc;AAAA,MACtB;AAAA,MACA,cAAa,mBAAQ,SAAR,mBAAc,gBAAd,YAA6B;AAAA,IAC3C,CAAC;AAAA,EACF;AAAA,EAEQ,SAAS,EAAC,SAAQ,GAA+B;AACxD,WAAO,GAAG,KAAK,OAAO,OAAO,GAAG,QAAQ;AAAA,EACzC;AAAA,EAEQ,aAAa;AAAA,IACpB;AAAA,EACD,GAE2B;AAzF5B;AA0FE,WAAO;AAAA,MACN,gBAAgB;AAAA,MAChB,eAAe,UAAU,KAAK,OAAO,MAAM;AAAA,MAC3C,eAAc,UAAK,OAAO,WAAZ,YAAsB;AAAA,MACpC,GAAG;AAAA,IACJ;AAAA,EACD;AAAA,EAEA,MAAc,YAAY;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,EACD,GAAyC;AACxC,UAAM,OAAO,MAAM,MAAM,KAAK;AAAA,MAC7B,QAAQ,QAAQ;AAAA,MAChB;AAAA,MACA,MAAM,KAAK,UAAU,QAAQ,IAAI;AAAA,MACjC,GAAI,KAAK,OAAO,WAAW;AAAA,QAC1B,QAAQ,YAAY,QAAQ,KAAK,OAAO,OAAO;AAAA,MAChD;AAAA,IACD,CAAC;AACD,WAAO;AAAA,EACR;AAAA,EAEA,MAAc,oBAAoB;AAAA,IACjC;AAAA,EACD,GAEmB;AAClB,QAAI;AACJ,QAAI;AACH,kBAAY,MAAM,SAAS,KAAK;AAAA,IACjC,SAAQ;AACP,kBAAY,MAAM,SAAS,KAAK;AAAA,IACjC;AACA,UAAM,SAAS;AAAA,MACd,SAAS;AAAA,MACT;AAAA,MACA,SAAS;AAAA,MACT,OAAO,YAAY,SAAS,QAAQ,QAAQ,CAAC;AAAA,IAC9C;AAAA,EACD;AAAA,EAEA,MAAc,kBAAkB;AAAA,IAC/B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACD,GAA+C;AAC9C,UAAM,mBAAmB,MAAM,SAAS,KAAK;AAC7C,UAAM,gBAAgB,iBAAiB,MACpC;AAAA,MACA,YAAY,iBAAiB;AAAA,MAC7B,GAAG,iBAAiB;AAAA,IACrB,IACC;AAEH,UAAM,SAAc;AAAA,MACnB,GAAG;AAAA,IACJ;AAEA,WAAO,WAAW;AAElB,QAAI,aAAa;AAChB,aAAO,cAAc;AAAA,QACpB,SAAS,OAAO,YAAY,SAAS,QAAQ,QAAQ,CAAC;AAAA,MACvD;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA,EAEA,MAAM,KAAQ,SAAqD;AAIlE,WAAO,KAAK,KAAQ,EAAC,GAAG,SAAS,QAAQ,OAAM,CAAC;AAAA,EACjD;AAAA,EAEA,MAAM,IAAO,SAA8D;AAC1E,WAAO,KAAK,KAAQ,EAAC,GAAG,SAAS,QAAQ,MAAK,CAAC;AAAA,EAChD;AAAA,EAEA,MAAM,IAAO,SAAqD;AACjE,WAAO,KAAK,KAAQ,EAAC,GAAG,SAAS,QAAQ,MAAK,CAAC;AAAA,EAChD;AAAA,EAEA,MAAM,OACL,SACa;AACb,WAAO,KAAK,KAAQ,EAAC,GAAG,SAAS,QAAQ,SAAQ,CAAC;AAAA,EACnD;AACD;;;ACtLO,IAAM,UAAkB;AACxB,IAAM,YAAoB;AAC1B,IAAM,cAAsB;AAC5B,IAAM,SAAiB;AACvB,IAAM,OAAe;AACrB,IAAM,SAAiB;AACvB,IAAM,eAAuB;AAC7B,IAAM,aAAqB;AAC3B,IAAM,aAAqB;AAI3B,IAAM,SAAiB;AACvB,IAAM,OAAe;AAcrB,IAAM,mBAA8C;AAAA,EAC1D,CAAC,OAAO,GAAG;AAAA,IACV;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,EACD;AAAA,
EACA,CAAC,WAAW,GAAG;AAAA,IACd;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,EACD;AAAA,EACA,CAAC,SAAS,GAAG;AAAA,IACZ;AAAA,MACC,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,IACjB;AAAA,IACA;AAAA,MACC,IAAI;AAAA,