@langchain/openai

{"version":3,"file":"prompts.cjs","names":["formattedPrompt: BasePromptValue","convertMessagesToCompletionsMessageParams"],"sources":["../../src/utils/prompts.ts"],"sourcesContent":["import type { BasePromptValue } from \"@langchain/core/prompt_values\";\nimport type { OpenAI } from \"openai\";\nimport { convertMessagesToCompletionsMessageParams } from \"../converters/completions.js\";\n\n/**\n * Convert a formatted LangChain prompt (e.g. pulled from the hub) into\n * a format expected by OpenAI's JS SDK.\n *\n * Requires the \"@langchain/openai\" package to be installed in addition\n * to the OpenAI SDK.\n *\n * @example\n * ```ts\n * import { convertPromptToOpenAI } from \"langsmith/utils/hub/openai\";\n * import { pull } from \"langchain/hub\";\n *\n * import OpenAI from 'openai';\n *\n * const prompt = await pull(\"jacob/joke-generator\");\n * const formattedPrompt = await prompt.invoke({\n * topic: \"cats\",\n * });\n *\n * const { messages } = convertPromptToOpenAI(formattedPrompt);\n *\n * const openAIClient = new OpenAI();\n *\n * const openaiResponse = await openAIClient.chat.completions.create({\n * model: \"gpt-4o-mini\",\n * messages,\n * });\n * ```\n * @param formattedPrompt\n * @returns A partial OpenAI payload.\n */\n// TODO: make this a converter\nexport function convertPromptToOpenAI(formattedPrompt: BasePromptValue): {\n messages: OpenAI.Chat.ChatCompletionMessageParam[];\n} {\n const messages = formattedPrompt.toChatMessages();\n return {\n messages: convertMessagesToCompletionsMessageParams({\n messages,\n }) as OpenAI.Chat.ChatCompletionMessageParam[],\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAoCA,SAAgB,sBAAsBA,iBAEpC;CACA,MAAM,WAAW,gBAAgB,gBAAgB;AACjD,QAAO,EACL,UAAUC,8DAA0C,EAClD,SACD,EAAC,CACH;AACF"}