
@langchain/openai

llms.d.ts
{"version":3,"file":"llms.d.ts","names":["ClientOptions","OpenAI","OpenAIClient","CallbackManagerForLLMRun","GenerationChunk","LLMResult","BaseLLM","BaseLLMParams","OpenAIApiKey","OpenAICallOptions","OpenAICoreRequestOptions","OpenAIInput","CallOptions","Record","Partial","CompletionCreateParams","Omit","Promise","AsyncGenerator","CompletionCreateParamsStreaming","Completion","AsyncIterable","CompletionCreateParamsNonStreaming","Completions"],"sources":["../src/llms.d.ts"],"sourcesContent":["import { type ClientOptions, OpenAI as OpenAIClient } from \"openai\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\nimport { GenerationChunk, type LLMResult } from \"@langchain/core/outputs\";\nimport { BaseLLM, type BaseLLMParams } from \"@langchain/core/language_models/llms\";\nimport type { OpenAIApiKey, OpenAICallOptions, OpenAICoreRequestOptions, OpenAIInput } from \"./types.js\";\nexport type { OpenAICallOptions, OpenAIInput };\n/**\n * Wrapper around OpenAI large language models.\n *\n * To use you should have the `openai` package installed, with the\n * `OPENAI_API_KEY` environment variable set.\n *\n * To use with Azure, import the `AzureOpenAI` class.\n *\n * @remarks\n * Any parameters that are valid to be passed to {@link\n * https://platform.openai.com/docs/api-reference/completions/create |\n * `openai.createCompletion`} can be passed through {@link modelKwargs}, even\n * if not explicitly available on this class.\n * @example\n * ```typescript\n * const model = new OpenAI({\n * modelName: \"gpt-4\",\n * temperature: 0.7,\n * maxTokens: 1000,\n * maxRetries: 5,\n * });\n *\n * const res = await model.invoke(\n * \"Question: What would be a good company name for a company that makes colorful socks?\\nAnswer:\"\n * );\n * console.log({ res });\n * ```\n */\nexport declare class OpenAI<CallOptions extends OpenAICallOptions = OpenAICallOptions> extends BaseLLM<CallOptions> implements Partial<OpenAIInput> {\n static lc_name(): string;\n get callKeys(): string[];\n lc_serializable: boolean;\n get lc_secrets(): {\n [key: string]: string;\n } | undefined;\n get lc_aliases(): Record<string, string>;\n temperature?: number;\n maxTokens?: number;\n topP?: number;\n frequencyPenalty?: number;\n presencePenalty?: number;\n n: number;\n bestOf?: number;\n logitBias?: Record<string, number>;\n model: string;\n /** @deprecated Use \"model\" instead */\n modelName: string;\n modelKwargs?: OpenAIInput[\"modelKwargs\"];\n batchSize: number;\n timeout?: number;\n stop?: string[];\n stopSequences?: string[];\n user?: string;\n streaming: boolean;\n openAIApiKey?: OpenAIApiKey;\n apiKey?: OpenAIApiKey;\n organization?: string;\n protected client: OpenAIClient;\n protected clientConfig: ClientOptions;\n constructor(fields?: Partial<OpenAIInput> & BaseLLMParams & {\n configuration?: ClientOptions;\n });\n /**\n * Get the parameters used to invoke the model\n */\n invocationParams(options?: this[\"ParsedCallOptions\"]): Omit<OpenAIClient.CompletionCreateParams, \"prompt\">;\n /** @ignore */\n _identifyingParams(): Omit<OpenAIClient.CompletionCreateParams, \"prompt\"> & {\n model_name: string;\n } & ClientOptions;\n /**\n * Get the identifying parameters for the model\n */\n identifyingParams(): Omit<OpenAIClient.CompletionCreateParams, \"prompt\"> & {\n model_name: string;\n } & ClientOptions;\n /**\n * Call out to OpenAI's endpoint with k unique prompts\n *\n * @param [prompts] - The prompts to pass into the model.\n * @param [options] - Optional list of stop words to use 
when generating.\n * @param [runManager] - Optional callback manager to use when generating.\n *\n * @returns The full LLM output.\n *\n * @example\n * ```ts\n * import { OpenAI } from \"langchain/llms/openai\";\n * const openai = new OpenAI();\n * const response = await openai.generate([\"Tell me a joke.\"]);\n * ```\n */\n _generate(prompts: string[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<LLMResult>;\n _streamResponseChunks(input: string, options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;\n /**\n * Calls the OpenAI API with retry logic in case of failures.\n * @param request The request to send to the OpenAI API.\n * @param options Optional configuration for the API call.\n * @returns The response from the OpenAI API.\n */\n completionWithRetry(request: OpenAIClient.CompletionCreateParamsStreaming, options?: OpenAICoreRequestOptions): Promise<AsyncIterable<OpenAIClient.Completion>>;\n completionWithRetry(request: OpenAIClient.CompletionCreateParamsNonStreaming, options?: OpenAICoreRequestOptions): Promise<OpenAIClient.Completions.Completion>;\n /**\n * Calls the OpenAI API with retry logic in case of failures.\n * @param request The request to send to the OpenAI API.\n * @param options Optional configuration for the API call.\n * @returns The response from the OpenAI API.\n */\n protected _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;\n _llmType(): string;\n}\n//# sourceMappingURL=llms.d.ts.map"],"mappings":";;;;;;;;;AAkCA;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiE0FG,cAjErEF,QAiEqEE,CAAAA,oBAjE1CM,iBAiE0CN,GAjEtBM,iBAiEsBN,CAAAA,SAjEKG,OAiELH,CAjEaS,WAiEbT,CAAAA,YAjEqCW,OAiErCX,CAjE6CQ,WAiE7CR,CAAAA,CAAAA;EAA0CC,OAAAA,OAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAfc,IAAAA,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA,EAAAA;EAOpFhB,eAAaiB,EAAAA,OAAAA;EAA2CT,IAAAA,UAAAA,CAAAA,CAAAA,EAAAA;IAAiDR,CAAAA,GAAakB,EAAAA,MAAAA,CAAAA,EAAAA,MAAAA;EAA3BC,CAAAA,GAAAA,SAAAA;EAARJ,IAAAA,UAAAA,CAAAA,CAAAA,EAjE9FJ,MAiE8FI,CAAAA,MAAAA,EAAAA,MAAAA,CAAAA;EACnFf,WAAaoB,CAAAA,EAAAA,MAAAA;EAA8CZ,SAAAA,CAAAA,EAAAA,MAAAA;EAAmCR,IAAAA,CAAAA,EAAaqB,MAAAA;EAArBN,gBAAAA,CAAAA,EAAAA,MAAAA;EAO9EP,eAAAA,CAAAA,EAAAA,MAAAA;EAAuCA,CAAAA,EAAAA,MAAAA;EAhFeJ,MAAAA,CAAAA,EAAAA,MAAAA;EAAgCQ,SAAAA,CAAAA,EAe/GD,MAf+GC,CAAAA,MAAAA,EAAAA,MAAAA,CAAAA;EAAO,KAAA,EAAA,MAAA;;;gBAmBpHH;;;;;;;iBAOCH;WACNA;;oBAESN;0BACMF;uBACHc,QAAQH,eAAeJ;oBACxBP;;;;;yDAKmCgB,KAAKd,QAAAA,CAAaa;;wBAEnDC,KAAKd,QAAAA,CAAaa;;MAEpCf;;;;uBAIiBgB,KAAKd,QAAAA,CAAaa;;MAEnCf;;;;;;;;;;;;;;;;;gFAiB0EG,2BAA2Bc,QAAQZ;wFAC3BF,2BAA2Be,eAAed;;;;;;;+BAOnGF,QAAAA,CAAaiB,2CAA2CT,2BAA2BO,QAAQI,cAAcnB,QAAAA,CAAakB;+BACtHlB,QAAAA,CAAaoB,8CAA8CZ,2BAA2BO,QAAQf,QAAAA,CAAaqB,WAAAA,CAAYH;;;;;;;uCAO/GV,uCAAuCA"}
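
The class above inherits the Runnable interface from BaseLLM, so it is typically driven through invoke() and stream() rather than by calling _generate or _streamResponseChunks directly. A minimal usage sketch, assuming the OPENAI_API_KEY environment variable is set and using "gpt-3.5-turbo-instruct" purely as an illustrative completions-style model name:

// Usage sketch (not part of the declaration file). Assumes OPENAI_API_KEY is
// set; the model name below is only an example of a completions-style model.
import { OpenAI } from "@langchain/openai";

const llm = new OpenAI({
    model: "gpt-3.5-turbo-instruct", // illustrative model name
    temperature: 0.7,
    maxTokens: 256,
});

// invoke() resolves to the generated text for a single prompt.
const answer = await llm.invoke("Q: Name a good company for colorful socks.\nA:");
console.log(answer);

// stream() yields string chunks as they arrive, backed by _streamResponseChunks.
const stream = await llm.stream("Count from one to five, one word per line.");
for await (const chunk of stream) {
    process.stdout.write(chunk);
}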
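
The constructor also accepts a configuration object of ClientOptions from the openai package, which is the hook for pointing the client at an OpenAI-compatible endpoint or attaching extra headers. A sketch under those assumptions; the base URL, header, and MY_PROVIDER_KEY environment variable are placeholders, not values defined by this package:

// Configuration sketch: the baseURL, header, and MY_PROVIDER_KEY variable are
// placeholders, not values defined by @langchain/openai.
import { OpenAI } from "@langchain/openai";

const compatibleLlm = new OpenAI({
    model: "gpt-3.5-turbo-instruct", // illustrative model name
    apiKey: process.env.MY_PROVIDER_KEY,
    configuration: {
        baseURL: "https://example.com/v1", // any OpenAI-compatible completions endpoint
        defaultHeaders: { "x-example-header": "demo" },
    },
});

// invocationParams() returns the request body (minus the prompt) that will be
// sent to the completions endpoint, which is useful for verifying configuration.
console.log(compatibleLlm.invocationParams());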