rawi
Version:
Rawi (راوي) is the developer-friendly AI CLI that brings the power of 11 major AI providers directly to your terminal. With seamless shell integration, persistent conversations, and 200+ specialized prompt templates, Rawi transforms your command line into …
Source Map (JSON)
{"version":3,"sources":["/home/mkabumattar/work/withrawi/rawi/dist/chunk-WHXXRX57.cjs","../src/libs/providers/ollama/ollama-image-model.ts"],"names":["OllamaImageModel","#config","#settings","modelId","settings","config","abortSignal","prompt","responseHeaders","response","postJsonToApi","ollamaFailedResponseHandler","createJsonResponseHandler"],"mappings":"AAAA;AACA,wDAAwC,uDCAe,0BACvC,IAcHA,CAAAA,WAAN,KAA+C,CAC3CC,CAAAA,CAAAA,CACAC,CAAAA,CAAAA,gBAEA,oBAAA,CAAuB,KAAA,IAG5B,QAAA,CAAA,CAAmB,CACrB,OAAO,IAAA,CAAKD,CAAAA,CAAAA,CAAQ,QACtB,CAEA,IAAI,gBAAA,CAAA,CAA2B,CAC7B,wBAAO,IAAA,CAAKC,CAAAA,CAAAA,CAAU,gBAAA,SAAoB,MAC5C,CAEA,IAAI,qBAAA,CAAA,CAAiC,CACnC,MAAO,CAAA,CACT,CAEA,WAAA,CACEC,CAAAA,CACAC,CAAAA,CACAC,CAAAA,CACA,qCACA,IAAA,CAAK,OAAA,CAAUF,CAAAA,CACf,IAAA,CAAKD,CAAAA,CAAAA,CAAYE,CAAAA,CACjB,IAAA,CAAKH,CAAAA,CAAAA,CAAUI,CACjB,CAEA,MAAM,UAAA,CAAW,CACf,WAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CACF,CAAA,CAEE,CACA,GAAM,CAAC,eAAA,CAAAC,CAAAA,CAAiB,KAAA,CAAOC,CAAQ,CAAA,CAAI,MAAMC,0CAAAA,CAC/C,WAAA,CAAAJ,CAAAA,CACA,IAAA,CAAM,CACJ,MAAA,CAAQC,CAAAA,CACR,KAAA,CAAO,IAAA,CAAK,OACd,CAAA,CACA,qBAAA,CAAuBI,mBAAAA,CACvB,KAAA,CAAO,IAAA,CAAKV,CAAAA,CAAAA,CAAQ,KAAA,CACpB,OAAA,CAAS,IAAA,CAAKA,CAAAA,CAAAA,CAAQ,OAAA,CAAQ,CAAA,CAC9B,yBAAA,CAA2BW,sDAAAA,CAE3B,CAAA,CACA,GAAA,CAAK,CAAA,EAAA;AD7D6mB","file":"/home/mkabumattar/work/withrawi/rawi/dist/chunk-WHXXRX57.cjs","sourcesContent":[null,"import type {ImageModelV2} from '@ai-sdk/provider';\nimport {createJsonResponseHandler, postJsonToApi} from '@ai-sdk/provider-utils';\nimport {z} from 'zod';\n\nimport {ollamaFailedResponseHandler} from './ollama-error.js';\nimport type {\n OllamaImageModelId,\n OllamaImageSettings,\n} from './ollama-image-settings.js';\n\ntype OllamaEmbeddingConfig = {\n baseURL: string;\n fetch?: typeof fetch;\n headers: () => Record<string, string | undefined>;\n provider: string;\n};\nexport class OllamaImageModel implements ImageModelV2 {\n readonly #config: OllamaEmbeddingConfig;\n readonly #settings: OllamaImageSettings;\n\n readonly specificationVersion = 'v2';\n readonly modelId: OllamaImageModelId;\n\n get provider(): string {\n return this.#config.provider;\n }\n\n get maxImagesPerCall(): number {\n return this.#settings.maxImagesPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return false;\n }\n\n constructor(\n modelId: OllamaImageModelId,\n settings: OllamaImageSettings,\n config: OllamaEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.#settings = settings;\n this.#config = config;\n }\n\n async doGenerate({\n abortSignal,\n prompt,\n }: Parameters<ImageModelV2['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV2['doGenerate']>>\n > {\n const {responseHeaders, value: response} = await postJsonToApi({\n abortSignal,\n body: {\n prompt: prompt,\n model: this.modelId,\n },\n failedResponseHandler: ollamaFailedResponseHandler,\n fetch: this.#config.fetch,\n headers: this.#config.headers(),\n successfulResponseHandler: createJsonResponseHandler(\n ollamaImageResponseSchema as any,\n ),\n url: `${this.#config.baseURL}/generate`,\n });\n\n const typedResponse = response as z.infer<typeof ollamaImageResponseSchema>;\n\n return {\n images: typedResponse.images || [],\n warnings: [],\n response: {\n timestamp: new Date(),\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\nconst ollamaImageResponseSchema = z.object({\n images: z.array(z.string()).optional(),\n prompt_eval_count: z.number().nullable(),\n});\n"]}