
rawi


Rawi (راوي) is a developer-friendly AI CLI that brings the power of 11 major AI providers directly to your terminal, with seamless shell integration, persistent conversations, and 200+ specialized prompt templates.

Source map for dist/chunk-3BQKIUKS.cjs (a single minified line, 12.1 kB). The map's names and mappings fields are machine-generated VLQ data and are not reproduced here; its sourcesContent field embeds the two original TypeScript files, shown below.

src/core/providers/index.ts:

export * from './anthropic.provider.js';
export * from './azure.provider.js';
export * from './bedrock.provider.js';
export * from './cerebras.provider.js';
export * from './deepseek.provider.js';
export * from './google.provider.js';
export * from './lmstudio.provider.js';
export * from './mistral.provider.js';
export * from './ollama.provider.js';
export * from './openai.provider.js';
export * from './types.js';
export * from './utils.js';
export * from './xai.provider.js';

import type {ProviderConfig, SupportedProvider} from '../shared/types.js';
import {anthropicProvider} from './anthropic.provider.js';
import {azureProvider} from './azure.provider.js';
import {bedrockProvider} from './bedrock.provider.js';
import {cerebrasProvider} from './cerebras.provider.js';
import {deepSeekProvider} from './deepseek.provider.js';
import {googleProvider} from './google.provider.js';
import {lmstudioProvider} from './lmstudio.provider.js';
import {mistralProvider} from './mistral.provider.js';
import {ollamaProvider} from './ollama.provider.js';
import {openaiProvider} from './openai.provider.js';
import {xaiProvider} from './xai.provider.js';

export const askProviders: Record<SupportedProvider, ProviderConfig> = {
  anthropic: anthropicProvider,
  azure: azureProvider,
  bedrock: bedrockProvider,
  cerebras: cerebrasProvider,
  deepseek: deepSeekProvider,
  google: googleProvider,
  mistral: mistralProvider,
  ollama: ollamaProvider,
  openai: openaiProvider,
  xai: xaiProvider,
  lmstudio: lmstudioProvider,
};

import {anthropicChatProvider} from './anthropic.provider.js';
import {azureChatProvider} from './azure.provider.js';
import {bedrockChatProvider} from './bedrock.provider.js';
import {cerebrasChatProvider} from './cerebras.provider.js';
import {deepseekChatProvider} from './deepseek.provider.js';
import {googleChatProvider} from './google.provider.js';
import {lmstudioChatProvider} from './lmstudio.provider.js';
import {mistralChatProvider} from './mistral.provider.js';
import {ollamaChatProvider} from './ollama.provider.js';
import {openaiChatProvider} from './openai.provider.js';
import type {ChatProvider} from './types.js';
import {xaiChatProvider} from './xai.provider.js';

export const chatProviders: Record<string, ChatProvider> = {
  anthropic: anthropicChatProvider,
  azure: azureChatProvider,
  bedrock: bedrockChatProvider,
  cerebras: cerebrasChatProvider,
  deepseek: deepseekChatProvider,
  google: googleChatProvider,
  lmstudio: lmstudioChatProvider,
  mistral: mistralChatProvider,
  ollama: ollamaChatProvider,
  openai: openaiChatProvider,
  xai: xaiChatProvider,
};

export const execProviders = {
  'amazon-bedrock': 'generateWithBedrock',
  anthropic: 'generateWithAnthropic',
  azure: 'generateWithAzure',
  bedrock: 'generateWithBedrock',
  cerebras: 'generateWithCerebras',
  deepseek: 'generateWithDeepSeek',
  google: 'generateWithGoogle',
  lmstudio: 'generateWithLMStudio',
  mistral: 'generateWithMistral',
  ollama: 'generateWithOllama',
  openai: 'generateWithOpenAI',
  xai: 'generateWithXAI',
} as const;

export type ExecProviderName = keyof typeof execProviders;

export const getProvider = (name: SupportedProvider): ProviderConfig => {
  return askProviders[name];
};

export const getAllProviders = (): ProviderConfig[] => {
  return Object.values(askProviders);
};

export const getProviderNames = (): SupportedProvider[] => {
  return Object.keys(askProviders) as SupportedProvider[];
};

export const getChatProvider = (name: string): ChatProvider => {
  const provider = chatProviders[name];
  if (!provider) {
    throw new Error(`Chat provider '${name}' not found`);
  }
  return provider;
};

export const getChatProviderNames = (): string[] => {
  return Object.keys(chatProviders);
};

export type {ExecGenerationOptions, ExecGenerationResult} from './types.js';

src/core/providers/utils.ts:

import {ContentFilter} from '../content-filter/content-filter.js';
import {applyContentFiltering} from '../content-filter/middleware.js';
import type {RawiCredentials, StreamingResponse} from '../shared/index.js';
import {
  generateWithAnthropic,
  generateWithAzure,
  generateWithBedrock,
  generateWithCerebras,
  generateWithDeepSeek,
  generateWithGoogle,
  generateWithLMStudio,
  generateWithMistral,
  generateWithOllama,
  generateWithOpenAI,
  generateWithXAI,
  streamWithAnthropic,
  streamWithAzure,
  streamWithBedrock,
  streamWithCerebras,
  streamWithDeepSeek,
  streamWithGoogle,
  streamWithLMStudio,
  streamWithMistral,
  streamWithOllama,
  streamWithOpenAI,
  streamWithXAI,
} from './index.js';
import type {ExecGenerationOptions, ExecGenerationResult} from './types.js';

export interface StreamResponseOptions {
  filtering?: {
    enabled: boolean;
    types?: string[];
    showFiltered?: boolean;
    highlightFiltered?: boolean;
  };
}

export const streamResponse = async (
  credentials: RawiCredentials,
  prompt: string,
  options: StreamResponseOptions = {},
): Promise<StreamingResponse> => {
  let streamFn: any;
  switch (credentials.provider) {
    case 'google':
      streamFn = streamWithGoogle;
      break;
    case 'ollama':
      streamFn = streamWithOllama;
      break;
    case 'anthropic':
      streamFn = streamWithAnthropic;
      break;
    case 'openai':
      streamFn = streamWithOpenAI;
      break;
    case 'xai':
      streamFn = streamWithXAI;
      break;
    case 'azure':
      streamFn = streamWithAzure;
      break;
    case 'bedrock':
      streamFn = streamWithBedrock;
      break;
    case 'lmstudio':
      streamFn = streamWithLMStudio;
      break;
    case 'deepseek':
      streamFn = streamWithDeepSeek;
      break;
    case 'mistral':
      streamFn = streamWithMistral;
      break;
    case 'cerebras':
      streamFn = streamWithCerebras;
      break;
    default:
      throw new Error(`Unsupported provider: ${credentials.provider}`);
  }

  if (options.filtering?.enabled) {
    const filter = new ContentFilter({
      enabled: true,
      types: options.filtering.types,
      showFiltered: options.filtering.showFiltered,
      highlightFiltered: options.filtering.highlightFiltered,
    });

    const filteredStreamFn = applyContentFiltering(streamFn, {
      filter,
      logStats: false,
    });

    return await filteredStreamFn(credentials, prompt);
  }

  return await streamFn(credentials, prompt);
};

export const processQuery = async (
  credentials: RawiCredentials,
  prompt: string,
  options: {
    streaming?: boolean;
    onChunk?: (chunk: string) => void;
    onComplete?: (fullResponse: string) => void;
    onError?: (error: Error) => void;
    filtering?: {
      enabled: boolean;
      types?: string[];
      showFiltered?: boolean;
      highlightFiltered?: boolean;
    };
  } = {},
): Promise<string> => {
  try {
    const streamingResponse = await streamResponse(credentials, prompt, {
      filtering: options.filtering,
    });
    let fullResponse = '';

    for await (const chunk of streamingResponse.textStream) {
      fullResponse += chunk;
      if (options.onChunk) {
        options.onChunk(chunk);
      }
    }

    if (options.onComplete) {
      options.onComplete(fullResponse);
    }

    return fullResponse;
  } catch (error) {
    if (options.onError) {
      options.onError(
        error instanceof Error ? error : new Error(String(error)),
      );
    }
    throw error;
  }
};

export async function generateWithProvider(
  options: ExecGenerationOptions,
): Promise<ExecGenerationResult> {
  const providerName = options.credentials.provider;

  switch (providerName) {
    case 'ollama': {
      return generateWithOllama(options);
    }
    case 'openai': {
      return generateWithOpenAI(options);
    }
    case 'anthropic': {
      return generateWithAnthropic(options);
    }
    case 'google': {
      return generateWithGoogle(options);
    }
    case 'xai': {
      return generateWithXAI(options);
    }
    case 'deepseek': {
      return generateWithDeepSeek(options);
    }
    case 'mistral': {
      return generateWithMistral(options);
    }
    case 'cerebras': {
      return generateWithCerebras(options);
    }
    case 'lmstudio': {
      return generateWithLMStudio(options);
    }
    case 'azure': {
      return generateWithAzure(options);
    }
    case 'bedrock': {
      return generateWithBedrock(options);
    }
    default:
      throw new Error(`Unsupported provider for exec: ${providerName}`);
  }
}
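The recovered utils.ts is the dispatch layer every query goes through: streamResponse selects the provider-specific stream function (optionally wrapping it in content filtering), and processQuery consumes the resulting textStream, exposing chunk/complete/error callbacks before resolving with the full text. A minimal caller might look like the sketch below; the import paths, credential fields, model name, and prompt are illustrative assumptions based on the signatures above, not documented usage of the published package.

// Hypothetical caller module; the relative import paths mirror the recovered
// source tree and are assumptions, not a documented public API.
import type {RawiCredentials} from './core/shared/index.js';
import {processQuery} from './core/providers/utils.js';

async function main(): Promise<void> {
  // Illustrative credentials: the full RawiCredentials shape is defined in the
  // package's shared types and likely needs provider-specific fields (API keys,
  // base URLs, etc.); 'ollama' is used here because it runs locally.
  const credentials = {
    provider: 'ollama',
    model: 'llama3',
  } as unknown as RawiCredentials;

  // processQuery drives streamResponse: it forwards each streamed chunk to
  // onChunk and resolves with the concatenated response once the stream ends.
  const answer = await processQuery(credentials, 'Explain this stack trace', {
    onChunk: (chunk) => process.stdout.write(chunk),
    onComplete: () => process.stdout.write('\n'),
    onError: (error) => console.error('stream failed:', error.message),
  });

  console.log(`received ${answer.length} characters`);
}

main();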