ai.libx.js
Unified API bridge for various AI models (LLMs, image/video generation, TTS, STT) - stateless, edge-compatible
import { BaseAdapter } from './base/BaseAdapter';
import { ChatOptions, ChatResponse, StreamChunk } from '../types';
import { handleProviderError, InvalidRequestError } from '../utils/errors';
import { contentToString } from '../utils/content-helpers';

/**
* Cloudflare Workers AI adapter
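 *
 * @example
 * // Hypothetical usage sketch: the constructor options mirror the fields this
 * // adapter reads (an API key plus cloudflareAccountId); the exact config shape
 * // is defined by BaseAdapter. The model ID is one of Cloudflare's catalog names.
 * const adapter = new CloudflareAdapter({
 *   apiKey: process.env.CLOUDFLARE_API_TOKEN,
 *   cloudflareAccountId: process.env.CLOUDFLARE_ACCOUNT_ID,
 * });
 * const reply = await adapter.chat({
 *   model: '@cf/meta/llama-3.1-8b-instruct',
 *   messages: [{ role: 'user', content: 'Hello!' }],
 * });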
*/
export class CloudflareAdapter extends BaseAdapter {
  get name(): string {
    return 'cloudflare';
  }
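
  /**
   * Send a chat request to Cloudflare Workers AI. Resolves to a full
   * ChatResponse, or to an AsyncIterable of StreamChunk objects when
   * options.stream is true.
   */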
  async chat(options: ChatOptions): Promise<ChatResponse | AsyncIterable<StreamChunk>> {
    try {
      const apiKey = this.getApiKey(options);

      // Cloudflare requires an account ID to build the endpoint URL
      const accountId = this.config.cloudflareAccountId || options.providerOptions?.cloudflareAccountId;
      if (!accountId) {
        throw new InvalidRequestError(
          'Cloudflare account ID is required. Pass it in the constructor or providerOptions.'
        );
      }

      const baseUrl = this.getBaseUrl(`https://api.cloudflare.com/client/v4/accounts/${accountId}/ai`);

      // Strip the provider prefix if present: `cloudflare/@cf/meta/llama-3.1-8b-instruct`
      // becomes the bare Cloudflare model ID `@cf/meta/llama-3.1-8b-instruct`
      const model = options.model.replace(/^cloudflare\//, '');

      const request: any = {
        messages: options.messages.map((msg) => ({
          role: msg.role,
          content: contentToString(msg.content),
        })),
        stream: options.stream || false,
      };

      // Add optional sampling parameters only when the caller set them
      if (options.temperature !== undefined) request.temperature = options.temperature;
      if (options.maxTokens !== undefined) request.max_tokens = options.maxTokens;
      if (options.topP !== undefined) request.top_p = options.topP;

      // Merge provider-specific options, dropping cloudflareAccountId: it is
      // routing information, not a model parameter
      if (options.providerOptions) {
        const { cloudflareAccountId, ...rest } = options.providerOptions;
        Object.assign(request, rest);
      }

      const response = await this.fetchWithErrorHandling(
        `${baseUrl}/run/${model}`,
        {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${apiKey}`,
          },
          body: JSON.stringify(request),
        },
        this.name
      );

      // Note: Cloudflare streaming support may vary by model
      if (options.stream) {
        return this.handleStreamResponse(response, model);
      }

      return this.handleNonStreamResponse(await response.json(), model);
    } catch (error) {
      // handleProviderError is expected to rethrow a normalized provider
      // error, so chat() never resolves with undefined
      handleProviderError(error, this.name);
    }
  }

  private handleNonStreamResponse(data: any, model: string): ChatResponse {
    // Cloudflare wraps results in its standard API envelope:
    // { result, success, errors, messages }; text models return the
    // generated text under result.response
    const result = data.result;
    if (!result) {
      throw new Error('No result in response');
    }
    const content = result.response || result.text || '';
    return {
      content,
      // The basic Workers AI schema does not report a finish reason,
      // so 'stop' is assumed
      finishReason: 'stop',
      model,
      raw: data,
    };
  }
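
  /**
   * Parse a streaming response. Cloudflare Workers AI streams server-sent
   * events, one `data: {"response": "..."}` payload per chunk, terminated
   * by `data: [DONE]`.
   */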
  private async *handleStreamResponse(response: Response, model: string): AsyncIterable<StreamChunk> {
    if (!response.body) {
      throw new Error('No response body for streaming');
    }
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = '';
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });

        // Split on newlines, keeping any partial event in the buffer
        // for the next read
        const lines = buffer.split('\n');
        buffer = lines.pop() ?? '';
        for (const line of lines) {
          const trimmed = line.trim();
          if (!trimmed.startsWith('data:')) continue;
          const payload = trimmed.slice(5).trim();
          if (payload === '[DONE]') return;
          try {
            const parsed = JSON.parse(payload);
            if (parsed.response) {
              yield { content: parsed.response };
            }
          } catch {
            // Ignore malformed chunks rather than aborting the stream
          }
        }
      }
    } finally {
      reader.releaseLock();
    }
  }
}