@just-every/ensemble
LLM provider abstraction layer with unified streaming interface
import { OpenAIChat } from './openai_chat.js';

// xAI's Grok API is OpenAI-compatible, so this provider reuses OpenAIChat
// and only customizes how request parameters are prepared.
export class GrokProvider extends OpenAIChat {
    constructor() {
        super('xai', process.env.XAI_API_KEY, 'https://api.x.ai/v1');
    }

    prepareParameters(requestParams) {
        // Grok exposes web search as a native request option rather than a tool:
        // remove the grok_web_search function tool and enable search_parameters instead.
        if (Array.isArray(requestParams.tools)) {
            const index = requestParams.tools.findIndex(
                t => t.type === 'function' && t.function?.name === 'grok_web_search'
            );
            if (index !== -1) {
                requestParams.tools.splice(index, 1);
                requestParams.search_parameters = {
                    mode: 'on',
                    return_citations: true,
                };
            }
        }
        return super.prepareParameters(requestParams);
    }
}

export const grokProvider = new GrokProvider();
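For illustration, a minimal sketch of how the override behaves when a request carries the grok_web_search tool. The request shape, model id, and the get_weather tool are assumptions for this example, and super.prepareParameters (inherited from OpenAIChat) may apply further adjustments of its own:

// Hypothetical request: the model id and tool definitions are illustrative only.
const requestParams = {
    model: 'grok-2-latest', // assumed model id, not taken from the library
    messages: [{ role: 'user', content: 'Summarize today\'s AI news.' }],
    tools: [
        { type: 'function', function: { name: 'grok_web_search', parameters: {} } },
        { type: 'function', function: { name: 'get_weather', parameters: {} } },
    ],
};

const prepared = grokProvider.prepareParameters(requestParams);
// After preparation:
// - the grok_web_search entry has been removed from prepared.tools
// - prepared.search_parameters is { mode: 'on', return_citations: true }
// - unrelated tools such as get_weather are passed through unchanged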