/**
 * @directus/api — Directus is a real-time API and App dashboard for managing
 * SQL database content.
 * (Package listing metadata: 46 lines, 45 loc, 2.28 kB, JavaScript.)
 */
import { ServiceUnavailableError } from '@directus/errors';
import { convertToModelMessages, stepCountIs, streamText, } from 'ai';
import { buildProviderConfigs, createAIProviderRegistry, getProviderOptions, } from '../../providers/index.js';
import { SYSTEM_PROMPT } from '../constants/system-prompt.js';
import { formatContextForSystemPrompt } from '../utils/format-context.js';
/**
 * Create a streaming LLM text response for a UI chat session.
 *
 * @param {Array} messages - UI-format chat messages; converted to model messages before streaming.
 * @param {Object} options
 * @param {string} options.provider - LLM provider key; must have a configured API key in `aiSettings`.
 * @param {string} options.model - Model identifier, resolved via the provider registry as `provider:model`.
 * @param {Object} options.tools - Tool definitions passed through to `streamText`.
 * @param {Object} options.aiSettings - Settings used to build provider configs and provider options.
 * @param {string} [options.systemPrompt] - Overrides the default SYSTEM_PROMPT when provided.
 * @param {Object} [options.context] - Optional context attachments appended to the system prompt.
 * @param {Function} [options.onUsage] - Receives `{ inputTokens, outputTokens, totalTokens }` when the stream finishes.
 * @returns {Promise<Object>} The `streamText` result object.
 * @throws {ServiceUnavailableError} When no provider config exists for `provider`.
 */
export const createUiStream = async (messages, { provider, model, tools, aiSettings, systemPrompt, context, onUsage }) => {
	const providerConfigs = buildProviderConfigs(aiSettings);
	const hasProviderConfig = providerConfigs.some((config) => config.type === provider);

	if (!hasProviderConfig) {
		throw new ServiceUnavailableError({ service: provider, reason: 'No API key configured for LLM provider' });
	}

	const registry = createAIProviderRegistry(providerConfigs, aiSettings);
	const basePrompt = systemPrompt || SYSTEM_PROMPT;
	const attachedContext = context ? formatContextForSystemPrompt(context) : null;

	// Concatenated once up front so the per-step prepareStep callback below
	// does not rebuild the string on every step.
	const promptWithContext = attachedContext ? basePrompt + attachedContext : basePrompt;

	const modelMessages = await convertToModelMessages(messages);

	return streamText({
		system: basePrompt,
		model: registry.languageModel(`${provider}:${model}`),
		messages: modelMessages,
		stopWhen: [stepCountIs(10)],
		providerOptions: getProviderOptions(provider, model, aiSettings),
		tools,
		/**
		 * Runs before each AI step. When context attachments exist, the system
		 * prompt is overridden with the context-augmented version so that all
		 * steps — including tool continuation steps — receive the full context,
		 * while the top-level `system` above stays simple.
		 */
		prepareStep: () => (attachedContext ? { system: promptWithContext } : {}),
		onFinish({ usage }) {
			// Forward token counts to the caller for usage accounting.
			if (onUsage) {
				const { inputTokens, outputTokens, totalTokens } = usage;
				onUsage({ inputTokens, outputTokens, totalTokens });
			}
		},
	});
};