@convex-dev/agent
Version:
An agent component for Convex.
93 lines • 3.84 kB
JavaScript
import { streamText as streamTextAi } from "ai";
import { compressUIMessageChunks, DeltaStreamer, mergeTransforms, } from "./streaming.js";
import { startGeneration } from "./start.js";
import { getModelName, getProviderName } from "../shared.js";
import { errorToString, willContinue } from "./utils.js";
/**
 * This behaves like {@link streamText} from the "ai" package except that
 * it adds context based on the userId and threadId and saves the input and
 * resulting messages to the thread, if specified.
 * Use {@link continueThread} to get a version of this function already scoped
 * to a thread (and optionally userId).
 */
export async function streamText(ctx, component,
/**
 * Arguments for the underlying "ai" streamText call, along with the Agent
 * prompt options.
 */
streamTextArgs,
/**
 * The {@link ContextOptions} and {@link StorageOptions} controlling how
 * contextual messages are fetched and how input/output messages are saved.
 */
options) {
    const threadId = options?.threadId;
    // Resolve the model arguments, user/thread bookkeeping, and the
    // save/fail/update callbacks for this generation.
    const { args, userId, order, stepOrder, promptMessageId, ...call } = await startGeneration(ctx, component, streamTextArgs, options);
    const steps = [];
    // Stream deltas to the database only when there is a thread to attach
    // them to and the caller opted in via saveStreamDeltas.
    let streamer;
    if (threadId && options.saveStreamDeltas) {
        const deltaConfig = typeof options.saveStreamDeltas === "object"
            ? options.saveStreamDeltas
            : undefined;
        streamer = new DeltaStreamer(component, ctx, {
            throttleMs: deltaConfig?.throttleMs,
            onAsyncAbort: call.fail,
            compress: compressUIMessageChunks,
            abortSignal: args.abortSignal,
        }, {
            threadId,
            userId,
            agentName: options?.agentName,
            model: getModelName(args.model),
            provider: getProviderName(args.model),
            providerOptions: args.providerOptions,
            format: "UIMessageChunk",
            order,
            stepOrder,
        });
    }
    const result = streamTextAi({
        ...args,
        // Prefer the streamer's abort controller so database-side aborts
        // also cancel the generation.
        abortSignal: streamer?.abortController.signal ?? args.abortSignal,
        experimental_transform: mergeTransforms(options?.saveStreamDeltas, streamTextArgs.experimental_transform),
        onError: async (error) => {
            console.error("onError", error);
            const message = errorToString(error.error);
            // Record the failure on the generation first, then on the
            // delta stream, before delegating to the caller's handler.
            await call.fail(message);
            await streamer?.fail(message);
            return streamTextArgs.onError?.(error);
        },
        prepareStep: async (stepOptions) => {
            const prepared = await streamTextArgs.prepareStep?.(stepOptions);
            if (!prepared) {
                return undefined;
            }
            // The caller may have swapped the model for this step; keep the
            // saved metadata in sync with whichever model will actually run.
            call.updateModel(prepared.model ?? stepOptions.model);
            // NOTE: per-step streamer metadata updates (model/provider/
            // providerOptions) are currently disabled.
            return prepared;
        },
        onStepFinish: async (step) => {
            steps.push(step);
            const createPendingMessage = await willContinue(steps, args.stopWhen);
            await call.save({ step }, createPendingMessage);
            return args.onStepFinish?.(step);
        },
    });
    const stream = streamer?.consumeStream(result.toUIMessageStream());
    const deltas = options?.saveStreamDeltas;
    // Unless the caller asked to return immediately, wait until the whole
    // stream has been consumed (and all deltas persisted) before returning.
    if (deltas === true ||
        (typeof deltas === "object" && !deltas.returnImmediately)) {
        await stream;
        await result.consumeStream();
    }
    return Object.assign(result, {
        promptMessageId,
        order,
        savedMessages: call.getSavedMessages(),
        // Kept alongside promptMessageId — presumably a back-compat alias;
        // verify against callers before removing.
        messageId: promptMessageId,
    });
}
//# sourceMappingURL=streamText.js.map