/**
 * @clearcompass-ai/llm-spec
 *
 * A Vercel AI SDK provider for the LARS agent-based backend.
 */
;
// Mark this CommonJS module as an ES-module interop target (standard
// TypeScript-compiler emit) so `import` consumers get correct bindings.
Object.defineProperty(exports, "__esModule", { value: true });
// Pre-declare the named exports; `lars` is assigned its real value at the
// bottom of the file, after `createLars` is defined.
exports.lars = void 0;
exports.createLars = createLars;
const provider_1 = require("@ai-sdk/provider");
const lars_language_model_1 = require("./lars-language-model");
/**
 * Creates a new instance of the LARS provider.
 * This factory function allows for custom configuration, such as setting a
 * different base URL for the API, which is useful for different environments
 * (e.g., staging vs. production).
 *
 * @param {{ baseURL?: string }} [settings] - Optional configuration for the provider.
 * @returns A Vercel AI SDK-compliant (ProviderV2) provider for LARS models.
 * @throws {NoSuchModelError} When an unknown model id is requested, or when an
 *   embedding/image model is requested (LARS exposes neither).
 */
function createLars(settings = {}) {
    // This map holds the instantiated language models for each domain expert.
    const languageModels = {
        caselaw: new lars_language_model_1.LarsLanguageModel({
            modelId: 'caselaw',
            baseURL: settings.baseURL,
        }),
        puzzle_solver: new lars_language_model_1.LarsLanguageModel({
            modelId: 'puzzle_solver',
            baseURL: settings.baseURL,
        }),
    };
    // Single lookup shared by the callable form and `.languageModel()`, so the
    // not-found behavior cannot drift between the two entry points.
    const getLanguageModel = (modelId) => {
        const model = languageModels[modelId];
        if (!model) {
            throw new provider_1.NoSuchModelError({ modelId, modelType: 'languageModel' });
        }
        return model;
    };
    // Manually construct the provider object to conform to the ProviderV2 interface.
    const provider = (modelId) => getLanguageModel(modelId);
    provider.languageModel = getLanguageModel;
    // ProviderV2 also declares embedding and image model factories. LARS serves
    // neither, so reject such lookups with the SDK's canonical error instead of
    // leaving the methods undefined (which would crash callers with a TypeError).
    provider.textEmbeddingModel = (modelId) => {
        throw new provider_1.NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' });
    };
    provider.imageModel = (modelId) => {
        throw new provider_1.NoSuchModelError({ modelId, modelType: 'imageModel' });
    };
    return provider;
}
/**
 * The default, singleton instance of the LARS provider.
 * This is the easiest way for applications to get started, as it
 * requires no configuration and will use default settings
 * (no custom `baseURL` is passed, so the model's built-in default is used).
 *
 * @example
 * import { lars } from '@clearcompass-ai/llm-spec';
 * import { streamText } from 'ai';
 *
 * const { textStream } = await streamText({
 *   model: lars('caselaw'), // or lars.languageModel('caselaw')
 *   // ...
 * });
 */
exports.lars = createLars();