// @clearcompass-ai/llm-spec — type declarations
// Version: (not captured in this snapshot)
// A Vercel AI SDK provider for the LARS agent-based backend.
// 53 lines (52 loc) • 1.94 kB — TypeScript
import { LanguageModelV2, ProviderV2 } from '@ai-sdk/provider';
/**
 * Identifier for a LARS domain expert model.
 *
 * The `(string & {})` intersection is the standard trick to keep editor
 * autocomplete for the known literals ('caselaw', 'puzzle_solver') while
 * still accepting any other string, so new backend models work without a
 * type-package release.
 */
export type LarsModelId = 'caselaw' | 'puzzle_solver' | (string & {});
/**
 * The LARS provider. It conforms to the ProviderV2 interface and offers
 * a `languageModel` method to access specific LARS domain experts.
 *
 * The provider is also directly callable: `lars('caselaw')` is shorthand
 * for `lars.languageModel('caselaw')` (see the call signature below).
 */
export interface LarsProvider extends ProviderV2 {
  /**
   * Callable shorthand for {@link LarsProvider.languageModel}.
   * @param modelId The model ID, e.g., "caselaw".
   * @returns A `LanguageModelV2` instance.
   */
  (modelId: LarsModelId): LanguageModelV2;
  /**
   * Creates a new language model for text generation.
   * @param modelId The model ID, e.g., "caselaw".
   * @returns A `LanguageModelV2` instance.
   */
  languageModel: (modelId: LarsModelId) => LanguageModelV2;
}
/**
 * Defines the configuration settings for the LARS custom provider.
 * This allows for setting a global base URL for the API.
 */
export interface LarsProviderSettings {
  /**
   * The base URL of the LARS backend API.
   * If not provided, it will default to the `LARS_API_BASE_URL`
   * environment variable or 'https://api.clearcompass.so' for production.
   * (Fallback order per this doc — resolution happens at runtime in
   * `createLars`, whose implementation is not visible here.)
   */
  baseURL?: string;
}
/**
 * Creates a new instance of the LARS provider.
 * This factory function allows for custom configuration, such as setting a
 * different base URL for the API, which is useful for different environments
 * (e.g., staging vs. production).
 *
 * Ambient declaration only — the implementation ships in the package's
 * JavaScript build, not in this file.
 *
 * @param settings - Optional configuration for the provider. Omitting it
 *                   yields the same defaults as the {@link lars} singleton.
 * @returns A Vercel AI SDK-compliant provider for LARS models.
 */
export declare function createLars(settings?: LarsProviderSettings): LarsProvider;
/**
 * The default, singleton instance of the LARS provider.
 * This is the easiest way for applications to get started, as it
 * requires no configuration and will use default settings
 * (equivalent to calling `createLars()` with no arguments).
 *
 * @example
 * import { lars } from '@clearcompass-ai/llm-spec';
 * import { streamText } from 'ai';
 *
 * const { textStream } = await streamText({
 *   model: lars('caselaw'), // or lars.languageModel('caselaw')
 *   // ...
 * });
 */
export declare const lars: LarsProvider;