/**
 * @mem0/vercel-ai-provider
 * Vercel AI Provider for providing memory to LLMs.
 * TypeScript type declarations (101 lines / 94 loc, 4.24 kB).
 */
import { ProviderV2, LanguageModelV2, LanguageModelV2CallOptions, LanguageModelV2Prompt } from '@ai-sdk/provider';
import { OpenAIProviderSettings } from '@ai-sdk/openai';
import { AnthropicProviderSettings } from '@ai-sdk/anthropic';
import { CohereProviderSettings } from '@ai-sdk/cohere';
import { GroqProviderSettings } from '@ai-sdk/groq';
/**
 * The Mem0 provider object. It is directly callable as a factory
 * (`provider('model-id')`) and also exposes named factory methods;
 * all four call shapes produce a `LanguageModelV2`.
 */
interface Mem0Provider extends ProviderV2 {
/** Call signature — shorthand for creating a language model by id. */
(modelId: Mem0ChatModelId, settings?: Mem0ChatSettings): LanguageModelV2;
/** Creates a chat-mode language model (see `modelType` in the settings). */
chat(modelId: Mem0ChatModelId, settings?: Mem0ChatSettings): LanguageModelV2;
/** Creates a completion-mode language model. */
completion(modelId: Mem0ChatModelId, settings?: Mem0ChatSettings): LanguageModelV2;
/** `ProviderV2` contract method; returns a language model for the given id. */
languageModel(modelId: Mem0ChatModelId, settings?: Mem0ChatSettings): LanguageModelV2;
}
/**
 * Options accepted by `createMem0` for configuring the provider as a whole.
 * Request-level options live in `Mem0ConfigSettings` instead.
 */
interface Mem0ProviderSettings {
/** Base URL for API requests — NOTE(review): presumably overrides the underlying LLM provider's endpoint; confirm. */
baseURL?: string;
/**
 * Custom fetch implementation. You can use it as a middleware to intercept
 * requests or to provide a custom fetch implementation for e.g. testing.
 */
fetch?: typeof fetch;
/**
 * Custom headers to include in the requests.
 */
headers?: Record<string, string>;
/** Provider name — assumption: used as a display/identifier label; verify against implementation. */
name?: string;
/** API key for the Mem0 platform (distinct from the LLM provider's `apiKey`). */
mem0ApiKey?: string;
/** API key forwarded to the underlying LLM provider. */
apiKey?: string;
/** Which underlying LLM provider to use — presumably e.g. "openai" | "anthropic" | "cohere" | "groq", matching the imported settings types; confirm. */
provider?: string;
/** Whether models are created in completion or chat mode. */
modelType?: "completion" | "chat";
/** Default Mem0 memory configuration applied to requests. */
mem0Config?: Mem0Config;
/**
 * The configuration for the provider.
 */
config?: LLMProviderSettings;
}
/**
 * Creates a Mem0 provider instance with the given settings.
 * @param options - Provider-level configuration (API keys, base URL, defaults).
 * @returns A callable {@link Mem0Provider}.
 */
declare function createMem0(options?: Mem0ProviderSettings): Mem0Provider;
/** Default provider instance — presumably `createMem0()` with default options; confirm against implementation. */
declare const mem0: Mem0Provider;
/**
 * Model identifier. `string & NonNullable<unknown>` is the `string & {}`
 * idiom: the type stays assignable from any string while editors can still
 * offer literal suggestions if this is later unioned with known model ids.
 */
type Mem0ChatModelId = (string & NonNullable<unknown>);
/**
 * Per-request Mem0 memory options. Field names mirror the Mem0 REST API's
 * snake_case parameters — NOTE(review): semantics below are inferred from
 * the Mem0 platform API; confirm against its documentation.
 */
interface Mem0ConfigSettings {
/** Scopes memories to a specific end user. */
user_id?: string;
/** Scopes memories to an application. */
app_id?: string;
/** Scopes memories to an agent. */
agent_id?: string;
/** Scopes memories to a single run/session. */
run_id?: string;
/** Organization name — presumably deprecated in favor of `org_id`; verify. */
org_name?: string;
/** Project name — presumably deprecated in favor of `project_id`; verify. */
project_name?: string;
/** Organization identifier. */
org_id?: string;
/** Project identifier. */
project_id?: string;
/** Arbitrary metadata attached to stored memories. */
metadata?: Record<string, any>;
/** Filters applied when searching/retrieving memories. */
filters?: Record<string, any>;
/** Whether Mem0 should infer memories from raw messages. */
infer?: boolean;
/** Pagination: page number. */
page?: number;
/** Pagination: results per page. */
page_size?: number;
/** Mem0 API key override for this request. */
mem0ApiKey?: string;
/** Maximum number of memories to retrieve. */
top_k?: number;
/** Relevance score cutoff for retrieved memories. */
threshold?: number;
/** Whether to rerank search results. */
rerank?: boolean;
/** Whether to enable graph-based memory. */
enable_graph?: boolean;
/** Response format selector — assumption: a Mem0 API version string like "v1.1"; confirm. */
output_format?: string;
/** Whether to filter retrieved memories — TODO confirm exact behavior. */
filter_memories?: boolean;
}
/** Combined per-request memory options and provider-level settings, as consumed by the language model. */
interface Mem0ChatConfig extends Mem0ConfigSettings, Mem0ProviderSettings {
}
/**
 * Union-of-shapes settings object accepted for the underlying LLM provider.
 * Intersecting all four vendor settings types means any individual vendor's
 * options are assignable here.
 */
interface LLMProviderSettings extends OpenAIProviderSettings, AnthropicProviderSettings, CohereProviderSettings, GroqProviderSettings {
}
/** Alias of {@link Mem0ConfigSettings} used for the provider-level `mem0Config` default. */
interface Mem0Config extends Mem0ConfigSettings {
}
/** Per-model settings passed to the factory methods; currently identical to {@link Mem0ConfigSettings}. */
interface Mem0ChatSettings extends Mem0ConfigSettings {
}
/**
 * `LanguageModelV2` implementation that augments prompts with Mem0 memories
 * before delegating to the configured underlying LLM provider — NOTE(review):
 * delegation behavior inferred from the package description; confirm in source.
 */
declare class Mem0GenericLanguageModel implements LanguageModelV2 {
/** Model identifier this instance was created with. */
readonly modelId: Mem0ChatModelId;
/** Per-model settings supplied at creation time. */
readonly settings: Mem0ChatSettings;
/** Combined request + provider configuration. */
readonly config: Mem0ChatConfig;
/** Optional provider-level settings override. */
readonly provider_config?: Mem0ProviderSettings | undefined;
/** AI SDK specification version implemented by this class. */
readonly specificationVersion = "v2";
/** Object-generation mode advertised to the AI SDK. */
readonly defaultObjectGenerationMode = "json";
/** Image URLs are not supported by this model wrapper. */
readonly supportsImageUrls = false;
/** URL patterns the model can consume natively, keyed by media type. */
readonly supportedUrls: Record<string, RegExp[]>;
constructor(modelId: Mem0ChatModelId, settings: Mem0ChatSettings, config: Mem0ChatConfig, provider_config?: Mem0ProviderSettings | undefined);
/** Name of the underlying LLM provider in use. */
provider: string;
/** Internal: injects retrieved memories into the prompt — TODO confirm exact transformation. */
private processMemories;
/** Non-streaming generation; same contract as `LanguageModelV2.doGenerate`. */
doGenerate(options: LanguageModelV2CallOptions): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>>;
/** Streaming generation; same contract as `LanguageModelV2.doStream`. */
doStream(options: LanguageModelV2CallOptions): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>>;
}
/**
 * Class-based entry point mirroring the provider's `chat`/`completion`
 * factory methods for callers who prefer instantiation over `createMem0`.
 */
declare class Mem0 {
/** Base URL used for API requests. */
readonly baseURL: string;
/** Headers sent with each request — typed `any` upstream; treat as `Record<string, string>` in practice (unverified). */
readonly headers?: any;
constructor(options?: Mem0ProviderSettings);
/** Internal: shared configuration passed to created models. */
private get baseConfig();
/** Creates a chat-mode {@link Mem0GenericLanguageModel}. */
chat(modelId: Mem0ChatModelId, settings?: Mem0ChatSettings): Mem0GenericLanguageModel;
/** Creates a completion-mode {@link Mem0GenericLanguageModel}. */
completion(modelId: Mem0ChatModelId, settings?: Mem0ChatSettings): Mem0GenericLanguageModel;
}
/** Stores memories extracted from the given prompt messages in Mem0 — resolved value is untyped (`any`) upstream. */
declare const addMemories: (messages: LanguageModelV2Prompt, config?: Mem0ConfigSettings) => Promise<any>;
/** Retrieves relevant memories for a prompt, rendered as a single string (e.g. for system-prompt injection). */
declare const retrieveMemories: (prompt: LanguageModelV2Prompt | string, config?: Mem0ConfigSettings) => Promise<string>;
/** Retrieves relevant memories for a prompt as structured data — shape untyped (`any`) upstream. */
declare const getMemories: (prompt: LanguageModelV2Prompt | string, config?: Mem0ConfigSettings) => Promise<any>;
/** Searches stored memories for a prompt — presumably distinct from `getMemories` by using Mem0's search endpoint; confirm. */
declare const searchMemories: (prompt: LanguageModelV2Prompt | string, config?: Mem0ConfigSettings) => Promise<any>;
// Public API surface of the package: provider factories, memory helpers, and their option types.
export { Mem0, type Mem0ChatConfig, type Mem0ChatSettings, type Mem0ConfigSettings, type Mem0Provider, type Mem0ProviderSettings, addMemories, createMem0, getMemories, mem0, retrieveMemories, searchMemories };