@lobehub/chat
Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal capabilities, and an extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.
import type { ChatModelCard } from '@/types/llm';

import { ModelProvider } from '../types';
import { createOpenAICompatibleRuntime } from '../utils/openaiCompatibleFactory';

export interface LMStudioModelCard {
  id: string;
}

export const LobeLMStudioAI = createOpenAICompatibleRuntime({
  // LM Studio's local server does not check API keys, but the OpenAI-compatible
  // client requires a non-empty value, so a placeholder is passed.
  apiKey: 'placeholder-to-avoid-error',
  // Default address of the LM Studio local server.
  baseURL: 'http://127.0.0.1:1234/v1',
  debug: {
    chatCompletion: () => process.env.DEBUG_LMSTUDIO_CHAT_COMPLETION === '1',
  },
  models: async ({ client }) => {
    const { LOBE_DEFAULT_MODEL_LIST } = await import('@/config/aiModels');

    const modelsPage = (await client.models.list()) as any;
    const modelList: LMStudioModelCard[] = modelsPage.data;

    return modelList
      .map((model) => {
        // Enrich the raw model id with metadata from the built-in model list
        // when a case-insensitive id match is found.
        const knownModel = LOBE_DEFAULT_MODEL_LIST.find(
          (m) => model.id.toLowerCase() === m.id.toLowerCase(),
        );

        return {
          contextWindowTokens: knownModel?.contextWindowTokens ?? undefined,
          displayName: knownModel?.displayName ?? undefined,
          enabled: knownModel?.enabled || false,
          functionCall: knownModel?.abilities?.functionCall || false,
          id: model.id,
          reasoning: knownModel?.abilities?.reasoning || false,
          vision: knownModel?.abilities?.vision || false,
        };
      })
      .filter(Boolean) as ChatModelCard[];
  },
  provider: ModelProvider.LMStudio,
});
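
For reference, a minimal usage sketch. It assumes that createOpenAICompatibleRuntime returns a runtime class whose instances expose a models() method, as with the other OpenAI-compatible runtimes in this package; the constructor options shown are illustrative, not the only supported form.

import { LobeLMStudioAI } from './index';

// Point the runtime at a locally running LM Studio server; this baseURL is
// the factory default above and can be overridden if the server runs elsewhere.
const runtime = new LobeLMStudioAI({ baseURL: 'http://127.0.0.1:1234/v1' });

// List the models served by LM Studio, enriched with metadata from
// LOBE_DEFAULT_MODEL_LIST where the ids match.
const models = await runtime.models();
console.log(models.map((m) => m.id));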