@lobehub/chat
Lobe Chat - an open-source, high-performance chatbot framework with support for speech synthesis, multimodal interaction, and an extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.
import type { ChatModelCard } from '@/types/llm';

import { ModelProvider } from '../types';
import { createOpenAICompatibleRuntime } from '../utils/openaiCompatibleFactory';
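
// A model entry as returned by the InternLM models endpoint; only the id is used below.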
export interface InternLMModelCard {
  id: string;
}
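
// OpenAI-compatible runtime for InternLM (Puyu), built with the shared factory and pointed at the official API endpoint.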
export const LobeInternLMAI = createOpenAICompatibleRuntime({
  baseURL: 'https://internlm-chat.intern-ai.org.cn/puyu/api/v1',
  chatCompletion: {
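    // Only stream when the payload carries no tools; with tools present the response is returned non-streamed.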
    handlePayload: (payload) => {
      return {
        ...payload,
        stream: !payload.tools,
      } as any;
    },
  },
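  // Toggle chat-completion debug output via the DEBUG_INTERNLM_CHAT_COMPLETION env var.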
  debug: {
    chatCompletion: () => process.env.DEBUG_INTERNLM_CHAT_COMPLETION === '1',
  },
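  // Fetch the provider's model list and enrich each entry with metadata from the built-in catalogue.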
  models: async ({ client }) => {
    const { LOBE_DEFAULT_MODEL_LIST } = await import('@/config/aiModels');
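
    // Heuristics: model ids containing these substrings are assumed to support function calling / vision.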
    const functionCallKeywords = ['internlm'];
    const visionKeywords = ['internvl'];
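
    // Read the raw model entries from the list response; the OpenAI-style client returns them under `data`.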
    const modelsPage = (await client.models.list()) as any;
    const modelList: InternLMModelCard[] = modelsPage.data;

    return modelList
      .map((model) => {
        // Reuse display name, context window and abilities from the default model list when the id matches.
        const knownModel = LOBE_DEFAULT_MODEL_LIST.find(
          (m) => model.id.toLowerCase() === m.id.toLowerCase(),
        );

        return {
          contextWindowTokens: knownModel?.contextWindowTokens ?? undefined,
          displayName: knownModel?.displayName ?? undefined,
          enabled: knownModel?.enabled || false,
          functionCall:
            functionCallKeywords.some((keyword) => model.id.toLowerCase().includes(keyword)) ||
            knownModel?.abilities?.functionCall ||
            false,
          id: model.id,
          reasoning: knownModel?.abilities?.reasoning || false,
          vision:
            visionKeywords.some((keyword) => model.id.toLowerCase().includes(keyword)) ||
            knownModel?.abilities?.vision ||
            false,
        };
      })
      .filter(Boolean) as ChatModelCard[];
  },
  provider: ModelProvider.InternLM,
});
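
/**
 * Example usage (a minimal sketch, not part of the runtime): how a runtime produced by
 * createOpenAICompatibleRuntime is typically instantiated and called. The constructor
 * options, the `chat` payload shape, and the model id below are assumptions for
 * illustration; check them against the factory's actual types before relying on them.
 *
 *   const internlm = new LobeInternLMAI({ apiKey: process.env.INTERNLM_API_KEY });
 *
 *   const response = await internlm.chat({
 *     messages: [{ content: 'Hello', role: 'user' }],
 *     model: 'internlm2.5-latest', // placeholder model id
 *     temperature: 0.7,
 *   });
 */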