/**
 * @yuxilabs/gptp-core
 * Version:
 * Core validation, formatting and execution logic for the GPTP file format.
 * 45 lines (44 loc) • 1.86 kB
 * JavaScript
 */
import { callOpenAI } from '@/adapters/openai/openaiRunner';
import { callMicrosoftAzureOpenAI } from '@/adapters/microsoft/microsoftAzureOpenaiRunner';
import { callAnthropic } from '@/adapters/anthropic/anthropicRunner';
import { callMistral } from '@/adapters/mistral/mistralRunner';
import { callCohere } from '@/adapters/cohere/cohereRunner';
import { callMetaLlama } from '@/adapters/meta/metaLlamaRunner';
import { callLocal } from '@/adapters/local/localRunner';
/**
 * Resolves which model-provider runner should execute a prompt.
 *
 * Reads `prompt.connections.active` and looks it up in
 * `prompt.connections.providers`. When no usable active provider is
 * configured, or its declared type is unrecognized, falls back to OpenAI.
 *
 * @param {object} prompt - Parsed GPTP prompt; may carry a `connections`
 *   section with `providers` (name → config) and `active` (provider name).
 * @returns {{name: string, type: string, call: Function}} The provider's
 *   name, resolved type, and the adapter function that executes the call.
 */
export function selectProvider(prompt) {
  const providers = prompt.connections?.providers || {};
  const activeName = prompt.connections?.active;

  // Runner dispatch table keyed by provider type.
  const runners = {
    openai: callOpenAI,
    microsoft: callMicrosoftAzureOpenAI,
    anthropic: callAnthropic,
    mistral: callMistral,
    cohere: callCohere,
    meta: callMetaLlama,
    local: callLocal,
  };

  // No active provider configured (or it is missing from the providers
  // map): default to openai.
  if (!activeName || !providers[activeName]) {
    return { name: 'openai', type: 'openai', call: callOpenAI };
  }

  const cfg = providers[activeName];
  // A provider entry may omit `type`, in which case its name doubles as
  // its type (e.g. a provider literally named "anthropic").
  const type = cfg.type || activeName;
  const runner = runners[type];
  if (runner) {
    return { name: activeName, type, call: runner };
  }

  // Unrecognized type: keep the configured name but run through OpenAI.
  return { name: activeName, type: 'openai', call: callOpenAI };
}
/**
 * Normalizes user-supplied generation parameters into the canonical
 * request shape, filling in defaults for anything left unset.
 *
 * Fix: `model` previously used `||`, which (unlike the `??` used for the
 * numeric fields) would discard any explicitly provided falsy value and
 * was inconsistent with the other defaults. All four fields now treat
 * only `null`/`undefined` as "unset".
 *
 * @param {{model?: string, temperature?: number, top_p?: number,
 *   max_tokens?: number}} [params] - Raw parameters; may be null/undefined.
 * @returns {{model: string, temperature: number, top_p: number,
 *   max_tokens: number}} Fully-populated parameter object.
 */
export function mapParams(params) {
  return {
    model: params?.model ?? 'gpt-4',
    temperature: params?.temperature ?? 0.7,
    top_p: params?.top_p ?? 1,
    max_tokens: params?.max_tokens ?? 512,
  };
}