i18n-ai-translate
Use LLMs to translate your i18n JSON to any language.
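The "i18n JSON" here means the locale resource files common to web apps: one file per language, mapping translation keys to strings. A minimal, purely illustrative sketch of the kind of input and output involved (the key names and file layout below are not taken from this package):

en.json (source locale):
{ "greeting": "Hello", "farewell": "Goodbye" }

fr.json (target locale, produced by the LLM translation):
{ "greeting": "Bonjour", "farewell": "Au revoir" }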
64 lines • 2.4 kB
JavaScript
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const generative_ai_1 = require("@google/generative-ai");
const sdk_1 = require("@anthropic-ai/sdk");
const ollama_1 = require("ollama");
const anthropic_1 = __importDefault(require("./anthropic"));
const chatgpt_1 = __importDefault(require("./chatgpt"));
const engine_1 = __importDefault(require("../enums/engine"));
const gemini_1 = __importDefault(require("./gemini"));
const ollama_2 = __importDefault(require("./ollama"));
const openai_1 = __importDefault(require("openai"));
// Factory: builds the provider SDK client for the selected engine, wraps it
// in the matching chat implementation, and opens the chat before returning.
class ChatFactory {
    static newChat(engine, model, rateLimiter, apiKey, host) {
        let chat;
        let params;
        switch (engine) {
            case engine_1.default.Gemini: {
                // Gemini needs a model handle created from the API key first.
                const genAI = new generative_ai_1.GoogleGenerativeAI(apiKey);
                const geminiModel = genAI.getGenerativeModel({ model });
                chat = new gemini_1.default(geminiModel, rateLimiter);
                params = {
                    history: [],
                };
                break;
            }
            case engine_1.default.ChatGPT: {
                const openAI = new openai_1.default({ apiKey: apiKey });
                chat = new chatgpt_1.default(openAI, rateLimiter);
                params = {
                    messages: [],
                    model,
                };
                break;
            }
            case engine_1.default.Ollama: {
                // Ollama is addressed by host URL rather than an API key.
                const llama = new ollama_1.Ollama({ host });
                chat = new ollama_2.default(llama);
                params = {
                    messages: [],
                    model,
                };
                break;
            }
            case engine_1.default.Claude: {
                const anthropic = new sdk_1.Anthropic({ apiKey: apiKey });
                chat = new anthropic_1.default(anthropic, rateLimiter);
                params = {
                    messages: [],
                    model,
                };
                break;
            }
            default:
                throw new Error("Invalid engine");
        }
        chat.startChat(params);
        return chat;
    }
}
exports.default = ChatFactory;
//# sourceMappingURL=chat_factory.js.map
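For orientation, here is a hedged sketch of how this factory might be called from elsewhere in the package. Only newChat's parameter list (engine, model, rateLimiter, apiKey, host), the engine names, and the default export come from the file above; the require paths, the model string, and the rateLimiter placeholder are illustrative assumptions, not verified i18n-ai-translate internals.

JavaScript
// Usage sketch only: the paths and the rate limiter below are assumptions.
const ChatFactory = require("./chat_factory").default;
const Engine = require("../enums/engine").default;

// The caller supplies some rate limiter shared across requests; its
// interface is not visible in this file, so a placeholder stands in here.
const rateLimiter = {};

// ChatGPT engine: apiKey is required; host is only used by the Ollama case.
const chat = ChatFactory.newChat(
    Engine.ChatGPT,
    "gpt-4o-mini",               // model name forwarded to the OpenAI SDK
    rateLimiter,
    process.env.OPENAI_API_KEY,  // apiKey
    undefined,                   // host (Ollama only)
);
// newChat() has already called chat.startChat(params), so the returned chat
// is ready to receive translation prompts.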