
i18n-ai-translate

Version: 3.3.3

Use LLMs to translate your i18n JSON to any language.

"use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.CLI_HELP = exports.DEFAULT_MODEL = exports.FLATTEN_DELIMITER = exports.DEFAULT_TEMPLATED_STRING_SUFFIX = exports.DEFAULT_TEMPLATED_STRING_PREFIX = exports.VERSION = exports.DEFAULT_BATCH_SIZE = void 0; const override_prompt_1 = require("./interfaces/override_prompt"); const engine_1 = __importDefault(require("./enums/engine")); exports.DEFAULT_BATCH_SIZE = 32; exports.VERSION = "3.3.3"; exports.DEFAULT_TEMPLATED_STRING_PREFIX = "{{"; exports.DEFAULT_TEMPLATED_STRING_SUFFIX = "}}"; exports.FLATTEN_DELIMITER = "*"; exports.DEFAULT_MODEL = { [engine_1.default.ChatGPT]: "gpt-4o", [engine_1.default.Gemini]: "gemini-2.0-flash-exp", [engine_1.default.Ollama]: "llama3.3", [engine_1.default.Claude]: "claude-3-5-sonnet-latest", }; exports.CLI_HELP = { BatchSize: "How many keys to process at a time", Engine: "Engine to use (chatgpt, gemini, ollama, or claude)", EnsureChangedTranslation: "Each generated translation key must differ from the input (for keys longer than 4)", Model: `Model to use (e.g. ${Object.values(exports.DEFAULT_MODEL).join(", ")})`, OllamaHost: "The host and port number serving Ollama. 11434 is the default port number.", OverridePromptFile: `Use the prompts from the given JSON file, containing keys for at least one of ${override_prompt_1.OVERRIDE_PROMPT_KEYS.join(", ")}`, RateLimit: "How many milliseconds between requests (defaults to 1s for Gemini, 120ms (at 500RPM) for ChatGPT, 1200ms for Claude)", SkipStylingVerification: "Skip validating the resulting translation's formatting through another query", SkipTranslationVerification: "Skip validating the resulting translation through another query", Verbose: "Print logs about progress", }; //# sourceMappingURL=constants.js.map