ai-platform-converter

Lossless API parameter conversion between multiple AI platforms (OpenAI, Anthropic, Gemini, DeepSeek, Wenwen, Vertex AI, Huawei, BigModel)

375 lines (374 loc) 13.9 kB
"use strict"; /** * Parameter Validation and Range Adjustment Utility * * This utility provides parameter validation and range adjustment * for cross-platform API conversions. */ Object.defineProperty(exports, "__esModule", { value: true }); exports.validateAndAdjustParameters = validateAndAdjustParameters; exports.suggestParameterMappings = suggestParameterMappings; const common_1 = require("../types/common"); /** * Platform-specific parameter ranges and constraints */ const PLATFORM_CONSTRAINTS = { [common_1.Platform.OpenAI]: { temperature: { min: 0, max: 2, default: 1 }, top_p: { min: 0, max: 1, default: 1 }, max_tokens: { min: 1, max: 128000, default: 4096 }, presence_penalty: { min: -2, max: 2, default: 0 }, frequency_penalty: { min: -2, max: 2, default: 0 } }, [common_1.Platform.Anthropic]: { temperature: { min: 0, max: 1, default: 1 }, top_p: { min: 0, max: 1, default: 1 }, top_k: { min: 0, max: 500, default: 0 }, max_tokens: { min: 1, max: 200000, default: 4096 } }, [common_1.Platform.Gemini]: { temperature: { min: 0, max: 2, default: 1 }, top_p: { min: 0, max: 1, default: 1 }, top_k: { min: 1, max: 1000, default: 40 }, max_output_tokens: { min: 1, max: 8192, default: 1024 }, candidate_count: { min: 1, max: 8, default: 1 } }, [common_1.Platform.DeepSeek]: { temperature: { min: 0, max: 2, default: 1 }, top_p: { min: 0, max: 1, default: 1 }, max_tokens: { min: 1, max: 128000, default: 4096 } }, // OpenAI-compatible platforms use same constraints as OpenAI [common_1.Platform.Wenwen]: { temperature: { min: 0, max: 2, default: 1 }, top_p: { min: 0, max: 1, default: 1 }, max_tokens: { min: 1, max: 128000, default: 4096 } }, [common_1.Platform.VertexAI]: { temperature: { min: 0, max: 2, default: 1 }, top_p: { min: 0, max: 1, default: 1 }, max_tokens: { min: 1, max: 128000, default: 4096 } }, [common_1.Platform.Huawei]: { temperature: { min: 0, max: 2, default: 1 }, top_p: { min: 0, max: 1, default: 1 }, max_tokens: { min: 1, max: 128000, default: 4096 } }, [common_1.Platform.BigModel]: { temperature: { min: 0, max: 2, default: 1 }, top_p: { min: 0, max: 1, default: 1 }, max_tokens: { min: 1, max: 128000, default: 4096 } } }; /** * Validate and adjust parameters for a specific platform */ function validateAndAdjustParameters(request, targetPlatform, _sourcePlatform) { const constraints = PLATFORM_CONSTRAINTS[targetPlatform]; const warnings = []; const errors = []; const adjustedRequest = { ...request }; // Skip validation if platform not supported if (!constraints) { return { isValid: true, adjustedRequest, warnings: [], errors: [] }; } // Validate common parameters for (const [param, constraint] of Object.entries(constraints)) { const value = getNestedValue(adjustedRequest, param); if (value !== undefined && value !== null) { const validation = validateParameter(param, value, constraint, targetPlatform); if (!validation.isValid) { if (validation.canAdjust) { // Adjust the value setNestedValue(adjustedRequest, param, validation.adjustedValue); warnings.push({ parameter: param, message: validation.message, originalValue: value, adjustedValue: validation.adjustedValue, platform: targetPlatform }); } else { errors.push({ parameter: param, message: validation.message, value, platform: targetPlatform, severity: 'error' }); } } } } // Platform-specific validations switch (targetPlatform) { case common_1.Platform.Anthropic: validateAnthropicSpecific(adjustedRequest, warnings, errors); break; case common_1.Platform.Gemini: validateGeminiSpecific(adjustedRequest, warnings, errors); break; case 
common_1.Platform.DeepSeek: validateDeepSeekSpecific(adjustedRequest, warnings, errors); break; } return { isValid: errors.length === 0, adjustedRequest, warnings, errors }; } /** * Validate a single parameter against constraints */ function validateParameter(param, value, constraint, platform) { if (typeof value !== 'number' || isNaN(value)) { return { isValid: false, message: `${param} must be a valid number`, canAdjust: true, adjustedValue: constraint.default }; } if (value < constraint.min) { return { isValid: false, message: `${param} (${value}) is below minimum ${constraint.min} for ${platform}`, canAdjust: true, adjustedValue: constraint.min }; } if (value > constraint.max) { return { isValid: false, message: `${param} (${value}) exceeds maximum ${constraint.max} for ${platform}`, canAdjust: true, adjustedValue: constraint.max }; } return { isValid: true }; } /** * Platform-specific validations for Anthropic */ function validateAnthropicSpecific(request, _warnings, errors) { // Validate max_tokens is required if (!request.max_tokens || request.max_tokens <= 0) { errors.push({ parameter: 'max_tokens', message: 'max_tokens is required and must be positive for Anthropic', value: request.max_tokens, platform: common_1.Platform.Anthropic, severity: 'critical' }); } // Validate 2025 new parameters if (request.mcp_servers && request.mcp_servers.length > 20) { errors.push({ parameter: 'mcp_servers', message: 'Maximum 20 MCP servers allowed', value: request.mcp_servers.length, platform: common_1.Platform.Anthropic, severity: 'error' }); } if (request.service_tier && !['auto', 'standard_only'].includes(request.service_tier)) { errors.push({ parameter: 'service_tier', message: 'service_tier must be "auto" or "standard_only"', value: request.service_tier, platform: common_1.Platform.Anthropic, severity: 'error' }); } // Validate thinking configuration if (request.thinking) { if (request.thinking.max_thinking_time !== undefined && request.thinking.max_thinking_time < 0) { errors.push({ parameter: 'thinking.max_thinking_time', message: 'max_thinking_time must be non-negative', value: request.thinking.max_thinking_time, platform: common_1.Platform.Anthropic, severity: 'error' }); } if (request.thinking.thinking_budget_tokens !== undefined && request.thinking.thinking_budget_tokens < 0) { errors.push({ parameter: 'thinking.thinking_budget_tokens', message: 'thinking_budget_tokens must be non-negative', value: request.thinking.thinking_budget_tokens, platform: common_1.Platform.Anthropic, severity: 'error' }); } } } /** * Platform-specific validations for Gemini */ function validateGeminiSpecific(request, _warnings, errors) { // Validate contents is required if (!request.contents || request.contents.length === 0) { errors.push({ parameter: 'contents', message: 'contents array is required and cannot be empty for Gemini', value: request.contents, platform: common_1.Platform.Gemini, severity: 'critical' }); } // Validate safety settings if (request.safetySettings) { const validCategories = [ 'HARM_CATEGORY_HARASSMENT', 'HARM_CATEGORY_HATE_SPEECH', 'HARM_CATEGORY_SEXUALLY_EXPLICIT', 'HARM_CATEGORY_DANGEROUS_CONTENT' ]; const validThresholds = [ 'BLOCK_NONE', 'BLOCK_ONLY_HIGH', 'BLOCK_MEDIUM_AND_ABOVE', 'BLOCK_LOW_AND_ABOVE' ]; for (const setting of request.safetySettings) { if (!validCategories.includes(setting.category)) { errors.push({ parameter: 'safetySettings.category', message: `Invalid safety category: ${setting.category}`, value: setting.category, platform: common_1.Platform.Gemini, severity: 
'error' }); } if (!validThresholds.includes(setting.threshold)) { errors.push({ parameter: 'safetySettings.threshold', message: `Invalid safety threshold: ${setting.threshold}`, value: setting.threshold, platform: common_1.Platform.Gemini, severity: 'error' }); } } } // Validate tool configuration if (request.toolConfig?.functionCallingConfig) { const validModes = ['AUTO', 'ANY', 'NONE']; if (request.toolConfig.functionCallingConfig.mode && !validModes.includes(request.toolConfig.functionCallingConfig.mode)) { errors.push({ parameter: 'toolConfig.functionCallingConfig.mode', message: `Invalid function calling mode: ${request.toolConfig.functionCallingConfig.mode}`, value: request.toolConfig.functionCallingConfig.mode, platform: common_1.Platform.Gemini, severity: 'error' }); } } } /** * Platform-specific validations for DeepSeek */ function validateDeepSeekSpecific(request, _warnings, errors) { // Validate model const validModels = ['deepseek-chat', 'deepseek-coder', 'deepseek-reasoner']; if (!validModels.includes(request.model)) { errors.push({ parameter: 'model', message: `Invalid model: ${request.model}. Must be one of: ${validModels.join(', ')}`, value: request.model, platform: common_1.Platform.DeepSeek, severity: 'error' }); } // Validate reasoning config for reasoner model if (request.model === 'deepseek-reasoner' && request.reasoning_config) { const config = request.reasoning_config; if (config.max_reasoning_tokens !== undefined && config.max_reasoning_tokens < 0) { errors.push({ parameter: 'reasoning_config.max_reasoning_tokens', message: 'max_reasoning_tokens must be non-negative', value: config.max_reasoning_tokens, platform: common_1.Platform.DeepSeek, severity: 'error' }); } if (config.temperature !== undefined && (config.temperature < 0 || config.temperature > 1)) { errors.push({ parameter: 'reasoning_config.temperature', message: 'reasoning_config.temperature must be between 0 and 1', value: config.temperature, platform: common_1.Platform.DeepSeek, severity: 'error' }); } } // Warn about beta features if (request.prefix_completion) { _warnings.push({ parameter: 'prefix_completion', message: 'prefix_completion is a beta feature and requires beta endpoint', originalValue: request.prefix_completion, platform: common_1.Platform.DeepSeek }); } } /** * Get nested value from object using dot notation */ function getNestedValue(obj, path) { return path.split('.').reduce((current, key) => current?.[key], obj); } /** * Set nested value in object using dot notation */ function setNestedValue(obj, path, value) { const keys = path.split('.'); const lastKey = keys.pop(); const target = keys.reduce((current, key) => { if (!current[key]) current[key] = {}; return current[key]; }, obj); target[lastKey] = value; } /** * Cross-platform parameter mapping suggestions */ function suggestParameterMappings(sourcePlatform, targetPlatform) { const mappings = { 'OpenAI->Anthropic': { 'max_tokens': 'max_tokens', 'stop': 'stop_sequences', 'top_p': 'top_p' }, 'OpenAI->Gemini': { 'max_tokens': 'generationConfig.maxOutputTokens', 'stop': 'generationConfig.stopSequences', 'top_p': 'generationConfig.topP', 'temperature': 'generationConfig.temperature' }, 'Anthropic->OpenAI': { 'max_tokens': 'max_tokens', 'stop_sequences': 'stop', 'top_p': 'top_p' }, 'Gemini->OpenAI': { 'generationConfig.maxOutputTokens': 'max_tokens', 'generationConfig.stopSequences': 'stop', 'generationConfig.topP': 'top_p', 'generationConfig.temperature': 'temperature' } }; const key = `${sourcePlatform}->${targetPlatform}`; return 
mappings[key] || {}; }
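
For reference, a minimal usage sketch of the two exported functions, written against the dist file above. The require("ai-platform-converter") entry point and the re-export of Platform, validateAndAdjustParameters, and suggestParameterMappings from the package root are assumptions; inside the package the Platform enum lives in ../types/common, so adjust the import to the package's actual entry points.

// Usage sketch. Assumption: the package root re-exports Platform,
// validateAndAdjustParameters and suggestParameterMappings (hypothetical entry point).
const {
    Platform,
    validateAndAdjustParameters,
    suggestParameterMappings
} = require("ai-platform-converter");

// An OpenAI-style request whose values fall outside Anthropic's ranges.
const request = { model: "claude-sonnet", temperature: 1.5, max_tokens: 300000 };

const result = validateAndAdjustParameters(request, Platform.Anthropic, Platform.OpenAI);
console.log(result.isValid);                     // true: both violations were adjustable
console.log(result.adjustedRequest.temperature); // 1      (clamped to Anthropic's max of 1)
console.log(result.adjustedRequest.max_tokens);  // 200000 (clamped to Anthropic's max)
console.log(result.warnings.length);             // 2: one warning per adjusted parameter

// Parameter-name mapping hints for an OpenAI -> Gemini conversion.
// Note: the lookup key is `${sourcePlatform}->${targetPlatform}`, which assumes the
// Platform enum values stringify to 'OpenAI', 'Gemini', etc., matching the table above.
console.log(suggestParameterMappings(Platform.OpenAI, Platform.Gemini));
// { max_tokens: 'generationConfig.maxOutputTokens',
//   stop: 'generationConfig.stopSequences',
//   top_p: 'generationConfig.topP',
//   temperature: 'generationConfig.temperature' }

Note that validateAndAdjustParameters clamps adjustable out-of-range values and reports them as warnings; only violations that cannot be auto-adjusted (for example a missing max_tokens for Anthropic) end up in errors and flip isValid to false.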