@nullplatform/llm-gateway
LLM Gateway Core - Main proxy server
JavaScript
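// TypeScript-emitted helper functions for the ECMAScript decorators proposal:
// __esDecorate applies the class decorators declared below, and
// __runInitializers runs any extra initializers they register.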
var __esDecorate = (this && this.__esDecorate) || function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) {
function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; }
var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value";
var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null;
var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {});
var _, done = false;
for (var i = decorators.length - 1; i >= 0; i--) {
var context = {};
for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p];
for (var p in contextIn.access) context.access[p] = contextIn.access[p];
context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); };
var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context);
if (kind === "accessor") {
if (result === void 0) continue;
if (result === null || typeof result !== "object") throw new TypeError("Object expected");
if (_ = accept(result.get)) descriptor.get = _;
if (_ = accept(result.set)) descriptor.set = _;
if (_ = accept(result.init)) initializers.unshift(_);
}
else if (_ = accept(result)) {
if (kind === "field") initializers.unshift(_);
else descriptor[key] = _;
}
}
if (target) Object.defineProperty(target, contextIn.name, descriptor);
done = true;
};
var __runInitializers = (this && this.__runInitializers) || function (thisArg, initializers, value) {
var useValue = arguments.length > 2;
for (var i = 0; i < initializers.length; i++) {
value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg);
}
return useValue ? value : void 0;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AnthropicApiAdapter = void 0;
const llm_gateway_sdk_1 = require("@nullplatform/llm-gateway-sdk");
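// Adapter that translates between the Anthropic Messages API format and the
// gateway's internal LLM request/response format. It is registered with the
// gateway via the @ExtensionMetadata decorator under the name 'anthropic'.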
let AnthropicApiAdapter = (() => {
let _classDecorators = [(0, llm_gateway_sdk_1.ExtensionMetadata)({
name: 'anthropic',
description: 'Anthropic API adapter for LLM Gateway',
})];
let _classDescriptor;
let _classExtraInitializers = [];
let _classThis;
var AnthropicApiAdapter = class {
static { _classThis = this; }
static {
const _metadata = typeof Symbol === "function" && Symbol.metadata ? Object.create(null) : void 0;
__esDecorate(null, _classDescriptor = { value: _classThis }, _classDecorators, { kind: "class", name: _classThis.name, metadata: _metadata }, null, _classExtraInitializers);
AnthropicApiAdapter = _classThis = _classDescriptor.value;
if (_metadata) Object.defineProperty(_classThis, Symbol.metadata, { enumerable: true, configurable: true, writable: true, value: _metadata });
__runInitializers(_classThis, _classExtraInitializers);
}
name = 'anthropic';
basePaths = ['/v1/messages', '/messages'];
streamingStateMap = new Map();
configure(config) {
// Do nothing for now; no configuration is needed.
return;
}
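// Convert an incoming Anthropic Messages API request into the gateway's
// internal LLMRequest shape: flatten the system prompt and content blocks into
// messages, map tools and tool_choice, and stash Anthropic-specific fields
// (top_k, tool_choice, system) under metadata.custom.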
async transformInput(request) {
// Validate required fields
if (!request.model) {
throw new Error('Model is required');
}
if (!request.max_tokens) {
throw new Error('max_tokens is required for Anthropic API');
}
if (!Array.isArray(request.messages) || request.messages.length === 0) {
throw new Error('Messages array is required and must be non-empty');
}
// Transform Anthropic messages to LLM format
const messages = this.transformAnthropicMessagesToLLM(request.messages, request.system);
// Map tools if present
const tools = request.tools?.map(tool => ({
type: 'function',
function: {
name: tool.name,
description: tool.description,
parameters: tool.input_schema
}
}));
// Transform tool_choice
let tool_choice;
if (request.tool_choice) {
switch (request.tool_choice.type) {
case 'auto':
tool_choice = 'auto';
break;
case 'any':
case 'tool':
tool_choice = 'auto'; // Map to closest equivalent
break;
default:
tool_choice = 'auto';
}
}
// Construct metadata
const metadata = {
user_id: request.metadata?.user_id,
original_provider: 'anthropic',
custom: {
top_k: request.top_k,
tool_choice: request.tool_choice,
system: request.system
}
};
// Build final LLMRequest
const llmRequest = {
messages,
model: request.model,
temperature: request.temperature,
max_tokens: request.max_tokens,
top_p: request.top_p,
stop: request.stop_sequences,
stream: request.stream,
tools,
tool_choice,
target_provider: "",
metadata
};
return llmRequest;
}
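// Validate the raw Anthropic request before transformation. Returns a
// human-readable error string on failure, or null when the request is valid.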
async validate(request) {
if (!request.model || typeof request.model !== 'string') {
return 'Model must be a non-empty string';
}
if (!request.max_tokens || typeof request.max_tokens !== 'number' || request.max_tokens < 1) {
return 'max_tokens must be a positive number';
}
if (!request.messages || !Array.isArray(request.messages)) {
return 'Messages must be an array';
}
if (request.messages.length === 0) {
return 'Messages array cannot be empty';
}
// Validate each message
for (const [index, message] of request.messages.entries()) {
if (!message.role || !['user', 'assistant'].includes(message.role)) {
return `Message ${index}: role must be one of: user, assistant`;
}
if (!message.content) {
return `Message ${index}: content is required`;
}
// Validate content structure
if (Array.isArray(message.content)) {
for (const [contentIndex, contentBlock] of message.content.entries()) {
if (!contentBlock.type || !['text', 'tool_use', 'tool_result'].includes(contentBlock.type)) {
return `Message ${index}, content ${contentIndex}: type must be one of: text, tool_use, tool_result`;
}
}
}
else if (typeof message.content !== 'string') {
return `Message ${index}: content must be a string or array of content blocks`;
}
}
// Validate temperature
if (request.temperature !== undefined) {
if (typeof request.temperature !== 'number' || request.temperature < 0 || request.temperature > 1) {
return 'Temperature must be a number between 0 and 1';
}
}
// Validate top_p
if (request.top_p !== undefined) {
if (typeof request.top_p !== 'number' || request.top_p < 0 || request.top_p > 1) {
return 'top_p must be a number between 0 and 1';
}
}
return null; // No validation errors
}
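// Emit the SSE content_block_stop event that closes content block `idx`.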
async getContentBlockFinish(idx) {
return `event: content_block_stop\ndata: {"type":"content_block_stop","index":${idx}}\n\n`;
}
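// Translate a streamed chunk from the upstream provider into Anthropic-style
// server-sent events (message_start, content_block_start/delta/stop,
// message_delta, message_stop). Per-request progress is tracked in
// streamingStateMap, keyed by the chunk or accumulated response id.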
async transformOutputChunk(processedInput, input, chunk, firstChunk, finalChunk, accumulated) {
let response = "";
const requestId = chunk?.id || accumulated?.id || 'unknown';
// Initialize or get streaming state for this request
if (!this.streamingStateMap.has(requestId)) {
this.streamingStateMap.set(requestId, {
hasStarted: false,
blockStarted: false,
toolBlocksStarted: new Set(),
contentBlockIndex: 0,
lastToolCallId: undefined
});
}
const state = this.streamingStateMap.get(requestId);
if (chunk) {
const choice = chunk.content?.[0];
// Handle message_start (only once per request)
if (!state.hasStarted) {
response += `event: message_start\n`;
response += `data: ${JSON.stringify({
type: "message_start",
message: {
id: requestId,
type: "message",
role: "assistant",
model: chunk.model || input.model,
content: [],
stop_reason: null,
stop_sequence: null,
usage: {
input_tokens: chunk?.usage?.prompt_tokens || 0,
cache_creation_input_tokens: 0,
cache_read_input_tokens: 0,
output_tokens: chunk?.usage?.completion_tokens || 0,
service_tier: "standard"
}
}
})}\n\n`;
state.hasStarted = true;
}
// Handle content_block_start for text content
if (choice?.delta?.content !== undefined && !state.blockStarted) {
response += `event: content_block_start\n`;
response += `data: ${JSON.stringify({
type: "content_block_start",
index: state.contentBlockIndex,
content_block: {
type: "text",
text: ""
}
})}\n\n`;
state.blockStarted = true;
}
// Handle content_block_start for tool calls
if (choice?.delta?.tool_calls) {
const toolCall = choice.delta.tool_calls[0];
if (toolCall?.id && !state.toolBlocksStarted.has(toolCall.id)) {
if (state.blockStarted) {
// Close the open text block and advance the content block index
state.blockStarted = false;
response += await this.getContentBlockFinish(state.contentBlockIndex++);
}
state.blockStarted = true;
response += `event: content_block_start\n`;
response += `data: ${JSON.stringify({
type: "content_block_start",
index: state.contentBlockIndex,
content_block: {
type: "tool_use",
id: toolCall.id,
name: toolCall.function?.name || "",
input: {}
}
})}\n\n`;
state.toolBlocksStarted.add(toolCall.id);
state.lastToolCallId = toolCall.id;
}
}
// Handle content_block_delta for text
if (choice?.delta?.content) {
response += `event: content_block_delta\n`;
response += `data: ${JSON.stringify({
type: "content_block_delta",
index: 0, // Text is always first content block
delta: {
type: "text_delta",
text: choice.delta.content
}
})}\n\n`;
}
// Handle content_block_delta for tool calls
if (choice?.delta?.tool_calls) {
const toolCall = choice.delta.tool_calls[0];
if (toolCall?.function?.arguments) {
response += `event: content_block_delta\n`;
response += `data: ${JSON.stringify({
type: "content_block_delta",
index: state.contentBlockIndex,
delta: {
type: "input_json_delta",
partial_json: toolCall.function.arguments
}
})}\n\n`;
}
}
// Handle content_block_stop when finishing text or tool
if (choice?.finish_reason && state.contentBlockIndex > 0) {
state.blockStarted = false;
response += await this.getContentBlockFinish(state.contentBlockIndex++);
}
// Handle message_delta for usage and stop_reason updates
if (choice?.finish_reason) {
state.finishReason = choice.finish_reason;
}
// Add ping events occasionally (optional, for keeping connection alive)
if (Math.random() < 0.1) { // 10% chance
response += `event: ping\n`;
response += `data: ${JSON.stringify({ type: "ping" })}\n\n`;
}
}
if (finalChunk) {
if (state.blockStarted) {
state.blockStarted = false;
response += await this.getContentBlockFinish(state.contentBlockIndex++);
}
response += `event: message_delta\n`;
response += `data: ${JSON.stringify({
type: "message_delta",
delta: {
stop_reason: this.mapFinishReasonToAnthropic(state.finishReason),
stop_sequence: null
},
usage: {
output_tokens: accumulated?.usage?.completion_tokens || 0
}
})}\n\n`;
response += `event: message_stop\n`;
response += `data: ${JSON.stringify({
type: "message_stop"
})}\n\n`;
// Clean up streaming state
this.streamingStateMap.delete(requestId);
}
return Buffer.from(response);
}
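// Map an OpenAI-style finish_reason to the corresponding Anthropic stop_reason.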
mapFinishReasonToAnthropic(finishReason) {
switch (finishReason) {
case 'stop':
return 'end_turn';
case 'length':
return 'max_tokens';
case 'tool_calls':
return 'tool_use';
default:
return null;
}
}
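// Transform a complete (non-streaming) provider response into an Anthropic
// Messages API response: text and tool_use content blocks, stop_reason, and
// token usage.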
async transformOutput(processedInput, input, response) {
const choice = response.content[0];
const content = [];
// Handle text content
if (choice.message?.content) {
content.push({
type: 'text',
text: choice.message.content
});
}
// Handle tool calls
if (choice.message?.tool_calls) {
for (const toolCall of choice.message.tool_calls) {
content.push({
type: 'tool_use',
id: toolCall.id,
name: toolCall.function.name,
input: typeof toolCall.function.arguments === 'string'
? JSON.parse(toolCall.function.arguments)
: toolCall.function.arguments
});
}
}
// Map finish reason
let stop_reason = 'end_turn';
switch (choice.finish_reason) {
case 'stop':
stop_reason = 'end_turn';
break;
case 'length':
stop_reason = 'max_tokens';
break;
case 'tool_calls':
stop_reason = 'tool_use';
break;
default:
stop_reason = 'end_turn';
}
const transformed = {
id: response.id,
type: 'message',
role: 'assistant',
content,
model: response.model,
stop_reason,
usage: {
input_tokens: response.usage?.prompt_tokens || 0,
output_tokens: response.usage?.completion_tokens || 0
}
};
return transformed;
}
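// Flatten Anthropic messages (plus the optional top-level system prompt) into
// the internal message list: text blocks become user/assistant messages,
// tool_use blocks become assistant tool_calls, and tool_result blocks become
// 'tool' role messages.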
transformAnthropicMessagesToLLM(anthropicMessages, systemMessage) {
const messages = [];
// Add system message if present
if (systemMessage) {
messages.push({
role: 'system',
content: systemMessage
});
}
// Transform each Anthropic message
for (const msg of anthropicMessages) {
if (typeof msg.content === 'string') {
// Simple text message
messages.push({
role: msg.role === 'user' ? 'user' : 'assistant',
content: msg.content
});
}
else if (Array.isArray(msg.content)) {
// Complex content with multiple blocks
for (const contentBlock of msg.content) {
if (contentBlock.type === 'text') {
messages.push({
role: msg.role === 'user' ? 'user' : 'assistant',
content: contentBlock.text || ''
});
}
else if (contentBlock.type === 'tool_use') {
messages.push({
role: 'assistant',
content: '',
tool_calls: [{
id: contentBlock.id || '',
type: 'function',
function: {
name: contentBlock.name || '',
arguments: JSON.stringify(contentBlock.input || {})
}
}]
});
}
else if (contentBlock.type === 'tool_result') {
messages.push({
role: 'tool',
content: contentBlock.content || '',
tool_call_id: contentBlock.tool_use_id
});
}
}
}
}
return messages;
}
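// Expose native passthrough routes. Currently only GET /models, which returns
// a static list of Claude model ids rather than proxying Anthropic's models
// endpoint.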
async getNativeAdapters() {
return [
{
path: "/models",
method: 'get',
doRequest: async (request, response) => {
response.json({
"data": [
{
"id": "claude-3-5-sonnet-20241022",
"type": "model",
"display_name": "Claude 3.5 Sonnet"
},
{
"id": "claude-3-5-haiku-20241022",
"type": "model",
"display_name": "Claude 3.5 Haiku"
},
{
"id": "claude-3-opus-20240229",
"type": "model",
"display_name": "Claude 3 Opus"
},
{
"id": "claude-3-sonnet-20240229",
"type": "model",
"display_name": "Claude 3 Sonnet"
},
{
"id": "claude-3-haiku-20240307",
"type": "model",
"display_name": "Claude 3 Haiku"
}
]
});
}
}
];
}
};
return AnthropicApiAdapter = _classThis;
})();
exports.AnthropicApiAdapter = AnthropicApiAdapter;
//# sourceMappingURL=antropic.js.map