@robota-sdk/openai
Version: (not captured in this extract)
OpenAI integration for Robota SDK - GPT-4, GPT-3.5, function calling, and tool integration with OpenAI's API
266 lines (259 loc) • 8.37 kB
TypeScript
import OpenAI from 'openai';
import { SimpleLogger, BaseAIProvider, UniversalMessage, ChatOptions } from '@robota-sdk/agents';
/**
 * Payload logging data structure
 *
 * Metadata snapshot of a single OpenAI API request, consumed by
 * {@link PayloadLogger} implementations. Carries counts and settings only —
 * not the message bodies themselves — keeping log entries lightweight.
 */
interface OpenAILogData {
    /** Model identifier sent with the request (e.g. 'gpt-4'). */
    model: string;
    /** Number of messages included in the request. */
    messagesCount: number;
    /** Whether the request carried tool/function definitions. */
    hasTools: boolean;
    /**
     * Sampling temperature, when set. The explicit `| undefined` permits
     * assigning `undefined` even under `exactOptionalPropertyTypes`.
     */
    temperature?: number | undefined;
    /** Maximum completion tokens, when set (same `| undefined` rationale as above). */
    maxTokens?: number | undefined;
    /**
     * Time the payload was captured, as a string. Exact format is chosen by
     * the logger implementation — presumably ISO 8601; confirm there.
     */
    timestamp: string;
    /** Optional correlation ID for matching request/response log entries. */
    requestId?: string | undefined;
}
/**
 * PayloadLogger interface for logging OpenAI API payloads
 *
 * This interface provides a contract for different logging implementations:
 * - FilePayloadLogger: Node.js file-based logging
 * - ConsolePayloadLogger: Browser console-based logging
 * - Custom implementations: User-defined loggers
 *
 * The provider consults {@link isEnabled} before building log data, so a
 * disabled logger costs nothing per request.
 */
interface PayloadLogger {
    /**
     * Check if logging is enabled
     * @returns true if logging is active, false otherwise
     */
    isEnabled(): boolean;
    /**
     * Log API payload data
     *
     * Asynchronous so file- or network-backed implementations can await I/O.
     *
     * @param payload - The API request/response payload data
     * @param type - Type of operation ('chat' or 'stream')
     */
    logPayload(payload: OpenAILogData, type: 'chat' | 'stream'): Promise<void>;
}
/**
 * Configuration options for payload loggers
 *
 * Shared options shape for the bundled logger implementations
 * (FilePayloadLogger / ConsolePayloadLogger — not visible in this file;
 * verify each implementation for which options it honors).
 */
interface PayloadLoggerOptions {
    /**
     * Whether logging is enabled
     * @defaultValue true
     */
    enabled?: boolean;
    /**
     * Include timestamp in log entries
     * @defaultValue true
     */
    includeTimestamp?: boolean;
    /**
     * Logger instance for console output
     * @defaultValue DefaultConsoleLogger
     */
    logger?: SimpleLogger;
}
/**
 * Valid provider option value types
 *
 * Recursive union constraining what may appear in a provider options bag:
 * primitives (including null/undefined), an OpenAI client, logger instances,
 * plus arbitrarily nested arrays and plain objects of the same shape.
 * Serves as a type-safe alternative to `any` for the index signature on
 * {@link ProviderOptions}.
 */
type ProviderOptionValue = string | number | boolean | undefined | null | OpenAI | PayloadLogger | SimpleLogger | ProviderOptionValue[] | {
    [key: string]: ProviderOptionValue;
};
/**
 * Base provider options interface
 *
 * Open-ended options bag: the index signature admits provider-specific
 * extras while {@link ProviderOptionValue} keeps them type-constrained.
 * Note this signature forces every property of extending interfaces to be
 * assignable to ProviderOptionValue.
 */
interface ProviderOptions {
    /**
     * Additional provider-specific options
     */
    [key: string]: ProviderOptionValue;
}
/**
 * OpenAI provider options
 *
 * Exactly one of {@link OpenAIProviderOptions.apiKey | apiKey} or
 * {@link OpenAIProviderOptions.client | client} should be supplied: a client
 * takes precedence, otherwise one is constructed from the apiKey.
 */
interface OpenAIProviderOptions extends ProviderOptions {
    /**
     * OpenAI API key (required when client is not provided)
     */
    apiKey?: string;
    /**
     * OpenAI organization ID (optional)
     */
    organization?: string;
    /**
     * API request timeout (milliseconds)
     */
    timeout?: number;
    /**
     * API base URL (default: 'https://api.openai.com/v1')
     */
    baseURL?: string;
    /**
     * Response format (default: 'text')
     * - 'text': Plain text response
     * - 'json_object': Legacy JSON mode (requires system message)
     * - 'json_schema': Structured Outputs with schema validation
     */
    responseFormat?: 'text' | 'json_object' | 'json_schema';
    /**
     * JSON schema for structured outputs (required when responseFormat is 'json_schema')
     */
    jsonSchema?: {
        name: string;
        description?: string;
        schema?: Record<string, ProviderOptionValue>;
        strict?: boolean;
    };
    /**
     * OpenAI client instance (optional: will be created from apiKey if not provided)
     */
    client?: OpenAI;
    /**
     * Payload logger instance for debugging API requests/responses
     *
     * Use different implementations based on your environment:
     * - FilePayloadLogger: Node.js file-based logging
     * - ConsolePayloadLogger: Browser console-based logging
     * - Custom: Implement PayloadLogger interface
     *
     * @example
     * ```typescript
     * // Node.js
     * import { FilePayloadLogger } from '@robota-sdk/openai/loggers/file';
     * const provider = new OpenAIProvider({
     *   client: openaiClient,
     *   payloadLogger: new FilePayloadLogger({ logDir: './logs/openai' })
     * });
     *
     * // Browser
     * import { ConsolePayloadLogger } from '@robota-sdk/openai/loggers/console';
     * const provider = new OpenAIProvider({
     *   client: openaiClient,
     *   payloadLogger: new ConsolePayloadLogger()
     * });
     * ```
     */
    payloadLogger?: PayloadLogger;
    /**
     * Logger instance for internal OpenAI provider logging
     * @defaultValue SilentLogger
     */
    logger?: SimpleLogger;
}
/**
 * OpenAI provider implementation for Robota
 *
 * Provides integration with OpenAI's GPT models following BaseAIProvider guidelines.
 * Uses OpenAI SDK native types internally for optimal performance and feature support.
 *
 * @public
 */
declare class OpenAIProvider extends BaseAIProvider {
    /** Provider identifier used by the agent runtime to select this provider. */
    readonly name = "openai";
    /** Provider implementation version (independent of the OpenAI SDK version). */
    readonly version = "1.0.0";
    /** OpenAI SDK client — supplied via options or constructed from apiKey. */
    private readonly client;
    /** The {@link OpenAIProviderOptions} this instance was constructed with. */
    private readonly options;
    /** Optional request/response debug logger; may be a no-op when disabled. */
    private readonly payloadLogger;
    /** Parses raw OpenAI responses into UniversalMessage (implementation not in view). */
    private readonly responseParser;
    /** Internal diagnostics logger (defaults to SilentLogger per options docs). */
    private readonly logger;
    /**
     * Create a provider.
     * @param options - Connection, formatting and logging configuration
     */
    constructor(options: OpenAIProviderOptions);
    /**
     * Initialize payload logger
     */
    private initializePayloadLogger;
    /**
     * Generate response using UniversalMessage
     *
     * @param messages - Conversation history in provider-agnostic form
     * @param options - Per-call overrides (model, tools, etc.)
     * @returns The assistant's reply as a UniversalMessage
     */
    chat(messages: UniversalMessage[], options?: ChatOptions): Promise<UniversalMessage>;
    /**
     * Generate streaming response using UniversalMessage
     *
     * Yields incremental UniversalMessage chunks as tokens arrive.
     */
    chatStream(messages: UniversalMessage[], options?: ChatOptions): AsyncIterable<UniversalMessage>;
    /** Whether this provider supports tool/function calling. */
    supportsTools(): boolean;
    /** Validate constructor options; semantics of the boolean result defined in the implementation. */
    validateConfig(): boolean;
    /** Release held resources; call when the provider is no longer needed. */
    dispose(): Promise<void>;
    /**
     * Convert UniversalMessage array to OpenAI format
     */
    private convertToOpenAIMessages;
    /**
     * Convert tool schemas to OpenAI format
     */
    private convertToOpenAITools;
    /**
     * Validate messages before sending to API
     *
     * IMPORTANT: OpenAI API Content Handling Policy
     * =============================================
     *
     * Based on OpenAI API documentation and community feedback:
     *
     * 1. When sending TO OpenAI API:
     *    - Assistant messages with tool_calls: content MUST be null (not empty string)
     *    - Regular assistant messages: content can be string or null
     *    - This prevents "400 Bad Request" errors
     *
     * 2. When receiving FROM our API (UniversalMessage):
     *    - All messages must have content as string (TypeScript requirement)
     *    - Convert null to empty string for type compatibility
     *
     * 3. This dual handling ensures:
     *    - OpenAI API compatibility (null for tool calls)
     *    - TypeScript type safety (string content in UniversalMessage)
     *    - No infinite loops in tool execution
     *
     * Reference: OpenAI Community discussions confirm that tool_calls
     * require content to be null, not empty string.
     */
    protected validateMessages(messages: UniversalMessage[]): void;
}
/**
 * User-authored chat message.
 *
 * Content is nullable to mirror OpenAI's wire format, where message content
 * may be null (see the content-handling policy on OpenAIProvider.validateMessages).
 */
interface UserMessage {
    /** Discriminant: identifies this as a user message. */
    role: 'user';
    /** Message text; null mirrors OpenAI's nullable content field. */
    content: string | null;
    /** Optional client-side creation time. */
    timestamp?: Date;
}
/**
 * System-prompt chat message.
 *
 * Content is nullable to mirror OpenAI's wire format, where message content
 * may be null (see the content-handling policy on OpenAIProvider.validateMessages).
 */
interface SystemMessage {
    /** Discriminant: identifies this as a system message. */
    role: 'system';
    /** System prompt text; null mirrors OpenAI's nullable content field. */
    content: string | null;
    /** Optional client-side creation time. */
    timestamp?: Date;
}
/**
 * Tool-result chat message.
 *
 * NOTE(review): toolCallId is optional here, yet
 * OpenAIConversationAdapter.filterMessagesForOpenAI documents that tool
 * messages without a valid toolCallId are excluded before sending — so a
 * ToolMessage lacking it will be dropped, not sent.
 */
interface ToolMessage {
    /** Discriminant: identifies this as a tool-result message. */
    role: 'tool';
    /** Tool output; null mirrors OpenAI's nullable content field. */
    content: string | null;
    /** Optional client-side creation time. */
    timestamp?: Date;
    /** ID of the assistant tool_call this message answers; required by OpenAI. */
    toolCallId?: string;
}
/**
 * OpenAI Conversation Adapter
 *
 * Converts between UniversalMessage format and OpenAI native types.
 * Provides bidirectional conversion for seamless integration.
 *
 * All members are static; the class is used as a stateless namespace.
 *
 * @public
 */
declare class OpenAIConversationAdapter {
    /**
     * Filter messages for OpenAI compatibility
     *
     * OpenAI has specific requirements:
     * - Tool messages must have valid toolCallId
     * - Messages must be in proper sequence
     * - Tool messages without toolCallId should be excluded
     *
     * @returns A filtered array; the input array itself is presumably not
     *          mutated — confirm in the implementation.
     */
    static filterMessagesForOpenAI(messages: UniversalMessage[]): UniversalMessage[];
    /**
     * Convert UniversalMessage array to OpenAI message format
     * Now properly handles tool messages for OpenAI's tool calling feature
     */
    static toOpenAIFormat(messages: UniversalMessage[]): OpenAI.Chat.ChatCompletionMessageParam[];
    /**
     * Convert a single UniversalMessage to OpenAI format
     * Handles all message types including tool messages
     */
    static convertMessage(msg: UniversalMessage): OpenAI.Chat.ChatCompletionMessageParam;
    /**
     * Add system prompt to message array if needed
     *
     * @param messages - Messages already in OpenAI format
     * @param systemPrompt - Prompt to prepend when provided; when omitted the
     *                       input is expected to pass through unchanged
     */
    static addSystemPromptIfNeeded(messages: OpenAI.Chat.ChatCompletionMessageParam[], systemPrompt?: string): OpenAI.Chat.ChatCompletionMessageParam[];
}
// Public API surface: the provider and adapter classes (runtime values) plus
// type-only exports for options, logging, and message shapes.
export { OpenAIConversationAdapter, OpenAIProvider, type OpenAIProviderOptions, type PayloadLogger, type PayloadLoggerOptions, type ProviderOptionValue, type ProviderOptions, type SystemMessage, type ToolMessage, type UserMessage };