jezweb-mcp-core
Version: (unspecified)
Jezweb Model Context Protocol (MCP) Core — a universal server for providing AI tools and resources, designed for seamless integration with various AI models and clients. Features adaptable multi-provider support and comprehensive tool and resource management.
129 lines • 5.33 kB
TypeScript
/**
* OpenAI Provider Plugin - OpenAI-specific implementation of the LLM Provider interface
*
* This module refactors the existing OpenAI service to implement the generic LLM provider
* interface, enabling it to work within the provider registry system while maintaining
* full backward compatibility with existing OpenAI functionality.
*
* Key Features:
* - Full OpenAI Assistants API implementation
* - Generic LLM provider interface compliance
* - Backward compatibility with existing OpenAI service
* - Type mapping between OpenAI and generic types
* - Enhanced error handling and validation
*/
import { LLMProvider, LLMProviderFactory, LLMProviderMetadata, GenericAssistant, GenericThread, GenericMessage, GenericRun, GenericRunStep, GenericCreateAssistantRequest, GenericUpdateAssistantRequest, GenericListRequest, GenericListResponse, GenericCreateThreadRequest, GenericUpdateThreadRequest, GenericCreateMessageRequest, GenericUpdateMessageRequest, GenericCreateRunRequest, GenericUpdateRunRequest, GenericSubmitToolOutputsRequest } from '../llm-service.js';
/**
 * Configuration accepted by {@link OpenAIProvider} and its factory.
 *
 * Only `apiKey` is required; the remaining fields tune transport behavior.
 * Default values for the optional fields are not visible in this declaration
 * file — NOTE(review): confirm defaults (timeout, retries) against the
 * provider implementation before documenting them as facts.
 */
export interface OpenAIProviderConfig {
    /** OpenAI API key used to authenticate every request (required). */
    apiKey: string;
    /** Custom API base URL, e.g. for proxies or Azure-style endpoints (optional). */
    baseUrl?: string;
    /** OpenAI organization ID to attribute requests to (optional). */
    organization?: string;
    /** OpenAI project ID to attribute requests to (optional). */
    project?: string;
    /** Per-request timeout in milliseconds (optional). */
    timeout?: number;
    /** Maximum number of retry attempts for failed requests (optional). */
    maxRetries?: number;
    /** Extra HTTP headers to attach to every request (optional). */
    headers?: Record<string, string>;
}
/**
 * OpenAI implementation of the generic {@link LLMProvider} interface.
 *
 * Wraps the pre-existing OpenAI service (held in `openaiService`) and maps
 * between OpenAI-specific types and the `Generic*` types from
 * `llm-service.js`, so the provider can participate in the provider
 * registry while preserving backward compatibility.
 */
export declare class OpenAIProvider implements LLMProvider {
    /** Underlying OpenAI service this provider delegates to (type not visible in this .d.ts). */
    private openaiService;
    /** Configuration captured at construction / initialize time. */
    private config;
    /** Static descriptive metadata (name, capabilities, etc.) for registry display. */
    readonly metadata: LLMProviderMetadata;
    /**
     * @param config - OpenAI credentials and transport options; see {@link OpenAIProviderConfig}.
     */
    constructor(config: OpenAIProviderConfig);
    /**
     * Initialize the provider with (re)supplied configuration.
     *
     * NOTE(review): takes an untyped record rather than OpenAIProviderConfig —
     * presumably to satisfy the generic LLMProvider contract; verify the
     * implementation validates the shape at runtime.
     */
    initialize(config: Record<string, any>): Promise<void>;
    /**
     * Validate configuration and connectivity to the OpenAI API.
     *
     * @returns `true` when the provider can reach the API with its credentials.
     */
    validateConnection(): Promise<boolean>;
    /** Create an assistant from a provider-agnostic request. */
    createAssistant(request: GenericCreateAssistantRequest): Promise<GenericAssistant>;
    /** List assistants; `request` carries pagination/ordering options when supplied. */
    listAssistants(request?: GenericListRequest): Promise<GenericListResponse<GenericAssistant>>;
    /** Fetch a single assistant by its ID. */
    getAssistant(assistantId: string): Promise<GenericAssistant>;
    /** Apply a partial update to an existing assistant. */
    updateAssistant(assistantId: string, request: GenericUpdateAssistantRequest): Promise<GenericAssistant>;
    /** Delete an assistant; resolves with the ID and a deletion confirmation flag. */
    deleteAssistant(assistantId: string): Promise<{
        id: string;
        deleted: boolean;
    }>;
    /** Create a conversation thread, optionally seeded via `request`. */
    createThread(request?: GenericCreateThreadRequest): Promise<GenericThread>;
    /** Fetch a single thread by its ID. */
    getThread(threadId: string): Promise<GenericThread>;
    /** Apply a partial update to an existing thread. */
    updateThread(threadId: string, request: GenericUpdateThreadRequest): Promise<GenericThread>;
    /** Delete a thread; resolves with the ID and a deletion confirmation flag. */
    deleteThread(threadId: string): Promise<{
        id: string;
        deleted: boolean;
    }>;
    /** Add a message to the given thread. */
    createMessage(threadId: string, request: GenericCreateMessageRequest): Promise<GenericMessage>;
    /** List messages in a thread; `request` carries pagination/ordering options. */
    listMessages(threadId: string, request?: GenericListRequest): Promise<GenericListResponse<GenericMessage>>;
    /** Fetch a single message within a thread. */
    getMessage(threadId: string, messageId: string): Promise<GenericMessage>;
    /** Apply a partial update to a message within a thread. */
    updateMessage(threadId: string, messageId: string, request: GenericUpdateMessageRequest): Promise<GenericMessage>;
    /** Delete a message; resolves with the ID and a deletion confirmation flag. */
    deleteMessage(threadId: string, messageId: string): Promise<{
        id: string;
        deleted: boolean;
    }>;
    /** Start a run of an assistant on the given thread. */
    createRun(threadId: string, request: GenericCreateRunRequest): Promise<GenericRun>;
    /** List runs on a thread; `request` carries pagination/ordering options. */
    listRuns(threadId: string, request?: GenericListRequest): Promise<GenericListResponse<GenericRun>>;
    /** Fetch a single run on a thread. */
    getRun(threadId: string, runId: string): Promise<GenericRun>;
    /** Apply a partial update to a run. */
    updateRun(threadId: string, runId: string, request: GenericUpdateRunRequest): Promise<GenericRun>;
    /** Cancel an in-progress run; resolves with the updated run state. */
    cancelRun(threadId: string, runId: string): Promise<GenericRun>;
    /** Submit tool-call outputs for a run that is waiting on them; resolves with the updated run. */
    submitToolOutputs(threadId: string, runId: string, request: GenericSubmitToolOutputsRequest): Promise<GenericRun>;
    /** List the steps of a run; `request` carries pagination/ordering options. */
    listRunSteps(threadId: string, runId: string, request?: GenericListRequest): Promise<GenericListResponse<GenericRunStep>>;
    /** Fetch a single step of a run. */
    getRunStep(threadId: string, runId: string, stepId: string): Promise<GenericRunStep>;
    /**
     * Fallback for operations the OpenAI provider does not implement.
     *
     * NOTE(review): whether this rejects, no-ops, or returns a sentinel is not
     * visible from this declaration — confirm against the implementation.
     */
    handleUnsupportedOperation(operation: string, ...args: any[]): Promise<any>;
    /** Internal error normalizer shared by the public methods (signature not visible in this .d.ts). */
    private handleError;
}
/**
 * Factory that builds {@link OpenAIProvider} instances for the provider
 * registry, and exposes provider metadata without instantiation.
 */
export declare class OpenAIProviderFactory implements LLMProviderFactory {
    /**
     * Create and return a new OpenAI provider configured from `config`.
     *
     * NOTE(review): `config` is expected to match {@link OpenAIProviderConfig}
     * (the untyped record satisfies the generic factory contract) — verify
     * runtime validation in the implementation.
     */
    create(config: Record<string, any>): Promise<LLMProvider>;
    /** Return the provider's static metadata without creating an instance. */
    getMetadata(): LLMProviderMetadata;
    /**
     * Check whether `config` is acceptable for {@link create}.
     *
     * @returns `true` when the configuration is valid.
     */
    validateConfig(config: Record<string, any>): boolean;
}
/**
 * Shared default {@link OpenAIProviderFactory} instance, convenient for
 * registering the OpenAI provider with a registry without constructing
 * a factory yourself.
 */
export declare const openaiProviderFactory: OpenAIProviderFactory;
//# sourceMappingURL=openai.d.ts.map