context-optimizer-mcp-server
An MCP server providing context-optimization tools for AI coding assistants - compatible with GitHub Copilot, Cursor AI, and other MCP-supporting assistants
/**
* OpenAI provider implementation
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAIProvider = void 0;
const openai_1 = __importDefault(require("openai"));
const base_1 = require("./base");
class OpenAIProvider extends base_1.BaseLLMProvider {
    name = 'OpenAI';
    defaultModel = 'gpt-4o-mini';
    apiKeyUrl = 'https://platform.openai.com/api-keys';
    apiKeyPrefix = 'sk-';
    // Send the prompt as a single user message and normalize the result into the
    // provider's success/error response shape.
    async processRequest(prompt, model, apiKey) {
        try {
            if (!apiKey) {
                return this.createErrorResponse('OpenAI API key not configured');
            }
            const openai = new openai_1.default({
                apiKey: apiKey,
            });
            const completion = await openai.chat.completions.create({
                model: model || this.defaultModel,
                messages: [
                    {
                        role: 'user',
                        content: prompt
                    }
                ],
                temperature: 0.1, // low temperature for deterministic, repeatable output
                max_tokens: 4000
            });
            const content = completion.choices[0]?.message?.content;
            if (!content) {
                return this.createErrorResponse('No response from OpenAI');
            }
            return this.createSuccessResponse(content);
        }
        catch (error) {
            // Failures are returned as structured error responses rather than rethrown.
            const errorMessage = error instanceof Error ? error.message : 'Unknown error';
            return this.createErrorResponse(`OpenAI processing failed: ${errorMessage}`);
        }
    }
}
exports.OpenAIProvider = OpenAIProvider;
//# sourceMappingURL=openai.js.map
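For orientation, here is a minimal usage sketch. It assumes this file is required as ./openai from a sibling module and that the response helpers inherited from BaseLLMProvider return plain result objects; their exact shape is defined in ./base, not shown here.

const { OpenAIProvider } = require('./openai');

async function main() {
    const provider = new OpenAIProvider();
    // processRequest(prompt, model, apiKey): model may be omitted to use defaultModel ('gpt-4o-mini').
    const result = await provider.processRequest(
        'Summarize the key exports of this module.',
        undefined,                      // fall back to the provider's defaultModel
        process.env.OPENAI_API_KEY      // the key is injected by the caller, not read by the provider
    );
    console.log(result);
}

main().catch(console.error);

Note that failures (missing key, empty completion, thrown SDK errors) surface through createErrorResponse rather than exceptions, so callers inspect the returned object instead of wrapping the call in try/catch.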