// cursorai-errorprompter
// AI-powered runtime error fixing for developers using Cursor
// Version: (unspecified) — 131 lines (126 loc) • 5.24 kB • JavaScript
;
// TypeScript-emitted CommonJS interop helper: wraps a plain CommonJS export
// in `{ default: mod }` so `import openai from 'openai'` style access works.
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
// Mark this module as an ES-module transpilation target for downstream interop.
Object.defineProperty(exports, "__esModule", { value: true });
exports.GPTService = void 0;
const openai_1 = __importDefault(require("openai"));
class GPTService {
    /**
     * Service that sends captured runtime errors to the OpenAI chat API and
     * returns a structured fix suggestion.
     *
     * @param {{apiKey: string, model: string, temperature?: number, maxTokens?: number}} config
     *   Service configuration; validated before the API client is created.
     * @throws {Error} When the configuration fails validation.
     */
    constructor(config) {
        this.config = config;
        // Models this service is known to work with (all support json_object output).
        this.SUPPORTED_MODELS = ['gpt-4-turbo-preview', 'gpt-4', 'gpt-3.5-turbo'];
        this.validateConfig();
        this.client = new openai_1.default({
            apiKey: config.apiKey,
        });
        console.log('🤖 GPT Service initialized with model:', config.model);
    }
    /**
     * Validates the constructor-supplied configuration.
     *
     * @throws {Error} When the API key is missing, the model is unsupported,
     *   or `temperature`/`maxTokens` are present but out of range.
     */
    validateConfig() {
        if (!this.config.apiKey) {
            throw new Error('OpenAI API key is required');
        }
        if (!this.SUPPORTED_MODELS.includes(this.config.model)) {
            throw new Error(`Unsupported model: ${this.config.model}. Supported models: ${this.SUPPORTED_MODELS.join(', ')}`);
        }
        // Nullish checks (not truthiness): a truthiness guard silently skipped
        // validation for maxTokens === 0, accepting an invalid value.
        if (this.config.temperature != null && (this.config.temperature < 0 || this.config.temperature > 1)) {
            throw new Error('Temperature must be between 0 and 1');
        }
        if (this.config.maxTokens != null && this.config.maxTokens < 1) {
            throw new Error('maxTokens must be greater than 0');
        }
    }
    /**
     * Asks the model for a fix for the given runtime error.
     *
     * @param {object} error - Captured error descriptor (type, message,
     *   filePath, lineNumber, codeSnippet) consumed by buildPrompt.
     * @returns {Promise<object>} Parsed fix: { suggestion, explanation, confidence, ... }.
     * @throws {Error} On API failure, with friendlier messages for auth (401),
     *   rate-limit (429) and model-availability errors.
     */
    async getErrorFix(error) {
        console.log('📤 Sending error to GPT for analysis...');
        const prompt = this.buildPrompt(error);
        try {
            const completion = await this.client.chat.completions.create({
                model: this.config.model,
                messages: [
                    {
                        role: 'system',
                        content: 'You are an expert developer assistant. Your task is to analyze error messages and provide specific, actionable fixes. You MUST respond ONLY with a valid JSON object, with no additional text or explanation outside the JSON structure.'
                    },
                    {
                        role: 'user',
                        content: prompt
                    }
                ],
                temperature: this.config.temperature ?? 0.3,
                max_tokens: this.config.maxTokens ?? 1000,
                response_format: { type: "json_object" }
            });
            console.log('📥 Received response from GPT');
            // Guard with optional chaining: an empty `choices` array would
            // otherwise throw a raw TypeError instead of the intended error below.
            const response = completion.choices[0]?.message?.content;
            if (!response) {
                throw new Error('No response from GPT');
            }
            return this.parseResponse(response);
        }
        catch (err) {
            // Named `err` so it does not shadow the `error` parameter above.
            if (err instanceof Error) {
                if (err.message.includes('401')) {
                    throw new Error('Invalid OpenAI API key. Please check your configuration.');
                }
                else if (err.message.includes('429')) {
                    throw new Error('OpenAI API rate limit exceeded. Please try again later.');
                }
                else if (err.message.includes('model')) {
                    throw new Error(`Model ${this.config.model} is not available. Please check your configuration.`);
                }
            }
            console.error('❌ GPT API Error:', err);
            throw new Error(`Failed to get GPT analysis: ${err instanceof Error ? err.message : 'Unknown error'}`);
        }
    }
    /**
     * Parses the model's reply, tolerating replies that wrap JSON in prose.
     *
     * @param {string} response - Raw message content from the API.
     * @returns {object} Parsed JSON, JSON extracted from surrounding text, or a
     *   structured fallback carrying the raw text with confidence 0.8.
     */
    parseResponse(response) {
        try {
            // First try direct JSON parsing
            return JSON.parse(response);
        }
        catch (parseError) {
            console.warn('⚠️ Failed to parse direct JSON response, attempting to extract JSON from text');
            // Try to extract JSON from the text (greedy: first "{" to last "}").
            const jsonMatch = response.match(/\{[\s\S]*\}/);
            if (jsonMatch) {
                try {
                    const extractedJson = JSON.parse(jsonMatch[0]);
                    console.log('✅ Successfully extracted JSON from response');
                    return extractedJson;
                }
                catch (extractError) {
                    console.warn('⚠️ Failed to parse extracted JSON');
                }
            }
            // If all parsing attempts fail, return a structured response with the raw text
            console.warn('⚠️ Using raw response as fallback');
            return {
                suggestion: response,
                explanation: 'GPT provided a direct suggestion without structured format',
                confidence: 0.8,
                rawResponse: response
            };
        }
    }
    /**
     * Builds the user prompt from a captured error descriptor.
     *
     * @param {object} error - Must provide type, message, filePath, lineNumber
     *   and codeSnippet.{before[], error, after[]}.
     * @returns {string} Prompt text demanding a JSON-only reply.
     */
    buildPrompt(error) {
        return `
Error Analysis Request:
Error Type: ${error.type}
Message: ${error.message}
File: ${error.filePath}
Line: ${error.lineNumber}
Code Context:
\`\`\`typescript
${error.codeSnippet.before.join('\n')}
${error.codeSnippet.error}
${error.codeSnippet.after.join('\n')}
\`\`\`
IMPORTANT: You MUST respond ONLY with a valid JSON object in the following format:
{
"suggestion": "The complete code fix",
"explanation": "Brief explanation of why this fixes the issue",
"confidence": 0.0-1.0
}
Do not include any text outside the JSON object.`;
    }
}
// Public entry point: consumers do `const { GPTService } = require('...')`.
exports.GPTService = GPTService;