/**
 * flutty-cli-agent — AI-powered development assistant with web chat
 * interface, context memory, and full tool integration using the
 * DeepSeek API.
 *
 * API route: accepts a prompt for an existing chat session and streams
 * the responses back as newline-delimited JSON.
 */
import { NextRequest, NextResponse } from 'next/server'
import { ChatSession } from '../../../../src/lib/chat-session'
// Access global session store
// Process-wide session store, shared across route modules and surviving
// hot reloads in dev. The write-back to `global` below is essential:
// without it this module would create its own private Map whenever it
// loads first, and sessions registered by the session-creation route
// would never be found here (every lookup would 404).
declare global {
  // eslint-disable-next-line no-var -- `var` is required for global augmentation
  var chatSessions: Map<string, ChatSession> | undefined
}

// `??` (not `||`) so only a genuinely missing store creates a new Map.
const chatSessions: Map<string, ChatSession> = global.chatSessions ?? new Map()
global.chatSessions = chatSessions
/**
 * POST handler: sends a prompt to an existing chat session and streams
 * the session's responses back as newline-delimited JSON (one serialized
 * message object per line).
 *
 * Request body: `{ sessionId: string, message: string }`.
 * Responses:
 *  - 400 JSON error when either field is missing,
 *  - 404 JSON error when the session is unknown,
 *  - 200 NDJSON stream of chat messages (errors during streaming are
 *    emitted in-band as `{ type: 'error', ... }` lines),
 *  - 500 JSON error when the request itself cannot be processed.
 */
export async function POST(request: NextRequest) {
  try {
    const { sessionId, message } = await request.json()

    if (!sessionId || !message) {
      return NextResponse.json(
        { success: false, error: 'sessionId and message are required' },
        { status: 400 }
      )
    }

    // Look up the session registered by the session-creation route.
    const chatSession = chatSessions.get(sessionId)
    if (!chatSession) {
      return NextResponse.json(
        { success: false, error: 'Chat session not found' },
        { status: 404 }
      )
    }

    const encoder = new TextEncoder()

    // Drive the chat session inside the stream's own start() callback.
    // This avoids the previous dangling-controller pattern (a `let` that
    // is used before definite assignment under strict mode) and the
    // un-awaited floating promise: the async generator now owns the
    // controller for its whole lifetime and always closes it.
    const stream = new ReadableStream<Uint8Array>({
      async start(controller) {
        try {
          for await (const chatMessage of chatSession.sendPrompt(message)) {
            // One JSON document per line (NDJSON framing).
            controller.enqueue(encoder.encode(JSON.stringify(chatMessage) + '\n'))
          }
        } catch (error: unknown) {
          // Surface streaming failures in-band so the client sees them
          // on the same channel as regular messages.
          const errorText = error instanceof Error ? error.message : String(error)
          const errorLine = JSON.stringify({
            type: 'error',
            error: errorText,
            sessionId,
          }) + '\n'
          controller.enqueue(encoder.encode(errorLine))
        } finally {
          controller.close()
        }
      },
    })

    return new Response(stream, {
      headers: {
        // NOTE(review): body is NDJSON, not a single JSON document;
        // kept as 'application/json' to avoid breaking existing clients.
        'Content-Type': 'application/json',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
      },
    })
  } catch (error: unknown) {
    console.error('Error processing chat message:', error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to process message',
      },
      { status: 500 }
    )
  }
}