@vibeship/devtools
Version:
Comprehensive markdown-based project management system with AI capabilities for Next.js applications
1,637 lines (1,407 loc) • 818 kB
JavaScript
// Cached references to the Object intrinsics used by the module shims below,
// captured once so later (hypothetical) global patching cannot affect them.
var {
  create: __create,
  defineProperty: __defProp,
  getOwnPropertyDescriptor: __getOwnPropDesc,
  getOwnPropertyNames: __getOwnPropNames,
  getPrototypeOf: __getProtoOf
} = Object;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Bundler shim for dynamic `require` in output that may run outside CommonJS:
// - If a real `require` exists at evaluation time, use it directly.
// - Otherwise, if Proxy is available, wrap the throwing fallback so that each
//   property access re-checks for a late-appearing global `require` before
//   delegating to the fallback.
// - As a last resort, the fallback function itself is returned; calling it
//   either forwards to `require` (if one appeared) or throws.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
if (typeof require !== "undefined") return require.apply(this, arguments);
throw Error('Dynamic require of "' + x + '" is not supported');
});
// Wraps an ESM module initializer so it runs at most once. `fn` is an object
// whose single property holds the initializer; `res` caches its result.
// Setting `fn = 0` after the first run both marks the module as initialized
// and passes 0 as the (unused) argument, mirroring the bundler convention.
var __esm = (fn, res) => function __init() {
  if (fn) {
    const initializer = fn[__getOwnPropNames(fn)[0]];
    res = initializer(fn = 0);
  }
  return res;
};
// Wraps a CommonJS module body so it executes lazily, on first require, and
// is cached thereafter. `cb` is an object whose single property holds the
// module body; `mod` caches the populated module record.
var __commonJS = (cb, mod) => function __require2() {
  if (!mod) {
    mod = { exports: {} };
    (0, cb[__getOwnPropNames(cb)[0]])(mod.exports, mod);
  }
  return mod.exports;
};
// Installs each entry of `all` (name -> getter) on `target` as an enumerable
// accessor, giving consumers live ESM-style bindings.
var __export = (target, all) => {
  for (const exportName in all) {
    __defProp(target, exportName, { get: all[exportName], enumerable: true });
  }
};
// Copies every own property of `from` onto `to` as a live getter, skipping
// `except` and anything `to` already owns. Enumerability of each copy follows
// the source descriptor (defaulting to enumerable when none is found).
var __copyProps = (to, from, except, desc) => {
  const copyable = typeof from === "function" || (from && typeof from === "object");
  if (copyable) {
    for (const key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) continue;
      desc = __getOwnPropDesc(from, key);
      __defProp(to, key, {
        get: () => from[key],
        enumerable: !desc || desc.enumerable
      });
    }
  }
  return to;
};
// Converts a CommonJS module record into an ESM-shaped namespace object.
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  const namespace = isNodeMode || !mod || !mod.__esModule
    ? __defProp(target, "default", { value: mod, enumerable: true })
    : target;
  return __copyProps(namespace, mod);
};
// src/cli/utils/framework-detector.ts
// Bundled namespace object for the framework-detector module. __export
// installs enumerable getters, so importers always observe the live
// `detectFramework` / `getApiRoutePath` bindings defined further down.
var framework_detector_exports = {};
__export(framework_detector_exports, {
  detectFramework: () => detectFramework,
  getApiRoutePath: () => getApiRoutePath
});
import { existsSync, readFileSync } from "fs";
import { join } from "path";
/**
 * Detects whether the current working directory is a Next.js project and,
 * if so, which project conventions it uses.
 *
 * Resolves to an object of the shape:
 *   { isNextJs, version?, isAppRouter, hasSrcDir, hasTypeScript, hasTailwind }
 *
 * Never rejects: a missing or malformed package.json yields the
 * "not a Next.js project" result instead of an error (best-effort detection).
 */
async function detectFramework() {
  // Single source of truth for the negative result (was duplicated three
  // times in three separate return sites).
  const notNextJs = () => ({
    isNextJs: false,
    isAppRouter: false,
    hasSrcDir: false,
    hasTypeScript: false,
    hasTailwind: false
  });
  const cwd = process.cwd();
  const packageJsonPath2 = join(cwd, "package.json");
  if (!existsSync(packageJsonPath2)) {
    return notNextJs();
  }
  try {
    const packageJson2 = JSON.parse(readFileSync(packageJsonPath2, "utf-8"));
    // `next` may be declared as a regular or a dev dependency; a single
    // lookup serves both the presence check and the version string.
    const version = packageJson2.dependencies?.next || packageJson2.devDependencies?.next;
    if (!version) {
      return notNextJs();
    }
    // Strip the semver range operator ("^14.2.0" / "~14.2.0" -> "14.2.0").
    const versionNumber = version.replace(/[\^~]/, "");
    // App Router projects keep routes under app/ (optionally inside src/).
    const isAppRouter = existsSync(join(cwd, "app")) || existsSync(join(cwd, "src/app"));
    const hasSrcDir = existsSync(join(cwd, "src"));
    const hasTypeScript = existsSync(join(cwd, "tsconfig.json"));
    const hasTailwind = !!(packageJson2.dependencies?.tailwindcss || packageJson2.devDependencies?.tailwindcss);
    return {
      isNextJs: true,
      version: versionNumber,
      isAppRouter,
      hasSrcDir,
      hasTypeScript,
      hasTailwind
    };
  } catch (error) {
    // Unreadable or malformed package.json: deliberately swallow and report
    // "not Next.js" rather than surfacing a parse error to callers.
    return notNextJs();
  }
}
/**
 * Resolves the directory where API route files belong for the detected
 * project layout: app/api for App Router projects, pages/api otherwise,
 * each optionally nested under src/.
 */
function getApiRoutePath(framework) {
  const segments = [framework.isAppRouter ? "app" : "pages", "api"];
  if (framework.hasSrcDir) {
    segments.unshift("src");
  }
  return join(...segments);
}
// Run-once initializer for the framework-detector module. The body is empty
// ("use strict" only) because the module's declarations are hoisted to the
// bundle's top level; __esm still guarantees single execution.
var init_framework_detector = __esm({
  "src/cli/utils/framework-detector.ts": () => {
    "use strict";
  }
});
// src/templates/embedded-templates.ts
// Template-path -> file-contents map (e.g. "app-router/tasks.ts" -> source
// text). Left undefined here; assigned when init_embedded_templates runs.
var EMBEDDED_TEMPLATES;
var init_embedded_templates = __esm({
"src/templates/embedded-templates.ts"() {
"use strict";
EMBEDDED_TEMPLATES = {
"app-router/tasks-stream.ts": `import { NextRequest } from 'next/server';
import { FileScanner, TaskExtractor, CacheManager } from '@vibeship/devtools/server';
import { getConfig } from '@/lib/vibeship-config';
import { createStreamError, getRequestId } from '@/utils/api-errors';
export const runtime = 'nodejs';
export const dynamic = 'force-dynamic';
// Store active connections for broadcasting updates
const clients = new Map<string, ReadableStreamDefaultController>();
// Task change detection
let lastTaskHash: string | null = null;
export async function GET(request: NextRequest) {
const encoder = new TextEncoder();
const clientId = crypto.randomUUID();
// Get query parameters
const searchParams = request.nextUrl.searchParams;
const filter = searchParams.get('filter'); // todo, fixme, etc.
const includeCompleted = searchParams.get('includeCompleted') === 'true';
const stream = new ReadableStream({
async start(controller) {
// Add client to active connections
clients.set(clientId, controller);
// Send initial connection message
const connectMessage = \`data: \${JSON.stringify({
type: 'connected',
clientId,
timestamp: new Date().toISOString()
})}\\n\\n\`;
controller.enqueue(encoder.encode(connectMessage));
// Send initial tasks
try {
const tasks = await scanTasks(filter, includeCompleted);
const taskMessage = \`data: \${JSON.stringify({
type: 'tasks',
tasks,
timestamp: new Date().toISOString()
})}\\n\\n\`;
controller.enqueue(encoder.encode(taskMessage));
} catch (error) {
const errorMessage = createStreamError(error, getRequestId(request));
controller.enqueue(encoder.encode(errorMessage));
}
// Set up polling for changes
const pollInterval = setInterval(async () => {
try {
const tasks = await scanTasks(filter, includeCompleted);
const currentHash = generateTaskHash(tasks);
if (currentHash !== lastTaskHash) {
lastTaskHash = currentHash;
// Broadcast to all connected clients
const updateMessage = \`data: \${JSON.stringify({
type: 'update',
tasks,
timestamp: new Date().toISOString()
})}\\n\\n\`;
for (const [id, client] of clients.entries()) {
try {
client.enqueue(encoder.encode(updateMessage));
} catch (error) {
// Client disconnected, remove from map
clients.delete(id);
}
}
}
} catch (error) {
console.error('Error polling tasks:', error);
}
}, 5000); // Poll every 5 seconds
// Keep connection alive with heartbeat
const heartbeatInterval = setInterval(() => {
try {
const heartbeatMessage = \`data: \${JSON.stringify({
type: 'heartbeat',
timestamp: new Date().toISOString()
})}\\n\\n\`;
controller.enqueue(encoder.encode(heartbeatMessage));
} catch (error) {
// Client disconnected
clearInterval(heartbeatInterval);
clearInterval(pollInterval);
clients.delete(clientId);
}
}, 30000); // Send heartbeat every 30 seconds
// Clean up on close
request.signal.addEventListener('abort', () => {
clearInterval(heartbeatInterval);
clearInterval(pollInterval);
clients.delete(clientId);
controller.close();
});
},
});
return new Response(stream, {
headers: {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache, no-transform',
'Connection': 'keep-alive',
'X-Accel-Buffering': 'no', // Disable Nginx buffering
},
});
}
// Helper function to scan tasks
async function scanTasks(filter?: string | null, includeCompleted = false) {
const config = await getConfig();
const scanner = new FileScanner(config);
const extractor = new TaskExtractor();
const cache = new CacheManager({ maxSize: 100, ttl: 60000 }); // 1 minute cache
// Check cache first
const cacheKey = \`tasks:\${filter || 'all'}:\${includeCompleted}\`;
const cached = cache.get(cacheKey);
if (cached) {
return cached;
}
// Scan files
const files = await scanner.scanWithInfo();
const allTasks = [];
for (const file of files) {
const tasks = extractor.extract(file.content, file.path, {
includeContext: true,
parseMetadata: true,
});
// Apply filter if specified
const filteredTasks = filter
? tasks.filter(task => task.type.toLowerCase() === filter.toLowerCase())
: tasks;
// Filter completed tasks if needed
const finalTasks = includeCompleted
? filteredTasks
: filteredTasks.filter(task => !task.metadata?.completed);
allTasks.push(...finalTasks);
}
// Sort by priority and date
allTasks.sort((a, b) => {
// Priority first (high > medium > low)
const priorityOrder = { high: 3, medium: 2, low: 1 };
const aPriority = priorityOrder[a.metadata?.priority || 'low'];
const bPriority = priorityOrder[b.metadata?.priority || 'low'];
if (aPriority !== bPriority) {
return bPriority - aPriority;
}
// Then by date (newer first)
const aDate = a.metadata?.createdAt || 0;
const bDate = b.metadata?.createdAt || 0;
return bDate - aDate;
});
// Cache results
cache.set(cacheKey, allTasks);
return allTasks;
}
// Generate hash for task comparison
function generateTaskHash(tasks: any[]): string {
const taskString = JSON.stringify(tasks.map(t => ({
id: t.id,
type: t.type,
text: t.text,
completed: t.metadata?.completed
})));
// Simple hash function
let hash = 0;
for (let i = 0; i < taskString.length; i++) {
const char = taskString.charCodeAt(i);
hash = ((hash << 5) - hash) + char;
hash = hash & hash; // Convert to 32-bit integer
}
return hash.toString(36);
}`,
"app-router/ai-chat.ts": `import { NextRequest } from 'next/server';
import { FileScanner, MarkdownParser } from '@vibeship/devtools/server';
import { getConfig } from '@/lib/vibeship-config';
// Error handling utility
function getErrorMessage(error: unknown): string {
if (error instanceof Error) return error.message;
return String(error);
}
export const runtime = 'edge'; // Use edge runtime for streaming
export async function POST(request: NextRequest) {
try {
const { messages, context, stream = true } = await request.json();
if (!messages || !Array.isArray(messages)) {
return new Response(
JSON.stringify({ error: 'Messages array is required' }),
{ status: 400, headers: { 'Content-Type': 'application/json' } }
);
}
// Get relevant context from markdown files
const relevantContext = await getRelevantContext(context);
// Prepare system message with context
const systemMessage = {
role: 'system',
content: \`You are an AI assistant helping with a development project.
\${relevantContext ? \`Here is relevant context from the project:\\n\\n\${relevantContext}\` : ''}
Please provide helpful, accurate, and concise responses. When discussing code, use proper markdown formatting.\`
};
const allMessages = [systemMessage, ...messages];
if (!stream) {
// Non-streaming response
const response = await callAIProvider(allMessages, false);
return new Response(
JSON.stringify({ message: response }),
{ headers: { 'Content-Type': 'application/json' } }
);
}
// Streaming response
const encoder = new TextEncoder();
const responseStream = new ReadableStream({
async start(controller) {
try {
// Call AI provider with streaming
const aiStream = await callAIProvider(allMessages, true);
// Process the stream
const reader = aiStream.getReader();
while (true) {
const { done, value } = await reader.read();
if (done) {
// Send final message
controller.enqueue(encoder.encode('data: [DONE]\\n\\n'));
break;
}
// Format as SSE
const sseMessage = \`data: \${JSON.stringify({
type: 'content',
content: value,
timestamp: new Date().toISOString()
})}\\n\\n\`;
controller.enqueue(encoder.encode(sseMessage));
}
} catch (error) {
// Send error message
const errorMessage = \`data: \${JSON.stringify({
type: 'error',
error: getErrorMessage(error),
timestamp: new Date().toISOString()
})}\\n\\n\`;
controller.enqueue(encoder.encode(errorMessage));
} finally {
controller.close();
}
}
});
return new Response(responseStream, {
headers: {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive',
},
});
} catch (error) {
console.error('AI chat error:', error);
return new Response(
JSON.stringify({ error: 'Internal server error' }),
{ status: 500, headers: { 'Content-Type': 'application/json' } }
);
}
}
// Get relevant context from markdown files
async function getRelevantContext(contextRequest?: {
files?: string[];
search?: string;
limit?: number;
}): Promise<string> {
if (!contextRequest) return '';
const config = await getConfig();
const scanner = new FileScanner(config);
const parser = new MarkdownParser();
let contexts: string[] = [];
// Get specific files if requested
if (contextRequest.files?.length) {
for (const file of contextRequest.files) {
try {
const content = await scanner.readFile(file);
const parsed = parser.parse(content);
contexts.push(\`File: \${file}\\n\${parsed.content.slice(0, 1000)}\`);
} catch (error) {
console.error(\`Error reading file \${file}:\`, error);
}
}
}
// Search for relevant content
if (contextRequest.search) {
const files = await scanner.scan();
const relevantFiles = files
.filter((file: any) => file.endsWith('.md') || file.endsWith('.mdx'))
.slice(0, contextRequest.limit || 5);
for (const file of relevantFiles) {
try {
const content = await scanner.readFile(file);
if (content.toLowerCase().includes(contextRequest.search.toLowerCase())) {
const parsed = parser.parse(content);
contexts.push(\`File: \${file}\\n\${parsed.content.slice(0, 500)}\`);
}
} catch (error) {
console.error(\`Error searching file \${file}:\`, error);
}
}
}
return contexts.join('\\n\\n---\\n\\n');
}
// Placeholder for AI provider integration
async function callAIProvider(
messages: any[],
stream: boolean
): Promise<any> {
// This is where you would integrate with your AI provider
// Examples: OpenAI, Anthropic, Azure OpenAI, etc.
// For now, return a mock response
if (!stream) {
return "This is a placeholder response. Please integrate with your preferred AI provider.";
}
// Mock streaming response
const mockStream = new ReadableStream({
async start(controller) {
const response = "This is a streaming placeholder response. Please integrate with your preferred AI provider.";
const words = response.split(' ');
for (const word of words) {
controller.enqueue(word + ' ');
await new Promise(resolve => setTimeout(resolve, 100));
}
controller.close();
}
});
return mockStream;
}
// Example integrations (commented out):
/*
// OpenAI Integration
import OpenAI from 'openai';
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
async function callOpenAI(messages: any[], stream: boolean) {
if (!stream) {
const completion = await openai.chat.completions.create({
model: "gpt-4",
messages,
});
return completion.choices[0].message.content;
}
const stream = await openai.chat.completions.create({
model: "gpt-4",
messages,
stream: true,
});
return new ReadableStream({
async start(controller) {
for await (const chunk of stream) {
controller.enqueue(chunk.choices[0]?.delta?.content || '');
}
controller.close();
}
});
}
*/
/*
// Anthropic Integration
import Anthropic from '@anthropic-ai/sdk';
const anthropic = new Anthropic({
apiKey: process.env.ANTHROPIC_API_KEY,
});
async function callAnthropic(messages: any[], stream: boolean) {
const systemMessage = messages.find(m => m.role === 'system');
const userMessages = messages.filter(m => m.role !== 'system');
if (!stream) {
const response = await anthropic.messages.create({
model: "claude-3-opus-20240229",
system: systemMessage?.content,
messages: userMessages,
max_tokens: 1024,
});
return response.content[0].text;
}
const stream = await anthropic.messages.create({
model: "claude-3-opus-20240229",
system: systemMessage?.content,
messages: userMessages,
max_tokens: 1024,
stream: true,
});
return new ReadableStream({
async start(controller) {
for await (const chunk of stream) {
if (chunk.type === 'content_block_delta') {
controller.enqueue(chunk.delta.text);
}
}
controller.close();
}
});
}
*/`,
"app-router/files.ts": `import { NextRequest, NextResponse } from 'next/server';
import { FileScanner, PathValidator, Logger } from '@vibeship/devtools/server';
import { getConfig } from '@/lib/vibeship-config';
import { z } from 'zod';
// Request validation schemas
const ReadFileSchema = z.object({
path: z.string().min(1),
encoding: z.enum(['utf8', 'base64']).optional().default('utf8'),
});
const WriteFileSchema = z.object({
path: z.string().min(1),
content: z.string(),
encoding: z.enum(['utf8', 'base64']).optional().default('utf8'),
});
const DeleteFileSchema = z.object({
path: z.string().min(1),
});
const ListFilesSchema = z.object({
path: z.string().optional(),
pattern: z.string().optional(),
recursive: z.boolean().optional().default(true),
});
// GET: Read file or list files
export async function GET(request: NextRequest) {
const logger = new Logger({ prefix: 'FileOperations' });
const requestId = getRequestId(request);
return await withErrorHandling(async () => {
const searchParams = request.nextUrl.searchParams;
const path = searchParams.get('path');
if (path) {
// Read single file
const result = ReadFileSchema.safeParse({
path,
encoding: searchParams.get('encoding') || 'utf8'
});
if (!result.success) {
throw new ApiError(
ErrorCodes.INVALID_PARAMETERS,
400,
{
message: 'Invalid file read parameters',
cause: 'File path or encoding parameter is invalid',
solution: 'Provide a valid file path and encoding (utf8 or base64)',
docs: 'https://vibeship.dev/docs/api/files#read-file',
context: { validationErrors: result.error.flatten() },
},
undefined,
requestId
);
}
const config = await getConfig();
const scanner = new FileScanner(config);
const validator = new PathValidator(config.scanPaths);
// Validate path security
if (!validator.isValid(result.data.path)) {
logger.warn(\`Blocked access to invalid path: \${result.data.path}\`);
return NextResponse.json(
{ error: 'Access denied: Invalid path' },
{ status: 403 }
);
}
try {
const content = await scanner.readFile(result.data.path);
// Handle binary files
if (result.data.encoding === 'base64') {
const buffer = Buffer.from(content);
return NextResponse.json({
path: result.data.path,
content: buffer.toString('base64'),
encoding: 'base64',
size: buffer.length,
});
}
return NextResponse.json({
path: result.data.path,
content,
encoding: 'utf8',
size: content.length,
lines: content.split('\\n').length,
});
} catch (error) {
logger.error(\`Failed to read file \${result.data.path}:\`, error);
return NextResponse.json(
{ error: 'File not found' },
{ status: 404 }
);
}
} else {
// List files
const result = ListFilesSchema.safeParse({
path: searchParams.get('path') || '.',
pattern: searchParams.get('pattern'),
recursive: searchParams.get('recursive') === 'true',
});
if (!result.success) {
throw new ApiError(
ErrorCodes.INVALID_PARAMETERS,
400,
{
message: 'Invalid file read parameters',
cause: 'File path or encoding parameter is invalid',
solution: 'Provide a valid file path and encoding (utf8 or base64)',
docs: 'https://vibeship.dev/docs/api/files#read-file',
context: { validationErrors: result.error.flatten() },
},
undefined,
requestId
);
}
const config = await getConfig();
const scanner = new FileScanner(config);
const files = await scanner.scan(result.data.path || undefined);
// Get file info for each file
const filesWithInfo = await Promise.all(
files.map(async (file) => {
try {
const info = await scanner.getFileInfo(file);
return {
path: file,
size: info.size,
modified: info.mtime,
created: info.birthtime,
isDirectory: info.isDirectory(),
};
} catch (error) {
return {
path: file,
error: 'Failed to get file info',
};
}
})
);
return NextResponse.json({
files: filesWithInfo,
total: filesWithInfo.length,
});
}
} catch (error) {
logger.error('File operation error:', error);
return NextResponse.json(
{ error: 'Internal server error' },
{ status: 500 }
);
}
}
// POST: Write file
export async function POST(request: NextRequest) {
const logger = new Logger({ prefix: 'FileOperations' });
try {
const body = await request.json();
const result = WriteFileSchema.safeParse(body);
if (!result.success) {
return NextResponse.json(
{ error: 'Invalid parameters', details: result.error.flatten() },
{ status: 400 }
);
}
const config = await getConfig();
const validator = new PathValidator({
allowedPaths: config.scanPaths,
basePath: process.cwd(),
});
// Validate path security
if (!validator.isValid(result.data.path)) {
logger.warn(\`Blocked write to invalid path: \${result.data.path}\`);
return NextResponse.json(
{ error: 'Access denied: Invalid path' },
{ status: 403 }
);
}
// Check if file writes are explicitly disabled
const disableWrites = process.env.VIBESHIP_DISABLE_FILE_WRITES === 'true';
if (disableWrites) {
logger.warn('File writes are disabled by VIBESHIP_DISABLE_FILE_WRITES environment variable');
return NextResponse.json({
success: false,
error: 'File writing is disabled in this environment',
path: result.data.path,
}, { status: 403 });
}
// Implement file writing
try {
const scanner = new FileScanner(config);
await scanner.writeFile(result.data.path, result.data.content);
logger.info(\`Successfully wrote file: \${result.data.path}\`);
return NextResponse.json({
success: true,
path: result.data.path,
size: result.data.content.length,
});
} catch (writeError: any) {
logger.error(\`Failed to write file \${result.data.path}:\`, writeError);
return NextResponse.json(
{ error: 'Failed to write file', details: writeError.message },
{ status: 500 }
);
}
} catch (error) {
logger.error('File write error:', error);
return NextResponse.json(
{ error: 'Internal server error' },
{ status: 500 }
);
}
}
// DELETE: Delete file
export async function DELETE(request: NextRequest) {
const logger = new Logger({ prefix: 'FileOperations' });
try {
const body = await request.json();
const result = DeleteFileSchema.safeParse(body);
if (!result.success) {
return NextResponse.json(
{ error: 'Invalid parameters', details: result.error.flatten() },
{ status: 400 }
);
}
const config = await getConfig();
const validator = new PathValidator({
allowedPaths: config.scanPaths,
basePath: process.cwd(),
});
// Validate path security
if (!validator.isValid(result.data.path)) {
logger.warn(\`Blocked delete of invalid path: \${result.data.path}\`);
return NextResponse.json(
{ error: 'Access denied: Invalid path' },
{ status: 403 }
);
}
// In production, implement proper file deletion
// For now, return success with warning
logger.info(\`File delete requested for: \${result.data.path}\`);
return NextResponse.json({
success: true,
path: result.data.path,
warning: 'File deletion is disabled in template. Implement proper file deletion with security checks.',
});
} catch (error) {
logger.error('File delete error:', error);
return NextResponse.json(
{ error: 'Internal server error' },
{ status: 500 }
);
}
}
// PUT: Update file (partial update)
export async function PUT(request: NextRequest) {
const logger = new Logger({ prefix: 'FileOperations' });
try {
const body = await request.json();
const { path, updates } = body;
if (!path || !updates) {
return NextResponse.json(
{ error: 'Path and updates are required' },
{ status: 400 }
);
}
const config = await getConfig();
const validator = new PathValidator({
allowedPaths: config.scanPaths,
basePath: process.cwd(),
});
// Validate path security
if (!validator.isValid(path)) {
logger.warn(\`Blocked update to invalid path: \${path}\`);
return NextResponse.json(
{ error: 'Access denied: Invalid path' },
{ status: 403 }
);
}
// In production, implement proper file updating
// This could support operations like:
// - Append to file
// - Replace specific lines
// - Update frontmatter
// - etc.
logger.info(\`File update requested for: \${path}\`);
return NextResponse.json({
success: true,
path,
warning: 'File updating is disabled in template. Implement proper file updating with security checks.',
});
} catch (error) {
logger.error('File update error:', error);
return NextResponse.json(
{ error: 'Internal server error' },
{ status: 500 }
);
}
}`,
"app-router/tasks.ts": `import { NextRequest, NextResponse } from 'next/server';
import { z } from 'zod';
import { ApiError, ErrorCodes, ErrorFactory, getRequestId, withErrorHandling } from '@/utils/api-errors';
import { loadDependenciesWithFallbacks } from '@/utils/fallback-handlers';
// Request validation schema
const TaskQuerySchema = z.object({
type: z.enum(['todo', 'fixme', 'hack', 'note', 'all']).optional().default('all'),
status: z.enum(['pending', 'completed', 'all']).optional().default('pending'),
priority: z.enum(['high', 'medium', 'low', 'all']).optional().default('all'),
path: z.string().optional(),
search: z.string().optional(),
limit: z.number().min(1).max(1000).optional().default(100),
offset: z.number().min(0).optional().default(0),
sortBy: z.enum(['priority', 'date', 'type', 'path']).optional().default('priority'),
sortOrder: z.enum(['asc', 'desc']).optional().default('desc'),
});
// GET: Scan and return tasks
export async function GET(request: NextRequest) {
const requestId = getRequestId(request);
return await withErrorHandling(async () => {
// Parse query parameters
const searchParams = Object.fromEntries(request.nextUrl.searchParams);
const query = TaskQuerySchema.safeParse({
...searchParams,
limit: searchParams.limit ? parseInt(searchParams.limit) : undefined,
offset: searchParams.offset ? parseInt(searchParams.offset) : undefined,
});
if (!query.success) {
const requestId = getRequestId(request);
const validationError = new ApiError(
ErrorCodes.INVALID_PARAMETERS,
400,
{
message: 'Invalid query parameters provided',
cause: 'One or more query parameters are invalid or missing',
solution: 'Check the API documentation for correct parameter format and values',
docs: 'https://vibeship.dev/docs/api/tasks#query-parameters',
context: { validationErrors: query.error.flatten() },
},
undefined,
requestId
);
return validationError.toResponse();
}
// Load all dependencies with fallbacks
const deps = await loadDependenciesWithFallbacks(requestId);
const { config, FileScanner, TaskExtractor, Logger, CacheManager } = deps;
const logger = new Logger({ prefix: 'TaskScanning' });
logger.info('Config loaded:', { scanPaths: config.scanPaths });
const scanner = new FileScanner(config);
const extractor = new TaskExtractor();
const cache = new CacheManager({ maxSize: 100, ttl: 300000 }); // 5 minute cache
// Generate cache key
const cacheKey = \`tasks:\${JSON.stringify(query.data)}\`;
const cached = cache.get(cacheKey);
if (cached) {
logger.info('Returning cached tasks');
return NextResponse.json(cached);
}
// Scan files
let scanOptions = undefined;
if (query.data.path) {
// Normalize the path - if it doesn't start with . or /, add ./
let normalizedPath = query.data.path;
if (!normalizedPath.startsWith('.') && !normalizedPath.startsWith('/')) {
normalizedPath = \`./\${normalizedPath}\`;
}
scanOptions = { paths: [normalizedPath] };
logger.info(\`Using normalized scan path: \${normalizedPath}\`);
}
const files = await scanner.scanWithInfo(scanOptions);
logger.info(\`Scanned \${files.length} files\`);
const allTasks = [];
// Extract tasks from each file
for (const file of files) {
const tasks = extractor.extract(file.content, file.path, {
includeContext: true,
parseMetadata: true,
contextLines: 3,
});
if (tasks.length > 0) {
logger.info(\`Found \${tasks.length} tasks in \${file.path}\`);
}
allTasks.push(...tasks);
}
logger.info(\`Total tasks found: \${allTasks.length}\`);
// Apply filters
let filteredTasks = allTasks;
// Filter by type
if (query.data.type !== 'all') {
filteredTasks = filteredTasks.filter(
task => task.type.toLowerCase() === query.data.type
);
}
// Filter by status
if (query.data.status !== 'all') {
const isCompleted = query.data.status === 'completed';
filteredTasks = filteredTasks.filter(
task => (task.metadata?.completed || false) === isCompleted
);
}
// Filter by priority
if (query.data.priority !== 'all') {
filteredTasks = filteredTasks.filter(
task => (task.metadata?.priority || 'low') === query.data.priority
);
}
// Search filter
if (query.data.search) {
const searchLower = query.data.search.toLowerCase();
filteredTasks = filteredTasks.filter(
task =>
task.text.toLowerCase().includes(searchLower) ||
task.file.toLowerCase().includes(searchLower) ||
task.context?.toLowerCase().includes(searchLower)
);
}
// Sort tasks
const sortedTasks = sortTasks(filteredTasks, query.data.sortBy, query.data.sortOrder);
// Apply pagination
const totalCount = sortedTasks.length;
const paginatedTasks = sortedTasks.slice(
query.data.offset,
query.data.offset + query.data.limit
);
// Group by type and calculate stats
const grouped = extractor.groupByType(filteredTasks);
const stats = {
total: totalCount,
byType: Object.entries(grouped).map(([type, tasks]) => ({
type,
count: tasks.length,
completed: tasks.filter(t => t.metadata?.completed).length,
})),
byPriority: {
high: filteredTasks.filter(t => t.metadata?.priority === 'high').length,
medium: filteredTasks.filter(t => t.metadata?.priority === 'medium').length,
low: filteredTasks.filter(t => t.metadata?.priority === 'low').length,
},
};
// Enhanced response format with file and path information
const enhancedTasks = paginatedTasks.map(task => ({
...task,
id: task.id || \`\${task.file}-\${task.line}\`,
path: task.file, // Ensure path is included
title: task.text, // Ensure title is included
status: task.metadata?.completed ? 'completed' : 'pending',
}));
const response = {
tasks: enhancedTasks,
pagination: {
total: totalCount,
limit: query.data.limit,
offset: query.data.offset,
hasMore: query.data.offset + query.data.limit < totalCount,
},
stats,
query: query.data,
};
// Cache the response
cache.set(cacheKey, response);
return NextResponse.json(response);
}, requestId, { operation: 'taskScan' });
}
// POST: Update task status
export async function POST(request: NextRequest) {
const requestId = getRequestId(request);
return await withErrorHandling(async () => {
// Load dependencies with fallbacks
const deps = await loadDependenciesWithFallbacks(requestId);
const { Logger } = deps;
const logger = new Logger({ prefix: 'TaskUpdate' });
const body = await request.json();
const { taskId, updates } = body;
if (!taskId || !updates) {
throw ErrorFactory.invalidRequest(
'taskId and updates are required',
!taskId ? 'taskId' : 'updates',
requestId
);
}
// In a real implementation, you would:
// 1. Find the task by ID
// 2. Update the task in the source file
// 3. Return the updated task
logger.info(\`Task update requested for: \${taskId}\`);
return NextResponse.json({
success: true,
taskId,
updates,
warning: 'Task updating is disabled in template. Implement proper task updating.',
});
}, requestId, { operation: 'taskUpdate', taskId });
}
// Helper function to sort tasks
function sortTasks(
tasks: Task[],
sortBy: string,
sortOrder: 'asc' | 'desc'
): Task[] {
const sorted = [...tasks].sort((a, b) => {
let compareValue = 0;
switch (sortBy) {
case 'priority':
const priorityOrder: Record<string, number> = { high: 3, medium: 2, low: 1 };
const aPriority = priorityOrder[a.metadata?.priority || 'low'] || 1;
const bPriority = priorityOrder[b.metadata?.priority || 'low'] || 1;
compareValue = aPriority - bPriority;
break;
case 'date':
const aDate = a.metadata?.createdAt || 0;
const bDate = b.metadata?.createdAt || 0;
compareValue = aDate - bDate;
break;
case 'type':
compareValue = a.type.localeCompare(b.type);
break;
case 'path':
compareValue = a.file.localeCompare(b.file);
break;
}
return sortOrder === 'asc' ? compareValue : -compareValue;
});
return sorted;
}
// Export metadata for Next.js
export const metadata = {
title: 'Task Scanner API',
description: 'Scan and manage tasks in your codebase',
};`,
"pages-router/tasks-stream.ts": `import type { NextApiRequest, NextApiResponse } from 'next';
import { FileScanner, TaskExtractor, CacheManager } from '@vibeship/devtools/server';
import { getConfig } from '@/lib/vibeship-config';
// Store active connections
const clients = new Map<string, NextApiResponse>();
let lastTaskHash: string | null = null;
// SSE endpoint: streams the current task list to the client, then polls
// every 5s and pushes an 'update' event whenever the task snapshot changes.
export default async function handler(
req: NextApiRequest,
res: NextApiResponse
) {
if (req.method !== 'GET') {
res.setHeader('Allow', ['GET']);
return res.status(405).end(\`Method \${req.method} Not Allowed\`);
}
// Set SSE headers
res.setHeader('Content-Type', 'text/event-stream');
res.setHeader('Cache-Control', 'no-cache, no-transform');
res.setHeader('Connection', 'keep-alive');
res.setHeader('X-Accel-Buffering', 'no');
const clientId = crypto.randomUUID();
const { filter, includeCompleted } = req.query;
// Register this connection (the map lets other code enumerate clients)
clients.set(clientId, res);
// Hash of the snapshot last sent over THIS connection. Kept per-connection
// (not in the shared module-level variable) so concurrent clients do not
// race on one hash, and each poller writes only to its own response
// instead of re-broadcasting to every client from every interval.
let lastSentHash: string | null = null;
// Send initial connection message
res.write(\`data: \${JSON.stringify({
type: 'connected',
clientId,
timestamp: new Date().toISOString()
})}\\n\\n\`);
// Send initial tasks
try {
const tasks = await scanTasks(
filter as string | undefined,
includeCompleted === 'true'
);
// Seed the hash so the first poll doesn't resend an identical snapshot
lastSentHash = generateTaskHash(tasks);
res.write(\`data: \${JSON.stringify({
type: 'tasks',
tasks,
timestamp: new Date().toISOString()
})}\\n\\n\`);
} catch (error) {
// 'error' is unknown under strict TS; narrow before reading .message
res.write(\`data: \${JSON.stringify({
type: 'error',
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
})}\\n\\n\`);
}
// Poll for changes and push updates to this connection only
const pollInterval = setInterval(async () => {
try {
const tasks = await scanTasks(
filter as string | undefined,
includeCompleted === 'true'
);
const currentHash = generateTaskHash(tasks);
if (currentHash !== lastSentHash) {
lastSentHash = currentHash;
try {
res.write(\`data: \${JSON.stringify({
type: 'update',
tasks,
timestamp: new Date().toISOString()
})}\\n\\n\`);
} catch {
// Client disconnected; stop all work for this connection
clearInterval(pollInterval);
clearInterval(heartbeatInterval);
clients.delete(clientId);
}
}
} catch (error) {
console.error('Error polling tasks:', error);
}
}, 5000);
// Keep connection alive with heartbeat
const heartbeatInterval = setInterval(() => {
try {
res.write(\`data: \${JSON.stringify({
type: 'heartbeat',
timestamp: new Date().toISOString()
})}\\n\\n\`);
} catch (error) {
// Client disconnected
clearInterval(heartbeatInterval);
clearInterval(pollInterval);
clients.delete(clientId);
}
}, 30000);
// Handle client disconnect
req.on('close', () => {
clearInterval(heartbeatInterval);
clearInterval(pollInterval);
clients.delete(clientId);
res.end();
});
}
// Helper function to scan tasks.
// NOTE: the cache must live at module scope — the previous version
// constructed a new CacheManager on every call, so every lookup missed
// and results were recomputed on each poll.
const taskCache = new CacheManager({ maxSize: 100, ttl: 60000 });
// Scan the project for task comments, optionally filtered by task type
// and with completed tasks excluded; results are sorted (priority desc,
// then newest first) and cached for the CacheManager's TTL.
async function scanTasks(filter?: string, includeCompleted = false) {
const config = await getConfig();
const scanner = new FileScanner(config);
const extractor = new TaskExtractor();
// Check cache first
const cacheKey = \`tasks:\${filter || 'all'}:\${includeCompleted}\`;
const cached = taskCache.get(cacheKey);
if (cached) {
return cached;
}
// Scan files
const files = await scanner.scanWithInfo();
const allTasks = [];
for (const file of files) {
const tasks = extractor.extract(file.content, file.path, {
includeContext: true,
parseMetadata: true,
});
// Apply filter if specified
const filteredTasks = filter
? tasks.filter(task => task.type.toLowerCase() === filter.toLowerCase())
: tasks;
// Filter completed tasks if needed
const finalTasks = includeCompleted
? filteredTasks
: filteredTasks.filter(task => !task.metadata?.completed);
allTasks.push(...finalTasks);
}
// Sort by priority (high first), then by newest createdAt
const priorityOrder: Record<string, number> = { high: 3, medium: 2, low: 1 };
allTasks.sort((a, b) => {
// '?? 1' guards against unrecognized priority strings (avoids NaN)
const aPriority = priorityOrder[a.metadata?.priority || 'low'] ?? 1;
const bPriority = priorityOrder[b.metadata?.priority || 'low'] ?? 1;
if (aPriority !== bPriority) {
return bPriority - aPriority;
}
const aDate = a.metadata?.createdAt || 0;
const bDate = b.metadata?.createdAt || 0;
return bDate - aDate;
});
// Cache results
taskCache.set(cacheKey, allTasks);
return allTasks;
}
// Produce a short base-36 fingerprint of the task list so two snapshots
// can be compared cheaply. Only identity-relevant fields participate,
// so unrelated metadata churn does not trigger spurious updates.
function generateTaskHash(tasks: any[]): string {
const fingerprint = JSON.stringify(tasks.map(t => ({
id: t.id,
type: t.type,
text: t.text,
completed: t.metadata?.completed
})));
// 31x rolling hash (Java String.hashCode style: (h<<5)-h === 31*h),
// coerced back to a 32-bit integer after every step
let acc = 0;
for (let idx = 0; idx < fingerprint.length; idx++) {
acc = ((acc << 5) - acc + fingerprint.charCodeAt(idx)) | 0;
}
return acc.toString(36);
}`,
"pages-router/ai-chat.ts": `import type { NextApiRequest, NextApiResponse } from 'next';
import { FileScanner, MarkdownParser } from '@vibeship/devtools/server';
import { getConfig } from '@/lib/vibeship-config';
// AI chat endpoint: augments the conversation with project context and
// either returns one JSON reply or streams the answer as SSE events.
export default async function handler(
req: NextApiRequest,
res: NextApiResponse
) {
if (req.method !== 'POST') {
res.setHeader('Allow', ['POST']);
return res.status(405).end(\`Method \${req.method} Not Allowed\`);
}
try {
const { messages, context, stream = true } = req.body;
if (!messages || !Array.isArray(messages)) {
return res.status(400).json({ error: 'Messages array is required' });
}
// Get relevant context from markdown files
const relevantContext = await getRelevantContext(context);
// Prepare system message with context
const systemMessage = {
role: 'system',
content: \`You are an AI assistant helping with a development project.
\${relevantContext ? \`Here is relevant context from the project:\\n\\n\${relevantContext}\` : ''}
Please provide helpful, accurate, and concise responses. When discussing code, use proper markdown formatting.\`
};
const allMessages = [systemMessage, ...messages];
if (!stream) {
// Non-streaming response
const response = await callAIProvider(allMessages, false);
return res.status(200).json({ message: response });
}
// Set up SSE for streaming
res.setHeader('Content-Type', 'text/event-stream');
res.setHeader('Cache-Control', 'no-cache');
res.setHeader('Connection', 'keep-alive');
// Call AI provider with streaming
const aiStream = await callAIProvider(allMessages, true);
const reader = aiStream.getReader();
// Pump the provider stream to the client as SSE 'content' events,
// terminating with the conventional 'data: [DONE]' sentinel.
const processStream = async () => {
try {
while (true) {
const { done, value } = await reader.read();
if (done) {
res.write('data: [DONE]\\n\\n');
res.end();
break;
}
// Format as SSE
res.write(\`data: \${JSON.stringify({
type: 'content',
content: value,
timestamp: new Date().toISOString()
})}\\n\\n\`);
}
} catch (error) {
// 'error' is unknown under strict TS; narrow before reading .message
res.write(\`data: \${JSON.stringify({
type: 'error',
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
})}\\n\\n\`);
res.end();
}
};
// Handle client disconnect: cancel the reader so the pump loop stops
req.on('close', () => {
reader.cancel();
res.end();
});
// Deliberately not awaited: the handler returns while the stream is
// still being pumped; errors are handled inside processStream itself.
void processStream();
} catch (error) {
console.error('AI chat error:', error);
if (!res.headersSent) {
res.status(500).json({ error: 'Internal server error' });
}
}
}
// Collect context snippets from markdown files: explicitly requested
// files (first 1000 chars each) plus files whose content matches the
// search string (first 500 chars each), joined with '---' separators.
async function getRelevantContext(contextRequest?: {
files?: string[];
search?: string;
limit?: number;
}): Promise<string> {
if (!contextRequest) return '';
const config = await getConfig();
const scanner = new FileScanner(config);
const parser = new MarkdownParser();
const contexts: string[] = [];
// Get specific files if requested
if (contextRequest.files?.length) {
for (const file of contextRequest.files) {
try {
const content = await scanner.readFile(file);
const parsed = parser.parse(content);
contexts.push(\`File: \${file}\\n\${parsed.content.slice(0, 1000)}\`);
} catch (error) {
console.error(\`Error reading file \${file}:\`, error);
}
}
}
// Search markdown files for relevant content. 'limit' caps the number
// of MATCHING files included — the previous version sliced the candidate
// list before searching, so matches beyond the first few markdown files
// could never be found.
if (contextRequest.search) {
const needle = contextRequest.search.toLowerCase();
const limit = contextRequest.limit || 5;
const files = await scanner.scan();
const markdownFiles = files.filter(
file => file.endsWith('.md') || file.endsWith('.mdx')
);
let matches = 0;
for (const file of markdownFiles) {
if (matches >= limit) break;
try {
const content = await scanner.readFile(file);
if (content.toLowerCase().includes(needle)) {
const parsed = parser.parse(content);
contexts.push(\`File: \${file}\\n\${parsed.content.slice(0, 500)}\`);
matches++;
}
} catch (error) {
console.error(\`Error searching file \${file}:\`, error);
}
}
}
return contexts.join('\\n\\n---\\n\\n');
}
// Placeholder for AI provider integration.
// Replace with a real provider call (OpenAI, Anthropic, Azure OpenAI, ...).
// When 'stream' is false, resolves to a complete reply string; when true,
// resolves to a ReadableStream that yields the reply word by word.
async function callAIProvider(
messages: any[],
stream: boolean
): Promise<any> {
// Non-streaming mode: hand back the canned reply in one piece.
if (!stream) {
return "This is a placeholder response. Please integrate with your preferred AI provider.";
}
// Streaming mode: emit the canned reply one word at a time with a short
// delay, mimicking token-by-token delivery from a real provider.
const cannedReply = "This is a streaming placeholder response. Please integrate with your preferred AI provider.";
return new ReadableStream({
async start(controller) {
for (const token of cannedReply.split(' ')) {
controller.enqueue(token + ' ');
await new Promise(resolve => setTimeout(resolve, 100));
}
controller.close();
}
});
}
// Next.js API route configuration: cap parsed request bodies at 1 MB
// so oversized chat payloads are rejected up front.
export const config = {
api: {
bodyParser: {
sizeLimit: '1mb',
},
},
};`,
"pages-router/files.ts": `import type { NextApiRequest, NextApiResponse } from 'next';
import { FileScanner, PathValidator, Logger } from '@vibeship/devtools/server';
import { getConfig } from '@/lib/vibeship-config';
import { z } from 'zod';
// Request validation schemas (zod) for the file-operations endpoint.
// Read one file; 'base64' encoding allows binary-safe transport.
const ReadFileSchema = z.object({
path: z.string().min(1),
encoding: z.enum(['utf8', 'base64']).optional().default('utf8'),
});
// Write 'content' to 'path' (encoding semantics as above).
const WriteFileSchema = z.object({
path: z.string().min(1),
content: z.string(),
encoding: z.enum(['utf8', 'base64']).optional().default('utf8'),
});
// Delete the file at 'path'.
const DeleteFileSchema = z.object({
path: z.string().min(1),
});
// List files under optional 'path'; 'pattern' filters results
// (pattern format is decided by the scanner — presumably a glob; confirm).
const ListFilesSchema = z.object({
path: z.string().optional(),
pattern: z.string().optional(),
recursive: z.boolean().optional().default(true),
});
// File-operations endpoint: dispatches to per-method handlers.
export default async function handler(
req: NextApiRequest,
res: NextApiResponse
) {
const logger = new Logger({ prefix: 'FileOperations' });
try {
// 'return await' (not bare 'return') so rejections from the async
// sub-handlers are caught by this try/catch instead of escaping
// as unhandled rejections.
switch (req.method) {
case 'GET':
return await handleGet(req, res, logger);
case 'POST':
return await handlePost(req, res, logger);
case 'PUT':
return await handlePut(req, res, logger);
case 'DELETE':
return await handleDelete(req, res, logger);
default:
res.setHeader('Allow', ['GET', 'POST', 'PUT', 'DELETE']);
return res.status(405).end(\`Method \${req.method} Not Allowed\`);
}
} catch (error) {
logger.error('File operation error:', error);
return res.status(500).json({ error: 'Internal server error' });
}
}
// GET: Read file or list files
async function handleGet(
req: NextApiRequest,
res: NextApiResponse,
logger: Logger
) {
const { path, encoding, pattern, recursive } = req.query;
if (path && typeof path === 'string') {
// Read single file
const result = ReadFileSchema.safeParse({
path,
encoding: encoding || 'utf8'
});
if (!result.success) {
return res.status(400).json({
error: 'Invalid parameters',
details: result.error.flatten()
});
}
const config = await getConfig();
const scanner = new FileScanner(config);
co