UNPKG

prompt-validation

Version: 0.1.1

Validate and standardize AI prompt outputs, detect hallucinations, enforce patterns, and auto-correct issues.

443 lines (430 loc) 17.7 kB
#!/usr/bin/env node
"use strict";

// ---- esbuild CommonJS interop helpers (bundler-generated) ----
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copy own properties of `from` onto `to` as live getters (skipping `except`).
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, {
          get: () => from[key],
          enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
        });
  }
  return to;
};
// Wrap a CommonJS export so it can be consumed with ESM `default` semantics.
var __toESM = (mod, isNodeMode, target) => (
  target = mod != null ? __create(__getProtoOf(mod)) : {},
  __copyProps(
    // If the importer is in node compatibility mode or this is not an ESM
    // file that has been converted to a CommonJS file using a Babel-
    // compatible transform (i.e. "__esModule" has not been set), then set
    // "default" to the CommonJS "module.exports" for node compatibility.
    isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
    mod
  )
);

// src/cli.ts
var import_commander = require("commander");

// src/validator.ts
var import_promises = __toESM(require("fs/promises"), 1);

// src/patterns.ts
var import_ajv = __toESM(require("ajv"), 1);
var ajv = new import_ajv.default();
// FIX: compile the (static) schema once at module load instead of on every
// "json-schema" rule evaluation — ajv.compile() is comparatively expensive
// and the schema never changes.
var validateObjectSchema = ajv.compile({ type: "object" });

/**
 * Run the selected validation rules against `text`.
 * @param {string} text - Raw content to validate.
 * @param {string[]} rules - Rule names (see AVAILABLE_RULES).
 * @returns {string[]} Human-readable error messages; empty when valid.
 */
function validateWithRules(text, rules) {
  const errors = [];
  for (const rule of rules) {
    switch (rule) {
      case "email":
        if (!/\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/i.test(text)) {
          errors.push("Text does not contain a valid email address.");
        }
        break;
      case "json-schema":
        try {
          const json = JSON.parse(text);
          if (!validateObjectSchema(json)) {
            errors.push("JSON does not match schema.");
          }
        } catch {
          errors.push("Invalid JSON format.");
        }
        break;
      case "length-limit":
        if (text.length > 500) {
          errors.push("Text exceeds length limit (500 chars).");
        }
        break;
      case "no-hallucination":
        // Toy heuristic: the bundled demo flags a single marker word.
        if (/unicorn/i.test(text)) {
          errors.push("Hallucination detected: contains 'unicorn'.");
        }
        break;
      default:
        errors.push(`Unknown rule: ${rule}`);
    }
  }
  return errors;
}

// src/autocorrect.ts
/**
 * Best-effort auto-correction: pretty-print JSON-looking input (or strip
 * trailing commas when it fails to parse); otherwise capitalize the first
 * character and ensure terminal punctuation.
 * @param {string} text
 * @returns {string} Corrected text.
 */
function autoCorrect(text) {
  let corrected = text;
  let isValidJson = false;
  if (/^\s*{/.test(text) || /^\s*\[/.test(text)) {
    try {
      corrected = JSON.stringify(JSON.parse(text), null, 2);
      isValidJson = true;
    } catch {
      // Unparseable — strip trailing commas, a common malformed-JSON slip.
      corrected = text.replace(/,\s*}/g, "}").replace(/,\s*]/g, "]");
    }
  }
  if (!isValidJson) {
    if (corrected.length > 0) {
      corrected = corrected.charAt(0).toUpperCase() + corrected.slice(1);
    }
    if (!/[.?!]$/.test(corrected)) {
      corrected += ".";
    }
  }
  return corrected;
}

// src/validator.ts
/**
 * Validate a file on disk and optionally write an auto-corrected copy to
 * `<file>.fixed` (the original file is never modified).
 * @param {string} file - Path to the file to validate.
 * @param {{rules?: string[], fix?: boolean}} options
 * @returns {Promise<{valid: boolean, errors: string[], fixed: boolean}>}
 */
async function validateFile(file, options) {
  const raw = await import_promises.default.readFile(file, "utf-8");
  let errors = [];
  if (options.rules) {
    errors = validateWithRules(raw, options.rules);
  }
  let fixedContent = null;
  if (options.fix && errors.length > 0) {
    fixedContent = autoCorrect(raw);
    await import_promises.default.writeFile(file + ".fixed", fixedContent, "utf-8");
  }
  return { valid: errors.length === 0, errors, fixed: fixedContent !== null };
}

// src/cli.ts
var import_chalk = __toESM(require("chalk"), 1);
var import_promises2 = __toESM(require("fs/promises"), 1);
var import_path = __toESM(require("path"), 1);

// src/promptImprover.ts
/**
 * Improve a prompt, preferring the requested LLM provider when its API key
 * is set; silently falls back to the local structurer when the provider is
 * unset, unreachable, or returns nothing.
 * @param {string} input - Original prompt text.
 * @param {{targetFormat?: string, temperature?: number, llm?: string, model?: string}} [options]
 * @returns {Promise<{improved: string, meta: {used: string, model?: string}}>}
 */
async function improvePrompt(input, options = {}) {
  const targetFormat = options.targetFormat ?? "text";
  // FIX: a non-numeric --temperature parses to NaN; fall back to the default
  // instead of forwarding NaN to the provider APIs.
  const temperature = Number.isFinite(options.temperature) ? options.temperature : 0.2;
  if (options.llm === "openai" && process.env.OPENAI_API_KEY) {
    const model = options.model ?? "gpt-4o-mini";
    const improved2 = await improveWithOpenAI(input, { model, temperature, targetFormat });
    if (improved2) return { improved: improved2, meta: { used: "openai", model } };
  }
  if (options.llm === "gemini" && process.env.GEMINI_API_KEY) {
    const model = options.model ?? "gemini-1.5-pro";
    const improved2 = await improveWithGemini(input, { model, temperature, targetFormat });
    if (improved2) return { improved: improved2, meta: { used: "gemini", model } };
  }
  const improved = locallyStructurePrompt(input, targetFormat);
  return { improved, meta: { used: "local" } };
}

/**
 * Deterministic, offline prompt restructurer: wraps the user's text in a
 * meta-prompt with format-specific output guidance.
 * @param {string} input
 * @param {string} targetFormat - "json" | "markdown" | anything else = text.
 * @returns {string} The meta-prompt (also used as the LLM request body).
 */
function locallyStructurePrompt(input, targetFormat) {
  const formatTip = targetFormat === "json" ? `
Output Format:
- JSON object only, no prose.
- Include only the required keys. No additional keys.
- Use lowercase snake_case keys.
Example:
{
  "summary": "...",
  "steps": ["...", "..."],
  "constraints": ["..."],
  "final_answer": "..."
}` : targetFormat === "markdown" ? `
Output Format:
- Markdown only.
- Use headings, bullet lists, and code blocks where helpful.
- End with a short "Summary" section.` : `
Output Format:
- Clear, concise textual answer.
- Use numbered steps where applicable.
- End with a one-line TL;DR.`;
  return `You are an expert assistant.

Objective:
- Rewrite the user's prompt into a high-quality, unambiguous instruction that maximizes answer quality.
- Preserve the original intent exactly.

User Intent:
"""
${input.trim()}
"""

Requirements:
- Disambiguate vague terms (timeframes, data ranges, success criteria).
- Specify constraints: time, tools, data sources, formatting, tone.
- Add step-by-step guidance (decompose complex tasks).
- Include evaluation criteria (what makes a good answer).
- Add edge cases and non-goals to avoid scope creep.
- Keep it as short as possible while precise.
${formatTip}

Deliverable:
- Return ONLY the improved prompt text.
- Do not include explanations or preambles.
`;
}

/**
 * Call the OpenAI chat-completions API to rewrite the prompt.
 * Best-effort: returns null on any HTTP or network failure so the caller
 * can fall back to the local improver.
 * @param {string} input
 * @param {{model: string, temperature: number, targetFormat: string}} params
 * @returns {Promise<string|null>}
 */
async function improveWithOpenAI(input, params) {
  try {
    const fetchFn = globalThis.fetch;
    const res = await fetchFn("https://api.openai.com/v1/chat/completions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${process.env.OPENAI_API_KEY}`
      },
      body: JSON.stringify({
        model: params.model,
        temperature: params.temperature,
        messages: [
          { role: "system", content: "You rewrite prompts to be clearer, more specific, and better structured." },
          { role: "user", content: locallyStructurePrompt(input, params.targetFormat) }
        ]
      })
    });
    if (!res.ok) return null;
    const data = await res.json();
    return data?.choices?.[0]?.message?.content?.trim() ?? null;
  } catch {
    return null;
  }
}

/**
 * Call the Gemini generateContent API to rewrite the prompt.
 * Best-effort: returns null on any HTTP or network failure so the caller
 * can fall back to the local improver.
 * @param {string} input
 * @param {{model: string, temperature: number, targetFormat: string}} params
 * @returns {Promise<string|null>}
 */
async function improveWithGemini(input, params) {
  try {
    const fetchFn = globalThis.fetch;
    const url = `https://generativelanguage.googleapis.com/v1beta/models/${params.model}:generateContent?key=${process.env.GEMINI_API_KEY}`;
    const res = await fetchFn(url, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        contents: [
          { role: "user", parts: [{ text: locallyStructurePrompt(input, params.targetFormat) }] }
        ],
        generationConfig: { temperature: params.temperature }
      })
    });
    if (!res.ok) return null;
    const data = await res.json();
    const text = data?.candidates?.[0]?.content?.parts?.[0]?.text ?? null;
    return typeof text === "string" ? text.trim() : null;
  } catch {
    return null;
  }
}

// src/cli.ts
var import_boxen = __toESM(require("boxen"), 1);
var import_gradient_string = __toESM(require("gradient-string"), 1);
var import_ora = __toESM(require("ora"), 1);
var import_inquirer = __toESM(require("inquirer"), 1);
var import_figlet = __toESM(require("figlet"), 1);
var import_clipboardy = __toESM(require("clipboardy"), 1);
var program = new import_commander.Command();

// Render the boxed ASCII-art banner shown before every command.
function showBanner() {
  const ascii = import_figlet.default.textSync("Prompt\nValidation", { horizontalLayout: "fitted" });
  const title = import_gradient_string.default.atlas.multiline(ascii);
  const msg = (0, import_boxen.default)(
    `${title}
${import_chalk.default.dim("\u{1F50E} Validate \u2022 \u2728 Improve \u2022 \u{1F680} Ship better prompts")}`,
    { padding: 1, borderColor: "cyan", borderStyle: "round" }
  );
  console.log(msg);
}

/**
 * Load rules from `.promptvalidationrc.json` in `cwd`, if present.
 * @param {string} cwd
 * @returns {Promise<string[]|undefined>} The configured rules, or undefined
 *   when the file is missing, unreadable, or has no `rules` array.
 */
async function loadConfigRules(cwd) {
  try {
    const configPath = import_path.default.join(cwd, ".promptvalidationrc.json");
    const raw = await import_promises2.default.readFile(configPath, "utf-8");
    const parsed = JSON.parse(raw);
    if (Array.isArray(parsed.rules)) return parsed.rules;
  } catch {
    // Config is optional; absence or bad JSON simply means "no config rules".
  }
  return void 0;
}

/**
 * Normalize the --rules option: accepts a string or array, with items
 * comma- or space-separated; empty tokens are dropped.
 * @param {string|string[]|undefined} rulesOpt
 * @returns {string[]|undefined}
 */
function normalizeRulesOption(rulesOpt) {
  if (!rulesOpt) return void 0;
  if (Array.isArray(rulesOpt)) {
    return rulesOpt.flatMap((s) => s.split(",").map((t) => t.trim())).filter(Boolean);
  }
  return rulesOpt.split(",").map((t) => t.trim()).filter(Boolean);
}

// Read all of stdin as a UTF-8 string (used when the file argument is "-").
async function readStdin() {
  return await new Promise((resolve, reject) => {
    let data = "";
    process.stdin.setEncoding("utf-8");
    process.stdin.on("data", (chunk) => data += chunk);
    process.stdin.on("end", () => resolve(data));
    process.stdin.on("error", reject);
  });
}

var AVAILABLE_RULES = ["email", "json-schema", "length-limit", "no-hallucination"];

program.name("prompt-validation").description("Validate and fix AI prompt outputs").version("0.1.1");

program
  .command("check <file>")
  .description("\u{1F50E} Validate a file with rules")
  .option("-r, --rules <rules...>", "Validation rules to apply (comma or space separated)")
  .option("--fix", "Automatically fix common issues")
  .action(async (file, options) => {
    try {
      showBanner();
      const spinner = (0, import_ora.default)({ text: "Validating...", color: "cyan" }).start();
      let result;
      try {
        const cwdRules = await loadConfigRules(process.cwd());
        const cliRules = normalizeRulesOption(options.rules);
        // CLI rules take precedence over config-file rules.
        const rules = cliRules ?? cwdRules;
        result = await validateFile(file, { ...options, rules });
      } finally {
        // FIX: always stop the spinner, even when validateFile throws
        // (e.g. the file does not exist) — otherwise it animates forever.
        spinner.stop();
      }
      if (result.valid) {
        console.log(import_chalk.default.green("\u2705 Validation passed!"));
      } else {
        console.log(import_chalk.default.red("\u274C Validation failed:"));
        result.errors.forEach((e) => console.log(" - " + e));
        // FIX: report failure to the shell so `check` can gate scripts/CI.
        process.exitCode = 1;
      }
      if (options.fix && result.fixed) {
        console.log(import_chalk.default.yellow(`\u{1F527} Auto-fixed issues and saved to ${file}.fixed`));
      }
    } catch (err) {
      console.error(import_chalk.default.red("Error:"), err);
      // FIX: surface hard errors as a non-zero exit code as well.
      process.exitCode = 1;
    }
  });

program
  .command("improve <file>")
  .description("\u2728 Improve a prompt for better LLM outputs. Use '-' to read from stdin.")
  .option("--out <file>", "Write improved prompt to a file")
  .option("--format <format>", "Target output guidance (json|markdown|text)", "text")
  .option("--llm <provider>", "Use an LLM to improve (openai|gemini)")
  .option("--model <name>", "Model name (e.g., gpt-4o-mini, gemini-1.5-pro)")
  .option("--temperature <num>", "LLM temperature", (v) => Number(v), 0.2)
  .option("--copy", "Copy improved prompt to clipboard")
  .action(async (file, options) => {
    try {
      showBanner();
      const spinner = (0, import_ora.default)({ text: "Crafting a better prompt... \u2728", color: "cyan" }).start();
      let improved;
      let meta;
      try {
        const input = file === "-" ? await readStdin() : await import_promises2.default.readFile(file, "utf-8");
        ({ improved, meta } = await improvePrompt(input, {
          targetFormat: options.format,
          llm: options.llm,
          model: options.model,
          temperature: options.temperature
        }));
      } finally {
        // FIX: stop the spinner even when reading or improving throws.
        spinner.stop();
      }
      if (options.out) {
        await import_promises2.default.writeFile(options.out, improved, "utf-8");
        console.log((0, import_boxen.default)(import_chalk.default.green(`\u{1F4BE} Saved improved prompt to ${options.out}${meta.model ? ` (via ${meta.used}:${meta.model})` : ""}`), { padding: 1, borderColor: "green" }));
      } else {
        console.log((0, import_boxen.default)(improved, { padding: 1, borderColor: "magenta" }));
      }
      if (options.copy) {
        await import_clipboardy.default.write(improved);
        console.log(import_chalk.default.cyan("\u{1F4CB} Copied improved prompt to clipboard"));
      }
    } catch (err) {
      console.error(import_chalk.default.red("Error:"), err);
      process.exitCode = 1;
    }
  });

// Menu-driven REPL: loops until the user chooses Quit or presses Ctrl+C.
async function interactiveLoop() {
  showBanner();
  process.on("SIGINT", () => {
    console.log("\n\u{1F44B} Goodbye!");
    process.exit(0);
  });
  while (true) {
    const { action } = await import_inquirer.default.prompt([
      {
        name: "action",
        type: "list",
        message: "What would you like to do?",
        choices: [
          { name: "\u{1F50E} Validate a file", value: "check" },
          { name: "\u2728 Improve a prompt", value: "improve" },
          { name: "\u{1F4CB} Paste prompt from clipboard and improve", value: "improve-clipboard" },
          { name: "\u2699\uFE0F View current rules", value: "rules" },
          { name: "\u2753 Help", value: "help" },
          { name: "\u{1F6AA} Quit", value: "quit" }
        ]
      }
    ]);
    if (action === "quit") {
      console.log("\u{1F44B} Bye!");
      break;
    }
    if (action === "help") {
      console.log(import_chalk.default.cyan("Use arrow keys to navigate. This menu loops until you choose Quit or press Ctrl+C."));
      continue;
    }
    if (action === "rules") {
      const rules = await loadConfigRules(process.cwd()) ?? AVAILABLE_RULES;
      console.log((0, import_boxen.default)(`Current rules:
- ${rules.join("\n- ")}`, { padding: 1, borderColor: "cyan" }));
      continue;
    }
    if (action === "check") {
      const answers = await import_inquirer.default.prompt([
        { name: "file", message: "Path to file:", type: "input", validate: (v) => !!v || "Required" },
        { name: "rules", message: "Select rules:", type: "checkbox", choices: AVAILABLE_RULES, default: await loadConfigRules(process.cwd()) ?? ["json-schema", "length-limit"] },
        { name: "fix", message: "Auto-fix issues?", type: "confirm", default: true }
      ]);
      const spinner = (0, import_ora.default)({ text: "Validating...", color: "cyan" }).start();
      let result;
      try {
        result = await validateFile(answers.file, { rules: answers.rules, fix: answers.fix });
      } finally {
        // FIX: restore the terminal before any error propagates.
        spinner.stop();
      }
      if (result.valid) {
        console.log(import_chalk.default.green("\u2705 Validation passed!"));
      } else {
        console.log(import_chalk.default.red("\u274C Validation failed:"));
        result.errors.forEach((e) => console.log(" - " + e));
      }
      if (answers.fix && result.fixed) {
        console.log(import_chalk.default.yellow(`\u{1F527} Auto-fixed issues and saved to ${answers.file}.fixed`));
      }
      continue;
    }
    if (action === "improve" || action === "improve-clipboard") {
      let input = "";
      if (action === "improve-clipboard") {
        try {
          input = await import_clipboardy.default.read();
        } catch {
          input = "";
        }
        if (!input) {
          console.log(import_chalk.default.red("Clipboard is empty. Please copy some text and try again."));
          continue;
        }
      } else {
        const { file } = await import_inquirer.default.prompt([{ name: "file", message: "Path to prompt file (or '-' for stdin):", type: "input", validate: (v) => !!v || "Required" }]);
        input = file === "-" ? await readStdin() : await import_promises2.default.readFile(file, "utf-8");
      }
      const { format, llm, model, temperature, shouldCopy, out } = await import_inquirer.default.prompt([
        { name: "format", message: "Target format:", type: "list", choices: ["text", "markdown", "json"], default: "text" },
        { name: "llm", message: "Use LLM?", type: "list", choices: [
          { name: "No (local improver)", value: void 0 },
          { name: "OpenAI", value: "openai" },
          { name: "Gemini", value: "gemini" }
        ], default: void 0 },
        { name: "model", message: "Model (optional):", type: "input", when: (a) => !!a.llm },
        { name: "temperature", message: "Temperature (0-1):", type: "number", default: 0.2 },
        { name: "out", message: "Write to file (optional):", type: "input" },
        { name: "shouldCopy", message: "Copy improved prompt to clipboard?", type: "confirm", default: true }
      ]);
      const spinner = (0, import_ora.default)({ text: "Crafting a better prompt... \u2728", color: "cyan" }).start();
      let improved;
      let meta;
      try {
        ({ improved, meta } = await improvePrompt(input, {
          targetFormat: format,
          llm,
          model,
          temperature: Number(temperature) || 0.2
        }));
      } finally {
        // FIX: restore the terminal before any error propagates.
        spinner.stop();
      }
      if (out) {
        await import_promises2.default.writeFile(out, improved, "utf-8");
        console.log((0, import_boxen.default)(import_chalk.default.green(`\u{1F4BE} Saved improved prompt to ${out}${meta.model ? ` (via ${meta.used}:${meta.model})` : ""}`), { padding: 1, borderColor: "green" }));
      } else {
        console.log((0, import_boxen.default)(improved, { padding: 1, borderColor: "magenta" }));
      }
      if (shouldCopy) {
        await import_clipboardy.default.write(improved);
        console.log(import_chalk.default.cyan("\u{1F4CB} Copied improved prompt to clipboard"));
      }
      continue;
    }
  }
}

program
  .command("interactive")
  .description("\u{1F9ED} Incredible interactive menu (loops until you quit)")
  .action(async () => {
    await interactiveLoop();
  });

program.parse();