llmverify

AI Output Verification Toolkit — Local-first LLM safety, hallucination detection, PII redaction, prompt injection defense, and runtime monitoring. Zero telemetry. OWASP LLM Top 10 aligned.

#!/usr/bin/env node
/**
 * llmverify HTTP Server
 *
 * Long-running HTTP API server for IDE and external tool integration.
 * Provides REST endpoints for AI output verification.
 *
 * @module server
 * @author KingCaliber Labs
 * @license MIT
 */
declare const app: import("express-serve-static-core").Express;
export declare function startServer(port?: number): import("http").Server<typeof import("http").IncomingMessage, typeof import("http").ServerResponse>;
export { app };
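
A minimal usage sketch based only on the declarations above. The import subpath ("llmverify/server") is an assumption rather than a documented entry point, the port number is illustrative, and the /healthz route is a hypothetical addition, not part of llmverify's API; the sketch only relies on startServer returning a Node http.Server and app being the underlying Express instance.

// Hypothetical usage sketch; "llmverify/server" as the import subpath is
// an assumption, not a documented entry point of the package.
import { startServer, app } from "llmverify/server";

// Per the declaration, startServer(port?) returns a Node http.Server.
// The port value here is illustrative.
const server = startServer(3000);

// Because the Express instance is exported, callers can attach their own
// middleware or routes. This health endpoint is hypothetical.
app.get("/healthz", (_req, res) => {
  res.json({ ok: true });
});

// Shut down cleanly when the host process is asked to exit.
process.on("SIGTERM", () => {
  server.close();
});

Since the Express app is exported alongside startServer, the verification endpoints could also be exercised in-process (for example in integration tests) without binding a real port.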