llmverify
Version:
AI Output Verification Toolkit — Local-first LLM safety, hallucination detection, PII redaction, prompt injection defense, and runtime monitoring. Zero telemetry. OWASP LLM Top 10 aligned.
77 lines (76 loc) • 2.3 kB
TypeScript
/**
* LLM Monitor Wrapper
*
* Drop-in wrapper that adds health monitoring to any LLM client.
* Tracks latency, token rate, response fingerprint, and overall health.
*
* WHAT THIS DOES:
* ✅ Wraps any LLM client with health monitoring
* ✅ Tracks performance metrics over time
* ✅ Detects behavioral drift and anomalies
* ✅ Provides lifecycle hooks for health changes
* ✅ Returns health report with each response
*
* WHAT THIS DOES NOT DO:
* ❌ Modify LLM responses
* ❌ Store prompts or responses (ephemeral only)
* ❌ Make predictions about LLM behavior
* ❌ Block or filter responses (monitoring only)
*
* PRIVACY GUARANTEE:
* - No data is stored or transmitted
* - All analysis is in-memory and ephemeral
* - Prompts and responses are not logged
*
* @module wrapper/monitorLLM
* @author Haiec
* @license MIT
*/
import { MonitorConfig, HealthReport, HealthStatus } from '../types/runtime';
import { BaselineEngine } from '../engines/runtime/baseline';
import { LlmClient } from '../adapters/types';
/**
 * Generic LLM client interface (legacy).
 * Any client with a generate method can be wrapped.
 * @deprecated Use LlmClient from adapters for new code
 */
export interface LLMClient {
  /** Sends one prompt to the underlying model and resolves with its completion. */
  generate(opts: GenerateOptions): Promise<GenerateResponse>;
  /**
   * Open index signature: provider-specific members on the wrapped client
   * (config objects, extra methods, etc.) remain type-valid after wrapping.
   */
  [key: string]: unknown;
}
/**
 * Options accepted by {@link LLMClient.generate}.
 * Only `prompt` is required; everything else is forwarded to the provider.
 */
export interface GenerateOptions {
  /** The user prompt to send to the model. Required. */
  prompt: string;
  /** Provider-specific model identifier (e.g. a model name string). */
  model?: string;
  /** Optional system prompt / instructions. */
  system?: string;
  /** Sampling temperature; provider-defined range and default. */
  temperature?: number;
  /** Upper bound on tokens to generate; provider-defined default when omitted. */
  maxTokens?: number;
  /** Open index signature: any additional provider-specific options pass through. */
  [key: string]: unknown;
}
/**
 * Result returned by {@link LLMClient.generate}.
 * Only `text` is guaranteed; the optional fields depend on what the provider reports.
 */
export interface GenerateResponse {
  /** The generated completion text. Always present. */
  text: string;
  /** Completion token count, if the provider reports it. */
  tokens?: number;
  /** Total token count (presumably prompt + completion — confirm against provider adapters). */
  totalTokens?: number;
  /** Model identifier echoed back by the provider, if available. */
  model?: string;
  /** Why generation stopped (e.g. length limit vs. natural stop), if the provider reports it. */
  finishReason?: string;
  /** Open index signature: provider-specific response fields pass through untouched. */
  [key: string]: unknown;
}
/**
 * Union type for any supported client: either the legacy {@link LLMClient}
 * shape defined here, or the newer `LlmClient` from the adapters module.
 * {@link monitorLLM} accepts both.
 */
export type AnyLLMClient = LLMClient | LlmClient;
/**
 * Response from a monitored client. Identical to the wrapped client's
 * {@link GenerateResponse}, plus an attached health report — the monitor
 * adds metadata but does not modify the response content.
 */
export interface MonitoredResponse extends GenerateResponse {
  /** Health report computed for this call (latency, token rate, drift, overall status). */
  llmverify: HealthReport;
}
/**
 * Monitored client interface — the shape returned by {@link monitorLLM}.
 * Mirrors the wrapped client's `generate` but augments each response with
 * a health report, and exposes accessors for the monitoring state.
 */
export interface MonitoredClient {
  /** Delegates to the wrapped client, then attaches a {@link HealthReport} to the response. */
  generate(opts: GenerateOptions): Promise<MonitoredResponse>;
  /** Snapshot of the current behavioral baseline, as produced by `BaselineEngine.get`. */
  getBaseline(): ReturnType<BaselineEngine['get']>;
  /** Health status computed for the most recent `generate` call. */
  getLastHealth(): HealthStatus;
  /** Resets monitoring state — presumably discards the accumulated baseline; confirm in implementation. */
  resetBaseline(): void;
}
/**
 * Wraps an LLM client with in-memory health monitoring.
 *
 * The returned client forwards `generate` calls to `originalClient` unchanged
 * (monitoring only — responses are never modified, blocked, or stored) and
 * attaches a {@link HealthReport} to each response.
 *
 * @param originalClient - Any legacy {@link LLMClient} or adapter `LlmClient` to wrap.
 * @param config - Optional monitor configuration; defaults applied by the implementation when omitted.
 * @returns A {@link MonitoredClient} exposing `generate`, baseline access, and reset.
 */
export declare function monitorLLM(originalClient: AnyLLMClient, config?: MonitorConfig): MonitoredClient;