/*
 * @ai2070/l0 — "L0: The Missing Reliability Substrate for AI"
 * Type declarations for the OpenTelemetry integration (opentelemetry.d.ts).
 */
import type { L0Telemetry } from "../types/l0";
import type { GuardrailViolation } from "../types/guardrails";
import type { L0Monitor } from "./monitoring";
import type { Tracer, Span, SpanOptions, Attributes } from "@opentelemetry/api";
import type { Meter, Counter, Histogram, UpDownCounter, ObservableGauge, MetricOptions } from "@opentelemetry/api";
import { SpanStatusCode, SpanKind } from "@opentelemetry/api";
import type { EventHandler } from "./event-handlers";
// Re-exported runtime enums from @opentelemetry/api, for consumer convenience.
export { SpanStatusCode, SpanKind };
// Type-only re-exports, aliased with an `OTel` prefix to avoid name collisions
// with L0's own telemetry types.
export type { Tracer as OTelTracer, Span as OTelSpan, SpanOptions, Attributes, Meter as OTelMeter, Counter as OTelCounter, Histogram as OTelHistogram, UpDownCounter as OTelUpDownCounter, ObservableGauge as OTelObservableGauge, MetricOptions, };
/**
 * Configuration for {@link L0OpenTelemetry}.
 *
 * Only the `@opentelemetry/api` types are referenced here, so callers supply
 * `tracer`/`meter` instances from their own SDK setup.
 * NOTE(review): behavior when `tracer`/`meter` are omitted is not visible from
 * this declaration file — confirm against the implementation.
 */
export interface OpenTelemetryConfig {
/** Tracer used for span creation (see `traceStream` / `createSpan`). */
tracer?: Tracer;
/** Meter used to create the counters/histograms/gauge held by {@link L0OpenTelemetry}. */
meter?: Meter;
/** Logical service name — presumably attached to emitted telemetry; TODO confirm. */
serviceName?: string;
/** Whether per-token events are traced (see `recordToken`) — assumption from naming; verify. */
traceTokens?: boolean;
/** Whether raw token text is recorded; likely privacy-sensitive — assumption from naming; verify. */
recordTokenContent?: boolean;
/** Whether guardrail violations are recorded (see `recordGuardrailViolation`) — assumption from naming; verify. */
recordGuardrailViolations?: boolean;
/** Attributes presumably merged onto every span/metric emitted — TODO confirm. */
defaultAttributes?: Attributes;
}
/**
 * Attribute keys applied to spans and metrics.
 *
 * The `gen_ai.*` values follow the OpenTelemetry GenAI semantic conventions;
 * the `l0.*` values are L0-specific extensions.
 */
export declare const SemanticAttributes: {
readonly LLM_SYSTEM: "gen_ai.system";
readonly LLM_REQUEST_MODEL: "gen_ai.request.model";
readonly LLM_RESPONSE_MODEL: "gen_ai.response.model";
readonly LLM_REQUEST_MAX_TOKENS: "gen_ai.request.max_tokens";
readonly LLM_REQUEST_TEMPERATURE: "gen_ai.request.temperature";
readonly LLM_REQUEST_TOP_P: "gen_ai.request.top_p";
// NOTE(review): key name is singular but the attribute value is the (conventional)
// plural "finish_reasons". The string is what appears on the wire, so renaming
// the key would be a breaking API change — flagged, not changed.
readonly LLM_RESPONSE_FINISH_REASON: "gen_ai.response.finish_reasons";
readonly LLM_USAGE_INPUT_TOKENS: "gen_ai.usage.input_tokens";
readonly LLM_USAGE_OUTPUT_TOKENS: "gen_ai.usage.output_tokens";
// L0-specific attributes below.
readonly L0_SESSION_ID: "l0.session_id";
readonly L0_STREAM_COMPLETED: "l0.stream.completed";
readonly L0_FALLBACK_INDEX: "l0.fallback.index";
readonly L0_RETRY_COUNT: "l0.retry.count";
readonly L0_NETWORK_ERROR_COUNT: "l0.network.error_count";
readonly L0_GUARDRAIL_VIOLATION_COUNT: "l0.guardrail.violation_count";
readonly L0_DRIFT_DETECTED: "l0.drift.detected";
// Unit is encoded in the key name (milliseconds).
readonly L0_TIME_TO_FIRST_TOKEN: "l0.time_to_first_token_ms";
readonly L0_TOKENS_PER_SECOND: "l0.tokens_per_second";
};
/**
 * Bridges L0 runtime telemetry (streams, retries, errors, guardrails, drift)
 * to OpenTelemetry traces and metrics.
 *
 * Works purely against the `@opentelemetry/api` types; both tracer and meter
 * are optional (see {@link OpenTelemetryConfig}). Prefer the
 * {@link createOpenTelemetry} factory over direct construction.
 */
export declare class L0OpenTelemetry {
private tracer?;
private meter?;
private config;
// Metric instruments — all optional, presumably because `meter` may be absent;
// TODO confirm against the implementation.
private requestCounter?;
private tokenCounter?;
private retryCounter?;
private errorCounter?;
private durationHistogram?;
private ttftHistogram?;
private activeStreamsGauge?;
// Backing state for `getActiveStreams()` — exact type not visible here.
private activeStreams;
constructor(config: OpenTelemetryConfig);
private initializeMetrics;
/**
 * Runs `fn` with a span named `name` and returns its result.
 * Presumably ends the span (and sets error status) when the promise settles —
 * TODO confirm against the implementation.
 */
traceStream<T>(name: string, fn: (span: Span) => Promise<T>, attributes?: Attributes): Promise<T>;
/** Records an L0 telemetry snapshot, optionally onto an existing span. */
recordTelemetry(telemetry: L0Telemetry, span?: Span): void;
/** Records a streamed token; `content` is presumably only used when `recordTokenContent` is enabled — verify. */
recordToken(span?: Span, content?: string): void;
/** Records a retry attempt with its reason and 1-based(?) attempt number — numbering not visible here. */
recordRetry(reason: string, attempt: number, span?: Span): void;
/** Records a network-level error, categorized by `errorType`. */
recordNetworkError(error: Error, errorType: string, span?: Span): void;
/** Records a guardrail violation; presumably gated by `recordGuardrailViolations` — verify. */
recordGuardrailViolation(violation: GuardrailViolation, span?: Span): void;
/** Records a detected drift event with its type and confidence score. */
recordDrift(driftType: string, confidence: number, span?: Span): void;
/** Creates a standalone span; caller is responsible for ending it. */
createSpan(name: string, attributes?: Attributes): Span;
/** Wires an L0Monitor's events into this exporter — assumption from naming; confirm. */
connectMonitor(monitor: L0Monitor): void;
/** Returns the current number of active streams. */
getActiveStreams(): number;
}
/** Factory for {@link L0OpenTelemetry} — presumably equivalent to `new L0OpenTelemetry(config)`; confirm. */
export declare function createOpenTelemetry(config: OpenTelemetryConfig): L0OpenTelemetry;
/** Builds an {@link EventHandler} that forwards L0 events to OpenTelemetry — assumption from name/types; confirm. */
export declare function createOpenTelemetryHandler(config: OpenTelemetryConfig): EventHandler;
//# sourceMappingURL=opentelemetry.d.ts.map