
thoughtmcp

AI that thinks more like humans do - MCP server with human-like cognitive architecture for enhanced reasoning, memory, and self-monitoring

/**
 * Sensory Processing Layer Implementation
 *
 * Implements the first layer of cognitive processing that handles:
 * - Input tokenization and normalization
 * - Attention filtering (thalamic gating)
 * - Pattern detection
 * - Salience computation
 * - Semantic chunking
 */
import { ComponentStatus, ISensoryProcessor, Pattern, SalienceMap } from "../interfaces/cognitive.js";
import { Token } from "../types/core.js";
export interface SensoryInput {
    raw_input: string;
    timestamp: number;
    context_markers: Map<string, unknown>;
    attention_weights: Float32Array;
}
export interface ProcessedInput {
    tokens: Token[];
    patterns: Pattern[];
    salience_map: SalienceMap;
    semantic_chunks: SemanticChunk[];
    attention_filtered: boolean;
}
export interface SemanticChunk {
    tokens: Token[];
    coherence_score: number;
    semantic_category: string;
    importance: number;
}
export interface AttentionGate {
    threshold: number;
    focus_areas: string[];
    suppression_areas: string[];
}
/**
 * SensoryProcessor implements the sensory processing layer of cognitive architecture
 * Mimics biological sensory processing with attention filtering and pattern detection
 */
export declare class SensoryProcessor implements ISensoryProcessor {
    private attention_threshold;
    private context_buffer;
    private buffer_size;
    private pattern_cache;
    private semantic_categories;
    private status;
    /**
     * Initialize the sensory processor with configuration
     */
    initialize(config: Record<string, unknown>): Promise<void>;
    /**
     * Main processing method - implements the sensory processing pipeline
     */
    process(input: string): Promise<ProcessedInput>;
    /**
     * Tokenize input with semantic awareness
     * Implements biological-inspired tokenization similar to cortical processing
     */
    tokenize(input: string): Token[];
    /**
     * Apply attention filtering - mimics thalamic gating
     * Filters tokens based on relevance and attention scores
     */
    filterAttention(tokens: Token[], threshold: number): Token[];
    /**
     * Detect patterns in filtered tokens
     * Implements pattern recognition similar to visual cortex processing
     */
    detectPatterns(tokens: Token[]): Pattern[];
    /**
     * Compute salience map for tokens
     * Determines which tokens deserve attention focus
     */
    computeSalience(tokens: Token[]): SalienceMap;
    /**
     * Reset processor state
     */
    reset(): void;
    /**
     * Get current component status
     */
    getStatus(): ComponentStatus;
    private initializePatternModels;
    private computeSemanticWeight;
    private computeInitialAttention;
    private extractContextTags;
    private computeDynamicThreshold;
    private isHighImportanceToken;
    private detectSequentialPatterns;
    private detectSemanticPatterns;
    private detectSyntacticPatterns;
    private detectRepetitionPatterns;
    private computeTokenSalience;
    private createSemanticChunks;
    private updateContextBuffer;
    private computeSequenceConfidence;
    private computeSequenceSalience;
    private computeContextRelevance;
    private getRelatedWords;
    private isContentWord;
    private isFunctionWord;
    private isNoun;
    private isVerb;
    private isAdjective;
    private classifySemanticCategory;
}
//# sourceMappingURL=SensoryProcessor.d.ts.map
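
The declaration file above only describes the public surface of SensoryProcessor. The sketch below shows how that surface could be exercised; it is not taken from the package documentation. The import path (thoughtmcp/dist/cognitive/SensoryProcessor.js) and the config keys passed to initialize() are assumptions, since initialize is typed as Record<string, unknown> and the actual options are not visible here.

// Usage sketch only. The import path and config keys are assumptions,
// not documented thoughtmcp API.
import { SensoryProcessor } from "thoughtmcp/dist/cognitive/SensoryProcessor.js"; // hypothetical path

async function demo(): Promise<void> {
  const processor = new SensoryProcessor();

  // Config keys are illustrative guesses; initialize() accepts Record<string, unknown>.
  await processor.initialize({ attention_threshold: 0.3, buffer_size: 64 });

  // Full pipeline: tokenize -> attention filter -> pattern detection -> salience -> chunks.
  const result = await processor.process(
    "The quick brown fox jumps over the lazy dog."
  );
  console.log(result.tokens.length);      // tokens that survived attention filtering
  console.log(result.patterns.length);    // detected Pattern objects
  console.log(result.attention_filtered); // whether thalamic-style gating was applied

  // Individual stages can also be called directly.
  const tokens = processor.tokenize("hello world");
  const focused = processor.filterAttention(tokens, 0.5);
  const salience = processor.computeSalience(focused);
  console.log(salience);

  processor.reset();
  console.log(processor.getStatus());
}

demo().catch(console.error);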