giga-code
Version:
A personal AI CLI assistant powered by Grok for local development.
104 lines (103 loc) • 3.25 kB
TypeScript
/// <reference types="node" />
import { GrokToolCall } from "../giga/client";
import { ToolResult } from "../types";
import { EventEmitter } from "events";
import { AgentMode } from "../types";
import { ModelInfo } from "../utils/dynamic-model-fetcher";
/**
 * A single entry in the agent's chat history: a user message, an
 * assistant message, or the result of a tool invocation.
 */
export interface ChatEntry {
  /** Discriminator for the entry variant. */
  type: "user" | "assistant" | "tool_result";
  /** Text content of the message or tool output. */
  content: string;
  /** When this entry was created. */
  timestamp: Date;
  /** Tool calls requested by an assistant message, if any. */
  toolCalls?: GrokToolCall[];
  /** The single tool call a "tool_result" entry responds to. */
  toolCall?: GrokToolCall;
  /** Outcome of the tool call for "tool_result" entries. */
  toolResult?: {
    success: boolean;
    output?: string;
    error?: string;
    /** Open-ended metadata bag attached by individual tools. */
    metadata?: {
      /** Human-readable summary shown to the user (presumably — set by tools; verify against producers). */
      userSummary?: string;
      query?: string;
      [key: string]: any;
    };
  };
  /** True while the assistant message is still being streamed. */
  isStreaming?: boolean;
  /** Generation timing/throughput stats for an assistant entry. */
  metrics?: {
    prefillTimeMs: number;
    decodeTimeMs: number;
    outputTokens: number;
    tokensPerSecond: number;
  };
}
/**
 * A chunk yielded by {@link GigaAgent.processUserMessageStream}.
 * The `type` tag determines which optional payload fields are set
 * (e.g. `content` for "content" chunks, `tokenCount` for "token_count").
 */
export interface StreamingChunk {
  /** Kind of chunk; "done" presumably terminates the stream — confirm against the implementation. */
  type: "content" | "tool_calls" | "tool_result" | "done" | "token_count" | "status";
  /** Incremental text for "content" (and likely "status") chunks. */
  content?: string;
  /** Tool calls announced by a "tool_calls" chunk. */
  toolCalls?: GrokToolCall[];
  /** The tool call a "tool_result" chunk corresponds to. */
  toolCall?: GrokToolCall;
  /** Result payload for a "tool_result" chunk. */
  toolResult?: ToolResult;
  /** Running token count for "token_count" chunks. */
  tokenCount?: number;
}
/**
 * Ambient declaration of the CLI agent. Orchestrates the Grok client,
 * local tools (text editor, bash, todo, confirmation, MCP, semantic
 * search), chat history, and token tracking. Extends EventEmitter —
 * presumably emits status/progress events via `emitStatus`; confirm
 * event names against the implementation.
 */
export declare class GigaAgent extends EventEmitter {
  private gigaClient;
  private textEditor;
  private bash;
  private todoTool;
  private confirmationTool;
  private mcpTool;
  private semanticSearchTool;
  private mcpManager;
  private ragContextService;
  private chatHistory;
  private messages;
  private tokenCounter;
  private tokenTracker;
  private availableModels;
  private abortController;
  private selectedCustomPrompt;
  private lastBashOutput;
  private getBaseSystemPrompt;
  /**
   * @param apiKey Grok API key (required).
   * @param groqApiKey Optional secondary key — presumably for a Groq
   *   provider; verify against the client implementation.
   */
  constructor(apiKey: string, groqApiKey?: string);
  private emitStatus;
  private initializeMcpConnections;
  private ensureRagIndexExists;
  /** Re-establishes connections to configured MCP servers. */
  refreshMcpConnections(): Promise<void>;
  /**
   * Processes one user message to completion (non-streaming) and
   * returns the chat entries produced (assistant replies and any
   * tool results).
   */
  processUserMessage(message: string): Promise<ChatEntry[]>;
  private messageReducer;
  /**
   * Streaming variant of {@link processUserMessage}: yields
   * {@link StreamingChunk}s as the response is generated.
   */
  processUserMessageStream(message: string): AsyncGenerator<StreamingChunk, void, unknown>;
  private getExpertModelForTool;
  private executeTool;
  /** Returns the accumulated chat history. */
  getChatHistory(): ChatEntry[];
  /** Returns the agent's current working directory. */
  getCurrentDirectory(): string;
  /** Runs a shell command via the bash tool and returns its result. */
  executeBashCommand(command: string): Promise<ToolResult>;
  /** Returns the identifier of the currently selected model. */
  getCurrentModel(): string;
  /**
   * Selects the active model.
   * @param allModels Optional full model list — presumably used to
   *   refresh `availableModels`; confirm in the implementation.
   */
  setModel(model: string, allModels?: ModelInfo[]): void;
  /** Aborts the in-flight operation (backed by `abortController`). */
  abortCurrentOperation(): void;
  /** Returns token-usage state for the active conversation/model. */
  getTokenTrackerInfo(): {
    current: number;
    max: number;
    percentage: number;
    model: string;
    conversationId: string;
  };
  /** Sets (or clears, with null) the custom system-prompt by name. */
  setSelectedCustomPrompt(promptName: string | null): void;
  /** Returns the selected custom prompt name, or null if none. */
  getSelectedCustomPrompt(): string | null;
  /** Replaces the in-memory history with previously saved entries. */
  restoreConversation(chatEntries: ChatEntry[]): void;
  /** Switches the agent's operating mode. */
  updateMode(mode: AgentMode): void;
  /** Returns the current operating mode. */
  getCurrentMode(): AgentMode;
  /** Returns the configuration associated with the current mode. */
  getModeConfig(): import("../types").ModeConfig;
  /** Enables or disables a named MCP server; resolves with outcome. */
  toggleMcpServer(serverName: string, enabled: boolean): Promise<{
    success: boolean;
    message: string;
  }>;
  /** Returns status (enabled/connected/type) for each MCP server. */
  getMcpServerStatus(): {
    name: string;
    enabled: boolean;
    connected: boolean;
    type: string;
  }[];
  private getDirectoryStructure;
  private getDirectoryContents;
  private executeCommandDirectly;
  private updateSystemPrompt;
  private getRagIndexStatus;
  private generateMcpSection;
}