@juspay/neurolink
Universal AI Development Platform with working MCP integration, multi-provider support, and a professional CLI. Built-in tools are operational, and 58+ external MCP servers are discoverable. Connect to filesystem, GitHub, database operations, and more.
import { type SpawnSyncReturns, type SpawnSyncOptions } from "child_process";
type AllowedCommand =
    | "ollama"
    | "curl"
    | "systemctl"
    | "pkill"
    | "killall"
    | "open"
    | "taskkill"
    | "start";
/**
* Shared Ollama utilities for CLI commands
*/
export declare class OllamaUtils {
    /**
     * Secure wrapper around spawnSync to prevent command injection.
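     *
     * Only commands named in the AllowedCommand union type-check, so arbitrary
     * binaries are rejected at compile time. The call below is a hypothetical
     * usage sketch, not part of the shipped documentation.
     *
     * @example
     * // List locally installed Ollama models via the CLI.
     * const result = OllamaUtils.safeSpawn("ollama", ["list"], {
     *   encoding: "utf8",
     * });
     * if (result.status === 0) {
     *   console.log(result.stdout);
     * }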
     */
    static safeSpawn(command: AllowedCommand, args: string[], options?: SpawnSyncOptions): SpawnSyncReturns<string>;
    /**
     * Check if the Ollama command line is available
     */
    private static isOllamaCommandReady;
    /**
     * Validate an HTTP API response from Ollama
     */
    private static validateApiResponse;
    /**
     * Check if the Ollama HTTP API is ready
     */
    private static isOllamaApiReady;
    /**
     * Wait for the Ollama service to become ready, with exponential backoff
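     *
     * Parameter semantics are inferred from the signature; treat the values
     * below as a hypothetical usage sketch.
     *
     * @example
     * // Poll up to 10 times, starting with a 500 ms delay between checks.
     * const ready = await OllamaUtils.waitForOllamaReady(10, 500);
     * if (!ready) {
     *   throw new Error("Ollama did not become ready in time");
     * }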
     */
    static waitForOllamaReady(maxAttempts?: number, initialDelay?: number): Promise<boolean>;
    /**
     * Check if the Ollama service is already running
     */
    static isOllamaRunning(): boolean;
    /**
     * Unified Ollama start logic that works across platforms
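     *
     * A hypothetical start-if-needed flow combining the methods declared in
     * this file; the actual CLI may sequence these calls differently.
     *
     * @example
     * if (!OllamaUtils.isOllamaRunning()) {
     *   await OllamaUtils.startOllamaService();
     *   await OllamaUtils.waitForOllamaReady();
     * }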
     */
    static startOllamaService(): Promise<void>;
}
export {};