UNPKG

scai

Version:

> **AI-powered CLI for local code analysis, commit message suggestions, and natural-language queries.** 100% local, private, GDPR-friendly, made in Denmark/EU with ❤️.

197 lines (196 loc) • 6.96 kB
import { spawn, execSync } from 'child_process';
import * as readline from 'readline';
import * as fs from 'fs';
import * as path from 'path';
import chalk from 'chalk';
import { platform } from 'os';
import { getDbForRepo } from './db/client.js';
import { readConfig, writeConfig } from './config.js';
import { CONFIG_PATH } from './constants.js';

// Constants
const MODEL_PORT = 11434; // default Ollama HTTP port
const REQUIRED_MODELS = ['qwen3-coder:30b']; // entries are full "name:tag" strings
const OLLAMA_URL = 'https://ollama.com/download';
const VSCODE_URL = 'https://code.visualstudio.com/download';

// Non-interactive mode: every prompt auto-answers 'y'.
const isYesMode = process.argv.includes('--yes') || process.env.SCAI_YES === '1';

// Module-level memo so the Ollama probe/start runs at most once per process.
let ollamaChecked = false;
let ollamaAvailable = false;

/**
 * 🧠 Auto-initialize the config file and the active repo's DB when missing.
 * Safe to call repeatedly; only creates what does not exist yet.
 */
export async function autoInitIfNeeded() {
  const cfg = readConfig();
  if (!fs.existsSync(CONFIG_PATH)) {
    console.log(chalk.green('🛠️ Config not found. Initializing...'));
    writeConfig({});
  }
  // FIX: optional-chain `repos` — a fresh/empty config may have no `repos`
  // map at all, and `cfg.repos[cfg.activeRepo]` threw a TypeError then.
  const activeRepo = cfg.activeRepo && cfg.repos?.[cfg.activeRepo];
  if (activeRepo) {
    const dbPath = path.join(activeRepo.indexDir, 'scai.db');
    if (!fs.existsSync(dbPath)) {
      console.log(chalk.green('📦 DB not found. Initializing...'));
      getDbForRepo();
    }
  }
}

/**
 * 🗨 Ask a question on stdin with a timeout.
 * @param {string} question
 * @param {number} [timeout=20000] - ms before giving up
 * @returns {Promise<string>} trimmed answer, '' on timeout, 'y' in --yes mode
 */
function promptUser(question, timeout = 20000) {
  if (isYesMode) return Promise.resolve('y');
  const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
  return new Promise((resolve) => {
    const timer = setTimeout(() => {
      rl.close();
      resolve('');
    }, timeout);
    rl.question(question, (answer) => {
      clearTimeout(timer);
      rl.close();
      resolve(answer.trim());
    });
  });
}

/**
 * 🧭 Open a URL in the default browser, cross-platform.
 * Prints the URL as a fallback when the command fails.
 * @param {string} url
 */
function openBrowser(url) {
  const command =
    platform() === 'win32' ? `start ${url}`
    : platform() === 'darwin' ? `open ${url}`
    : `xdg-open ${url}`;
  try {
    execSync(command, { stdio: 'ignore' });
  } catch {
    console.log(chalk.yellow('🔗 Please manually open:'), url);
  }
}

/**
 * 🌐 Probe the local Ollama HTTP endpoint.
 * @returns {Promise<boolean>} true when the server answers with a 2xx status
 */
async function isOllamaRunning() {
  try {
    const res = await fetch(`http://localhost:${MODEL_PORT}`);
    return res.ok;
  } catch {
    return false;
  }
}

/**
 * 🚀 Ensure the Ollama server is running, starting it when possible.
 * Never blocks startup on a missing binary: it guides the user to the
 * download page and records availability in `ollamaAvailable` instead.
 * Memoized via `ollamaChecked` — runs at most once per process.
 */
async function ensureOllamaRunning() {
  if (ollamaChecked) return;
  ollamaChecked = true;
  if (await isOllamaRunning()) {
    console.log(chalk.green('✅ Ollama is already running.'));
    ollamaAvailable = true;
    return;
  }
  console.log(chalk.yellow('⚙️ Ollama is not running. Attempting to start it...'));
  try {
    const child = spawn('ollama', ['serve'], {
      detached: true,
      stdio: 'ignore',
      windowsHide: true,
    });
    // FIX: spawn() reports a missing binary via an async 'error' event, not a
    // synchronous throw — without this handler an ENOENT crashed the process
    // with an uncaught exception before the fallback prompt below could run.
    child.on('error', (err) => {
      if (err.code !== 'ENOENT') {
        console.log(chalk.red('❌ Unexpected error starting Ollama.'));
        process.exit(1);
      }
    });
    child.unref();
    // Give the detached server time to come up before probing again.
    await new Promise((res) => setTimeout(res, 10000));
    if (await isOllamaRunning()) {
      console.log(chalk.green('✅ Ollama started successfully.'));
      ollamaAvailable = true;
      return;
    }
  } catch (err) {
    if (err.code !== 'ENOENT') {
      console.log(chalk.red('❌ Unexpected error starting Ollama.'));
      process.exit(1);
    }
  }
  console.log(chalk.red('❌ Ollama is not installed or not in PATH.'));
  console.log(chalk.yellow(`📦 Ollama is required to run local AI models.`));
  const answer = await promptUser(
    `🌐 Recommended model: ${REQUIRED_MODELS.join(', ')}\nOpen download page in browser? (y/N): `
  );
  if (answer.toLowerCase() === 'y') openBrowser(OLLAMA_URL);
  await promptUser('⏳ Press Enter once Ollama is installed or to continue without it: ');
  if (await isOllamaRunning()) {
    console.log(chalk.green('✅ Ollama detected. Continuing...'));
    ollamaAvailable = true;
  } else {
    console.log(chalk.yellow('⚠️ Ollama not running. Models will not be available until installed.'));
    ollamaAvailable = false;
  }
}

/**
 * 🧰 List installed Ollama models that appear in REQUIRED_MODELS.
 * @returns {Promise<string[]>} matching "name:tag" strings; [] when `ollama list` fails
 */
async function getInstalledModels() {
  try {
    const result = execSync('ollama list', { encoding: 'utf-8' });
    return result
      .split('\n')
      // FIX: keep the full "name:tag". The old code stripped the tag with
      // .split(':')[0], so tagged entries in REQUIRED_MODELS (e.g.
      // 'qwen3-coder:30b') could never match and every required model
      // always looked missing, re-prompting the user on each run.
      .map((line) => line.split(/\s+/)[0])
      .filter((model) => REQUIRED_MODELS.includes(model));
  } catch {
    return [];
  }
}

/**
 * 📥 Suggest the required models and optionally pull them.
 * Never blocks startup: declining (or a pull failure) just continues.
 */
async function ensureModelsDownloaded() {
  if (!ollamaAvailable) return;
  const installed = await getInstalledModels();
  const missing = REQUIRED_MODELS.filter((m) => !installed.includes(m));
  if (!missing.length) {
    console.log(chalk.green('✅ All required models are installed.'));
    return;
  }
  console.log(chalk.yellow(`📦 Suggested models: ${missing.join(', ')}`));
  const answer = await promptUser('⬇️ Download them now? (y/N, continue anyway): ');
  if (answer.toLowerCase() !== 'y') {
    console.log(chalk.yellow('⚠️ Continuing without installing models. You can install later via config.'));
    return;
  }
  for (const model of missing) {
    try {
      console.log(`⬇️ Pulling ${model}...`);
      execSync(`ollama pull ${model}`, { stdio: 'inherit' });
      console.log(chalk.green(`✅ Pulled ${model}`));
    } catch {
      console.log(chalk.red(`❌ Failed to pull ${model}, continuing...`));
    }
  }
}

/**
 * 🌟 Whether the VSCode `code` CLI is on PATH.
 * @returns {Promise<boolean>}
 */
async function isVSCodeAvailable() {
  try {
    execSync('code --version', { stdio: 'ignore' });
    return true;
  } catch {
    return false;
  }
}

/**
 * ⚡ Nudge the user to install the VSCode CLI; never blocks startup.
 * Offers the download page, waits for confirmation, then re-checks.
 */
async function ensureVSCodeInstalled() {
  if (await isVSCodeAvailable()) {
    console.log(chalk.green('✅ VSCode CLI is available.'));
    return;
  }
  console.log(chalk.red('❌ VSCode CLI not found.'));
  const answer = await promptUser('Do you want to open the VSCode download page? (y/N): ');
  if (answer.toLowerCase() === 'y') openBrowser(VSCODE_URL);
  await promptUser(
    'VSCode CLI was not found. If you want to use VSCode features, please install it manually. ' +
      'Once installed, press Enter to continue. If you prefer to skip VSCode, just press Enter to continue without it: '
  );
  if (await isVSCodeAvailable()) {
    console.log(chalk.green('✅ VSCode CLI detected. Continuing...'));
  } else {
    console.log(chalk.yellow('⚠️ VSCode CLI still not found. Some features may be disabled.'));
  }
}

/**
 * 🏁 Main bootstrap: init config/db, then Ollama, models, and VSCode checks.
 * Each step degrades gracefully; none of them abort the CLI on absence.
 */
export async function bootstrap() {
  await autoInitIfNeeded();
  await ensureOllamaRunning();
  await ensureModelsDownloaded();
  await ensureVSCodeInstalled();
}