UNPKG

autollama

Version:

Modern JavaScript-first RAG framework with contextual embeddings, professional CLI, and one-command deployment

618 lines (521 loc) โ€ข 20.9 kB
#!/usr/bin/env node

/**
 * AutoLlama v3.0 - NPX Installer
 * ๐Ÿฆ™ The cuddliest context-aware RAG framework
 *
 * Usage: npx create-autollama [project-name]
 *
 * Drives the full interactive setup: project scaffolding, template selection,
 * environment detection, dependency install, config generation, and database
 * initialization.
 */

const fs = require('fs-extra');
const path = require('path');
const inquirer = require('inquirer');
const chalk = require('chalk');
const ora = require('ora');
const { execSync, spawn } = require('child_process');

// Llama ASCII Art Collection
// NOTE(review): the internal line breaks of these template literals were
// reconstructed from a collapsed copy of the file — confirm the art renders
// correctly against the published package.
const LLAMA_ART = {
  welcome: `
ใ€€ โˆฉ___โˆฉ
ใ€€ |ใƒŽใ€€ใ€€ใ€€ใƒฝ
ใ€€/ใ€€ใ€€โ—ใ€€ใ€€โ— | ${chalk.cyan.bold('AutoLlama v3.0')}
ใ€€|ใ€€ใ€€( _โ—_)ใ€€ใƒŸ ${chalk.gray('Your Context-Aware RAG Companion')}
ใ€€ๅฝก๏ฝคใ€€ใ€€|โˆช|ใ€€๏ฝค\`
/ใ€€ใ€€ใ€€ ใƒฝใƒŽใ€€/ยด ${chalk.yellow('The fluffiest way to build RAG apps!')}
(ใ€€๏ผผ๏ผฟ๏ผฟ๏ผฟ,ใ€€/
ใ€€๏ผผ๏ผฟ,)ใ€€ใ€€/
ใ€€ใ€€ใ€€ใ€€ใ€€ /`,
  happy: `
ใ€€ โˆฉ___โˆฉ
ใ€€ |ใƒŽใ€€ใ€€ใ€€ใƒฝ
ใ€€/ใ€€ใ€€โ—•ใ€€ใ€€โ—• |
ใ€€|ใ€€ใ€€( โ—ก )ใ€€ใƒŸ
ใ€€ๅฝก๏ฝคใ€€ใ€€|โˆช|ใ€€๏ฝค\`
/ใ€€ใ€€ใ€€ ใƒฝใƒŽใ€€/ยด`,
  thinking: `
ใ€€ โˆฉ___โˆฉ
ใ€€ |ใƒŽใ€€ใ€€ใ€€ใƒฝ
ใ€€/ใ€€ใ€€โ€•ใ€€ใ€€โ€• |
ใ€€|ใ€€ใ€€( _โ—_)ใ€€๐Ÿ’ญ
ใ€€ๅฝก๏ฝคใ€€ใ€€|โˆช|ใ€€๏ฝค\`
/ใ€€ใ€€ใ€€ ใƒฝใƒŽใ€€/ยด`,
  mini: '๐Ÿฆ™'
};

// Llama-themed status messages, keyed by install phase.
const LLAMA_MESSAGES = {
  dependencies: [
    "๐Ÿฆ™ Gathering the finest npm packages from the digital Andes...",
    "๐ŸŒฟ Your llama is carefully selecting each dependency...",
    "๐Ÿ“ฆ Packing supplies for your RAG adventure..."
  ],
  database: [
    "๐Ÿ”๏ธ Setting up a cozy database pasture...",
    "๐Ÿ’พ Teaching your llama to remember everything...",
    "๐Ÿ—„๏ธ Organizing the document grazing grounds..."
  ],
  configuration: [
    "โœจ Sprinkling some llama magic on your config...",
    "๐ŸŽจ Customizing your llama's outfit...",
    "๐Ÿ”ง Adjusting the saddle for perfect fit..."
  ],
  completion: [
    "๐ŸŽ‰ Your llama herd is ready for action!",
    "โœจ Setup complete! Time to start your RAG adventure!",
    "๐Ÿฆ™ *Happy llama humming* Everything is configured!"
  ]
};

// Llama facts to display during long operations (rotated by getNextLlamaFact).
const LLAMA_FACTS = [
  "๐Ÿฆ™ Fun fact: Llamas hum when they're happy or curious!",
  "๐Ÿฆ™ Did you know? Llamas are excellent guards for sheep and goats!",
  "๐Ÿฆ™ Llama wisdom: They can carry 25-30% of their body weight!",
  "๐Ÿฆ™ Cool fact: Llamas have excellent memories and can learn tasks quickly!",
  "๐Ÿฆ™ Trivia: Baby llamas are called 'crias' - how cute is that?",
  "๐Ÿฆ™ Fun fact: Llamas communicate through ear positions and tail movements!",
  "๐Ÿฆ™ Did you know? Llamas rarely spit at humans - only at other llamas!",
  "๐Ÿฆ™ Llama fact: They're environmentally friendly with soft, padded feet!"
];

/**
 * Interactive installer for an AutoLlama project.
 *
 * State collected along the way lives in `this.config` (template, AI provider,
 * API key, personality, deployment type, detected capabilities).
 */
class AutoLlamaInstaller {
  constructor() {
    this.projectName = process.argv[2]; // optional CLI arg: npx create-autollama <name>
    this.targetDir = '';
    this.config = {};
    this.startTime = Date.now();
    this.currentFactIndex = 0;
  }

  /**
   * Run the full installation pipeline. Any step that throws is routed to
   * handleError(), which prints help and exits with code 1.
   */
  async install() {
    // Clear console and show welcome
    console.clear();
    this.showWelcome();

    try {
      // Step 1: Get project name with llama flair
      await this.getProjectName();

      // Step 2: Configuration wizard
      await this.configurationWizard();

      // Step 3: Environment detection
      await this.detectEnvironment();

      // Step 4: Install dependencies
      await this.installDependencies();

      // Step 5: Persist configuration (.env + autollama.config.js).
      // FIX: generateEnvironmentConfig() was defined but never invoked, so the
      // wizard's answers (API key, personality, deployment mode) were never
      // written to disk.
      await this.generateEnvironmentConfig();

      // Step 6: Initialize database
      await this.initializeDatabase();

      // Step 7: Final setup
      await this.finalizeSetup();

      // Success celebration!
      this.celebrateSuccess();
    } catch (error) {
      this.handleError(error);
    }
  }

  /** Print the welcome banner and tagline. */
  showWelcome() {
    console.log(LLAMA_ART.welcome);
    console.log();

    // Animated typing effect for tagline
    const tagline = "๐Ÿฆ™ Welcome to the fluffiest RAG framework installation!";
    console.log(chalk.cyan(tagline));
    console.log();
  }

  /**
   * Resolve the project name (CLI arg or prompt), create the target directory,
   * and offer to wipe it if it already exists. Exits cleanly if the user
   * declines the overwrite.
   */
  async getProjectName() {
    if (!this.projectName) {
      const { projectName } = await inquirer.prompt([{
        type: 'input',
        name: 'projectName',
        message: `${LLAMA_ART.mini} What shall we name your llama project?`,
        default: 'my-fluffy-rag',
        validate: (input) => {
          if (/^[a-z0-9-]+$/.test(input)) return true;
          return 'Please use lowercase letters, numbers, and hyphens only';
        }
      }]);
      this.projectName = projectName;
    }

    this.targetDir = path.join(process.cwd(), this.projectName);

    // Check if directory exists
    if (await fs.pathExists(this.targetDir)) {
      console.log(chalk.yellow(`\n${LLAMA_ART.mini} Hmm, there's already a ${this.projectName} pasture here...`));
      const { overwrite } = await inquirer.prompt([{
        type: 'confirm',
        name: 'overwrite',
        message: 'Should we clear it for a fresh start?',
        default: false
      }]);

      if (!overwrite) {
        console.log(chalk.cyan(`\n${LLAMA_ART.mini} No worries! Try again with a different name.`));
        console.log(chalk.gray('Your llama will wait patiently for you! ๐Ÿ’™'));
        process.exit(0);
      }
      await fs.remove(this.targetDir);
    }

    await fs.ensureDir(this.targetDir);
    console.log(chalk.green(`โœ“ Created ${chalk.bold(this.projectName)} directory`));
  }

  /**
   * Interactive wizard: template selection (delegated to TemplateManager),
   * optional template-specific questions, then standard questions (AI
   * provider, API key, sample data, personality). Answers are merged into
   * this.config.
   */
  async configurationWizard() {
    console.log(chalk.cyan(`\n${LLAMA_ART.mini} Let's customize your llama setup!`));
    console.log(chalk.gray('I\'ll ask a few questions to get everything just right.\n'));

    // First, choose template
    const TemplateManager = require('../lib/templates/manager');
    const templateManager = new TemplateManager();

    const template = await templateManager.selectTemplate();
    this.config.template = template;

    // Run template-specific wizard if available
    if (template.wizardQuestions && template.wizardQuestions.length > 0) {
      console.log(chalk.cyan(`\n๐Ÿ”ง Customizing ${template.name}...`));
      this.config.customization = await templateManager.runTemplateWizard(template);
    }

    // Standard questions
    const standardAnswers = await inquirer.prompt([
      {
        type: 'list',
        name: 'aiProvider',
        message: '๐Ÿง  Which AI treats shall we feed your llama?',
        choices: [
          { name: '๐ŸŒŸ OpenAI GPT (Recommended - Premium hay!)', value: 'openai' },
          { name: '๐ŸŽญ Anthropic Claude (Gourmet grass!)', value: 'anthropic' },
          { name: '๐ŸŒพ I\'ll bring my own treats later', value: 'later' }
        ],
        default: 'openai'
      },
      {
        type: 'password',
        name: 'apiKey',
        message: '๐Ÿ”‘ Your API key (or press Enter to add it later):',
        when: (answers) => answers.aiProvider !== 'later',
        mask: '๐Ÿฆ™',
        validate: (input) => {
          if (!input) return true; // Allow empty
          if (input.length < 20) return 'That seems a bit short for an API key...';
          return true;
        }
      },
      {
        type: 'confirm',
        name: 'installSampleData',
        message: '๐Ÿ“š Would you like some sample documents to play with?',
        default: true
      },
      {
        type: 'list',
        name: 'llamaPersonality',
        message: '๐ŸŽจ Choose your llama\'s personality:',
        choices: [
          { name: '๐ŸŽฉ Professional Llama (Serious business mode)', value: 'professional' },
          { name: '๐ŸŒˆ Friendly Llama (Balanced and approachable)', value: 'friendly' },
          { name: '๐ŸŽช Party Llama (Maximum fun and emojis!)', value: 'party' }
        ],
        default: template.configuration?.ui?.theme === 'professional' ? 'professional' : 'friendly'
      }
    ]);

    // Merge all answers
    this.config = { ...this.config, ...standardAnswers };
    this.config.deploymentType = template.deploymentMode || 'local';

    console.log(chalk.green(`\n${LLAMA_ART.mini} Excellent choices! Your llama approves!`));
  }

  /**
   * Detect Docker/PostgreSQL/Git availability and the Node version, and
   * downgrade a docker deployment to 'local' when Docker is missing.
   */
  async detectEnvironment() {
    const spinner = ora({
      text: 'Sniffing around your environment...',
      spinner: {
        interval: 80,
        frames: ['๐Ÿฆ™ ', ' ๐Ÿฆ™ ', ' ๐Ÿฆ™', ' ๐Ÿฆ™ ', '๐Ÿฆ™ ']
      }
    }).start();

    const capabilities = {
      hasDocker: await this.checkDocker(),
      hasPostgres: await this.checkPostgreSQL(),
      hasGit: await this.checkGit(),
      nodeVersion: process.version
    };

    this.config.capabilities = capabilities;
    await this.sleep(1500); // Give time to see the animation

    const capList = [];
    capList.push(`Node ${capabilities.nodeVersion}`);
    if (capabilities.hasDocker) capList.push('Docker');
    if (capabilities.hasPostgres) capList.push('PostgreSQL');
    if (capabilities.hasGit) capList.push('Git');

    spinner.succeed(`Found: ${capList.join(', ')}`);

    // Provide friendly feedback based on what's found
    if (this.config.deploymentType === 'docker' && !capabilities.hasDocker) {
      console.log(chalk.yellow(`\n${LLAMA_ART.mini} Heads up! Docker isn't installed.`));
      console.log(chalk.gray('No worries - I\'ll adjust the setup for you!'));
      this.config.deploymentType = 'local';
    }
  }

  /**
   * Apply the chosen template into the target directory, then run
   * `npm install` there. Rotates llama facts in the spinner while waiting.
   * Note: chdir()s into the project directory as a side effect.
   */
  async installDependencies() {
    console.log(chalk.cyan(`\n${LLAMA_ART.mini} Time to gather supplies!`));

    const spinner = ora({
      text: this.getRandomMessage('dependencies'),
      spinner: {
        interval: 100,
        frames: ['๐ŸŒฟ', '๐ŸŒพ', '๐ŸŒฑ', '๐Ÿƒ']
      }
    }).start();

    // Show llama facts during installation
    const factInterval = setInterval(() => {
      spinner.text = this.getNextLlamaFact();
    }, 3000);

    try {
      // Apply template to project
      const TemplateManager = require('../lib/templates/manager');
      const templateManager = new TemplateManager();

      spinner.text = '๐ŸŽจ Applying template configuration...';
      await templateManager.applyTemplate(
        this.config.template,
        this.targetDir,
        this.projectName,
        this.config.customization
      );

      // Install npm dependencies
      process.chdir(this.targetDir);
      spinner.text = '๐Ÿ“ฆ Installing npm packages (this might take a minute)...';
      execSync('npm install', { stdio: 'pipe' });

      clearInterval(factInterval);
      spinner.succeed('All dependencies installed! Your llama is well-equipped!');
    } catch (error) {
      clearInterval(factInterval); // always stop the fact rotation before rethrowing
      spinner.fail('Had trouble gathering dependencies');
      throw error;
    }
  }

  /**
   * Write the generated .env file and a personality-aware
   * autollama.config.js into the target directory.
   */
  async generateEnvironmentConfig() {
    const envContent = this.generateEnvContent();
    await fs.writeFile(path.join(this.targetDir, '.env'), envContent);

    // Create personality-based configuration
    const config = {
      deployment: this.config.deploymentType,
      personality: this.config.llamaPersonality,
      welcomeMessage: this.getPersonalityWelcome(),
      database: this.config.deploymentType === 'local'
        ? { type: 'sqlite', path: './data/autollama.db' }
        : { type: 'postgresql', url: 'postgresql://autollama:autollama@localhost:5432/autollama' }
    };

    await fs.writeFile(
      path.join(this.targetDir, 'autollama.config.js'),
      `// ๐Ÿฆ™ AutoLlama Configuration\nmodule.exports = ${JSON.stringify(config, null, 2)};`
    );
  }

  /**
   * Build the .env file body from the wizard answers: header (personality-
   * themed), deployment mode, AI provider + key, database settings, vector DB
   * URL, and RAG tuning flags.
   * @returns {string} newline-joined .env contents
   */
  generateEnvContent() {
    const personality = {
      professional: '# AutoLlama v3.0 Configuration',
      friendly: '# ๐Ÿฆ™ AutoLlama v3.0 Configuration - Happy grazing!',
      party: '# ๐Ÿฆ™๐ŸŽ‰ AutoLlama v3.0 - Let\'s party! ๐ŸŽŠ๐Ÿฆ™'
    };

    const lines = [
      personality[this.config.llamaPersonality],
      '# Generated by your friendly setup llama',
      `# Created: ${new Date().toLocaleDateString()}`,
      '',
      '# Deployment Configuration',
      `DEPLOYMENT_MODE=${this.config.deploymentType}`,
      `LLAMA_PERSONALITY=${this.config.llamaPersonality}`,
      '',
      '# AI Configuration'
    ];

    if (this.config.aiProvider === 'openai') {
      lines.push(`OPENAI_API_KEY=${this.config.apiKey || 'your_openai_api_key_here'}`);
      lines.push('AI_PROVIDER=openai');
      lines.push('# ๐Ÿฆ™ Tip: Get your key at https://platform.openai.com/api-keys');
    } else if (this.config.aiProvider === 'anthropic') {
      lines.push(`ANTHROPIC_API_KEY=${this.config.apiKey || 'your_anthropic_api_key_here'}`);
      lines.push('AI_PROVIDER=anthropic');
      lines.push('# ๐Ÿฆ™ Tip: Get your key at https://console.anthropic.com/');
    }

    if (this.config.deploymentType === 'local') {
      lines.push('', '# Local Development Database');
      lines.push('DATABASE_TYPE=sqlite');
      lines.push('DATABASE_PATH=./data/autollama.db');
      lines.push('# ๐Ÿฆ™ Your data lives in a cozy local pasture!');
    } else {
      lines.push('', '# PostgreSQL Configuration');
      lines.push('DATABASE_URL=postgresql://autollama:autollama@localhost:5432/autollama');
      lines.push('# ๐Ÿฆ™ Professional-grade database for serious llamas');
    }

    lines.push('', '# Vector Database');
    lines.push('QDRANT_URL=http://localhost:6333');
    if (this.config.deploymentType === 'local') {
      lines.push('# ๐Ÿฆ™ Qdrant will auto-start in development mode');
    }

    lines.push('', '# RAG Configuration');
    lines.push('ENABLE_CONTEXTUAL_EMBEDDINGS=true');
    lines.push('CONTEXT_GENERATION_BATCH_SIZE=5');
    lines.push('# ๐Ÿฆ™ 60% better accuracy with Anthropic\'s contextual retrieval!');

    if (this.config.llamaPersonality === 'party') {
      lines.push('', '# ๐ŸŽ‰ Party Mode Settings');
      lines.push('ENABLE_CONFETTI=true');
      lines.push('MAX_EMOJI_LEVEL=11');
    }

    return lines.join('\n');
  }

  /**
   * Run the bundled AutoSetup to initialize the database. Deliberately
   * best-effort: a failure here is downgraded to a warning so the install
   * still completes (setup retries on first run).
   */
  async initializeDatabase() {
    const spinner = ora({
      text: this.getRandomMessage('database'),
      spinner: {
        interval: 120,
        frames: ['๐Ÿ”๏ธ ', 'โ›ฐ๏ธ ', '๐Ÿ—ป ', '๐Ÿ”๏ธ ']
      }
    }).start();

    try {
      // Use the auto-setup system
      const { AutoSetup } = require('../lib/startup/auto-setup');
      const autoSetup = new AutoSetup({
        projectRoot: this.targetDir,
        skipPrompts: true
      });

      spinner.text = '๐Ÿ’พ Setting up your database pasture...';
      const result = await autoSetup.runAutoSetup();

      if (result.setup) {
        spinner.succeed('Database initialized! Your llama has a great memory now!');
        if (this.config.installSampleData) {
          console.log(chalk.gray(`   ๐Ÿ“š Sample documents added for testing`));
        }
      } else {
        spinner.succeed('Database setup completed');
      }
    } catch (error) {
      spinner.warn('Database setup will complete on first run');
      // Don't fail installation if database setup fails
    }
  }

  /** Create runtime directories and mark bundled bin scripts executable. */
  async finalizeSetup() {
    const spinner = ora({
      text: 'Adding finishing touches...',
      spinner: {
        interval: 80,
        frames: ['โœจ', '๐Ÿ’ซ', 'โญ', '๐ŸŒŸ']
      }
    }).start();

    // Create necessary directories
    await fs.ensureDir(path.join(this.targetDir, 'data'));
    await fs.ensureDir(path.join(this.targetDir, 'uploads'));
    await fs.ensureDir(path.join(this.targetDir, 'logs'));

    // Set executable permissions
    const binFiles = ['bin/create-autollama.js'];
    for (const binFile of binFiles) {
      const binPath = path.join(this.targetDir, binFile);
      if (await fs.pathExists(binPath)) {
        await fs.chmod(binPath, '755');
      }
    }

    await this.sleep(1500);
    spinner.succeed('Everything is perfect!');
  }

  /** Print the success banner, next steps, URLs, and a personality farewell. */
  celebrateSuccess() {
    const duration = Math.round((Date.now() - this.startTime) / 1000);

    console.log('\n' + chalk.green('โ•'.repeat(50)));
    console.log(LLAMA_ART.happy);
    console.log(chalk.green.bold('\n ๐ŸŽ‰ Installation Complete! ๐ŸŽ‰'));
    console.log(chalk.gray(` Setup time: ${duration} seconds`));
    console.log(chalk.green('โ•'.repeat(50)));

    console.log(chalk.cyan('\n๐Ÿ“š Next Steps:'));
    console.log(chalk.white(` cd ${chalk.bold(this.projectName)}`));

    // Use template-specific start command
    const startCommand = this.config.template?.scripts?.dev ||
      (this.config.deploymentType === 'docker' ? 'npm run docker:up' : 'npm run dev');
    console.log(chalk.white(` ${startCommand}`));

    console.log(chalk.cyan('\n๐ŸŒ Your llama will be grazing at:'));
    console.log(chalk.white(' โ€ข Application: http://localhost:8080'));
    console.log(chalk.white(' โ€ข API Docs: http://localhost:8080/api/docs'));
    console.log(chalk.white(' โ€ข Health: http://localhost:8080/api/health'));

    // Template-specific information
    if (this.config.template) {
      // FIX: guard against templates that declare no features array
      const features = this.config.template.features || [];
      if (features.length > 0) {
        console.log(chalk.cyan(`\n๐ŸŽฏ ${this.config.template.name} Features:`));
        features.slice(0, 3).forEach(feature => {
          console.log(chalk.gray(` โ€ข ${feature}`));
        });
        if (features.length > 3) {
          console.log(chalk.gray(` โ€ข ...and ${features.length - 3} more!`));
        }
      }
    }

    if (!this.config.apiKey && this.config.aiProvider !== 'later') {
      console.log(chalk.yellow(`\nโš ๏ธ Don't forget to add your ${this.config.aiProvider.toUpperCase()} API key in .env`));
    }

    // Personality-based farewell (falls back to friendly if unset)
    const farewells = {
      professional: '\n๐Ÿฆ™ Your AutoLlama instance is ready for deployment.',
      friendly: '\n๐Ÿฆ™ Happy coding! Your llama is excited to help you build amazing things!',
      party: '\n๐Ÿฆ™๐ŸŽ‰ LET\'S GOOO! Time to build the most awesome RAG app ever! ๐Ÿš€๐ŸŽŠ'
    };
    console.log(chalk.bold.cyan(farewells[this.config.llamaPersonality] || farewells.friendly));

    console.log(chalk.gray('\nNeed help? Visit https://github.com/autollama/autollama'));
    console.log();
  }

  /**
   * Print a friendly error report with targeted tips, then exit(1).
   * @param {Error|*} error - whatever the pipeline threw
   */
  handleError(error) {
    // FIX: not every thrown value carries a .message — guard before .includes()
    const message = (error && error.message) || String(error);

    console.log(chalk.red(`\n${LLAMA_ART.mini}๐Ÿ’” Oh no! This llama encountered a rocky path...`));
    console.log(chalk.yellow('Even the fluffiest llamas stumble sometimes!\n'));

    console.log(chalk.red('Error details:'));
    console.log(chalk.gray(` ${message}`));

    if (error && error.code === 'EACCES') {
      console.log(chalk.cyan('\n๐Ÿ’ก Llama tip: Try running with sudo or check your permissions!'));
    } else if (message.includes('npm')) {
      console.log(chalk.cyan('\n๐Ÿ’ก Llama tip: Make sure npm is installed and up to date!'));
    } else if (message.includes('network')) {
      console.log(chalk.cyan('\n๐Ÿ’ก Llama tip: Check your internet connection!'));
    }

    console.log(chalk.gray('\n๐Ÿ“š For more help, visit: https://github.com/autollama/autollama/issues'));
    console.log(chalk.gray('๐Ÿฆ™ Your llama believes in you! Try again!'));
    process.exit(1);
  }

  // Utility functions

  /** Pick a random message from the given LLAMA_MESSAGES category. */
  getRandomMessage(category) {
    const messages = LLAMA_MESSAGES[category];
    return messages[Math.floor(Math.random() * messages.length)];
  }

  /** Return the next llama fact, cycling through LLAMA_FACTS. */
  getNextLlamaFact() {
    const fact = LLAMA_FACTS[this.currentFactIndex % LLAMA_FACTS.length];
    this.currentFactIndex++;
    return fact;
  }

  /** Welcome string for the generated config, keyed by personality. */
  getPersonalityWelcome() {
    const welcomes = {
      professional: 'Welcome to AutoLlama. Enterprise-grade RAG framework.',
      friendly: '๐Ÿฆ™ Welcome! Your friendly RAG companion is ready to help!',
      party: '๐Ÿฆ™๐ŸŽ‰ WOOHOO! Welcome to the RAG party! Let\'s build something AMAZING! ๐Ÿš€'
    };
    return welcomes[this.config.llamaPersonality] || welcomes.friendly;
  }

  /** Promise-based delay. */
  async sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
  }

  // Environment detection (each probe swallows failures and reports false)

  /** True when both `docker` and `docker compose` respond. */
  async checkDocker() {
    try {
      execSync('docker --version', { stdio: 'ignore' });
      execSync('docker compose version', { stdio: 'ignore' });
      return true;
    } catch {
      return false;
    }
  }

  /** True when the `psql` client is on PATH. */
  async checkPostgreSQL() {
    try {
      execSync('psql --version', { stdio: 'ignore' });
      return true;
    } catch {
      return false;
    }
  }

  /** True when `git` is on PATH. */
  async checkGit() {
    try {
      execSync('git --version', { stdio: 'ignore' });
      return true;
    } catch {
      return false;
    }
  }
}

// Handle Ctrl+C gracefully
process.on('SIGINT', () => {
  console.log(chalk.yellow(`\n\n${LLAMA_ART.mini} Installation cancelled!`));
  console.log(chalk.gray('Your llama will be here when you\'re ready to try again! ๐Ÿ’™'));
  process.exit(0);
});

// Handle unhandled errors
process.on('unhandledRejection', (error) => {
  console.error(chalk.red(`\n${LLAMA_ART.mini} Unexpected error:`), error.message);
  console.log(chalk.gray('Please report this at: https://github.com/autollama/autollama/issues'));
  process.exit(1);
});

// Main execution (install() handles its own errors via handleError)
if (require.main === module) {
  const installer = new AutoLlamaInstaller();
  installer.install();
}

module.exports = AutoLlamaInstaller;