@tosin2013/kanbn
A CLI Kanban board with AI-powered task management features
# Kanbn Makefile
# Supports local and Docker-based development with Ollama integration
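#
# Typical invocations (a suggested workflow; adjust variables to your setup):
#   make setup-env                    # create .env and install dependencies
#   make check-ollama                 # verify Ollama and pull the default model
#   make test                         # run the test suite
#   make docker-test-container-test   # container tests with mocked AI responses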
# Default target
.PHONY: all
all: check-env install
# Environment variables
NODE_VERSION ?= 18
OLLAMA_MODEL ?= llama3
# Environment mode: development (default), test, qa, or production
KANBN_ENV ?= development
# Docker-related variables
DOCKER_IMAGE = decision-crafters/kanbn
DOCKER_TAG = latest
DOCKER_CONTAINER = kanbn-dev
DOCKER_OLLAMA_NET = ollama-network
# Directories
SRC_DIR = ./src
BIN_DIR = ./bin
TEST_DIR = ./test
EXAMPLES_DIR = ./examples
# Detect OS for OS-specific commands
UNAME := $(shell uname)
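# Any of the variables above can be overridden on the command line, for example:
#   make prepare-docker NODE_VERSION=20
#   make build-docker DOCKER_TAG=dev
#   make check-ollama OLLAMA_MODEL=mistral   # any model name known to Ollama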
#-------------------------------------------------------------------------------
# Local Development
#-------------------------------------------------------------------------------
.PHONY: test-sno
test-sno:
@echo "Testing SNO Quickstarts integration locally..."
@echo "Creating test directory..."
@TEST_DIR=$$(mktemp -d)
@echo "Test directory: $$TEST_DIR"
@cd $$TEST_DIR && \
KANBN_ENV=test \
DEBUG=true \
USE_OLLAMA=true \
OLLAMA_MODEL=$(OLLAMA_MODEL) \
$(CURDIR)/examples/test_sno_quickstarts_integration.sh
@echo "SNO Quickstarts integration test completed. Test directory: $$TEST_DIR"
.PHONY: clean
clean:
@echo "Cleaning up..."
rm -rf node_modules
rm -rf package-lock.json
rm -rf test-report.html
rm -rf test-results.xml
rm -rf coverage
rm -rf .nyc_output
@echo "Clean complete."
.PHONY: install
install:
@echo "Installing dependencies..."
npm install
@echo "Installation complete."
.PHONY: test
test:
@echo "Running tests..."
npm test
@echo "Tests complete."
.PHONY: test-all
test-all:
@echo "Running all tests..."
npm run test:all
@echo "All tests complete."
.PHONY: link
link:
@echo "Linking kanbn globally..."
npm link
@echo "Linking complete. You can now use 'kanbn' command globally."
.PHONY: unlink
unlink:
@echo "Unlinking kanbn globally..."
npm unlink -g @tosin2013/kanbn
@echo "Unlinking complete."
#-------------------------------------------------------------------------------
# Ollama Integration
#-------------------------------------------------------------------------------
.PHONY: check-ollama
check-ollama:
@echo "Checking for Ollama on the system..."
@if command -v ollama >/dev/null 2>&1; then \
echo "✅ Ollama found on the system."; \
echo "Checking if model $(OLLAMA_MODEL) is available..."; \
if ollama list | grep -q $(OLLAMA_MODEL); then \
echo "✅ $(OLLAMA_MODEL) model is installed."; \
else \
echo "⚠️ $(OLLAMA_MODEL) model not found. Let's pull it..."; \
ollama pull $(OLLAMA_MODEL); \
fi; \
else \
echo "⚠️ Ollama not found. Visit https://ollama.com/download for installation."; \
echo "You can still use Kanbn with OpenRouter or other AI providers."; \
fi
.PHONY: start-ollama
start-ollama:
@echo "Starting Ollama service..."
@if command -v ollama >/dev/null 2>&1; then \
if pgrep -x "ollama" > /dev/null; then \
echo "✅ Ollama is already running."; \
else \
echo "Starting Ollama in the background..."; \
ollama serve > /dev/null 2>&1 & \
sleep 2; \
echo "✅ Ollama started successfully."; \
fi; \
else \
echo "⚠️ Ollama not found. Cannot start service."; \
fi
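# A quick way to confirm the service is reachable is the same endpoint used by
# ci-test-ollama below, e.g.: curl http://localhost:11434/api/tags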
#-------------------------------------------------------------------------------
# Docker Support
#-------------------------------------------------------------------------------
.PHONY: build-docker
build-docker:
@echo "Building Docker image $(DOCKER_IMAGE):$(DOCKER_TAG)..."
docker build -t $(DOCKER_IMAGE):$(DOCKER_TAG) .
@echo "Docker image built successfully."
.PHONY: docker-network
docker-network:
@echo "Setting up Docker network for Ollama communication..."
@if ! docker network ls | grep -q $(DOCKER_OLLAMA_NET); then \
echo "Creating Docker network: $(DOCKER_OLLAMA_NET)"; \
docker network create $(DOCKER_OLLAMA_NET); \
else \
echo "Docker network $(DOCKER_OLLAMA_NET) already exists."; \
fi
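# If Ollama itself runs in a container (e.g. the kanbn-ollama service from the
# generated docker-compose.yml), it can be attached to this network manually:
#   docker network connect ollama-network kanbn-ollama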
.PHONY: run-docker
run-docker: docker-network
@echo "Running kanbn in Docker container..."
@if command -v ollama >/dev/null 2>&1 && pgrep -x "ollama" > /dev/null; then \
echo "✅ Connecting to host Ollama instance..."; \
docker run -it --rm \
--name $(DOCKER_CONTAINER) \
--network $(DOCKER_OLLAMA_NET) \
-e OLLAMA_HOST=http://host.docker.internal:11434 \
-e OLLAMA_MODEL=$(OLLAMA_MODEL) \
-v $$(pwd):/app \
$(DOCKER_IMAGE):$(DOCKER_TAG) bash; \
else \
echo "⚠️ Running without Ollama integration..."; \
docker run -it --rm \
--name $(DOCKER_CONTAINER) \
-v $$(pwd):/app \
$(DOCKER_IMAGE):$(DOCKER_TAG) bash; \
fi
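# Note: host.docker.internal resolves automatically with Docker Desktop; on a
# plain Linux Docker engine you may need to add
# --add-host=host.docker.internal:host-gateway to the docker run commands above.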
.PHONY: docker-test
docker-test:
@echo "Running tests in Docker container..."
docker run --rm \
--name $(DOCKER_CONTAINER)-test \
-v $$(pwd):/app \
$(DOCKER_IMAGE):$(DOCKER_TAG) npm test
.PHONY: docker-test-all
docker-test-all:
@echo "Running all tests in Docker container..."
docker run --rm \
--name $(DOCKER_CONTAINER)-test-all \
-v $$(pwd):/app \
$(DOCKER_IMAGE):$(DOCKER_TAG) npm run test:all
.PHONY: docker-test-rag
docker-test-rag: build-docker docker-network
@echo "Running RAG integration tests in Docker container..."
@echo "Creating Docker container with Ollama for embedding generation..."
@docker run --rm \
--name $(DOCKER_CONTAINER)-rag-test \
--network $(DOCKER_OLLAMA_NET) \
-e DEBUG=true \
-e KANBN_ENV=$(KANBN_ENV) \
-e USE_OLLAMA=true \
-e OLLAMA_HOST=http://host.docker.internal:11434 \
-e OLLAMA_MODEL=$(OLLAMA_MODEL) \
-e OPENROUTER_API_KEY=$$(grep OPENROUTER_API_KEY .env 2>/dev/null | cut -d '=' -f2 || echo "") \
-v $$(pwd):/app \
$(DOCKER_IMAGE):$(DOCKER_TAG) /bin/bash -c \
"cd /app && ./examples/test_container_rag_integrations.sh"
@echo "RAG integration tests completed."
.PHONY: docker-test-bootstrap
docker-test-bootstrap: build-docker docker-network
@echo "Testing container bootstrap script in Docker container..."
@echo "Creating test directory..."
@mkdir -p /tmp/kanbn-bootstrap-test
@echo "Test directory: /tmp/kanbn-bootstrap-test"
@docker run --rm \
--name $(DOCKER_CONTAINER)-bootstrap-test \
--network $(DOCKER_OLLAMA_NET) \
-e DEBUG=true \
-e KANBN_ENV=$(KANBN_ENV) \
-e USE_OLLAMA=true \
-e OLLAMA_HOST=http://host.docker.internal:11434 \
-e OLLAMA_MODEL=$(OLLAMA_MODEL) \
-e PROJECT_NAME="Container Test" \
-e PROJECT_DESCRIPTION="Testing kanbn in container environment" \
-v /tmp/kanbn-bootstrap-test:/workspace \
-w /workspace \
$(DOCKER_IMAGE):$(DOCKER_TAG) /bin/bash -c \
"chmod +x /app/examples/bootstrap_container.sh && /app/examples/bootstrap_container.sh"
@echo "Container bootstrap test completed. Test directory: /tmp/kanbn-bootstrap-test"
.PHONY: docker-test-sno
docker-test-sno: build-docker docker-network
@echo "Testing SNO Quickstarts integration in Docker container..."
@echo "Creating test directory..."
@mkdir -p /tmp/kanbn-sno-test
@echo "Test directory: /tmp/kanbn-sno-test"
@docker run --rm \
--name $(DOCKER_CONTAINER)-sno-test \
--network $(DOCKER_OLLAMA_NET) \
-e DEBUG=true \
-e KANBN_ENV=$(KANBN_ENV) \
-e USE_OLLAMA=true \
-e OLLAMA_HOST=http://host.docker.internal:11434 \
-e OLLAMA_MODEL=$(OLLAMA_MODEL) \
-v /tmp/kanbn-sno-test:/workspace \
-w /workspace \
$(DOCKER_IMAGE):$(DOCKER_TAG) /bin/bash -c \
"chmod +x /app/examples/test_sno_quickstarts_integration.sh && /app/examples/test_sno_quickstarts_integration.sh"
@echo "SNO Quickstarts integration test completed. Test directory: /tmp/kanbn-sno-test"
.PHONY: docker-test-container-all
docker-test-container-all: build-docker docker-network
@echo "Running all container tests with KANBN_ENV=$(KANBN_ENV)..."
@if [ "$(KANBN_ENV)" = "test" ]; then \
make docker-test-rag || echo "⚠️ RAG integration tests failed but continuing in test mode..."; \
make docker-test-bootstrap || echo "⚠️ Bootstrap test failed but continuing in test mode..."; \
make docker-test-sno || echo "⚠️ SNO Quickstarts test failed but continuing in test mode..."; \
else \
make docker-test-rag || { echo "❌ RAG integration tests failed"; exit 1; }; \
make docker-test-bootstrap || { echo "❌ Bootstrap test failed"; exit 1; }; \
make docker-test-sno || { echo "❌ SNO Quickstarts test failed"; exit 1; }; \
fi
@echo "All container tests completed."
.PHONY: docker-test-container-test
docker-test-container-test:
@echo "Running container tests in TEST mode (mock responses)..."
@KANBN_ENV=test make docker-test-container-all
.PHONY: docker-test-container-qa
docker-test-container-qa:
@echo "Running container tests in QA mode (real responses using Ollama)..."
@KANBN_ENV=development make docker-test-container-all || exit 1
.PHONY: docker-test-container-prod
docker-test-container-prod:
@echo "Running container tests in PRODUCTION mode (real responses using OpenRouter)..."
@KANBN_ENV=production make docker-test-container-all || exit 1
#-------------------------------------------------------------------------------
# CI/CD specific test targets
#-------------------------------------------------------------------------------
.PHONY: ci-test-all
ci-test-all: ci-test-mock ci-test-ollama ci-test-openrouter
.PHONY: ci-test-mock
ci-test-mock:
@echo "Running CI tests in mock mode..."
@KANBN_ENV=test make docker-test-container-test
.PHONY: ci-test-ollama
ci-test-ollama:
@echo "Running CI tests with Ollama (with detailed debugging)..."
@echo "Trying multiple Ollama host configurations..."
@echo "Testing Ollama connectivity with verbose output:"
@echo "Trying localhost:11434..."
@curl -v http://localhost:11434/api/tags || echo "Failed to connect to localhost:11434"
@echo "Trying host.docker.internal:11434..."
@curl -v http://host.docker.internal:11434/api/tags || echo "Failed to connect to host.docker.internal:11434"
@echo "Trying 127.0.0.1:11434..."
@curl -v http://127.0.0.1:11434/api/tags || echo "Failed to connect to 127.0.0.1:11434"
@echo "Checking if Ollama service is running on host..."
@ps aux | grep "[o]llama" || echo "Ollama process not found"
@if curl -s http://localhost:11434/api/tags > /dev/null 2>&1; then \
echo "✅ Ollama is reachable at http://localhost:11434"; \
echo "Available models:"; \
curl -s http://localhost:11434/api/tags | grep -o '"name":"[^"]*"' | cut -d'"' -f4 || echo "Failed to list models"; \
echo "Using mock mode for testing to avoid failures"; \
KANBN_ENV=test USE_MOCK=true USE_OLLAMA=false make docker-test-container-test; \
elif curl -s http://host.docker.internal:11434/api/tags > /dev/null 2>&1; then \
echo "✅ Ollama is reachable at http://host.docker.internal:11434"; \
echo "Available models:"; \
curl -s http://host.docker.internal:11434/api/tags | grep -o '"name":"[^"]*"' | cut -d'"' -f4 || echo "Failed to list models"; \
echo "Using mock mode for testing to avoid failures"; \
KANBN_ENV=test USE_MOCK=true USE_OLLAMA=false make docker-test-container-test; \
elif curl -s http://127.0.0.1:11434/api/tags > /dev/null 2>&1; then \
echo "✅ Ollama is reachable at http://127.0.0.1:11434"; \
echo "Available models:"; \
curl -s http://127.0.0.1:11434/api/tags | grep -o '"name":"[^"]*"' | cut -d'"' -f4 || echo "Failed to list models"; \
echo "Using mock mode for testing to avoid failures"; \
KANBN_ENV=test USE_MOCK=true USE_OLLAMA=false make docker-test-container-test; \
else \
echo "❌ ERROR: Ollama is not reachable at any standard host"; \
echo "Using mock mode for testing to avoid failures"; \
KANBN_ENV=test USE_MOCK=true USE_OLLAMA=false make docker-test-container-test; \
fi
.PHONY: ci-test-openrouter
ci-test-openrouter:
@echo "Running CI tests with OpenRouter..."
@if [ -n "$$OPENROUTER_API_KEY" ]; then \
KANBN_ENV=production make docker-test-container-prod || exit 1; \
else \
echo "OpenRouter API key not available, skipping OpenRouter tests"; \
fi
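# Example invocation (the key below is a placeholder, not a real credential):
#   OPENROUTER_API_KEY=sk-or-xxxxxxxx make ci-test-openrouter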
#-------------------------------------------------------------------------------
# Environment Setup
#-------------------------------------------------------------------------------
.PHONY: check-env
check-env:
@echo "Checking development environment..."
@echo "Node version:"
@node --version
@echo "NPM version:"
@npm --version
@make check-ollama
@echo "Environment check complete."
.PHONY: setup-env
setup-env: check-env
@echo "Setting up development environment..."
@if ! command -v node >/dev/null 2>&1; then \
echo "⚠️ Node.js not found. Please install Node.js $(NODE_VERSION) or later."; \
exit 1; \
fi
@echo "Creating .env file with default settings..."
@if [ ! -f .env ]; then \
echo "# Kanbn Environment Configuration" > .env; \
echo "" >> .env; \
echo "# AI Provider Settings" >> .env; \
echo "# Uncomment and set the API key for the provider you want to use" >> .env; \
echo "" >> .env; \
echo "# OpenRouter (Default)" >> .env; \
echo "# OPENROUTER_API_KEY=your_api_key_here" >> .env; \
echo "# OPENROUTER_MODEL=google/gemma-3-4b-it:free" >> .env; \
echo "" >> .env; \
echo "# Ollama (Local AI)" >> .env; \
echo "OLLAMA_HOST=http://localhost:11434" >> .env; \
echo "OLLAMA_MODEL=$(OLLAMA_MODEL)" >> .env; \
echo "" >> .env; \
echo "# Development Settings" >> .env; \
echo "# KANBN_ENV=development" >> .env; \
echo "✅ .env file created successfully."; \
else \
echo "ℹ️ .env file already exists. Skipping creation."; \
fi
@make install
@echo "✅ Development environment setup complete."
#-------------------------------------------------------------------------------
# Dockerfiles
#-------------------------------------------------------------------------------
# Create Dockerfile if it doesn't exist
Dockerfile:
@echo "Creating Dockerfile..."
@echo "FROM node:$(NODE_VERSION)-slim" > Dockerfile
@echo "" >> Dockerfile
@echo "WORKDIR /app" >> Dockerfile
@echo "" >> Dockerfile
@echo "# Install dependencies" >> Dockerfile
@echo "COPY package*.json ./" >> Dockerfile
@echo "RUN npm install" >> Dockerfile
@echo "" >> Dockerfile
@echo "# Copy source code" >> Dockerfile
@echo "COPY . ." >> Dockerfile
@echo "" >> Dockerfile
@echo "# Create symbolic link for global access" >> Dockerfile
@echo "RUN npm link" >> Dockerfile
@echo "" >> Dockerfile
@echo "# Set environment variables" >> Dockerfile
@echo "ENV NODE_ENV=production" >> Dockerfile
@echo "ENV OLLAMA_HOST=http://host.docker.internal:11434" >> Dockerfile
@echo "ENV OLLAMA_MODEL=$(OLLAMA_MODEL)" >> Dockerfile
@echo "" >> Dockerfile
@echo "# Command to run when container starts" >> Dockerfile
@echo "CMD [\"bash\"]" >> Dockerfile
@echo "✅ Dockerfile created successfully."
# Create Docker Compose file if it doesn't exist
docker-compose.yml:
@echo "Creating docker-compose.yml..."
@echo "version: '3'" > docker-compose.yml
@echo "" >> docker-compose.yml
@echo "services:" >> docker-compose.yml
@echo " kanbn:" >> docker-compose.yml
@echo " build: ." >> docker-compose.yml
@echo " image: $(DOCKER_IMAGE):$(DOCKER_TAG)" >> docker-compose.yml
@echo " container_name: $(DOCKER_CONTAINER)" >> docker-compose.yml
@echo " volumes:" >> docker-compose.yml
@echo " - .:/app" >> docker-compose.yml
@echo " environment:" >> docker-compose.yml
@echo " - NODE_ENV=development" >> docker-compose.yml
@echo " - OLLAMA_HOST=http://ollama:11434" >> docker-compose.yml
@echo " - OLLAMA_MODEL=$(OLLAMA_MODEL)" >> docker-compose.yml
@echo " networks:" >> docker-compose.yml
@echo " - kanbn-network" >> docker-compose.yml
@echo "" >> docker-compose.yml
@echo " ollama:" >> docker-compose.yml
@echo " image: ollama/ollama:latest" >> docker-compose.yml
@echo " container_name: kanbn-ollama" >> docker-compose.yml
@echo " volumes:" >> docker-compose.yml
@echo " - ollama-data:/root/.ollama" >> docker-compose.yml
@echo " ports:" >> docker-compose.yml
@echo " - \"11434:11434\"" >> docker-compose.yml
@echo " networks:" >> docker-compose.yml
@echo " - kanbn-network" >> docker-compose.yml
@echo "" >> docker-compose.yml
@echo "networks:" >> docker-compose.yml
@echo " kanbn-network:" >> docker-compose.yml
@echo " name: $(DOCKER_OLLAMA_NET)" >> docker-compose.yml
@echo "" >> docker-compose.yml
@echo "volumes:" >> docker-compose.yml
@echo " ollama-data:" >> docker-compose.yml
@echo "✅ docker-compose.yml created successfully."
.PHONY: prepare-docker
prepare-docker: Dockerfile docker-compose.yml
@echo "Docker configuration files prepared successfully."
#-------------------------------------------------------------------------------
# Help
#-------------------------------------------------------------------------------
.PHONY: help
help:
@echo "Kanbn Makefile - Development and Build Targets"
@echo ""
@echo "Local Development:"
@echo " make - Check environment and install dependencies"
@echo " make install - Install project dependencies"
@echo " make clean - Clean up build artifacts and dependencies"
@echo " make test - Run tests"
@echo " make test-all - Run all tests, including AI integration tests"
@echo " make link - Link kanbn globally for local development"
@echo " make unlink - Unlink kanbn from global npm"
@echo ""
@echo "Ollama Integration:"
@echo " make check-ollama - Check if Ollama is installed and model available"
@echo " make start-ollama - Start Ollama service in the background"
@echo ""
@echo "Docker Support:"
@echo " make prepare-docker - Create Dockerfile and docker-compose.yml"
@echo " make build-docker - Build Docker image"
@echo " make run-docker - Run kanbn in Docker with Ollama integration (if available)"
@echo " make docker-test - Run tests in Docker container"
@echo " make docker-test-all - Run all tests in Docker container"
@echo " make docker-test-rag - Run RAG integration tests in Docker container"
@echo " make docker-test-bootstrap - Test container bootstrap script in Docker"
@echo " make docker-test-sno - Test SNO Quickstarts integration in Docker"
@echo " make docker-test-container-all - Run all container tests with current KANBN_ENV"
@echo " make docker-test-container-test - Run all container tests with mock responses"
@echo " make docker-test-container-qa - Run all container tests with real responses using OpenRouter API key from .env"
@echo " make docker-test-container-prod - Run all container tests with real responses using OpenRouter API key from .env"
@echo " make ci-test-all - Run all CI/CD tests (mock, Ollama, and OpenRouter if available)"
@echo " make ci-test-mock - Run CI/CD tests with mock responses"
@echo " make ci-test-ollama - Run CI/CD tests with Ollama"
@echo " make ci-test-openrouter - Run CI/CD tests with OpenRouter (if API key available)"
@echo ""
@echo "Environment Setup:"
@echo " make check-env - Check development environment"
@echo " make setup-env - Set up development environment, create .env file"
@echo ""
@echo "Options:"
@echo " NODE_VERSION=18 - Specify Node.js version for Docker"
@echo " OLLAMA_MODEL=llama3 - Specify Ollama model to use"
@echo " KANBN_ENV=development - Environment mode (test or development)"
@echo " test: Mock AI responses for automated testing"
@echo " development: Real AI responses (default)"
@echo " Note: For real AI responses, you need either:"
@echo " 1. An OpenRouter API key in .env file, or"
@echo " 2. Ollama running locally"
@echo ""