pame-core-cli
Version:
PAME.AI Core Operating System CLI - Open Source AI Platform for Agentic Commerce
963 lines (861 loc) • 27.7 kB
JavaScript
import { execSync } from 'child_process';
import { randomBytes } from 'node:crypto';
import { Command } from 'commander';
import chalk from 'chalk';
import * as fs from 'fs-extra';
import inquirer from 'inquirer';
import * as path from 'path';
export const cursorCommand = new Command('cursor')
.description('Generate advanced Cursor workspace templates for PAME.AI core development')
.option('-p, --project <name>', 'Project name or ID')
.option('-t, --template <type>', 'Template type (landing, app, api, full-stack, infra, platform)', 'full-stack')
.option('-o, --output <dir>', 'Output directory', '.')
.option('--team', 'Include team collaboration features')
.option('--production', 'Include production configurations')
.option('--no-secrets', 'Skip sensitive secrets setup')
.action(async (options) => {
try {
console.log(chalk.blue('š Generating Core Cursor workspace for PAME.AI...'));
// For core team, we might want to select from existing projects
let projectName = options.project;
if (!projectName) {
const { selectedProject } = await inquirer.prompt([{
type: 'list',
name: 'selectedProject',
message: 'Select a PAME.AI project:',
choices: await fetchCoreProjects()
}]);
projectName = selectedProject;
}
// Ensure projectName is defined
if (!projectName) {
throw new Error('Project name is required');
}
const outputDir = path.resolve(options.output || '.');
const workspaceDir = path.join(outputDir, projectName);
// Create workspace directory
await fs.ensureDir(workspaceDir);
// Generate enhanced Cursor rules for core team
await generateCoreCursorRules(workspaceDir, options.template || 'full-stack', projectName, options);
// Generate environment configuration with core team secrets
if (options.includeSecrets !== false) {
await generateCoreEnvironmentConfig(workspaceDir, projectName, options);
}
// Copy advanced templates
await copyCoreTemplates(workspaceDir, options.template || 'full-stack');
// Generate project configuration with CI/CD
await generateCoreProjectConfig(workspaceDir, projectName, options);
// Setup infrastructure as code if needed
if (options.template === 'infra' || options.production) {
await setupInfrastructure(workspaceDir, projectName);
}
// Initialize git with advanced features
await initializeCoreGitRepo(workspaceDir, options);
console.log(chalk.green(`ā
Core Cursor workspace created at: ${workspaceDir}`));
console.log(chalk.cyan('\nš Core Team Features:'));
console.log(chalk.gray(' - Advanced Cursor rules with internal APIs'));
console.log(chalk.gray(' - Production secrets management'));
console.log(chalk.gray(' - CI/CD pipeline configurations'));
console.log(chalk.gray(' - Team collaboration tools'));
console.log(chalk.gray(' - Infrastructure as code templates'));
if (options.team) {
console.log(chalk.yellow('\nš„ Team Features Enabled:'));
console.log(chalk.gray(' - Shared team rules in .cursor/rules/team/'));
console.log(chalk.gray(' - Code review templates'));
console.log(chalk.gray(' - Team-specific environment configs'));
}
}
catch (error) {
console.error(chalk.red('ā Error creating Core Cursor workspace:'), error);
process.exit(1);
}
});
/**
 * Lists the PAME.AI core projects a workspace can be generated for.
 * In production, this would fetch from the PAME.AI API; for now the
 * catalogue is hard-coded.
 * @returns {Promise<string[]>} project identifiers, in display order
 */
async function fetchCoreProjects() {
    const coreProjectCatalogue = [
        'pame-landing',
        'pame-app-store',
        'pame-developer-platform',
        'pame-shared-memory',
        'pame-auth-service',
        'pame-payment-gateway',
        'pame-admin-dashboard'
    ];
    return coreProjectCatalogue;
}
/**
 * Writes the Cursor rule files into `<workspaceDir>/.cursor/rules/`:
 * - `core-main.mdc`: always-applied core-team rule for the project;
 * - `team/collaboration.mdc`: only when `options.team` is set;
 * - `infrastructure.mdc`: only for the 'infra' and 'platform' templates.
 *
 * The rule contents are emitted verbatim from the template literals
 * below, with the project name interpolated.
 *
 * @param {string} workspaceDir - workspace root directory
 * @param {string} template - template type (landing, app, api, full-stack, infra, platform)
 * @param {string} projectName - selected project identifier
 * @param {object} options - parsed CLI options (reads `team`)
 */
async function generateCoreCursorRules(workspaceDir, template, projectName, options) {
const rulesDir = path.join(workspaceDir, '.cursor', 'rules');
await fs.ensureDir(rulesDir);
// Core team main rule with additional context
const coreMainRule = `---
id: pame-core-${projectName}
title: PAME.AI Core Team - ${projectName}
scope: core-development
description: Core team development guidelines with full platform access
alwaysApply: true
---
# PAME.AI Core Team Development - ${projectName}
## Core Team Context
You have full access to PAME.AI internal systems and APIs. This includes:
- Internal service mesh APIs
- Production database access (with audit logging)
- Infrastructure management tools
- Deployment pipelines
## Architecture Decisions
- **Microservices**: Each service should be independently deployable
- **Event-Driven**: Use Kafka/RabbitMQ for async communication
- **API Gateway**: All external APIs go through Kong/Traefik
- **Service Mesh**: Istio for internal service communication
## Core Development Standards
1. **Code Quality**
- 100% TypeScript with strict mode
- Mandatory code reviews by 2+ team members
- Pre-commit hooks for linting and testing
- SonarQube quality gates must pass
2. **Security**
- All PRs must pass security scanning
- Use HashiCorp Vault for secrets
- Implement RBAC for all endpoints
- Regular dependency audits
3. **Performance**
- Response time SLO: p99 < 100ms
- Database queries must use indexes
- Implement caching at multiple layers
- Load testing before production
4. **Monitoring**
- Structured logging with correlation IDs
- Distributed tracing with Jaeger
- Custom metrics in Prometheus
- Alerts for all critical paths
## Internal APIs
- Auth Service: https://auth.internal.pame.ai
- User Service: https://users.internal.pame.ai
- Memory Service: https://memory.internal.pame.ai
- Analytics: https://analytics.internal.pame.ai
## Database Access
\`\`\`typescript
// Use connection pooling
import { db } from '@pame/core-db';
// Always use parameterized queries
const users = await db.query(
'SELECT * FROM users WHERE org_id = $1',
[orgId]
);
\`\`\`
## Deployment
- Feature branches deploy to preview environments
- Staging deploys on merge to develop
- Production requires approval from 2 core members
- Rollback strategy must be documented
@internal-apis.mdx
@infrastructure.mdx
@security-guidelines.mdx
`;
await fs.writeFile(path.join(rulesDir, 'core-main.mdc'), coreMainRule);
// Team collaboration rules (only generated with --team)
if (options.team) {
const teamDir = path.join(rulesDir, 'team');
await fs.ensureDir(teamDir);
const teamRule = `---
id: pame-team-collaboration
title: Team Collaboration Guidelines
scope: team
description: How the PAME.AI core team collaborates
---
# Team Collaboration
## Code Review Process
1. Create feature branch from develop
2. Open PR with detailed description
3. Tag relevant team members
4. Address all comments before merge
5. Squash commits with meaningful message
## Communication
- Slack: #pame-core-dev for discussions
- Linear: For issue tracking
- Notion: For documentation
- Figma: For design collaboration
## On-Call Rotation
- PagerDuty for incident management
- Runbooks in internal wiki
- Post-mortems for all incidents
- Blameless culture
## Release Process
1. Cut release branch from develop
2. Run full regression suite
3. Deploy to staging
4. QA sign-off required
5. Production deployment window: Tue/Thu 2-4pm PST
`;
await fs.writeFile(path.join(teamDir, 'collaboration.mdc'), teamRule);
}
// Infrastructure rules for platform/infra templates only
if (template === 'infra' || template === 'platform') {
const infraRule = `---
id: pame-infrastructure
title: Infrastructure as Code Guidelines
scope: infrastructure
description: Managing PAME.AI infrastructure
globs:
- "**/terraform/**"
- "**/k8s/**"
- "**/helm/**"
---
# Infrastructure Management
## Terraform Standards
- Use remote state in GCS with encryption
- Workspace per environment
- Module versioning required
- Plan output must be reviewed
## Kubernetes
- Helm charts for all deployments
- Namespace isolation per service
- Network policies enforced
- Pod security policies enabled
## CI/CD Pipeline
\`\`\`yaml
# .github/workflows/deploy.yml
name: Deploy to Production
on:
push:
branches: [main]
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Deploy
run: |
terraform apply -auto-approve
\`\`\`
## Monitoring Stack
- Prometheus for metrics
- Grafana for visualization
- ELK stack for logs
- Jaeger for tracing
`;
await fs.writeFile(path.join(rulesDir, 'infrastructure.mdc'), infraRule);
}
}
/**
 * Writes `<workspaceDir>/.env` with internal service URLs, credentials
 * and deployment settings, plus (in production mode) a Vault policy
 * file `vault-policy.hcl`.
 *
 * NOTE(review): the interpolated generateSecret()/generateServiceToken()
 * calls embed freshly generated random values directly into the .env —
 * presumably local-dev placeholders, since production values reference
 * Vault paths; confirm with the team. The generated `.gitignore`
 * (see initializeCoreGitRepo) excludes `.env` from version control.
 *
 * @param {string} workspaceDir - workspace root directory
 * @param {string} projectName - used for DB names, namespace, Vault paths
 * @param {object} options - parsed CLI options (reads `production`)
 */
async function generateCoreEnvironmentConfig(workspaceDir, projectName, options) {
const envContent = `# PAME.AI Core Team Configuration
# Project: ${projectName}
# Environment: ${options.production ? 'production' : 'development'}
# Generated: ${new Date().toISOString()}
# Core Services (Internal)
INTERNAL_AUTH_SERVICE_URL=https://auth.internal.pame.ai
INTERNAL_USER_SERVICE_URL=https://users.internal.pame.ai
INTERNAL_MEMORY_SERVICE_URL=https://memory.internal.pame.ai
INTERNAL_ANALYTICS_URL=https://analytics.internal.pame.ai
# Service Credentials
SERVICE_AUTH_TOKEN=${generateServiceToken()}
SERVICE_SECRET_KEY=${generateSecret()}
# Database Connections
PRIMARY_DATABASE_URL=postgresql://pame_core:${generateSecret()}@db.internal.pame.ai:5432/${projectName}
REPLICA_DATABASE_URL=postgresql://pame_readonly:${generateSecret()}@db-replica.internal.pame.ai:5432/${projectName}
REDIS_URL=redis://cache.internal.pame.ai:6379/0
# Message Queue
RABBITMQ_URL=amqp://pame:${generateSecret()}@mq.internal.pame.ai:5672
KAFKA_BROKERS=kafka-1.internal.pame.ai:9092,kafka-2.internal.pame.ai:9092
# Monitoring & Logging
SENTRY_DSN=https://${generateSecret()}@sentry.pame.ai/core-${projectName}
DATADOG_API_KEY=${generateSecret()}
LOG_LEVEL=${options.production ? 'info' : 'debug'}
# Feature Flags (LaunchDarkly)
LD_SDK_KEY=${generateSecret()}
LD_CLIENT_ID=${generateSecret()}
# Cloud Provider Credentials
GCP_PROJECT_ID=pame-ai-core
GCP_SERVICE_ACCOUNT_KEY=${options.production ? 'vault:secret/gcp/core-sa' : 'local-dev-key'}
AWS_ACCESS_KEY_ID=${options.production ? 'vault:secret/aws/access-key' : 'local-dev-key'}
AWS_SECRET_ACCESS_KEY=${options.production ? 'vault:secret/aws/secret-key' : 'local-dev-key'}
# HashiCorp Vault
VAULT_ADDR=https://vault.internal.pame.ai
VAULT_TOKEN=${options.production ? 'will-be-injected' : generateSecret()}
# Deployment Configuration
ENVIRONMENT=${options.production ? 'production' : 'development'}
DEPLOYMENT_REGION=us-central1
DEPLOYMENT_ZONE=us-central1-a
K8S_NAMESPACE=${projectName}
${options.production ? `
# Production Only
ENABLE_PROFILING=true
ENABLE_TRACING=true
ENABLE_AUDIT_LOGGING=true
SSL_CERT_PATH=/etc/ssl/certs/pame.crt
SSL_KEY_PATH=/etc/ssl/private/pame.key
` : `
# Development Only
ENABLE_DEBUG_ENDPOINTS=true
SKIP_AUTH_VERIFICATION=true
USE_LOCAL_STORAGE=true
`}
`;
await fs.writeFile(path.join(workspaceDir, '.env'), envContent);
// Create Vault policy granting read access to project-scoped and
// shared secrets plus dynamic DB credentials (production only)
if (options.production) {
const vaultConfig = `# Vault Configuration
path "secret/data/${projectName}/*" {
capabilities = ["read", "list"]
}
path "secret/data/shared/*" {
capabilities = ["read"]
}
path "database/creds/${projectName}" {
capabilities = ["read"]
}
`;
await fs.writeFile(path.join(workspaceDir, 'vault-policy.hcl'), vaultConfig);
}
}
/**
 * Scaffolds the source tree for a core workspace: creates the standard
 * directory layout and writes two TypeScript starter files —
 * `src/services/base.service.ts` (an abstract service base class with
 * metrics/caching helpers) and `src/middleware/auth.ts` (Express
 * authenticate/authorize middleware).
 *
 * NOTE(review): the `template` parameter is accepted but not read in
 * this function — presumably reserved for template-specific scaffolding;
 * confirm intent.
 *
 * @param {string} workspaceDir - workspace root directory
 * @param {string} template - template type (currently unused here)
 */
async function copyCoreTemplates(workspaceDir, template) {
// Create advanced project structure
const dirs = [
'src/services',
'src/controllers',
'src/middleware',
'src/utils',
'src/types',
'tests/unit',
'tests/integration',
'tests/e2e',
'scripts',
'docs/api',
'docs/architecture',
'.github/workflows',
'k8s/base',
'k8s/overlays/dev',
'k8s/overlays/staging',
'k8s/overlays/prod'
];
// Created sequentially; ensureDir is idempotent so reruns are safe
for (const dir of dirs) {
await fs.ensureDir(path.join(workspaceDir, dir));
}
// Create core service template (emitted verbatim as TypeScript source)
const serviceTemplate = `import { Logger } from '@pame/core-logger';
import { Metrics } from '@pame/core-metrics';
import { Cache } from '@pame/core-cache';
import { Database } from '@pame/core-db';
export abstract class BaseService {
protected logger: Logger;
protected metrics: Metrics;
protected cache: Cache;
protected db: Database;
constructor(name: string) {
this.logger = new Logger(name);
this.metrics = new Metrics(name);
this.cache = new Cache(name);
this.db = new Database();
}
protected async withMetrics<T>(
operation: string,
fn: () => Promise<T>
): Promise<T> {
const timer = this.metrics.startTimer(operation);
try {
const result = await fn();
timer.success();
return result;
} catch (error) {
timer.failure();
this.logger.error(\`\${operation} failed\`, error);
throw error;
}
}
protected async withCache<T>(
key: string,
ttl: number,
fn: () => Promise<T>
): Promise<T> {
const cached = await this.cache.get<T>(key);
if (cached) return cached;
const result = await fn();
await this.cache.set(key, result, ttl);
return result;
}
}
`;
await fs.writeFile(path.join(workspaceDir, 'src/services/base.service.ts'), serviceTemplate);
// Create Express auth middleware template (emitted verbatim)
const middlewareTemplate = `import { Request, Response, NextFunction } from 'express';
import { Logger } from '@pame/core-logger';
import { verify } from '@pame/core-auth';
const logger = new Logger('middleware');
export const authenticate = async (
req: Request,
res: Response,
next: NextFunction
) => {
try {
const token = req.headers.authorization?.replace('Bearer ', '');
if (!token) {
return res.status(401).json({ error: 'No token provided' });
}
const decoded = await verify(token);
req.user = decoded;
req.context = {
userId: decoded.id,
orgId: decoded.orgId,
correlationId: req.headers['x-correlation-id'] || generateId(),
startTime: Date.now()
};
next();
} catch (error) {
logger.error('Authentication failed', error);
res.status(401).json({ error: 'Invalid token' });
}
};
export const authorize = (permissions: string[]) => {
return (req: Request, res: Response, next: NextFunction) => {
const userPermissions = req.user?.permissions || [];
const hasPermission = permissions.some(p => userPermissions.includes(p));
if (!hasPermission) {
return res.status(403).json({ error: 'Insufficient permissions' });
}
next();
};
};
`;
await fs.writeFile(path.join(workspaceDir, 'src/middleware/auth.ts'), middlewareTemplate);
}
/**
 * Writes the project-level configuration for a core workspace:
 * - `package.json`: dev/build/test/deploy scripts plus internal
 *   workspace dependencies;
 * - `Dockerfile`: multi-stage production build running as non-root;
 * - `.github/workflows/cicd.yml`: test -> build -> deploy pipeline.
 *
 * @param {string} workspaceDir - workspace root directory
 * @param {string} projectName - used for package scope, image tags and
 *   deployment names
 * @param {object} options - parsed CLI options (currently unused here;
 *   kept for signature parity with the sibling generators)
 */
async function generateCoreProjectConfig(workspaceDir, projectName, options) {
    // Enhanced package.json for core team
    const packageJson = {
        name: `@pame-core/${projectName}`,
        version: '0.1.0',
        private: true,
        scripts: {
            // Development
            'dev': 'nodemon --exec ts-node src/index.ts',
            'dev:debug': 'nodemon --inspect --exec ts-node src/index.ts',
            // Building
            'build': 'tsc && npm run build:docker',
            // BUGFIX: was a single-quoted string, so "${projectName}" was
            // written literally (uninterpolated) into package.json.
            'build:docker': `docker build -t pame/${projectName}:latest .`,
            // Testing
            'test': 'jest',
            'test:watch': 'jest --watch',
            'test:coverage': 'jest --coverage',
            'test:integration': 'jest --testPathPattern=tests/integration',
            'test:e2e': 'jest --testPathPattern=tests/e2e',
            // Quality
            'lint': 'eslint . --ext .ts,.tsx',
            'lint:fix': 'eslint . --ext .ts,.tsx --fix',
            'type-check': 'tsc --noEmit',
            'security-check': 'npm audit && snyk test',
            // Database
            'db:migrate': 'knex migrate:latest',
            'db:rollback': 'knex migrate:rollback',
            'db:seed': 'knex seed:run',
            // Deployment
            'deploy:dev': 'npm run build && kubectl apply -k k8s/overlays/dev',
            'deploy:staging': 'npm run build && kubectl apply -k k8s/overlays/staging',
            'deploy:prod': 'npm run build && kubectl apply -k k8s/overlays/prod',
            // Monitoring
            // BUGFIX: same uninterpolated-${projectName} issue as build:docker.
            'logs': `kubectl logs -f deployment/${projectName}`,
            'metrics': 'curl http://localhost:9090/metrics',
            // Documentation
            'docs:generate': 'typedoc --out docs/api src',
            'docs:serve': 'serve docs/api'
        },
        dependencies: {
            '@pame/core-auth': 'workspace:*',
            '@pame/core-db': 'workspace:*',
            '@pame/core-logger': 'workspace:*',
            '@pame/core-metrics': 'workspace:*',
            '@pame/core-cache': 'workspace:*',
            'express': '^4.18.0',
            'knex': '^2.4.0',
            'pg': '^8.10.0',
            'redis': '^4.6.0',
            'amqplib': '^0.10.0',
            'kafkajs': '^2.2.0'
        },
        devDependencies: {
            '@types/node': '^20.0.0',
            '@types/express': '^4.17.0',
            '@types/jest': '^29.5.0',
            'jest': '^29.5.0',
            'ts-jest': '^29.1.0',
            'nodemon': '^3.0.0',
            'ts-node': '^10.9.0',
            'typescript': '^5.0.0',
            'eslint': '^8.0.0',
            '@typescript-eslint/parser': '^5.0.0',
            '@typescript-eslint/eslint-plugin': '^5.0.0',
            'snyk': '^1.1000.0',
            'typedoc': '^0.24.0'
        }
    };
    await fs.writeJSON(path.join(workspaceDir, 'package.json'), packageJson, { spaces: 2 });
    // Docker configuration (flat layout is valid Dockerfile syntax)
    const dockerfile = `# Multi-stage build for production
FROM node:18-alpine AS builder
WORKDIR /app
COPY package*.json ./
RUN npm ci --only=production
FROM node:18-alpine AS dev-deps
WORKDIR /app
COPY package*.json ./
RUN npm ci
FROM dev-deps AS build
WORKDIR /app
COPY . .
RUN npm run build
FROM node:18-alpine AS runtime
WORKDIR /app
RUN apk add --no-cache dumb-init
# Security: Run as non-root user
RUN addgroup -g 1001 -S nodejs
RUN adduser -S nodejs -u 1001
COPY --from=builder --chown=nodejs:nodejs /app/node_modules ./node_modules
COPY --from=build --chown=nodejs:nodejs /app/dist ./dist
COPY --chown=nodejs:nodejs package*.json ./
USER nodejs
EXPOSE 3000
ENTRYPOINT ["dumb-init", "--"]
CMD ["node", "dist/index.js"]
`;
    await fs.writeFile(path.join(workspaceDir, 'Dockerfile'), dockerfile);
    // GitHub Actions workflow.
    // NOTE(review): the previous template emitted the YAML with no
    // indentation, which is not valid YAML; standard nesting has been
    // reconstructed — verify against the intended pipeline.
    const workflow = `name: CI/CD Pipeline
on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main, develop]
jobs:
  test:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:14
        env:
          POSTGRES_PASSWORD: test
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
      redis:
        image: redis:7
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 18
          cache: 'npm'
      - name: Install dependencies
        run: npm ci
      - name: Run linter
        run: npm run lint
      - name: Type check
        run: npm run type-check
      - name: Run tests
        run: npm run test:coverage
        env:
          DATABASE_URL: postgresql://postgres:test@localhost:5432/test
          REDIS_URL: redis://localhost:6379
      - name: Upload coverage
        uses: codecov/codecov-action@v3
      - name: Security scan
        run: npm run security-check
        env:
          SNYK_TOKEN: \${{ secrets.SNYK_TOKEN }}
  build:
    needs: test
    runs-on: ubuntu-latest
    if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop'
    steps:
      - uses: actions/checkout@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to Container Registry
        uses: docker/login-action@v2
        with:
          registry: gcr.io
          username: _json_key
          password: \${{ secrets.GCP_SA_KEY }}
      - name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: .
          push: true
          tags: |
            gcr.io/pame-ai-core/${projectName}:latest
            gcr.io/pame-ai-core/${projectName}:\${{ github.sha }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
  deploy:
    needs: build
    runs-on: ubuntu-latest
    if: github.ref == 'refs/heads/main'
    steps:
      - uses: actions/checkout@v3
      - name: Setup kubectl
        uses: azure/setup-kubectl@v3
      - name: Deploy to Kubernetes
        run: |
          echo "\${{ secrets.KUBECONFIG }}" | base64 -d > kubeconfig
          export KUBECONFIG=kubeconfig
          kubectl set image deployment/${projectName} ${projectName}=gcr.io/pame-ai-core/${projectName}:\${{ github.sha }}
          kubectl rollout status deployment/${projectName}
`;
    await fs.ensureDir(path.join(workspaceDir, '.github/workflows'));
    await fs.writeFile(path.join(workspaceDir, '.github/workflows/cicd.yml'), workflow);
}
/**
 * Writes infrastructure-as-code skeletons for the workspace:
 * - `terraform/main.tf`: GCS remote state, google/kubernetes providers,
 *   GKE cluster, database and redis modules;
 * - `k8s/base/deployment.yaml`: base Kubernetes Deployment manifest.
 *
 * Only called for the 'infra' template or when --production is set
 * (see cursorCommand).
 *
 * NOTE(review): the YAML/HCL template bodies below are emitted exactly
 * as written; their indentation appears to have been flattened — verify
 * the generated files against the intended manifests.
 *
 * @param {string} workspaceDir - workspace root directory
 * @param {string} projectName - used for cluster, state prefix and
 *   deployment names
 */
async function setupInfrastructure(workspaceDir, projectName) {
// Terraform configuration (emitted verbatim)
const terraformMain = `terraform {
required_version = ">= 1.0"
backend "gcs" {
bucket = "pame-terraform-state"
prefix = "${projectName}"
}
required_providers {
google = {
source = "hashicorp/google"
version = "~> 4.0"
}
kubernetes = {
source = "hashicorp/kubernetes"
version = "~> 2.0"
}
}
}
provider "google" {
project = var.project_id
region = var.region
}
module "gke" {
source = "terraform-google-modules/kubernetes-engine/google"
version = "~> 25.0"
project_id = var.project_id
name = "${projectName}-cluster"
region = var.region
node_pools = [
{
name = "default-pool"
machine_type = "n2-standard-4"
min_count = 3
max_count = 10
disk_size_gb = 100
disk_type = "pd-ssd"
auto_repair = true
auto_upgrade = true
preemptible = false
}
]
}
module "database" {
source = "./modules/database"
project_id = var.project_id
region = var.region
service_name = "${projectName}"
enable_backup = true
backup_retention_days = 30
}
module "redis" {
source = "./modules/redis"
project_id = var.project_id
region = var.region
service_name = "${projectName}"
memory_size_gb = 4
replica_count = 2
}
`;
const terraformDir = path.join(workspaceDir, 'terraform');
await fs.ensureDir(terraformDir);
await fs.writeFile(path.join(terraformDir, 'main.tf'), terraformMain);
// Kubernetes base Deployment manifest (emitted verbatim)
const k8sDeployment = `apiVersion: apps/v1
kind: Deployment
metadata:
name: ${projectName}
labels:
app: ${projectName}
version: v1
spec:
replicas: 3
selector:
matchLabels:
app: ${projectName}
template:
metadata:
labels:
app: ${projectName}
version: v1
annotations:
prometheus.io/scrape: "true"
prometheus.io/port: "9090"
spec:
serviceAccountName: ${projectName}
containers:
- name: ${projectName}
image: gcr.io/pame-ai-core/${projectName}:latest
ports:
- containerPort: 3000
name: http
- containerPort: 9090
name: metrics
env:
- name: NODE_ENV
valueFrom:
configMapKeyRef:
name: ${projectName}-config
key: environment
envFrom:
- secretRef:
name: ${projectName}-secrets
resources:
requests:
cpu: 100m
memory: 256Mi
limits:
cpu: 1000m
memory: 1Gi
livenessProbe:
httpGet:
path: /health
port: 3000
initialDelaySeconds: 30
periodSeconds: 10
readinessProbe:
httpGet:
path: /ready
port: 3000
initialDelaySeconds: 5
periodSeconds: 5
securityContext:
runAsNonRoot: true
runAsUser: 1001
allowPrivilegeEscalation: false
readOnlyRootFilesystem: true
capabilities:
drop:
- ALL
`;
const k8sBaseDir = path.join(workspaceDir, 'k8s/base');
await fs.ensureDir(k8sBaseDir);
await fs.writeFile(path.join(k8sBaseDir, 'deployment.yaml'), k8sDeployment);
}
/**
 * Initializes the workspace git repository: writes a core-team
 * .gitignore, installs a pre-commit hook (lint, type-check, tests,
 * naive secret scan) and creates an initial commit. With `--team`,
 * also creates `develop` and `staging` branches.
 *
 * Git command failures (e.g. missing user.name/email config) are
 * downgraded to a console warning so workspace generation still
 * completes.
 *
 * @param {string} workspaceDir - workspace root directory
 * @param {object} options - parsed CLI options (reads `template`, `team`)
 */
async function initializeCoreGitRepo(workspaceDir, options) {
    // Enhanced .gitignore for core team
    const gitignore = `# Dependencies
node_modules/
.pnp
.pnp.js
# Testing
coverage/
.nyc_output/
test-results/
# Production
dist/
build/
*.production
# Environment
.env
.env.*
!.env.example
vault-token
# IDEs
.vscode/
.idea/
*.swp
*.swo
# OS
.DS_Store
Thumbs.db
# Logs
logs/
*.log
npm-debug.log*
yarn-debug.log*
pnpm-debug.log*
# Runtime
pids/
*.pid
*.seed
*.pid.lock
# Cache
.npm/
.eslintcache
.stylelintcache
*.tsbuildinfo
# Terraform
.terraform/
*.tfstate
*.tfstate.*
*.tfvars
!terraform.tfvars.example
# Kubernetes
kubeconfig
*.key
*.crt
*.pem
# Docker
.dockerignore
docker-compose.override.yml
# PAME Core
.pame/
pame-debug.log
core-secrets/
`;
    await fs.writeFile(path.join(workspaceDir, '.gitignore'), gitignore);
    // Pre-commit hook.
    // BUGFIX: the status markers in the echo strings were mojibake-garbled
    // (and the final echo was split across two lines); restored to the
    // intended single-line ❌/✅ messages.
    const preCommitHook = `#!/bin/sh
# PAME.AI Core Team Pre-commit Hook
echo "Running pre-commit checks..."
# Run linter
npm run lint
if [ $? -ne 0 ]; then
echo "❌ Linting failed. Please fix errors before committing."
exit 1
fi
# Run type check
npm run type-check
if [ $? -ne 0 ]; then
echo "❌ Type checking failed. Please fix errors before committing."
exit 1
fi
# Run tests
npm test
if [ $? -ne 0 ]; then
echo "❌ Tests failed. Please fix failing tests before committing."
exit 1
fi
# Check for secrets
if git diff --cached --name-only | xargs grep -E "(password|secret|token|key)\\s*=\\s*[\"'][^\"']+[\"']" 2>/dev/null; then
echo "❌ Potential secrets detected in commit. Please review and remove."
exit 1
fi
echo "✅ All pre-commit checks passed!"
`;
    // The hook is written before `git init`; git init tolerates the
    // pre-existing .git/hooks directory and leaves the hook in place.
    const gitHooksDir = path.join(workspaceDir, '.git/hooks');
    await fs.ensureDir(gitHooksDir);
    await fs.writeFile(path.join(gitHooksDir, 'pre-commit'), preCommitHook);
    await fs.chmod(path.join(gitHooksDir, 'pre-commit'), '755');
    try {
        execSync('git init', { cwd: workspaceDir });
        execSync('git add .', { cwd: workspaceDir });
        execSync(`git commit -m "Initial commit: PAME.AI Core ${options.template} workspace"`, { cwd: workspaceDir });
        if (options.team) {
            // BUGFIX: the default branch name depends on the user's
            // init.defaultBranch config ('master' on older setups); detect
            // it instead of hard-coding 'main', which made the final
            // checkout fail and left the repo on 'staging'.
            const defaultBranch = execSync('git rev-parse --abbrev-ref HEAD', { cwd: workspaceDir }).toString().trim();
            // Set up git flow branches
            execSync('git checkout -b develop', { cwd: workspaceDir });
            execSync('git checkout -b staging', { cwd: workspaceDir });
            execSync(`git checkout ${defaultBranch}`, { cwd: workspaceDir });
        }
    }
    catch (error) {
        console.warn('Git initialization completed with warnings:', error);
    }
}
/**
 * Generates a 32-character secret using cryptographically secure
 * randomness.
 *
 * BUGFIX: the previous implementation sampled
 * `Math.random().toString(36).charAt(2)`, which (a) is not
 * cryptographically secure — unacceptable for the credentials this
 * feeds (see generateCoreEnvironmentConfig) — and (b) could append
 * empty strings when the random value's base-36 representation was
 * shorter than 3 characters, yielding secrets under 32 chars.
 *
 * @returns {string} 32 lowercase hex characters (16 random bytes)
 */
function generateSecret() {
    return randomBytes(16).toString('hex');
}
/**
 * Generates a service token: the `pame_service_` prefix followed by 48
 * characters of cryptographically secure randomness.
 *
 * BUGFIX: same issues as generateSecret — Math.random() is not suitable
 * for credentials and charAt(2) could yield empty characters, producing
 * under-length tokens.
 *
 * @returns {string} `pame_service_` + 48 lowercase hex characters
 */
function generateServiceToken() {
    return `pame_service_${randomBytes(24).toString('hex')}`;
}
//# sourceMappingURL=cursor.js.map