UNPKG

scf-deploy

Version:

S3 + CloudFront static deployment automation CLI

1,710 lines (1,672 loc) 89.8 kB
#!/usr/bin/env node // src/cli/cli.ts import { Command as Command6 } from "commander"; // src/cli/commands/deploy.ts import { Command } from "commander"; import chalk9 from "chalk"; // src/cli/utils/logger.ts import chalk from "chalk"; function info(message) { console.log(chalk.blue("\u2139"), message); } function success(message) { console.log(chalk.green("\u2713"), message); } function warn(message) { console.log(chalk.yellow("\u26A0"), message); } function error(message) { console.error(chalk.red("\u2717"), message); } function section(title) { console.log(); console.log(chalk.bold.cyan(`\u2501\u2501\u2501 ${title} \u2501\u2501\u2501`)); console.log(); } function keyValue(key, value) { console.log(chalk.gray(`${key}:`), chalk.white(value)); } // src/core/config/index.ts import chalk2 from "chalk"; // src/core/config/loader.ts import jiti from "jiti"; import { existsSync } from "fs"; import { resolve, join, dirname } from "path"; import { fileURLToPath } from "url"; var CONFIG_FILE_NAMES = [ "scf.config.ts", "scf.config.js", "scf.config.mjs", "scf.config.cjs" ]; function findConfigFile(startDir = process.cwd()) { let currentDir = resolve(startDir); const root = resolve("/"); while (currentDir !== root) { for (const fileName of CONFIG_FILE_NAMES) { const configPath = join(currentDir, fileName); if (existsSync(configPath)) { return configPath; } } const parentDir = dirname(currentDir); if (parentDir === currentDir) { break; } currentDir = parentDir; } return null; } async function loadConfigFile(configPath) { if (!existsSync(configPath)) { throw new Error(`Config file not found: ${configPath}`); } try { const currentFile = fileURLToPath(import.meta.url); const jitiInstance = jiti(currentFile, { interopDefault: true, requireCache: false, esmResolve: true, moduleCache: false }); const absoluteConfigPath = resolve(configPath); const configModule = jitiInstance(absoluteConfigPath); let config; if (typeof configModule === "function") { config = configModule(); } else if 
(configModule && typeof configModule === "object" && "default" in configModule) { const defaultExport = configModule.default; if (typeof defaultExport === "function") { config = defaultExport(); } else { config = defaultExport; } } else { config = configModule; } return config; } catch (error2) { if (error2 instanceof Error) { throw new Error( `Failed to load config file: ${configPath} ${error2.message}` ); } throw error2; } } async function discoverAndLoadConfig(startDir) { const configPath = findConfigFile(startDir); if (!configPath) { throw new Error( `Config file not found. Please create one of: ${CONFIG_FILE_NAMES.join( ", " )}` ); } const config = await loadConfigFile(configPath); return { config, configPath }; } // src/core/config/merger.ts function deepMerge(target, source) { const result = { ...target }; for (const key in source) { const sourceValue = source[key]; const targetValue = result[key]; if (sourceValue !== void 0 && sourceValue !== null && typeof sourceValue === "object" && !Array.isArray(sourceValue) && targetValue !== void 0 && targetValue !== null && typeof targetValue === "object" && !Array.isArray(targetValue)) { result[key] = deepMerge( targetValue, sourceValue ); } else if (sourceValue !== void 0) { result[key] = sourceValue; } } return result; } function mergeEnvironment(baseConfig, environment) { if (!environment || !baseConfig.environments) { const { environments: _environments2, ...configWithoutEnv } = baseConfig; return configWithoutEnv; } const envConfig = baseConfig.environments[environment]; if (!envConfig) { throw new Error( `Environment "${environment}" not found in config. 
Available environments: ${Object.keys(baseConfig.environments).join(", ")}` ); } const { environments: _environments, ...baseWithoutEnv } = baseConfig; const merged = deepMerge(baseWithoutEnv, envConfig); return merged; } function applyProfileOverride(config, profileOverride) { if (!profileOverride) { return config; } return { ...config, credentials: { ...config.credentials, profile: profileOverride } }; } // src/core/config/schema.ts import { z } from "zod"; var awsCredentialsSchema = z.object({ profile: z.string().optional(), accessKeyId: z.string().optional(), secretAccessKey: z.string().optional(), sessionToken: z.string().optional() }).optional(); var s3ConfigSchema = z.object({ bucketName: z.string().min(3, "Bucket name must be at least 3 characters").max(63, "Bucket name must be at most 63 characters").regex( /^[a-z0-9][a-z0-9.-]*[a-z0-9]$/, "Bucket name must follow S3 naming rules" ), buildDir: z.string().min(1, "Build directory cannot be empty").optional(), indexDocument: z.string().default("index.html"), errorDocument: z.string().optional(), websiteHosting: z.boolean().default(true), concurrency: z.number().int().min(1).max(100).default(10), gzip: z.boolean().default(true), exclude: z.array(z.string()).default([]) }).optional(); var errorPageSchema = z.object({ errorCode: z.number().int().min(400).max(599), responseCode: z.number().int().min(200).max(599).optional(), responsePath: z.string().optional(), cacheTTL: z.number().int().min(0).optional() }); var cacheWarmingSchema = z.object({ enabled: z.boolean(), paths: z.array(z.string()).default(["/"]), concurrency: z.number().int().min(1).max(10).default(3), // Reduced from 20 to 10, default from 5 to 3 delay: z.number().int().min(100).max(5e3).default(500) // Increased from 0-5000/100 to 100-5000/500 }); var cloudfrontConfigSchema = z.object({ enabled: z.boolean(), priceClass: z.enum(["PriceClass_100", "PriceClass_200", "PriceClass_All"]).optional().default("PriceClass_100"), customDomain: z.object({ 
domainName: z.string().min(1, "Domain name is required"), certificateArn: z.string().regex(/^arn:aws:acm:/, "Must be a valid ACM certificate ARN"), aliases: z.array(z.string()).optional() }).optional(), defaultTTL: z.number().int().min(0).default(86400), // 1 day maxTTL: z.number().int().min(0).default(31536e3), // 1 year minTTL: z.number().int().min(0).default(0), ipv6: z.boolean().default(true), errorPages: z.array(errorPageSchema).optional(), cacheWarming: cacheWarmingSchema.optional() }).optional(); var configSchema = z.object({ app: z.string().min(1, "App name is required").regex( /^[a-z0-9-]+$/, "App name must contain only lowercase letters, numbers, and hyphens" ), region: z.string().min(1, "AWS region is required").regex( /^[a-z]{2}-[a-z]+-\d+$/, "Must be a valid AWS region (e.g., us-east-1)" ), credentials: awsCredentialsSchema, s3: s3ConfigSchema, cloudfront: cloudfrontConfigSchema, environments: z.record(z.string(), z.any()).optional() }); var loadConfigOptionsSchema = z.object({ configPath: z.string().optional(), env: z.string().optional(), profile: z.string().optional() }); function validateConfig(config) { return configSchema.parse(config); } // src/core/config/utils.ts function validateRequiredFields(config) { const errors = []; if (!config.app) { errors.push("app name is required"); } if (!config.region) { errors.push("region is required"); } if (config.s3 && !config.s3.bucketName) { errors.push("s3.bucketName is required when s3 is configured"); } if (config.cloudfront?.enabled && config.cloudfront.customDomain && !config.cloudfront.customDomain.certificateArn) { errors.push( "cloudfront.customDomain.certificateArn is required when using custom domain" ); } if (errors.length > 0) { throw new Error( `Config validation failed: ${errors.map((e) => ` - ${e}`).join("\n")}` ); } } // src/core/config/index.ts async function loadConfig(options = {}) { const { configPath, env, profile } = options; try { let rawConfig; let resolvedConfigPath; if (configPath) 
{ rawConfig = await loadConfigFile(configPath); resolvedConfigPath = configPath; } else { const result = await discoverAndLoadConfig(); rawConfig = result.config; resolvedConfigPath = result.configPath; } const mergedConfig = mergeEnvironment(rawConfig, env); const configWithProfile = applyProfileOverride(mergedConfig, profile); const validatedConfig = validateConfig(configWithProfile); validateRequiredFields(validatedConfig); if (process.env.SCF_CLI_MODE === "true") { console.log(chalk2.gray(` Loaded config from: ${resolvedConfigPath}`)); if (env) { console.log(chalk2.gray(` Environment: ${env}`)); } } return validatedConfig; } catch (error2) { if (error2 instanceof Error) { throw new Error(`Failed to load configuration: ${error2.message}`); } throw error2; } } // src/core/aws/credentials.ts import { fromEnv, fromIni, fromInstanceMetadata, fromContainerMetadata } from "@aws-sdk/credential-providers"; async function getCredentials(config) { let credentialProvider; let source; let profile; if (config.credentials?.accessKeyId && config.credentials?.secretAccessKey) { const { accessKeyId, secretAccessKey, sessionToken } = config.credentials; credentialProvider = async () => ({ accessKeyId, secretAccessKey, sessionToken }); source = "config"; } else if (config.credentials?.profile) { profile = config.credentials.profile; credentialProvider = fromIni({ profile }); source = "profile"; } else if (process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY) { credentialProvider = fromEnv(); source = "environment"; } else { const defaultProfile = process.env.AWS_PROFILE || "default"; try { credentialProvider = fromIni({ profile: defaultProfile }); source = "profile"; profile = defaultProfile; } catch { try { credentialProvider = fromContainerMetadata(); source = "instance-metadata"; } catch { credentialProvider = fromInstanceMetadata(); source = "instance-metadata"; } } } try { const credentials = await credentialProvider(); return { credentials, source, profile }; } 
catch (error2) { throw new Error( `Failed to resolve AWS credentials. Please configure credentials using one of: 1. Config file (credentials.accessKeyId + secretAccessKey) 2. Config file (credentials.profile) 3. Environment variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) 4. AWS profile (~/.aws/credentials) 5. IAM role (EC2/ECS instance metadata) Original error: ${error2 instanceof Error ? error2.message : String(error2)}` ); } } function createCredentialProvider(config) { return async () => { const { credentials } = await getCredentials(config); return credentials; }; } // src/core/aws/verify.ts import { STSClient, GetCallerIdentityCommand } from "@aws-sdk/client-sts"; async function verifyCredentials(credentials, region) { const client = new STSClient({ region, credentials }); try { const command = new GetCallerIdentityCommand({}); const response = await client.send(command); if (!response.Account || !response.Arn || !response.UserId) { throw new Error("Invalid STS response: missing required fields"); } return { accountId: response.Account, arn: response.Arn, userId: response.UserId }; } catch (error2) { if (error2 instanceof Error) { if ("Code" in error2 || "$metadata" in error2) { throw new Error( `AWS credentials verification failed: ${error2.message} Please check your credentials and try again.` ); } throw error2; } throw new Error(`Unknown error during credentials verification: ${String(error2)}`); } } // src/core/aws/client.ts import { S3Client } from "@aws-sdk/client-s3"; import { CloudFrontClient } from "@aws-sdk/client-cloudfront"; import { STSClient as STSClient2 } from "@aws-sdk/client-sts"; function createS3Client(config) { return new S3Client({ region: config.region, credentials: createCredentialProvider(config) }); } function createCloudFrontClient(config) { return new CloudFrontClient({ region: "us-east-1", // CloudFront API is only in us-east-1 credentials: createCredentialProvider(config) }); } // src/core/aws/s3-bucket.ts import { 
HeadBucketCommand, CreateBucketCommand, PutBucketWebsiteCommand, PutBucketPolicyCommand, DeletePublicAccessBlockCommand, PutBucketTaggingCommand, GetBucketTaggingCommand } from "@aws-sdk/client-s3"; async function bucketExists(client, bucketName) { try { await client.send(new HeadBucketCommand({ Bucket: bucketName })); return true; } catch (error2) { if (error2 && typeof error2 === "object" && ("name" in error2 && error2.name === "NotFound" || "$metadata" in error2 && typeof error2.$metadata === "object" && error2.$metadata !== null && "httpStatusCode" in error2.$metadata && error2.$metadata.httpStatusCode === 404)) { return false; } throw error2; } } async function createBucket(client, bucketName, region) { try { const command = new CreateBucketCommand({ Bucket: bucketName, // CreateBucketConfiguration is only required for regions other than us-east-1 ...region !== "us-east-1" && { CreateBucketConfiguration: { LocationConstraint: region } } }); await client.send(command); } catch (error2) { if (error2 && typeof error2 === "object" && "name" in error2 && error2.name === "BucketAlreadyOwnedByYou") { return; } throw error2; } } async function configureBucketWebsite(client, bucketName, indexDocument = "index.html", errorDocument) { const command = new PutBucketWebsiteCommand({ Bucket: bucketName, WebsiteConfiguration: { IndexDocument: { Suffix: indexDocument }, ...errorDocument && { ErrorDocument: { Key: errorDocument } } } }); await client.send(command); } async function setBucketPublicReadPolicy(client, bucketName) { try { await client.send( new DeletePublicAccessBlockCommand({ Bucket: bucketName }) ); } catch (_error) { } const policy = { Version: "2012-10-17", Statement: [ { Sid: "PublicReadGetObject", Effect: "Allow", Principal: "*", Action: "s3:GetObject", Resource: `arn:aws:s3:::${bucketName}/*` } ] }; const command = new PutBucketPolicyCommand({ Bucket: bucketName, Policy: JSON.stringify(policy) }); await client.send(command); } async function 
ensureBucket(client, bucketName, region, options = {}) { const { websiteHosting = true, indexDocument = "index.html", errorDocument, publicRead = true } = options; const exists = await bucketExists(client, bucketName); if (!exists) { await createBucket(client, bucketName, region); } if (websiteHosting) { await configureBucketWebsite(client, bucketName, indexDocument, errorDocument); } if (publicRead) { await setBucketPublicReadPolicy(client, bucketName); } } async function tagBucketForRecovery(client, bucketName, app, environment) { try { await client.send( new PutBucketTaggingCommand({ Bucket: bucketName, Tagging: { TagSet: [ { Key: "scf:managed", Value: "true" }, { Key: "scf:app", Value: app }, { Key: "scf:environment", Value: environment }, { Key: "scf:tool", Value: "scf-deploy" } ] } }) ); } catch (error2) { console.warn("Warning: Failed to tag S3 bucket for recovery"); } } async function getBucketTags(client, bucketName) { try { const result = await client.send( new GetBucketTaggingCommand({ Bucket: bucketName }) ); const tags = {}; if (result.TagSet) { for (const tag of result.TagSet) { if (tag.Key && tag.Value) { tags[tag.Key] = tag.Value; } } } return tags; } catch { return {}; } } function getBucketWebsiteUrl(bucketName, region) { if (region === "us-east-1") { return `http://${bucketName}.s3-website-us-east-1.amazonaws.com`; } return `http://${bucketName}.s3-website.${region}.amazonaws.com`; } // src/core/aws/s3-deployer.ts import chalk4 from "chalk"; import ora from "ora"; import cliProgress from "cli-progress"; // src/core/deployer/file-scanner.ts import glob from "fast-glob"; import { hashFile } from "hasha"; import { lookup as getMimeType } from "mime-types"; import { stat } from "fs/promises"; import { join as join2, sep } from "path"; var GZIPPABLE_EXTENSIONS = /* @__PURE__ */ new Set([ ".html", ".htm", ".css", ".js", ".mjs", ".json", ".xml", ".svg", ".txt", ".md", ".csv", ".ts", ".tsx", ".jsx" ]); function shouldGzipFile(filePath) { const ext = 
filePath.substring(filePath.lastIndexOf(".")).toLowerCase(); return GZIPPABLE_EXTENSIONS.has(ext); } function getContentType(filePath) { const mimeType = getMimeType(filePath); return mimeType || "application/octet-stream"; } function pathToS3Key(relativePath) { return relativePath.split(sep).join("/"); } async function scanFiles(options) { const { buildDir, exclude = [], followSymlinks = false } = options; const patterns = ["**/*"]; const files = await glob(patterns, { cwd: buildDir, absolute: false, ignore: exclude, onlyFiles: true, followSymbolicLinks: followSymlinks, dot: true // Include dotfiles }); const fileInfos = []; for (const file of files) { const absolutePath = join2(buildDir, file); const relativePath = file; const stats = await stat(absolutePath); const hash = await hashFile(absolutePath, { algorithm: "sha256" }); const contentType = getContentType(absolutePath); const shouldGzip = shouldGzipFile(absolutePath); const key = pathToS3Key(relativePath); fileInfos.push({ absolutePath, relativePath, key, size: stats.size, hash, contentType, shouldGzip }); } return fileInfos; } // src/core/deployer/build-detector.ts import { existsSync as existsSync2, statSync, readdirSync } from "fs"; import { join as join3 } from "path"; import chalk3 from "chalk"; var BUILD_DIR_CANDIDATES = [ "dist", // Vite, Rollup, etc. "build", // Create React App, Next.js, etc. 
"out", // Next.js static export ".output/public", // Nuxt 3 "_site", // Jekyll, 11ty "output" // Some SSGs ]; var SSR_BUILD_DIRS = [".next", ".nuxt"]; function hasIndexHtml(dirPath) { try { const indexPath = join3(dirPath, "index.html"); return existsSync2(indexPath) && statSync(indexPath).isFile(); } catch { return false; } } function hasDeployableFiles(dirPath) { try { const entries = readdirSync(dirPath, { withFileTypes: true }); if (!hasIndexHtml(dirPath)) { return false; } const hasFiles = entries.some((entry) => entry.isFile()); if (!hasFiles) { return false; } const webFileExtensions = [".html", ".js", ".css", ".json"]; const hasWebFiles = entries.some((entry) => { if (entry.isFile()) { return webFileExtensions.some((ext) => entry.name.endsWith(ext)); } return false; }); return hasWebFiles; } catch { return false; } } function detectBuildDirectory(cwd = process.cwd()) { for (const candidate of BUILD_DIR_CANDIDATES) { const candidatePath = join3(cwd, candidate); if (existsSync2(candidatePath)) { const stats = statSync(candidatePath); if (stats.isDirectory()) { if (hasDeployableFiles(candidatePath)) { return candidate; } } } } return null; } function isSSRBuildDir(buildDir) { const normalizedPath = buildDir.replace(/^\.\//, ""); return SSR_BUILD_DIRS.some((ssrDir) => normalizedPath === ssrDir); } function validateBuildDirectory(buildDir, cwd = process.cwd()) { const absolutePath = join3(cwd, buildDir); if (isSSRBuildDir(buildDir)) { throw new Error( `Cannot deploy ${chalk3.cyan(buildDir)} directory to S3/CloudFront. ${chalk3.yellow("\u26A0 This is a server-side rendering (SSR) build directory.")} S3/CloudFront only supports static files. To deploy with Next.js: 1. Add ${chalk3.cyan("output: 'export'")} to ${chalk3.cyan("next.config.ts")}: ` + chalk3.gray(` const nextConfig = { `) + chalk3.gray(` output: 'export', `) + chalk3.gray(` images: { unoptimized: true }, `) + chalk3.gray(` }; `) + ` 2. 
Rebuild your project: ${chalk3.cyan("npm run build")} This will create an ${chalk3.cyan("out")} directory with static files.` ); } if (!existsSync2(absolutePath)) { throw new Error( `Build directory not found: ${chalk3.cyan(buildDir)} Please build your project first or specify the correct build directory.` ); } const stats = statSync(absolutePath); if (!stats.isDirectory()) { throw new Error(`Build path is not a directory: ${chalk3.cyan(buildDir)}`); } if (!hasDeployableFiles(absolutePath)) { const hasIndex = hasIndexHtml(absolutePath); if (!hasIndex) { throw new Error( `Build directory does not contain ${chalk3.cyan("index.html")}: ${chalk3.cyan(buildDir)} This directory cannot be deployed as a static website. Please build your project first: ${chalk3.cyan("npm run build")}` ); } else { throw new Error( `Build directory does not contain valid web files: ${chalk3.cyan(buildDir)} Please build your project first before deploying.` ); } } } function getBuildDirectory(providedBuildDir, cwd = process.cwd()) { if (providedBuildDir) { validateBuildDirectory(providedBuildDir, cwd); return providedBuildDir; } const detectedDir = detectBuildDirectory(cwd); if (!detectedDir) { throw new Error( `No build directory found. 
Searched for: ${BUILD_DIR_CANDIDATES.map((d) => chalk3.cyan(d)).join( ", " )} Please build your project first: ${chalk3.gray("# For Vite/Rollup projects")} ${chalk3.cyan("npm run build")} ${chalk3.gray("# For Next.js projects")} ${chalk3.cyan("npm run build")} Or specify a custom build directory in ${chalk3.cyan( "scf.config.ts" )}: ${chalk3.gray("s3: { buildDir: './your-build-dir' }")}` ); } return detectedDir; } // src/core/deployer/s3-uploader.ts import { PutObjectCommand } from "@aws-sdk/client-s3"; import { Upload } from "@aws-sdk/lib-storage"; import { createReadStream, readFileSync } from "fs"; import { gzip } from "zlib"; import { promisify } from "util"; import pLimit from "p-limit"; var gzipAsync = promisify(gzip); async function gzipFile(filePath) { const content = readFileSync(filePath); return gzipAsync(content); } async function uploadFile(client, bucketName, file, options = {}) { const { gzip: enableGzip = true, dryRun = false } = options; const startTime = Date.now(); try { if (dryRun) { return { file, success: true, status: "uploaded", duration: Date.now() - startTime }; } const params = { Bucket: bucketName, Key: file.key, ContentType: file.contentType }; if (enableGzip && file.shouldGzip) { const compressed = await gzipFile(file.absolutePath); params.Body = compressed; params.ContentEncoding = "gzip"; } else { params.Body = createReadStream(file.absolutePath); } if (file.size > 5 * 1024 * 1024) { const upload = new Upload({ client, params }); await upload.done(); } else { const command = new PutObjectCommand(params); await client.send(command); } return { file, success: true, status: "uploaded", duration: Date.now() - startTime }; } catch (error2) { return { file, success: false, status: "failed", error: error2 instanceof Error ? 
error2.message : String(error2), duration: Date.now() - startTime }; } } async function uploadFiles(client, bucketName, files, options = {}, onProgress) { const { concurrency = 10 } = options; const limit = pLimit(concurrency); const results = []; let completed = 0; const uploadPromises = files.map( (file) => limit(async () => { const result = await uploadFile(client, bucketName, file, options); results.push(result); completed++; if (onProgress) { onProgress(completed, files.length, file); } return result; }) ); await Promise.all(uploadPromises); return results; } function calculateTotalSize(files) { return files.reduce((sum, file) => sum + file.size, 0); } function formatBytes(bytes) { if (bytes === 0) return "0 Bytes"; const k = 1024; const sizes = ["Bytes", "KB", "MB", "GB"]; const i = Math.floor(Math.log(bytes) / Math.log(k)); return Math.round(bytes / Math.pow(k, i) * 100) / 100 + " " + sizes[i]; } // src/core/state/manager.ts import fs from "fs"; import path from "path"; var DEFAULT_STATE_DIR = ".deploy"; var STATE_VERSION = "1.0.0"; function getStateFilePath(options = {}) { const { stateDir = DEFAULT_STATE_DIR, environment = "default" } = options; const fileName = environment === "default" ? 
"state.json" : `state.${environment}.json`; return path.join(process.cwd(), stateDir, fileName); } function stateExists(options = {}) { const filePath = getStateFilePath(options); return fs.existsSync(filePath); } function loadState(options = {}) { const filePath = getStateFilePath(options); if (!fs.existsSync(filePath)) { return null; } try { const content = fs.readFileSync(filePath, "utf-8"); const state = JSON.parse(content); if (!state.app || !state.environment || !state.resources) { throw new Error("Invalid state file structure"); } return state; } catch (error2) { throw new Error(`Failed to load state file: ${error2.message}`); } } function saveState(state, options = {}) { const filePath = getStateFilePath(options); const stateDir = path.dirname(filePath); if (!fs.existsSync(stateDir)) { fs.mkdirSync(stateDir, { recursive: true }); } try { if (!state.version) { state.version = STATE_VERSION; } state.lastDeployed = (/* @__PURE__ */ new Date()).toISOString(); const content = JSON.stringify(state, null, 2); fs.writeFileSync(filePath, content, "utf-8"); } catch (error2) { throw new Error(`Failed to save state file: ${error2.message}`); } } function deleteState(options = {}) { const filePath = getStateFilePath(options); if (!fs.existsSync(filePath)) { return false; } try { fs.unlinkSync(filePath); const stateDir = path.dirname(filePath); const files = fs.readdirSync(stateDir); if (files.length === 0) { fs.rmdirSync(stateDir); } return true; } catch (error2) { throw new Error(`Failed to delete state file: ${error2.message}`); } } function initializeState(app, environment = "default") { return { app, environment, lastDeployed: (/* @__PURE__ */ new Date()).toISOString(), resources: {}, files: {}, version: STATE_VERSION }; } function getOrCreateState(app, options = {}) { const { environment = "default" } = options; const existingState = loadState(options); if (existingState) { return existingState; } return initializeState(app, environment); } function 
listStateFiles(stateDir = DEFAULT_STATE_DIR) { const stateDirPath = path.join(process.cwd(), stateDir); if (!fs.existsSync(stateDirPath)) { return []; } try { const files = fs.readdirSync(stateDirPath); return files.filter( (file) => file.startsWith("state") && file.endsWith(".json") ); } catch (error2) { console.error(`Failed to list state files: ${error2}`); return []; } } // src/core/state/file-state.ts function compareFileHashes(currentFiles, previousHashes) { const added = []; const modified = []; const unchanged = []; const deleted = []; const currentPaths = /* @__PURE__ */ new Set(); for (const file of currentFiles) { const { key, hash } = file; currentPaths.add(key); const previousHash = previousHashes[key]; if (!previousHash) { added.push({ path: key, hash, status: "added" }); } else if (previousHash !== hash) { modified.push({ path: key, hash, status: "modified", previousHash }); } else { unchanged.push({ path: key, hash, status: "unchanged", previousHash }); } } for (const [path3, hash] of Object.entries(previousHashes)) { if (!currentPaths.has(path3)) { deleted.push({ path: path3, hash, status: "deleted", previousHash: hash }); } } return { added, modified, unchanged, deleted, totalChanges: added.length + modified.length + deleted.length }; } function getFilesToUpload(currentFiles, previousHashes) { const changes = compareFileHashes(currentFiles, previousHashes); const pathsToUpload = /* @__PURE__ */ new Set([ ...changes.added.map((f) => f.path), ...changes.modified.map((f) => f.path) ]); return currentFiles.filter((file) => pathsToUpload.has(file.key)); } function updateFileHashes(state, files) { const newHashes = {}; for (const file of files) { newHashes[file.key] = file.hash; } return { ...state, files: newHashes }; } function getFileCount(state) { return Object.keys(state.files).length; } function formatFileChanges(changes) { const lines = []; if (changes.added.length > 0) { lines.push(`\u2713 Added: ${changes.added.length} files`); } if 
(changes.modified.length > 0) { lines.push(`\u2713 Modified: ${changes.modified.length} files`); } if (changes.deleted.length > 0) { lines.push(`\u2713 Deleted: ${changes.deleted.length} files`); } if (changes.unchanged.length > 0) { lines.push(`\u25CB Unchanged: ${changes.unchanged.length} files`); } return lines.join("\n"); } // src/core/state/resource-state.ts function updateS3Resource(state, resource) { return { ...state, resources: { ...state.resources, s3: resource } }; } function updateCloudFrontResource(state, resource) { return { ...state, resources: { ...state.resources, cloudfront: resource } }; } function getS3Resource(state) { return state.resources.s3; } function getCloudFrontResource(state) { return state.resources.cloudfront; } function getResourceSummary(state) { const s3 = getS3Resource(state); const cloudfront = getCloudFrontResource(state); return { hasS3: !!s3, hasCloudFront: !!cloudfront, s3BucketName: s3?.bucketName, s3Region: s3?.region, distributionId: cloudfront?.distributionId, distributionUrl: cloudfront?.distributionUrl }; } function formatResourceSummary(state) { const summary = getResourceSummary(state); const lines = []; lines.push(`App: ${state.app}`); lines.push(`Environment: ${state.environment}`); lines.push(`Last Deployed: ${new Date(state.lastDeployed).toLocaleString()}`); lines.push(""); if (summary.hasS3) { lines.push("S3 Bucket:"); lines.push(` Name: ${summary.s3BucketName}`); lines.push(` Region: ${summary.s3Region}`); const s3 = getS3Resource(state); if (s3?.websiteUrl) { lines.push(` URL: ${s3.websiteUrl}`); } lines.push(""); } if (summary.hasCloudFront) { lines.push("CloudFront Distribution:"); lines.push(` ID: ${summary.distributionId}`); lines.push(` URL: ${summary.distributionUrl}`); const cf = getCloudFrontResource(state); if (cf?.aliases && cf.aliases.length > 0) { lines.push(` Aliases: ${cf.aliases.join(", ")}`); } lines.push(""); } const fileCount = Object.keys(state.files).length; lines.push(`Files: ${fileCount} 
tracked`); return lines.join("\n"); } function getResourceIdentifiers(state) { const s3 = getS3Resource(state); const cloudfront = getCloudFrontResource(state); return { s3BucketName: s3?.bucketName, s3Region: s3?.region, distributionId: cloudfront?.distributionId }; } // src/core/aws/s3-deployer.ts async function deployToS3(config, options = {}) { const startTime = Date.now(); if (!config.s3) { throw new Error("S3 configuration is required"); } const { bucketName, buildDir: providedBuildDir, indexDocument = "index.html", errorDocument, websiteHosting = true, gzip: gzip2 = true, concurrency = 10, exclude = [] } = config.s3; const { showProgress = true, dryRun = false, environment = "default", useIncrementalDeploy = true, forceFullDeploy = false, saveState: shouldSaveState = true } = options; let spinner = null; if (showProgress) { spinner = ora("Detecting build directory...").start(); } const buildDir = getBuildDirectory(providedBuildDir); if (spinner) { spinner.succeed(`Build directory detected: ${chalk4.cyan(buildDir)}`); } if (showProgress) { spinner = ora("Scanning files...").start(); } const files = await scanFiles({ buildDir, exclude }); const totalSize = calculateTotalSize(files); if (spinner) { spinner.succeed( `Found ${chalk4.cyan(files.length)} files (${chalk4.cyan( formatBytes(totalSize) )})` ); } if (files.length === 0) { throw new Error( `No files found in build directory: ${chalk4.cyan(buildDir)} Please build your project first: ${chalk4.cyan("npm run build")} ${chalk4.cyan("yarn build")} ${chalk4.cyan("pnpm build")}` ); } const s3Client = createS3Client(config); if (showProgress) { spinner = ora("Checking S3 bucket...").start(); } try { await ensureBucket(s3Client, bucketName, config.region, { websiteHosting, indexDocument, errorDocument, publicRead: true }); await tagBucketForRecovery(s3Client, bucketName, config.app, environment); if (spinner) { spinner.succeed(`S3 bucket ready: ${chalk4.cyan(bucketName)}`); } } catch (error2) { if (spinner) { 
spinner.fail("Failed to setup S3 bucket"); } throw error2; } let state = loadState({ environment }); let filesToUpload = files; if (useIncrementalDeploy && !forceFullDeploy && state) { if (showProgress) { spinner = ora("Analyzing file changes...").start(); } const changes = compareFileHashes(files, state.files); if (spinner) { spinner.succeed("File changes analyzed"); } console.log(); console.log(formatFileChanges(changes)); console.log(); if (changes.totalChanges === 0) { console.log( chalk4.green("\u2728 No changes detected. Deployment not needed.") ); return { totalFiles: files.length, uploaded: 0, skipped: files.length, failed: 0, totalSize, compressedSize: totalSize, duration: Date.now() - startTime, results: files.map((file) => ({ file, success: true, status: "skipped" })) }; } filesToUpload = getFilesToUpload(files, state.files); console.log( chalk4.blue( `\u{1F4E4} Uploading ${chalk4.cyan(filesToUpload.length)} changed files... ` ) ); } else { if (forceFullDeploy && showProgress) { console.log(chalk4.yellow("\u26A0 Force full deployment enabled\n")); } console.log(chalk4.blue("\n\u{1F4E4} Uploading files...\n")); } let progressBar = null; if (showProgress && !dryRun) { progressBar = new cliProgress.SingleBar( { format: "Progress |" + chalk4.cyan("{bar}") + "| {percentage}% | {value}/{total} files | {current}", barCompleteChar: "\u2588", barIncompleteChar: "\u2591", hideCursor: true }, cliProgress.Presets.shades_classic ); progressBar.start(files.length, 0, { current: "" }); } const uploadResults = await uploadFiles( s3Client, bucketName, filesToUpload, { gzip: gzip2, concurrency, dryRun }, (completed, _total, currentFile) => { if (progressBar) { progressBar.update(completed, { current: currentFile.relativePath }); } } ); if (progressBar) { progressBar.stop(); } const uploaded = uploadResults.filter((r) => r.status === "uploaded").length; const skipped = uploadResults.filter((r) => r.status === "skipped").length; const failed = uploadResults.filter((r) => 
r.status === "failed").length;
  const uploadedFiles = uploadResults.filter((r) => r.success).map((r) => r.file);
  // NOTE(review): this sums the sizes of successfully handled files only; on
  // incremental deploys it is not the compressed size of the whole site, so
  // the "reduction" percentage below can mislead -- verify against
  // calculateTotalSize's semantics.
  const compressedSize = calculateTotalSize(uploadedFiles);
  console.log();
  if (uploaded > 0) {
    console.log(chalk4.green(`\u2713 Uploaded: ${uploaded} files`));
  }
  if (skipped > 0) {
    console.log(chalk4.gray(`\u25CB Skipped: ${skipped} files (unchanged)`));
  }
  if (failed > 0) {
    console.log(chalk4.red(`\u2717 Failed: ${failed} files`));
    // List each failed file with its error message.
    uploadResults.filter((r) => !r.success).forEach((r) => {
      console.log(chalk4.red(` - ${r.file.relativePath}: ${r.error}`));
    });
  }
  console.log();
  console.log(chalk4.gray(`Total size: ${formatBytes(totalSize)}`));
  if (gzip2) {
    const savings = totalSize - compressedSize;
    const savingsPercent = Math.round(savings / totalSize * 100);
    console.log(
      chalk4.gray(
        `Compressed: ${formatBytes(
          compressedSize
        )} (${savingsPercent}% reduction)`
      )
    );
  }
  const duration = Date.now() - startTime;
  console.log(chalk4.gray(`Duration: ${(duration / 1e3).toFixed(2)}s`));
  // Website URL is only meaningful when static website hosting is enabled.
  const websiteUrl = websiteHosting ? getBucketWebsiteUrl(bucketName, config.region) : void 0;
  if (websiteHosting && !dryRun) {
    console.log();
    console.log(chalk4.green("\u{1F310} Website URL:"), chalk4.cyan(websiteUrl));
  }
  // Persist the new file hashes so the next deploy can be incremental.
  // Skipped for dry runs and when nothing was actually uploaded.
  if (shouldSaveState && !dryRun && uploaded > 0) {
    if (!state) {
      state = getOrCreateState(config.app, { environment });
    }
    state = updateS3Resource(state, { bucketName, region: config.region, websiteUrl });
    state = updateFileHashes(state, files);
    try {
      saveState(state, { environment });
      if (showProgress) {
        console.log();
        console.log(
          chalk4.gray(
            `\u2713 State saved (.deploy/state${environment !== "default" ?
`.${environment}` : ""}.json)`
          )
        );
      }
    } catch (error2) {
      // State-save failures are non-fatal: the deploy itself already succeeded.
      console.log();
      console.log(chalk4.yellow(`\u26A0 Failed to save state: ${error2.message}`));
    }
  }
  return { totalFiles: files.length, uploaded, skipped, failed, totalSize, compressedSize, duration, results: uploadResults };
}

// src/core/aws/cloudfront-distribution.ts
import {
  GetDistributionCommand,
  CreateDistributionCommand,
  UpdateDistributionCommand,
  GetDistributionConfigCommand,
  waitUntilDistributionDeployed
} from "@aws-sdk/client-cloudfront";
// Returns true when the distribution exists; treats a NoSuchDistribution
// error (or any HTTP 404) as "does not exist" and rethrows anything else.
async function distributionExists(client, distributionId) {
  try {
    await client.send(
      new GetDistributionCommand({ Id: distributionId })
    );
    return true;
  } catch (error2) {
    // Defensive not-found detection: check both the error name and the
    // SDK response metadata status code.
    if (error2 && typeof error2 === "object" && ("name" in error2 && error2.name === "NoSuchDistribution" || "$metadata" in error2 && typeof error2.$metadata === "object" && error2.$metadata !== null && "httpStatusCode" in error2.$metadata && error2.$metadata.httpStatusCode === 404)) {
      return false;
    }
    throw error2;
  }
}
// Fetches a distribution by id, or returns null when it does not exist
// (same not-found detection as distributionExists above).
async function getDistribution(client, distributionId) {
  try {
    const response = await client.send(
      new GetDistributionCommand({ Id: distributionId })
    );
    return response.Distribution || null;
  } catch (error2) {
    if (error2 && typeof error2 === "object" && ("name" in error2 && error2.name === "NoSuchDistribution" || "$metadata" in error2 && typeof error2.$metadata === "object" && error2.$metadata !== null && "httpStatusCode" in error2.$metadata && error2.$metadata.httpStatusCode === 404)) {
      return null;
    }
    throw error2;
  }
}
// Builds the S3 *website* endpoint hostname used as the CloudFront origin.
// NOTE(review): only us-east-1 gets the dashed `s3-website-<region>` form,
// but several older regions (e.g. us-west-2, ap-southeast-1) also use the
// dashed form; the dotted fallback may not resolve there -- verify against
// the AWS S3 website-endpoint table.
function getS3OriginDomain(bucketName, region) {
  if (region === "us-east-1") {
    return `${bucketName}.s3-website-us-east-1.amazonaws.com`;
  }
  return `${bucketName}.s3-website.${region}.amazonaws.com`;
}
// Creates a new CloudFront distribution in front of the S3 website endpoint:
// GET/HEAD only, HTTPS redirect, compression enabled, optional custom domain
// with an ACM certificate. Returns the created Distribution.
async function createDistribution(client, options) {
  const {
    s3BucketName,
    s3Region,
    indexDocument = "index.html",
    customDomain,
    priceClass = "PriceClass_100",
    defaultTTL = 86400,
    // 1 day
    maxTTL = 31536e3,
    // 1 year
    minTTL = 0,
    ipv6 = true
  } = options;
  const originDomain =
getS3OriginDomain(s3BucketName, s3Region);
  // CallerReference must be unique per create request; a timestamp suffices.
  const callerReference = `scf-${Date.now()}`;
  const distributionConfig = {
    CallerReference: callerReference,
    Comment: `Created by SCF for ${s3BucketName}`,
    Enabled: true,
    DefaultRootObject: indexDocument,
    Origins: {
      Quantity: 1,
      Items: [
        {
          Id: `S3-${s3BucketName}`,
          DomainName: originDomain,
          // S3 website endpoints only speak HTTP, hence "http-only" to origin.
          CustomOriginConfig: { HTTPPort: 80, HTTPSPort: 443, OriginProtocolPolicy: "http-only", OriginSslProtocols: { Quantity: 1, Items: ["TLSv1.2"] } }
        }
      ]
    },
    DefaultCacheBehavior: {
      TargetOriginId: `S3-${s3BucketName}`,
      ViewerProtocolPolicy: "redirect-to-https",
      AllowedMethods: { Quantity: 2, Items: ["GET", "HEAD"], CachedMethods: { Quantity: 2, Items: ["GET", "HEAD"] } },
      Compress: true,
      ForwardedValues: { QueryString: false, Cookies: { Forward: "none" }, Headers: { Quantity: 0 } },
      MinTTL: minTTL,
      DefaultTTL: defaultTTL,
      MaxTTL: maxTTL,
      TrustedSigners: { Enabled: false, Quantity: 0 }
    },
    PriceClass: priceClass,
    IsIPV6Enabled: ipv6
  };
  if (customDomain) {
    // NOTE(review): if customDomain.aliases is an empty array, Quantity becomes
    // 1 (0 || 1) while Items stays [] (empty arrays are truthy), a mismatch
    // CloudFront rejects -- confirm callers never pass aliases: [].
    distributionConfig.Aliases = { Quantity: customDomain.aliases?.length || 1, Items: customDomain.aliases || [customDomain.domainName] };
    distributionConfig.ViewerCertificate = { ACMCertificateArn: customDomain.certificateArn, SSLSupportMethod: "sni-only", MinimumProtocolVersion: "TLSv1.2_2021" };
  } else {
    // No custom domain: serve from *.cloudfront.net with the default cert.
    distributionConfig.ViewerCertificate = { CloudFrontDefaultCertificate: true };
  }
  const command = { DistributionConfig: distributionConfig };
  const response = await client.send(new CreateDistributionCommand(command));
  if (!response.Distribution) {
    throw new Error("Failed to create distribution: No distribution returned");
  }
  return response.Distribution;
}
// Applies partial updates to an existing distribution using CloudFront's
// read-modify-write cycle: fetch current config + ETag, mutate, then update
// with IfMatch so concurrent modifications are detected.
async function updateDistribution(client, distributionId, updates) {
  const configResponse = await client.send(
    new GetDistributionConfigCommand({ Id: distributionId })
  );
  const currentConfig = configResponse.DistributionConfig;
  const etag = configResponse.ETag;
  if (!currentConfig || !etag) {
    throw new Error("Failed to get distribution configuration");
  }
  if (updates.priceClass) {
    currentConfig.PriceClass = updates.priceClass;
  }
  if (!currentConfig.DefaultCacheBehavior) {
    throw new Error("Distribution configuration missing DefaultCacheBehavior");
  }
  // TTL/ipv6 use explicit undefined checks so 0 and false are honored.
  if (updates.defaultTTL !== void 0) {
    currentConfig.DefaultCacheBehavior.DefaultTTL = updates.defaultTTL;
  }
  if (updates.maxTTL !== void 0) {
    currentConfig.DefaultCacheBehavior.MaxTTL = updates.maxTTL;
  }
  if (updates.minTTL !== void 0) {
    currentConfig.DefaultCacheBehavior.MinTTL = updates.minTTL;
  }
  if (updates.ipv6 !== void 0) {
    currentConfig.IsIPV6Enabled = updates.ipv6;
  }
  if (updates.customDomain) {
    // NOTE(review): same empty-aliases Quantity/Items mismatch as in
    // createDistribution above -- confirm callers never pass aliases: [].
    currentConfig.Aliases = { Quantity: updates.customDomain.aliases?.length || 1, Items: updates.customDomain.aliases || [updates.customDomain.domainName] };
    currentConfig.ViewerCertificate = { ACMCertificateArn: updates.customDomain.certificateArn, SSLSupportMethod: "sni-only", MinimumProtocolVersion: "TLSv1.2_2021" };
  }
  const command = { Id: distributionId, DistributionConfig: currentConfig, IfMatch: etag };
  const response = await client.send(new UpdateDistributionCommand(command));
  if (!response.Distribution) {
    throw new Error("Failed to update distribution: No distribution returned");
  }
  return response.Distribution;
}
// Blocks until CloudFront reports the distribution fully deployed.
// Defaults: wait up to 1200s total, polling every 20-60s.
async function waitForDistributionDeployed(client, distributionId, options = {}) {
  const { maxWaitTime = 1200, minDelay = 20, maxDelay = 60 } = options;
  await waitUntilDistributionDeployed(
    { client, maxWaitTime, minDelay, maxDelay },
    { Id: distributionId }
  );
}
// Returns the distribution's domain name (e.g. dxxxx.cloudfront.net), or "".
function getDistributionDomainName(distribution) {
  return distribution.DomainName || "";
}
// Returns the https:// URL for the distribution, or "" when it has no domain.
function getDistributionUrl(distribution) {
  const domainName = getDistributionDomainName(distribution);
  return domainName ?
`https://${domainName}` : "";
}

// src/core/aws/cloudfront-invalidation.ts
import {
  CreateInvalidationCommand,
  GetInvalidationCommand,
  waitUntilInvalidationCompleted
} from "@aws-sdk/client-cloudfront";
// Submits a cache invalidation for the given paths. CallerReference defaults
// to a timestamp so repeated calls are treated as distinct requests.
async function createInvalidation(client, distributionId, options) {
  const { paths, callerReference = `scf-${Date.now()}` } = options;
  const invalidationBatch = { Paths: { Quantity: paths.length, Items: paths }, CallerReference: callerReference };
  const response = await client.send(
    new CreateInvalidationCommand({ DistributionId: distributionId, InvalidationBatch: invalidationBatch })
  );
  if (!response.Invalidation) {
    throw new Error("Failed to create invalidation: No invalidation returned");
  }
  return response.Invalidation;
}
// Blocks until the invalidation completes (default: up to 600s, 20-60s polls).
async function waitForInvalidationCompleted(client, distributionId, invalidationId, options = {}) {
  const { maxWaitTime = 600, minDelay = 20, maxDelay = 60 } = options;
  await waitUntilInvalidationCompleted(
    { client, maxWaitTime, minDelay, maxDelay },
    { DistributionId: distributionId, Id: invalidationId }
  );
}
// Creates an invalidation for `paths` and, unless options.wait === false,
// blocks until CloudFront reports it complete. Returns the Invalidation.
async function invalidateCache(client, distributionId, paths, options = {}) {
  const { wait = true, maxWaitTime, minDelay, maxDelay } = options;
  const invalidation = await createInvalidation(client, distributionId, { paths });
  if (wait && invalidation.Id) {
    await waitForInvalidationCompleted(client, distributionId, invalidation.Id, { maxWaitTime, minDelay, maxDelay });
  }
  return invalidation;
}
// Convenience wrapper: invalidate every cached object ("/*").
async function invalidateAll(client, distributionId, options = {}) {
  return invalidateCache(client, distributionId, ["/*"], options);
}

// src/core/aws/cloudfront-deployer.ts
import chalk5 from "chalk";
import ora2 from "ora";
// Creates or updates the CloudFront distribution fronting an S3-hosted site
// and handles cache invalidation as configured. Requires cloudfront.enabled
// and an S3 bucket name in the config.
async function deployToCloudFront(config, s3DeploymentStats, options = {}) {
  const startTime = Date.now();
  if (!config.cloudfront?.enabled) {
    throw new Error("CloudFront is not enabled in configuration");
  }
  if (!config.s3?.bucketName) {
    throw new Error("S3 bucket name is required for CloudFront deployment");
  }
  const {
distributionId: existingDistributionId, invalidatePaths, invalidateAll: shouldInvalidateAll = false, waitForDeployment = true, waitForInvalidation = true, showProgress = true, environment = "default", saveState: shouldSaveState = true } = options; const cloudFrontConfig = config.cloudfront; const s3Config = config.s3; const cfClient = createCloudFrontClient(config); let state = loadState({ environment }); const stateDistributionId = state ? getCloudFrontResource(state)?.distributionId : void 0; const resolvedDistributionId = existingDistributionId || stateDistributionId; let spinner = null; let distributionId; let isNewDistribution = false; let distribution; if (resolvedDistributionId) { if (showProgress) { spinner = ora2("Checking CloudFront distribution...").start(); } const exists = await distributionExists(cfClient, resolvedDistributionId); if (exists) { distribution = await getDistribution(cfClient, resolvedDistributionId); distributionId = resolvedDistributionId; if (spinner) { spinner.succeed( `CloudFront distribution found: ${chalk5.cyan(distributionId)}` ); } const hasUpdates = cloudFrontConfig.priceClass || cloudFrontConfig.customDomain || cloudFrontConfig.defaultTTL !== void 0; if (hasUpdates) { if (showProgress) { spinner = ora2("Updating CloudFront distribution...").start(); } const updateOptions = { priceClass: cloudFrontConfig.priceClass, customDomain: cloudFrontConfig.customDomain, defaultTTL: cloudFrontConfig.defaultTTL, maxTTL: cloudFrontConfig.maxTTL, minTTL: cloudFrontConfig.minTTL, ipv6: cloudFrontConfig.ipv6 }; distribution = await updateDistribution( cfClient, distributionId, updateOptions ); if (spinner) { spinner.succeed("CloudFront distribution updated"); } } } else { throw