scf-deploy
Version:
S3 + CloudFront static deployment automation CLI
1,761 lines (1,724 loc) • 63.2 kB
JavaScript
// src/core/config/index.ts
import chalk from "chalk";
// src/core/config/loader.ts
import jiti from "jiti";
import { existsSync } from "fs";
import { resolve, join, dirname } from "path";
import { fileURLToPath } from "url";
// Config file names probed in priority order: TypeScript first, then the
// JS module flavors. findConfigFile checks these in every ancestor directory.
var CONFIG_FILE_NAMES = [
  "scf.config.ts",
  "scf.config.js",
  "scf.config.mjs",
  "scf.config.cjs"
];
/**
 * Walk upward from startDir looking for the first matching config file.
 * Every directory on the way up is checked, including the filesystem root
 * (the previous loop condition `currentDir !== root` exited before ever
 * probing the root directory itself).
 * @returns absolute path of the config file, or null when none is found.
 */
function findConfigFile(startDir = process.cwd()) {
  let currentDir = resolve(startDir);
  for (;;) {
    for (const fileName of CONFIG_FILE_NAMES) {
      const configPath = join(currentDir, fileName);
      if (existsSync(configPath)) {
        return configPath;
      }
    }
    const parentDir = dirname(currentDir);
    if (parentDir === currentDir) {
      // dirname of the root is the root itself: we are done ascending.
      break;
    }
    currentDir = parentDir;
  }
  return null;
}
/**
 * Evaluate a config module via jiti (handles TS/ESM/CJS) and unwrap it.
 * Accepts a plain object, a factory function, or a default export that is
 * either of those. Factory results are now awaited, so async factories
 * resolve to the config object instead of leaking a pending Promise
 * (awaiting a non-Promise value is a no-op, so sync factories still work).
 * @throws when the file is missing or jiti fails to evaluate it.
 */
async function loadConfigFile(configPath) {
  if (!existsSync(configPath)) {
    throw new Error(`Config file not found: ${configPath}`);
  }
  try {
    const currentFile = fileURLToPath(import.meta.url);
    const jitiInstance = jiti(currentFile, {
      interopDefault: true,
      requireCache: false,
      esmResolve: true,
      moduleCache: false
    });
    const absoluteConfigPath = resolve(configPath);
    const configModule = jitiInstance(absoluteConfigPath);
    let config;
    if (typeof configModule === "function") {
      config = await configModule();
    } else if (configModule && typeof configModule === "object" && "default" in configModule) {
      const defaultExport = configModule.default;
      if (typeof defaultExport === "function") {
        config = await defaultExport();
      } else {
        config = defaultExport;
      }
    } else {
      config = configModule;
    }
    return config;
  } catch (error) {
    if (error instanceof Error) {
      throw new Error(
        `Failed to load config file: ${configPath}
${error.message}`
      );
    }
    throw error;
  }
}
/**
 * Locate the nearest config file above startDir and load it.
 * @returns the parsed config plus the path it was loaded from.
 * @throws when no config file exists anywhere up the tree.
 */
async function discoverAndLoadConfig(startDir) {
  const configPath = findConfigFile(startDir);
  if (configPath === null) {
    const candidates = CONFIG_FILE_NAMES.join(", ");
    throw new Error(`Config file not found. Please create one of: ${candidates}`);
  }
  const config = await loadConfigFile(configPath);
  return { config, configPath };
}
// src/core/config/merger.ts
/**
 * Recursively merge `source` over `target` and return a new object.
 * Plain-object values merge recursively; arrays, nulls, and primitives
 * replace wholesale; `undefined` source values are skipped entirely.
 */
function deepMerge(target, source) {
  const merged = { ...target };
  for (const key in source) {
    const incoming = source[key];
    if (incoming === void 0) continue;
    const existing = merged[key];
    const bothPlainObjects =
      incoming !== null &&
      typeof incoming === "object" &&
      !Array.isArray(incoming) &&
      existing !== void 0 &&
      existing !== null &&
      typeof existing === "object" &&
      !Array.isArray(existing);
    merged[key] = bothPlainObjects ? deepMerge(existing, incoming) : incoming;
  }
  return merged;
}
/**
 * Resolve the effective config for an environment by deep-merging that
 * environment's block over the base config. The `environments` key itself
 * is always stripped from the result.
 * @throws when the requested environment is not declared in the config.
 */
function mergeEnvironment(baseConfig, environment) {
  const { environments, ...base } = baseConfig;
  if (!environment || !environments) {
    return base;
  }
  const envConfig = environments[environment];
  if (!envConfig) {
    const available = Object.keys(environments).join(", ");
    throw new Error(
      `Environment "${environment}" not found in config. Available environments: ${available}`
    );
  }
  return deepMerge(base, envConfig);
}
/**
 * Return a copy of config whose credentials.profile is replaced by the CLI
 * override; the original object is returned untouched when no override is
 * given.
 */
function applyProfileOverride(config, profileOverride) {
  if (!profileOverride) return config;
  const credentials = { ...config.credentials, profile: profileOverride };
  return { ...config, credentials };
}
// src/core/config/schema.ts
import { z } from "zod";
// Optional AWS credential source; every field optional so the resolver can
// fall back to env vars / shared profiles / instance metadata.
var awsCredentialsSchema = z.object({
  profile: z.string().optional(),
  accessKeyId: z.string().optional(),
  secretAccessKey: z.string().optional(),
  sessionToken: z.string().optional()
}).optional();
// S3 deployment settings; bucket name length/charset mirrors S3 naming rules.
var s3ConfigSchema = z.object({
  bucketName: z.string().min(3, "Bucket name must be at least 3 characters").max(63, "Bucket name must be at most 63 characters").regex(
    /^[a-z0-9][a-z0-9.-]*[a-z0-9]$/,
    "Bucket name must follow S3 naming rules"
  ),
  buildDir: z.string().min(1, "Build directory cannot be empty").optional(),
  indexDocument: z.string().default("index.html"),
  errorDocument: z.string().optional(),
  websiteHosting: z.boolean().default(true),
  concurrency: z.number().int().min(1).max(100).default(10),
  gzip: z.boolean().default(true),
  exclude: z.array(z.string()).default([])
}).optional();
// One CloudFront custom error response (e.g. map 404 to /index.html for SPAs).
var errorPageSchema = z.object({
  errorCode: z.number().int().min(400).max(599),
  responseCode: z.number().int().min(200).max(599).optional(),
  responsePath: z.string().optional(),
  cacheTTL: z.number().int().min(0).optional()
});
// Post-deploy cache-warming limits (bounded to avoid hammering the edge).
var cacheWarmingSchema = z.object({
  enabled: z.boolean(),
  paths: z.array(z.string()).default(["/"]),
  concurrency: z.number().int().min(1).max(10).default(3),
  // Reduced from 20 to 10, default from 5 to 3
  delay: z.number().int().min(100).max(5e3).default(500)
  // Increased from 0-5000/100 to 100-5000/500
});
// CloudFront distribution settings. NOTE(review): certificateArn is only
// checked for the arn:aws:acm: prefix; the us-east-1 requirement for
// CloudFront certs is not enforced here — confirm downstream handling.
var cloudfrontConfigSchema = z.object({
  enabled: z.boolean(),
  priceClass: z.enum(["PriceClass_100", "PriceClass_200", "PriceClass_All"]).optional().default("PriceClass_100"),
  customDomain: z.object({
    domainName: z.string().min(1, "Domain name is required"),
    certificateArn: z.string().regex(/^arn:aws:acm:/, "Must be a valid ACM certificate ARN"),
    aliases: z.array(z.string()).optional()
  }).optional(),
  defaultTTL: z.number().int().min(0).default(86400),
  // 1 day
  maxTTL: z.number().int().min(0).default(31536e3),
  // 1 year
  minTTL: z.number().int().min(0).default(0),
  ipv6: z.boolean().default(true),
  errorPages: z.array(errorPageSchema).optional(),
  cacheWarming: cacheWarmingSchema.optional()
}).optional();
// Root config schema. `environments` values are deliberately untyped
// (z.any()): each entry is a partial config that mergeEnvironment deep-merges
// over the base before this schema validates the merged result.
var configSchema = z.object({
  app: z.string().min(1, "App name is required").regex(
    /^[a-z0-9-]+$/,
    "App name must contain only lowercase letters, numbers, and hyphens"
  ),
  region: z.string().min(1, "AWS region is required").regex(
    /^[a-z]{2}-[a-z]+-\d+$/,
    "Must be a valid AWS region (e.g., us-east-1)"
  ),
  credentials: awsCredentialsSchema,
  s3: s3ConfigSchema,
  cloudfront: cloudfrontConfigSchema,
  environments: z.record(z.string(), z.any()).optional()
});
// Options accepted by loadConfig().
var loadConfigOptionsSchema = z.object({
  configPath: z.string().optional(),
  env: z.string().optional(),
  profile: z.string().optional()
});
// Parse and apply schema defaults; throws a ZodError on invalid input.
function validateConfig(config) {
  return configSchema.parse(config);
}
// src/core/config/utils.ts
// Identity passthrough whose only purpose is TypeScript inference for user
// config files (`export default defineConfig({ ... })`).
function defineConfig(config) {
  return config;
}
// Starter scf.config.ts contents (used by scaffolding); appName seeds the
// app name and bucket names in the template.
// NOTE(review): the template imports from 'scf' while the package is named
// scf-deploy — confirm the published module name matches.
function generateExampleConfig(appName = "my-app") {
  return `import { defineConfig } from 'scf';
export default defineConfig({
app: '${appName}',
region: 'ap-northeast-2',
s3: {
bucketName: '${appName}-bucket',
buildDir: './dist',
indexDocument: 'index.html',
errorDocument: '404.html',
websiteHosting: true,
gzip: true,
concurrency: 10,
},
cloudfront: {
enabled: false,
priceClass: 'PriceClass_100',
defaultTTL: 86400,
ipv6: true,
},
// Environment-specific configurations
environments: {
dev: {
s3: {
bucketName: '${appName}-dev',
},
},
prod: {
s3: {
bucketName: '${appName}-prod',
},
cloudfront: {
enabled: true,
priceClass: 'PriceClass_All',
// Uncomment to use custom domain
// customDomain: {
// domainName: 'example.com',
// certificateArn: 'arn:aws:acm:us-east-1:123456789012:certificate/...',
// },
},
},
},
});
`;
}
/**
 * Cheap structural check run after schema parsing: gathers every missing
 * required field and throws one aggregated, bulleted error.
 */
function validateRequiredFields(config) {
  const errors = [];
  if (!config.app) errors.push("app name is required");
  if (!config.region) errors.push("region is required");
  if (config.s3 && !config.s3.bucketName) {
    errors.push("s3.bucketName is required when s3 is configured");
  }
  const customDomain = config.cloudfront?.enabled ? config.cloudfront.customDomain : void 0;
  if (customDomain && !customDomain.certificateArn) {
    errors.push(
      "cloudfront.customDomain.certificateArn is required when using custom domain"
    );
  }
  if (errors.length > 0) {
    throw new Error(
      `Config validation failed:
${errors.map((e) => ` - ${e}`).join("\n")}`
    );
  }
}
// src/core/config/index.ts
/**
 * Top-level config pipeline: load the raw config (explicit path or
 * discovery), fold in the selected environment, apply a CLI profile
 * override, then validate via the zod schema plus the extra required-field
 * checks. Any failure is rewrapped in a single "Failed to load
 * configuration" error.
 */
async function loadConfig(options = {}) {
  const { configPath, env, profile } = options;
  try {
    let rawConfig;
    let resolvedConfigPath;
    if (configPath) {
      resolvedConfigPath = configPath;
      rawConfig = await loadConfigFile(configPath);
    } else {
      ({ config: rawConfig, configPath: resolvedConfigPath } = await discoverAndLoadConfig());
    }
    const withEnv = mergeEnvironment(rawConfig, env);
    const withProfile = applyProfileOverride(withEnv, profile);
    const validatedConfig = validateConfig(withProfile);
    validateRequiredFields(validatedConfig);
    // Only the CLI entry point reports where the config came from.
    if (process.env.SCF_CLI_MODE === "true") {
      console.log(chalk.gray(` Loaded config from: ${resolvedConfigPath}`));
      if (env) {
        console.log(chalk.gray(` Environment: ${env}`));
      }
    }
    return validatedConfig;
  } catch (error) {
    if (error instanceof Error) {
      throw new Error(`Failed to load configuration:
${error.message}`);
    }
    throw error;
  }
}
// src/core/aws/credentials.ts
import {
fromEnv,
fromIni,
fromInstanceMetadata,
fromContainerMetadata
} from "@aws-sdk/credential-providers";
/**
 * Resolve AWS credentials using an explicit precedence chain:
 *   1. inline keys from config, 2. profile from config, 3. env vars,
 *   4. AWS_PROFILE/default shared profile, 5. container/instance metadata.
 * Returns the resolved credentials plus which source supplied them (and the
 * profile name when a profile was used).
 * @throws with setup guidance when no source yields usable credentials.
 */
async function getCredentials(config) {
  let credentialProvider;
  let source;
  let profile;
  if (config.credentials?.accessKeyId && config.credentials?.secretAccessKey) {
    // Inline static keys in the config win over everything else.
    const { accessKeyId, secretAccessKey, sessionToken } = config.credentials;
    credentialProvider = async () => ({
      accessKeyId,
      secretAccessKey,
      sessionToken
    });
    source = "config";
  } else if (config.credentials?.profile) {
    profile = config.credentials.profile;
    credentialProvider = fromIni({ profile });
    source = "profile";
  } else if (process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY) {
    credentialProvider = fromEnv();
    source = "environment";
  } else {
    const defaultProfile = process.env.AWS_PROFILE || "default";
    try {
      credentialProvider = fromIni({ profile: defaultProfile });
      source = "profile";
      profile = defaultProfile;
    } catch {
      // NOTE(review): fromIni/fromContainerMetadata return lazy providers
      // that usually fail only when invoked below, so these fallbacks may
      // never trigger here — verify against the SDK version in use.
      try {
        credentialProvider = fromContainerMetadata();
        source = "instance-metadata";
      } catch {
        credentialProvider = fromInstanceMetadata();
        source = "instance-metadata";
      }
    }
  }
  try {
    // Invoke the provider now so a bad source surfaces a friendly error here
    // rather than deep inside an SDK call later.
    const credentials = await credentialProvider();
    return {
      credentials,
      source,
      profile
    };
  } catch (error) {
    throw new Error(
      `Failed to resolve AWS credentials.
Please configure credentials using one of:
1. Config file (credentials.accessKeyId + secretAccessKey)
2. Config file (credentials.profile)
3. Environment variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
4. AWS profile (~/.aws/credentials)
5. IAM role (EC2/ECS instance metadata)
Original error: ${error instanceof Error ? error.message : String(error)}`
    );
  }
}
/**
 * Wrap getCredentials in the shape the AWS SDK expects: an async provider
 * function that yields bare credentials each time it is invoked.
 */
function createCredentialProvider(config) {
  return async () => (await getCredentials(config)).credentials;
}
// src/core/aws/verify.ts
import { STSClient, GetCallerIdentityCommand } from "@aws-sdk/client-sts";
/**
 * Round-trip STS GetCallerIdentity to prove the credentials actually work.
 * Returns the account id / caller ARN / user id. AWS-flavored errors (those
 * carrying Code or $metadata) are rewrapped with a friendlier message;
 * anything else propagates untouched.
 */
async function verifyCredentials(credentials, region) {
  const client = new STSClient({ region, credentials });
  try {
    const response = await client.send(new GetCallerIdentityCommand({}));
    const { Account, Arn, UserId } = response;
    if (!Account || !Arn || !UserId) {
      throw new Error("Invalid STS response: missing required fields");
    }
    return { accountId: Account, arn: Arn, userId: UserId };
  } catch (error) {
    if (error instanceof Error) {
      const looksLikeAwsError = "Code" in error || "$metadata" in error;
      if (looksLikeAwsError) {
        throw new Error(
          `AWS credentials verification failed: ${error.message}
Please check your credentials and try again.`
        );
      }
      throw error;
    }
    throw new Error(`Unknown error during credentials verification: ${String(error)}`);
  }
}
// Render account info as a three-line human-readable summary.
function formatAccountInfo(info) {
  const lines = [
    `Account ID: ${info.accountId}`,
    `User ARN: ${info.arn}`,
    `User ID: ${info.userId}`
  ];
  return lines.join("\n");
}
// src/core/aws/client.ts
import { S3Client } from "@aws-sdk/client-s3";
import { CloudFrontClient } from "@aws-sdk/client-cloudfront";
import { STSClient as STSClient2 } from "@aws-sdk/client-sts";
// S3 client bound to the configured region with a lazy credential provider.
function createS3Client(config) {
  const credentials = createCredentialProvider(config);
  return new S3Client({ region: config.region, credentials });
}
// CloudFront control-plane client; the API is served only from us-east-1.
function createCloudFrontClient(config) {
  const credentials = createCredentialProvider(config);
  return new CloudFrontClient({ region: "us-east-1", credentials });
}
// STS client bound to the configured region with a lazy credential provider.
function createSTSClient(config) {
  const credentials = createCredentialProvider(config);
  return new STSClient2({ region: config.region, credentials });
}
// S3 client with optional region / request-timeout / retry overrides.
function createS3ClientWithOptions(config, options = {}) {
  const timeout = options.requestTimeout;
  return new S3Client({
    region: options.region ?? config.region,
    credentials: createCredentialProvider(config),
    requestHandler: timeout ? { requestTimeout: timeout } : void 0,
    maxAttempts: options.maxAttempts
  });
}
// CloudFront client (always us-east-1) with optional timeout/retry overrides.
function createCloudFrontClientWithOptions(config, options = {}) {
  const timeout = options.requestTimeout;
  return new CloudFrontClient({
    region: "us-east-1",
    credentials: createCredentialProvider(config),
    requestHandler: timeout ? { requestTimeout: timeout } : void 0,
    maxAttempts: options.maxAttempts
  });
}
// src/core/aws/s3-bucket.ts
import {
HeadBucketCommand,
CreateBucketCommand,
PutBucketWebsiteCommand,
PutBucketPolicyCommand,
DeletePublicAccessBlockCommand,
PutBucketTaggingCommand,
GetBucketTaggingCommand
} from "@aws-sdk/client-s3";
/**
 * HeadBucket probe: true when the bucket exists and is accessible, false on
 * a NotFound/404 response; any other error (auth, network) is rethrown.
 */
async function bucketExists(client, bucketName) {
  try {
    await client.send(new HeadBucketCommand({ Bucket: bucketName }));
    return true;
  } catch (error) {
    const err = error;
    const isNotFound =
      err != null &&
      typeof err === "object" &&
      (err.name === "NotFound" || err.$metadata?.httpStatusCode === 404);
    if (isNotFound) {
      return false;
    }
    throw error;
  }
}
/**
 * Create the bucket, treating BucketAlreadyOwnedByYou as success so the
 * call is idempotent. us-east-1 must not receive a LocationConstraint.
 */
async function createBucket(client, bucketName, region) {
  const input = {
    Bucket: bucketName,
    ...(region !== "us-east-1"
      ? { CreateBucketConfiguration: { LocationConstraint: region } }
      : {})
  };
  try {
    await client.send(new CreateBucketCommand(input));
  } catch (error) {
    const alreadyOwned =
      error && typeof error === "object" && "name" in error && error.name === "BucketAlreadyOwnedByYou";
    if (!alreadyOwned) {
      throw error;
    }
  }
}
// Enable static website hosting on the bucket; the error document is optional.
async function configureBucketWebsite(client, bucketName, indexDocument = "index.html", errorDocument) {
  const websiteConfiguration = {
    IndexDocument: { Suffix: indexDocument },
    ...(errorDocument ? { ErrorDocument: { Key: errorDocument } } : {})
  };
  await client.send(
    new PutBucketWebsiteCommand({
      Bucket: bucketName,
      WebsiteConfiguration: websiteConfiguration
    })
  );
}
/**
 * Grant anonymous s3:GetObject on every key in the bucket. First makes a
 * best-effort attempt to remove the public-access block (failures are
 * deliberately swallowed), then attaches the public-read bucket policy.
 */
async function setBucketPublicReadPolicy(client, bucketName) {
  try {
    await client.send(new DeletePublicAccessBlockCommand({ Bucket: bucketName }));
  } catch {
    // Best effort: the bucket may simply have no public-access block.
  }
  const statement = {
    Sid: "PublicReadGetObject",
    Effect: "Allow",
    Principal: "*",
    Action: "s3:GetObject",
    Resource: `arn:aws:s3:::${bucketName}/*`
  };
  await client.send(
    new PutBucketPolicyCommand({
      Bucket: bucketName,
      Policy: JSON.stringify({ Version: "2012-10-17", Statement: [statement] })
    })
  );
}
/**
 * Idempotently bring a bucket to its desired serving state: create it if
 * missing, then optionally enable website hosting and the public-read
 * policy (both on by default).
 */
async function ensureBucket(client, bucketName, region, options = {}) {
  const {
    websiteHosting = true,
    indexDocument = "index.html",
    errorDocument,
    publicRead = true
  } = options;
  if (!(await bucketExists(client, bucketName))) {
    await createBucket(client, bucketName, region);
  }
  if (websiteHosting) {
    await configureBucketWebsite(client, bucketName, indexDocument, errorDocument);
  }
  if (publicRead) {
    await setBucketPublicReadPolicy(client, bucketName);
  }
}
/**
 * Tag the bucket so scf can later recognize and recover resources it
 * manages. Tagging is non-critical: failures are logged — now including the
 * underlying reason, which the previous version silently discarded — and
 * swallowed so deployment continues.
 */
async function tagBucketForRecovery(client, bucketName, app, environment) {
  try {
    await client.send(
      new PutBucketTaggingCommand({
        Bucket: bucketName,
        Tagging: {
          TagSet: [
            { Key: "scf:managed", Value: "true" },
            { Key: "scf:app", Value: app },
            { Key: "scf:environment", Value: environment },
            { Key: "scf:tool", Value: "scf-deploy" }
          ]
        }
      })
    );
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    console.warn(`Warning: Failed to tag S3 bucket for recovery: ${reason}`);
  }
}
/**
 * Public S3 static-website endpoint. us-east-1 uses the legacy dashed host
 * form; every other region uses the dotted form.
 */
function getBucketWebsiteUrl(bucketName, region) {
  return region === "us-east-1"
    ? `http://${bucketName}.s3-website-us-east-1.amazonaws.com`
    : `http://${bucketName}.s3-website.${region}.amazonaws.com`;
}
// src/core/aws/s3-deployer.ts
import chalk3 from "chalk";
import ora from "ora";
import cliProgress from "cli-progress";
// src/core/deployer/file-scanner.ts
import glob from "fast-glob";
import { hashFile } from "hasha";
import { lookup as getMimeType } from "mime-types";
import { stat } from "fs/promises";
import { join as join2, sep } from "path";
// Text-like extensions worth gzip-compressing before upload.
var GZIPPABLE_EXTENSIONS = /* @__PURE__ */ new Set([
  ".html",
  ".htm",
  ".css",
  ".js",
  ".mjs",
  ".json",
  ".xml",
  ".svg",
  ".txt",
  ".md",
  ".csv",
  ".ts",
  ".tsx",
  ".jsx"
]);
// True when the file's extension (case-insensitive) is in the gzippable set.
function shouldGzipFile(filePath) {
  const ext = filePath.substring(filePath.lastIndexOf(".")).toLowerCase();
  return GZIPPABLE_EXTENSIONS.has(ext);
}
// MIME type by extension, falling back to a generic binary content type.
function getContentType(filePath) {
  return getMimeType(filePath) || "application/octet-stream";
}
// S3 object keys always use forward slashes, whatever the host separator is.
function pathToS3Key(relativePath) {
  return relativePath.split(sep).join("/");
}
/**
 * Glob buildDir for deployable files (dotfiles included, exclusions
 * honored) and collect the metadata the uploader needs per file: size,
 * sha256 hash, MIME type, gzip eligibility, and the POSIX-style S3 key.
 * Files are hashed sequentially, one at a time.
 */
async function scanFiles(options) {
  const { buildDir, exclude = [], followSymlinks = false } = options;
  const files = await glob(["**/*"], {
    cwd: buildDir,
    absolute: false,
    ignore: exclude,
    onlyFiles: true,
    followSymbolicLinks: followSymlinks,
    dot: true
  });
  const fileInfos = [];
  for (const relativePath of files) {
    const absolutePath = join2(buildDir, relativePath);
    const stats = await stat(absolutePath);
    const hash = await hashFile(absolutePath, { algorithm: "sha256" });
    fileInfos.push({
      absolutePath,
      relativePath,
      key: pathToS3Key(relativePath),
      size: stats.size,
      hash,
      contentType: getContentType(absolutePath),
      shouldGzip: shouldGzipFile(absolutePath)
    });
  }
  return fileInfos;
}
// sha256 content hash of a single file on disk (delegates to hasha).
async function calculateFileHash(filePath) {
  return hashFile(filePath, { algorithm: "sha256" });
}
// Keep only files whose hash is new or differs from the recorded one.
function filterChangedFiles(files, existingHashes) {
  return files.filter((file) => {
    const previous = existingHashes[file.key];
    return !previous || previous !== file.hash;
  });
}
// Partition files into gzip-eligible and plain uploads.
function groupFilesByCompression(files) {
  const gzipped = files.filter((f) => f.shouldGzip);
  const plain = files.filter((f) => !f.shouldGzip);
  return { gzipped, plain };
}
// src/core/deployer/build-detector.ts
import { existsSync as existsSync2, statSync, readdirSync } from "fs";
import { join as join3 } from "path";
import chalk2 from "chalk";
// Conventional build-output directories probed in order when the user does
// not set s3.buildDir explicitly.
var BUILD_DIR_CANDIDATES = [
  "dist",
  // Vite, Rollup, etc.
  "build",
  // Create React App, Next.js, etc.
  "out",
  // Next.js static export
  ".output/public",
  // Nuxt 3
  "_site",
  // Jekyll, 11ty
  "output"
  // Some SSGs
];
// Server-side-rendering outputs that must never be deployed as static sites.
var SSR_BUILD_DIRS = [".next", ".nuxt"];
// True when dirPath contains a regular file named index.html; any
// filesystem error counts as "no".
function hasIndexHtml(dirPath) {
  try {
    const candidate = join3(dirPath, "index.html");
    if (!existsSync2(candidate)) return false;
    return statSync(candidate).isFile();
  } catch {
    return false;
  }
}
/**
 * A directory is deployable when it has an index.html plus at least one
 * regular file with a common web extension (.html/.js/.css/.json).
 * Any filesystem error counts as "not deployable".
 */
function hasDeployableFiles(dirPath) {
  try {
    if (!hasIndexHtml(dirPath)) {
      return false;
    }
    const entries = readdirSync(dirPath, { withFileTypes: true });
    if (!entries.some((entry) => entry.isFile())) {
      return false;
    }
    const webFileExtensions = [".html", ".js", ".css", ".json"];
    return entries.some(
      (entry) => entry.isFile() && webFileExtensions.some((ext) => entry.name.endsWith(ext))
    );
  } catch {
    return false;
  }
}
/**
 * Probe the well-known build directories under cwd and return the first
 * candidate that exists, is a directory, and contains deployable files;
 * null when none qualifies.
 */
function detectBuildDirectory(cwd = process.cwd()) {
  for (const candidate of BUILD_DIR_CANDIDATES) {
    const candidatePath = join3(cwd, candidate);
    const usable =
      existsSync2(candidatePath) &&
      statSync(candidatePath).isDirectory() &&
      hasDeployableFiles(candidatePath);
    if (usable) {
      return candidate;
    }
  }
  return null;
}
// True when buildDir (minus a leading "./") names a known SSR output dir.
function isSSRBuildDir(buildDir) {
  const normalized = buildDir.replace(/^\.\//, "");
  return SSR_BUILD_DIRS.includes(normalized);
}
/**
 * Validate that buildDir (relative to cwd) is a deployable static-site
 * directory. Throws with actionable guidance when it is an SSR output
 * (.next/.nuxt), missing, not a directory, or lacks index.html / web files.
 * The error strings below are user-facing chalk-styled text — preserve them.
 */
function validateBuildDirectory(buildDir, cwd = process.cwd()) {
  const absolutePath = join3(cwd, buildDir);
  // SSR outputs can never be served from S3/CloudFront; point the user at
  // the static-export workflow instead.
  if (isSSRBuildDir(buildDir)) {
    throw new Error(
      `Cannot deploy ${chalk2.cyan(buildDir)} directory to S3/CloudFront.
${chalk2.yellow("\u26A0 This is a server-side rendering (SSR) build directory.")}
S3/CloudFront only supports static files. To deploy with Next.js:
1. Add ${chalk2.cyan("output: 'export'")} to ${chalk2.cyan("next.config.ts")}:
` + chalk2.gray(` const nextConfig = {
`) + chalk2.gray(` output: 'export',
`) + chalk2.gray(` images: { unoptimized: true },
`) + chalk2.gray(` };
`) + ` 2. Rebuild your project:
${chalk2.cyan("npm run build")}
This will create an ${chalk2.cyan("out")} directory with static files.`
    );
  }
  if (!existsSync2(absolutePath)) {
    throw new Error(
      `Build directory not found: ${chalk2.cyan(buildDir)}
Please build your project first or specify the correct build directory.`
    );
  }
  const stats = statSync(absolutePath);
  if (!stats.isDirectory()) {
    throw new Error(`Build path is not a directory: ${chalk2.cyan(buildDir)}`);
  }
  // Distinguish "no index.html at all" from "index.html but no web assets"
  // so the error message can be specific.
  if (!hasDeployableFiles(absolutePath)) {
    const hasIndex = hasIndexHtml(absolutePath);
    if (!hasIndex) {
      throw new Error(
        `Build directory does not contain ${chalk2.cyan("index.html")}: ${chalk2.cyan(buildDir)}
This directory cannot be deployed as a static website.
Please build your project first:
${chalk2.cyan("npm run build")}`
      );
    } else {
      throw new Error(
        `Build directory does not contain valid web files: ${chalk2.cyan(buildDir)}
Please build your project first before deploying.`
      );
    }
  }
}
/**
 * Resolve which directory to deploy: validate an explicitly provided one,
 * otherwise auto-detect among the known candidates. Throws with build
 * instructions when nothing usable is found.
 */
function getBuildDirectory(providedBuildDir, cwd = process.cwd()) {
  if (providedBuildDir) {
    // Explicit configuration wins, but must still pass validation.
    validateBuildDirectory(providedBuildDir, cwd);
    return providedBuildDir;
  }
  const detectedDir = detectBuildDirectory(cwd);
  if (!detectedDir) {
    throw new Error(
      `No build directory found.
Searched for: ${BUILD_DIR_CANDIDATES.map((d) => chalk2.cyan(d)).join(
", "
)}
Please build your project first:
${chalk2.gray("# For Vite/Rollup projects")}
${chalk2.cyan("npm run build")}
${chalk2.gray("# For Next.js projects")}
${chalk2.cyan("npm run build")}
Or specify a custom build directory in ${chalk2.cyan(
"scf.config.ts"
)}:
${chalk2.gray("s3: { buildDir: './your-build-dir' }")}`
    );
  }
  return detectedDir;
}
// src/core/deployer/s3-uploader.ts
import { PutObjectCommand } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import { createReadStream, readFileSync } from "fs";
import { gzip } from "zlib";
import { promisify } from "util";
import pLimit from "p-limit";
// Promise-returning gzip, wrapped once at module load.
var gzipAsync = promisify(gzip);
// Gzip an entire file's contents and resolve with the compressed buffer.
async function gzipFile(filePath) {
  return gzipAsync(readFileSync(filePath));
}
/**
 * Upload a single file to S3, gzipping eligible files when enabled. Files
 * over 5 MB go through the managed multipart Upload; smaller ones use a
 * single PutObject. Never throws: failures come back as a result record
 * with success=false. Dry-run short-circuits before touching AWS.
 */
async function uploadFile(client, bucketName, file, options = {}) {
  const { gzip: enableGzip = true, dryRun = false } = options;
  const startTime = Date.now();
  try {
    if (dryRun) {
      return {
        file,
        success: true,
        status: "uploaded",
        duration: Date.now() - startTime
      };
    }
    const params = {
      Bucket: bucketName,
      Key: file.key,
      ContentType: file.contentType
    };
    if (enableGzip && file.shouldGzip) {
      params.Body = await gzipFile(file.absolutePath);
      params.ContentEncoding = "gzip";
    } else {
      params.Body = createReadStream(file.absolutePath);
    }
    // Multipart threshold: 5 MB (based on the original, uncompressed size).
    const useMultipart = file.size > 5 * 1024 * 1024;
    if (useMultipart) {
      await new Upload({ client, params }).done();
    } else {
      await client.send(new PutObjectCommand(params));
    }
    return {
      file,
      success: true,
      status: "uploaded",
      duration: Date.now() - startTime
    };
  } catch (error) {
    return {
      file,
      success: false,
      status: "failed",
      error: error instanceof Error ? error.message : String(error),
      duration: Date.now() - startTime
    };
  }
}
/**
 * Upload many files with bounded concurrency, invoking onProgress after
 * each finishes. Results arrive in completion order, not input order.
 */
async function uploadFiles(client, bucketName, files, options = {}, onProgress) {
  const { concurrency = 10 } = options;
  const limit = pLimit(concurrency);
  const results = [];
  let completed = 0;
  await Promise.all(
    files.map((file) =>
      limit(async () => {
        const result = await uploadFile(client, bucketName, file, options);
        results.push(result);
        completed += 1;
        if (onProgress) {
          onProgress(completed, files.length, file);
        }
        return result;
      })
    )
  );
  return results;
}
// Sum of all file sizes, in bytes.
function calculateTotalSize(files) {
  let total = 0;
  for (const file of files) {
    total += file.size;
  }
  return total;
}
/**
 * Human-readable byte count (base 1024, up to 2 decimal places).
 * Fixes: values >= 1 TB previously indexed past the units array and
 * rendered "N undefined"; sub-byte/negative inputs produced sizes[-1].
 * The unit index is now clamped into [0, GB].
 */
function formatBytes(bytes) {
  if (bytes <= 0) return "0 Bytes";
  const k = 1024;
  const sizes = ["Bytes", "KB", "MB", "GB"];
  const i = Math.max(0, Math.min(sizes.length - 1, Math.floor(Math.log(bytes) / Math.log(k))));
  return Math.round(bytes / Math.pow(k, i) * 100) / 100 + " " + sizes[i];
}
// src/core/state/manager.ts
import fs from "fs";
import path from "path";
// Directory (relative to cwd) where deployment state is persisted.
var DEFAULT_STATE_DIR = ".deploy";
// Schema version stamped into newly saved state files.
var STATE_VERSION = "1.0.0";
/**
 * Absolute path of the state file for an environment: state.json for the
 * default environment, state.<env>.json otherwise.
 */
function getStateFilePath(options = {}) {
  const { stateDir = DEFAULT_STATE_DIR, environment = "default" } = options;
  const fileName = environment === "default" ? "state.json" : `state.${environment}.json`;
  return path.join(process.cwd(), stateDir, fileName);
}
// Whether a persisted state file exists for the given options.
function stateExists(options = {}) {
  return fs.existsSync(getStateFilePath(options));
}
/**
 * Read and parse the state file, returning null when it does not exist.
 * Files missing the core fields (app/environment/resources) are rejected,
 * and any read/parse failure is wrapped in a descriptive error.
 */
function loadState(options = {}) {
  const filePath = getStateFilePath(options);
  if (!fs.existsSync(filePath)) {
    return null;
  }
  try {
    const state = JSON.parse(fs.readFileSync(filePath, "utf-8"));
    const structurallyValid = state.app && state.environment && state.resources;
    if (!structurallyValid) {
      throw new Error("Invalid state file structure");
    }
    return state;
  } catch (error) {
    throw new Error(`Failed to load state file: ${error.message}`);
  }
}
/**
 * Persist state as pretty-printed JSON, creating the state directory on
 * demand. Mutates the passed state: stamps version (when absent) and
 * refreshes lastDeployed to now. Write failures are wrapped.
 */
function saveState(state, options = {}) {
  const filePath = getStateFilePath(options);
  const stateDir = path.dirname(filePath);
  if (!fs.existsSync(stateDir)) {
    fs.mkdirSync(stateDir, { recursive: true });
  }
  try {
    if (!state.version) {
      state.version = STATE_VERSION;
    }
    state.lastDeployed = new Date().toISOString();
    fs.writeFileSync(filePath, JSON.stringify(state, null, 2), "utf-8");
  } catch (error) {
    throw new Error(`Failed to save state file: ${error.message}`);
  }
}
/**
 * Delete the state file, returning false when it did not exist. The state
 * directory is removed too once it becomes empty. Failures are wrapped.
 */
function deleteState(options = {}) {
  const filePath = getStateFilePath(options);
  if (!fs.existsSync(filePath)) {
    return false;
  }
  try {
    fs.unlinkSync(filePath);
    const dir = path.dirname(filePath);
    if (fs.readdirSync(dir).length === 0) {
      fs.rmdirSync(dir);
    }
    return true;
  } catch (error) {
    throw new Error(`Failed to delete state file: ${error.message}`);
  }
}
// Fresh, empty deployment state for an app/environment, stamped with the
// current time and the current state-schema version.
function initializeState(app, environment = "default") {
  const lastDeployed = new Date().toISOString();
  return {
    app,
    environment,
    lastDeployed,
    resources: {},
    files: {},
    version: STATE_VERSION
  };
}
// Load persisted state when present, otherwise build a fresh (unsaved) one.
function getOrCreateState(app, options = {}) {
  const { environment = "default" } = options;
  return loadState(options) ?? initializeState(app, environment);
}
/**
 * Names of state*.json files inside the state directory; [] when the
 * directory is missing or unreadable (read errors are logged, not thrown).
 */
function listStateFiles(stateDir = DEFAULT_STATE_DIR) {
  const stateDirPath = path.join(process.cwd(), stateDir);
  if (!fs.existsSync(stateDirPath)) {
    return [];
  }
  try {
    return fs
      .readdirSync(stateDirPath)
      .filter((name) => name.startsWith("state") && name.endsWith(".json"));
  } catch (error) {
    console.error(`Failed to list state files: ${error}`);
    return [];
  }
}
// Absolute path of the state directory under the current working directory.
function getStateDir(stateDir = DEFAULT_STATE_DIR) {
  return path.join(process.cwd(), stateDir);
}
// Create the state directory if it does not already exist.
function ensureStateDir(stateDir = DEFAULT_STATE_DIR) {
  const dirPath = getStateDir(stateDir);
  if (!fs.existsSync(dirPath)) {
    fs.mkdirSync(dirPath, { recursive: true });
  }
}
// src/core/state/file-state.ts
/**
 * Diff current files against previously recorded hashes: classify each
 * current file as added / modified / unchanged, and report previously
 * tracked paths that disappeared as deleted. totalChanges excludes
 * unchanged files.
 */
function compareFileHashes(currentFiles, previousHashes) {
  const added = [];
  const modified = [];
  const unchanged = [];
  const deleted = [];
  const seen = new Set();
  for (const { key, hash } of currentFiles) {
    seen.add(key);
    const previousHash = previousHashes[key];
    if (!previousHash) {
      added.push({ path: key, hash, status: "added" });
    } else if (previousHash !== hash) {
      modified.push({ path: key, hash, status: "modified", previousHash });
    } else {
      unchanged.push({ path: key, hash, status: "unchanged", previousHash });
    }
  }
  for (const [trackedPath, hash] of Object.entries(previousHashes)) {
    if (!seen.has(trackedPath)) {
      deleted.push({ path: trackedPath, hash, status: "deleted", previousHash: hash });
    }
  }
  return {
    added,
    modified,
    unchanged,
    deleted,
    totalChanges: added.length + modified.length + deleted.length
  };
}
// Files needing upload: those classified added or modified by the diff.
function getFilesToUpload(currentFiles, previousHashes) {
  const { added, modified } = compareFileHashes(currentFiles, previousHashes);
  const wanted = new Set([...added, ...modified].map((f) => f.path));
  return currentFiles.filter((file) => wanted.has(file.key));
}
// Return state with its hash map replaced by exactly these files' hashes.
function updateFileHashes(state, files) {
  const files2 = Object.fromEntries(files.map((f) => [f.key, f.hash]));
  return { ...state, files: files2 };
}
// Return state with these files' hashes overlaid onto the existing map.
function mergeFileHashes(state, files) {
  const merged = { ...state.files };
  files.forEach((f) => {
    merged[f.key] = f.hash;
  });
  return { ...state, files: merged };
}
// Return state with the given paths dropped from its hash map.
function removeDeletedFiles(state, deletedPaths) {
  const remaining = { ...state.files };
  deletedPaths.forEach((p) => {
    delete remaining[p];
  });
  return { ...state, files: remaining };
}
// Recorded hash for a path (undefined when untracked).
function getFileHash(state, filePath) {
  return state.files[filePath];
}
// Whether a path is tracked in state.
function hasFile(state, filePath) {
  return filePath in state.files;
}
// All tracked paths.
function getFilePaths(state) {
  return Object.keys(state.files);
}
// Number of tracked paths.
function getFileCount(state) {
  return getFilePaths(state).length;
}
// Build a path -> hash map from scanned files.
function createFileHashMap(files) {
  const hashMap = {};
  for (const { key, hash } of files) {
    hashMap[key] = hash;
  }
  return hashMap;
}
// One line per non-empty change category (check mark for changes, circle
// for unchanged), joined by newlines.
function formatFileChanges(changes) {
  const parts = [];
  const categories = [
    ["\u2713 Added", changes.added],
    ["\u2713 Modified", changes.modified],
    ["\u2713 Deleted", changes.deleted],
    ["\u25CB Unchanged", changes.unchanged]
  ];
  for (const [label, list] of categories) {
    if (list.length > 0) {
      parts.push(`${label}: ${list.length} files`);
    }
  }
  return parts.join("\n");
}
// Roll-up counts used for incremental deploy reporting.
function getIncrementalStats(changes) {
  const { added, modified, unchanged, deleted, totalChanges } = changes;
  return {
    needsUpload: added.length + modified.length,
    canSkip: unchanged.length,
    needsDelete: deleted.length,
    totalChanges
  };
}
// src/core/state/resource-state.ts
// Return state with its S3 resource record replaced.
function updateS3Resource(state, resource) {
  return { ...state, resources: { ...state.resources, s3: resource } };
}
// Return state with its CloudFront resource record replaced.
function updateCloudFrontResource(state, resource) {
  return { ...state, resources: { ...state.resources, cloudfront: resource } };
}
// Return state with several resource records merged in at once.
function updateResources(state, resources) {
  return { ...state, resources: { ...state.resources, ...resources } };
}
// Accessor: the tracked S3 resource record, if any.
function getS3Resource(state) {
  return state.resources.s3;
}
// Accessor: the tracked CloudFront resource record, if any.
function getCloudFrontResource(state) {
  return state.resources.cloudfront;
}
// Whether an S3 resource is tracked.
function hasS3Resource(state) {
  return Boolean(state.resources.s3);
}
// Whether a CloudFront resource is tracked.
function hasCloudFrontResource(state) {
  return Boolean(state.resources.cloudfront);
}
// Return state without its S3 resource record.
function removeS3Resource(state) {
  const { s3: _s3, ...resources } = state.resources;
  return { ...state, resources };
}
// Return state without its CloudFront resource record.
function removeCloudFrontResource(state) {
  const { cloudfront: _cloudfront, ...resources } = state.resources;
  return { ...state, resources };
}
// Return state with no tracked resources at all.
function clearResources(state) {
  return { ...state, resources: {} };
}
// Shape an S3 resource record.
function createS3ResourceState(bucketName, region, websiteUrl) {
  return { bucketName, region, websiteUrl };
}
// Shape a CloudFront resource record.
function createCloudFrontResourceState(distributionId, domainName, distributionUrl, aliases) {
  return { distributionId, domainName, distributionUrl, aliases };
}
// Flat summary of which resources exist plus their key identifiers.
function getResourceSummary(state) {
  const s3 = getS3Resource(state);
  const cloudfront = getCloudFrontResource(state);
  return {
    hasS3: Boolean(s3),
    hasCloudFront: Boolean(cloudfront),
    s3BucketName: s3?.bucketName,
    s3Region: s3?.region,
    distributionId: cloudfront?.distributionId,
    distributionUrl: cloudfront?.distributionUrl
  };
}
/**
 * Multi-line human-readable report of the deployment state: app/env header,
 * a section per tracked resource, and the tracked-file count.
 */
function formatResourceSummary(state) {
  const summary = getResourceSummary(state);
  const lines = [
    `App: ${state.app}`,
    `Environment: ${state.environment}`,
    `Last Deployed: ${new Date(state.lastDeployed).toLocaleString()}`,
    ""
  ];
  if (summary.hasS3) {
    lines.push("S3 Bucket:");
    lines.push(` Name: ${summary.s3BucketName}`);
    lines.push(` Region: ${summary.s3Region}`);
    const s3 = getS3Resource(state);
    if (s3?.websiteUrl) {
      lines.push(` URL: ${s3.websiteUrl}`);
    }
    lines.push("");
  }
  if (summary.hasCloudFront) {
    lines.push("CloudFront Distribution:");
    lines.push(` ID: ${summary.distributionId}`);
    lines.push(` URL: ${summary.distributionUrl}`);
    const cf = getCloudFrontResource(state);
    if (cf?.aliases && cf.aliases.length > 0) {
      lines.push(` Aliases: ${cf.aliases.join(", ")}`);
    }
    lines.push("");
  }
  lines.push(`Files: ${Object.keys(state.files).length} tracked`);
  return lines.join("\n");
}
// True when at least one AWS resource (S3 or CloudFront) is tracked.
function hasAnyResource(state) {
  const { s3, cloudfront } = state.resources;
  return Boolean(s3) || Boolean(cloudfront);
}
// Extracts just the identifiers needed to locate the deployed resources;
// fields are undefined when the corresponding resource is untracked.
function getResourceIdentifiers(state) {
  const { s3, cloudfront } = state.resources;
  return {
    s3BucketName: s3?.bucketName,
    s3Region: s3?.region,
    distributionId: cloudfront?.distributionId
  };
}
// Validates a persisted deployment state: tracked resources must carry
// their required identifier fields, and the top-level app/environment/
// lastDeployed fields must be present. Returns { valid, errors } where
// errors lists every problem found (in a fixed order).
function validateResourceState(state) {
  const errors = [];
  const { s3, cloudfront } = state.resources;
  if (s3) {
    if (!s3.bucketName) errors.push("S3 resource missing bucketName");
    if (!s3.region) errors.push("S3 resource missing region");
  }
  if (cloudfront) {
    if (!cloudfront.distributionId) errors.push("CloudFront resource missing distributionId");
    if (!cloudfront.domainName) errors.push("CloudFront resource missing domainName");
    if (!cloudfront.distributionUrl) errors.push("CloudFront resource missing distributionUrl");
  }
  if (!state.app) errors.push("State missing app name");
  if (!state.environment) errors.push("State missing environment");
  if (!state.lastDeployed) errors.push("State missing lastDeployed timestamp");
  return { valid: errors.length === 0, errors };
}
// src/core/aws/s3-deployer.ts
// Deploys the built static site to an S3 bucket.
//
// Pipeline: detect build dir -> scan files -> ensure/tag bucket ->
// (optionally) diff against saved state for incremental upload ->
// upload with a progress bar -> print summary -> persist new state.
//
// config  - resolved scf config; config.s3 is required (throws otherwise).
// options - showProgress, dryRun, environment, useIncrementalDeploy,
//           forceFullDeploy, saveState (all optional; defaults below).
// Returns { totalFiles, uploaded, skipped, failed, totalSize,
//           compressedSize, duration, results }.
async function deployToS3(config, options = {}) {
  const startTime = Date.now();
  if (!config.s3) {
    throw new Error("S3 configuration is required");
  }
  // Bucket-level settings from config, with CLI defaults.
  const {
    bucketName,
    buildDir: providedBuildDir,
    indexDocument = "index.html",
    errorDocument,
    websiteHosting = true,
    gzip: gzip2 = true,
    concurrency = 10,
    exclude = []
  } = config.s3;
  // Invocation-level options.
  const {
    showProgress = true,
    dryRun = false,
    environment = "default",
    useIncrementalDeploy = true,
    forceFullDeploy = false,
    saveState: shouldSaveState = true
  } = options;
  let spinner = null;
  if (showProgress) {
    spinner = ora("Detecting build directory...").start();
  }
  const buildDir = getBuildDirectory(providedBuildDir);
  if (spinner) {
    spinner.succeed(`Build directory detected: ${chalk3.cyan(buildDir)}`);
  }
  if (showProgress) {
    spinner = ora("Scanning files...").start();
  }
  const files = await scanFiles({
    buildDir,
    exclude
  });
  const totalSize = calculateTotalSize(files);
  if (spinner) {
    spinner.succeed(
      `Found ${chalk3.cyan(files.length)} files (${chalk3.cyan(
        formatBytes(totalSize)
      )})`
    );
  }
  // An empty build directory almost always means the user forgot to build.
  if (files.length === 0) {
    throw new Error(
      `No files found in build directory: ${chalk3.cyan(buildDir)}
Please build your project first:
${chalk3.cyan("npm run build")}
${chalk3.cyan("yarn build")}
${chalk3.cyan("pnpm build")}`
    );
  }
  const s3Client = createS3Client(config);
  if (showProgress) {
    spinner = ora("Checking S3 bucket...").start();
  }
  // Create/configure the bucket (website hosting, public read) and tag it
  // so it can be identified/recovered later by app + environment.
  try {
    await ensureBucket(s3Client, bucketName, config.region, {
      websiteHosting,
      indexDocument,
      errorDocument,
      publicRead: true
    });
    await tagBucketForRecovery(s3Client, bucketName, config.app, environment);
    if (spinner) {
      spinner.succeed(`S3 bucket ready: ${chalk3.cyan(bucketName)}`);
    }
  } catch (error) {
    if (spinner) {
      spinner.fail("Failed to setup S3 bucket");
    }
    throw error;
  }
  // Previously saved deployment state (if any) enables incremental uploads.
  let state = loadState({ environment });
  let filesToUpload = files;
  if (useIncrementalDeploy && !forceFullDeploy && state) {
    if (showProgress) {
      spinner = ora("Analyzing file changes...").start();
    }
    const changes = compareFileHashes(files, state.files);
    if (spinner) {
      spinner.succeed("File changes analyzed");
    }
    console.log();
    console.log(formatFileChanges(changes));
    console.log();
    // Nothing changed: short-circuit with an all-skipped result.
    if (changes.totalChanges === 0) {
      console.log(
        chalk3.green("\u2728 No changes detected. Deployment not needed.")
      );
      return {
        totalFiles: files.length,
        uploaded: 0,
        skipped: files.length,
        failed: 0,
        totalSize,
        compressedSize: totalSize,
        duration: Date.now() - startTime,
        results: files.map((file) => ({
          file,
          success: true,
          status: "skipped"
        }))
      };
    }
    filesToUpload = getFilesToUpload(files, state.files);
    console.log(
      chalk3.blue(
        `\u{1F4E4} Uploading ${chalk3.cyan(filesToUpload.length)} changed files...
`
      )
    );
  } else {
    if (forceFullDeploy && showProgress) {
      console.log(chalk3.yellow("\u26A0 Force full deployment enabled\n"));
    }
    console.log(chalk3.blue("\n\u{1F4E4} Uploading files...\n"));
  }
  // Progress bar is suppressed for dry runs and when showProgress is off.
  let progressBar = null;
  if (showProgress && !dryRun) {
    progressBar = new cliProgress.SingleBar(
      {
        format: "Progress |" + chalk3.cyan("{bar}") + "| {percentage}% | {value}/{total} files | {current}",
        barCompleteChar: "\u2588",
        barIncompleteChar: "\u2591",
        hideCursor: true
      },
      cliProgress.Presets.shades_classic
    );
    // NOTE(review): bar total is files.length even on incremental deploys
    // where only filesToUpload.length files are sent — confirm intended.
    progressBar.start(files.length, 0, { current: "" });
  }
  const uploadResults = await uploadFiles(
    s3Client,
    bucketName,
    filesToUpload,
    {
      gzip: gzip2,
      concurrency,
      dryRun
    },
    // Progress callback: advance the bar and show the current file path.
    (completed, _total, currentFile) => {
      if (progressBar) {
        progressBar.update(completed, {
          current: currentFile.relativePath
        });
      }
    }
  );
  if (progressBar) {
    progressBar.stop();
  }
  // Tally per-file outcomes for the console summary and the return value.
  const uploaded = uploadResults.filter((r) => r.status === "uploaded").length;
  const skipped = uploadResults.filter((r) => r.status === "skipped").length;
  const failed = uploadResults.filter((r) => r.status === "failed").length;
  const uploadedFiles = uploadResults.filter((r) => r.success).map((r) => r.file);
  // NOTE(review): this is the byte total of successfully processed files,
  // not a measured post-gzip size; the "Compressed"/reduction figures below
  // can be misleading on incremental deploys — confirm intended.
  const compressedSize = calculateTotalSize(uploadedFiles);
  console.log();
  if (uploaded > 0) {
    console.log(chalk3.green(`\u2713 Uploaded: ${uploaded} files`));
  }
  if (skipped > 0) {
    console.log(chalk3.gray(`\u25CB Skipped: ${skipped} files (unchanged)`));
  }
  if (failed > 0) {
    console.log(chalk3.red(`\u2717 Failed: ${failed} files`));
    uploadResults.filter((r) => !r.success).forEach((r) => {
      console.log(chalk3.red(` - ${r.file.relativePath}: ${r.error}`));
    });
  }
  console.log();
  console.log(chalk3.gray(`Total size: ${formatBytes(totalSize)}`));
  if (gzip2) {
    const savings = totalSize - compressedSize;
    const savingsPercent = Math.round(savings / totalSize * 100);
    console.log(
      chalk3.gray(
        `Compressed: ${formatBytes(
          compressedSize
        )} (${savingsPercent}% reduction)`
      )
    );
  }
  const duration = Date.now() - startTime;
  console.log(chalk3.gray(`Duration: ${(duration / 1e3).toFixed(2)}s`));
  // Website URL exists only when static website hosting is enabled.
  const websiteUrl = websiteHosting ? getBucketWebsiteUrl(bucketName, config.region) : void 0;
  if (websiteHosting && !dryRun) {
    console.log();
    console.log(chalk3.green("\u{1F310} Website URL:"), chalk3.cyan(websiteUrl));
  }
  // Persist state (resource identifiers + file hashes) so the next run can
  // deploy incrementally. Skipped for dry runs and no-upload deploys.
  if (shouldSaveState && !dryRun && uploaded > 0) {
    if (!state) {
      state = getOrCreateState(config.app, { environment });
    }
    state = updateS3Resource(state, {
      bucketName,
      region: config.region,
      websiteUrl
    });
    state = updateFileHashes(state, files);
    try {
      saveState(state, { environment });
      if (showProgress) {
        console.log();
        console.log(
          chalk3.gray(
            `\u2713 State saved (.deploy/state${environment !== "default" ? `.${environment}` : ""}.json)`
          )
        );
      }
    } catch (error) {
      // State-save failure is non-fatal: warn, but still return the result.
      console.log();
      console.log(chalk3.yellow(`\u26A0 Failed to save state: ${error.message}`));
    }
  }
  return {
    totalFiles: files.length,
    uploaded,
    skipped,
    failed,
    totalSize,
    compressedSize,
    duration,
    results: uploadResults
  };
}
// src/core/aws/cloudfront-distribution.ts
import {
GetDistributionCommand,
CreateDistributionCommand,
UpdateDistributionCommand,
GetDistributionConfigCommand,
waitUntilDistributionDeployed
} from "@aws-sdk/client-cloudfront";
// Checks whether a CloudFront distribution exists. Returns true on a
// successful GetDistribution call, false when the service reports
// not-found (by error name or HTTP 404), and rethrows any other error.
async function distributionExists(client, distributionId) {
  try {
    await client.send(new GetDistributionCommand({ Id: distributionId }));
    return true;
  } catch (error) {
    const isObject = error && typeof error === "object";
    const notFoundByName = isObject && "name" in error && error.name === "NoSuchDistribution";
    const notFoundByStatus = isObject && "$metadata" in error && typeof error.$metadata === "object" && error.$metadata !== null && "httpStatusCode" in error.$metadata && error.$metadata.httpStatusCode === 404;
    if (notFoundByName || notFoundByStatus) {
      return false;
    }
    throw error;
  }
}
// Fetches a CloudFront distribution by id. Returns the Distribution
// object, or null when the distribution does not exist (error name
// NoSuchDistribution or HTTP 404); any other error is rethrown.
async function getDistribution(client, distributionId) {
  try {
    const response = await client.send(
      new GetDistributionCommand({ Id: distributionId })
    );
    return response.Distribution || null;
  } catch (error) {
    const isObject = error && typeof error === "object";
    const notFoundByName = isObject && "name" in error && error.name === "NoSuchDistribution";
    const notFoundByStatus = isObject && "$metadata" in error && typeof error.$metadata === "object" && error.$metadata !== null && "httpStatusCode" in error.$metadata && error.$metadata.httpStatusCode === 404;
    if (notFoundByName || notFoundByStatus) {
      return null;
    }
    throw error;
  }
}
// Builds the S3 static-website endpoint hostname used as the CloudFront
// origin. AWS uses two endpoint formats: legacy regions use a dash
// ("bucket.s3-website-REGION.amazonaws.com") while newer regions use a dot
// ("bucket.s3-website.REGION.amazonaws.com").
// Fix: previously only us-east-1 got the dash format, producing
// non-resolving hostnames for the other legacy regions (e.g. us-west-2,
// eu-west-1). us-east-1 behavior is unchanged.
function getS3OriginDomain(bucketName, region) {
  // Regions documented by AWS as using the legacy dash website endpoint.
  const DASH_FORMAT_REGIONS = new Set([
    "us-east-1",
    "us-west-1",
    "us-west-2",
    "ap-southeast-1",
    "ap-southeast-2",
    "ap-northeast-1",
    "eu-west-1",
    "sa-east-1",
    "us-gov-west-1"
  ]);
  if (DASH_FORMAT_REGIONS.has(region)) {
    return `${bucketName}.s3-website-${region}.amazonaws.com`;
  }
  return `${bucketName}.s3-website.${region}.amazonaws.com`;
}
// Creates a CloudFront distribution fronting an S3 website endpoint.
//
// options: s3BucketName, s3Region (required); indexDocument, customDomain
// ({ domainName, certificateArn, aliases? }), priceClass, defaultTTL,
// maxTTL, minTTL, ipv6 (optional, with defaults below).
// Returns the created Distribution; throws if the API returns none.
//
// Fix: when customDomain.aliases was an empty array, the original code set
// Aliases.Quantity = 1 (0 || 1) while Items stayed [] (an empty array is
// truthy), producing an inconsistent Aliases block. Aliases now fall back
// to the bare domain name and Quantity is derived from the actual list.
async function createDistribution(client, options) {
  const {
    s3BucketName,
    s3Region,
    indexDocument = "index.html",
    customDomain,
    priceClass = "PriceClass_100",
    defaultTTL = 86400,
    // 1 day
    maxTTL = 31536e3,
    // 1 year
    minTTL = 0,
    ipv6 = true
  } = options;
  const originDomain = getS3OriginDomain(s3BucketName, s3Region);
  // Unique token CloudFront uses to de-duplicate create requests.
  const callerReference = `scf-${Date.now()}`;
  const distributionConfig = {
    CallerReference: callerReference,
    Comment: `Created by SCF for ${s3BucketName}`,
    Enabled: true,
    DefaultRootObject: indexDocument,
    Origins: {
      Quantity: 1,
      Items: [
        {
          Id: `S3-${s3BucketName}`,
          DomainName: originDomain,
          // S3 website endpoints only speak HTTP, so the origin must be a
          // custom origin with an http-only protocol policy.
          CustomOriginConfig: {
            HTTPPort: 80,
            HTTPSPort: 443,
            OriginProtocolPolicy: "http-only",
            OriginSslProtocols: {
              Quantity: 1,
              Items: ["TLSv1.2"]
            }
          }
        }
      ]
    },
    DefaultCacheBehavior: {
      TargetOriginId: `S3-${s3BucketName}`,
      ViewerProtocolPolicy: "redirect-to-https",
      AllowedMethods: {
        Quantity: 2,
        Items: ["GET", "HEAD"],
        CachedMethods: {
          Quantity: 2,
          Items: ["GET", "HEAD"]
        }
      },
      Compress: true,
      ForwardedValues: {
        QueryString: false,
        Cookies: {
          Forward: "none"
        },
        Headers: {
          Quantity: 0
        }
      },
      MinTTL: minTTL,
      DefaultTTL: defaultTTL,
      MaxTTL: maxTTL,
      TrustedSigners: {
        Enabled: false,
        Quantity: 0
      }
    },
    PriceClass: priceClass,
    IsIPV6Enabled: ipv6
  };
  if (customDomain) {
    // Use the supplied aliases when at least one is present; otherwise
    // serve the distribution under the bare custom domain name.
    const aliasItems = customDomain.aliases && customDomain.aliases.length > 0 ? customDomain.aliases : [customDomain.domainName];
    distributionConfig.Aliases = {
      Quantity: aliasItems.length,
      Items: aliasItems
    };
    distributionConfig.ViewerCertificate = {
      ACMCertificateArn: customDomain.certificateArn,
      SSLSupportMethod: "sni-only",
      MinimumProtocolVersion: "TLSv1.2_2021"
    };
  } else {
    // No custom domain: use the default *.cloudfront.net certificate.
    distributionConfig.ViewerCertificate = {
      CloudFrontDefaultCertificate: true
    };
  }
  const command = {
    DistributionConfig: distributionConfig
  };
  const response = await client.send(new CreateDistributionCommand(command));
  if (!response.Distribution) {
    throw new Error("Failed to create distribution: No distribution returned");
  }
  return response.Distribution;
}
async function updateDistribution(client, distributionId, updates) {
const configResponse = await client.send(
new GetDistributionConfigCommand({
Id: distributionId
})
);
const currentConfig = configResponse.DistributionConfig;
const etag = configResponse.ETag;
if (!currentConfig || !etag) {
throw new Error("Failed to get distribution configuration");
}
if (updates.priceClass) {
currentConfig.PriceClass = updates.priceClass;
}
if (!currentConfig.DefaultCacheBehavior) {
throw new Error("Distribution configuration missing DefaultCacheBehavior");
}
if (updates.defaultTTL !== void 0) {
currentConfig.DefaultCacheBehavior.DefaultTTL = updates.defaultTTL;
}
if (updates.maxTTL !== void 0) {
currentConfig.DefaultCacheBehavior.MaxTTL = updates.maxTTL;
}
if (updates.minTTL !== void 0) {
currentConfig.DefaultCacheBehavior.MinTTL = updates.minTTL;
}
if (updates.ipv6 !== void 0) {
currentConfig.IsIPV6Enabled = updates.ipv6;
}
if (updates.customDomain) {
currentConfig.Aliases = {
Quantity: updates.customDomain.aliases?.length || 1,
Items: updates.customDomain.aliases || [updates.customDomain.domainName]
};
currentConfig.ViewerCertificate = {
ACMCertificateArn: updates.customDomain.certificateArn,
SSLSupportMethod: "sni-only",
MinimumProtocolVersion: "TLSv1.2_2021"
};
}
const command = {
Id: distributionId,
DistributionConfig: currentConfig,
IfMatch: etag
};
const response = await client.send(new UpdateDistributionCommand(command));
if (!response.Distribution) {
throw new Error("Failed to update distribution: No distribution returned");
}
return response.Dis