// superflare: A full-stack framework for Cloudflare Workers.
import {
__require,
modelToTableName,
toSnakeCase
} from "./chunk-CPBR7YIF.mjs";
// cli.ts
import makeCLI from "yargs";
import { hideBin } from "yargs/helpers";
// cli/logger.ts
import { format } from "node:util";
import CLITable from "cli-table3";
import chalk from "chalk";
var Logger = class {
constructor() {
this.debug = (...args) => this.doLog("debug", args);
this.info = (...args) => this.doLog("info", args);
this.log = (...args) => this.doLog("log", args);
this.warn = (...args) => this.doLog("warn", args);
this.error = (...args) => this.doLog("error", args);
}
table(data) {
const keys = data.length === 0 ? [] : Object.keys(data[0]);
const t = new CLITable({
head: keys.map((k) => chalk.bold.blue(k))
});
t.push(...data.map((row) => keys.map((k) => row[k])));
return this.doLog("log", [t.toString()]);
}
doLog(messageLevel, args) {
console[messageLevel](format(...args));
}
};
var logger = new Logger();
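// Illustrative usage (not executed here): `logger.table` renders rows with cli-table3,
// using the keys of the first row as bold blue column headers, e.g.
//
//   logger.table([{ model: "Post", status: "synced" }]);
//   // prints a boxed table with "model" / "status" headers and one row: Post | synced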
// cli/generate/job.ts
import { writeFile } from "node:fs/promises";
import path from "node:path";
// cli/stubs/job.stub.ts
function jobTemplate(name) {
return `import { Job } from "superflare";
export class ${name} extends Job {
constructor() {
super();
}
async handle(): Promise<void> {
// Handle the job
}
}
Job.register(${name});
`;
}
// cli/generate/job.ts
function jobOptions(arg) {
return arg.positional("name", {
type: "string",
demandOption: true,
description: "The name of the Job to generate"
}).option("path", {
type: "string",
description: "The path to generate the Job in",
default: path.join(process.cwd(), "app", "jobs")
});
}
async function jobHandler(yargs) {
logger.log(`Generating Job ${yargs.name}`);
const output = jobTemplate(yargs.name);
const jobPath = path.join(yargs.path, `${yargs.name}.ts`);
await writeFile(jobPath, output);
logger.log(`Generated Job ${yargs.name} at ${jobPath}`);
}
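// Illustrative CLI usage (wired up via the `generate` command further below):
//
//   $ npx superflare generate job ProcessPayment
//   // writes app/jobs/ProcessPayment.ts containing `export class ProcessPayment extends Job { ... }`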
// cli/generate/migration.ts
import { mkdir as mkdir2, readdir as readdir2, writeFile as writeFile3 } from "node:fs/promises";
import { join } from "node:path";
// cli/migrate.ts
import path4 from "node:path";
import fs2, { mkdir, readdir, writeFile as writeFile2 } from "node:fs/promises";
import fsSync from "node:fs";
// cli/wrangler.ts
import { spawn } from "node:child_process";
async function runWranglerCommand(command) {
let stdout = "";
let stderr = "";
const child = spawn("npx", ["wrangler@latest", ...command], {
shell: true,
env: {
...process.env,
// TODO: Remove when D1 is stable.
NO_D1_WARNING: "true"
}
});
child.stderr.on("data", (data) => {
stderr += data;
});
child.stdout.on("data", (data) => {
stdout += data;
});
return new Promise((resolve, reject) => {
child.on("close", (code) => {
if (code === 0) {
resolve({ code, stdout, stderr });
return;
}
reject({ code, stdout, stderr });
});
});
}
async function wranglerMigrate() {
return await runWranglerCommand([
"d1",
"migrations",
"apply",
"DB",
"--local",
"-j"
]);
}
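// Illustrative sketch of what `wranglerMigrate()` runs under the hood:
//
//   npx wrangler@latest d1 migrations apply DB --local -j
//
// It resolves with `{ code, stdout, stderr }` on exit code 0 and rejects with the same shape otherwise.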
// cli/d1-types.ts
import pluralize from "pluralize";
import fs from "fs";
import path2 from "path";
var SUPERFLARE_TYPES_FILE = "superflare.env.d.ts";
function modelTypesAsInterface(modelClass, types) {
const interfaceName = modelClass + "Row";
const typesAsString = types.map(
(type) => ` ${type.name}${type.nullable ? "?" : ""}: ${type.type};`
);
return `interface ${interfaceName} {
${typesAsString.join("\n")}
}`;
}
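// Illustrative output (table and column names are examples): a `posts` table with a non-null
// integer `id` primary key and a nullable text `title` yields roughly:
//
//   interface PostRow {
//     id: number;
//     title?: string;
//   }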
var ignoreSqliteTable = (table) => table.startsWith("sqlite_") || table === "d1_migrations";
function generateTypesFromSqlite(db2) {
const tableList = db2.prepare("PRAGMA table_list").all().filter(
(table) => !ignoreSqliteTable(table.name)
);
const types = [];
for (const table of tableList) {
const tableInfo = db2.prepare(`PRAGMA table_info(${table.name})`).all();
const tableTypes = [];
for (const column of tableInfo) {
const type = sqliteColumnTypeToSuperflareType(column.type.toLowerCase());
tableTypes.push({
name: column.name,
type,
nullable: column.pk ? false : !column.notnull
});
}
types.push({ model: tableNameToModel(table.name), types: tableTypes });
}
return types;
}
function sqliteColumnTypeToSuperflareType(type) {
switch (type) {
case "integer":
return "number";
case "boolean":
return "boolean";
case "text":
default:
return "string";
}
}
function tableNameToModel(tableName) {
return tableName.split("_").map((part) => part[0].toUpperCase() + part.slice(1)).map((part) => pluralize.singular(part)).join("");
}
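// Example: "user_profiles" -> split on "_" -> ["user", "profiles"] -> capitalize each part ->
// singularize via pluralize -> "UserProfile".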
function syncSuperflareTypes(pathToRoot, pathToModels, types, options = {}) {
let typesFileSources = [];
const results = types.map((type) => {
try {
typesFileSources.push(modelTypesAsInterface(type.model, type.types));
fs.readFileSync(`${pathToModels}/${type.model}.ts`, "utf-8");
return {
model: type.model,
status: "synced"
};
} catch (e) {
if (options == null ? void 0 : options.createIfNotFound) {
const modelSource = `import { Model } from 'superflare';
export class ${type.model} extends Model {
toJSON(): ${type.model}Row {
return super.toJSON();
}
}
Model.register(${type.model});
export interface ${type.model} extends ${type.model}Row {};`;
fs.writeFileSync(`${pathToModels}/${type.model}.ts`, modelSource);
return {
model: type.model,
status: "created"
};
}
return {
model: type.model,
status: "not-found"
};
}
});
const typesBanner = `// This file is automatically generated by Superflare. Do not edit directly.
`;
const typesFileSource = typesBanner + typesFileSources.join("\n\n");
fs.writeFileSync(
path2.join(pathToRoot, SUPERFLARE_TYPES_FILE),
typesFileSource
);
return results;
}
// cli/db/seed.ts
import { register } from "esbuild-register/dist/node";
import path3 from "node:path";
// cli/d1-database.ts
async function createD1Database(sqliteDbPath, logger2 = console.log) {
const { npxImport } = await import("npx-import");
const [{ D1Database, D1DatabaseAPI }, { createSQLiteDB: createSQLiteDB2 }] = await npxImport(["@miniflare/d1", "@miniflare/shared"], logger2);
const sqliteDb = await createSQLiteDB2(sqliteDbPath);
return new D1Database(new D1DatabaseAPI(sqliteDb));
}
async function createSQLiteDB(dbPath, logger2 = console.log) {
const { npxImport } = await import("npx-import");
const { createSQLiteDB: create } = await npxImport("@miniflare/shared", logger2);
return create(dbPath);
}
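// Illustrative usage (a sketch; assumes a local SQLite file created by wrangler already exists):
//
//   const db = await createD1Database(".wrangler/state/d1/db.sqlite");
//   const { results } = await db.prepare("SELECT name FROM sqlite_master").all();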
// cli/db/seed.ts
function seedOptions(yargs) {
return yargs.option("db", {
alias: "d",
describe: "Path to the database",
// Default to the path in the .wrangler directory
default: path3.join(
process.cwd(),
".wrangler",
"state",
"d1",
"db.sqlite"
)
}).option("seed-path", {
describe: "Path to the seed file",
default: path3.join(process.cwd(), "db", "seed.ts")
});
}
async function seedHandler(argv) {
const dbPath = argv.db;
const seedPath = argv.seedPath;
await seedDb(dbPath, seedPath);
}
async function seedDb(dbPath, seedPath) {
if (seedPath) {
logger.info(`Seeding database...`);
register();
try {
const seedModule = __require(seedPath);
const d1Database = await createD1Database(dbPath, logger.log);
if (seedModule.default) {
await seedModule.default(d1Database);
logger.info(`Seeding complete!`);
} else {
logger.warn(`Warning: Did not find a default export in ${seedPath}.`);
}
} catch (e) {
logger.error(`Error seeding database: ${e.message}`);
}
}
}
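// Illustrative seed file (db/seed.ts; table and column names are hypothetical). `seedDb`
// expects a default export that receives the D1 database instance, roughly:
//
//   export default async function seed(db) {
//     await db.prepare("INSERT INTO posts (title) VALUES (?)").bind("Hello world").run();
//   }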
// cli/migrate.ts
import { register as register2 } from "esbuild-register/dist/node";
function defaultSuperflareMigrationsPath(rootPath = process.cwd()) {
return path4.join(rootPath, "db", "migrations");
}
function defaultSuperflareDatabasePath(rootPath = process.cwd()) {
var _a, _b;
try {
const wranglerConfig = fsSync.readFileSync(
path4.join(rootPath, "wrangler.json"),
"utf8"
);
const wranglerConfigJson = JSON.parse(wranglerConfig);
const d1DatabaseId = (_b = (_a = wranglerConfigJson == null ? void 0 : wranglerConfigJson.d1_databases) == null ? void 0 : _a[0]) == null ? void 0 : _b.database_id;
return path4.join(
rootPath,
".wrangler",
"state",
"v3",
"d1",
d1DatabaseId,
"db.sqlite"
);
} catch (e) {
return path4.join(
rootPath,
".wrangler",
"state",
"d1",
"db.sqlite"
);
}
}
function migrateOptions(yargs) {
return yargs.option("db", {
alias: "d",
describe: "Path to the database",
// Default to the path in the .wrangler directory
default: defaultSuperflareDatabasePath()
}).option("models", {
alias: "m",
describe: "Path to the models directory",
// Default to the path in the app directory
default: path4.join(process.cwd(), "app", "models")
}).option("superflare-migrations", {
describe: "Path to the Superflare migrations directory",
// Default to the path in the app directory
default: defaultSuperflareMigrationsPath()
}).option("wrangler-migrations", {
describe: "Path to the Wrangler migrations directory",
// Default to the path in the app directory
default: path4.join(process.cwd(), "migrations")
}).option("create", {
boolean: true,
alias: "c",
describe: "Create a model if it doesn't exist",
default: false
}).option("fresh", {
alias: "f",
boolean: true,
default: false,
describe: "Run a fresh migration by dropping the existing database"
}).option("seed", {
alias: "s",
describe: "Seed the database after migrating",
boolean: true,
default: false
}).option("seed-path", {
describe: "Path to the seed file",
default: path4.join(process.cwd(), "db", "seed.ts")
});
}
async function migrateHandler(argv) {
const fresh = argv.fresh;
const modelsDirectory = argv.models;
const dbPath = argv.db;
const superflareMigrationsPath = argv.superflareMigrations;
const wranglerMigrationsPath = argv.wranglerMigrations;
logger.info(`Compiling migrations...`);
await compileMigrations(superflareMigrationsPath, wranglerMigrationsPath);
if (fresh) {
logger.info("Dropping existing database...");
try {
await fs2.rm(dbPath);
} catch (_e) {
}
}
logger.info(`Migrating database...`);
try {
const results2 = await wranglerMigrate();
logger.info(results2.stdout);
} catch (e) {
logger.error(
"\u274C An error occurred while running wrangler migrations:\n" + e.stderr || e.stdout || e.message
);
process.exit(1);
}
const db2 = await createSQLiteDB(dbPath, logger.log);
const seed = argv.seed;
const seedPath = argv.seedPath;
if (seed && seedPath) {
await seedDb(dbPath, seedPath);
}
logger.info("Generating types from database...");
const types = generateTypesFromSqlite(db2);
const results = syncSuperflareTypes(process.cwd(), modelsDirectory, types, {
createIfNotFound: argv.create
});
logger.table(results);
logger.info("Done!");
}
async function compileMigrations(pathToSuperflareMigrations, pathToWranglerMigrations) {
const { unregister } = register2({});
await mkdir(pathToSuperflareMigrations, { recursive: true });
const migrations = (await readdir(pathToSuperflareMigrations)).filter((filename) => filename.endsWith(".ts")).map((filename) => {
const migration = __require(path4.join(
pathToSuperflareMigrations,
filename
));
return {
filename,
schema: migration.default()
};
});
await mkdir(pathToWranglerMigrations, { recursive: true });
for (const migration of migrations) {
const sql = Array.isArray(migration.schema) ? migration.schema.map((s) => s.toSql()).join("\n\n") : migration.schema.toSql();
const migrationNumber = migration.filename.split(/_/)[0];
const timestamp = (/* @__PURE__ */ new Date()).toISOString();
let banner = `-- Migration number: ${migrationNumber} ${timestamp}
`;
banner += `-- Autogenerated by Superflare. Do not edit this file directly.`;
const contents = `${banner}
${sql}`;
await writeFile2(
path4.join(
pathToWranglerMigrations,
migration.filename.replace(".ts", ".sql")
),
contents
);
}
unregister();
}
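// Illustrative result (file names are hypothetical): a TypeScript migration at
// db/migrations/0000_create_posts.ts is compiled to migrations/0000_create_posts.sql, e.g.
//
//   -- Migration number: 0000 2024-01-01T00:00:00.000Z
//   -- Autogenerated by Superflare. Do not edit this file directly.
//   CREATE TABLE ...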
// cli/stubs/migration.stub.ts
function blankMigration(contents = "// return ...") {
return `import { Schema } from 'superflare';
export default function () {
${contents}
}`;
}
// cli/generate/migration.ts
function migrationOptions(yargs) {
return yargs.positional("name", {
describe: "The name of the migration",
type: "string"
}).option("db", {
describe: "The local database binding to use for the migration",
default: ""
});
}
async function migrationHandler(argv) {
const name = argv.name;
await generateMigration(name);
}
async function generateMigration(name, rootPath = process.cwd()) {
const wranglerMigrationsPath = join(rootPath, "migrations");
let nextMigrationNumberInteger = 0;
try {
const existingMigrations = await readdir2(wranglerMigrationsPath);
nextMigrationNumberInteger = getNextMigrationNumber(existingMigrations);
} catch (_e) {
}
const nextMigrationNumber = nextMigrationNumberInteger.toString().padStart(4, "0");
const migrationName = `${nextMigrationNumber}_${toSnakeCase(name)}`;
const migrationsPath = defaultSuperflareMigrationsPath(rootPath);
await mkdir2(migrationsPath, { recursive: true });
const migrationPath = join(migrationsPath, `${migrationName}.ts`);
await writeFile3(migrationPath, blankMigration());
logger.info(`Migration generated at ${migrationPath}`);
}
function getNextMigrationNumber(existingMigrations) {
if (!existingMigrations.length) {
return 0;
}
const mostRecentMigration = existingMigrations.filter((migration) => migration.match(/^[0-9]{4}_/)).sort().pop();
const mostRecentMigrationNumber = mostRecentMigration == null ? void 0 : mostRecentMigration.split("_")[0];
if (!mostRecentMigrationNumber) {
throw new Error("Could not determine most recent migration number");
}
return parseInt(mostRecentMigrationNumber, 10) + 1;
}
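// Example: given ["0000_create_posts.sql", "0001_create_users.sql"], the most recent prefix
// is "0001", so this returns 2 (which the caller pads to "0002").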
// cli/generate/model.ts
import { mkdir as mkdir3, readFile, writeFile as writeFile4 } from "node:fs/promises";
import path5 from "node:path";
// cli/stubs/model.stub.ts
function modelTemplate(name) {
return `import { Model } from "superflare";
export class ${name} extends Model {
toJSON(): ${name}Row {
return super.toJSON();
}
}
Model.register(${name});
export interface ${name} extends ${name}Row {}
`;
}
// cli/generate/model.ts
function modelOptions(yargs) {
return yargs.option("name", {
type: "string",
description: "Name of the model",
required: true
}).option("path", {
type: "string",
description: "The path to generate the Model in",
default: path5.join(process.cwd(), "app", "models")
}).option("migration", {
alias: "m",
type: "boolean",
description: "Generate a migration for the model",
default: false
});
}
async function modelHandler(argv) {
const { name } = argv;
logger.log(`Generating model ${name}`);
const output = modelTemplate(name);
const modelPath = path5.join(argv.path, `${name}.ts`);
await mkdir3(path5.dirname(modelPath), { recursive: true });
await writeFile4(modelPath, output);
const typeFilePath = path5.join(process.cwd(), SUPERFLARE_TYPES_FILE);
try {
let contents = await readFile(typeFilePath, "utf-8");
contents += `
interface ${name}Row {};`;
await writeFile4(typeFilePath, contents);
} catch (_e) {
const contents = `interface ${name}Row {};`;
await writeFile4(typeFilePath, contents);
}
logger.log(`Generated model ${name} at ${modelPath}`);
if (argv.migration) {
const tableName = modelToTableName(name);
const migrationName = `create_${tableName}`;
await generateMigration(migrationName);
}
}
// cli/generate.ts
function generate(yargs) {
return yargs.command(
"job <name>",
"Generate a Job",
// @ts-expect-error: yargs builder typings don't line up with this handler's argv shape
jobOptions,
jobHandler
).command(
"migration <name>",
"Generate a Migration",
// @ts-expect-error: yargs builder typings don't line up with this handler's argv shape
migrationOptions,
migrationHandler
).command(
"model <name>",
"Generate a Model",
// @ts-expect-error: yargs builder typings don't line up with this handler's argv shape
modelOptions,
modelHandler
);
}
// cli/console.ts
import { start } from "node:repl";
import { readdir as readdir3 } from "node:fs/promises";
import { register as register3 } from "esbuild-register/dist/node";
import { homedir } from "node:os";
import { inspect } from "node:util";
import path6 from "node:path";
function consoleOptions(yargs) {
return yargs.option("db", {
alias: "d",
describe: "Path to the database",
// Default to the path in the .wrangler directory
default: path6.join(
process.cwd(),
".wrangler",
"state",
"d1",
"db.sqlite"
)
}).option("models", {
alias: "m",
describe: "Path to the models directory",
// Default to the path in the app directory
default: path6.join(process.cwd(), "app", "models")
});
}
async function consoleHandler(argv) {
const modelsDirectory = argv.models;
const dbPath = argv.db;
return createRepl({ modelsDirectory, dbPath });
}
async function createRepl({
modelsDirectory,
dbPath
}) {
register3();
const server = start({
prompt: ">> ",
input: process.stdin,
output: process.stdout,
terminal: process.stdout.isTTY && !parseInt(process.env.NODE_NO_READLINE, 10),
useGlobal: true,
writer: (output) => {
return inspect(output, {
colors: true,
showProxy: false
}).split("\n").map((line, idx) => idx === 0 ? "=> " + line : " " + line).join("\n") + "\n";
}
});
const historyPath = `${homedir()}/.superflare_history`;
server.setupHistory(historyPath, () => {
});
const db2 = await createD1Database(dbPath);
server.context["db"] = db2;
server.eval(
`const {setConfig} = require('superflare'); setConfig({database: { default: db }});`,
server.context,
"repl",
() => {
}
);
const models = (await readdir3(modelsDirectory)).filter(
// Must be a TS file and start with a capital letter
(file) => file.endsWith(".ts") && /^[A-Z]/.test(file)
);
for (const modelFileName of models) {
const module = __require(`${modelsDirectory}/${modelFileName}`);
const model = modelFileName.replace(".ts", "");
server.context[model] = module[model];
}
server.displayPrompt();
return server;
}
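// Illustrative console session (model names depend on your app/models directory):
//
//   $ npx superflare console
//   >> db     // the local D1Database wired into setConfig above
//   >> User   // each model class found in app/models is attached to the REPL context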
// cli/dev.ts
import { spawn as spawn2 } from "node:child_process";
// cli/config.ts
import { register as register4 } from "esbuild-register/dist/node";
import path7 from "node:path";
async function getSuperflareConfig(workingDir, logger2) {
var _a, _b;
register4();
const envProxies = {};
const ctxStub = {
env: new Proxy(
{},
{
get: (_target, prop) => {
envProxies[prop] = Symbol(prop);
return envProxies[prop];
}
}
)
};
try {
const config = __require(path7.join(workingDir, "superflare.config.ts"));
const results = config.default(ctxStub);
const flippedEnvProxies = Object.entries(envProxies).reduce(
(acc, [key, value]) => {
acc[value] = key;
return acc;
},
{}
);
const d1Bindings = Object.keys((_a = results == null ? void 0 : results.database) != null ? _a : {}).map((key) => {
const binding = results.database[key];
if (typeof binding === "symbol") {
return flippedEnvProxies[binding];
}
}).filter(Boolean);
const r2Bindings = Object.keys((_b = results == null ? void 0 : results.storage) != null ? _b : {}).map((key) => {
const binding = results.storage[key].binding;
if (typeof binding === "symbol") {
return flippedEnvProxies[binding];
}
}).filter(Boolean);
return {
d1: d1Bindings,
r2: r2Bindings
};
} catch (e) {
logger2 == null ? void 0 : logger2.debug(`Error loading superflare.config.ts: ${e.message}`);
return null;
}
}
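// Illustrative superflare.config.ts that this introspection understands (binding names are
// examples). Because `ctx.env` is a Proxy that records property access, merely referencing
// `ctx.env.DB` or `ctx.env.BUCKET` lets the CLI discover the D1 and R2 binding names:
//
//   import { defineConfig } from "superflare";
//   export default defineConfig((ctx) => ({
//     appKey: ctx.env.APP_KEY,
//     database: { default: ctx.env.DB },
//     storage: { default: { binding: ctx.env.BUCKET } },
//   }));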
async function getWranglerJsonConfig(workingDir, logger2) {
try {
const config = __require(path7.join(workingDir, "wrangler.json"));
return config;
} catch (e) {
logger2 == null ? void 0 : logger2.debug(`Error loading wrangler.json: ${e.message}`);
return null;
}
}
// cli/dev.ts
function devOptions(yargs) {
return yargs.option("mode", {
type: "string",
choices: ["workers", "pages"],
description: "Whether to run in workers or pages mode. Defaults to workers.",
default: "workers"
}).positional("entrypoint", {
type: "string",
description: "The entrypoint to use for the workers dev server."
}).option("compatibility-date", {
type: "string",
description: "The date to use for compatibility mode. Defaults to the current date.",
default: (/* @__PURE__ */ new Date()).toISOString().split("T")[0]
}).option("port", {
type: "number",
description: "The port to run the dev server on. Defaults to 8788.",
default: 8788
}).option("binding", {
alias: "b",
type: "array",
description: "A binding to pass to the dev command. Can be specified multiple times. Works for both workers and pages.",
default: []
}).option("live-reload", {
type: "boolean",
description: "Whether to enable live reload. Defaults to true.",
default: true
});
}
async function devHandler(argv) {
var _a;
const isPagesMode = argv.mode === "pages";
const isWorkersMode = !isPagesMode;
logger.info(
`Starting "wrangler" in ${isPagesMode ? "pages" : "workers"} mode...`
);
const config = await getSuperflareConfig(process.cwd(), logger);
if (!config) {
logger.warn(
"Warning: Did not find a `superflare.config.ts` in your project. You will want to add one in order to specify D1 and R2 bindings for Superflare to use."
);
}
const d1Bindings = config == null ? void 0 : config.d1;
if (isPagesMode && d1Bindings && Array.isArray(d1Bindings) && d1Bindings.length) {
logger.info(`Using D1 binding: ${d1Bindings.join(", ")}`);
}
const r2Bindings = config == null ? void 0 : config.r2;
if (isPagesMode && r2Bindings && Array.isArray(r2Bindings) && r2Bindings.length) {
logger.info(`Using R2 bindings: ${r2Bindings.join(", ")}`);
}
const wranglerJsonConfig = await getWranglerJsonConfig(process.cwd(), logger);
const workersEntrypoint = (_a = argv.entrypoint) != null ? _a : wranglerJsonConfig == null ? void 0 : wranglerJsonConfig.main;
if (isWorkersMode && !workersEntrypoint) {
logger.error(
"Error: You must set a `main` value pointing to your entrypoint in your `wrangler.json` in order to run in workers mode."
);
process.exit(1);
}
const normalizedBindings = argv.binding.map((binding) => {
if (typeof binding !== "string")
return binding;
const [key, value] = binding.split("=");
return isWorkersMode ? `${key}:${value}` : `${key}=${value}`;
});
const args = [
"wrangler",
isPagesMode && "pages",
"dev",
isPagesMode && "public",
isWorkersMode && workersEntrypoint,
isWorkersMode && "--site public",
"--compatibility-date",
argv.compatibilityDate,
"---compatibility-flag",
"nodejs_compat",
"--port",
argv.port,
isPagesMode && (d1Bindings == null ? void 0 : d1Bindings.length) && d1Bindings.map((d1Binding) => `--d1 ${d1Binding}`).join(" "),
isPagesMode && (r2Bindings == null ? void 0 : r2Bindings.length) && r2Bindings.map((r2Binding) => `--r2 ${r2Binding}`).join(" "),
...normalizedBindings.map(
(binding) => `--${isPagesMode ? "binding" : "var"} ${binding}`
),
"--local",
"--persist",
"--experimental-json-config",
"--test-scheduled",
argv.liveReload && "--live-reload"
].filter(Boolean);
spawn2("npx", args, {
stdio: "inherit",
shell: true,
env: {
...process.env,
// TODO: Remove this when D1 is stable
NO_D1_WARNING: "true"
}
});
}
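// Illustrative command assembled above (pages mode with a D1 binding named DB; values are
// examples), roughly equivalent to:
//
//   npx wrangler pages dev public --compatibility-date 2024-01-01 \
//     --compatibility-flag nodejs_compat --port 8788 --d1 DB \
//     --local --persist --experimental-json-config --test-scheduled --live-reload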
// cli/db/index.ts
function db(yargs) {
return yargs.command(
"seed",
"\u{1F331} Seed your database with data",
seedOptions,
seedHandler
);
}
// cli/new.ts
import {
confirm,
cancel,
intro,
isCancel,
multiselect,
outro,
spinner,
text,
note
} from "@clack/prompts";
import { cp, mkdtemp, readFile as readFile2, rmdir, writeFile as writeFile5 } from "fs/promises";
import { join as join2, normalize } from "path";
import { tmpdir } from "os";
import { pipeline } from "stream/promises";
import gunzipMaybe from "gunzip-maybe";
import { extract } from "tar-fs";
import { spawn as spawn3 } from "child_process";
import { randomBytes } from "crypto";
function newOptions(yargs) {
return yargs.option("template", {
alias: "t",
type: "string",
description: "The template to use",
default: "remix"
}).option("ref", {
type: "string",
description: "Optional GitHub ref to use for templates"
}).positional("name", {
type: "string",
description: "The name of the app to create"
});
}
async function newHandler(argv) {
intro(`Create a new Superflare app`);
const s = spinner();
s.start(
"Welcome! Checking to make sure you have the Wrangler CLI installed and authenticated..."
);
if (!await ensureWranglerAuthenticated()) {
s.stop("Hmm. Looks like you're not logged in yet.");
const wantsToLogIn = await confirm({
message: "You need to be logged into Wrangler to create a Superflare app. Log in now?"
});
if (isCancel(wantsToLogIn) || !wantsToLogIn) {
cancel(
"You need to be logged into Wrangler to be able to create a Superflare app."
);
process.exit(0);
}
await wranglerLogin();
}
s.stop("Everything looks good!");
note(
"Before using R2, Queues, and Durable objects,\nmake sure you've enabled them in the Cloudflare Dashboard.\nhttps://dash.cloudflare.com/\nOtherwise, the following commands might fail! \u{1F62C}",
"\u{1F44B} Heads-up:"
);
let path8 = "";
if (!argv.name) {
const defaultPath = "./my-superflare-app";
const pathResponse = await text({
message: "Where would you like to create your app?",
placeholder: defaultPath
});
if (isCancel(pathResponse)) {
cancel("Never mind!");
process.exit(0);
}
path8 = pathResponse || defaultPath;
} else {
path8 = argv.name;
if (!path8.startsWith(".") && !path8.startsWith("/")) {
path8 = `./${path8}`;
}
}
const appName = path8.split("/").pop();
if (!appName) {
throw new Error("Invalid path");
}
if (!appName.match(/^[a-z0-9-]+$/)) {
throw new Error(
`Invalid app name: ${appName}. App names can only contain lowercase letters, numbers, and dashes.`
);
}
s.start(`Creating a new Remix Superflare app in ${path8}...`);
await generateTemplate(path8, appName, argv.template || "remix", argv.ref);
s.stop(`App created!`);
async function buildPlan() {
const selections = await multiselect({
message: `What features of Superflare do you plan to use? We'll create the resources for you.`,
options: [
{
value: "database",
label: "Database Models",
hint: "We'll create D1 a database for you"
},
{
value: "storage",
label: "Storage",
hint: "We'll create a R2 bucket for you"
},
{
value: "queue",
label: "Queues",
hint: "We'll create a Queue consumer and producer for you"
},
{
value: "broadcasting",
label: "Broadcasting",
hint: "We'll set up a Durable Object for you"
},
{
value: "scheduledTasks",
label: "Scheduled Tasks",
hint: "We'll set up a cron trigger for you"
}
],
initialValues: [
"database",
"storage",
"queue",
"broadcasting",
"scheduledTasks"
]
});
if (isCancel(selections)) {
cancel("Never mind!");
process.exit(0);
}
const plan2 = {};
selections.forEach((selection) => {
switch (selection) {
case "database":
plan2.d1 = `${appName}-db`;
break;
case "storage":
plan2.r2 = `${appName}-bucket`;
break;
case "queue":
plan2.queue = `${appName}-queue`;
break;
case "broadcasting":
plan2.durableObject = `Channel`;
break;
case "scheduledTasks":
plan2.scheduledTasks = true;
break;
default:
break;
}
});
const confirmMessage = `We'll create the following resources for you:
${plan2.d1 ? ` - D1 Database: ${plan2.d1} (bound as DB)` : ""}
${plan2.r2 ? ` - R2 Bucket: ${plan2.r2} (bound as BUCKET)` : ""}
${plan2.queue ? ` - Queue: ${plan2.queue} (bound as QUEUE)` : ""}
${plan2.durableObject ? ` - Durable Object: Channel (bound as CHANNEL)` : ""}
${plan2.scheduledTasks ? ` - Scheduled Tasks: A cron trigger for every minute` : ""}
Do you want to continue?`;
const confirmation = await confirm({
message: confirmMessage
});
if (!confirmation || isCancel(confirmation)) {
return await buildPlan();
}
return plan2;
}
const plan = await buildPlan();
s.start("Creating resources...");
const promises = [];
if (plan.d1) {
promises.push(createD1Database2(plan.d1));
}
if (plan.r2) {
promises.push(createR2Bucket(plan.r2));
}
if (plan.queue) {
promises.push(createQueue(plan.queue));
}
if (plan.durableObject) {
promises.push(setUpDurableObject(path8));
}
if (plan.scheduledTasks) {
promises.push(
Promise.resolve({
success: true,
message: "\u2705 Scheduled Tasks: Set up cron trigger for every minute",
wranglerConfig: {
triggers: {
crons: ["* * * * *"]
}
}
})
);
}
const results = await Promise.all(promises);
let wranglerConfig = {
name: appName
};
results.forEach((result) => {
if (result.wranglerConfig) {
wranglerConfig = {
...wranglerConfig,
...result.wranglerConfig
};
}
});
await addToWranglerConfig(wranglerConfig, path8);
await writeSuperflareConfig(
results.map((r) => r.superflareConfig).filter(Boolean),
path8
);
const appKey = randomBytes(256).toString("base64");
await setSecret("APP_KEY", appKey, path8);
s.stop("Done creating resources!");
const allResults = results.map((r) => r.message);
note(allResults.join("\n"), "Here's what we did:");
outro(
`You're all set! \`cd ${path8}\`, run \`npm install --legacy-peer-deps\`, and then \`npx superflare migrate\` to get started.`
);
}
async function generateTemplate(path8, appName, template, ref) {
const gitHubRepo = `jplhomer/superflare`;
const templatePath = `templates/${template}`;
const tempDir = await downloadGitHubTarball(gitHubRepo, ref);
await cp(join2(tempDir, templatePath), path8, { recursive: true });
await rmdir(tempDir, { recursive: true });
const pkgJsonPath = join2(path8, "package.json");
const pkgJson = JSON.parse(await readFile2(pkgJsonPath, "utf-8"));
pkgJson.name = appName;
await writeFile5(pkgJsonPath, JSON.stringify(pkgJson, null, 2));
}
async function downloadGitHubTarball(gitHubRepo, ref) {
const tempDir = await mkdtemp(join2(tmpdir(), "superflare-"));
const release = await fetch(
`https://api.github.com/repos/${gitHubRepo}/releases/latest`,
{
headers: {
"user-agent": "Superflare CLI"
}
}
);
const { name } = await release.json();
const gitHubRef = ref || name || "main";
const downloadUrl = new URL(
`https://api.github.com/repos/${gitHubRepo}/tarball/${gitHubRef}`
);
const response = await fetch(downloadUrl.toString(), {
headers: {
"user-agent": "Superflare CLI"
}
});
await pipeline(
// Download
// @ts-ignore
response.body,
// Decompress
gunzipMaybe(),
// Unpack
extract(tempDir, {
strip: 1,
filter: (name2) => {
name2 = name2.replace(tempDir, "");
return !name2.startsWith(normalize("/templates/"));
}
})
);
return tempDir;
}
async function createD1Database2(name) {
var _a, _b;
try {
const result = await runWranglerCommand(["d1", "create", name]);
const databaseId = (_b = (_a = result.stdout.split("\n").find((line) => line.startsWith("database_id"))) == null ? void 0 : _a.split("=")[1]) == null ? void 0 : _b.trim().replace(/"/g, "");
if (!databaseId) {
return {
success: false,
message: `\u{1F914} D1 Database: ${name} created, but we couldn't parse the ID. Check your Cloudflare Dashboard to find it.`
};
}
return {
success: true,
message: `\u2705 D1 Database: ${name} created!`,
wranglerConfig: {
d1_databases: [
{
binding: "DB",
name,
database_id: databaseId
}
]
},
superflareConfig: `database: {
default: ctx.env.DB,
},`
};
} catch (e) {
return {
success: false,
message: `\u274C D1 Database: ${e.stderr || e.stdout || e.message}`
};
}
}
async function createR2Bucket(name) {
try {
await runWranglerCommand(["r2", "bucket", "create", name]);
return {
success: true,
message: `\u2705 R2 Bucket: ${name} created!`,
wranglerConfig: {
r2_buckets: [
{
binding: "BUCKET",
bucket_name: name,
preview_bucket_name: "BUCKET"
}
]
},
superflareConfig: `storage: {
default: {
binding: ctx.env.BUCKET,
},
},`
};
} catch (e) {
return {
success: false,
message: `\u274C R2 Bucket: ${e.stderr || e.stdout || e.message}`
};
}
}
async function createQueue(name) {
try {
await runWranglerCommand(["queues", "create", name]);
return {
success: true,
message: `\u2705 Queue: ${name} created!`,
wranglerConfig: {
queues: {
producers: [
{
queue: name,
binding: "QUEUE"
}
],
consumers: [
{
queue: name
}
]
}
},
superflareConfig: `queues: {
default: ctx.env.QUEUE,
},`
};
} catch (e) {
return {
success: false,
message: `\u274C Queue: ${e.stderr || e.stdout || e.message}`
};
}
}
async function setUpDurableObject(pathName) {
const workerPath = join2(pathName, "worker.ts");
const contents = await readFile2(workerPath, "utf-8");
await writeFile5(
workerPath,
`${contents}
export { Channel } from "superflare";`
);
return {
success: true,
message: `\u2705 Durable Object: Added binding and Channel export to worker.ts`,
wranglerConfig: {
durable_objects: {
bindings: [
{
name: "CHANNELS",
class_name: "Channel"
}
]
},
migrations: [
{
tag: "v1",
new_classes: ["Channel"]
}
]
},
superflareConfig: `channels: {
default: {
binding: ctx.env.CHANNELS,
},
},`
};
}
async function addToWranglerConfig(config, pathName) {
let wranglerConfigPath = join2(pathName, "wrangler.toml");
try {
// TOML support isn't implemented yet, so intentionally throw and fall back to updating wrangler.json below.
throw new Error("not implemented for TOML yet");
} catch (e) {
wranglerConfigPath = join2(pathName, "wrangler.json");
const wranglerConfig = JSON.parse(
await readFile2(wranglerConfigPath, "utf-8")
);
await writeFile5(
wranglerConfigPath,
JSON.stringify(
{
...wranglerConfig,
...config
},
null,
2
)
);
}
}
async function writeSuperflareConfig(chunks, pathName) {
const superflareConfigPath = join2(pathName, "superflare.config.ts");
let contents = `import { defineConfig } from "superflare";
export default defineConfig<Env>((ctx) => {
return {
`;
const indentation = " ";
contents += indentation + "appKey: ctx.env.APP_KEY,\n";
chunks.forEach((chunk) => {
chunk.split("\n").forEach((line) => {
contents += `${indentation}${line}
`;
});
});
contents += ` };
});`;
await writeFile5(superflareConfigPath, contents);
}
async function ensureWranglerAuthenticated() {
try {
const result = await runWranglerCommand(["whoami"]);
return !result.stdout.includes("You are not authenticated");
} catch (_e) {
return false;
}
}
async function wranglerLogin() {
return await new Promise((resolve, reject) => {
spawn3("npx", ["wrangler", "login"], { stdio: "inherit" }).on(
"close",
(code) => {
if (code === 0) {
resolve();
} else {
reject();
}
}
);
});
}
async function setSecret(key, value, path8) {
const devVarsPath = join2(path8, ".dev.vars");
let contents = "";
try {
contents = await readFile2(devVarsPath, "utf-8");
} catch (_e) {
}
if (contents && !contents.endsWith("\n")) {
contents += "\n";
}
contents += `${key}=${value}\n`;
await writeFile5(devVarsPath, contents);
}
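// Illustrative result: after `setSecret("APP_KEY", appKey, path)`, the project's .dev.vars
// file ends with a line like `APP_KEY=<base64 string>` for local development.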
// cli.ts
var resetColor = "\x1B[0m";
var fgGreenColor = "\x1B[32m";
var CommandLineArgsError = class extends Error {
};
function createCLIParser(argv) {
const superflare = makeCLI(argv).strict().scriptName("superflare").wrap(null).version(false).option("v", {
describe: "Show version number",
alias: "version",
type: "boolean"
});
superflare.help().alias("help", "h");
superflare.command(
["*"],
false,
() => {
},
async (args) => {
if (args._.length > 0) {
throw new CommandLineArgsError(`Unknown command: ${args._}.`);
} else {
superflare.showHelp("log");
}
}
);
const subHelp = {
command: ["*"],
handler: async (args) => {
setImmediate(
() => superflare.parse([...args._.map((a) => `${a}`), "--help"])
);
}
};
superflare.command(
"migrate",
"\u{1F3D7}\uFE0F Migrate your database and update types",
migrateOptions,
migrateHandler
);
superflare.command(
"dev [entrypoint]",
"\u{1F3C4} Start the development server",
devOptions,
devHandler
);
superflare.command(
["generate", "g"],
"\u{1F307} Scaffold useful things",
(yargs) => {
return generate(yargs.command(subHelp));
}
);
superflare.command(
"new [name]",
"\u{1F3B8} Create a new Superflare project",
newOptions,
newHandler
);
superflare.command(
["console", "c"],
"\u{1F52E} Open an interactive developer console",
// @ts-expect-error: yargs builder typings don't line up with this handler's argv shape
consoleOptions,
consoleHandler
);
superflare.command("db", "\u{1F5C4}\uFE0F Manage your database", (yargs) => {
return db(yargs.command(subHelp));
});
return superflare;
}
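// Illustrative invocations of the commands registered above (a sketch, not exhaustive):
//
//   $ npx superflare new my-app
//   $ npx superflare generate model Post --migration
//   $ npx superflare migrate --fresh --seed
//   $ npx superflare dev
//   $ npx superflare console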
async function main(argv) {
const superflare = createCLIParser(argv);
try {
await superflare.parse();
} catch (e) {
logger.log("");
if (e instanceof CommandLineArgsError) {
logger.error(e.message);
await createCLIParser([...argv, "--help"]).parse();
} else {
logger.error(e instanceof Error ? e.message : e);
logger.log(
`${fgGreenColor}%s${resetColor}`,
"If you think this is a bug then please create an issue at https://github.com/jplhomer/superflare/issues/new"
);
}
throw e;
}
}
main(hideBin(process.argv));
export {
CommandLineArgsError
};