appwrite-utils-cli
Version:
Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool; installing this in frontend environments is not recommended.
558 lines (496 loc) • 17.4 kB
text/typescript
import {
Compression,
Databases,
Permission,
Query,
Role,
Storage,
type Models,
} from "node-appwrite";
import { tryAwaitWithRetry, type AppwriteConfig } from "appwrite-utils";
import { getClientFromConfig } from "../utils/getClientFromConfig.js";
import { ulid } from "ulidx";
import type { BackupCreate } from "./schemas.js";
import { logOperation } from "../shared/operationLogger.js";
import { splitIntoBatches } from "../shared/migrationHelpers.js";
import { retryFailedPromises } from "../utils/retryFailedPromises.js";
import { InputFile } from "node-appwrite/file";
import { MessageFormatter, Messages } from "../shared/messageFormatter.js";
import { ProgressManager } from "../shared/progressManager.js";
/**
 * Builds a node-appwrite Storage service bound to the client derived from
 * the given config. Mirrors the original contract that getClientFromConfig
 * always yields a usable client (hence the non-null assertion).
 */
export const getStorage = (config: AppwriteConfig) =>
  new Storage(getClientFromConfig(config)!);
/**
 * Lists storage buckets, optionally filtered by queries and a search term.
 * Thin pass-through to Storage.listBuckets.
 */
export const listBuckets = async (
  storage: Storage,
  queries?: string[],
  search?: string
) => storage.listBuckets(queries, search);
/** Fetches a single bucket by ID. Thin pass-through to Storage.getBucket. */
export const getBucket = async (storage: Storage, bucketId: string) =>
  storage.getBucket(bucketId);
/**
 * Creates a bucket from a bucket-shaped object (Models.Bucket minus the
 * server-managed fields). When no explicit bucketId is supplied, a fresh
 * ULID is generated for it.
 */
export const createBucket = async (
  storage: Storage,
  bucket: Omit<Models.Bucket, "$id" | "$createdAt" | "$updatedAt">,
  bucketId?: string
) => {
  const id = bucketId ?? ulid();
  return storage.createBucket(
    id,
    bucket.name,
    bucket.$permissions,
    bucket.fileSecurity,
    bucket.enabled,
    bucket.maximumFileSize,
    bucket.allowedFileExtensions,
    bucket.compression as Compression,
    bucket.encryption,
    bucket.antivirus
  );
};
/**
 * Updates an existing bucket in place, copying every mutable field from the
 * supplied bucket object onto the bucket identified by bucketId.
 */
export const updateBucket = async (
  storage: Storage,
  bucket: Models.Bucket,
  bucketId: string
) =>
  storage.updateBucket(
    bucketId,
    bucket.name,
    bucket.$permissions,
    bucket.fileSecurity,
    bucket.enabled,
    bucket.maximumFileSize,
    bucket.allowedFileExtensions,
    bucket.compression as Compression,
    bucket.encryption,
    bucket.antivirus
  );
/** Deletes a bucket by ID. Thin pass-through to Storage.deleteBucket. */
export const deleteBucket = async (storage: Storage, bucketId: string) =>
  storage.deleteBucket(bucketId);
/** Fetches a single file's metadata from a bucket. */
export const getFile = async (
  storage: Storage,
  bucketId: string,
  fileId: string
) => storage.getFile(bucketId, fileId);
/**
 * Lists files in a bucket, optionally filtered by queries and a search term.
 */
export const listFiles = async (
  storage: Storage,
  bucketId: string,
  queries?: string[],
  search?: string
) => storage.listFiles(bucketId, queries, search);
/** Deletes a single file from a bucket. */
export const deleteFile = async (
  storage: Storage,
  bucketId: string,
  fileId: string
) => storage.deleteFile(bucketId, fileId);
/**
 * Maps config-level permission entries ({ permission, target }) to Appwrite
 * permission strings. Unknown permission names are warned about and skipped.
 */
const buildBucketPermissions = (
  permissionConfigs?: { permission: string; target: string }[]
): string[] => {
  const permissions: string[] = [];
  for (const { permission, target } of permissionConfigs ?? []) {
    switch (permission) {
      case "read":
        permissions.push(Permission.read(target));
        break;
      case "create":
        permissions.push(Permission.create(target));
        break;
      case "update":
        permissions.push(Permission.update(target));
        break;
      case "delete":
        permissions.push(Permission.delete(target));
        break;
      case "write":
        permissions.push(Permission.write(target));
        break;
      default:
        console.warn(`Unknown permission: ${permission}`);
        break;
    }
  }
  return permissions;
};
/**
 * Ensures that every database in `databases` that has a `bucket` entry in the
 * config also has that bucket in Appwrite storage, creating any that are
 * missing. Databases without a bucket entry are skipped. A failure to create
 * one bucket is logged and does not abort processing of the remaining
 * databases.
 */
export const ensureDatabaseConfigBucketsExist = async (
  storage: Storage,
  config: AppwriteConfig,
  databases: Models.Database[] = []
) => {
  for (const db of databases) {
    const database = config.databases?.find((d) => d.$id === db.$id);
    if (!database?.bucket) {
      continue;
    }
    try {
      await storage.getBucket(database.bucket.$id);
      console.log(`Bucket ${database.bucket.$id} already exists.`);
      continue;
    } catch (e) {
      // getBucket throws when the bucket is missing — fall through and create it.
    }
    const permissions = buildBucketPermissions(database.bucket.permissions);
    try {
      await storage.createBucket(
        database.bucket.$id,
        database.bucket.name,
        permissions,
        database.bucket.fileSecurity,
        database.bucket.enabled,
        database.bucket.maximumFileSize,
        database.bucket.allowedFileExtensions,
        database.bucket.compression as Compression,
        database.bucket.encryption,
        database.bucket.antivirus
      );
      console.log(`Bucket ${database.bucket.$id} created successfully.`);
    } catch (createError) {
      // Fix: this failure was previously swallowed silently (the log call was
      // commented out), hiding misconfigured buckets. Log and keep going.
      console.error(
        `Failed to create bucket ${database.bucket.$id}:`,
        createError
      );
    }
  }
};
/**
 * Deletes every file in a bucket. Unless `skipConfirmation` is set, the user
 * must explicitly confirm the destructive operation first. File IDs are
 * collected with cursor pagination, then deleted one by one with progress
 * reporting; any delete failure marks the progress bar failed and rethrows.
 */
export const wipeDocumentStorage = async (
  storage: Storage,
  bucketId: string,
  options: { skipConfirmation?: boolean } = {}
): Promise<void> => {
  MessageFormatter.warning(`About to delete all files in bucket: ${bucketId}`);
  if (!options.skipConfirmation) {
    // Lazily loaded so non-interactive code paths never pull in the dialog UI.
    const { ConfirmationDialogs } = await import("../shared/confirmationDialogs.js");
    const confirmed = await ConfirmationDialogs.confirmDestructiveOperation({
      operation: "Storage Wipe",
      targets: [bucketId],
      consequences: [
        "Delete ALL files in the storage bucket",
        "This action cannot be undone",
      ],
      requireExplicitConfirmation: true,
      confirmationText: "DELETE FILES",
    });
    if (!confirmed) {
      MessageFormatter.info("Storage wipe cancelled by user");
      return;
    }
  }
  MessageFormatter.progress(`Scanning files in bucket: ${bucketId}`);
  // Pass 1: gather every file ID via cursor pagination (pages of 100).
  const pendingIds: string[] = [];
  let cursor: string | undefined;
  for (;;) {
    const page = await tryAwaitWithRetry(
      async () =>
        await storage.listFiles(bucketId, [
          Query.limit(100),
          ...(cursor ? [Query.cursorAfter(cursor)] : []),
        ])
    );
    if (page.files.length === 0) {
      break;
    }
    for (const file of page.files) {
      pendingIds.push(file.$id);
    }
    // A short page means we have reached the end of the listing.
    if (page.files.length < 100) {
      break;
    }
    cursor = page.files[page.files.length - 1].$id;
  }
  if (pendingIds.length === 0) {
    MessageFormatter.info("No files found in bucket");
    return;
  }
  // Pass 2: delete each file, updating the progress bar as we go.
  const progress = ProgressManager.create(`wipe-${bucketId}`, pendingIds.length, {
    title: `Deleting files from ${bucketId}`,
  });
  try {
    let deletedCount = 0;
    for (const fileId of pendingIds) {
      await tryAwaitWithRetry(
        async () => await storage.deleteFile(bucketId, fileId)
      );
      deletedCount += 1;
      progress.update(deletedCount, `Deleted file: ${fileId.slice(0, 20)}...`);
    }
    progress.stop();
    MessageFormatter.success(`All ${MessageFormatter.formatNumber(pendingIds.length)} files in bucket ${bucketId} have been deleted`);
  } catch (error) {
    progress.fail(error instanceof Error ? error.message : String(error));
    throw error;
  }
};
/**
 * Fetches the document-storage bucket for a database, creating it when it
 * does not exist. The bucket ID is either the explicit `bucketName` or is
 * derived as `<documentBucketId>_<dbid-lowercased-without-spaces>`. Created
 * buckets default to world-readable / user-writable permissions.
 */
export const initOrGetDocumentStorage = async (
  storage: Storage,
  config: AppwriteConfig,
  dbId: string,
  bucketName?: string
) => {
  // Fix: the original `.replace(" ", "")` only removed the FIRST space, so a
  // dbId with multiple spaces produced a bucket ID that still contained
  // spaces. The global regex strips them all.
  // NOTE(review): deployments that already created a bucket under the old
  // half-stripped ID (multi-space dbIds only) will create a fresh bucket —
  // confirm no such dbIds exist before relying on this.
  const bucketId =
    bucketName ??
    `${config.documentBucketId}_${dbId.toLowerCase().replace(/ /g, "")}`;
  try {
    return await tryAwaitWithRetry(
      async () => await storage.getBucket(bucketId)
    );
  } catch (e) {
    // getBucket throws when the bucket is missing — create it.
    return await tryAwaitWithRetry(
      async () =>
        await storage.createBucket(bucketId, `${dbId} Storage`, [
          Permission.read(Role.any()),
          Permission.create(Role.users()),
          Permission.update(Role.users()),
          Permission.delete(Role.users()),
        ])
    );
  }
};
export const initOrGetBackupStorage = async (
config: AppwriteConfig,
storage: Storage
) => {
try {
return await tryAwaitWithRetry(
async () => await storage.getBucket("backup")
);
} catch (e) {
return await initOrGetDocumentStorage(
storage,
config,
"backups",
"Database Backups"
);
}
};
/**
 * Backs up an entire database — database metadata, every collection's schema,
 * and every document — into a single JSON file uploaded to the database's
 * document-storage bucket.
 *
 * Runs in three steps:
 *   1. First pass over all collections (cursor pagination, pages of 500) to
 *      count collections/documents so the progress bar has an accurate total.
 *   2. Second pass that serializes each collection and pulls its documents in
 *      batches, updating progress and (when config.useMigrations is set) an
 *      operation-log document.
 *   3. Serializes the accumulated payload and uploads it under a ULID file ID
 *      named `<ISO timestamp>-<databaseId>.json`.
 *
 * A collection that fails to back up is warned about and skipped; any other
 * failure marks the operation log as errored and rethrows.
 */
export const backupDatabase = async (
  config: AppwriteConfig,
  database: Databases,
  databaseId: string,
  storage: Storage
): Promise<void> => {
  const startTime = Date.now();
  MessageFormatter.banner("Database Backup", `Backing up database: ${databaseId}`);
  MessageFormatter.info(Messages.BACKUP_STARTED(databaseId));
  // Entire backup payload is accumulated in memory before upload.
  let data: BackupCreate = {
    database: "",
    collections: [],
    documents: [],
  };
  // May be undefined when operation logging is disabled (config.useMigrations).
  const backupOperation = await logOperation(database, databaseId, {
    operationType: "backup",
    collectionId: "",
    data: "Starting backup...",
    progress: 0,
    total: 100,
    error: "",
    status: "in_progress",
  }, undefined, config.useMigrations);
  let progress: ProgressManager | null = null;
  let totalDocuments = 0;
  let processedDocuments = 0;
  try {
    const db = await tryAwaitWithRetry(
      async () => await database.get(databaseId)
    );
    data.database = JSON.stringify(db);
    // First pass: count collections and documents for progress tracking
    MessageFormatter.step(1, 3, "Analyzing database structure");
    let lastCollectionId = "";
    let moreCollections = true;
    let totalCollections = 0;
    while (moreCollections) {
      const collectionResponse = await tryAwaitWithRetry(
        async () =>
          await database.listCollections(databaseId, [
            Query.limit(500),
            ...(lastCollectionId ? [Query.cursorAfter(lastCollectionId)] : []),
          ])
      );
      totalCollections += collectionResponse.collections.length;
      // Count documents in each collection
      for (const { $id: collectionId } of collectionResponse.collections) {
        try {
          // Query.limit(1) keeps the response small; only `total` is needed.
          const documentCount = await tryAwaitWithRetry(
            async () => (await database.listDocuments(databaseId, collectionId, [Query.limit(1)])).total
          );
          totalDocuments += documentCount;
        } catch (error) {
          // Count failures only skew the progress total; the backup proceeds.
          MessageFormatter.warning(`Could not count documents in collection ${collectionId}`);
        }
      }
      // A full page (500) means there may be more; resume after the last ID.
      moreCollections = collectionResponse.collections.length === 500;
      if (moreCollections) {
        lastCollectionId = collectionResponse.collections[collectionResponse.collections.length - 1].$id;
      }
    }
    // Progress total covers one tick per collection plus one per document.
    const totalItems = totalCollections + totalDocuments;
    progress = ProgressManager.create(`backup-${databaseId}`, totalItems, {
      title: `Backing up ${databaseId}`,
    });
    MessageFormatter.step(2, 3, `Processing ${totalCollections} collections and ${totalDocuments} documents`);
    // Second pass: actual backup with progress tracking
    lastCollectionId = "";
    moreCollections = true;
    while (moreCollections) {
      const collectionResponse = await tryAwaitWithRetry(
        async () =>
          await database.listCollections(databaseId, [
            Query.limit(500),
            ...(lastCollectionId ? [Query.cursorAfter(lastCollectionId)] : []),
          ])
      );
      for (const {
        $id: collectionId,
        name: collectionName,
      } of collectionResponse.collections) {
        try {
          // Full collection schema (attributes, indexes) is serialized here.
          const collection = await tryAwaitWithRetry(
            async () => await database.getCollection(databaseId, collectionId)
          );
          data.collections.push(JSON.stringify(collection));
          progress?.increment(1, `Processing collection: ${collectionName}`);
          let lastDocumentId = "";
          let moreDocuments = true;
          let collectionDocumentCount = 0;
          // Paginate this collection's documents in pages of 500.
          while (moreDocuments) {
            const documentResponse = await tryAwaitWithRetry(
              async () =>
                await database.listDocuments(databaseId, collectionId, [
                  Query.limit(500),
                  ...(lastDocumentId
                    ? [Query.cursorAfter(lastDocumentId)]
                    : []),
                ])
            );
            collectionDocumentCount += documentResponse.documents.length;
            // NOTE(review): each document is re-fetched individually even
            // though listDocuments already returned document bodies — this
            // looks like an N+1 round-trip; confirm whether getDocument
            // returns extra fields before simplifying.
            const documentPromises = documentResponse.documents.map(
              ({ $id: documentId }) =>
                database.getDocument(databaseId, collectionId, documentId)
            );
            // Batched so the retry helper bounds concurrent requests; failed
            // promises are retried and only successful results are kept.
            const promiseBatches = splitIntoBatches(documentPromises);
            const documentsPulled = [];
            for (const batch of promiseBatches) {
              const successfulDocuments = await retryFailedPromises(batch);
              documentsPulled.push(...successfulDocuments);
              // Update progress for each batch
              progress?.increment(successfulDocuments.length,
                `Processing ${collectionName}: ${processedDocuments + successfulDocuments.length}/${totalDocuments} documents`
              );
              processedDocuments += successfulDocuments.length;
            }
            // One payload entry per page of documents for this collection.
            data.documents.push({
              collectionId: collectionId,
              data: JSON.stringify(documentsPulled),
            });
            if (backupOperation) {
              // Persist incremental progress to the operation-log document.
              await logOperation(
                database,
                databaseId,
                {
                  operationType: "backup",
                  collectionId: collectionId,
                  data: `Backing up, ${data.collections.length} collections so far`,
                  progress: processedDocuments,
                  total: totalDocuments,
                  error: "",
                  status: "in_progress",
                },
                backupOperation.$id,
                config.useMigrations
              );
            }
            moreDocuments = documentResponse.documents.length === 500;
            if (moreDocuments) {
              lastDocumentId =
                documentResponse.documents[
                  documentResponse.documents.length - 1
                ].$id;
            }
          }
          MessageFormatter.success(
            `Collection ${collectionName} backed up with ${MessageFormatter.formatNumber(collectionDocumentCount)} documents`
          );
        } catch (error) {
          // A failing collection is skipped so the rest of the backup survives.
          MessageFormatter.warning(
            `Collection ${collectionName} could not be backed up: ${error instanceof Error ? error.message : String(error)}`
          );
          continue;
        }
      }
      moreCollections = collectionResponse.collections.length === 500;
      if (moreCollections) {
        lastCollectionId =
          collectionResponse.collections[
            collectionResponse.collections.length - 1
          ].$id;
      }
    }
    MessageFormatter.step(3, 3, "Creating backup file");
    const bucket = await initOrGetDocumentStorage(storage, config, databaseId);
    const backupData = JSON.stringify(data);
    const backupSize = Buffer.byteLength(backupData, 'utf8');
    const fileName = `${new Date().toISOString()}-${databaseId}.json`;
    const inputFile = InputFile.fromPlainText(backupData, fileName);
    // Upload under a fresh ULID; fileName above is only the display name.
    const fileCreated = await storage.createFile(
      bucket!.$id,
      ulid(),
      inputFile
    );
    progress?.stop();
    if (backupOperation) {
      // Mark the operation complete, recording the created file's ID.
      await logOperation(
        database,
        databaseId,
        {
          operationType: "backup",
          collectionId: "",
          data: fileCreated.$id,
          progress: totalItems,
          total: totalItems,
          error: "",
          status: "completed",
        },
        backupOperation.$id,
        config.useMigrations
      );
    }
    const duration = Date.now() - startTime;
    MessageFormatter.operationSummary("Backup", {
      database: databaseId,
      collections: data.collections.length,
      documents: processedDocuments,
      fileSize: MessageFormatter.formatBytes(backupSize),
      backupFile: fileName,
      bucket: bucket!.$id,
    }, duration);
    MessageFormatter.success(Messages.BACKUP_COMPLETED(databaseId, backupSize));
  } catch (error) {
    progress?.fail(error instanceof Error ? error.message : String(error));
    MessageFormatter.error("Backup failed", error instanceof Error ? error : new Error(String(error)));
    if (backupOperation) {
      // Record the failure on the operation log before rethrowing.
      await logOperation(
        database,
        databaseId,
        {
          operationType: "backup",
          collectionId: "",
          data: "Backup failed",
          progress: 0,
          total: totalDocuments,
          error: String(error),
          status: "error",
        },
        backupOperation.$id,
        config.useMigrations
      );
    }
    throw error;
  }
};