ravendb — DatabaseSmuggler.js
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.DatabaseSmuggler = void 0; const StringUtil_js_1 = require("../../Utility/StringUtil.js"); const index_js_1 = require("../../Exceptions/index.js"); const HttpUtil_js_1 = require("../../Utility/HttpUtil.js"); const StreamUtil_js_1 = require("../../Utility/StreamUtil.js"); const node_path_1 = require("node:path"); const BackupUtils_js_1 = require("./BackupUtils.js"); const OperationCompletionAwaiter_js_1 = require("../Operations/OperationCompletionAwaiter.js"); const GetNextOperationIdCommand_js_1 = require("../Commands/GetNextOperationIdCommand.js"); const RavenCommand_js_1 = require("../../Http/RavenCommand.js"); class DatabaseSmuggler { _store; _databaseName; _requestExecutor; constructor(store, databaseName) { this._store = store; this._databaseName = databaseName ?? store.database; if (this._databaseName) { this._requestExecutor = store.getRequestExecutor(this._databaseName); } else { this._requestExecutor = null; } } forDatabase(databaseName) { if (StringUtil_js_1.StringUtil.equalsIgnoreCase(databaseName, this._databaseName)) { return this; } return new DatabaseSmuggler(this._store, databaseName); } async export(options, toFile) { const directory = (0, node_path_1.dirname)((0, node_path_1.resolve)(toFile)); const { existsSync, mkdirSync, createWriteStream } = await import("node:fs"); if (!existsSync(directory)) { mkdirSync(directory, { recursive: true }); } return await this._export(options, async (response) => { const fileStream = createWriteStream(toFile); await (0, StreamUtil_js_1.pipelineAsync)(response, fileStream); }); } async _export(options, handleStreamResponse) { if (!options) { (0, index_js_1.throwError)("InvalidArgumentException", "Options cannot be null"); } if (!this._requestExecutor) { (0, index_js_1.throwError)("InvalidOperationException", "Cannot use smuggler without a database defined, did you forget to call 'forDatabase'?"); } const getOperationIdCommand = new GetNextOperationIdCommand_js_1.GetNextOperationIdCommand(); await this._requestExecutor.execute(getOperationIdCommand); const operationId = getOperationIdCommand.result; const command = new ExportCommand(this._requestExecutor.conventions, options, handleStreamResponse, operationId, getOperationIdCommand.nodeTag); await this._requestExecutor.execute(command); return new OperationCompletionAwaiter_js_1.OperationCompletionAwaiter(this._requestExecutor, this._requestExecutor.conventions, operationId, getOperationIdCommand.nodeTag); } async importIncremental(options, fromDirectory) { const { statSync, readdirSync } = await import("node:fs"); const mProvider = f => statSync(f).mtimeMs; const files = readdirSync(fromDirectory) .filter(x => BackupUtils_js_1.BackupUtils.BACKUP_FILE_SUFFIXES.includes("." 
+ (0, node_path_1.extname)(x))) .sort((a, b) => BackupUtils_js_1.BackupUtils.comparator(a, b, mProvider)); if (!files.length) { return; } const oldOperateOnTypes = DatabaseSmuggler.configureOptionsFromIncrementalImport(options); for (let i = 0; i < files.length - 1; i++) { const filePath = files[i]; await this.import(options, (0, node_path_1.resolve)(filePath)); } options.operateOnTypes = oldOperateOnTypes; const lastFile = files.at(-1); await this.import(options, (0, node_path_1.resolve)(lastFile)); } static configureOptionsFromIncrementalImport(options) { options.operateOnTypes.push("Tombstones"); options.operateOnTypes.push("CompareExchangeTombstones"); // we import the indexes and Subscriptions from the last file only, const oldOperateOnTypes = [...options.operateOnTypes]; options.operateOnTypes = options.operateOnTypes.filter(x => x !== "Indexes" && x !== "Subscriptions"); return oldOperateOnTypes; } async import(options, fromFile) { let countOfFileParts = 0; const { existsSync } = await import("node:fs"); let result; do { result = await this._import(options, fromFile); countOfFileParts++; fromFile = StringUtil_js_1.StringUtil.format("{0}.part{1}", fromFile, countOfFileParts); } while (existsSync(fromFile)); return result; } async _import(options, file) { if (!options) { (0, index_js_1.throwError)("InvalidArgumentException", "Options cannot be null"); } if (!file) { (0, index_js_1.throwError)("InvalidArgumentException", "File cannot be null"); } if (!this._requestExecutor) { (0, index_js_1.throwError)("InvalidOperationException", "Cannot use smuggler without a database defined, did you forget to call 'forDatabase'?"); } const getOperationIdCommand = new GetNextOperationIdCommand_js_1.GetNextOperationIdCommand(); await this._requestExecutor.execute(getOperationIdCommand); const operationId = getOperationIdCommand.result; const command = new ImportCommand(this._requestExecutor.conventions, options, file, operationId, getOperationIdCommand.nodeTag); await this._requestExecutor.execute(command); return new OperationCompletionAwaiter_js_1.OperationCompletionAwaiter(this._requestExecutor, this._requestExecutor.conventions, operationId, getOperationIdCommand.nodeTag); } } exports.DatabaseSmuggler = DatabaseSmuggler; class ExportCommand extends RavenCommand_js_1.RavenCommand { _options; _handleStreamResponse; _operationId; constructor(conventions, options, handleStreamResponse, operationId, nodeTag) { super(); if (!conventions) { (0, index_js_1.throwError)("InvalidArgumentException", "Conventions cannot be null"); } if (!options) { (0, index_js_1.throwError)("InvalidArgumentException", "Options cannot be null"); } if (!handleStreamResponse) { (0, index_js_1.throwError)("InvalidArgumentException", "HandleStreamResponse cannot be null"); } this._handleStreamResponse = handleStreamResponse; const { operateOnTypes, ...restOptions } = options; this._options = conventions.objectMapper.toObjectLiteral({ operateOnTypes: operateOnTypes.join(","), ...restOptions }); this._operationId = operationId; this._selectedNodeTag = nodeTag; } get isReadRequest() { return false; } createRequest(node) { const uri = node.url + "/databases/" + node.database + "/smuggler/export?operationId=" + this._operationId; const body = this._serializer.serialize(this._options); const headers = HttpUtil_js_1.HeadersBuilder.create() .typeAppJson().build(); return { method: "POST", uri, body, headers }; } async processResponse(cache, response, bodyStream, url) { await this._handleStreamResponse(bodyStream); return "Automatic"; } } 
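/*
 * Usage sketch (not part of the shipped module): exporting a database to a local dump
 * file through this DatabaseSmuggler class. It assumes the public API of the "ravendb"
 * package (DocumentStore, DatabaseSmugglerExportOptions, the store.smuggler accessor);
 * the URL, database name, and file path are illustrative only.
 *
 *   const { DocumentStore, DatabaseSmugglerExportOptions } = require("ravendb");
 *
 *   async function exportNorthwind() {
 *       const store = new DocumentStore("http://localhost:8080", "Northwind");
 *       store.initialize();
 *       try {
 *           const options = new DatabaseSmugglerExportOptions();
 *           // export() resolves to an OperationCompletionAwaiter once the server-side
 *           // operation has started; waitForCompletion() polls it until it finishes.
 *           const operation = await store.smuggler.export(options, "backups/northwind.ravendbdump");
 *           await operation.waitForCompletion();
 *       } finally {
 *           store.dispose();
 *       }
 *   }
 */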
class ImportCommand extends RavenCommand_js_1.RavenCommand {
    _options;
    _file;
    _operationId;
    get isReadRequest() {
        return false;
    }
    constructor(conventions, options, file, operationId, nodeTag) {
        super();
        this._responseType = "Empty";
        if (!file) {
            (0, index_js_1.throwError)("InvalidArgumentException", "File cannot be null");
        }
        if (!conventions) {
            (0, index_js_1.throwError)("InvalidArgumentException", "Conventions cannot be null");
        }
        if (!options) {
            (0, index_js_1.throwError)("InvalidArgumentException", "Options cannot be null");
        }
        this._file = file;
        const { operateOnTypes, ...restOptions } = options;
        this._options = conventions.objectMapper.toObjectLiteral({
            operateOnTypes: operateOnTypes.join(","),
            ...restOptions
        });
        this._operationId = operationId;
        this._selectedNodeTag = nodeTag;
    }
    async send(agent, requestOptions) {
        const { body } = requestOptions;
        const { readFileSync } = await import("node:fs");
        if (body instanceof FormData) {
            const buffer = readFileSync(this._file);
            body.append("name", new Blob([buffer], { type: "text/plain" }));
        }
        return super.send(agent, requestOptions);
    }
    createRequest(node) {
        const uri = node.url + "/databases/" + node.database + "/smuggler/import?operationId=" + this._operationId;
        const multipart = new FormData();
        multipart.append("importOptions", this._serializer.serialize(this._options));
        // we append file in send method
        return {
            method: "POST",
            uri,
            body: multipart,
        };
    }
}
//# sourceMappingURL=DatabaseSmuggler.js.map
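/*
 * Usage sketch (not part of the shipped module): importing a dump file and then the
 * contents of an incremental backup directory. It assumes the same public API of the
 * "ravendb" package (DocumentStore, DatabaseSmugglerImportOptions, store.smuggler);
 * database names and paths are illustrative only.
 *
 *   const { DocumentStore, DatabaseSmugglerImportOptions } = require("ravendb");
 *
 *   async function importNorthwind() {
 *       const store = new DocumentStore("http://localhost:8080", "Northwind");
 *       store.initialize();
 *       try {
 *           const options = new DatabaseSmugglerImportOptions();
 *           // Start the import and wait for the server-side operation to finish.
 *           const operation = await store.smuggler.import(options, "backups/northwind.ravendbdump");
 *           await operation.waitForCompletion();
 *
 *           // importIncremental() applies every backup file found in the directory in
 *           // modification-time order, taking indexes and subscriptions from the last file only.
 *           await store.smuggler.forDatabase("NorthwindRestored")
 *               .importIncremental(options, "backups/incremental");
 *       } finally {
 *           store.dispose();
 *       }
 *   }
 */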