// nextcloud-chunk-file-upload: Nextcloud chunk file upload client
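// Babel-generated helper functions that emulate private class fields and private
// methods (backed by WeakMap/WeakSet "brands") for the Upload class below.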
function _classPrivateMethodInitSpec(e, a) { _checkPrivateRedeclaration(e, a), a.add(e); }
function _classPrivateFieldInitSpec(e, t, a) { _checkPrivateRedeclaration(e, t), t.set(e, a); }
function _checkPrivateRedeclaration(e, t) { if (t.has(e)) throw new TypeError("Cannot initialize the same private elements twice on an object"); }
function _classPrivateFieldGet(s, a) { return s.get(_assertClassBrand(s, a)); }
function _classPrivateFieldSet(s, a, r) { return s.set(_assertClassBrand(s, a), r), r; }
function _assertClassBrand(e, t, n) { if ("function" == typeof e ? e === t : e.has(t)) return arguments.length < 3 ? t : n; throw new TypeError("Private element is not present on this object"); }
const fs = require('fs');
const crypto = require('crypto');
const path = require('path');
const axios = require('axios');
const Event = require('./event');
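// Per-instance backing stores for the private fields (upload URL, files URL, auth)
// and the private-method brand of the Upload class.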
var _uploadUrl = /*#__PURE__*/new WeakMap();
var _filesUrl = /*#__PURE__*/new WeakMap();
var _auth = /*#__PURE__*/new WeakMap();
var _Upload_brand = /*#__PURE__*/new WeakSet();
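// Uploads a local file to Nextcloud through the WebDAV chunked-upload API:
// a temporary upload directory is created with MKCOL, the file is sent as a series
// of PUT requests (one per chunk), and a final MOVE of ".file" asks the server to
// assemble the chunks at the destination path.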
class Upload {
  constructor(url, userspace, username, password) {
    _classPrivateMethodInitSpec(this, _Upload_brand);
    _classPrivateFieldInitSpec(this, _uploadUrl, void 0);
    _classPrivateFieldInitSpec(this, _filesUrl, void 0);
    _classPrivateFieldInitSpec(this, _auth, void 0);
    // Strip trailing slashes from the base URL and derive the two DAV endpoints:
    // "uploads" for temporary chunk directories, "files" for the final destination.
    _classPrivateFieldSet(_uploadUrl, this, `${url.replace(/\/+$/, '')}/uploads/${userspace}`);
    _classPrivateFieldSet(_filesUrl, this, `${url.replace(/\/+$/, '')}/files/${userspace}`);
    _classPrivateFieldSet(_auth, this, {
      username: username,
      password: password
    });
  }
  uploadFile(localPath, remotePath, chunkSize = 2 * 1024 * 1024, retryChunks = 5, createDirsRecursively = false, deleteChunksOnFailure = false) {
    return new Promise(async (resolve, reject) => {
      // Temporary upload directory with a random name on the "uploads" endpoint.
      const chunkPath = `${_classPrivateFieldGet(_uploadUrl, this)}/${crypto.randomBytes(32).toString('hex')}`;
      try {
        await _assertClassBrand(_Upload_brand, this, _createDir).call(this, chunkPath);
      } catch (e) {
        return reject(new Event(localPath, null, 'Failed creating temporary upload directory', false, e === null || e === void 0 ? void 0 : e.response));
      }
      // Chunks are named after zero-padded byte offsets; pad to the width of the file size.
      const identifierLength = ('' + fs.statSync(localPath).size).length;
      const stream = fs.createReadStream(localPath, {
        highWaterMark: chunkSize
      });
      let chunkNo = 0;
      let chunkOffset = 0;
      let readable = false;
      stream.on('readable', async () => {
        // Make sure the upload happens only one time. The event is triggered twice: the first
        // time when the stream is ready for reading and the second time when the end of the
        // stream has been reached.
        if (readable) {
          return;
        }
        readable = true;
        let chunk;
        while (chunk = stream.read()) {
          // Each chunk is PUT to "<offset>-<limit>", the inclusive byte range it covers.
          const offsetIdentifier = ('' + chunkOffset).padStart(identifierLength, '0');
          chunkOffset += chunk.length - 1;
          const limitIdentifier = ('' + chunkOffset).padStart(identifierLength, '0');
          chunkOffset++;
          // Retry a failed chunk up to retryChunks additional times before giving up.
          let success = false;
          let lastHttpErrorEvent;
          for (let i = 0; i <= retryChunks && !success; i++) {
            try {
              await axios.request({
                method: 'put',
                url: `${chunkPath}/${offsetIdentifier}-${limitIdentifier}`,
                auth: _classPrivateFieldGet(_auth, this),
                data: chunk
              });
              success = true;
            } catch (e) {
              lastHttpErrorEvent = e;
              success = false;
            }
          }
          if (!success) {
            var _lastHttpErrorEvent;
            if (deleteChunksOnFailure) {
              await _assertClassBrand(_Upload_brand, this, _deleteChunks).call(this, chunkPath);
            }
            return reject(new Event(localPath, chunkNo, 'Failed uploading chunk, max retries reached', false, (_lastHttpErrorEvent = lastHttpErrorEvent) === null || _lastHttpErrorEvent === void 0 ? void 0 : _lastHttpErrorEvent.response));
          }
          chunkNo++;
        }
        // Optionally create the remote target directory tree before assembling the file.
        if (createDirsRecursively) {
          const remoteDir = path.dirname(remotePath);
          try {
            await _assertClassBrand(_Upload_brand, this, _createDirsRecursively).call(this, remoteDir);
          } catch (e) {
            if (deleteChunksOnFailure) {
              await _assertClassBrand(_Upload_brand, this, _deleteChunks).call(this, chunkPath);
            }
            return reject(new Event(localPath, null, `Failed creating remote directory ${remoteDir}`, false, e === null || e === void 0 ? void 0 : e.response));
          }
        }
        // MOVE the virtual ".file" member to the destination; the server assembles the chunks.
        try {
          await axios.request({
            method: 'move',
            url: `${chunkPath}/.file`,
            auth: _classPrivateFieldGet(_auth, this),
            headers: {
              Destination: `${_classPrivateFieldGet(_filesUrl, this)}/${remotePath.replace(/^\/+/, '')}`
            }
          });
          resolve(new Event(localPath, chunkNo, null, true));
        } catch (e) {
          if (deleteChunksOnFailure) {
            await _assertClassBrand(_Upload_brand, this, _deleteChunks).call(this, chunkPath);
          }
          return reject(new Event(localPath, chunkNo, 'Failed to glue the chunks together', false, e === null || e === void 0 ? void 0 : e.response));
        }
      }).on('error', () => {
        reject(new Event(localPath, chunkNo, 'Failed reading the local file'));
      });
    });
  }
}
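// Private methods of Upload, hoisted out of the class body by the transpiler and
// guarded by the _Upload_brand check at each call site.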
// Creates a single remote directory (WebDAV MKCOL).
function _createDir(remotePath) {
  return axios.request({
    method: 'mkcol',
    url: remotePath,
    auth: _classPrivateFieldGet(_auth, this)
  });
}
// Creates each directory of remotePath in turn, ignoring "already exists" errors.
function _createDirsRecursively(remotePath) {
  return new Promise(async (resolve, reject) => {
    // Split into path segments; the filter drops empty segments (e.g. from the leading slash).
    const dirs = remotePath.split('/').filter(path => !!path);
    if (!dirs.length) {
      return resolve();
    }
    let currentPath = '';
    for (let dir of dirs) {
      currentPath += `/${dir}`;
      try {
        await _assertClassBrand(_Upload_brand, this, _createDir).call(this, `${_classPrivateFieldGet(_filesUrl, this)}${currentPath}`);
      } catch (e) {
        var _e$response;
        // A directory that already exists is not an error; anything else is fatal.
        if (!(e !== null && e !== void 0 && (_e$response = e.response) !== null && _e$response !== void 0 && _e$response.data) || !e.response.data.includes('<s:message>The resource you tried to create already exists</s:message>')) {
          return reject(e);
        }
      }
    }
    resolve();
  });
}
// Best-effort removal of the temporary chunk directory; errors are intentionally ignored.
async function _deleteChunks(chunkPath) {
  try {
    await axios.request({
      method: 'delete',
      url: chunkPath,
      auth: _classPrivateFieldGet(_auth, this)
    });
  } catch (e) {
    // Ignore delete errors
  }
}
module.exports = Upload;
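// Usage sketch: a minimal example of driving the Upload class, assuming a Nextcloud
// instance reachable at cloud.example.com; the URL, user names, paths, and password
// below are placeholders, not values defined by this module.
if (require.main === module) {
  const upload = new Upload(
    'https://cloud.example.com/remote.php/dav', // WebDAV base URL (assumed)
    'alice',                                    // userspace that owns the target files
    'alice',                                    // login name
    'app-password'                              // password or app password
  );
  upload
    .uploadFile('./backup.tar.gz', '/Backups/backup.tar.gz', 10 * 1024 * 1024, 5, true, true)
    .then(event => console.log('upload finished', event))
    .catch(event => console.error('upload failed', event));
}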