@opentiny/vue-renderless
An enterprise-class UI component library that supports both Vue.js 2 and Vue.js 3, on both PC and mobile.
JavaScript
import {
__spreadProps,
__spreadValues
} from "../chunk-G2ADBYYC.js";
import { extend } from "@opentiny/utils";
import { xss, logger, sha256 } from "@opentiny/utils";
import { uploadAjax } from "@opentiny/utils";
import { isObject } from "@opentiny/utils";
import { isEmptyObject } from "@opentiny/utils";
let initTokenPromise = null;
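// Wraps an optional service function: if the framework service does not provide it,
// the returned stub rejects with a message naming the prop that must be supplied instead.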
const noopFnCreator = (fn, propName) => {
const noFn = () => {
if (propName) {
return Promise.reject(
new Error(`[TINY Error][FileUpload] Prop ${propName} is mandatory when the framework service is not used`)
);
} else {
return Promise.reject(
new Error("[TINY Error][FileUpload] Prop action is mandatory when the framework service is not used")
);
}
};
return fn || noFn;
};
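// Resolves the request function and the EDM service URL getters. props.httpRequest
// always wins; otherwise the HWH5 bridge (uploadToEDM) is used when props.hwh5 is set,
// then the injected network.request, and finally the built-in uploadAjax.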
const initService = ({
props,
service
}) => {
const { network = {}, common = {} } = service || {};
const { request, get, post, all, spread, CancelToken = {} } = network;
let requestFn;
if (!isEmptyObject(props.hwh5)) {
const { HWH5 } = props.hwh5;
const { uploadToEDM } = HWH5();
requestFn = props.httpRequest || uploadToEDM;
} else if (request) {
requestFn = props.httpRequest || request;
} else {
requestFn = props.httpRequest || uploadAjax;
}
return {
get: noopFnCreator(get),
post: noopFnCreator(post),
request: noopFnCreator(request),
all: noopFnCreator(all),
spread: noopFnCreator(spread),
cancelToken: noopFnCreator(CancelToken.source),
getSingleUploadUrl: noopFnCreator(common.getSingleUploadUrl),
getFileUploadUrl: noopFnCreator(common.getFileUploadUrl),
getFileDownloadUrl: noopFnCreator(common.getFileDownloadUrl),
getSingleDownloadUrl: noopFnCreator(common.getSingleDownloadUrl),
getPackageDownloadUrl: noopFnCreator(common.getPackageDownloadUrl),
getAsyncPackageDownload: noopFnCreator(common.getAsyncPackageDownload),
getLargeFileInitUrl: noopFnCreator(common.getLargeFileInitUrl),
getChunkUploadUrl: noopFnCreator(common.getChunkUploadUrl),
getPreviewUrl: noopFnCreator(common.getPreviewUrl),
getDocumentInfoUrl: noopFnCreator(common.getDocumentInfoUrl),
getPreviewUrlBatch: noopFnCreator(common.getPreviewUrlBatch),
httpRequest: noopFnCreator(requestFn, "httpRequest")
};
};
const computedUploadDisabled = ({ props, state }) => () => props.disabled || (state.form || {}).disabled;
const computedUploadingSize = ({ state, constants }) => () => state.uploadingFiles.reduce(
(total, file) => total + (file.status !== constants.FILE_STATUS.FAIL ? file.size : 0),
0
);
const watchListType = ({ constants, state, api }) => (type) => {
if ([
constants.LIST_TYPE.PICTURE_CARD,
constants.LIST_TYPE.PICTURE,
constants.LIST_TYPE.PICTURE_SINGLE,
constants.LIST_TYPE.DRAG_SINGLE
].includes(type)) {
state.uploadFiles = state.uploadFiles.map((file) => {
file.type = api.getFileSourceType({ file });
if (!file.url && file.raw) {
try {
file.url = URL.createObjectURL(file.raw);
} catch (err) {
return null;
}
}
return file;
});
}
};
const watchFileList = ({ constants, state, props, api }) => (fileList) => {
let uploadFiles = fileList && fileList.map((file) => {
file.uid = file.uid || Date.now() + state.tempIndex++;
file.status = file.status || constants.FILE_STATUS.SUCESS;
return file;
});
if ([constants.LIST_TYPE.PICTURE_SINGLE, constants.LIST_TYPE.DRAG_SINGLE].includes(props.listType)) {
uploadFiles = uploadFiles.slice(0, 1);
}
state.uploadFiles = uploadFiles;
};
const isNonFuncPropBeforeUpload = ({
flag,
doUpload,
file
}) => !flag && doUpload(file);
const onBeforeIsPromise = ({
before,
rawFile,
file,
doUpload,
autoRemove,
api
}) => {
before.then(
(processedFile) => {
const fileType = Object.prototype.toString.call(processedFile);
if (fileType === "[object File]" || fileType === "[object Blob]") {
if (fileType === "[object Blob]") {
processedFile = new File([processedFile], rawFile.name, { type: rawFile.type });
}
for (const p in rawFile) {
Object.prototype.hasOwnProperty.call(rawFile, p) && (processedFile[p] = rawFile[p]);
}
file.raw = processedFile;
}
doUpload(file);
},
() => {
if (autoRemove) {
if (!Array.isArray(rawFile)) {
api.handleRemove(null, rawFile);
} else {
rawFile.forEach((raw) => api.handleRemove(null, raw));
}
}
}
);
};
const isAcceptType = (acceptArray, file, constants, fileType) => {
return acceptArray.some((type) => {
if (type.toLowerCase() === constants.IMAGE_TYPE) {
return constants.FILE_TYPE.PICTURE.split("/").includes(fileType);
}
return new RegExp(`(${type.trim()})$`, "i").test(file.name);
});
};
const getFileType = ({ file }) => {
const { name, url } = file;
let fileType = "";
if (name && /\.[^.]+$/.test(name)) {
fileType = name.split(".")[name.split(".").length - 1].toLowerCase();
} else if (url && /\.[^.]+$/.test(url)) {
fileType = url.split(".")[url.split(".").length - 1].toLowerCase();
}
return fileType;
};
const remove = ({
api,
file,
autoRemove
}) => {
if (autoRemove) {
const rawFile = file.raw;
if (Array.isArray(rawFile)) {
rawFile.forEach((raw) => api.handleRemove(null, raw));
} else {
api.handleRemove(null, rawFile);
}
}
};
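// Pre-upload validation: EDM file-name length (max 255 chars), the accept list,
// the trigger-type whitelist, and finally the user-supplied props.beforeUpload hook
// (sync or Promise) before doUpload is invoked.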
const beforeUpload = ({
props,
api,
Modal,
constants,
t,
state
}) => (file, autoRemove, doUpload, isMergeUpload = false) => {
if (state.isEdm && file.name.length > 255) {
remove({ api, file, autoRemove });
return Modal.message({
message: `${t(constants.EDM.THEFILENAME)}"${file.name}"${t(constants.EDM.FILENAMEEXCEEDS)}`,
status: "warning"
});
}
if (file) {
let isValid = true;
const accept = state.isEdm ? state.accept : props.accept;
const types = constants.FILE_TYPE[state.triggerClickType.toUpperCase()];
const acceptArray = accept ? accept.split(",") : [];
let fileType = "";
if (isMergeUpload) {
const fileRow = [];
fileType = file.raw.flatMap((f) => {
const type = getFileType({ file: f });
if (accept) {
const isExist = isAcceptType(acceptArray, f, constants, type);
isExist ? fileRow.push(f) : remove({ api, file: f, autoRemove });
} else {
fileRow.push(f);
}
return type;
});
if (!fileRow.length) {
isValid = false;
}
file.raw = fileRow;
} else {
fileType = getFileType({ file });
if (accept) {
const isExist = isAcceptType(acceptArray, file, constants, fileType);
!isExist && (isValid = false);
}
}
if (state.triggerClickType && types) {
const isExist = types.split("/").includes(fileType);
!isExist && (isValid = false);
}
if (!isValid) {
remove({ api, file, autoRemove });
return Modal.message({
message: fileType ? t(constants.EDM.notSupport, { format: fileType }) : t(constants.EDM.NOT_SUPPORT_NO_SUFFIX),
status: "warning"
});
}
}
let flag = typeof props.beforeUpload === "function";
isNonFuncPropBeforeUpload({ flag, doUpload, file });
if (flag) {
const rawFile = file.raw;
const before = props.beforeUpload(rawFile);
if (before && before.then) {
onBeforeIsPromise({ before, rawFile, file, doUpload, autoRemove, api });
} else if (before !== false) {
doUpload(file);
} else {
if (autoRemove) {
if (Array.isArray(rawFile)) {
rawFile.forEach((raw) => api.handleRemove(null, raw));
} else {
api.handleRemove(null, rawFile);
}
}
}
}
};
const startUpload = ({
state,
constants,
vm,
Modal,
api,
t
}) => (file, isList) => {
if (state.isHwh5) {
vm.$refs[constants.UPLOAD_INNER].$refs[constants.UPLOAD_INNER_TEMPLATE].upload(file.raw);
return;
}
if (file.size > state.docSize && file.size > state.chunkSize) {
file.isLargeFile = true;
isList && state.uploadFiles.forEach((f) => {
if (f.cacheSign === file.cacheSign) {
f.percentage = 0;
}
});
api.largeDocumentUpload(file);
Modal.message({
message: `${file.name}${t(constants.EDM.LARGEFILEKEY)}`,
status: "warning"
});
} else {
vm.$refs[constants.UPLOAD_INNER].$refs[constants.UPLOAD_INNER_TEMPLATE].upload(file.raw);
}
};
const calcFileForMobile = (rawFile, file) => {
const fileName = rawFile.name.lastIndexOf(".");
const fileNameLen = rawFile.name.length;
file.fileType = rawFile.name.substring(fileName + 1, fileNameLen);
const size = rawFile.size / 1024;
if (size < 1024) {
file.size = Math.round(size * 10) / 10 + "KB";
} else {
const fileSize = size / 1024;
file.size = Math.round(fileSize * 10) / 10 + "MB";
}
};
const properFileSize = ({
props,
state,
api,
constants,
Modal,
t
}) => (file) => {
if ([void 0, null].includes(file.size))
return true;
let maxSize = 0;
if (Array.isArray(props.fileSize) && props.fileSize[1]) {
maxSize = state.isEdm ? Math.min(state.singleMaxSize, props.fileSize[1] / 1024) : Math.max(props.fileSize[0] / 1024, props.fileSize[1] / 1024);
} else {
maxSize = state.isEdm ? Math.min(state.singleMaxSize) : props.fileSize / 1024;
}
if (state.isEdm || Array.isArray(props.fileSize) && props.fileSize[1]) {
if (!isNaN(Number(maxSize)) && file.size > maxSize * 1024 * 1024) {
Modal.message({
message: t(constants.EDM.EXCEED, {
fileName: file.name,
maxSize: api.formatFileSize(Number(maxSize * 1024 * 1024))
}),
status: "warning"
});
return false;
}
}
if (file.size <= 0) {
Modal.message({
message: t(constants.EDM.FILEEMPTY),
status: "warning"
});
return false;
}
const userMin = props.fileSize && (props.fileSize[0] || props.fileSize) || 0;
if (file.size <= userMin * 1024) {
Modal.message({
message: `${t(constants.EDM.SIZE, { fileName: file.name, minSize: api.formatFileSize(Number(userMin), "KB"), sizeUnit: "" })}`,
status: "warning"
});
return false;
}
return true;
};
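// Normalizes a raw File into the internal file item (uid, status, percentage, raw,
// EDM metadata, preview url for picture list types), checks its size and pushes it
// onto state.uploadFiles, emitting "change".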
const addFileToList = ({
api,
constants,
emit,
props,
state,
mode
}) => (rawFile, updateId, reUpload) => {
!reUpload && (rawFile.uid = Date.now() + state.tempIndex++);
let file = { status: constants.FILE_STATUS.READY, name: rawFile.name, size: rawFile.size };
Object.assign(file, { percentage: 0, uid: rawFile.uid, raw: rawFile, response: {} });
file.type = api.getFileSourceType({ file });
if (state.isEdm) {
let fileBase = { serverName: "", docRelativePath: "", docId: "", docVersion: "", cacheSign: rawFile.uid };
file = Object.assign(file, fileBase);
props.edm.upload.isFolder && rawFile.webkitRelativePath && (file.path = rawFile.webkitRelativePath.match(/.*\//g)[0]);
}
state.cacheDocuments[file.uid] = file;
mode === "mobile" && calcFileForMobile(rawFile, file);
if ([
constants.LIST_TYPE.PICTURE_CARD,
constants.LIST_TYPE.PICTURE,
constants.LIST_TYPE.PICTURE_SINGLE,
constants.LIST_TYPE.DRAG_SINGLE
].includes(props.listType)) {
try {
if (state.isHwh5) {
file.url = rawFile.filePath;
} else {
file.url = URL.createObjectURL(rawFile);
}
} catch (err) {
return;
}
}
if (state.isEdm && state.isSuccess) {
const proper = api.properFileSize(file);
if (!proper) {
return;
}
state.updateId = updateId || props.edm.updateId || "";
if (reUpload) {
const index = state.uploadFiles.findIndex((item) => item.uid === file.uid);
state.uploadFiles.splice(index, 1);
} else if (state.updateId) {
const index = state.uploadFiles.findIndex((item) => item.docId === updateId);
state.uploadFiles.splice(index, 1, file);
emit("change", file, state.uploadFiles);
return;
}
}
if (!state.isEdm) {
const proper = api.properFileSize(file);
if (!proper) {
return;
}
}
state.uploadFiles.push(file);
state.currentUploadingFileUids.push(file.uid);
emit("change", file, state.uploadFiles);
};
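// Reads the raw file into an ArrayBuffer and computes its SHA-256 checksum;
// used by EDM uploads when edm.isCheckCode is enabled.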
const getFileHash = ({
emit,
Modal,
constants,
t,
state
}) => ({ file, showTips }) => {
if (showTips) {
Modal.message({
message: `${t(constants.EDM.CALCHASH)}`,
status: "warning"
});
}
return new Promise((resolve) => {
const reader = new FileReader();
reader.readAsArrayBuffer(file.raw);
reader.onload = async (e) => {
if (file.status === constants.FILE_STATUS.FAIL)
return;
const hash = sha256(e.target && e.target.result);
file.hash = file.raw.hash = hash;
resolve(hash);
emit("hash-progress", 100);
};
reader.onerror = (err) => {
file.status = constants.FILE_STATUS.FAIL;
emit("error", err, file, state.uploadFiles);
};
});
};
const handleHwh5Files = (files, hwh5) => {
const fileMap = hwh5 && hwh5.fileMap;
return files.map((file) => {
if (file instanceof File)
return file;
let url;
let f = {};
if (isObject(file)) {
url = file.url;
f = file;
} else {
url = file;
}
const matched = url.match(/[^/]*$/);
const name = matched[0];
const [type] = url.match(/\.[^.]*$/);
const filePath = url.substring(0, matched.index);
const updateFile2 = __spreadProps(__spreadValues({}, f), { type, name, filePath, webkitRelativePath: filePath });
return typeof fileMap === "function" ? fileMap(updateFile2) : updateFile2;
});
};
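// Entry point after file selection: adds the files to the list, then either runs the
// EDM flow (optional hashing, token fetch, startUpload) or, for plain autoUpload,
// runs beforeUpload and uploads each file (merged into a single request when
// props.multiple and props.mergeService are both set).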
const handleStart = ({
api,
constants,
props,
state,
vm
}) => (rawFiles, updateId, reUpload = false) => {
if (state.isHwh5) {
rawFiles = handleHwh5Files(rawFiles, props.hwh5);
}
state.currentUploadingFileUids = [];
rawFiles.forEach((rawFile) => api.addFileToList(rawFile, updateId, reUpload));
const { UPLOADING, READY } = constants.FILE_STATUS;
state.uploadingFiles = state.uploadFiles.filter((file) => [UPLOADING, READY].includes(file.status));
if (state.isEdm && state.isSuccess) {
rawFiles.forEach((rawFile) => {
const file = api.getFile(rawFile);
if (!file)
return;
api.beforeUpload(file, true, (file2) => {
typeof props.edm.upload.loading === "function" && props.edm.upload.loading(file2);
new Promise((resolve) => {
if (state.isHwh5)
return resolve();
let isLargeFileHash = false;
if (props.edm.isCheckCode !== true)
return resolve();
if (file2.size > state.docSize && file2.size > state.chunkSize) {
if (!state.isEntireCheckCode) {
return resolve();
} else {
isLargeFileHash = true;
}
}
api.getFileHash({ file: file2, showTips: isLargeFileHash }).then((hash) => resolve(hash));
}).then(() => {
if (props.autoUpload) {
const tokenParams = { token: props.edm.upload.token, file: file2, type: "upload" };
api.getToken(tokenParams).then((data) => {
if (data) {
file2.status = constants.FILE_STATUS.UPLOADING;
api.startUpload(file2, true);
}
});
}
});
});
});
}
if (!state.isEdm && props.autoUpload) {
if (props.multiple && props.mergeService) {
const handler = (file) => vm.$refs[constants.UPLOAD_INNER].$refs[constants.UPLOAD_INNER_TEMPLATE].upload(file.raw);
rawFiles.length && api.beforeUpload({ raw: rawFiles }, true, handler, true);
} else {
rawFiles.forEach((rawFile) => {
const file = api.getFile(rawFile);
if (!file)
return;
const handler = (file2) => vm.$refs[constants.UPLOAD_INNER].$refs[constants.UPLOAD_INNER_TEMPLATE].upload(file2.raw);
api.beforeUpload(file, true, handler);
});
}
}
};
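// Aggregates overall progress for the files currently uploading: the average of
// per-file percentages on HWH5, otherwise loaded bytes over state.uploadingSize.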
const calcUploadingFilesInfo = ({ state, constants }) => () => {
let percentage;
if (state.isHwh5) {
const totalPercentage = state.uploadingFiles.reduce((total, file) => {
const curPercentage = file.status !== constants.FILE_STATUS.FAIL ? file.percentage / 100 : 0;
return total + curPercentage;
}, 0);
percentage = Math.floor(totalPercentage / state.uploadingFiles.length * 100);
} else {
const totalLoadedSize = state.uploadingFiles.reduce((loadedSize, file) => {
const loaded = file.status !== constants.FILE_STATUS.FAIL ? file.size * file.percentage / 100 : 0;
return loadedSize + loaded;
}, 0);
percentage = Math.floor(totalLoadedSize / state.uploadingSize * 100);
}
percentage = Math.min(percentage, 100);
const uploadedFiles = state.uploadingFiles.filter((file) => file.percentage === 100);
return {
percentage,
uploadList: state.uploadingFiles,
uploadedCount: uploadedFiles.length
};
};
const handleProgress = ({ api, constants, emit, state }) => (event, rawFile) => {
if (Array.isArray(rawFile)) {
state.uploadFiles.forEach((file) => {
if (rawFile.some((raw) => file.uid === raw.uid)) {
file.status = constants.FILE_STATUS.UPLOADING;
if (event.lengthComputable) {
file.percentage = Math.floor(event.loaded * 100 / event.total) || 0;
}
emit("progress", file, state.uploadFiles, api.calcUploadingFilesInfo());
}
});
} else {
const file = api.getFile(rawFile);
if (file) {
file.status = constants.FILE_STATUS.UPLOADING;
if (state.isHwh5) {
const { progress } = JSON.parse(event);
file.percentage = progress;
if (file.percentage >= 100) {
file.isFinished = true;
}
emit("progress", file, state.uploadFiles, api.calcUploadingFilesInfo());
} else {
if (event.lengthComputable && !file.isLargeFile) {
file.percentage = Math.floor(event.loaded * 100 / event.total) || 0;
if (file.percentage >= 100) {
file.isFinished = true;
}
emit("progress", file, state.uploadFiles, api.calcUploadingFilesInfo());
}
}
}
}
};
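// Marks files as successful, copies EDM response metadata (serverName, docRelativePath,
// docId, version, docSize) onto the file item and emits "success" and "change".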
const handleSuccess = ({
api,
constants,
emit,
state,
props,
Modal,
t
}) => (res, rawFile) => {
var _a;
const currentUploadFiles = state.uploadFiles.filter((file) => state.currentUploadingFileUids.includes(file.uid));
if (Array.isArray(rawFile)) {
state.uploadFiles.forEach((file) => {
if (rawFile.some((raw) => file.uid === raw.uid)) {
file.status = constants.FILE_STATUS.SUCESS;
file.percentage = 100;
file.response = res;
emit("success", res, file, currentUploadFiles);
emit("change", file, state.uploadFiles);
delete file.cancelToken;
}
});
} else {
const file = api.getFile(rawFile);
const status = (_a = res == null ? void 0 : res.data) == null ? void 0 : _a.status;
const { STATUS_SPECIAL_CHARACTERS, NOT_SUPPORT_SPECIAL_CHARACTERS } = constants.EDM;
file && delete file.cancelToken;
if (props.edm.upload && file && res.data && status !== 200) {
if (status === STATUS_SPECIAL_CHARACTERS) {
Modal.message({
message: `${t(NOT_SUPPORT_SPECIAL_CHARACTERS)}`,
status: "warning"
});
}
file.status = constants.FILE_STATUS.FAIL;
emit("error", res, file, state.uploadFiles);
return;
}
if (file) {
file.status = constants.FILE_STATUS.SUCESS;
file.percentage = 100;
if (!file.isFinished) {
emit("progress", file, state.uploadFiles, api.calcUploadingFilesInfo());
}
file.isLargeFile && delete res.config;
file.response = res;
if (state.isEdm) {
const result = state.isHwh5 ? res : res.data.result;
if (!result)
return;
file.serverName = result.serverName;
file.docRelativePath = result.docRelativePath;
file.docId = result.docId;
file.docVersion = result.version;
file.docSize = result.docSize;
file.isLargeFile && delete file.raw;
Object.assign(file, result);
}
emit("success", res, file, currentUploadFiles);
emit("change", file, state.uploadFiles);
}
}
api.clearUploadingFiles();
};
const handleError = ({
api,
constants,
emit,
state,
props
}) => (err, rawFile) => {
const file = api.getFile(rawFile);
if (!file)
return;
file.status = constants.FILE_STATUS.FAIL;
file.percentage = 100;
if (!state.isEdm && !props.reUploadable) {
state.uploadFiles.splice(state.uploadFiles.indexOf(file), 1);
}
api.clearUploadingFiles();
emit("error", err, file, state.uploadFiles);
emit("change", file, state.uploadFiles);
};
const handleRemove = ({
api,
emit,
props,
state,
constants
}) => (file, raw) => {
if (raw) {
file = api.getFile(raw);
}
let doRemove = () => {
api.abort(file);
let fileList = state.uploadFiles;
fileList.splice(fileList.indexOf(file), 1);
emit("remove", __spreadProps(__spreadValues({}, file), { status: constants.FILE_STATUS.FAIL }), fileList);
};
if (!props.beforeRemove) {
doRemove();
} else if (typeof props.beforeRemove === "function") {
const before = props.beforeRemove(file, state.uploadFiles);
if (before && before.then) {
before.then(
() => {
doRemove();
},
() => void 0
);
} else if (before !== false) {
doRemove();
}
}
};
const handleReUpload = ({ vm, constants }) => (file) => {
const { READY } = constants.FILE_STATUS;
file.status = READY;
file.percentage = 0;
vm.$refs[constants.UPLOAD_INNER].$refs[constants.UPLOAD_INNER_TEMPLATE].upload(file.raw);
};
const handleReUploadTotal = (api) => (files) => {
files.forEach((file) => {
if (file.status === "fail") {
api.handleReUpload(file);
}
});
};
const clearUploadingFiles = ({ constants, state }) => () => {
const { SUCESS, FAIL } = constants.FILE_STATUS;
const isUploadComplete = state.uploadingFiles.every((file) => [SUCESS, FAIL].includes(file.status));
if (isUploadComplete) {
state.uploadingFiles = [];
}
};
const getFile = (state) => (rawFile) => {
let fileList = state.uploadFiles;
let target;
fileList.every((item) => {
target = rawFile.uid === item.uid ? item : null;
return !target;
});
return target;
};
const abort = ({ constants, vm, state }) => (file) => {
const { READY, UPLOADING, FAIL } = constants.FILE_STATUS;
if (file) {
state.uploadingFiles.forEach((f) => {
const uid = file.uid || file;
if (f.uid === uid && [READY, UPLOADING].includes(f.status)) {
f.status = FAIL;
}
});
} else {
state.uploadingFiles.forEach((f) => {
if ([READY, UPLOADING].includes(f.status)) {
f.status = FAIL;
}
});
}
vm.$refs[constants.UPLOAD_INNER].$refs[constants.UPLOAD_INNER_TEMPLATE].abort(file);
};
const abortDownload = ({ state }) => (file, batch = false) => {
const cancel = (docId) => {
if (!docId)
return;
const cancels = state.downloadCancelToken[docId];
cancels && cancels.forEach((cancel2) => cancel2());
delete state.downloadCancelToken[docId];
const clearDataFn = state.downloadCancelData[docId];
clearDataFn && clearDataFn(docId);
};
if (Array.isArray(file)) {
if (batch) {
cancel(
file.map((f) => f.docId || f).sort().join(",")
);
} else {
file.forEach((f) => f && cancel(f.docId || f));
}
} else if (file) {
cancel(file.docId || file);
} else {
Object.keys(state.downloadCancelToken).forEach((docId) => {
cancel(docId);
});
}
};
const clearFiles = (state) => () => {
state.uploadFiles = [];
};
const submit = ({
api,
constants,
vm,
state,
props
}) => () => {
const files = state.uploadFiles.filter((file) => file.status === constants.FILE_STATUS.READY);
if (state.isEdm && state.isSuccess) {
files.forEach((file) => {
api.getToken({
token: props.edm.upload.token,
file,
type: "upload"
}).then((data) => {
if (data) {
api.beforeUpload(file, false, (file2) => {
api.startUpload(file2);
});
}
});
});
} else {
if (props.multiple && props.mergeService) {
const rawFiles = files.map((file) => file.raw);
rawFiles.length && api.beforeUpload({ raw: rawFiles }, false, (file) => {
vm.$refs[constants.UPLOAD_INNER].$refs[constants.UPLOAD_INNER_TEMPLATE].upload(file.raw);
});
} else {
files.forEach((file) => {
api.beforeUpload(file, false, (file2) => {
vm.$refs[constants.UPLOAD_INNER].$refs[constants.UPLOAD_INNER_TEMPLATE].upload(file2.raw);
});
});
}
}
};
const handleClick = ({ constants, vm }) => () => vm.$refs[constants.UPLOAD_INNER].$refs[constants.UPLOAD_INNER_TEMPLATE].handleClick();
const getFileUploadUrl = (service) => () => service.getFileUploadUrl();
const updateUrl = ({ api, props, state }) => () => {
if (props.action) {
state.url = props.action;
} else {
api.getFileUploadUrl().then((url) => state.url = url);
}
};
const handleFileClick = ({ props, emit }) => (file) => {
emit("preview", file);
props.preview && props.preview(file);
};
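// Turns a download response into a saved file: for chunked downloads the first chunk
// wires up the stream writer (setWriterFile), otherwise the response blob is saved
// directly through a temporary <a download> link (msSaveOrOpenBlob on legacy IE/Edge).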
const getTranslateFile = ({
api,
isChunk,
isLessThan17G,
file,
state
}) => (data, type, index) => {
if (isChunk) {
if (index === 0) {
state.downloadCancelData[file.docId] = api.setWriterFile({ data, index, isLessThan17G, file });
}
} else {
const content = data.headers["content-disposition"];
const name = content ? content.match(/fileName.?=(.*)/)[1] || content.match(/fileName=(.*)/)[1] : "";
let type2 = "application/zip";
if (!name.includes(".")) {
type2 = data.headers["content-type"];
} else if (type !== "zip") {
type2 = "application/x-xls";
}
const blob = new Blob([data.data], { type: type2 });
aLinkDownload({ blob, name });
}
};
const aLinkDownload = ({ blob, name }) => {
if (window && window.navigator.msSaveOrOpenBlob) {
window.navigator.msSaveOrOpenBlob(blob, decodeURIComponent(name));
return;
}
const url = window.URL || window.webkitURL || window.moxURL;
const downloadHref = xss.filterUrl(url.createObjectURL(blob));
let downloadLink = document.createElement("a");
downloadLink.href = downloadHref;
downloadLink.download = decodeURIComponent(name);
downloadLink.click();
url.revokeObjectURL && url.revokeObjectURL(downloadHref);
};
const getHandleSuccess = ({
state,
downloadOps,
file,
translateFile,
isChunk,
isLessThan17G
}) => (data, type, index) => {
if (isChunk) {
const res = isLessThan17G ? data.data : new Uint8Array(data.data);
let downloadChunkFile = state.downloadChunkFile[file.docId];
if (!downloadChunkFile) {
downloadChunkFile = {};
}
downloadChunkFile[index] = res;
translateFile(data, type, index);
} else {
typeof downloadOps.loading === "function" && downloadOps.loading(file);
translateFile(data, type);
}
return true;
};
const getCalcProgress = () => (evt) => {
let total;
if (evt.target && evt.target.getResponseHeader) {
total = Number(evt.target.getResponseHeader("Content-Size"));
} else {
total = Number(evt.total);
}
total = Math.max(total, evt.loaded);
let progress = Math.ceil(evt.loaded / total * 100) || 0;
progress = Math.max(progress, 0);
progress = Math.min(progress, 100);
return progress;
};
const modifyServiceUrlSingle = ({ state, props, constants }) => ({ file, serviceUrl, range }) => {
if (typeof file === "object") {
const downloadOps = props.edm.download || {};
const paramsWhitelist = Array.isArray(downloadOps.paramsWhitelist) ? downloadOps.paramsWhitelist : [];
const downloadParamsWhitelist = state.downloadParamsWhitelist.concat(paramsWhitelist);
let tempFile = {};
downloadParamsWhitelist.forEach((key) => tempFile[key] = file[key]);
tempFile = Object.assign(tempFile, range);
delete tempFile.docId;
delete tempFile.docVersion;
delete tempFile["x-download-sign"];
for (let key in tempFile) {
const value = tempFile[key];
const dataType = typeof value;
if (!~["undefined", "object", "function"].indexOf(dataType)) {
serviceUrl += `&${key}=${value}`;
}
}
file.status = constants.FILE_STATUS.DOWNLOADING;
file.percentage = 0;
}
return serviceUrl;
};
const getKiaScanTip = ({ Modal, constants, t }) => ({ data }) => {
if (data.status === constants.EDM.KIASTATUS) {
return Modal.message({
message: `${t(constants.EDM.KIASCANTIP)}`,
status: "warning"
});
}
};
const validateDownloadStatus = ({ state, Modal }) => ({
downloadOps,
file,
isLessThan17G,
data
}) => {
const errorHandle = ({ state: state2, file: file2, errRes, Modal: Modal2, downloadOps: downloadOps2 }) => {
if (state2.currentDownloadFiles && state2.currentDownloadFiles.docId === file2.docId)
return;
if (errRes && errRes.message) {
Modal2.message({
message: errRes.message,
status: "warning"
});
}
state2.currentDownloadFiles = file2;
if (typeof downloadOps2.fail === "function") {
downloadOps2.fail(errRes, file2);
}
};
if (data.data && data.data.type && data.data.type.includes("application/json")) {
const reader = new FileReader();
reader.onload = (e) => {
const errRes = JSON.parse(e.target.result);
errorHandle({ state, file, errRes, Modal, downloadOps });
};
reader.readAsText(data.data);
return true;
}
if (!isLessThan17G && data.headers["content-type"].includes("application/json")) {
const errRes = JSON.parse(String.fromCharCode.apply(null, new Uint8Array(data.data)));
errorHandle({ state, file, errRes, Modal, downloadOps });
return true;
}
};
const createDownloadCancelToken = ({ state, service }) => (file) => {
let docId;
if (Array.isArray(file)) {
docId = file.map((f) => f.docId || f).sort().join(",");
} else {
docId = file.docId || file;
}
if (!state.downloadCancelToken[docId]) {
state.downloadCancelToken[docId] = [];
}
const { cancel, token } = service.cancelToken();
state.downloadCancelToken[docId].push(cancel);
return token;
};
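// Downloads a single document or one chunk of a large document: resolves the EDM
// download URL, reports progress, validates the response (KIA scan, error payloads),
// and on a status code listed in state.errorStatusCodes fetches a new token and lets
// afterDownload decide whether to retry.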
const downloadFileSingle = ({
service,
constants,
props,
state,
api,
emit
}) => (args) => {
let {
file,
batchIndex,
isChunk,
calcProgress,
handleSuccess: handleSuccess2,
range = {},
isBatch,
isLessThan17G,
url: fileUrl
} = args;
let promise;
if (fileUrl) {
promise = Promise.resolve(fileUrl);
} else {
promise = service.getSingleDownloadUrl().then((url) => {
let serviceUrl = url.replace(/{docId}/, file.docId || file) + `${~url.indexOf("?") ? "&" : "?"}x-download-sign=true&docVersion=${file.docVersion || ""}${file.decryptKey ? "&decryptKey=" + file.decryptKey : ""}`;
serviceUrl = api.modifyServiceUrlSingle({ file, serviceUrl, range });
return serviceUrl;
});
}
promise.then((url) => {
url = xss.filterUrl(url);
let params = {
withCredentials: props.withCredentials,
headers: Object.assign(props.headers, state.headers),
responseType: isChunk && !isLessThan17G ? "arraybuffer" : "blob",
hideErr: true,
cancelToken: api.createDownloadCancelToken(file),
onDownloadProgress(evt) {
let progress = calcProgress(evt, isChunk);
if (progress !== 100) {
!isChunk && emit("download", progress, evt, {}, file);
}
if (typeof file === "object") {
file.percentage = progress;
}
}
};
service.get(url, params).then((data) => {
if (api.getKiaScanTip({ data }))
return;
if (api.validateDownloadStatus({ downloadOps: props.edm.download || {}, file, isLessThan17G, data }))
return;
handleSuccess2(data, "", range.index);
const { checkcode, "content-size": fileSize } = data.headers;
!isChunk && emit("download", 100, "", { checkcode, fileSize }, file);
if (typeof file === "object") {
file.percentage = 100;
setTimeout(() => file.status = constants.FILE_STATUS.SUCESS, 1e3);
}
api.afterDownload({ batchIndex, data, file });
}).catch((data) => {
if (data.response && state.errorStatusCodes.includes(data.response.status)) {
const downloadOps = props.edm.download || {};
const tokenParams = { token: downloadOps.token, file, type: "download" };
api.getToken(tokenParams).then((data2) => {
api.afterDownload({ batchIndex, data: data2, file, range, isChunk, isBatch, isLessThan17G });
});
}
});
});
};
const downloadFileBatch = ({
api,
service,
props,
state,
emit
}) => (args) => {
let { downloadOps, file, calcProgress, handleSuccess: handleSuccess2, range = {}, isLessThan17G } = args;
let tokenParams = { token: downloadOps.packageToken, file, type: "download" };
const { asyncPackages } = downloadOps || {};
api.getToken(tokenParams).then((data) => {
if (!data) {
return;
}
const params = { downloadTOs: [], attachdownloadTOs: [], isZip: "true", transformType: "sync", type: "package" };
file.forEach((item) => {
if (item.wmType) {
params.attachdownloadTOs.push(item);
} else {
params.downloadTOs.push(item);
}
});
if (asyncPackages) {
api.downloadAsyncPackage(params);
return;
}
service.getPackageDownloadUrl().then((url) => {
url = xss.filterUrl(url + `${~url.indexOf("?") ? "&" : "?"}x-download-sign=true`);
service.post(
url,
__spreadValues(__spreadValues({}, params), range),
{
withCredentials: props.withCredentials,
headers: Object.assign(props.headers, state.headers),
responseType: "blob",
cancelToken: api.createDownloadCancelToken(file),
onDownloadProgress(evt) {
let progress = calcProgress(evt);
if (progress !== 100) {
emit("download", progress, evt, {}, file);
}
}
}
).then((data2) => {
if (api.getKiaScanTip({ data: data2 }))
return;
if (api.validateDownloadStatus({ downloadOps: props.edm.download || {}, file, isLessThan17G, data: data2 }))
return;
const { "content-size": fileSize, checkcode } = data2.headers;
emit("download", 100, "", { fileSize, checkcode }, file);
handleSuccess2(data2, "zip");
});
});
});
};
const downloadAsyncPackage = ({ state, props, service, api, constants }) => (params) => {
return service.getAsyncPackageDownload().then((url) => {
service.request({
method: "post",
url: xss.filterUrl(url),
withCredentials: props.withCredentials,
headers: Object.assign(props.headers, state.headers),
data: params
}).then((res) => {
if (res && res.data && res.data.status === 200) {
const files = (res.data.result || []).map(({ downloadLink, fileSize }) => {
return { url: downloadLink, fileSize };
});
const { SIZE_17G } = constants.EDM;
const isBatch = false;
const isChunk = false;
const downloadOps = props.edm.download || {};
files.forEach((file) => {
const isLessThan17G = !file.fileSize || file.fileSize < SIZE_17G * 1024;
const translateFile = getTranslateFile({ api, isChunk, isLessThan17G, file, state });
const handleSuccess2 = getHandleSuccess({
downloadOps,
file,
translateFile,
isChunk,
state,
isLessThan17G
});
const calcProgress = getCalcProgress();
const args = {
url: file.url,
calcProgress,
handleSuccess: handleSuccess2,
downloadOps,
file,
isLessThan17G,
isFinished: false,
range: {},
batchIndex: 0,
isBatch,
isChunk
};
api.downloadFileSingle(args);
});
}
});
});
};
const downloadFileSingleHwh5 = ({ state, props, emit, constants }) => ({ file }) => {
const { HWH5, appId } = props.hwh5;
const { downloadToEDM } = HWH5();
const options = {
edmAuth: {
edmToken: state.headers[constants.EDM.EDMTOKEN],
appId
},
docId: file.docId || file,
docVersion: file.docVersion,
filePath: file.filePath,
progress: 1,
onProgress: (event) => {
const { progress } = JSON.parse(event);
if (progress * 1 !== 100) {
emit("download", progress, event, {}, file);
}
}
};
downloadToEDM(options).then((data) => {
emit("download", 100, "", { data }, file);
});
};
const downloadFile = ({ api, state }) => (file) => {
state.currentDownloadFiles = "";
if (!state.isEdm) {
api.ordinaryDownload(file);
} else {
const isBatch = Array.isArray(file);
if (state.isHwh5) {
let files = file;
if (!isBatch) {
files = [file];
}
files.forEach((f) => api.downloadFileSingleInner({ file: f, isBatch: false }));
return;
}
if (isBatch) {
api.downloadFileInner({ file, isBatch });
} else {
api.downloadFileSingleInner({ file, isBatch });
}
}
};
const downloadFileSingleInner = ({ props, state, api, constants }) => ({ file, isBatch }) => {
const { SIZE_17G } = constants.EDM;
const downloadOps = props.edm.download || {};
let tokenParams = { token: downloadOps.token, file, type: "download" };
api.getToken(tokenParams).then((data) => {
if (!data)
return;
if (state.isHwh5) {
api.downloadFileSingleHwh5({ file });
return;
}
const param = { docId: file.docId, docVersion: file.docVersion };
const promise = state.hasFileInfoInterface ? api.getDownloadFileInfo(param) : Promise.resolve();
promise.then((fileInfo) => {
const { fileSize } = fileInfo || {};
const isLargeFile = fileSize > state.docSize && fileSize > state.chunkSize;
if (fileSize && isLargeFile) {
api.largeDocumentDownload({ file: fileInfo, isBatch, isLessThan17G: fileSize < SIZE_17G * 1024 });
} else {
api.downloadFileInner({ file, isBatch });
}
});
});
};
const getDownloadFileInfo = ({ api, state, props, service }) => ({ docId, docVersion }) => {
return service.getDocumentInfoUrl().then((url) => {
return new Promise((resolve, reject) => {
service.request({
method: "post",
url: xss.filterUrl(url),
withCredentials: props.withCredentials,
headers: Object.assign(props.headers, state.headers),
cancelToken: api.createDownloadCancelToken({ docId }),
data: { docInfoVO: { ids: [docId], docType: "", docVersion: docVersion || "" } }
}).then((res) => {
const { data } = res || {};
if (data && data.status === 200) {
const fileInfo = data.result.outDocQueryList && data.result.outDocQueryList[0].verInfo[0].docInfo[0];
resolve(fileInfo);
} else {
reject(res);
}
});
});
});
};
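// Large-file download: splits the document into ranges of state.chunkSize bytes,
// groups them into batches of state.downloadChunkLimit requests and downloads them
// batch by batch; the chunks are reassembled by setWriterFile.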
const largeDocumentDownload = ({ api, state }) => ({ file, isBatch, isLessThan17G }) => {
const { fileSize, docId, docName } = file;
const chunkSize = Math.ceil(fileSize / state.chunkSize);
state.downloadChunkFile[docId] = { chunkNum: chunkSize, fileSize, docName };
file.chunkSize = chunkSize;
const batches = api.sliceDownloadChunk(file);
api.batchSegmentDownload({ batchIndex: 0, batches, docId: file.docId, isBatch, isLessThan17G });
};
const sliceDownloadChunk = ({ state }) => (file) => {
const chunkSize = file.chunkSize;
const downloadChunkArray = [[]];
state.downloadBatchQueue[file.docId + "-0"] = 0;
state.downloadBatchQueueListen[file.docId + "-0"] = 0;
let startRange = 0;
let endRange = -1;
for (let i = 0; i < chunkSize; i++) {
startRange = endRange + 1;
endRange = Math.min(file.fileSize, startRange + state.chunkSize - 1);
if (endRange < startRange) {
return downloadChunkArray;
}
const lastIndex = downloadChunkArray.length - 1;
if (downloadChunkArray[lastIndex].length < state.downloadChunkLimit) {
downloadChunkArray[lastIndex].push({ startRange, endRange, index: i });
} else {
state.downloadBatchQueue[file.docId + "-" + downloadChunkArray.length] = 0;
state.downloadBatchQueueListen[file.docId + "-" + downloadChunkArray.length] = 0;
downloadChunkArray.push([]);
downloadChunkArray[lastIndex + 1].push({
startRange,
endRange,
index: i
});
}
}
return downloadChunkArray;
};
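// Downloads one batch of chunk ranges; a setter on the per-batch counter triggers the
// next batch once every request of the current batch has completed.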
const batchSegmentDownload = ({ state, api }) => ({ batchIndex, batches, docId, isBatch, isLessThan17G }) => {
if (batchIndex < batches.length) {
const batch = batches[batchIndex];
const key = docId + "-" + batchIndex;
Object.defineProperty(state.downloadBatchQueue, key, {
get() {
return state.downloadBatchQueueListen[key];
},
set(value) {
state.downloadBatchQueueListen[key] = value;
if (value >= batch.length) {
api.batchSegmentDownload({
docId,
batchIndex: ++batchIndex,
batches,
isBatch,
isLessThan17G
});
}
}
});
let index = 0;
while (batch.length - index > 0) {
api.downloadFileInner({
batchIndex,
range: batch[index++],
file: { docId },
isBatch,
isChunk: true,
isLessThan17G
});
}
}
};
const downloadFileInner = ({ api, props, state }) => ({ batchIndex, file, range, isBatch, isChunk, isLessThan17G }) => {
const downloadOps = props.edm.download || {};
const translateFile = getTranslateFile({ api, isChunk, isLessThan17G, file, state });
const handleSuccess2 = getHandleSuccess({ downloadOps, file, translateFile, isChunk, state, isLessThan17G });
const calcProgress = getCalcProgress();
let isFinished = false;
if (!isBatch) {
const args = {
calcProgress,
isFinished,
handleSuccess: handleSuccess2,
range,
batchIndex,
isBatch,
downloadOps,
file,
isChunk,
isLessThan17G
};
api.downloadFileSingle(args);
return;
}
const params = { downloadOps, file, calcProgress, handleSuccess: handleSuccess2, range, isLessThan17G };
isBatch && api.downloadFileBatch(params);
};
const afterDownload = ({ api, state }) => ({ batchIndex, range, data, file, isBatch, isChunk, isLessThan17G }) => {
if (data.status === 200) {
const key = file.docId + "-" + batchIndex;
if (state.downloadBatchQueue[key] !== void 0) {
state.downloadBatchQueue[key] += 1;
} else {
state.downloadBatchQueue[key] = 0;
}
} else {
const key = file.docId + "-" + range.index;
const countDownloadReplay = state.downloadReplayAtoms[key];
if (countDownloadReplay && countDownloadReplay >= 2) {
const msgArray = [
"The number of retry times exceeds the threshold! [docId:",
file.docId,
", chunk:",
range.index,
"]"
];
logger.warn(msgArray.join(""));
delete state.downloadReplayAtoms[key];
} else {
if (state.downloadReplayAtoms[key] === void 0) {
state.downloadReplayAtoms[key] = 0;
}
const msgArray = ["replay ", countDownloadReplay, "! [docId:", file.docId, ", chunk:", range.index, "]"];
logger.warn(msgArray.join(""));
state.downloadReplayAtoms[key] += 1;
api.downloadFileInner({ batchIndex, range, file, isBatch, isChunk, isLessThan17G });
}
}
};
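// Reassembles downloaded chunks in order: files under 17 GB are buffered and saved as
// a single Blob, larger files are streamed to disk through StreamSaver. Returns a
// cleanup function that is called when the download is cancelled.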
const setWriterFile = ({
state,
emit,
Streamsaver
}) => ({ data, index, isLessThan17G, file }) => {
let fileStream;
let writer;
let fileData = [];
let downloaded = 0;
const { checkcode } = data.headers;
const content = data.headers["content-disposition"];
let { chunkNum, fileSize, docName } = state.downloadChunkFile[file.docId];
if (content) {
docName = content.match(/fileName.?=(.*)/)[1] || content.match(/fileName=(.*)/)[1] || docName;
}
if (!isLessThan17G) {
fileStream = Streamsaver.createWriteStream(docName, { size: data.byteLength });
writer = fileStream.getWriter();
}
const writerStreamSaver = () => {
const downloadChunkFile = state.downloadChunkFile[file.docId] || {};
let chunk = downloadChunkFile[index];
if (chunk) {
if (!isLessThan17G) {
writer.write(chunk).then(() => {
downloaded += chunk.byteLength;
downloadChunkFile[index] = null;
delete downloadChunkFile[index];
if (index + 1 >= chunkNum) {
delete state.downloadChunkFile[file.docId];
emit("download", 100, "", { fileSize, checkcode }, file);
writer.close();
} else {
const progress = Math.ceil(downloaded / fileSize * 100) || 0;
progress !== 100 && emit("download", progress, "", { fileSize, checkcode }, file);
index++;
writerStreamSaver();
}
});
} else {
fileData.push(chunk);
downloaded += chunk.size;
state.downloadChunkFile[file.docId][index] = null;
delete state.downloadChunkFile[file.docId][index];
if (index + 1 >= chunkNum) {
delete state.downloadChunkFile[file.docId];
aLinkDownload({ blob: new Blob(fileData), name: docName });
emit("download", 100, "", { fileSize, checkcode }, file);
} else {
const progress = Math.ceil(downloaded / fileSize * 100) || 0;
progress !== 100 && emit("download", progress, "", { fileSize, checkcode }, file);
index++;
writerStreamSaver();
}
}
} else {
setTimeout(() => writerStreamSaver(), 1e3);
}
};
writerStreamSaver();
return (docId) => {
const downloadChunkFile = state.downloadChunkFile[docId];
Object.keys(downloadChunkFile).forEach((k) => downloadChunkFile[k] = null);
delete state.downloadChunkFile[docId];
if (isLessThan17G) {
fileData = [];
} else {
writer && writer.close();
}
};
};
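// Builds the multipart form data for EDM uploads: custom params, the checkcode flags,
// then either the chunk-init fields (file size, chunk count, file name) or a single
// chunk payload, plus the updateFile flag when replacing an existing document.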
const getFormData = ({ constants, props, state }) => ({ formData, file, type }) => {
if (state.isEdm && props.edm.upload) {
const params = Object.assign({}, props.data || {}, props.edm.upload.params || {});
for (let key in params) {
formData.set(key, params[key] || "");
}
}
if (props.edm.isCheckCode === true) {
formData.append(constants.EDM.ISCHECKCODE, "Y");
file.hash && formData.append(constants.EDM.CHECKCODE, file.hash);
} else {
formData.append(constants.EDM.ISCHECKCODE, "N");
}
const updateId = state.updateId || props.edm.updateId;
if (type === constants.EDM.CHUNKINIT) {
formData.append(constants.EDM.FILESIZE, file.size);
formData.append(constants.EDM.CHUNKS, file.chunkSize);
formData.append(constants.EDM.FILENAME, file.name);
updateId && formData.append(constants.EDM.DOCID, updateId);
} else {
formData.append(constants.EDM.MULTIPART, file, file.filename);
formData.append(constants.EDM.CHUNK, file.chunk);
formData.append(constants.EDM.LOWERNAME, file.filename);
const docId = updateId || file.docId;
formData.append(constants.EDM.DOCID, docId);
}
updateId && formData.append("updateFile", true);
return formData;
};
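// Large-file upload: initializes a segmented upload on the server, slices the file
// into chunks and uploads them in batches; if initialization fails, a warning is shown
// and the file is removed from the list.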
const largeDocumentUpload = ({
api,
Modal,
state,
t,
emit,
constants
}) => (file) => {
const chunkSize = Math.ceil(file.size / state.chunkSize);
file.chunkSize = chunkSize;
file.cancelToken = [];
api.segmentUploadInit(file).then((data) => {
if (data && data.docId) {
file.records = data.chunks;
file.docId = data.docId;
state.largeFileInfo[data.docId] = file;
const batches = api.sliceChunk(file);
api.batchSegmentUpload({
docId: data.docId,
batchIndex: 0,
batches,
progress: { size: file.size, trunks: [], file }
});
} else {
Modal.message({
message: t("ui.fileUpload.init"),
status: "warning",
duration: "1000"
});
const file2 = api.getFile(file);
state.uploadFiles.splice(state.uploadFiles.indexOf(file2), 1);
}
}).catch((err) => {
file.status = constants.FILE_STATUS.FAIL;
emit("error", err, file, state.uploadFiles);
});
};
const segmentUploadInit = ({
api,
props,
service,
state,
constants
}) => (file) => {
const formData = new FormData();
return new Promise((resolve, reject) => {
service.getLargeFileInitUrl().then((data) => {
service.request({
method: "post",
url: xss.filterUrl(data),
data: api.getFormData({ formData, file, type: constants.EDM.CHUNKINIT }),
withCredentials: props.withCredentials,
headers: Object.assign(props.headers, state.headers)
}).then((data2) => {
if (data2.data.status === 200) {
resolve(data2.data.result);
} else {
reject(data2);
}
});
});
});
};
const afterUpload = ({
data,
file,
batchIndex,
state,
api,
progress
}) => {
if (data.status === 200) {
const key = file.docId + "-" + batchIndex;
if (state.batchQueue[key] !== void 0) {
state.batchQueue[key] += 1;
} else {
state.batchQueue[key] = 0;
}
} else {
const key = file.docId + "-" + file.chunk;
const countReplay = state.replayAtoms[key];
if (countReplay && countReplay >= 2) {
const msgArray = [
"The number of retry times exceeds the threshold! [docId:",
file.docId,
", chunk:",
file.chunk,
"]"
];
logger.warn(msgArray.join(""));