@opengis/fastify-table
/* eslint-disable no-console */
/* eslint-disable no-param-reassign */
/* eslint-disable no-plusplus */
/* eslint-disable no-await-in-loop */
/* eslint-disable no-nested-ternary */
import path from "node:path";
import { createHash } from "node:crypto";
import { existsSync } from "node:fs";
import { appendFile, mkdir, readFile, rm, writeFile } from "node:fs/promises";
import config from "../../../../config.js";
import logger from "../../../plugins/logger/getLogger.js";
import getTemplate from "../../../plugins/table/funcs/getTemplate.js";
import getMeta from "../../../plugins/pg/funcs/getMeta.js";
import pgClients from "../../../plugins/pg/pgClients.js";
import eventStream from "../../../plugins/util/funcs/eventStream.js";
import getData from "../../table/functions/getData.js";
import getFolder from "../../../plugins/crud/funcs/utils/getFolder.js";
import metaFormat from "../../../plugins/table/funcs/metaFormat/index.js";
import jsonToXls from "./utils/jsonToXls.js";
import jsonToCsv from "./utils/jsonToCsv.js";
import formatResult from "./utils/formatResult.js";
import jsonlToJsonFile from "./utils/jsonlToJsonFile.js";
import pubsub from "./utils/pubsub.js";
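// filtered row counts above this threshold switch the export to streamed progress events (and cap the page size)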
const startStreamWithTotal = 10000;
const rootDir = getFolder(config, "local");
/**
 * Export table data
 *
 * @method GET
 * @alias exportTable
 * @type api
 * @tag export
 * @summary Export data to a file (xlsx, csv, json, geojson)
 * @priority 1
 * @example
 * /api/export?table=com_property.subjects.table&format=csv&cols=economy_type,name_ua
 * @param {String} format Output document format
 * @param {Boolean} nocache Whether to bypass the cache
 * @param {String} table Database table
 * @param {String|Number} filter Filter to apply to the exported data
 * @errors 400, 500
 * @returns {Number} status Error code
 * @returns {String} error Error description
 * @returns {String|Object} message Returns the SQL query, the opt object, or the SQL query rows
 * @returns {String} file Path to the file for download or display
 */
export default async function exportTable({
  pg = pgClients.client,
  headers,
  user,
  columns: columns1,
  cls,
  query = {},
  host = "127.0.0.1",
  tableSql,
  sourceName,
}, reply) {
  const {
    id,
    cols,
    search,
    format = "json",
    table,
    filter = "empty",
    nocache,
    formatAnswer = "file",
    sql,
    stream,
  } = query;
  if (!table && !tableSql) {
    return reply.status(400).send("not enough params: table");
  }
  if (!["csv", "xlsx", "json", "geojson"].includes(format)) {
    return reply.status(400).send("param format is invalid");
  }
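  // optional preview mode: cap the export at 16 rows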
  const slice = query.slice ? Math.min(+query.slice || 16, 16) : null;
  const date = new Date();
  const sufixName = `${filter}-${cols || "all"}-${search}-${query.limit || "unlimited"}-${query.slice || "full"}-${id}`;
  const sufixDate = [
    date.getFullYear(),
    date.getMonth(),
    date.getDate(),
    date.getHours(),
  ].join("-");
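  // cache key: md5 of the query signature plus an hour-granular timestamp, so cached exports expire within the hour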
  const objInfo = createHash("md5")
    .update([sufixName, sufixDate].join("-"))
    .digest("hex");
  const fileName = (table || (tableSql ? createHash("md5").update(tableSql).digest("hex") : ""))
    .concat("_")
    .concat(objInfo)
    .concat(".")
    .concat(format);
  const filePath = path.join(rootDir, "/files/temp", fileName);
  const ext = path.extname(filePath);
  const cacheFile = existsSync(filePath);
  const filePathJSON = ["csv", "xlsx", "geojson"].includes(format)
    ? filePath.replace(ext, ".json")
    : filePath;
  const cacheFileJSON = existsSync(filePathJSON);
  // return from cache
  if (cacheFile && !sql && !nocache && !config.disableCache) {
    return formatResult({
      filePath,
      formatAnswer,
      folder: rootDir,
      reply,
    });
  }
  const loadTable = await getTemplate("table", table);
  const meta = await getMeta({ pg, table: loadTable?.table || table });
  const viewSql = await getTemplate("view", loadTable?.table || table);
  if (!meta?.pk && !meta?.view && !tableSql && !viewSql) {
    return reply.status(404).send("table not found");
  }
  if (format === "geojson" && !meta?.geom) {
    return reply
      .status(400)
      .send("Ця форма не містить полів геометрії. Виберіть тип, який не потребує геометрії для вивантаження");
  }
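  // base options shared by the initial count/debug query and the paged fetch loop below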
  const options = {
    pg,
    params: {
      id,
      table,
    },
    query: {
      filter,
      search,
      limit: slice,
      sql,
    },
    headers,
    user,
    sufix: false,
  };
  // check total count, debug sql etc.
  const result = tableSql || viewSql
    ? await pg
      .query(`select count(*) as total, json_agg(row_to_json(q)) as rows from (select * from (${tableSql || viewSql})q ${slice ? `limit ${slice}` : ""} )q where ${loadTable?.key && id
        ? `${loadTable?.key}::text = '${id}'`
        : "true"}`)
      .then((el) => el.rows?.[0] || {})
    : await getData(options, reply, true);
  if (sql) return result;
  if (!result?.rows?.length) {
    return reply.status(200).send("Немає даних, які можна експортувати");
  }
  const { total, filtered: filtered1 = result.total || 0 } = result;
  const filtered = slice ? result.count || slice : filtered1;
  const limit = startStreamWithTotal > filtered
    ? filtered
    : Math.min(query.limit || 1000, startStreamWithTotal) || startStreamWithTotal;
  Object.assign(options, { limit });
  const colmodel = (columns1 || loadTable?.columns || meta?.columns || [])?.map((el) => ({
    name: el.name,
    data: el.data || el.option,
    title: el.title || el.ua,
    type: el.type || el.format || "text",
    html: el.html,
  })); // keep html so computed cells can be rendered via metaFormat
  // get present columns
  const columns = cols === "all" || !cols
    ? colmodel
    : colmodel
      ?.filter((el) => (el.type || /\./.test(el.name)) &&
        !el?.hidden &&
        (cols?.split(",")?.length
          ? cols.split(",").includes(el.name)
          : true))
      ?.filter((el) => (Object.hasOwn(el, "export") ? !el.export : true));
  const htmls = columns
    .filter((el) => el.html)
    .reduce((acc, curr) => ({
      ...acc,
      [curr.name]: curr.html,
    }), {});
  const columnList = columns?.map((el) => (/\./.test(el.name)
    ? `${el.name.split(".")[0]}->>'${el.name.split(".")[1]}' as ${el.name.split(".").pop()}` // unpack json fields (names like "parent.key")
    : el.name));
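  // stream progress over SSE only for large exports (or when explicitly requested), never in test runs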
  const isStream = ((!slice && +filtered > startStreamWithTotal) || stream) &&
    process.env.NODE_ENV !== "test" &&
    !process.env.VITEST;
  const sendOriginal = isStream ? eventStream(reply) : () => { };
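  // progress messages are routed through pub/sub keyed by the export file name (see ./utils/pubsub.js)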
  const redisKey = `exportTable:${fileName}`;
  const send = isStream
    ? await pubsub(redisKey, (msg, finishStream = false) => {
      sendOriginal(msg, finishStream);
    }, query.reload && user?.user_type?.includes?.("admin"))
    : () => { };
  // delete old file, prevent append
  if (cacheFile && (nocache || config.disableCache)) {
    await rm(filePath);
  }
  // create directory if not exists
  await mkdir(path.dirname(filePath), { recursive: true });
  // export xlsx / csv / json
  const source = loadTable?.title || loadTable?.ua || table || sourceName;
  const interval = setInterval(() => {
    send("process query...");
  }, 5000);
  // start stream only if total exceeds the limit, but use while anyway
  const res = {};
  let offset = 0;
  let page = 1;
  let seq = 0;
  send(`Всього в реєстрі: ${result.total} (${filtered} з урахуванням фільтрів)`);
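  // (re)build the intermediate JSON Lines file unless a cached copy can be reused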
  if (!cacheFileJSON || nocache || config.disableCache) {
    // delete old json lines file, prevent append
    if (existsSync(filePathJSON.replace(/\.json$/, ".jsonl"))) {
      await rm(filePathJSON.replace(/\.json$/, ".jsonl"), {
        recursive: true,
        force: true,
      });
    }
    // delete old file, prevent append
    if (existsSync(filePathJSON)) {
      await rm(filePathJSON, { recursive: true, force: true });
    }
    // create empty temporary json lines file to append to
    await writeFile(filePathJSON.replace(/\.json$/, ".jsonl"), "");
    while (+filtered - offset > 0 && !res?.error) {
      try {
        send(`Оброблено: ${offset}/${filtered}`);
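        // fetch the next page: raw SQL for tableSql/viewSql sources, getData otherwise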
        const { rows = [] } = tableSql || viewSql
          ? await pg.query(`select * from (${tableSql || viewSql})q where ${loadTable?.key && id
            ? `${loadTable?.key}::text = '${id}'`
            : "true"} limit ${options.limit} offset ${offset}`)
          : await getData({ ...options, query: { page, ...options.query } }, reply, true);
        send(`seq: ${++seq}`);
        send(`Обробка ${rows.length} об'єктів...`);
        if (!rows.length) {
          send("Обробка даних успішно завершена");
          break;
        }
        await metaFormat({
          rows,
          cls,
          htmls,
          sufix: false,
        }, pg);
        // drop keys that are not part of the selected column list after metaFormat
        if (!tableSql) {
          rows.forEach((row) => {
            Object.keys(row)
              .filter((el) => !columnList.includes(el))
              .forEach((key) => delete row[key]);
          });
        }
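        // append the page as JSON Lines: one serialized row per line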
        const data = rows.map((row) => JSON.stringify(row)).join("\n") + "\n";
        await appendFile(filePathJSON.replace(/\.json$/, ".jsonl"), data);
        offset += rows.length;
        page++;
      } catch (err) {
        send(`error: ${err.toString()}`);
        logger.file("export/table/error", {
          filePath: filePathJSON,
          total,
          filtered,
          offset,
          result: res,
          error: err.toString(),
          stack: err.stack,
        });
        Object.assign(res, { error: err.toString() });
      }
    }
  }
  // convert json lines to a json array (skipped when a cached json file is reused and no .jsonl was produced)
  if (existsSync(filePathJSON.replace(/\.json$/, ".jsonl"))) {
    await jsonlToJsonFile(filePathJSON.replace(/\.json$/, ".jsonl"));
  }
  clearInterval(interval);
  if (res.error) {
    send("finish");
    sendOriginal(res.error, 1);
    return reply.status(500).send(res.error);
  }
  logger.file("export/table", {
    table,
    format,
    total,
    filtered,
    time: Date.now() - date.getTime(),
  });
  if (format !== "json") {
    const txt = nocache || config.disableCache || !cacheFileJSON
      ? `Сформовано файл формату json. Початок конвертації в ${format}...`
      : `Знайдено файл формату json. Початок конвертації в ${format}...`;
    send(txt);
  }
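  // geojson: wrap rows into a FeatureCollection, moving geom into each feature's geometry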
  if (format === "geojson") {
    const rows = JSON.parse((await readFile(filePathJSON, "utf8")) || "[]");
    const geojson = {
      type: "FeatureCollection",
      features: rows.map((row) => ({
        type: "Feature",
        name: "export",
        geometry: row.geom,
        properties: Object.fromEntries(Object.entries(row).filter(([key]) => key !== "geom")),
      })),
    };
    await mkdir(path.dirname(filePath), { recursive: true });
    await writeFile(filePath, JSON.stringify(geojson));
  }
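  // csv/xlsx: convert the intermediate json file into the requested output format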
  const resp = {};
  if (format === "csv") {
    await jsonToCsv({
      filePath: filePathJSON,
      send,
      colmodel,
      domain: host,
      source,
      columnList,
    });
  }
  if (format === "xlsx") {
    await jsonToXls({
      filePath: filePathJSON,
      send,
      colmodel,
      domain: host,
      source,
      resp,
    });
  }
  if (resp.error) {
    return reply.status(resp.status || 500).send(resp.error);
  }
  send("finish");
  sendOriginal("Файл успішно сформовано. Натисніть кнопку ще раз для завантаження даних", 1);
  return formatResult({
    filePath,
    formatAnswer,
    folder: rootDir,
    reply,
  });
}