/*
 * @opengis/fastify-table — core-plugins
 * Version: (unspecified)
 * 157 lines (124 loc) • 7.28 kB — JavaScript
 */
/* import {
logger, autoIndex, getSelect, getTemplate, getSelectVal, pgClients, applyHook,
} from '../../../../utils.js'; */
import logger from '../../../plugins/logger/getLogger.js';
import autoIndex from '../../../plugins/pg/funcs/autoIndex.js';
import getSelect from '../../../plugins/table/funcs/getSelect.js';
import getSelectVal from '../../../plugins/table/funcs/metaFormat/getSelectVal.js';
import pgClients from '../../../plugins/pg/pgClients.js';
import applyHook from '../../../plugins/hook/funcs/applyHook.js';
import getTemplate from '../../../plugins/table/funcs/getTemplate.js';
/**
 * Builds the filter metadata for a table's list view: resolves the table
 * template (or a 'preFilter' hook override), computes per-filter value counts,
 * numeric ranges and custom/state filter counts, and returns the assembled
 * filter list.
 *
 * @param {object} req - Fastify request; reads req.params.table, req.pg,
 *   req.user and req.query.
 * @param {object} reply - Fastify reply (unused in this handler).
 * @param {boolean} iscalled - when truthy, each filter's raw `sql` is kept in
 *   the returned `list`; otherwise it is stripped.
 * @returns {Promise<object|string>} the filter descriptor object, a
 *   `{ status: 404 }` object when no table template exists, or — for admins
 *   passing `?sql` — the raw SQL string (debug escape hatch).
 *
 * NOTE(review): template-supplied names/SQL (`el.name`, `el.id`, `el.sql`,
 * `tableQuery`) are interpolated directly into SQL strings — assumed to be
 * trusted, admin-authored configuration rather than end-user input; verify.
 */
export default async function filterAPI(req, reply, iscalled) {
  const time = Date.now();
  // Per-request pg client falls back to the shared pool client.
  const {
    params, pg = pgClients.client, user = {}, query = {},
  } = req;
  // A 'preFilter' hook result, when present, fully overrides the stored template.
  const hookData = await applyHook('preFilter', { pg, table: params.table });
  const loadTable = await getTemplate('table', params.table);
  const {
    table, sql, query: tableQuery, form, extra, filterState, filterCustom,
  } = hookData || loadTable || {};
  if (!table) {
    return { status: 404, message: 'not found' };
  }
  // Lateral joins declared in the template's `sql` list (enabled entries with a
  // usable sql string); `{{uid}}` placeholders are replaced with the caller's uid.
  const sqlTable = sql?.filter?.((el) => !el?.disabled && el?.sql?.replace)?.map?.((el, i) => ` left join lateral (${el.sql.replace('{{uid}}', user?.uid)}) ${el.name || `t${i}`} on 1=1 `)?.join?.('') || '';
  // `limit 0` queries are metadata probes: only the column descriptors are used.
  // `columns` includes lateral-join columns; `fields` is the bare table only.
  const { fields: columns = [] } = await pg.query(`select * from ${table} t ${sqlTable} limit 0`);
  const { fields = [] } = await pg.query(`select * from ${table} t limit 0`);
  const optimizedSQL = `select * from ${table} t ${sqlTable} where ${tableQuery || '1=1'}`.replace(/{{uid}}/g, user?.uid);
  // Debug escape hatch: admins requesting ?sql get the raw SQL back instead of filters.
  if (query?.sql && user?.user_type === 'admin') return optimizedSQL;
  // Filters come from the hook or from any of the template's legacy key spellings.
  const filters = (hookData?.filters || loadTable?.filter_list || loadTable?.filters || loadTable?.filterList || []).concat(loadTable?.filterSql || []);
  // admin.custom_column - user filter NA-165
  // Per-user custom columns flagged as filterable are appended to the filter list.
  const { rows: properties = [] } = await pg.query('select column_id, name, title, format, data from admin.custom_column where entity=$1 and uid=$2 and filter', [params.table, user?.uid]);
  properties.forEach((row) => filters.push({
    id: row.name, name: row.name, ua: row.title, type: row.format, data: row.data,
  }));
  // KRYVYIRIH-231
  // Fire-and-forget: index creation for filterable physical columns is deliberately
  // not awaited; failures are only logged.
  autoIndex({ table, columns: filters.filter((el) => columns?.find?.((item) => item?.name === el.name)) })
    .catch(err => {
      console.error(err.toString());
      logger.file('autoindex/error', { name: params?.table, error: err.toString(), stack: err.stack });
    });
  // "Extra" filters are form-schema keys with no matching physical column; their
  // values live in crm.extra_data rather than the table itself.
  const loadTemplate = form ? await getTemplate('form', form) : {};
  const extraColumns = extra ? Object.keys(loadTemplate?.schema || {}).filter(key => !columns.find(col => col.name === key)) : [];
  filters?.forEach?.(el => Object.assign(el, { id: el.id || el.name, title: el.title || el.ua, extra: extraColumns.includes(el.id || el.name) }));
  // For every dictionary-backed filter (has `data`, not Autocomplete, no raw sql):
  // resolve the dictionary and attach value counts as `options` (mutated in place).
  await Promise.all(filters.filter((el) => el.data && el.id && el.type !== 'Autocomplete' && !el.sql).map(async (el) => {
    const cls = await getSelect(el.data, pg);
    if (!cls) return;
    const { dataTypeID } = columns.find((item) => item.name === el.id) || {};
    if (el.extra && Array.isArray(cls?.arr || cls)) {
      // Extra-attribute counts come from the EAV table, joined to dictionary rows by id.
      const countArr = await pg.query('select value_text as id, count(*) from crm.extra_data where property_key=$1 and property_entity=$2 group by value_text', [el.id, table]);
      const options = countArr.rows.map(cel => {
        const data = (cls?.arr || cls).find(c => c.id === cel.id);
        return { ...cel, ...data };
      });
      Object.assign(el, { options });
      return;
    }
    // Array-typed columns are unnested so each element is counted separately;
    // distinct values are capped at 100.
    const q = pg.pgType[dataTypeID]?.includes('[]')
      ? `select unnest(${el.id})::text as id,count(*) from (${optimizedSQL})q group by unnest(${el.id}) limit 100`
      : `select ${el.id}::text as id,count(*) from (${optimizedSQL})q group by ${el.id} limit 100`;
    // queryCache: presumably a cached query keyed per table — verify TTL semantics.
    const countArr = await pg.queryCache(q, { table });
    if (countArr.timeout) {
      // Timed-out counts are surfaced to the client on the filter and logged.
      Object.assign(el, { timeout: countArr.timeout });
      console.log('timeout filter', params.table, el.id);
      logger.file('timeout/filter', { table: params.table, type: 'cls', filter: el.id });
    }
    // Resolve display labels for the counted ids; dictionary rows win over lookups.
    const ids = countArr.rows.map(el1 => el1.id);
    const clsData = await getSelectVal({ pg, values: ids, name: el.data });
    const options = countArr.rows.map(cel => {
      const data = cls?.arr?.find(c => c.id?.toString?.() === cel.id?.toString?.()) || { text: clsData[cel.id]?.text || clsData[cel.id] };
      return { ...cel, ...data };
    });
    Object.assign(el, { options });
  }));
  // One UNION ALL pass counts matches for every state/custom filter with raw sql.
  const q = ((filterState || []).concat(filterCustom || [])).filter((el) => el.name && el.sql).map((el) => `select count(*), '${el.name}' as name from (${optimizedSQL})q where ${el.sql}`).join(' union all ');
  const { rows = [], timeout: timeout1 } = q ? await pg.queryCache(q) : {};
  if (timeout1) logger.file('timeout/filter', { table: params.table, type: 'state/custom' });
  if (rows?.length) {
    // Attach counts and clear `sql` — presumably so raw SQL is not leaked to the
    // client (and so the second pass below skips these entries); verify.
    ((filterState || []).concat(filterCustom || [])).filter((el) => el.name && el.sql).forEach((el) => {
      const { count } = rows.find((row) => row.name === el.name) || {};
      Object.assign(el, { count, sql: undefined });
    });
  }
  // NOTE(review): sqlList is built but disabled by `sqlList && false` below —
  // looks like a deliberate kill-switch left in place; confirm before removing.
  const sqlList = loadTable?.sql
    ?.filter((el) => !el.disabled && el?.sql?.replace)
    ?.map((el, i) => ` left join lateral (${el.filter ? el.sql.replace(/limit 1/ig, '') : el.sql}) as ${el.name || `t${i + 1}`} on 1=1 `)
    ?.join(' ') || '';
  // percentile_cont - alternative
  // Range filters over physical columns get [min, p25, median, p75, max].
  await Promise.all(filters.filter((el) => el.name && el.type === 'Range' && fields?.find?.((item) => item?.name === el.name)).map(async (el) => {
    const data = await pg.queryCache(
      `select array[
      min(${el.name}),
      percentile_disc(0.25) within group (order by ${el.name}),
      percentile_disc(0.5) within group (order by ${el.name}),
      percentile_disc(0.75) within group (order by ${el.name}),
      max(${el.name})
      ] as range from ${table} ${sqlList && false ? ` t ${sqlList}` : ''} where ${tableQuery || '1=1'}`,
      { table },
    ).then(res => {
      if (res.timeout) {
        logger.file('timeout/filter', { table: params.table, type: 'Range', filter: res.name });
        // NOTE(review): on timeout, `data` becomes the whole result object (with
        // `timeout`) instead of the 5-number range array — confirm clients handle
        // that shape.
        return res;
      }
      return res.rows?.[0]?.range;
    });
    Object.assign(el, { data });
  }));
  // Second count pass for custom filters, against the bare table (no lateral joins).
  // NOTE(review): entries counted in the first pass had `sql` cleared, so this
  // pass effectively runs only when the first union query returned no rows —
  // confirm that is the intended fallback.
  const sqlFilters = (filterCustom || []).filter((el) => el.name && el.sql);
  const q1 = sqlFilters.map((el) => `select count(*), '${el.name}' as name from ${table} where ${tableQuery || '1=1'} and ${el.sql}`).join(' union all ');
  const { rows: sqlRows = [], timeout } = q1 ? await pg.queryCache(q1, { table }) : {};
  if (timeout) logger.file('timeout/filter', { table: params.table, type: 'sqlFilters' });
  if (sqlRows?.length) {
    sqlFilters.forEach((el) => {
      const { count } = sqlRows.find((row) => row.name === el.name) || {};
      Object.assign(el, { count, sql: undefined });
    });
  }
  // Response: elapsed ms, enriched filter list (sql stripped unless iscalled),
  // plus the custom/inline/state filter groups from the template.
  return {
    time: Date.now() - time,
    list: filters?.map?.(el => ({ ...el, sql: iscalled ? el.sql : undefined })),
    custom: filterCustom,
    inline: loadTable?.filterInline,
    state: filterState,
  };
}