// dbgate-tools — auxiliary tools for other DbGate packages.
"use strict";
// TypeScript "esModuleInterop" helper: wraps a CommonJS export so that
// `.default` always points at the module's primary export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.tokenizeBySearchFilter = exports.filterNameCompoud = exports.filterName = void 0;
const compact_1 = __importDefault(require("lodash/compact"));
const startCase_1 = __importDefault(require("lodash/startCase"));
// Parses a filter string into a token tree: comma-separated "factors",
// each factor holding its space-separated tokens. Blank pieces are dropped.
function parseTokenTree(filter) {
    const splitNonEmpty = (value, separator) => value
        .split(separator)
        .map(part => part.trim())
        .filter(part => part.length > 0);
    const factors = splitNonEmpty(filter, ',').map(factorText => ({
        tokens: splitNonEmpty(factorText, ' '),
    }));
    return { factors };
}
// Tests whether `text` matches `filter`. An all-uppercase filter is treated
// as a camel-case abbreviation (its letters must appear, in order, among the
// capitalized initials of `text`); any other filter is a case-insensitive
// substring test. Empty/null text never matches; an empty filter always does.
function camelMatch(filter, text) {
    if (!text) {
        return false;
    }
    if (!filter) {
        return true;
    }
    const isCapitalsOnly = filter.replace(/[A-Z]/g, '').length == 0;
    if (!isCapitalsOnly) {
        return text.toUpperCase().includes(filter.toUpperCase());
    }
    // startCase splits the text into capitalized words; keep only the capitals
    // and check the filter letters occur as a subsequence. Safe to build a
    // regex from the filter here: this branch only runs for A-Z characters.
    const capitals = (0, startCase_1.default)(text).replace(/[^A-Z]/g, '');
    const subsequencePattern = new RegExp('.*' + [...filter].join('.*') + '.*');
    return subsequencePattern.test(capitals);
}
/**
 * Checks whether any of the given names satisfies the filter expression.
 * The filter is a comma-separated list of factors (OR); each factor is a
 * space-separated list of tokens (AND). A token matches via camelMatch
 * (camel-abbreviation or case-insensitive substring).
 *
 * @param {string} filter - filter expression; falsy means "match everything"
 * @param {...string} names - candidate names; falsy entries are ignored
 * @returns {boolean} true when some factor has all its tokens matched
 */
function filterName(filter, ...names) {
    if (!filter)
        return true;
    const tree = parseTokenTree(filter);
    if (tree.factors.length == 0)
        return true;
    const namesCompacted = (0, compact_1.default)(names);
    // some/every short-circuit: stop scanning a factor at its first failing
    // token (the previous loop kept testing remaining tokens needlessly).
    return tree.factors.some(factor =>
        factor.tokens.every(token =>
            namesCompacted.some(name => camelMatch(token, name))));
}
exports.filterName = filterName;
// Classifies one factor's tokens against the two name lists:
// 'both'  - every token matches in both lists
// 'main'  - every token matches the main list (but not all in child)
// 'child' - every token matches the child list (but not all in main)
// 'none'  - some token matches neither list
function clasifyCompoudCategory(tokens, namesCompactedMain, namesCompactedChild) {
    let matchesAllMain = true;
    let matchesAllChild = true;
    for (const token of tokens) {
        const inMain = namesCompactedMain.some(name => camelMatch(token, name));
        const inChild = namesCompactedChild.some(name => camelMatch(token, name));
        if (!inMain && !inChild) {
            return 'none';
        }
        matchesAllMain = matchesAllMain && inMain;
        matchesAllChild = matchesAllChild && inChild;
    }
    if (matchesAllMain) {
        return matchesAllChild ? 'both' : 'main';
    }
    return matchesAllChild ? 'child' : 'none';
}
// Evaluates the filter against a main name list and a child name list and
// reports which side(s) the filter targets: 'both', 'main', 'child', or
// 'none'. A falsy or empty filter targets both sides. Factors that resolve
// to both 'main' and 'child' (or to 'both' directly) collapse to 'both'.
function filterNameCompoud(filter, namesMain, namesChild) {
    if (!filter)
        return 'both';
    const tree = parseTokenTree(filter);
    if (tree.factors.length == 0)
        return 'both';
    const namesCompactedMain = (0, compact_1.default)(namesMain);
    const namesCompactedChild = (0, compact_1.default)(namesChild);
    const factorRes = tree.factors.map(factor =>
        clasifyCompoudCategory(factor.tokens, namesCompactedMain, namesCompactedChild));
    const hasMain = factorRes.includes('main');
    const hasChild = factorRes.includes('child');
    if (factorRes.includes('both') || (hasMain && hasChild))
        return 'both';
    if (hasMain)
        return 'main';
    if (hasChild)
        return 'child';
    return 'none';
}
exports.filterNameCompoud = filterNameCompoud;
// Splits `text` into { text, isMatch } segments for search-result
// highlighting. Filter tokens are separated by spaces/commas; an
// all-uppercase token is matched as a camel-case abbreviation (each letter
// found in order within a segment), any other token as a case-insensitive
// substring. Returns null when nothing matched (result is a single
// non-match segment covering the whole text).
function tokenizeBySearchFilter(text, filter) {
    var _a, _b;
    const camelTokens = [];
    const stdTokens = [];
    // Partition filter tokens: capitals-only => camel matching, else substring.
    // Substring tokens are uppercased once here for case-insensitive search.
    for (const token of filter
        .split(/[ ,]/)
        .map(x => x.trim())
        .filter(x => x.length > 0)) {
        if (token.replace(/[A-Z]/g, '').length == 0) {
            camelTokens.push(token);
        }
        else {
            stdTokens.push(token.toUpperCase());
        }
    }
    // Start with the whole text as one unmatched segment; each token pass
    // below refines `res` by splitting segments around matched characters.
    let res = [
        {
            text,
            isMatch: false,
        },
    ];
    for (const token of camelTokens) {
        const nextres = [];
        for (const item of res) {
            // Locate an index for each letter of the camel token, scanning
            // left to right (each letter must come after the previous one).
            const indexes = [];
            for (const char of token) {
                if (indexes.length == 0 && char == ((_a = item.text[0]) === null || _a === void 0 ? void 0 : _a.toUpperCase())) {
                    // handle first letter of camelcase
                    indexes.push(0);
                }
                else {
                    // Only exact (uppercase) occurrences match past the first
                    // position; -1 marks a letter that could not be placed.
                    const index = item.text.indexOf(char, indexes.length > 0 ? indexes[indexes.length - 1] + 1 : 0);
                    if (index < 0) {
                        indexes.push(-1);
                    }
                    else {
                        indexes.push(index);
                    }
                }
            }
            if (indexes.some(x => x < 0)) {
                // Token not fully matched in this segment - keep it unchanged.
                nextres.push(item);
            }
            else {
                // Split the segment: each matched letter becomes a one-char
                // isMatch segment; the gaps between them stay unmatched.
                // NOTE(review): pieces of a previously matched segment are
                // re-emitted with isMatch=false here - presumably acceptable
                // for highlighting; confirm against the UI consumer.
                let lastIndex = 0;
                for (let i = 0; i < indexes.length; i++) {
                    if (indexes[i] > lastIndex) {
                        nextres.push({ text: item.text.substring(lastIndex, indexes[i]), isMatch: false });
                    }
                    nextres.push({ text: item.text.substring(indexes[i], indexes[i] + 1), isMatch: true });
                    lastIndex = indexes[i] + 1;
                }
                nextres.push({ text: item.text.substring(lastIndex), isMatch: false });
            }
        }
        res = nextres;
    }
    for (const token of stdTokens) {
        const nextres = [];
        for (const item of res) {
            // Case-insensitive search; only the first occurrence per segment
            // is highlighted for each token.
            const index = (_b = item.text) === null || _b === void 0 ? void 0 : _b.toUpperCase().indexOf(token);
            if (index < 0) {
                nextres.push(item);
            }
            else {
                nextres.push({ text: item.text.substring(0, index), isMatch: false });
                nextres.push({ text: item.text.substring(index, index + token.length), isMatch: true });
                nextres.push({ text: item.text.substring(index + token.length), isMatch: false });
            }
        }
        res = nextres;
    }
    // Drop empty segments produced by splits at segment boundaries.
    res = res.filter(x => x.text.length > 0);
    if (res.length == 1 && !res[0].isMatch) {
        return null;
    }
    return res;
    // const result = [];
    // let lastMatch = 0;
    // for (const token of tokens) {
    //     const index = text.indexOf(token, lastMatch);
    //     if (index < 0) {
    //         result.push({ token, isMatch: false });
    //         continue;
    //     }
    //     result.push({ token: text.substring(lastMatch, index), isMatch: false });
    //     result.push({ token: text.substring(index, index + token.length), isMatch: true });
    //     lastMatch = index + token.length;
    // }
    // result.push({ token: text.substring(lastMatch), isMatch: false });
    // return result;
}
exports.tokenizeBySearchFilter = tokenizeBySearchFilter;