scim2-filter / parser.js

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); const EOT = { type: "EOT", literal: "" }; function tokenizer(f) { const ret = []; let rest = f; const patterns = /^(?:(\s+)|(-?\d+(?:\.\d+)?(?:[eE][-+]?\d+)?)|("(?:[^"]|\\.|\n)*")|([[\]()])|(\w[-\w\._:\/%]*))/; let n; while ((n = patterns.exec(rest))) { if (n[1] || n[0].length === 0) { // } else if (n[2]) { ret.push({ literal: n[2], type: "Number" }); } else if (n[3]) { ret.push({ literal: n[3], type: "Quoted" }); } else if (n[4]) { ret.push({ literal: n[4], type: "Blacket" }); } else if (n[5]) { ret.push({ literal: n[5], type: "Word" }); } rest = rest.substring(n.index + n[0].length); } if (rest.length !== 0) { throw new Error(`unexpected token ${rest}`); } ret.push(EOT); return ret; } exports.tokenizer = tokenizer; class Tokens { constructor(list) { this.list = list; this.i = 0; this.current = this.list[this.i]; } getList() { return this.list.map((a, i) => i == this.i ? `[${a.literal}]` : a.literal); } peek() { return this.current || EOT; } forward() { this.current = this.list[++this.i]; return this; } shift() { const c = this.peek(); this.forward(); return c; } } exports.Tokens = Tokens; const cops = new Set(["eq", "ne", "co", "sw", "ew", "gt", "lt", "ge", "le"]); const sops = new Set(["pr"]); function parseFilter(list) { return parseInxif(parseExpression(list), list, Precedence.LOWEST); } exports.parseFilter = parseFilter; function parseExpression(list) { const t = list.shift(); if (t.literal == "(") { const filter = parseFilter(list); const close = list.shift(); if (close.literal !== ")") { throw new Error(`Unexpected token [${close.literal}(${close.type})] expected ')'`); } return filter; } else if (t.literal.toLowerCase() == "not") { return { op: "not", filter: parseFilter(list) }; } else if (t.type == "Word") { return readValFilter(t, list); } else { throw new Error(`Unexpected token ${t.literal} (${t.type})`); } } exports.parseExpression = parseExpression; var Precedence; (function (Precedence) { Precedence[Precedence["LOWEST"] = 1] = "LOWEST"; Precedence[Precedence["OR"] = 2] = "OR"; Precedence[Precedence["AND"] = 3] = "AND"; })(Precedence || (Precedence = {})); const PRECEDENCE = { 'or': Precedence.OR, 'and': Precedence.AND }; function parseInxif(left, list, precede) { const op = list.peek().literal.toLowerCase(); const p = PRECEDENCE[op]; if (!p || precede >= p) { return left; } const filters = [left]; while (list.peek().literal.toLowerCase() === op) { let r = parseExpression(list.forward()); const rr = list.peek().literal.toLowerCase(); if (PRECEDENCE[rr] > p) { r = parseInxif(r, list, p); } filters.push(r); } return parseInxif({ op, filters }, list, precede); } function readValFilter(left, list) { if (left.type !== "Word") { throw new Error(`Unexpected token ${left.literal} expected Word`); } const attrPath = left.literal; const t = list.shift(); const op = t.literal.toLowerCase(); if (cops.has(op)) { var compValue = parseCompValue(list); return { op, attrPath, compValue }; } else if (sops.has(op)) { return { op, attrPath }; } else if (op == "[") { const valFilter = parseFilter(list); const close = list.shift(); if (close.literal !== "]") { throw new Error(`Unexpected token ${close.literal} expected ']'`); } return { op: "[]", attrPath, valFilter }; } else { throw new Error(`Unexpected token ${attrPath} ${t.literal} as valFilter operator`); } } function parseCompValue(list) { const t = list.shift(); try { const v = JSON.parse(t.literal); if (v === null || typeof v == "string" || typeof v == "number" 
|| typeof v == "boolean") { return v; } else { throw new Error(`${t.literal} is ${typeof v} (un supported value)`); } } catch (e) { throw new Error(`[${t.literal}(${t.type})] is not json`); } } //# sourceMappingURL=parser.js.map