staticql
Type-safe query engine for static content including Markdown, YAML, JSON, and more.
import { resolveField } from "./utils/field.js";
import { resolveDirectRelation, resolveThroughRelation, } from "./utils/relationResolver.js";
import { Indexer } from "./Indexer.js";
import { createPageInfo, decodeCursor, encodeCursor, getPageSlice, } from "./utils/pagenation.js";
import { asArray } from "./utils/normalize.js";
/**
* QueryBuilder allows for type-safe querying and joining of static structured data.
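 *
 * @example
 * // Minimal sketch of the chainable API; `postsBuilder` and the "tags" /
 * // "publishedAt" indexes are hypothetical setup details.
 * const { data, pageInfo } = await postsBuilder
 *     .where("tags", "eq", "typescript")
 *     .orderBy("publishedAt", "desc")
 *     .pageSize(10)
 *     .exec();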
*/
export class QueryBuilder {
constructor(sourceName, loader, indexer, resolver, logger) {
this.sourceName = sourceName;
this.loader = loader;
this.indexer = indexer;
this.resolver = resolver;
this.logger = logger;
this.joins = [];
this.filters = [];
this._orderByKey = "slug";
this._orderByDirection = "asc";
this._cursorDirection = "after";
this._pageSize = 20;
}
/**
* Adds a relation to join with.
*
* @param relationKey - Name of the relation as defined in the config.
* @returns This instance (chainable).
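     *
     * @example
     * // Sketch: "author" is a hypothetical relation key defined in the source config.
     * const { data } = await builder.join("author").exec();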
*/
join(relationKey) {
this.joins = [...this.joins, relationKey];
return this;
}
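    /**
     * Adds a filter condition on a field.
     *
     * @param field - Field to filter on; non-slug fields must have an index configured.
     * @param op - Comparison operator: "eq", "in", or "startsWith".
     * @param value - Value to compare against (an array when using "in").
     * @returns This instance (chainable).
     *
     * @example
     * // Sketch: "category" is a hypothetical indexed field.
     * builder.where("category", "eq", "guides");
     */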
where(field, op, value) {
this.filters.push({ field, op, value });
return this;
}
/**
* Finds and returns a record by its slug.
*
     * @param slug - The slug (unique identifier) of the record to retrieve.
     * @returns The found record, with any configured joins applied.
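     *
     * @example
     * // Sketch: the slug and relation name are hypothetical.
     * const post = await builder.join("author").find("hello-world");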
*/
async find(slug) {
const rsc = this.resolver.resolveOne(this.sourceName);
const requiresJoin = this.joins.length > 0;
let data = (await this.loader.loadBySlug(this.sourceName, slug));
if (requiresJoin)
data = (await this.applyJoins([data], rsc))[0];
return data;
}
/**
* Specifies the sorting order for the query.
*
* @param key - Field to order by. Default is "slug".
* @param direction - Sort direction: "asc" or "desc". Default is "asc".
* @returns This instance (for method chaining).
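     *
     * @example
     * // Sketch: "publishedAt" is a hypothetical indexed field.
     * builder.orderBy("publishedAt", "desc");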
*/
orderBy(key, direction = "asc") {
this._orderByKey = key;
this._orderByDirection = direction;
return this;
}
/**
* Sets the pagination cursor for the query.
*
     * @param value - The encoded cursor string (usually Base64).
* Use the `endCursor` from the previous page's `pageInfo` for forward pagination,
* or the `startCursor` for backward pagination.
* @param direction - Pagination direction: `"after"` for next page, `"before"` for previous page.
* Defaults to `"after"`.
* @returns This instance (for method chaining).
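     *
     * @example
     * // Sketch of forward pagination: feed the previous page's endCursor back in.
     * const first = await builder.pageSize(10).exec();
     * const second = await builder.cursor(first.pageInfo.endCursor, "after").exec();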
*/
cursor(value, direction = "after") {
this._cursorValue = value;
this._cursorDirection = direction;
return this;
}
/**
* Sets the number of records to return per page.
*
* @param n - The maximum number of records to return for this query (page size).
* Should be a positive integer.
* @returns This instance (for method chaining).
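     *
     * @example
     * builder.pageSize(50); // return up to 50 records per page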
*/
pageSize(n) {
this._pageSize = n;
return this;
}
/**
* Executes the query and returns matching records.
*
     * @returns An object with the matched records (`data`) and pagination info (`pageInfo`).
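     *
     * @example
     * // Sketch: load one page of records and check whether more pages exist.
     * const { data, pageInfo } = await builder.pageSize(20).exec();
     * if (pageInfo.hasNextPage) {
     *     // request the next page with .cursor(pageInfo.endCursor, "after")
     * }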
*/
async exec() {
const rsc = this.resolver.resolveOne(this.sourceName);
const requiresJoin = this.joins.length > 0;
const { page, pageInfo } = await this.compose();
const slugs = page.flatMap((x) => Object.keys(x.ref));
let data = (await this.loader.loadBySlugs(this.sourceName, slugs));
if (requiresJoin)
data = await this.applyJoins(data, rsc);
return { data, pageInfo };
}
/**
* Returns only the index page without loading full data.
*
     * @returns The matched index page and its pageInfo, without loading the full records.
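     *
     * @example
     * // Sketch: inspect the index page without loading the underlying records.
     * const { page, pageInfo } = await builder.peek();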
*/
async peek() {
return await this.compose();
}
/**
     * Composes the index page and pageInfo for the current filters, ordering, and cursor.
*/
async compose() {
const rsc = this.resolver.resolveOne(this.sourceName);
const filters = this.extractIndexFilters(rsc);
const orderByKey = String(this._orderByKey);
const empty = {
page: [],
pageInfo: {
hasNextPage: false,
hasPreviousPage: false,
startCursor: undefined,
endCursor: undefined,
},
};
let matched = await this.getMatchedIndexes(this.sourceName, filters, rsc);
let page;
let pageInfo;
const encodeCursorCallback = (item) => {
const refsLength = Object.keys(item.ref).length;
const slug = Object.keys(item.ref)[refsLength - 1];
const orderValue = Object.values(item.ref)[refsLength - 1][orderByKey];
return encodeCursor({ order: { [orderByKey]: orderValue[0] }, slug });
};
if (matched.length) {
const cursorObj = this._cursorValue
? decodeCursor(this._cursorValue)
: undefined;
const startIndex = this.getStartIdx(matched, cursorObj);
page = getPageSlice(matched, startIndex, this._pageSize, this._cursorDirection);
pageInfo = createPageInfo(page, this._pageSize, startIndex, matched.length, this._cursorDirection, encodeCursorCallback);
}
else if (!matched.length && !filters.length) {
// no conditions
if (!rsc.indexes[orderByKey]) {
throw new Error(`[${this.sourceName}] needs index: ${orderByKey}`);
}
const indexDir = rsc.indexes[orderByKey].dir;
const isDesc = this._orderByDirection === "desc";
const isAfter = this._cursorDirection === "after";
let hasPreviousPage;
let hasNextPage;
if (isAfter) {
page = await Array.fromAsync(this.indexer.readForwardPrefixIndexLines(indexDir, this._pageSize + 1, this._cursorValue, orderByKey, isDesc));
}
else {
const data = await Array.fromAsync(this.indexer.readBackwardPrefixIndexLines(indexDir, this._pageSize + 1, this._cursorValue, orderByKey, isDesc));
                // In backward pagination the rows are read in scan order regardless of the
                // sort direction, so the results must be reversed before being returned.
page = data.reverse();
}
if (!page.length)
return empty;
// set hasPreviousPage
hasPreviousPage = isAfter
? !!this._cursorValue
: page.length > this._pageSize;
// set hasNextPage
hasNextPage = isAfter
? page.length > this._pageSize
: !!this._cursorValue;
page = page.slice(0, this._pageSize);
pageInfo = {
hasPreviousPage,
hasNextPage,
startCursor: encodeCursorCallback(page[0]),
endCursor: encodeCursorCallback(page[page.length - 1]),
};
}
else {
return empty;
}
return { page, pageInfo };
}
/**
* Get the starting position from the specified cursor.
*/
getStartIdx(matched, cursorObj) {
if (!cursorObj)
return 0;
const orderByKey = String(this._orderByKey);
return matched.findIndex((item) => {
for (const [slug, values] of Object.entries(item.ref)) {
let match = slug === cursorObj.slug;
if (orderByKey && cursorObj.order[orderByKey]) {
const orderValue = values[orderByKey]?.[0];
match = match && orderValue === cursorObj.order[orderByKey];
}
return match;
}
return false;
});
}
/**
     * Splits the filters into index-backed filters; throws if any filter targets a non-indexed field.
*/
extractIndexFilters(rsc) {
const indexableFields = new Set([
"slug",
...Object.keys(rsc.indexes ?? {}),
]);
const indexedFilters = this.filters.filter((f) => indexableFields.has(f.field));
const fallbackFilters = this.filters.filter((f) => !indexableFields.has(f.field));
if (fallbackFilters.length > 0) {
throw new Error(`[${this.sourceName}] needs index: ${JSON.stringify(fallbackFilters)}`);
}
return indexedFilters;
}
/**
* Applies configured joins (relations) to the result set.
*/
async applyJoins(result, rsc) {
for (const key of this.joins) {
const rel = rsc.relations?.[key];
if (!rel)
throw new Error(`Unknown relation: ${key}`);
if (rel.type === "hasOneThrough" || rel.type === "hasManyThrough") {
result = await this.applyThroughRelation(result, key, rel);
}
else if (rel.type === "hasOne" ||
rel.type === "hasMany" ||
rel.type === "belongsTo" ||
rel.type === "belongsToMany") {
result = await this.applyDirectRelation(result, key, rel);
}
}
return result;
}
/**
* Direct relations: hasOne, hasMany, belongsTo, belongsToMany
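     *
     * A sketch of the relation config fields read below; the names come from the
     * property accesses in this method, the concrete values are hypothetical:
     * `{ type: "belongsTo", to: "authors", localKey: "authorId", foreignKey: "id" }`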
*/
async applyDirectRelation(result, key, rel) {
const directRel = rel;
let foreignData = [];
if (directRel.type === "belongsTo" || directRel.type === "belongsToMany") {
// For belongsTo and belongsToMany, use foreignKey-based filtering
const allLocalVals = result.flatMap((row) => resolveField(row, directRel.localKey));
const uniqueIndexes = (await this.getMatchedIndexes(directRel.to, [{ field: directRel.foreignKey, op: "in", value: allLocalVals }], this.resolver.resolveOne(directRel.to))) ?? [];
foreignData = await this.loader.loadBySlugs(directRel.to, uniqueIndexes.map((index) => Object.keys(index.ref)).flat());
}
else {
// For hasOne and hasMany, localKey values are treated as slugs
const allSlugs = result.flatMap((row) => resolveField(row, directRel.localKey));
const uniqueSlugs = Array.from(new Set(allSlugs));
foreignData = await this.loader.loadBySlugs(directRel.to, uniqueSlugs);
}
return result.map((row) => {
if (directRel.type === "belongsTo" ||
directRel.type === "belongsToMany") {
// Inverse lookup: match localKey values to foreignKey values
const localVals = resolveField(row, directRel.localKey);
const related = foreignData.filter((targetRow) => {
const foreignVals = resolveField(targetRow, directRel.foreignKey);
return localVals.some((val) => foreignVals.includes(val));
});
return { ...row, [key]: related };
}
else {
const relValue = resolveDirectRelation(row, directRel, foreignData);
return {
...row,
[key]: directRel.type === "hasOne" ? relValue ?? null : relValue ?? [],
};
}
});
}
/**
* For "hasOneThrough" and "hasManyThrough" relations
*/
async applyThroughRelation(result, key, rel) {
const sourceSlugs = result.flatMap((row) => resolveField(row, rel.sourceLocalKey));
// If the intermediate table doesn't use "slug", index-based lookup is required
const uniqueSourceIndexes = (await this.getMatchedIndexes(rel.through, [
{
field: rel.throughForeignKey,
op: "in",
value: sourceSlugs,
},
], this.resolver.resolveOne(rel.through))) ?? [];
const throughData = await this.loader.loadBySlugs(rel.through, uniqueSourceIndexes.map((index) => Object.keys(index.ref)).flat());
const targetSlugs = throughData.flatMap((t) => resolveField(t, rel.throughLocalKey));
// If the target table doesn't use "slug", index-based lookup is required
        const uniqueTargetIndexes = (await this.getMatchedIndexes(rel.to, [{ field: rel.targetForeignKey, op: "in", value: targetSlugs }], this.resolver.resolveOne(rel.to))) ?? [];
const targetData = await this.loader.loadBySlugs(rel.to, uniqueTargetIndexes.map((index) => Object.keys(index.ref)).flat());
return result.map((row) => {
const relValue = resolveThroughRelation(row, rel, throughData, targetData);
return {
...row,
[key]: rel.type === "hasOneThrough" ? relValue ?? null : relValue ?? [],
};
});
}
/**
* Resolves matched slugs using index data based on filters.
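     *
     * Each returned index line roughly has the shape (inferred from usage in this file):
     * `{ v, vs, ref: { [slug]: { [field]: string[] } } }`.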
*/
async getMatchedIndexes(sourceName, indexedFilters, rsc, andMode = true) {
let matched = null;
for (let i = 0; i < indexedFilters.length; i++) {
const filter = indexedFilters[i];
const { field, op, value } = filter;
let matchedIndexes = [];
// direct slug lookup: eq or in on slug can bypass index files
if (field === "slug" &&
(op === "eq" || (op === "in" && Array.isArray(value)))) {
const slugs = asArray(value).map((v) => String(v));
matchedIndexes = slugs.map((slug) => ({
v: slug,
vs: slug,
ref: { [slug]: { slug: [slug] } },
}));
}
            else if (andMode && matched && i > 0) {
                // subsequent filters in AND mode: narrow the previously matched set
const indexConfig = rsc.indexes?.[field];
const depth = indexConfig?.depth ?? Indexer.indexDepth;
let entries = [];
for (const v of asArray(value)) {
const searchValue = String(v);
const searchPrefix = this.indexer.getPrefixIndexPath(searchValue, depth);
const candidates = matched.filter((m) => {
const mSlug = Object.keys(m.ref)[0];
const mField = m.ref[mSlug]?.[field];
if (!mField)
return false;
return mField.some((p) => op === "startsWith"
? p.startsWith(searchPrefix)
: p === searchPrefix);
});
                    // this value matched none of the current candidates
if (!candidates.length)
continue;
if (searchValue.length <= depth) {
// index match
entries.push(...candidates);
}
else {
// partial index match
const found = await this.indexer.findIndexLines(sourceName, field, searchValue);
if (!found)
continue;
// extract match
entries.push(...matched.filter((m) => {
const mSlug = Object.keys(m.ref)[0];
return found.some((line) => !!line.ref[mSlug]);
}));
}
}
matchedIndexes.push(...entries);
                // intersection is empty; no record satisfies all filters so far
if (!matchedIndexes.length)
return [];
}
            else {
                // first filter (or OR mode): look up the index files directly
if (Object.keys(rsc.indexes ?? {}).length) {
if (op === "eq") {
matchedIndexes =
(await this.indexer.findIndexLines(sourceName, field, String(value))) ?? [];
}
else if (op === "startsWith") {
matchedIndexes =
(await this.indexer.findIndexLines(sourceName, field, String(value), (indexValue, argValue) => indexValue.startsWith(argValue))) ?? [];
}
else if (op === "in" && Array.isArray(value)) {
const buff = new Set();
for (const keyValue of value) {
buff.add(this.indexer.findIndexLines(sourceName, field, String(keyValue)));
}
const f = (await Promise.all([...buff])).flat();
matchedIndexes.push(...f.filter((i) => !!i));
}
}
}
if (andMode) {
matched = matchedIndexes;
}
else {
matched = [...(matched ?? []), ...matchedIndexes];
}
}
        const matchedArray = matched ?? [];
        const orderKey = String(this._orderByKey);
        matchedArray.sort((a, b) => {
            const [, avs] = Object.entries(a.ref)[0];
            const [, bvs] = Object.entries(b.ref)[0];
            const aRaw = avs[orderKey];
            const bRaw = bvs[orderKey];
            const aEmpty = aRaw == null || String(aRaw) === "";
            const bEmpty = bRaw == null || String(bRaw) === "";
            // Index entries missing the orderBy field cannot be sorted from the index alone.
            if (aEmpty || bEmpty) {
                throw new Error(`[${sourceName}] orderBy requires an index on: ${orderKey}`);
            }
            const av = String(aRaw);
            const bv = String(bRaw);
            return this._orderByDirection === "desc"
                ? bv.localeCompare(av)
                : av.localeCompare(bv);
        });
        return matchedArray;
}
}