// miniml
// A minimal, embeddable semantic data modeling language for generating
// SQL queries from YAML model definitions. Inspired by LookML.
import { loadYamlFile, loadYamlFileSync, parseYAML } from "./yaml.js";
import { renderJinjaTemplate } from "./jinja.js";
/**
 * Build a normalized model from a YAML string or an already-parsed object.
 *
 * Fills in missing sections (`join`, `dimensions`, `measures`), infers and
 * trims `date_field`, trims `default_date_range`, validates, and expands
 * shorthand dimension/measure definitions plus the model info text.
 *
 * @param {object|string} obj - Parsed model object, or raw YAML text.
 * @param {string} [file] - Source file path, used for dialect inference.
 * @returns {object} The normalized model (same object, mutated in place).
 */
export function createModel(obj, file) {
  // Accept either raw YAML text or an already-parsed object.
  const model = typeof obj === "string" ? parseYAML(obj) : obj;
  for (const section of ["join", "dimensions", "measures"]) {
    if (!model[section]) model[section] = {};
  }
  // Fall back to a heuristic date field when none is declared.
  if (!model.date_field) {
    model.date_field = defaultDateField(Object.keys(model.dimensions));
  }
  if (model.date_field) model.date_field = model.date_field.trim();
  if (model.default_date_range) {
    model.default_date_range = model.default_date_range.trim();
  }
  validateModel(model);
  expandDimensions(model.dimensions);
  expandMeasures(model.measures);
  expandModelInfo(model, file);
  return model;
}
/**
 * Asynchronously load and normalize a model from a YAML file.
 * @param {string} file - Path to the YAML model definition.
 * @returns {Promise<object>} The normalized model.
 */
export async function loadModel(file) {
  return createModel(await loadYamlFile(file), file);
}
/**
 * Synchronously load and normalize a model from a YAML file.
 * @param {string} file - Path to the YAML model definition.
 * @returns {object} The normalized model.
 */
export function loadModelSync(file) {
  return createModel(loadYamlFileSync(file), file);
}
/**
 * Pick the most likely date dimension from a list of dimension keys.
 *
 * Tries predicates from strongest to weakest: an exact `date` key, then an
 * exact `timestamp` key, then common naming patterns (`*date`, `date*`,
 * `*time`, `*_at`, `*_on`, `*_until`, `*datetime*`).
 *
 * @param {string[]} keys - Dimension key names.
 * @returns {string|undefined} The matching key, or undefined if none match.
 */
function defaultDateField(keys) {
  const predicates = [
    key => key === "date",
    key => key === "timestamp",
    key => key.endsWith("date") ||
      key.startsWith("date") ||
      key.endsWith("time") ||
      key.endsWith("_at") ||
      key.endsWith("_on") ||
      key.endsWith("_until") ||
      key.includes("datetime")
  ];
  for (const predicate of predicates) {
    // Match case-insensitively but return the ORIGINAL key. (Bug fix: the
    // previous version lowercased the whole array and returned the lowercased
    // key, which would not name a real dimension for mixed-case keys.)
    const match = keys.find(key => predicate(key.toLowerCase()));
    if (match)
      return match;
  }
}
/**
 * Expand shorthand dimension definitions into full objects and ensure each
 * dimension's SQL snippet ends with an `AS <key>` alias.
 * @param {Object<string, object|string|Array>} dimensions - Mutated in place.
 */
function expandDimensions(dimensions) {
  expandMetadataDefs(dimensions);
  const aliased = /\s+AS\s+[a-z0-9_]+$/i;
  for (const dimension of Object.values(dimensions)) {
    if (!dimension.sql) {
      // No SQL given: the dimension key doubles as the column reference.
      dimension.sql = dimension.key;
    } else if (!aliased.test(dimension.sql)) {
      dimension.sql = `${dimension.sql} AS ${dimension.key}`;
    }
  }
}
/**
 * Expand shorthand measure definitions into full objects and ensure each
 * measure's SQL snippet ends with an `AS <key>` alias. Measures without SQL
 * default to a `SUM` over the key.
 * @param {Object<string, object|string|Array>} measures - Mutated in place.
 */
function expandMeasures(measures) {
  expandMetadataDefs(measures);
  const aliased = /\s+AS\s+[a-z0-9_]+$/i;
  for (const measure of Object.values(measures)) {
    if (!measure.sql) {
      // No SQL given: default aggregation is a SUM of the key column.
      measure.sql = `SUM(${measure.key}) AS ${measure.key}`;
    } else if (!aliased.test(measure.sql)) {
      measure.sql = `${measure.sql} AS ${measure.key}`;
    }
  }
}
/**
 * Normalize shorthand metadata entries in place.
 *
 * A bare string becomes `{ key, description }`; an array becomes
 * `{ key, description, sql, join }` from its first three elements.
 * Entries that are already objects are left untouched.
 *
 * @param {Object<string, object|string|Array>} dictionary - Mutated in place.
 */
function expandMetadataDefs(dictionary) {
  for (const [key, value] of Object.entries(dictionary)) {
    if (typeof value === "string") {
      dictionary[key] = { key, description: value };
    } else if (Array.isArray(value)) {
      const [description, sql, join] = value;
      dictionary[key] = { key, description, sql, join };
    }
  }
}
/**
 * Render the model's info text: a generated DIMENSIONS/MEASURES listing
 * followed by any author-supplied info, rendered through the Jinja template
 * engine. Also infers the SQL dialect from the file path when not declared
 * and appends a dialect hint to the info text.
 *
 * @param {object} model - Mutated in place (`info`, possibly `dialect`).
 * @param {string} [file] - Model file path, used for dialect inference.
 */
function expandModelInfo(model, file) {
  // Flatten a defs dictionary into { key, description } entries for the template.
  const toEntries = defs =>
    Object.entries(defs).map(([key, { description }]) => ({ key, description }));
  model.info = `
## DIMENSIONS
{%- for dimension in dimensions %}
- \`{{ dimension.key }}\` {{ dimension.description }}
{%- endfor %}
## MEASURES
{%- for measure in measures %}
- \`{{ measure.key }}\` {{ measure.description }}
{%- endfor %}
${model.info || ""}`.trim();
  model.info = renderJinjaTemplate(model.info, {
    dimensions: toEntries(model.dimensions),
    measures: toEntries(model.measures)
  });
  if (!model.dialect && file)
    model.dialect = inferModelDialect(file);
  if (model.dialect)
    model.info += `\n\nUse ${model.dialect.toUpperCase()} syntax for generating SQL filter expressions.`;
}
/**
 * Infer the SQL dialect from a model file path (case-sensitive substring
 * match, `bigquery` checked before `snowflake`).
 * @param {string} file - Model file path.
 * @returns {string} `"bigquery"` or `"snowflake"`.
 * @throws {Error} When the path names neither known dialect.
 */
function inferModelDialect(file) {
  for (const dialect of ["bigquery", "snowflake"]) {
    if (file.includes(dialect))
      return dialect;
  }
  throw new Error(`Unable to determine dialect for model file: ${file}`);
}
// Validate a parsed model before expansion.
// NOTE(review): currently a no-op stub — createModel calls this before
// expanding dimensions/measures, but no structural checks (required keys,
// value types) are implemented yet. TODO: implement or remove.
function validateModel(model) {
}
//# sourceMappingURL=load.js.map