UNPKG

@featurevisor/core

Version: (version number not captured in this page extract)

Core package of Featurevisor for Node.js usage

330 lines 15.5 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getCustomDatafile = getCustomDatafile;
exports.buildDatafile = buildDatafile;
const fs = require("fs");
const config_1 = require("../config");
const utils_1 = require("../utils");
const hashes_1 = require("./hashes");
const traffic_1 = require("./traffic");
const getFeatureRanges_1 = require("./getFeatureRanges");
const convertToV1_1 = require("./convertToV1");
/**
 * Builds a datafile with caller-supplied overrides, reading the current state
 * from the datasource first.
 *
 * When `options.featureKey` is set, the feature list is narrowed to that
 * feature's required-features chain (as reported by the datasource).
 * `schemaVersion` falls back to the package-wide `SCHEMA_VERSION`, and
 * `revision` falls back to the literal string "tester".
 *
 * @param options - expects: datasource, projectConfig, environment, and
 *   optionally featureKey, schemaVersion, revision, inflate.
 * @returns the datafile content produced by `buildDatafile`.
 */
async function getCustomDatafile(options) {
    let featuresToInclude;
    if (options.featureKey) {
        // Include the feature plus everything it transitively requires.
        const requiredChain = await options.datasource.getRequiredFeaturesChain(options.featureKey);
        featuresToInclude = Array.from(requiredChain);
    }
    const existingState = await options.datasource.readState(options.environment);
    const datafileContent = await buildDatafile(options.projectConfig, options.datasource, {
        schemaVersion: options.schemaVersion || config_1.SCHEMA_VERSION,
        revision: options.revision || "tester",
        environment: options.environment,
        features: featuresToInclude,
        inflate: options.inflate,
    }, existingState);
    return datafileContent;
}
/**
 * Builds a datafile (schema v1 or v2) from the project's features, segments,
 * and attributes.
 *
 * Only segments/attributes actually referenced by the included features are
 * emitted: segment keys are collected from rules, variable overrides, and
 * force entries; attribute keys are collected from conditions (including the
 * conditions of the emitted segments).
 *
 * Side effect: `existingState.features` is mutated in place so that a
 * subsequent build in the same process can reuse variation weights, traffic
 * allocations, and (later) feature hashes.
 *
 * @param projectConfig - provides directory paths, defaultBucketBy, and the
 *   `stringify` flag that controls JSON-encoding of conditions/segments.
 * @param datasource - used to list/read features, segments, and attributes.
 * @param options - tag/environment/features filters, schemaVersion, revision,
 *   inflate, revisionFromHash.
 * @param existingState - prior build state; read by getTraffic and updated here.
 * @returns a v1 datafile (via convertToV1) when options.schemaVersion === "1",
 *   otherwise a v2 datafile object.
 */
async function buildDatafile(projectConfig, datasource, options, existingState) {
    // Keys referenced by the features that survive filtering; used below to
    // decide which segments/attributes to include in the datafile.
    const segmentKeysUsedByTag = new Set();
    const attributeKeysUsedByTag = new Set();
    const { featureRanges, featureIsInGroup } = await (0, getFeatureRanges_1.getFeatureRanges)(projectConfig, datasource);
    // features
    const features = [];
    const featuresDirectory = projectConfig.featuresDirectoryPath;
    if (fs.existsSync(featuresDirectory)) {
        const featureFiles = await datasource.listFeatures();
        for (const featureKey of featureFiles) {
            const parsedFeature = await datasource.readFeature(featureKey);
            // Skip archived features, features missing the requested tag, and
            // features outside the explicit include list (if one was given).
            if (parsedFeature.archived === true) {
                continue;
            }
            if (options.tag && parsedFeature.tags.indexOf(options.tag) === -1) {
                continue;
            }
            if (options.features && options.features.indexOf(featureKey) === -1) {
                continue;
            }
            // expose/rules/force are environment-keyed maps when an environment
            // is requested; otherwise they are used as-is.
            let expose;
            let rules;
            let force;
            if (options.environment) {
                expose = parsedFeature.expose?.[options.environment];
                rules = parsedFeature.rules?.[options.environment];
                force = parsedFeature.force?.[options.environment];
            }
            else {
                expose = parsedFeature.expose;
                rules = parsedFeature.rules;
                force = parsedFeature.force;
            }
            // expose === false hides the feature entirely; an array of tags
            // restricts exposure to builds for those tags.
            if (expose === false) {
                continue;
            }
            if (Array.isArray(expose)) {
                const exposeTags = expose;
                if (options.tag && exposeTags.indexOf(options.tag) === -1) {
                    continue;
                }
            }
            // NOTE(review): `rules` can be undefined here when the environment
            // key is absent (the `?.` lookup above), which would make this
            // for...of throw — confirm upstream guarantees rules exist per
            // environment for every non-archived feature.
            for (const parsedRule of rules) {
                const extractedSegmentKeys = (0, utils_1.extractSegmentKeysFromGroupSegments)(parsedRule.segments);
                extractedSegmentKeys.forEach((segmentKey) => segmentKeysUsedByTag.add(segmentKey));
            }
            const feature = {
                key: featureKey,
                deprecated: parsedFeature.deprecated === true ? true : undefined,
                bucketBy: parsedFeature.bucketBy || projectConfig.defaultBucketBy,
                required: parsedFeature.required,
                disabledVariationValue: parsedFeature.disabledVariationValue,
                variations: Array.isArray(parsedFeature.variations)
                    ? parsedFeature.variations.map((variation) => {
                        const mappedVariation = {
                            value: variation.value,
                            weight: variation.weight, // @NOTE: added so state files can maintain weight info, but datafiles don't need this. find a way to remove it from datafiles later
                            variables: variation.variables,
                            variableOverrides: variation.variableOverrides,
                        };
                        if (variation.variableOverrides) {
                            const variableOverrides = variation.variableOverrides;
                            const variableKeys = Object.keys(variableOverrides);
                            for (const variableKey of variableKeys) {
                                mappedVariation.variableOverrides[variableKey] = variableOverrides[variableKey].map((override) => {
                                    // Condition-based override: record its attribute
                                    // keys and optionally stringify the conditions.
                                    if (typeof override.conditions !== "undefined") {
                                        const extractedAttributeKeys = (0, utils_1.extractAttributeKeysFromConditions)(override.conditions);
                                        extractedAttributeKeys.forEach((attributeKey) => attributeKeysUsedByTag.add(attributeKey));
                                        return {
                                            conditions: projectConfig.stringify && typeof override.conditions !== "string"
                                                ? JSON.stringify(override.conditions)
                                                : override.conditions,
                                            value: override.value,
                                        };
                                    }
                                    // Segment-based override: record its segment keys
                                    // and optionally stringify the segments.
                                    if (typeof override.segments !== "undefined") {
                                        const extractedSegmentKeys = (0, utils_1.extractSegmentKeysFromGroupSegments)(override.segments);
                                        extractedSegmentKeys.forEach((segmentKey) => segmentKeysUsedByTag.add(segmentKey));
                                        return {
                                            segments: projectConfig.stringify && typeof override.segments !== "string"
                                                ? JSON.stringify(override.segments)
                                                : override.segments,
                                            value: override.value,
                                        };
                                    }
                                    return override;
                                });
                            }
                        }
                        return mappedVariation;
                    })
                    : undefined,
                traffic: (0, traffic_1.getTraffic)(parsedFeature.variations, rules, existingState.features[featureKey], featureRanges.get(featureKey) || []).map((t) => {
                    return {
                        ...t,
                        segments: typeof t.segments !== "string" && projectConfig.stringify
                            ? JSON.stringify(t.segments)
                            : t.segments,
                    };
                }),
                ranges: featureRanges.get(featureKey) || undefined,
            };
            // update state in memory, so that next datafile build can use it (in case it contains the same feature)
            existingState.features[featureKey] = {
                variations: Array.isArray(feature.variations)
                    ? feature.variations.map((v) => {
                        return {
                            value: v.value,
                            weight: v.weight || 0,
                        };
                    })
                    : undefined,
                traffic: feature.traffic.map((t) => {
                    return {
                        key: t.key,
                        percentage: t.percentage,
                        allocation: t.allocation && t.allocation.map((a) => {
                            return {
                                variation: a.variation,
                                range: a.range,
                            };
                        }),
                    };
                }),
            };
            // Grouped features keep their ranges even when the Map lookup above
            // yielded a falsy value (this assignment skips the `|| undefined`).
            if (featureIsInGroup[featureKey] === true) {
                feature.ranges = featureRanges.get(featureKey);
            }
            // Copy the variables schema, normalizing optional flags so that
            // absent/false values become `undefined` (and drop out of JSON).
            if (parsedFeature.variablesSchema) {
                const variableKeys = Object.keys(parsedFeature.variablesSchema);
                feature.variablesSchema = {};
                for (const variableKey of variableKeys) {
                    const v = parsedFeature.variablesSchema[variableKey];
                    feature.variablesSchema[variableKey] = {
                        key: variableKey,
                        type: v.type,
                        defaultValue: v.defaultValue,
                        deprecated: v.deprecated === true ? true : undefined,
                        useDefaultWhenDisabled: v.useDefaultWhenDisabled === true ? true : undefined,
                        disabledValue: typeof v.disabledValue !== "undefined" ? v.disabledValue : undefined,
                    };
                }
            }
            // Force entries: collect their segment keys and optionally stringify
            // segments/conditions. NOTE(review): the map callback mutates each
            // entry `f` in place — the parsed feature's force objects are reused.
            if (force) {
                feature.force = force.map((f) => {
                    if (f.segments) {
                        const extractedSegmentKeys = (0, utils_1.extractSegmentKeysFromGroupSegments)(f.segments);
                        extractedSegmentKeys.forEach((segmentKey) => segmentKeysUsedByTag.add(segmentKey));
                        f.segments = typeof f.segments !== "string" && projectConfig.stringify
                            ? JSON.stringify(f.segments)
                            : f.segments;
                    }
                    if (f.conditions) {
                        f.conditions = typeof f.conditions !== "string" && projectConfig.stringify
                            ? JSON.stringify(f.conditions)
                            : f.conditions;
                    }
                    return f;
                });
            }
            features.push(feature);
        }
    }
    // segments — only those referenced by the features collected above.
    const segments = [];
    const segmentsDirectory = projectConfig.segmentsDirectoryPath;
    if (fs.existsSync(segmentsDirectory)) {
        const segmentFiles = await datasource.listSegments();
        for (const segmentKey of segmentFiles) {
            const parsedSegment = await datasource.readSegment(segmentKey);
            if (parsedSegment.archived === true) {
                continue;
            }
            if (segmentKeysUsedByTag.has(segmentKey) === false) {
                continue;
            }
            // Segment conditions also contribute attribute keys.
            const extractedAttributeKeys = (0, utils_1.extractAttributeKeysFromConditions)(parsedSegment.conditions);
            extractedAttributeKeys.forEach((attributeKey) => attributeKeysUsedByTag.add(attributeKey));
            const segment = {
                key: segmentKey,
                conditions: typeof parsedSegment.conditions !== "string" && projectConfig.stringify === true
                    ? JSON.stringify(parsedSegment.conditions)
                    : parsedSegment.conditions,
            };
            segments.push(segment);
        }
    }
    // attributes — only those referenced by feature/segment conditions.
    const attributes = [];
    const attributesDirectory = projectConfig.attributesDirectoryPath;
    if (fs.existsSync(attributesDirectory)) {
        const attributeFiles = await datasource.listAttributes();
        for (const attributeKey of attributeFiles) {
            const parsedAttribute = await datasource.readAttribute(attributeKey);
            if (parsedAttribute.archived === true) {
                continue;
            }
            if (attributeKeysUsedByTag.has(attributeKey) === false) {
                continue;
            }
            const attribute = {
                key: attributeKey,
                type: parsedAttribute.type,
            };
            attributes.push(attribute);
        }
    }
    // inflate — duplicate every entity (inflate - 1) times with "-<i>"-suffixed
    // keys, presumably to produce artificially large datafiles for testing.
    if (options.inflate && options.inflate >= 2) {
        const allFeatureKeys = features.map((f) => f.key);
        const allSegmentKeys = segments.map((s) => s.key);
        const allAttributeKeys = attributes.map((a) => a.key);
        for (let i = 0; i < options.inflate - 1; i++) {
            // feature
            for (const featureKey of allFeatureKeys) {
                const originalFeature = features.find((f) => f.key === featureKey);
                features.unshift({
                    ...originalFeature,
                    key: `${originalFeature.key}-${i}`,
                });
            }
            // segment
            for (const segmentKey of allSegmentKeys) {
                const originalSegment = segments.find((s) => s.key === segmentKey);
                segments.unshift({
                    ...originalSegment,
                    key: `${originalSegment.key}-${i}`,
                });
            }
            // attribute
            for (const attributeKey of allAttributeKeys) {
                const originalAttribute = attributes.find((a) => a.key === attributeKey);
                attributes.unshift({
                    ...originalAttribute,
                    key: `${originalAttribute.key}-${i}`,
                });
            }
        }
    }
    // schema v1
    if (options.schemaVersion === "1") {
        return (0, convertToV1_1.convertToV1)({
            revision: options.revision,
            projectConfig,
            attributes,
            features,
            segments,
        });
    }
    // schema v2 — entities become key-indexed objects instead of arrays, and
    // the redundant `key` properties are stripped.
    const datafileContentV2 = {
        schemaVersion: "2",
        revision: options.revision,
        segments: {},
        features: {},
    };
    datafileContentV2.segments = segments.reduce((acc, segment) => {
        // key check needed for supporting v1 datafile generation
        if (segment.key) {
            acc[segment.key] = segment;
            delete acc[segment.key].key; // remove key from segment, as it is not needed in v2 datafile
        }
        return acc;
    }, {});
    datafileContentV2.features = features.reduce((acc, feature) => {
        if (!feature.key) {
            return acc;
        }
        const featureKey = feature.key;
        const featureV2 = feature;
        // remove key, as it is not needed in v2 datafile
        delete featureV2.key;
        // remove variablesSchema[key].key
        if (featureV2.variablesSchema) {
            for (const [variableKey, variable] of Object.entries(featureV2.variablesSchema)) {
                if (variable.key) {
                    delete featureV2.variablesSchema[variableKey].key;
                }
            }
        }
        acc[featureKey] = featureV2;
        return acc;
    }, {});
    // add feature hashes for change detection
    const segmentHashes = (0, hashes_1.getSegmentHashes)(datafileContentV2.segments);
    Object.keys(datafileContentV2.features).forEach((featureKey) => {
        const hash = (0, hashes_1.generateHashForFeature)(featureKey, datafileContentV2.features, segmentHashes);
        datafileContentV2.features[featureKey].hash = hash;
        // check needed to support --inflate option
        if (existingState.features[featureKey]) {
            existingState.features[featureKey].hash = hash;
        }
    });
    // Optionally derive the revision from the datafile's own hash.
    if (options.revisionFromHash) {
        const datafileHash = (0, hashes_1.generateHashForDatafile)(datafileContentV2);
        datafileContentV2.revision = `${datafileHash}`;
    }
    return datafileContentV2;
}
//# sourceMappingURL=buildDatafile.js.map