@netlify/content-engine
Version:
488 lines • 21.9 kB
JavaScript
// TypeScript-emitted interop helper: wraps a CommonJS module that lacks the
// __esModule marker in `{ default: mod }` so `.default` access works the same
// for both native ES modules and plain CJS exports. Generated code — do not
// hand-edit.
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.contentEngine = void 0;
const async_mutex_1 = require("async-mutex");
const graphql_1 = require("../graphql");
const index_1 = require("./../datastore/index");
const express_1 = __importDefault(require("express"));
const inspector_1 = __importDefault(require("inspector"));
const opentracing_1 = require("opentracing");
const reporter_1 = __importDefault(require("../reporter"));
const tracer_1 = require("../utils/tracer");
const detect_port_in_use_and_prompt_1 = require("../utils/detect-port-in-use-and-prompt");
const _1 = require(".");
const redux_1 = require("../redux");
const datastore_1 = require("../datastore");
const print_instructions_1 = require("../utils/print-instructions");
const __1 = require("..");
const context_1 = __importDefault(require("../schema/context"));
const framework_hooks_1 = require("../framework-hooks");
const ledger_writer_1 = require("./replication/ledger-writer");
const ledger_reader_1 = require("./replication/ledger-reader");
const ledger_dependency_manager_1 = require("./replication/ledger-dependency-manager");
const configuration_hash_1 = require("./replication/configuration-hash/configuration-hash");
const cache_lmdb_1 = __importDefault(require("../utils/cache-lmdb"));
// Process-wide OpenTracing tracer; spans created in this module (initialize,
// sync-data) are reported through whatever tracer initTracer registers.
const tracer = (0, opentracing_1.globalTracer)();
// Serializes engine.sync() calls: only one data sync may run at a time.
const syncDataMutex = new async_mutex_1.Mutex();
// Attach the Node.js inspector on the requested port. Does nothing when a
// debugger session is already active (inspector.url() returns a value) —
// fixes #26708. When `break` is set, open with wait-for-connection and pause
// immediately so the user can attach before any engine code executes.
const openDebuggerPort = (debugInfo) => {
    const alreadyAttached = inspector_1.default.url() !== undefined;
    if (alreadyAttached) {
        return;
    }
    const { port, break: shouldBreak } = debugInfo;
    if (!shouldBreak) {
        inspector_1.default.open(port);
        return;
    }
    inspector_1.default.open(port, undefined, true);
    // eslint-disable-next-line no-debugger
    debugger;
};
// Resolve the debug port: honor a user-supplied value (including 0),
// otherwise fall back to Node's default inspector port.
const getDebugPort = (port) => port ?? 9229;
// Derive debugger settings from the program flags. `inspect` attaches
// without pausing; `inspectBrk` attaches and breaks immediately. Returns
// null when neither flag is present. `inspect` wins if both are set.
const getDebugInfo = (program) => {
    const hasFlag = (flag) => Object.prototype.hasOwnProperty.call(program, flag);
    if (hasFlag(`inspect`)) {
        return { port: getDebugPort(program.inspect), break: false };
    }
    if (hasFlag(`inspectBrk`)) {
        return { port: getDebugPort(program.inspectBrk), break: true };
    }
    return null;
};
/**
 * Builds a content-engine instance rooted at `directory`.
 *
 * Returns the engine's public API: `initialize`, `startGraphQLServer`,
 * `sync`, `query`, `buildSchema`, `clearIndexes`, a test-only surface, and
 * subprocess-only stubs (`config`, `onStdOut`, `onStdErr`, `onMessage`,
 * `sendMessage`, `clearListeners`, `stop`, `restart`, `getProcess`) that
 * always throw unless the engine was created with `runInSubProcess: true`.
 *
 * Ledger replication (optional):
 *  - `ledger.type === "WRITER"`: node-mutation actions are streamed out via
 *    LedgerWriter during sync.
 *  - `ledger.type === "READER"`: ledger actions are read back and replayed
 *    into the redux store.
 *
 * NOTE(review): the exact shapes of `engineConfig`, `ledger`,
 * `pluginDirectories` and `frameworkHooks` are defined elsewhere — the
 * comments below describe only what this file's usage demonstrates.
 */
const contentEngine = ({ verbose, openTracingConfigFile, host, port, directory, ledger, engineConfig, frameworkHooks: frameworkHooksPath, pluginDirectories, } = { directory: `` }) => {
// Requiring the hooks module registers framework hooks as a side effect.
if (frameworkHooksPath) {
require(frameworkHooksPath);
}
framework_hooks_1.frameworkHooks.startup.before();
// Fall back to the current working directory (also replaces the `` default).
directory ||= process.cwd();
// provide global Gatsby object
global.__GATSBY = process.env.GATSBY_NODE_GLOBALS
? JSON.parse(process.env.GATSBY_NODE_GLOBALS)
: {};
// Bind the GraphQL server to all interfaces.
const hostname = `0.0.0.0`;
const isLedgerWriter = ledger?.type === `WRITER`;
const isLedgerReader = ledger?.type === `READER`;
// ledger.api and ledger.resourceId must be supplied as a pair.
if ((!ledger?.api && ledger?.resourceId) ||
(!ledger?.resourceId && ledger?.api)) {
throw new Error(`A ledger.api and ledger.resourceId must both be provided if one is. resourceId: ${ledger.resourceId}, api: ${ledger.api}`);
}
// Writer mode: replicate only node-mutation action types to the ledger.
const ledgerWriter = isLedgerWriter
? new ledger_writer_1.LedgerWriter({
ledgerId: ledger.resourceId,
serverUrl: ledger.api,
actionsToWatch: [
`CREATE_NODE`,
`DELETE_MISSING_NODE`,
`DELETE_NODE`,
`ADD_FIELD_TO_NODE`,
// Possibly needed for Gatsby plugin support:
// `BUILD_TYPE_METADATA`,
// `API_FINISHED`,
// `TOUCH_NODE`,
// `ENABLE_STATEFUL_SOURCE_PLUGIN`,
],
})
: undefined;
// LMDB-backed cache of the last finalized schema hash per resourceId; used
// after a writer sync to decide whether the schema changed.
const schemaHashCache = new cache_lmdb_1.default({
name: `schema-hash-cache`,
encoding: `json`,
directory,
}).init();
const ledgerDependencyManager = new ledger_dependency_manager_1.LedgerDependencyManager({ directory });
// Writers scan the engine config up front for plugins that need installing.
if (ledgerWriter && engineConfig) {
ledgerDependencyManager.checkEngineConfigForPluginInstalls(engineConfig);
}
const resourceId = ledger?.resourceId;
// Reader mode: replay ledger actions into the redux store, skipping
// synchronizer control sequences and internal dependency-manager actions.
const ledgerReader = isLedgerReader
? new ledger_reader_1.LedgerReader({
handleAction: (action) => {
if (action.type !== `SYNCHRONIZER_CONTROL_SEQUENCE`) {
ledgerDependencyManager.checkLedgerActionForPluginInstall(action);
if (
// don't emit internal ledger actions
action.type !==
`LEDGER_DEPENDENCY_MANAGER_SHOULD_INSTALL_DYNAMIC_CONNECTOR`) {
redux_1.store.dispatch(action);
}
}
},
serverUrl: ledger.api,
directory,
resourceId: ledger.resourceId,
})
: undefined;
// Gatsby-style program object threaded through initialize/server startup.
// A string port is parsed; 0/NaN/undefined all fall back to 8000.
const program = {
directory,
sitePackageJson: require(`${directory}/package.json`),
port: (typeof port === `string` ? parseInt(port, 10) : port) || 8000,
hostname,
host: host || `localhost`,
store: redux_1.store,
reporter: reporter_1.default,
openTracingConfigFile: ``,
debugInfo: null,
};
program.debugInfo = getDebugInfo(program);
console.info(`[content-engine] starting "${program.sitePackageJson.name || `unknown`}" engine`);
program.verbose = verbose || false;
reporter_1.default.setVerbose(program.verbose);
// Attach the Node inspector when --inspect/--inspect-brk style flags exist.
if (program.debugInfo) {
openDebuggerPort(program.debugInfo);
}
if (openTracingConfigFile) {
(0, tracer_1.initTracer)(openTracingConfigFile);
}
// Core data pipeline, wrapped in framework hooks at each stage:
// customizeSchema -> sourceNodes -> (optionally) buildSchema -> saveState.
// `connector` routes a webhook body to a specific source plugin.
async function loadData({ webhookBody, parentSpan, shouldBuildSchema = true, connector, } = {
shouldBuildSchema: true,
}) {
const activity = reporter_1.default.activityTimer(`update extension data`);
activity.start();
try {
await framework_hooks_1.frameworkHooks.customizeSchema.before();
await (0, _1.customizeSchema)({
parentSpan,
// can't refresh as a reader because it will emit CLEAR_SCHEMA_CUSTOMIZATION and we'll lose our types
// we can only do refresh: true if dependency manager has installed plugins or we're not a reader
refresh: !isLedgerReader || ledgerDependencyManager.didInstallPlugins,
});
await framework_hooks_1.frameworkHooks.customizeSchema.after();
await framework_hooks_1.frameworkHooks.sourceNodes.before();
await (0, _1.sourceNodes)({
parentSpan,
webhookBody,
store: redux_1.store,
webhookSourcePluginName: connector,
});
await framework_hooks_1.frameworkHooks.sourceNodes.after();
await framework_hooks_1.frameworkHooks.buildSchema.before();
if (shouldBuildSchema) {
await (0, _1.buildSchema)({
parentSpan,
});
}
await framework_hooks_1.frameworkHooks.buildSchema.after();
(0, redux_1.saveState)();
}
finally {
// Always stop the activity timer, even if a pipeline stage threw.
activity.end();
}
}
// Lazily-created express app / HTTP server shared by the closures below.
let app;
let server;
let serverShouldBeRunning = false;
let initialized = false;
// Creates the express app once, but only when the server is supposed to run.
// Prompts the user (or exits on rejection) when the port is already in use.
async function createExpressApp() {
const appExists = Boolean(app);
if (!appExists && serverShouldBeRunning) {
try {
program.port = await (0, detect_port_in_use_and_prompt_1.detectPortInUseAndPrompt)(program.port, hostname);
}
catch (e) {
if (e.message === `USER_REJECTED`) {
process.exit(0);
}
throw e;
}
app = (0, express_1.default)();
// Reject requests whose path cannot be URI-decoded before routing.
app.use((req, res, next) => {
try {
decodeURIComponent(req.path);
}
catch (e) {
return res.status(500).send(`URI malformatted`);
}
return next();
});
}
}
// Idempotent initialization shared by initialize()/startGraphQLServer()/sync().
// Readers may (re)initialize mid-lifetime when the ledger installs plugins;
// everyone else initializes exactly once.
const publicInitialize = async ({ parentSpan }) => {
await (0, datastore_1.getDataStore)().ready();
await createExpressApp();
if (ledgerReader) {
const { previouslyInstalled, totalInstalled } = await ledgerDependencyManager.installPendingPlugins();
// Re-run initialize + schema customization when new plugins were just
// installed, or on first init when plugins were installed previously.
if (totalInstalled > 0 || (!initialized && previouslyInstalled > 0)) {
await (0, _1.initialize)({
parentSpan,
program,
reporter: reporter_1.default,
app,
isLedgerReader: Boolean(ledgerReader),
engineConfig: {
...engineConfig,
plugins: ledgerDependencyManager.getEnginePluginsConfig(),
},
pluginDirectories,
});
initialized = true;
await (0, _1.customizeSchema)({
parentSpan,
// can't refresh as a reader because it will emit CLEAR_SCHEMA_CUSTOMIZATION and we'll lose our types
// we can only do refresh: true if dependency manager has installed plugins or we're not a reader
refresh: !isLedgerReader || ledgerDependencyManager.didInstallPlugins,
});
}
await (0, _1.buildSchema)({
parentSpan,
});
// NOTE(review): this logs on every reader initialize, even when
// totalInstalled is 0 and the re-initialize branch above was skipped.
console.info(`Installed ${totalInstalled} plugins from ledger, initialized, then built the GraphQL schema.`);
}
if (!initialized) {
console.info(`[content-engine] initializing`);
if (ledgerWriter) {
await ledgerDependencyManager.installPendingPluginsForWriter();
}
await (0, _1.initialize)({
parentSpan,
program,
reporter: reporter_1.default,
app,
engineConfig: ledgerWriter
? {
...engineConfig,
plugins: engineConfig?.plugins?.map((plugin) => ({
...plugin,
// Need to reset this as it has side effects
parentDir: undefined,
})),
}
: engineConfig,
isLedgerReader: Boolean(ledgerReader),
pluginDirectories,
});
initialized = true;
}
};
// Runs synchronously while the factory executes, before any public API call.
framework_hooks_1.frameworkHooks.startup.after();
async function startGraphQlServerWithArgs() {
serverShouldBeRunning = true;
await createExpressApp();
return (0, _1.startGraphQLServer)({
parentSpan: undefined,
program,
app,
store: redux_1.store,
reporter: reporter_1.default,
loadData,
});
}
return {
// --- Subprocess-only API: these stubs throw unless runInSubProcess is set.
config: async () => {
throw new Error(`contentEngine().config() can only be called when contentEngine({ runInSubProcess: true }) is set.`);
},
onStdOut: () => {
throw new Error(`contentEngine().onStdOut() can only be called when contentEngine({ runInSubProcess: true }) is set.`);
},
onStdErr: () => {
throw new Error(`contentEngine().onStdErr() can only be called when contentEngine({ runInSubProcess: true }) is set.`);
},
onMessage: () => {
throw new Error(`contentEngine().onMessage() can only be called when contentEngine({ runInSubProcess: true }) is set.`);
},
sendMessage: () => {
throw new Error(`contentEngine().sendMessage() can only be called when contentEngine({ runInSubProcess: true }) is set.`);
},
clearListeners: () => {
throw new Error(`contentEngine().clearListeners() can only be called when contentEngine({ runInSubProcess: true }) is set.`);
},
// Initializes the engine and returns the sourcing configuration id
// computed from the directory's current configuration.
initialize: async (args) => {
const parentSpan = tracer.startSpan(`initialize`);
await publicInitialize({ ...args, parentSpan });
const sourcingConfigurationId = await (0, configuration_hash_1.computeSourcingConfigurationId)(directory);
parentSpan.finish();
return sourcingConfigurationId;
},
startGraphQLServer: async () => {
serverShouldBeRunning = true;
await publicInitialize({ parentSpan: undefined });
await startGraphQlServerWithArgs();
},
clearIndexes: () => {
(0, datastore_1.getDataStore)().clearIndexes();
},
// Runs one full data sync under the module mutex. In reader mode the
// ledger is replayed instead of sourcing; in writer mode actions are
// streamed to the ledger while sourcing runs. Returns ledger stats and
// whether the schema hash changed since the previous writer sync.
sync: async ({ runServer, webhookBody, buildSchema: shouldBuildSchema = true, connector, ledger, } = {
buildSchema: true,
}) => {
// The per-call ledger options must match the mode the engine was built with.
if (ledger && !isLedgerReader && !isLedgerWriter) {
throw new Error(`Cannot call engine.sync({ ledger: {...} }) without setting const engine = contentEngine({ ledger: { type: "READER" | "WRITER" }})`);
}
if (ledger && ledger.type === `WRITER` && isLedgerReader) {
throw new Error(`Cannot call engine.sync({ ledger: { type: "WRITER" } }) on an engine with ledger.type: "READER"`);
}
if (ledger && ledger.type === `READER` && isLedgerWriter) {
throw new Error(`Cannot call engine.sync({ ledger: { type: "READER" } }) on an engine with ledger.type: "WRITER"`);
}
// Only one sync may run at a time; released in the finally below.
const release = await syncDataMutex.acquire();
try {
console.info(`[content-engine] sync started`);
await framework_hooks_1.frameworkHooks.sync.before();
if (typeof runServer !== `undefined`) {
serverShouldBeRunning = runServer;
}
const parentSpan = tracer.startSpan(`sync-data`);
let totalActions = 0;
let returnedConfigurationId = ``;
let invalidations = [];
let schemaHash;
let schemaHashChanged = undefined;
if (ledgerReader && ledger?.type === `READER`) {
if (!ledger.configurationId) {
throw new Error(`engine.sync({ ledger: { type: "READER" } }) requires a configurationId to be provided.`);
}
// Reader path: replay ledger actions up to untilBlockId, then initialize.
const endBlockVersionId = ledger.untilBlockId;
totalActions = await ledgerReader.read({
endBlockVersionId,
cacheId: ledger.cacheId,
configurationId: ledger.configurationId,
onAction: ledger.onAction,
headers: ledger.headers,
});
await publicInitialize({ parentSpan });
}
else {
// need to initialize before we can write the ledger so that the configuration hash can be computed properly
await publicInitialize({
parentSpan,
});
if (ledgerWriter && ledger?.type === `WRITER`) {
await ledgerWriter.openStream({
blockVersionId: ledger.writeBlockId,
cacheId: ledger.cacheId,
directory,
headers: ledger.headers,
// If we're building the schema, we're doing a full sync, so we can skip invalidations
skipInvalidations: !!shouldBuildSchema,
});
}
try {
await loadData({
parentSpan,
webhookBody,
shouldBuildSchema,
connector,
});
}
catch (e) {
// Log for operator visibility, then propagate so the sync fails.
console.error(`[content-engine] error loading data`, e);
throw e;
}
(0, redux_1.savePartialStateToDisk)([`inferenceMetadata`]);
// Writer path: finalize the stream and compare the new schema hash
// against the cached one to detect schema changes.
if (resourceId && ledgerWriter && ledger?.type === `WRITER`) {
const result = await ledgerWriter.finalizeStream();
totalActions = result.actionCount;
if (result.configurationId)
returnedConfigurationId = result.configurationId;
if (result.schemaHash) {
const previousSchemaHash = await schemaHashCache.get(resourceId);
schemaHash = result.schemaHash;
await schemaHashCache.set(resourceId, result.schemaHash);
schemaHashChanged = previousSchemaHash !== result.schemaHash;
}
if (result.invalidations) {
invalidations = result.invalidations;
}
}
}
// Reconcile the GraphQL server with the requested runServer state.
// NOTE(review): `server` is never reset to undefined after close(), so a
// later sync with runServer: true will not restart it — confirm intended.
if (serverShouldBeRunning && !server) {
server = await startGraphQlServerWithArgs();
}
if (server && !serverShouldBeRunning) {
server.close();
}
await (0, datastore_1.getDataStore)().ready();
parentSpan.finish();
if (serverShouldBeRunning && server) {
(0, print_instructions_1.printInstructions)(program);
}
await framework_hooks_1.frameworkHooks.sync.after();
return {
totalLedgerActions: totalActions,
configurationId: returnedConfigurationId,
schemaHash,
invalidations,
schemaHashChanged,
};
}
finally {
release();
}
},
stop: () => {
throw new Error(`contentEngine().stop() is only available when running in a subprocess with contentEngine({ runInSubProcess: true }) set.`);
},
restart: () => {
throw new Error(`contentEngine().restart() is only available when running in a subprocess with contentEngine({ runInSubProcess: true }) set.`);
},
// Test-only helpers; throwOutsideTestEnv makes them throw outside tests.
test: (0, __1.throwOutsideTestEnv)({
getNodes: index_1.getNodes,
getNodesByType: index_1.getNodesByType,
getNode: index_1.getNode,
query: async (query, variables) => {
if (!initialized) {
throw new Error(`contentEngine().query() called before first contentEngine().sync() completed.\nSchema customization has not run yet so the GraphQL API cannot be queried.`);
}
const { schema } = redux_1.store.getState();
const result = await (0, graphql_1.graphql)({
schema,
source: query,
rootValue: {},
contextValue: (0, context_1.default)({
schema: redux_1.store.getState().schema,
schemaComposer: redux_1.store.getState().schemaCustomization.composer,
context: {},
customContext: redux_1.store.getState().schemaCustomization.context,
}),
variableValues: variables,
});
return result;
},
}),
// Executes a GraphQL query against the built schema. Collects cache tags
// touched during resolution and returns them in result.extensions.cacheTags.
query: async (query, variables, querySettings) => {
if (!initialized) {
throw new Error(`contentEngine().query() called before first contentEngine().sync() completed.\nSchema customization has not run yet so the GraphQL API cannot be queried.`);
}
const { schema } = redux_1.store.getState();
const cacheTags = new Set();
const schemaComposer = redux_1.store.getState().schemaCustomization.composer;
// Merge caller-provided context with the tag collector and composer.
const context = querySettings?.context
? { ...querySettings.context, cacheTags, schemaComposer }
: { cacheTags, schemaComposer };
const result = await (0, graphql_1.graphql)({
schema,
source: query,
rootValue: {},
contextValue: (0, context_1.default)({
schema: redux_1.store.getState().schema,
schemaComposer,
context,
customContext: redux_1.store.getState().schemaCustomization.context,
}),
variableValues: variables,
});
result.extensions ||= {};
result.extensions.cacheTags = Array.from(cacheTags);
return result;
},
getProcess() {
throw new Error(`contentEngine().process is only available when running in a subprocess with contentEngine({ runInSubProcess: true }) set.`);
},
// Rebuilds the GraphQL schema (with framework hooks) without sourcing data.
async buildSchema() {
await framework_hooks_1.frameworkHooks.buildSchema.before();
await (0, _1.buildSchema)({
parentSpan: undefined,
});
await framework_hooks_1.frameworkHooks.buildSchema.after();
},
};
};
exports.contentEngine = contentEngine;
//# sourceMappingURL=content-engine.js.map
;