// envio
// Version:
// A latency and sync speed optimized, developer-friendly blockchain data indexer.
// 823 lines (685 loc) • 25.2 kB
// JavaScript
// Generated by ReScript, PLEASE EDIT WITH CARE
;
var Utils = require("./Utils.res.js");
var Hrtime = require("./bindings/Hrtime.res.js");
var Js_exn = require("rescript/lib/js/js_exn.js");
var Belt_Array = require("rescript/lib/js/belt_Array.js");
var Belt_Result = require("rescript/lib/js/belt_Result.js");
var PromClient = require("prom-client");
var S$RescriptSchema = require("rescript-schema/src/S.res.js");
// Process-wide Prometheus metrics registered with the default prom-client
// registry at module load time.
// NOTE(review): fixed help-string typo "Duration spend" -> "Duration spent"
// in the three duration counters below.
var loadEntitiesDurationCounter = new PromClient.Counter({
  name: "load_entities_processing_time_spent",
  help: "Duration spent on loading entities",
  labelNames: []
});
var eventRouterDurationCounter = new PromClient.Counter({
  name: "event_router_processing_time_spent",
  help: "Duration spent on event routing",
  labelNames: []
});
var executeBatchDurationCounter = new PromClient.Counter({
  name: "execute_batch_processing_time_spent",
  help: "Duration spent on executing batch",
  labelNames: []
});
// Latched to 1 by setAllChainsSyncedToHead once every chain reaches head.
var allChainsSyncedToHead = new PromClient.Gauge({
  name: "hyperindex_synced_to_head",
  help: "All chains fully synced",
  labelNames: []
});
// Per-chain source height, written by setSourceChainHeight.
var sourceChainHeight = new PromClient.Gauge({
  name: "chain_block_height",
  help: "Chain Height of Source Chain",
  labelNames: ["chainId"]
});
// True when `schema` is a string schema, possibly wrapped in any number of
// option/null layers; false for every other schema shape.
function schemaIsString(_schema) {
  var current = _schema;
  for (;;) {
    var tag = current.t;
    // Primitive tags are plain strings; only "string" qualifies.
    if (typeof tag !== "object") {
      return tag === "string";
    }
    // Unwrap optional/nullable wrappers and keep looking at the inner schema.
    if (tag.TAG === "option" || tag.TAG === "null") {
      current = tag._0;
    } else {
      return false;
    }
  }
}
// Extracts Prometheus label names from an object schema.
// Returns {TAG: "Ok", _0: [locations]} when every field is a string (or
// optional/nullable string) schema; otherwise {TAG: "Error", _0: message}.
function getLabelNames(schema) {
  var tag = schema.t;
  // Anything other than an object schema cannot describe a label set.
  if (typeof tag !== "object" || tag.TAG !== "object") {
    return {
      TAG: "Error",
      _0: "Label schema must be an object"
    };
  }
  var items = tag.items;
  // Collect the locations of every field that is not string-valued.
  var badFields = [];
  items.forEach(function (item) {
    if (!schemaIsString(item.schema)) {
      badFields.push(item.location);
    }
  });
  if (badFields.length > 0) {
    return {
      TAG: "Error",
      _0: "Label schema must be an object with string (or optional string) values. Non string values: " + badFields.join(", ")
    };
  }
  return {
    TAG: "Ok",
    _0: items.map(function (item) {
      return item.location;
    })
  };
}
// Namespace export bundling the label-schema validation helpers above.
var Labels = {
schemaIsString: schemaIsString,
getLabelNames: getLabelNames
};
// Registry of metric names created through the Safe* makers, used to reject
// duplicate metric registrations with a clear error.
var metricNames = new Set();
// ReScript functor output: builds a "safe" metric module from an adapter `M`
// that supplies make/labels/handleFloat/handleInt for a concrete prom-client
// metric type. Labels are validated against a rescript-schema at creation and
// converted through that schema on every write.
function MakeSafePromMetric(M) {
// Creates the metric, or throws on an invalid label schema / duplicate name.
var makeOrThrow = function (name, help, labelSchema) {
var labelNames = getLabelNames(labelSchema);
if (labelNames.TAG !== "Ok") {
return Js_exn.raiseError(labelNames._0);
}
if (metricNames.has(name)) {
return Js_exn.raiseError("Duplicate prometheus metric name: " + name);
}
metricNames.add(name);
var metric = M.make({
name: name,
help: help,
labelNames: labelNames._0
});
return {
metric: metric,
labelSchema: labelSchema
};
};
// Writes a float value for the given labels via the adapter.
var handleFloat = function (param, labels, value) {
M.handleFloat(M.labels(param.metric, S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema)), value);
};
// Writes an int value for the given labels via the adapter.
var handleInt = function (param, labels, value) {
M.handleInt(M.labels(param.metric, S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema)), value);
};
// Increments the labeled metric by 1.
var increment = function (param, labels) {
return M.labels(param.metric, S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema)).inc();
};
// Increments the labeled metric by `value`.
var incrementMany = function (param, labels, value) {
return M.labels(param.metric, S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema)).inc(value);
};
return {
makeOrThrow: makeOrThrow,
handleInt: handleInt,
handleFloat: handleFloat,
increment: increment,
incrementMany: incrementMany
};
}
// SafeCounter: schema-validated wrapper around PromClient.Counter
// (inlined instantiation of the MakeSafePromMetric pattern above).
// Creates a counter, or throws on an invalid label schema / duplicate name.
function makeOrThrow(name, help, labelSchema) {
var labelNames = getLabelNames(labelSchema);
if (labelNames.TAG !== "Ok") {
return Js_exn.raiseError(labelNames._0);
}
if (metricNames.has(name)) {
return Js_exn.raiseError("Duplicate prometheus metric name: " + name);
}
metricNames.add(name);
var metric = new PromClient.Counter({
name: name,
help: help,
labelNames: labelNames._0
});
return {
metric: metric,
labelSchema: labelSchema
};
}
// Adds a float `value` to the labeled counter (counters only support inc).
function handleFloat(param, labels, value) {
var prim0 = param.metric.labels(S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema));
prim0.inc(value);
}
// Adds an int `value` to the labeled counter.
function handleInt(param, labels, value) {
var prim0 = param.metric.labels(S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema));
prim0.inc(value);
}
// Increments the labeled counter by 1.
function increment(param, labels) {
return param.metric.labels(S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema)).inc();
}
// Increments the labeled counter by `value`.
function incrementMany(param, labels, value) {
return param.metric.labels(S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema)).inc(value);
}
var SafeCounter = {
makeOrThrow: makeOrThrow,
handleInt: handleInt,
handleFloat: handleFloat,
increment: increment,
incrementMany: incrementMany
};
// SafeGauge: schema-validated wrapper around PromClient.Gauge.
// Creates a gauge, or throws on an invalid label schema / duplicate name.
function makeOrThrow$1(name, help, labelSchema) {
var labelNames = getLabelNames(labelSchema);
if (labelNames.TAG !== "Ok") {
return Js_exn.raiseError(labelNames._0);
}
if (metricNames.has(name)) {
return Js_exn.raiseError("Duplicate prometheus metric name: " + name);
}
metricNames.add(name);
var metric = new PromClient.Gauge({
name: name,
help: help,
labelNames: labelNames._0
});
return {
metric: metric,
labelSchema: labelSchema
};
}
// Sets the labeled gauge to a float `value` (gauges use set, not inc).
function handleFloat$1(param, labels, value) {
var prim0 = param.metric.labels(S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema));
prim0.set(value);
}
// Sets the labeled gauge to an int `value`.
function handleInt$1(param, labels, value) {
var prim0 = param.metric.labels(S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema));
prim0.set(value);
}
// Increments the labeled gauge by 1.
function increment$1(param, labels) {
return param.metric.labels(S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema)).inc();
}
// Increments the labeled gauge by `value`.
function incrementMany$1(param, labels, value) {
return param.metric.labels(S$RescriptSchema.reverseConvertToJsonOrThrow(labels, param.labelSchema)).inc(value);
}
var SafeGauge = {
makeOrThrow: makeOrThrow$1,
handleInt: handleInt$1,
handleFloat: handleFloat$1,
increment: increment$1,
incrementMany: incrementMany$1
};
// Creates a Prometheus histogram whose label names are derived from
// `labelSchema`, and returns a function that starts a duration timer for a
// given label set (the returned timer-stopper records the observation).
// Throws (via Belt_Result.getExn) when the label schema is not a
// string-valued object schema.
// NOTE(review): renamed misspelled parameter `backets` -> `buckets`. Unlike
// the Safe* makers above, this does not register `name` in `metricNames`, so
// duplicate histogram names are not detected here — TODO confirm intended.
function makeSafeHistogramOrThrow(name, help, labelSchema, buckets) {
  var histogram = new PromClient.Histogram({
    name: name,
    help: help,
    labelNames: Belt_Result.getExn(getLabelNames(labelSchema)),
    buckets: buckets
  });
  return function (labels) {
    return histogram.labels(S$RescriptSchema.reverseConvertToJsonOrThrow(labels, labelSchema)).startTimer();
  };
}
// Label schema for benchmark summary rows: {group, stat, label}, all strings.
var labelSchema = S$RescriptSchema.schema(function (s) {
return {
group: s.m(S$RescriptSchema.string),
stat: s.m(S$RescriptSchema.string),
label: s.m(S$RescriptSchema.string)
};
});
var gauge = makeOrThrow$1("benchmark_summary_data", "All data points collected during indexer benchmark", labelSchema);
// Publishes one benchmark summary row as separate gauge samples, one per
// statistic (n, mean, min, max, sum, and stdDev when defined).
function set(group, label, n, mean, stdDev, min, max, sum) {
handleFloat$1(gauge, {
group: group,
stat: "n",
label: label
}, n);
handleFloat$1(gauge, {
group: group,
stat: "mean",
label: label
}, mean);
handleFloat$1(gauge, {
group: group,
stat: "min",
label: label
}, min);
handleFloat$1(gauge, {
group: group,
stat: "max",
label: label
}, max);
handleFloat$1(gauge, {
group: group,
stat: "sum",
label: label
}, sum);
// stdDev is optional (undefined when not computed); only emitted when present.
if (stdDev !== undefined) {
return handleFloat$1(gauge, {
group: group,
stat: "stdDev",
label: label
}, stdDev);
}
}
var BenchmarkSummaryData = {
labelSchema: labelSchema,
gauge: gauge,
set: set
};
// Thin wrappers over the module-level metrics declared at the top of the file.
// Units of `duration` are not visible here — presumably milliseconds; confirm
// against callers.
function incrementLoadEntityDurationCounter(duration) {
loadEntitiesDurationCounter.inc(duration);
}
function incrementEventRouterDurationCounter(duration) {
eventRouterDurationCounter.inc(duration);
}
function incrementExecuteBatchDurationCounter(duration) {
executeBatchDurationCounter.inc(duration);
}
// Records the latest known block height for one chain.
function setSourceChainHeight(blockNumber, chainId) {
sourceChainHeight.labels({
chainId: chainId
}).set(blockNumber);
}
// Latches the synced-to-head gauge to 1; nothing in this file resets it.
function setAllChainsSyncedToHead() {
allChainsSyncedToHead.set(1);
}
// Label schema with a single string `label` field.
var labelSchema$1 = S$RescriptSchema.schema(function (s) {
return {
label: s.m(S$RescriptSchema.string)
};
});
var gauge$1 = makeOrThrow$1("benchmark_counters", "All counters collected during indexer benchmark", labelSchema$1);
// Records a benchmark counter value plus the total runtime under a fixed label.
function set$1(label, millis, totalRuntimeMillis) {
handleFloat$1(gauge$1, {
label: label
}, millis);
handleFloat$1(gauge$1, {
label: "Total Run Time (ms)"
}, totalRuntimeMillis);
}
var BenchmarkCounters = {
labelSchema: labelSchema$1,
gauge: gauge$1,
set: set$1
};
// Label schema for per-chain metrics: a single "chainId" label, coerced from
// string to int so callers can pass the numeric chain id directly.
var chainIdLabelsSchema = S$RescriptSchema.object(function (s) {
return s.f("chainId", S$RescriptSchema.coerce(S$RescriptSchema.string, S$RescriptSchema.$$int));
});
// envio_info: standard Prometheus "info" pattern — value fixed at 1, the
// payload lives in the `version` label.
var gauge$2 = makeOrThrow$1("envio_info", "Information about the indexer", S$RescriptSchema.schema(function (s) {
return {
version: s.m(S$RescriptSchema.string)
};
}));
function set$2(version) {
handleInt$1(gauge$2, {
version: version
}, 1);
}
var Info = {
gauge: gauge$2,
set: set$2
};
// Per-chain gauge: number of indexed addresses (static + dynamic).
var gauge$3 = makeOrThrow$1("envio_indexing_addresses", "The number of addresses indexed on chain. Includes both static and dynamic addresses.", chainIdLabelsSchema);
function set$3(addressesCount, chainId) {
handleInt$1(gauge$3, chainId, addressesCount);
}
var IndexingAddresses = {
gauge: gauge$3,
set: set$3
};
// Per-chain gauge: configured maximum source-query concurrency.
var gauge$4 = makeOrThrow$1("envio_indexing_max_concurrency", "The maximum number of concurrent queries to the chain data-source.", chainIdLabelsSchema);
function set$4(maxConcurrency, chainId) {
handleInt$1(gauge$4, chainId, maxConcurrency);
}
var IndexingMaxConcurrency = {
gauge: gauge$4,
set: set$4
};
// Per-chain gauge: currently executing source queries.
var gauge$5 = makeOrThrow$1("envio_indexing_concurrency", "The number of executing concurrent queries to the chain data-source.", chainIdLabelsSchema);
function set$5(concurrency, chainId) {
handleInt$1(gauge$5, chainId, concurrency);
}
var IndexingConcurrency = {
gauge: gauge$5,
set: set$5
};
// Per-chain gauge: number of fetch partitions.
var gauge$6 = makeOrThrow$1("envio_indexing_partitions", "The number of partitions used to split fetching logic by addresses and block ranges.", chainIdLabelsSchema);
function set$6(partitionsCount, chainId) {
handleInt$1(gauge$6, chainId, partitionsCount);
}
var IndexingPartitions = {
gauge: gauge$6,
set: set$6
};
// Per-chain counters exposed for callers to increment via the SafeCounter
// helpers; no dedicated setters are defined in this file.
var counter = makeOrThrow("envio_indexing_idle_time", "The number of milliseconds the indexer source syncing has been idle. A high value may indicate the source sync is a bottleneck.", chainIdLabelsSchema);
var IndexingIdleTime = {
counter: counter
};
var counter$1 = makeOrThrow("envio_indexing_source_waiting_time", "The number of milliseconds the indexer has been waiting for new blocks.", chainIdLabelsSchema);
var IndexingSourceWaitingTime = {
counter: counter$1
};
var counter$2 = makeOrThrow("envio_indexing_query_time", "The number of milliseconds spent performing queries to the chain data-source.", chainIdLabelsSchema);
var IndexingQueryTime = {
counter: counter$2
};
// Per-chain gauge: current indexing buffer occupancy.
var gauge$7 = makeOrThrow$1("envio_indexing_buffer_size", "The current number of items in the indexing buffer.", chainIdLabelsSchema);
function set$7(bufferSize, chainId) {
handleInt$1(gauge$7, chainId, bufferSize);
}
var IndexingBufferSize = {
gauge: gauge$7,
set: set$7
};
// Unlabeled gauge: the single target buffer size shared by all chains.
var gauge$8 = new PromClient.Gauge({
name: "envio_indexing_target_buffer_size",
help: "The target buffer size per chain for indexing. The actual number of items in the queue may exceed this value, but the indexer always tries to keep the buffer filled up to this target."
});
function set$8(targetBufferSize) {
gauge$8.set(targetBufferSize);
}
var IndexingTargetBufferSize = {
gauge: gauge$8,
set: set$8
};
// Legacy metric kept alongside the new envio_* name; set$9 writes both so
// existing dashboards keep working.
var deprecatedGauge = new PromClient.Gauge({
name: "chain_block_height_fully_fetched",
help: "Block height fully fetched by indexer",
labelNames: ["chainId"]
});
var gauge$9 = makeOrThrow$1("envio_indexing_buffer_block_number", "The highest block number that has been fully fetched by the indexer.", chainIdLabelsSchema);
function set$9(blockNumber, chainId) {
deprecatedGauge.labels({
chainId: chainId
}).set(blockNumber);
handleInt$1(gauge$9, chainId, blockNumber);
}
var IndexingBufferBlockNumber = {
deprecatedGauge: deprecatedGauge,
gauge: gauge$9,
set: set$9
};
// Per-chain gauge: configured end block (inclusive).
var gauge$10 = makeOrThrow$1("envio_indexing_end_block", "The block number to stop indexing at. (inclusive)", chainIdLabelsSchema);
function set$10(endBlock, chainId) {
handleInt$1(gauge$10, chainId, endBlock);
}
var IndexingEndBlock = {
gauge: gauge$10,
set: set$10
};
// Label schema for source-scoped metrics: source name + coerced chainId.
var sourceLabelsSchema = S$RescriptSchema.schema(function (s) {
return {
source: s.m(S$RescriptSchema.string),
chainId: s.m(S$RescriptSchema.coerce(S$RescriptSchema.string, S$RescriptSchema.$$int))
};
});
var gauge$11 = makeOrThrow$1("envio_source_height", "The latest known block number reported by the source. This value may lag behind the actual chain height, as it is updated only when queried.", sourceLabelsSchema);
function set$11(sourceName, chainId, blockNumber) {
handleInt$1(gauge$11, {
source: sourceName,
chainId: chainId
}, blockNumber);
}
var SourceHeight = {
gauge: gauge$11,
set: set$11
};
// Histogram of get-height request durations (seconds); buckets 0.1/0.5/1/10.
// `startTimer(labels)` returns a stop function that records the observation.
var startTimer = makeSafeHistogramOrThrow("envio_source_get_height_duration", "Duration of the source get height requests in seconds", sourceLabelsSchema, [
0.1,
0.5,
1,
10
]);
var SourceGetHeightDuration = {
startTimer: startTimer
};
// Per-chain count of detected reorgs (stored as a gauge, incremented by 1).
var gauge$12 = makeOrThrow$1("envio_reorg_count", "Total number of reorgs detected", chainIdLabelsSchema);
function increment$2(chain) {
increment$1(gauge$12, chain);
}
var ReorgCount = {
gauge: gauge$12,
increment: increment$2
};
// Per-chain gauge: block number where a hash mismatch was last observed.
var gauge$13 = makeOrThrow$1("envio_reorg_detection_block_number", "The block number where reorg was detected the last time. This doesn't mean that the block was reorged, this is simply where we found block hash to be different.", chainIdLabelsSchema);
function set$12(blockNumber, chain) {
handleInt$1(gauge$13, chain, blockNumber);
}
var ReorgDetectionBlockNumber = {
gauge: gauge$13,
set: set$12
};
// Boolean flag gauge: 1 when indexing is within the reorg threshold, else 0.
var gauge$14 = new PromClient.Gauge({
name: "envio_reorg_threshold",
help: "Whether indexing is currently within the reorg threshold"
});
function set$13(isInReorgThreshold) {
gauge$14.set(isInReorgThreshold ? 1 : 0);
}
var ReorgThreshold = {
gauge: gauge$14,
set: set$13
};
// Boolean flag gauge: 1 when rollback-on-reorg is enabled, else 0.
var gauge$15 = new PromClient.Gauge({
name: "envio_rollback_enabled",
help: "Whether rollback on reorg is enabled"
});
function set$14(enabled) {
gauge$15.set(enabled ? 1 : 0);
}
var RollbackEnabled = {
gauge: gauge$15,
set: set$14
};
// Rollback totals: cumulative rollback time (ms) and number of rollbacks;
// increment$3 updates both in one call.
var timeCounter = new PromClient.Counter({
name: "envio_rollback_time",
help: "Rollback on reorg total time in milliseconds"
});
var counter$3 = new PromClient.Counter({
name: "envio_rollback_count",
help: "Number of successful rollbacks on reorg"
});
function increment$3(timeMillis) {
timeCounter.inc(Hrtime.intFromMillis(timeMillis));
counter$3.inc();
}
var RollbackSuccess = {
timeCounter: timeCounter,
counter: counter$3,
increment: increment$3
};
// Per-entity history-prune totals: time spent (ms) and prune count.
var entityNameLabelsSchema = S$RescriptSchema.object(function (s) {
return s.f("entity", S$RescriptSchema.string);
});
var timeCounter$1 = makeOrThrow("envio_rollback_history_prune_time", "The total time spent pruning entity history which is not in the reorg threshold. (milliseconds)", entityNameLabelsSchema);
var counter$4 = makeOrThrow("envio_rollback_history_prune_count", "Number of successful entity history prunes", entityNameLabelsSchema);
function increment$4(timeMillis, entityName) {
handleInt(timeCounter$1, entityName, Hrtime.intFromMillis(timeMillis));
increment(counter$4, entityName);
}
var RollbackHistoryPrune = {
entityNameLabelsSchema: entityNameLabelsSchema,
timeCounter: timeCounter$1,
counter: counter$4,
increment: increment$4
};
// Per-chain gauge: block number of the most recent rollback target.
var gauge$16 = makeOrThrow$1("envio_rollback_target_block_number", "The block number reorg was rollbacked to the last time.", chainIdLabelsSchema);
function set$15(blockNumber, chain) {
handleInt$1(gauge$16, chain, blockNumber);
}
var RollbackTargetBlockNumber = {
gauge: gauge$16,
set: set$15
};
// Per-chain gauge: latest block number in the batch currently being processed.
var gauge$17 = makeOrThrow$1("envio_processing_block_number", "The latest item block number included in the currently processing batch for the chain.", chainIdLabelsSchema);
function set$16(blockNumber, chainId) {
handleInt$1(gauge$17, chainId, blockNumber);
}
var ProcessingBlockNumber = {
gauge: gauge$17,
set: set$16
};
// Per-chain gauge: size of the batch currently being processed.
var gauge$18 = makeOrThrow$1("envio_processing_batch_size", "The number of items included in the currently processing batch for the chain.", chainIdLabelsSchema);
function set$17(batchSize, chainId) {
handleInt$1(gauge$18, chainId, batchSize);
}
var ProcessingBatchSize = {
gauge: gauge$18,
set: set$17
};
// Unlabeled gauge: configured maximum batch size.
var gauge$19 = new PromClient.Gauge({
name: "envio_processing_max_batch_size",
help: "The maximum number of items to process in a single batch."
});
function set$18(maxBatchSize) {
gauge$19.set(maxBatchSize);
}
var ProcessingMaxBatchSize = {
gauge: gauge$19,
set: set$18
};
// Per-chain gauge: latest block processed and persisted.
var gauge$20 = makeOrThrow$1("envio_progress_block_number", "The block number of the latest block processed and stored in the database.", chainIdLabelsSchema);
function set$19(blockNumber, chainId) {
handleInt$1(gauge$20, chainId, blockNumber);
}
var ProgressBlockNumber = {
gauge: gauge$20,
set: set$19
};
// Legacy "events_processed" gauge kept alongside the new envio_* name;
// set$20 writes both so existing dashboards keep working.
var deprecatedGauge$1 = new PromClient.Gauge({
name: "events_processed",
help: "Total number of events processed",
labelNames: ["chainId"]
});
var gauge$21 = makeOrThrow$1("envio_progress_events_count", "The number of events processed and reflected in the database.", chainIdLabelsSchema);
function set$20(processedCount, chainId) {
deprecatedGauge$1.labels({
chainId: chainId
}).set(processedCount);
handleInt$1(gauge$21, chainId, processedCount);
}
var ProgressEventsCount = {
deprecatedGauge: deprecatedGauge$1,
gauge: gauge$21,
set: set$20
};
// Label schema for effect-scoped metrics: a single "effect" name label.
var effectLabelsSchema = S$RescriptSchema.object(function (s) {
return s.f("effect", S$RescriptSchema.string);
});
// Per-effect gauge: total effect calls (handler executions + cache hits).
var gauge$22 = makeOrThrow$1("envio_effect_calls_count", "The number of calls to the effect. Including both handler execution and cache hits.", effectLabelsSchema);
function set$21(callsCount, effectName) {
handleInt$1(gauge$22, effectName, callsCount);
}
var EffectCallsCount = {
gauge: gauge$22,
set: set$21
};
// Per-effect gauge: number of cached items.
var gauge$23 = makeOrThrow$1("envio_effect_cache_count", "The number of items in the effect cache.", effectLabelsSchema);
function set$22(count, effectName) {
handleInt$1(gauge$23, effectName, count);
}
var EffectCacheCount = {
gauge: gauge$23,
set: set$22
};
// Label schema for storage-load metrics: a single "operation" name label.
var operationLabelsSchema = S$RescriptSchema.object(function (s) {
return s.f("operation", S$RescriptSchema.string);
});
// timeCounter$2 accumulates wall-clock time of overlapping load groups (see
// endOperation); totalTimeCounter accumulates per-call durations instead.
var timeCounter$2 = makeOrThrow("envio_storage_load_time", "Processing time taken to load data from storage. (milliseconds)", operationLabelsSchema);
var totalTimeCounter = makeOrThrow("envio_storage_load_total_time", "Cumulative time spent loading data from storage during the indexing process. (milliseconds)", operationLabelsSchema);
var counter$5 = makeOrThrow("envio_storage_load_count", "Cumulative number of successful storage load operations during the indexing process.", operationLabelsSchema);
var whereSizeCounter = makeOrThrow("envio_storage_load_where_size", "Cumulative number of filter conditions ('where' items) used in storage load operations during the indexing process.", operationLabelsSchema);
var sizeCounter = makeOrThrow("envio_storage_load_size", "Cumulative number of records loaded from storage during the indexing process.", operationLabelsSchema);
// In-flight bookkeeping per operation name: pendingCount of concurrent calls
// and the timer started by the first of them.
var operations = {};
// Marks the start of a load; returns a timer to pass back to endOperation.
function startOperation(operation) {
var operationRef = operations[operation];
if (operationRef !== undefined) {
operationRef.pendingCount = operationRef.pendingCount + 1 | 0;
} else {
operations[operation] = {
pendingCount: 1,
timerRef: Hrtime.makeTimer()
};
}
return Hrtime.makeTimer();
}
// Marks the end of a load. Assumes a matching startOperation ran first —
// operationRef is dereferenced without a guard. When the last overlapping
// call finishes, records the group's wall-clock time and clears the entry.
function endOperation(timerRef, operation, whereSize, size) {
var operationRef = operations[operation];
operationRef.pendingCount = operationRef.pendingCount - 1 | 0;
if (operationRef.pendingCount === 0) {
handleInt(timeCounter$2, operation, Hrtime.intFromMillis(Hrtime.toMillis(Hrtime.timeSince(operationRef.timerRef))));
Utils.Dict.deleteInPlace(operations, operation);
}
handleInt(totalTimeCounter, operation, Hrtime.intFromMillis(Hrtime.toMillis(Hrtime.timeSince(timerRef))));
increment(counter$5, operation);
handleInt(whereSizeCounter, operation, whereSize);
handleInt(sizeCounter, operation, size);
}
var StorageLoad = {
operationLabelsSchema: operationLabelsSchema,
timeCounter: timeCounter$2,
totalTimeCounter: totalTimeCounter,
counter: counter$5,
whereSizeCounter: whereSizeCounter,
sizeCounter: sizeCounter,
operations: operations,
startOperation: startOperation,
endOperation: endOperation
};
// CommonJS exports of every metric, schema, and namespace defined above.
exports.loadEntitiesDurationCounter = loadEntitiesDurationCounter;
exports.eventRouterDurationCounter = eventRouterDurationCounter;
exports.executeBatchDurationCounter = executeBatchDurationCounter;
exports.allChainsSyncedToHead = allChainsSyncedToHead;
exports.sourceChainHeight = sourceChainHeight;
exports.Labels = Labels;
exports.metricNames = metricNames;
exports.MakeSafePromMetric = MakeSafePromMetric;
exports.SafeCounter = SafeCounter;
exports.SafeGauge = SafeGauge;
exports.makeSafeHistogramOrThrow = makeSafeHistogramOrThrow;
exports.BenchmarkSummaryData = BenchmarkSummaryData;
exports.incrementLoadEntityDurationCounter = incrementLoadEntityDurationCounter;
exports.incrementEventRouterDurationCounter = incrementEventRouterDurationCounter;
exports.incrementExecuteBatchDurationCounter = incrementExecuteBatchDurationCounter;
exports.setSourceChainHeight = setSourceChainHeight;
exports.setAllChainsSyncedToHead = setAllChainsSyncedToHead;
exports.BenchmarkCounters = BenchmarkCounters;
exports.chainIdLabelsSchema = chainIdLabelsSchema;
exports.Info = Info;
exports.IndexingAddresses = IndexingAddresses;
exports.IndexingMaxConcurrency = IndexingMaxConcurrency;
exports.IndexingConcurrency = IndexingConcurrency;
exports.IndexingPartitions = IndexingPartitions;
exports.IndexingIdleTime = IndexingIdleTime;
exports.IndexingSourceWaitingTime = IndexingSourceWaitingTime;
exports.IndexingQueryTime = IndexingQueryTime;
exports.IndexingBufferSize = IndexingBufferSize;
exports.IndexingTargetBufferSize = IndexingTargetBufferSize;
exports.IndexingBufferBlockNumber = IndexingBufferBlockNumber;
exports.IndexingEndBlock = IndexingEndBlock;
exports.sourceLabelsSchema = sourceLabelsSchema;
exports.SourceHeight = SourceHeight;
exports.SourceGetHeightDuration = SourceGetHeightDuration;
exports.ReorgCount = ReorgCount;
exports.ReorgDetectionBlockNumber = ReorgDetectionBlockNumber;
exports.ReorgThreshold = ReorgThreshold;
exports.RollbackEnabled = RollbackEnabled;
exports.RollbackSuccess = RollbackSuccess;
exports.RollbackHistoryPrune = RollbackHistoryPrune;
exports.RollbackTargetBlockNumber = RollbackTargetBlockNumber;
exports.ProcessingBlockNumber = ProcessingBlockNumber;
exports.ProcessingBatchSize = ProcessingBatchSize;
exports.ProcessingMaxBatchSize = ProcessingMaxBatchSize;
exports.ProgressBlockNumber = ProgressBlockNumber;
exports.ProgressEventsCount = ProgressEventsCount;
exports.effectLabelsSchema = effectLabelsSchema;
exports.EffectCallsCount = EffectCallsCount;
exports.EffectCacheCount = EffectCacheCount;
exports.StorageLoad = StorageLoad;
/* loadEntitiesDurationCounter Not a pure module */