@interopio/gateway
[@interopio/gateway on npm](https://www.npmjs.com/package/@interopio/gateway)
4 lines • 795 kB
Source Map (JSON)
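The body below is a standard V3 source map: `sources` lists the original TypeScript file paths and `sourcesContent` carries their full text, with the two arrays index-aligned. As a rough illustration (not part of the package), a map like this could be inspected as shown in the sketch below; the file name `gateway.js.map` is a hypothetical placeholder.

```ts
// Minimal sketch: read a V3 source map and list its embedded sources.
// Assumes Node.js; "gateway.js.map" is a hypothetical local copy of the map.
import { readFileSync } from 'fs';

interface SourceMapV3 {
  version: number;            // 3 for this format
  sources: string[];          // original file paths, relative to the map
  sourcesContent?: string[];  // index-aligned original file contents
}

const map = JSON.parse(readFileSync('gateway.js.map', 'utf8')) as SourceMapV3;

// sources[i] names the file whose text is stored in sourcesContent[i]
map.sources.forEach((src, i) => {
  const size = map.sourcesContent?.[i]?.length ?? 0;
  console.log(`${src}: ${size} characters`);
});
```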
{
"version": 3,
"sources": ["../src/common/filters.ts", "../src/logger.ts", "../src/common/utilities.ts", "../src/domains/metrics/filters.ts", "../src/metrics/common.ts", "../src/worker/nodeWorker.ts", "../src/worker/core.ts", "../src/metrics/worker-common.ts", "../src/metrics/publisher.ts", "../src/metrics/rest.ts", "../src/metrics/file.ts", "../src/main/index.ts", "../src/main/main.ts", "../src/common/encoders.ts", "../src/gateway/core.ts", "../src/constants.ts", "../src/node.ts", "../src/common/messages.ts", "../src/common/types.ts", "../src/reason.ts", "../src/restrictions/compat.ts", "../src/restrictions/antlr4/RestrictionsParser.ts", "../src/restrictions/antlr4/RestrictionsLexer.ts", "../src/restrictions/antlr4/RestrictionsVisitor.ts", "../src/restrictions/visitor.ts", "../src/restrictions.ts", "../src/common/ids.ts", "../src/state/core.ts", "../src/state/peers.ts", "../src/local-node/core.ts", "../src/domains/global/constants.ts", "../src/domains/global/core.ts", "../src/domains/global/messages.ts", "../src/common/peerIdentity.ts", "../src/common/jwt.ts", "../src/common/tokens.ts", "../src/common/context/ops.ts", "../src/address.ts", "../src/domains/global/state.ts", "../src/common/actionLogger.ts", "../src/common/context/constants.ts", "../src/common/context/messages.ts", "../src/common/context/state.ts", "../src/auth/impl.ts", "../src/auth/core.ts", "../src/common/asyncSerializer.ts", "../src/auth/basic.ts", "../src/auth/oauth2.ts", "../src/auth/custom.ts", "../src/mesh/channel-mesh.ts", "../src/domains/agm/core.ts", "../src/domains/agm/constants.ts", "../src/domains/agm/calls.ts", "../src/domains/agm/messages.ts", "../src/domains/agm/utilities.ts", "../src/domains/agm/mthds.ts", "../src/domains/agm/state.ts", "../src/domains/agm/subscriptions.ts", "../src/domains/activity/constants.ts", "../src/domains/activity/core.ts", "../src/state/types/activity.ts", "../src/domains/activity/state.ts", "../src/domains/activity/activities.ts", "../src/domains/activity/messages.ts", "../src/domains/activity/gatewayRequest.ts", "../src/domains/activity/factories.ts", "../src/domains/metrics/core.ts", "../src/domains/metrics/constants.ts", "../src/state/types/metrics.ts", "../src/metrics/custom.ts", "../src/gateway/metrics.ts", "../src/domains/context/core.ts", "../src/state/types/context.ts", "../src/domains/context/constants.ts", "../src/domains/context/messages.ts", "../src/domains/bus/core.ts", "../src/domains/bus/constants.ts", "../src/domains/bus/messages.ts", "../src/gateway/clients.ts", "../src/gateway/scavenger.ts", "../src/versions.ts", "../src/state/types/common.ts", "../src/mesh-node/core.ts", "../src/common/ws/factory.ts", "../src/mesh/ws/broker/client.ts", "../src/mesh/compat.ts", "../src/mesh/ws-mesh.ts", "../src/mesh/ws-mesh/relay.ts", "../src/mesh/ws-headers.ts", "../src/mesh/rest-directory.ts", "../src/gateway/visibility.ts", "../src/mesh/static-directory.ts"],
"sourcesContent": ["import {IOGateway} from '../../gateway';\n\nfunction isExactMatch(matcher: IOGateway.Filtering.Matcher): matcher is IOGateway.Filtering.ExactMatch {\n return typeof matcher === 'string';\n}\n\nexport function valueMatches(lhs: IOGateway.Filtering.Matcher, rhs?: unknown): boolean {\n if (isExactMatch(lhs)) {\n return lhs === rhs\n } else {\n if (typeof rhs === 'string') {\n return lhs.test(rhs);\n }\n return false;\n }\n}\n\nexport function valuesMatch(matchers: IOGateway.Filtering.Matcher[], value?: string): boolean {\n for (const matcher of matchers) {\n if (valueMatches(matcher, value)) {\n return true;\n }\n }\n return false;\n}\n//https://cljs.github.io/api/syntax/regex\nconst CLOJURESCRIPT_REGEX = /^#(\\(\\?(?<flags>[im]*)\\))?(?<pattern>.*)$/;\n\nconst JAVASCRIPT_REGEX = /^\\/(?<pattern>(?:[^/\\\\]|\\\\.)+)\\/(?<flags>[gimsuyd]*)$/;\n\nexport function regexify(expression: IOGateway.Filtering.Matcher): IOGateway.Filtering.Matcher {\n if (typeof expression === 'string') {\n let groups = JAVASCRIPT_REGEX.exec(expression)?.groups;\n groups ??= CLOJURESCRIPT_REGEX.exec(expression)?.groups;\n\n if (groups?.pattern) {\n return new RegExp(groups.pattern, groups.flags ?? '');\n }\n\n }\n return expression;\n}\n", "import * as core from '@interopio/gateway/logging/core';\n\nexport function getLogger(name: string) {\n return core.getLogger(name);\n}\n", "import { DomainKeys, type Peer } from '../state/types/state.ts';\nimport type { JoinedPeer } from '../state/peers.ts';\nimport { type Draft, produce } from 'immer';\n\nexport type WithRequired<Type, Key extends keyof Type> = Type & {\n [Property in Key]-?: Type[Property]\n}\n\nexport type Omit<Type, Key extends keyof Type> = Pick<Type, Exclude<keyof Type, Key>>\nexport type PartialBy<Type, Key extends keyof Type> = Pick<Partial<Type>, Key> & Omit<Type, Key>;\n\nexport function removev<T>(el: T, c: T[]): T[] {\n return c.reduce((r: T[], e: T) => {\n if (e === el) {\n return r;\n }\n return r.concat(e);\n }, [] as T[]);\n}\n\nexport function ensureDomain<D extends DomainKeys>(peer: Peer, domain: D): JoinedPeer<D> {\n if (peer[domain]) {\n return peer as JoinedPeer<D>;\n }\n return produce(peer, (p: Draft<Peer>) => {\n /* eslint-disable @typescript-eslint/no-explicit-any */\n p[domain] = {} as unknown as any;\n }) as JoinedPeer<D>;\n}\n\nexport function friendlily(o) {\n // Set => array\n // Map => object\n // function is not tested\n if (o instanceof Set) {\n return Array.from(o as Set<unknown>).map(friendlily);\n } else if (o instanceof Array) {\n return o.map(friendlily);\n } else if (o instanceof Map) {\n return friendlily(Object.fromEntries(o));\n } else if (o instanceof Object) {\n return Object.keys(o).reduce((a, k) => {\n a[k] = friendlily(o[k]);\n return a;\n }, {});\n } else {\n return o;\n }\n}\n\nfunction replacerWithPath(replacer: (this: unknown, field: string | number, value: unknown, path: string) => unknown): (this: unknown, field: string | number, value: unknown) => unknown {\n const paths = new Map<unknown, string>();\n return function (this: unknown, field, value) {\n const path = paths.get(this) + (Array.isArray(this) ? 
`[${field}]` : `.${field}`);\n if (value === Object(value)) paths.set(value, path);\n if (value instanceof RegExp) value = `#${value.source}`;\n return replacer.call(this, field, value, path.replace(/undefined\\.\\.?/, ''));\n };\n}\n/**\n * Creates a function to be used as JSON.stringify() replacer that will replace values for fields in specified paths.\n *\n * @param paths field paths to be sanitized.\n */\nexport function sanitizerOf(...paths: string[]): (this: unknown, field: string | number, value: unknown) => unknown {\n return replacerWithPath((field: string | number, value: unknown, path: string) => {\n if (paths.indexOf(path) !== -1) {\n return '******';\n }\n return value;\n });\n}\n\nconst KEY_COMPARE_FN: ((a: string, b: string) => number) | undefined = (a: string, b: string) => a.localeCompare(b);\n/**\n * Converts an object to a string representations that is stable enough to be used as property or map key.\n */\nexport function objectAsKey(obj: Record<string, unknown>) {\n return JSON.stringify(obj, Object.keys(obj).sort(KEY_COMPARE_FN));\n}\n\nexport function some<T>(e: T | undefined): e is T {\n return !!e;\n}\n", "import {valueMatches, valuesMatch} from '../../common/filters.js';\nimport {IOGateway} from '../../../gateway';\n\nexport function publisherMatches(publisherFilter: Record<string, IOGateway.Filtering.Matcher>,\n repoId: IOGateway.Metrics.Identity): boolean {\n for (const [k, v] of Object.entries(publisherFilter)) {\n const rv = repoId[k];\n if (!valueMatches(v, rv)) {\n return false;\n }\n }\n return true;\n}\n\nexport function metricMatches(metricFilter: IOGateway.Filtering.Matcher[],\n name: IOGateway.Metrics.Name): boolean {\n return valuesMatch(metricFilter, name);\n}\n\nexport function isAllowed(repoId: IOGateway.Metrics.Identity,\n name: IOGateway.Metrics.Name,\n filters?: IOGateway.MetricFilters): boolean {\n const result = filters?.publishers?.reduce((r: boolean | undefined, filterConfig) => {\n if (publisherMatches(filterConfig.identity, repoId)) {\n const blocked = filterConfig.metrics.block ?? filterConfig.metrics.blacklist ?? [];\n if (blocked.length > 0 && metricMatches(blocked, name)) {\n return false;\n } else {\n const allowed = filterConfig.metrics.allow ?? filterConfig.metrics.whitelist ?? [];\n return r || metricMatches(allowed, name);\n }\n }\n return r;\n }, undefined);\n if (result !== undefined) {\n return result;\n }\n const nonMatched: IOGateway.Filtering.Action = filters?.non_matched ?? 
'allow';\n return nonMatched === 'allow' || nonMatched === 'whitelist';\n}\n", "import {DataPoint} from './core.js';\nimport {getLogger} from '../logger.js';\nimport {objectAsKey} from '../common/utilities.js';\nimport {IOGateway} from '../../gateway';\n\nconst log = getLogger('gateway.metrics.common');\n\nexport type MetricsEvent = StatusUpdateEvent | DataPointEvent;\n\nexport type StatusUpdateEvent = {\n identity: IOGateway.Metrics.Identity\n status: IOGateway.Metrics.Status\n metadata?: Record<string, unknown>\n}\n\nexport type DataPointEvent = {\n identity: IOGateway.Metrics.Identity,\n metric: IOGateway.Metrics.Definition,\n datapoint: Omit<DataPoint, 'name'>\n}\n\nfunction extractMetadata(metadata?: Record<string, unknown>) {\n if (metadata && Object.keys(metadata).length > 0) {\n return metadata;\n }\n return undefined;\n}\n\nfunction value(v: string | number | object): IOGateway.Metrics.Value {\n if (typeof v === 'object') {\n const composite = Object.entries(v).reduce((acc, [key, val]) => {\n acc[key] = value(val);\n return acc;\n }, {});\n return {value: composite};\n } else if (typeof v === 'string') {\n return {value: v};\n } else {\n return {value: v};\n }\n}\n\nfunction datapoint(dp: Omit<DataPoint, 'name'>): IOGateway.Metrics.DataPoint {\n return {timestamp: dp.timestamp ?? Date.now(), value: value(dp.value)};\n}\n\n\nfunction groupBy<T>(data: Iterable<T>, keyFn: (t: T) => string): Iterable<[string, T[]]> {\n return Array.from(data).reduce((acc, t) => {\n const key = keyFn(t);\n const values = acc.get(key) ?? [];\n if (values.length === 0) {\n acc.set(key, values);\n }\n values.push(t);\n return acc;\n }, new Map<string, T[]>());\n}\n\n/**\n * Conflates a vector of metric updates based on their repository's identity.\n */\nexport function* conflateRepo(maxCount: number, data: MetricsEvent[]): Generator<IOGateway.Metrics.Update> {\n if (log.enabledFor('trace')) {\n log.debug(`conflating ${JSON.stringify(data)}`);\n }\n const dataByIdentity = groupBy(data, (update) => objectAsKey(update.identity));\n\n let current: IOGateway.Metrics.Update | undefined = undefined;\n for (const [, dps] of dataByIdentity) {\n // need to port partition-all?\n for (const e of dps) {\n if (e['status']) {\n const {identity, status, metadata} = e as StatusUpdateEvent;\n // this is a status event\n // emit the current + status message and clear current\n if (current) {\n yield current;\n }\n yield {identity, status, metrics: undefined, metadata: extractMetadata(metadata)};\n current = undefined;\n } else {\n\n // this is a data point\n const {metric: definition, identity, datapoint: dp} = (e as DataPointEvent);\n const name = definition?.name;\n if (!current) {\n current = {identity};\n }\n current.metrics = current.metrics ?? {};\n current.metrics[name] = current.metrics[name] ?? {definition: {...definition}, datapoints: []};\n delete (current.metrics[name].definition as { name?: string })['name'];\n\n current.metrics[name].datapoints.push(datapoint(dp));\n }\n }\n if (current) {\n yield current;\n }\n }\n}\n", "import { fork, ChildProcess } from 'child_process';\nimport {EventEmitter} from 'events';\nimport {fileURLToPath} from 'url';\n\ntype ProcessMessage = {\n type: 'error' | 'message'\n message: string\n data?: unknown\n}\n\nexport class NodeWorker implements Worker {\n private readonly events = new EventEmitter();\n private readonly ps: ChildProcess;\n constructor(url: string | URL, args: readonly string[], options?) 
{\n this.events = new EventEmitter();\n const modulePath = fileURLToPath(url);\n this.ps = fork(modulePath, args, options);\n this.ps.on('message', (message: string) => {\n let parsed: ProcessMessage;\n try {\n parsed = JSON.parse(message) as ProcessMessage;\n if (parsed?.type === undefined) {\n this.events.emit('messageerror', new Error(`Invalid message: ${message}`));\n }\n } catch (e) {\n this.events.emit('messageerror', e);\n return;\n }\n if (parsed.type === 'error') {\n this.events.emit('error', new Error(parsed.message));\n } else {\n this.events.emit('message', {data: parsed.data});\n }\n });\n this.ps.stdout?.on('data', (data) => {\n console.log(data);\n });\n this.ps.stderr?.on('data', (data) => {\n console.error(data);\n });\n this.ps.on('error', (error) => {\n this.events.emit('error', error);\n });\n this.events.on('message', (message) => {\n if (this.onmessage) {\n this.onmessage(message);\n }\n });\n this.events.on('error', (error) => {\n if (this.onerror) {\n this.onerror(error);\n }\n });\n this.events.on('messageerror', (error) => {\n if (this.onmessageerror) {\n this.onmessageerror(error);\n }\n });\n\n }\n onerror: ((error) => void) | null = null;\n onmessage: ((message) => void) | null = null;\n onmessageerror: ((error) => void) | null = null;\n\n\n postMessage(message: unknown): void {\n this.ps.send({data: message}, (error) =>{\n if (error) {\n this.events.emit('error', error);\n }\n });\n }\n\n terminate(): void {\n this.ps.kill();\n }\n\n addEventListener<K extends keyof WorkerEventMap>(type: K, listener: (this: Worker, ev: WorkerEventMap[K]) => unknown, options?: boolean | AddEventListenerOptions): void {\n this.events.addListener(type, listener);\n }\n removeEventListener<K extends keyof WorkerEventMap>(type: K, listener: (this: Worker, ev: WorkerEventMap[K]) => unknown, options?: boolean | EventListenerOptions): void {\n this.events.removeListener(type, listener);\n }\n\n dispatchEvent(event: Event): boolean {\n return this.events.emit(event.type, event);\n }\n}\n", "\nasync function nodeWorker(url: URL, parameters?: Record<string, string>, options?) {\n const {NodeWorker} = await import('./nodeWorker.js');\n const args = Object.entries(parameters ?? {}).map(([k,v]) => `--${k}=${v}`);\n return new NodeWorker(url, args, options);\n}\n\nasync function webWorker(url: URL, parameters?: Record<string, string>, options?: & WorkerOptions) {\n Object.entries(parameters ?? {}).forEach(([k, v]) => url.searchParams.append(k, v));\n return new Worker(url, options);\n}\n\nexport function newWorker({url, parameters, options}: {url: URL, parameters?: Record<string, string>, options?: unknown}): Promise<Worker> {\n return (\n typeof process !== 'undefined'\n ? 
nodeWorker(url, parameters, options)\n : webWorker(url, parameters, options as WorkerOptions))\n ;\n}\n", "import type { Logger } from '../../types/logging/api';\nimport type { WorkerCommandType } from './publisher.ts';\nimport type { PublishCommand } from './publisher/types.ts';\nimport { Metrics } from '../../types/metrics/api';\n\n// common code for worker and in-process publisher execution\n\nasync function importPreload(log: Logger, module: string) {\n try {\n const preload = await import(module);\n if (typeof preload === 'function') {\n return await preload();\n }\n if (typeof preload.default === 'function') {\n return await preload.default();\n }\n }\n catch (e) {\n log.error(`failed to load preload`, e);\n }\n}\n\nasync function importHandler(log: Logger, publishFn: string, cfg: unknown) {\n log.debug(`loading publisher from ${publishFn}`);\n let factory: {\n name?: string,\n create: (cfg: unknown, logger: Logger) => Promise<(command: PublishCommand) => Promise<unknown>>;\n } = await import(publishFn);\n if (typeof factory['default'] === 'object') {\n factory = factory['default'] as typeof factory;\n }\n return await factory.create(cfg, log.child(`publisher.${factory.name ?? publishFn}`));\n}\n\nexport class WorkerCommandExecutor {\n readonly #log: Logger;\n #handler?: (command: PublishCommand) => Promise<unknown>;\n #cleanupArg: unknown;\n\n constructor(log: Logger) {\n this.#log = log;\n }\n\n async #startHandler(arg: WorkerCommandType['start']): Promise<void> {\n const { publishFn, cfg } = arg;\n if (cfg?.preload) {\n const result = await importPreload(this.#log, cfg.preload);\n if (typeof result === 'object') {\n for (const key in result) {\n cfg[key] = result[key];\n }\n }\n }\n let handler: (command: PublishCommand) => Promise<unknown>;\n if (typeof publishFn === 'function') {\n handler = publishFn;\n }\n else {\n handler = await importHandler(this.#log, publishFn, cfg);\n }\n const cleanupArg = await handler('start');\n this.#handler = handler;\n this.#cleanupArg = cleanupArg;\n }\n\n async #stopHandler() {\n const cleanup = await this.#handler?.('stop');\n if (typeof cleanup === 'function') {\n await cleanup(this.#cleanupArg);\n }\n }\n\n async #handleUpdate(update: Metrics.Update) {\n if (!this.#handler) {\n throw new Error('worker not started');\n }\n await this.#handler(update);\n }\n\n async execute<C extends keyof WorkerCommandType>(cmd: C, arg: WorkerCommandType[C]) {\n switch (cmd) {\n case 'start': {\n await this.#startHandler(arg as WorkerCommandType['start']);\n break;\n }\n case 'stop': {\n await this.#stopHandler();\n break;\n }\n case 'update': {\n await this.#handleUpdate(arg as Metrics.Update);\n break;\n }\n }\n }\n}\n", "import * as core from './core.ts';\nimport * as common from './common.ts';\nimport {newWorker} from '../worker/core.ts'\nimport type { Logger, LogEvent, LogConfig } from '../../types/logging/core';\nimport { getLogger, logEvent } from '@interopio/gateway/logging/core';\nimport {deserializeError} from 'serialize-error';\nimport {sanitizerOf} from '../common/utilities.ts';\nimport {isAllowed} from '../domains/metrics/filters.ts';\nimport { WorkerCommandExecutor } from './worker-common.ts';\nimport type { PublishCommand } from './publisher/types.ts';\nimport { IOGateway } from '../../gateway';\n\nconst log: Logger = getLogger('gateway.metrics.publisher');\n\nfunction makeStatusUpdate(repoId: IOGateway.Metrics.Identity,\n status: core.Status, metadata?: unknown): common.StatusUpdateEvent {\n const statusUpdate = {identity: repoId, status: 
{...status, 'updated-at': status.updated, 'expires-at': status.expires}};\n if (metadata) {\n statusUpdate['metadata'] = metadata;\n }\n return statusUpdate;\n}\n\ntype LatestStatus = {\n status: Omit<core.Status, 'timestamp' | 'expires' | 'expires-at' | 'updated-at'>\n timestamp?: number,\n expires?: number,\n initial: boolean,\n stopped: boolean\n}\n\nclass BasicRepository implements core.Repository {\n private running: boolean = true;\n private readonly latestStatus: LatestStatus = {\n initial: true,\n stopped: false,\n status: {state: 0, updated: Date.now(), description: 'Running'}\n };\n private heartbeatCleanup?: NodeJS.Timeout;\n private readonly metrics: Map<string, IOGateway.Metrics.Definition> = new Map();\n\n constructor(private readonly publisher: Publisher,\n private readonly filters: IOGateway.MetricFilters | undefined,\n private readonly repoId: IOGateway.Metrics.Identity,\n private readonly heartbeatInterval: number,\n private readonly metadata?: unknown) {\n }\n\n start() {\n log.info(`starting repository for ${JSON.stringify(this.repoId)} with heartbeat interval ${this.heartbeatInterval}ms`);\n const now = Date.now();\n if (this.latestStatus.stopped) {\n this.latestStatus.status.state = 0;\n this.latestStatus.status.description = 'Running';\n this.latestStatus.status.updated = now;\n this.latestStatus.initial = true;\n this.latestStatus.stopped = false;\n }\n if (this.heartbeatInterval >= 0) {\n const latest = this.latestStatus.status;\n const status: core.Status = {\n ...latest,\n timestamp: now,\n expires: now + (3 * this.heartbeatInterval)\n };\n const msg = makeStatusUpdate(this.repoId, status, this.metadata);\n this.publisher(msg);\n }\n this.running = true;\n if (this.heartbeatInterval > 0) {\n this.heartbeatCleanup = setInterval(() => {\n if (!this.running) {\n return;\n }\n const now = Date.now();\n const latest = this.latestStatus.status;\n const status: core.Status = {\n ...latest,\n timestamp: now,\n expires: now + (3 * this.heartbeatInterval)\n };\n const msg = makeStatusUpdate(this.repoId, status, this.metadata);\n this.publisher(msg);\n }, this.heartbeatInterval);\n }\n }\n\n stop() {\n log.info(`stopping repository for ${JSON.stringify(this.repoId)}`);\n this.running = false;\n if (this.heartbeatCleanup) {\n clearInterval(this.heartbeatCleanup);\n }\n const now = Date.now();\n this.latestStatus.stopped = true;\n this.status({\n state: -1,\n timestamp: now,\n updated: now,\n expires: now,\n description: 'Repository Stopped'\n });\n }\n\n add(definitions: core.Definitions) {\n for (const def of definitions) {\n const key = def.name;\n if (!(this.metrics.has(key) || !(this.filters === undefined || isAllowed(this.repoId, key, this.filters)))) {\n this.metrics.set(key, def);\n }\n }\n }\n\n publish(dataPoints: core.DataPoints) {\n if (this.running && dataPoints.length > 0) {\n for (const dp of dataPoints) {\n const metric = this.metrics.get(dp.name);\n if (metric) {\n const msg: common.DataPointEvent = {\n identity: this.repoId,\n metric: metric,\n datapoint: {...dp}\n };\n delete msg.datapoint['name'];\n this.publisher(msg);\n }\n }\n }\n }\n\n status(status: core.Status) {\n const publisherStatus: LatestStatus = {...this.latestStatus};\n Object.assign(publisherStatus.status, status);\n publisherStatus.timestamp = status.timestamp;\n publisherStatus.expires = status.expires;\n\n const statusChanged = publisherStatus.status.state !== this.latestStatus.status.state;\n if (this.latestStatus.initial || statusChanged) {\n publisherStatus.status.updated = 
status.timestamp;\n }\n publisherStatus.initial = false;\n\n Object.assign(this.latestStatus, publisherStatus);\n\n if (this.running || publisherStatus.stopped) {\n const msg = makeStatusUpdate(this.repoId, status, this.metadata);\n this.publisher(msg);\n }\n }\n}\n\nexport class BasicRepositoryFactory implements core.RepositoryFactory {\n constructor(\n private readonly config: IOGateway.BasicMetricsPublisherConfig | undefined,\n private readonly publisher: Publisher) {\n }\n\n repository(peerIdentity: IOGateway.Metrics.Identity, opts?: core.Options): core.Repository {\n const peerMetadata = opts?.metadata;\n const heartbeatInterval = this.config?.heartbeats ?? 0;\n return new BasicRepository(this.publisher, this.config?.filters, peerIdentity, heartbeatInterval, peerMetadata);\n }\n\n async shutdown() {\n await this.publisher.close();\n }\n\n on(listener: (event: common.MetricsEvent) => void) {\n this.publisher.on(listener);\n }\n}\n\n\nexport interface Publisher {\n (msg: common.MetricsEvent): void\n on(listener: (event: common.MetricsEvent) => void): void\n close(): Promise<void>\n}\n\nexport type WorkerConfig = {\n preload?: string,\n worker?: { url: URL, parameters: {logLevel?: LogConfig['level']}, options?: unknown }\n publishFn?: ((command: PublishCommand) => Promise<unknown>) | string\n}\n\n\nexport interface WorkerCommandType {\n \"start\": {\n cfg: BasicPublisherConfig | undefined,\n publishFn: ((command: PublishCommand) => Promise<unknown>) | string\n }\n \"update\": IOGateway.Metrics.Update\n \"stop\": unknown\n}\n\nexport type WorkerRequest<C extends keyof WorkerCommandType> = {\n id: number\n cmd: C,\n arg: WorkerCommandType[C]\n}\n\nexport type BasicPublisherConfig = IOGateway.BasicMetricsPublisherConfig & {preload?: string, worker?: {url: URL, options?: unknown}};\n\nexport async function basicPublisher<C extends BasicPublisherConfig>(\n cfg: C | undefined,\n xform: (data: common.MetricsEvent[]) => Iterable<IOGateway.Metrics.Update>,\n publishFn: ((command: PublishCommand) => Promise<unknown>) | string\n): Promise<Publisher> {\n log.info(`creating publisher with configuration\\n${JSON.stringify(cfg, sanitizerOf('authentication.password'))}`);\n\n try {\n const workerCfg = cfg?.worker;\n if (workerCfg === undefined) {\n return await publishInProc(publishFn as (command: PublishCommand) => Promise<unknown>, cfg ?? {}, xform);\n }\n else {\n const cfgWithoutWorker = cfg ?\n Object.fromEntries(\n Object.entries(cfg)\n .filter(([key]) => key !== 'worker')\n ) : {};\n return await publishInWorker(publishFn as string, cfgWithoutWorker, xform, workerCfg);\n }\n }\n catch (e) {\n log.error(`Failed to create basic publisher`, e);\n throw e;\n }\n}\n\nabstract class AbstractDroppingPublisher {\n readonly #cfg: IOGateway.BasicMetricsPublisherConfig;\n\n readonly #maxQueueSize: number;\n readonly #processing: Map<number, {resolver: {resolve, reject}}>;\n readonly #promises: Map<number, Promise<void>>;\n readonly #listeners: ((event: common.MetricsEvent) => void)[];\n #lastId: number;\n\n constructor(cfg: IOGateway.BasicMetricsPublisherConfig) {\n const {buffer_size: bufferSize} = cfg;\n this.#cfg = cfg;\n\n this.#maxQueueSize = bufferSize ?? 
1000;\n this.#processing = new Map<number, { resolver: { resolve, reject } }>();\n this.#promises = new Map<number, Promise<void>>();\n this.#listeners = [];\n this.#lastId = 0;\n }\n\n async start(publishFn: string | ((command: PublishCommand) => Promise<unknown>)) {\n try {\n /*const context = */await this.enqueue('start', {cfg: this.#cfg, publishFn})[1];\n log.info(`publisher [${publishFn}] started`);\n }\n catch (e) {\n log.error(`error starting publisher [${publishFn}]: ${e}`);\n throw e;\n }\n }\n\n next(u: IOGateway.Metrics.Update) {\n const [id, p] = this.enqueue<void, 'update'>('update', u);\n p.catch((e) => {\n log.warn(`update [${id}] error`, e);\n }).finally(() => {\n this.#promises.delete(id);\n });\n }\n\n async stop(rejectProcessing = false, timeout = 1000) {\n log.info(`stopping publisher worker rejectProcessing: ${rejectProcessing}, timeout: ${timeout}, promises: ${this.#promises.size}`);\n const rejectProcessingFn = (reason: 'now' | 'timeout') => {\n this.#processing.forEach(({resolver}) => {\n resolver.reject(new Error(`reject on stop: ${reason}`));\n });\n };\n if (rejectProcessing) {\n rejectProcessingFn('now');\n }\n const pending = Promise.allSettled(this.#promises);\n const stopPromise = this.enqueue('stop', undefined)[1];\n await new Promise<void>((resolve, reject) => {\n const timeoutId = setTimeout(() => {\n rejectProcessingFn('timeout');\n reject(new Error('timeout'));\n }, timeout);\n pending.then((outcomes) => {\n clearTimeout(timeoutId);\n for (const outcome of outcomes) {\n if (outcome.status === 'rejected') {\n log.error(`Pending future failed with ${outcome.reason} on stop`)\n }\n }\n resolve();\n });\n });\n await stopPromise;\n if (this.#promises.size > 0) {\n log.error(`uncleared promises: ${this.#promises.size}`);\n }\n log.info(`publisher worker stopped`);\n }\n\n private enqueue<T, K extends keyof WorkerCommandType>(cmd: K, arg: WorkerCommandType[K]): [number, Promise<T>] {\n if (this.#processing.size >= this.#maxQueueSize) {\n log.warn(`processing queue is full. dropping cmd: ${JSON.stringify(cmd)}`);\n }\n const id = ++this.#lastId;\n return [id, new Promise<T>((resolve, reject) => {\n this.#processing.set(id, {resolver: {resolve, reject}});\n this.processRequest(id, cmd, arg);\n })];\n }\n\n protected abstract processRequest<K extends keyof WorkerCommandType>(id: number, cmd: K, arg: WorkerCommandType[K]): void;\n\n protected processResult(id: number, result: unknown, error?: Error) {\n const p = this.#processing.get(id);\n if (p) {\n const {resolver} = p;\n try {\n if (error) {\n resolver.reject(error);\n } else {\n resolver.resolve(result);\n }\n } finally {\n this.#processing.delete(id);\n }\n } else {\n log.error(`unknown message id: ${id}`);\n }\n }\n\n emit(event: common.MetricsEvent) {\n this.#listeners.forEach(l => l(event));\n }\n\n on(listener: (event: common.MetricsEvent) => void) {\n this.#listeners.push(listener);\n }\n\n}\n\nclass InProcPublisher extends AbstractDroppingPublisher {\n readonly #executor: WorkerCommandExecutor;\n constructor(cfg: IOGateway.BasicMetricsPublisherConfig) {\n super(cfg);\n this.#executor = new WorkerCommandExecutor(log);\n }\n\n protected processRequest<K extends keyof WorkerCommandType>(id: number, cmd: K, arg: WorkerCommandType[K]) {\n this.#executor.execute(cmd, arg).then(() => {\n this.processResult(id, 'ok');\n }).catch((error) => {\n this.processResult(id, 'error', error instanceof Error ? 
error : new Error(String(error)));\n });\n }\n}\n\nclass WorkerPublisher extends AbstractDroppingPublisher {\n private readonly worker: Worker\n constructor(worker: Worker, cfg: Omit<BasicPublisherConfig, 'worker'>) {\n super(cfg);\n this.worker = worker;\n }\n\n protected processRequest<K extends keyof WorkerCommandType>(id: number, cmd: K, arg: WorkerCommandType[K]) {\n const request: WorkerRequest<K> = {id, cmd, arg};\n this.worker.postMessage(request);\n }\n\n async stop(rejectProcessing = false, timeout = 1000) {\n try {\n return await super.stop(rejectProcessing, timeout);\n }\n finally {\n this.worker.terminate();\n }\n }\n\n async start(publishFn: string) {\n await new Promise<void>((resolve) => {\n this.worker.onerror = (event) => {\n log.error(`error from worker: ${event.message}`);\n };\n this.worker.onmessageerror = (event) => {\n log.error(`error receiving message: ${event.data}`);\n }\n this.worker.onmessage = (event) => {\n if (event.data.ready) {\n resolve();\n return;\n }\n if (event.data.id) {\n const {id, result, error} = event.data;\n super.processResult(id, result, error == undefined ? undefined : deserializeError(error));\n } else if (event.data.log) {\n const {level, time, name, message, data} = event.data.log as LogEvent;\n logEvent({level, time: new Date(time), name, message, data: data.map(d => deserializeError(d))});\n } else if (event.data.event) {\n super.emit(event.data.event);\n }\n };\n });\n try {\n await super.start(publishFn);\n log.info(`publisher worker [${publishFn}] started`);\n } catch (e) {\n log.error(`error starting publisher worker [${publishFn}]: ${e}`);\n this.worker.terminate();\n throw e;\n }\n }\n}\n\nasync function publishInProc(publishFn: (command: PublishCommand) => Promise<unknown>, cfg: BasicPublisherConfig, xform: (data: common.MetricsEvent[]) => Iterable<IOGateway.Metrics.Update>): Promise<Publisher> {\n const inProc = new InProcPublisher(cfg);\n\n const publisher: Publisher = (msg: common.MetricsEvent) => {\n for (const u of xform([msg])) {\n inProc.next(u);\n }\n }\n\n await inProc.start(publishFn);\n\n publisher.close = async (): Promise<void> => {\n await inProc.stop();\n }\n\n publisher.on = (listener: (event: common.MetricsEvent) => void): void => {\n inProc.on(listener);\n };\n return publisher;\n\n}\n\nasync function publishInWorker(publishFn: string,\n cfg: Omit<BasicPublisherConfig, 'worker'>,\n xform: (data: common.MetricsEvent[]) => Iterable<IOGateway.Metrics.Update>,\n workerCfg: {url: URL, parameters?: Record<string, string>, options?: unknown}): Promise<Publisher> {\n\n\n const worker = new WorkerPublisher(await newWorker(workerCfg), cfg);\n await worker.start(publishFn);\n\n\n const publisher: Publisher = (msg: common.MetricsEvent): void => {\n for (const u of xform([msg])) {\n worker.next(u);\n }\n };\n publisher.close = async (): Promise<void> => {\n await worker.stop();\n };\n publisher.on = (listener: (event: common.MetricsEvent) => void): void => {\n worker.on(listener);\n };\n\n return publisher;\n}\n\nexport function basicRepositoryFactory(config: IOGateway.BasicMetricsPublisherConfig | undefined, publisher: Publisher): BasicRepositoryFactory {\n return new BasicRepositoryFactory(config, publisher);\n}\n", "import {basicRepositoryFactory, basicPublisher, WorkerConfig} from './publisher.js';\nimport {conflateRepo} from './common.js';\nimport * as core from './core.js';\nimport {IOGateway} from '../../gateway';\n\nexport type RestConfig = IOGateway.RestMetricsPublisherConfig & WorkerConfig\n\nexport async function 
restPublisher(config?: RestConfig) {\n const conflationMaxCount = config?.conflation?.[\"max-datapoints-repo\"] ?? 50;\n return await basicPublisher<RestConfig>(\n config,\n (data) => conflateRepo(conflationMaxCount, data),\n config?.publishFn ?? '@interopio/gateway/metrics/publisher/rest'\n );\n}\n\nexport async function restRepositoryFactory(config?: RestConfig): Promise<core.RepositoryFactory> {\n return basicRepositoryFactory(config, await restPublisher(config));\n}\n", "import {basicPublisher, basicRepositoryFactory, Publisher, WorkerConfig} from './publisher.js';\nimport {conflateRepo} from './common.js';\nimport * as core from './core.js';\nimport {getLogger} from '../logger.js';\nimport {IOGateway} from '../../gateway';\n\nconst log = getLogger('gateway.metrics.file');\n\nexport type FileConfig = IOGateway.FileMetricsPublisherConfig & WorkerConfig\n\nasync function filePublisher(config: FileConfig): Promise<Publisher> {\n const conflationMaxCount = config.conflation?.[\"max-datapoints-repo\"] ?? 50;\n log.info(`will record metrics to ${config.location}`);\n return await basicPublisher(\n config,\n (data) => conflateRepo(conflationMaxCount ?? 1, data),\n config?.publishFn ?? '@interopio/gateway/metrics/publisher/file');\n}\n\nexport async function fileRepositoryFactory(config: FileConfig) : Promise<core.RepositoryFactory> {\n return basicRepositoryFactory(config, await filePublisher(config));\n}\n", "import * as IOGateway from './main.ts';\n\nexport { IOGateway };\n\nexport default IOGateway.Factory;\n", "export * as Encoding from '../common/encoders.ts';\nexport * as Logging from '@interopio/gateway/logging/core';\nexport * as Filtering from '../common/filters.ts';\nexport * from '../gateway/core.ts';\n", "import t from 'transit-js';\n\nclass BiMap {\n private readonly map: Map<string, string>;\n private readonly reverse = new Map<string, string>();\n constructor(map?: Map<string, string>) {\n if (map) {\n for (const [k, v] of map) {\n this.reverse.set(v, k);\n }\n } else {\n // just to not be undefined\n map = this.reverse;\n }\n this.map = map;\n }\n\n public get(key: string): string | undefined {\n return this.map.get(key);\n }\n public rev(value: string): string | undefined {\n return this.reverse.get(value);\n }\n}\n\nfunction namespacedKeyword(value: string, namespaces: BiMap): Keyword {\n const idx = value.indexOf('/');\n if (idx === -1) {\n return t.keyword(value);\n }\n const prefix = value.substring(0, idx);\n const namespace = namespaces?.get(prefix) ?? prefix;\n return t.keyword(namespace + value.substring(idx));\n}\ninterface Keyword {\n name(): string\n namespace(): string | null\n}\nfunction parseKeyword(keyword: Keyword, namespaces: BiMap): string {\n const name = keyword.name();\n const namespace = keyword.namespace();\n if (namespace === null) {\n return name;\n }\n const prefix = namespaces.rev(namespace) ?? namespace;\n return prefix + '/' + name;\n}\n\nfunction toTransit<T>(m: T, namespaces: BiMap, keywordize?: Map<string, KeywordizeCommand>, root = '') {\n if (m instanceof Array) {\n return m.map((v) => {\n // todo support array paths\n return v;\n });\n } else if (m instanceof Object) {\n const data = t.map();\n for (const k in m) {\n const kc = keywordize?.get(root);\n const key = kc === null ? 
k : namespacedKeyword(k, namespaces);\n const v = m[k];\n const path = `${root}/${k}`; // allow over\n\n const vc = keywordize?.get(path);\n const keywordizeValue = vc !== undefined && typeof v === 'string' && (vc === '*' || vc?.has(v));\n if (keywordizeValue) {\n data.set(key, namespacedKeyword(v, namespaces));\n } else {\n if (kc === null && !keywordize?.has(path)) {\n keywordize?.set(path, null);\n }\n const value = toTransit(v, namespaces, keywordize, path);\n data.set(key, value);\n }\n }\n return data;\n } else {\n return m;\n }\n}\n/* eslint-disable @typescript-eslint/no-explicit-any */\nfunction toJS<T>(data: any, namespaces: BiMap): T {\n if (t.isKeyword(data)) {\n return parseKeyword(data as Keyword, namespaces) as T;\n } else if (t.isMap(data)) {\n const map = {};\n for (const [k, v] of data) {\n const key = t.isKeyword(k) ? parseKeyword(k as Keyword, namespaces) : k;\n map[key] = toJS(v, namespaces);\n }\n return map as T;\n } else if (t.isList(data) || data instanceof Array) {\n const list = [];\n for (const e of data) {\n list.push(toJS(e, namespaces));\n }\n return list as T;\n }\n return data;\n}\n\n// exports\nexport type Codec<T, R> = {\n encode: (msg: T) => R;\n decode: (data: R) => T;\n};\n\nexport type KeywordizeCommand\n = '*' // keywordize for all string values for ex {:type :cluster}\n | Set<string> // keywordize only specific values\n | null // stop keywordization (even for keys)\n ;\n\nexport function transit<T>(opts?: { keywordize?: Map<string, KeywordizeCommand>, verbose?: boolean, namespaces?: Map<string, string> }): Codec<T, string> {\n const type: t.Encodings = opts?.verbose ? 'json-verbose' : 'json';\n const namespaces = new BiMap(opts?.namespaces);\n return {\n encode: (msg: T): string => {\n const data = toTransit(msg, namespaces, opts?.keywordize);\n return t.writer(type).write(data);\n },\n decode: (data: string): T => {\n let map;\n try {\n map = t.reader(type).read(data);\n } catch (err) {\n throw new Error(`\"${data}\" is not valid TRANSIT`, {cause: err});\n }\n return toJS<T>(map, namespaces);\n }\n }\n}\n\nexport function json<T>(): Codec<T, string> {\n return {\n encode: JSON.stringify,\n decode: JSON.parse\n }\n}\n\nfunction identity<T>(v: T): T {\n return v;\n}\n\nfunction transform<T>(v: T): T {\n // clojurescript compatibility with (js->clj)\n // https://github.com/clojure/clojurescript/blob/067eaef03678a06d83caf3f66ddf20f9ee71db5b/src/main/cljs/cljs/core.cljs#L11193\n\n // deep transform arrays\n if (v instanceof Array || Array.isArray(v)) {\n return v.map(transform) as T;\n }\n // deep transform objects (but not dates for example)\n if (v?.constructor === Object) {\n return Object.keys(v).reduce((a: T, k: string) => {\n a[k] = transform(v[k]);\n return a;\n }, {} as T);\n }\n // nullify undefined values\n if (v === undefined) {\n return null as T;\n }\n return v;\n}\n\nexport function direct<T>(opts?: {cljs?: boolean}): Codec<T, T> {\n return {\n encode: identity,\n decode: (opts?.cljs ?? false) ? 
transform : identity\n }\n}\n", "import { WebAPIs } from '../globals.ts';\nimport { getLogger } from '../logger.ts';\nimport { localNode } from '../local-node/core.ts';\nimport { type Authenticators, globalDomain } from '../domains/global/core.ts';\nimport { type Authenticator } from '../auth/core.ts';\nimport * as basic from '../auth/basic.ts';\nimport * as oauth2 from '../auth/oauth2.ts';\nimport * as custom from '../auth/custom.ts';\nimport { channelCluster } from '../mesh/channel-mesh.ts';\nimport type { WebSocketHeaderOptions } from \"../mesh/ws-headers.ts\";\nimport type { GatewayNode, MessageType, RequestType } from '../node.ts';\nimport type { LocalAddress } from '../state/types/common.ts';\nimport { agmDomain } from '../domains/agm/core.ts';\nimport { activityDomain } from '../domains/activity/core.ts';\nimport { metricsDomain, type MetricPublishers } from '../domains/metrics/core.ts';\nimport { metrics } from './metrics.ts';\nimport { contextDomain, ContextDomainOptions } from '../domains/context/core.ts';\nimport { busDomain } from '../domains/bus/core.ts';\nimport Scavenger from './scavenger.ts';\nimport { GATEWAY_VERSION } from '../versions.ts';\nimport { sanitizerOf, WithRequired } from '../common/utilities.ts';\nimport { meshNode } from '../mesh-node/core.ts';\nimport type { Domain } from '../domain.ts';\nimport { websocketBrokerMesh } from '../mesh/ws/broker/client.ts';\nimport { websocketCluster } from '../mesh/ws-mesh.ts';\nimport { restDirectory } from '../mesh/rest-directory.ts';\nimport { type ClientInfo, type Clients, addClient, removeClient } from './clients.ts';\nimport * as GatewayEncoders from '../common/encoders.ts';\nimport { IOGateway } from '../../gateway';\nimport { defaultPeerRestrictions, defaultRestrictions } from './visibility.ts';\nimport type { Mesh } from '../mesh.ts';\nimport { staticDirectory } from '../mesh/static-directory.ts';\n\nconst log = getLogger('gateway');\n\nfunction authenticators(config?: IOGateway.AuthenticationConfig,\n globals?: IOGateway.GatewayConfig['globals']): Authenticators {\n const available = (config?.available as Exclude<string, 'default' | 'available'>[] ?? ['basic']).reduce((a, k) => {\n if (k === 'basic') {\n a[k] = basic.authenticator(config?.basic ?? {});\n return a;\n }\n if (k === 'oauth2') {\n a[k] = oauth2.authenticator(config?.oauth2 ?? {}, { ...globals });\n return a;\n }\n const authConfig = config?.[k] ?? {};\n if (authConfig['authenticator']) {\n const authFn = authConfig['authenticator'];\n const configWithoutAuthenticator = {...authConfig};\n delete configWithoutAuthenticator['authenticator'];\n a[k] = custom.authenticator(configWithoutAuthenticator, authFn);\n return a;\n }\n return a;\n\n }, {} as Record<string, Authenticator>);\n return {'default': config?.default as string ?? 'basic', available};\n}\n\nfunction createWebSocketCluster(endpoint: string,\n environment: { gateway: { version: string }, [key: string]: unknown },\n relays?: string,\n directory?: IOGateway.StaticMeshDirectoryConfig | IOGateway.RestMeshDirectoryConfig,\n opts?: WebAPIs & WebSocketHeaderOptions): Mesh {\n let relaysUri = relays ?? endpoint;\n if (relays === undefined && relaysUri.startsWith('http')) {\n relaysUri = relaysUri.replace('http', 'ws');\n relaysUri += relaysUri.endsWith('/') ? 
'' : '/';\n relaysUri += 'relays';\n }\n if (directory?.['members'] !== undefined) {\n return websocketCluster(\n {\n relays: `${relaysUri}`\n },\n staticDirectory((directory as IOGateway.StaticMeshDirectoryConfig).members),\n opts);\n } else {\n const announceUri = (directory as IOGateway.RestMeshDirectoryConfig | undefined)?.uri ?? endpoint;\n const announceInterval = (directory as IOGateway.RestMeshDirectoryConfig | undefined)?.interval;\n const announceMetadata = { ...environment.gateway, ...(directory as IOGateway.RestMeshDirectoryConfig | undefined)?.metadata };\n return websocketCluster(\n {\n relays: `${relaysUri}`\n },\n restDirectory({uri: announceUri, announceInterval}, { metadata: announceMetadata, ...opts }),\n opts);\n }\n}\n\nfunction gatewayNode<T extends MessageType = MessageType>(domains: Domain<T>[],\n environment: { gateway: { version: string }, [key: string]: unknown },\n nodeId?: string,\n signingKey?: string,\n mesh?: IOGateway.MeshConfig,\n globals?: IOGateway.GatewayConfig['globals']): GatewayNode {\n if (mesh) {\n no