"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.eventsMonotonicEmulated = void 0;
/*
* Actyx SDK: Functions for writing distributed apps
* deployed on peer-to-peer networks, without any servers.
*
* Copyright (C) 2021 Actyx AG
*/
/* eslint-disable @typescript-eslint/no-explicit-any */
const Option_1 = require("fp-ts/lib/Option");
const Ord_1 = require("fp-ts/lib/Ord");
const rxjs_1 = require("rxjs");
const operators_1 = require("rxjs/operators");
const types_1 = require("../types");
const util_1 = require("../util");
const log_1 = require("./log");
const bufferOp_1 = require("../util/bufferOp");
const eventKeyGreater = (0, Ord_1.gt)(types_1.EventKey.ord);
const eventKeyGreaterEq = (0, Ord_1.geq)(types_1.EventKey.ord);
const GenericSemantics = 'generic-snapshot-v2';
const horizonFilter = (horizon) => (x) => eventKeyGreaterEq(x, horizon);
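// Illustration (hypothetical keys; assuming EventKey.ord orders primarily by lamport timestamp,
// with the `{ lamport, stream, offset }` shape used further down in this file):
//
//   const keep = horizonFilter({ lamport: 10, stream: 'src-a', offset: 5 })
//   keep({ lamport: 12, stream: 'src-b', offset: 0 }) // true  -- at or beyond the horizon, kept
//   keep({ lamport: 9,  stream: 'src-b', offset: 3 }) // false -- strictly before the horizon, dropped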
/**
* Create a new endpoint, based on the given EventStore and SnapshotStore.
* The returned function itself is stateless between subsequent calls --
* all state is within the EventStore itself.
*/
const eventsMonotonicEmulated = (eventStore,
// No snapshots are actually available in V2 so far.
snapshotStore) => {
// Stream realtime events from the given point on.
// As soon as time-travel would occur, the stream terminates with a TimetravelMsg.
const realtimeFrom = (fishId, subscriptions, fixedStart) => {
const realtimeEvents = eventStore.subscribe(fixedStart.from, subscriptions);
const rtAfterHorizon = fixedStart.horizon
? realtimeEvents.pipe((0, operators_1.filter)(horizonFilter(fixedStart.horizon)))
: realtimeEvents;
let latest = fixedStart.latestEventKey;
let tt = false;
const liveBuffered = rtAfterHorizon.pipe((0, bufferOp_1.bufferOp)(1), (0, operators_1.filter)((x) => x.length > 0), (0, operators_1.mergeMap)((nextUnsorted) => {
// Don't spam the logs, and avoid esoteric race conditions caused by triggering multiple snapshot invalidations.
if (tt) {
return rxjs_1.EMPTY;
}
const next = nextUnsorted.sort(types_1.EventKey.ord.compare);
// Check that we keep moving strictly forwards in the event order; otherwise we must time-travel.
const nextKey = next[0];
const nextIsOlderThanLatest = eventKeyGreater(latest, nextKey);
if (nextIsOlderThanLatest) {
tt = true;
log_1.default.submono.debug('started from', fixedStart.from, 'got triggered by stream', nextKey.stream, 'offset', nextKey.offset);
return (0, rxjs_1.from)(snapshotStore
.invalidateSnapshots(GenericSemantics, fishId, nextKey)
.then(() => timeTravelMsg(fishId, latest, next)));
}
log_1.default.submono.debug('order-check passed: ' + types_1.EventKey.format(nextKey) + ' > ' + types_1.EventKey.format(latest), 'for realtime event');
// We have captured `latest` in the closure and are updating it here
const newLatest = next[next.length - 1];
latest = {
lamport: newLatest.lamport,
stream: newLatest.stream,
offset: newLatest.offset,
};
return (0, rxjs_1.of)({
type: types_1.MsgType.events,
events: next,
caughtUp: true,
});
}),
// Buffer live events for a small amount of time, so we don’t update state too often.
// Should be handled by the `caughtUp` flag in the store-side impl.
(0, util_1.takeWhileInclusive)((m) => m.type !== types_1.MsgType.timetravel));
return liveBuffered;
};
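// Shape of the stream returned by realtimeFrom (sketch):
//
//   { type: MsgType.events, events: [...], caughtUp: true }   // zero or more live chunks, each sorted
//   { type: MsgType.timetravel, trigger, high }               // only if an older event shows up; final emission
//
// `takeWhileInclusive` keeps the timetravel message itself as the last element before the stream completes.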
// The only reason we need the "catch up to present" step is that `allEvents` makes no effort whatsoever
// to give a proper ordering for *known* events, so we must take care of that ourselves by first streaming up *to* the present.
// Stream events monotonically from the given point on.
// This function is needed because `realtimeFrom` returns *past* data out of order, too.
// So in order to have a meaningful shot at reaching a stable state, we must first "forward-stream" up to the known present
// and then switch over to "realtime" streaming.
const monotonicFrom = (fishId, subscriptions, present, fixedStart = {
from: {},
latestEventKey: types_1.EventKey.zero,
}) => {
let latest = fixedStart.latestEventKey;
const persisted = eventStore.query(fixedStart.from, present, subscriptions, types_1.EventsSortOrder.Ascending);
const persistedAfterHorizon = fixedStart.horizon
? persisted.pipe((0, operators_1.filter)(horizonFilter(fixedStart.horizon)))
: persisted;
const persistedChunked = persistedAfterHorizon
// Speed up Fish hydration by applying events in chunks of 32
.pipe((0, operators_1.bufferCount)(32), (0, operators_1.map)((chunk) => ({
type: types_1.MsgType.events,
events: chunk,
caughtUp: false,
})), (0, operators_1.tap)((msg) => (latest = msg.events[msg.events.length - 1])));
const realtimeStream = (0, rxjs_1.defer)(() => realtimeFrom(fishId, subscriptions, {
from: present,
latestEventKey: latest,
horizon: fixedStart.horizon,
}));
return (0, rxjs_1.concat)(persistedChunked, (0, rxjs_1.of)({
type: types_1.MsgType.events,
events: [],
// Empty chunk with caughtUp=true, to trigger emission of the current state.
// A proper implementation would set caughtUp=true on the final (nonempty) chunk instead,
// but detecting that final chunk is hard here.
caughtUp: true,
}), realtimeStream);
};
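// Emission order of monotonicFrom (sketch):
//
//   1. persisted events between `from` and `present`, ascending, in chunks of 32 with caughtUp: false
//   2. a single empty chunk with caughtUp: true, forcing a state emission once the past has been applied
//   3. the realtime stream starting at `present` (see realtimeFrom above), which may end in a timetravel message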
// Given a FixedStart point, check whether we can reach `present` from it without time travel.
// If so, apply whenValid. Otherwise apply whenInvalid to the earliest event between the start and present.
const validateFixedStart = (subscriptions, present, attemptStartFrom, whenInvalid, whenValid) => {
const earliestNewEvents = eventStore.query(attemptStartFrom.from, present, subscriptions, types_1.EventsSortOrder.Ascending);
// FIXME: Store should filter
const afterHorizon = attemptStartFrom.horizon
? earliestNewEvents.pipe((0, operators_1.filter)(horizonFilter(attemptStartFrom.horizon)))
: earliestNewEvents;
const earliestNew = afterHorizon.pipe((0, operators_1.defaultIfEmpty)(null), (0, operators_1.first)());
// Find the earliest persisted event after the starting offsets and check that it is not older than the FixedStart's latest key
return earliestNew.pipe((0, operators_1.concatMap)((earliest) => earliest && eventKeyGreater(attemptStartFrom.latestEventKey, earliest)
? whenInvalid(earliest)
: whenValid()));
};
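// Decision sketch for validateFixedStart, with L = attemptStartFrom.latestEventKey
// and E = earliest event found between attemptStartFrom.from and present:
//
//   no E (nothing new) -> whenValid()
//   E >= L             -> whenValid()        // everything new lies ahead of the fixed start
//   E <  L             -> whenInvalid(E)     // the fixed start skipped over E: time travel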
// Client thinks it has valid offsets to start from -- it may be wrong, though!
const startFromFixedOffsets = (fishId, subscriptions, present) => (attemptStartFrom) => {
const whenValid = () => monotonicFrom(fishId, subscriptions, present, attemptStartFrom);
const whenInvalid = (earliest) => {
log_1.default.submono.debug(fishId, 'discarding outdated requested FixedStart', types_1.EventKey.format(attemptStartFrom.latestEventKey), 'due to', types_1.EventKey.format(earliest));
// TODO this time travel msg should also have a good `high` element
// (consider this if/when this is ever implemented on the Rust side)
return (0, rxjs_1.of)(timeTravelMsg(fishId, attemptStartFrom.latestEventKey, [earliest]));
};
return validateFixedStart(subscriptions, present, attemptStartFrom, whenInvalid, whenValid);
};
const tryReadSnapshot = async (fishId) => {
const retrieved = await snapshotStore.retrieveSnapshot(GenericSemantics, fishId, 0);
util_1.runStats.counters.add(`snapshot-wanted/${fishId}`);
return (0, Option_1.map)((x) => {
util_1.runStats.counters.add(`snapshot-found/${fishId}`);
return x;
})((0, Option_1.fromNullable)(retrieved));
};
// Try to start from a snapshot we have found. The snapshot may be outdated, though.
const startFromSnapshot = (fishId, subscriptions, present) => (snap) => {
const fixedStart = {
from: snap.offsets,
horizon: snap.horizon,
latestEventKey: snap.eventKey,
};
const whenInvalid = (earliest) => {
log_1.default.submono.debug(fishId, 'discarding outdated snapshot', types_1.EventKey.format(snap.eventKey), 'due to', types_1.EventKey.format(earliest));
return (0, rxjs_1.from)(snapshotStore.invalidateSnapshots(GenericSemantics, fishId, earliest)).pipe((0, operators_1.first)(), (0, operators_1.concatMap)(() => observeMonotonicFromSnapshot(fishId, subscriptions)));
};
const whenValid = () => (0, rxjs_1.concat)((0, rxjs_1.of)(stateMsg(fishId, snap)), monotonicFrom(fishId, subscriptions, present, {
from: snap.offsets,
latestEventKey: snap.eventKey,
horizon: snap.horizon,
}));
return validateFixedStart(subscriptions, present, fixedStart, whenInvalid, whenValid);
};
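// Two outcomes of startFromSnapshot (sketch): a still-valid snapshot yields a stateMsg followed by
// monotonic streaming from the snapshot's offsets; an outdated snapshot is invalidated in the
// SnapshotStore and the whole observation restarts from scratch via observeMonotonicFromSnapshot.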
const observeMonotonicFromSnapshot = (fishId, subscriptions) => {
return (0, rxjs_1.combineLatest)([
(0, rxjs_1.from)(tryReadSnapshot(fishId)).pipe((0, operators_1.first)()),
(0, rxjs_1.from)(eventStore.offsets()).pipe((0, operators_1.map)(({ present }) => present)),
]).pipe((0, operators_1.concatMap)(([maybeSnapshot, present]) => (0, Option_1.fold)(
// No snapshot found -> start from scratch
() => monotonicFrom(fishId, subscriptions, present), startFromSnapshot(fishId, subscriptions, present))(maybeSnapshot)));
};
return (fishId, subscriptions, attemptStartFrom) => {
if (attemptStartFrom) {
// Client explicitly requests us to start at a certain point
return (0, rxjs_1.from)(eventStore.offsets()).pipe((0, operators_1.concatMap)((offsets) => startFromFixedOffsets(fishId, subscriptions, offsets.present)(attemptStartFrom)));
}
else {
// `from` NOT given -> try finding a snapshot
return observeMonotonicFromSnapshot(fishId, subscriptions);
}
};
};
exports.eventsMonotonicEmulated = eventsMonotonicEmulated;
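/*
 * Usage sketch -- `eventStore`, `snapshotStore`, `fishId` and `subscriptions` are assumptions here
 * and have to come from the surrounding SDK wiring; this only illustrates how the returned
 * subscribe function is typically consumed:
 *
 *   const subscribe = eventsMonotonicEmulated(eventStore, snapshotStore);
 *   subscribe(fishId, subscriptions).subscribe((msg) => {
 *     if (msg.type === types_1.MsgType.state) {
 *       // hydrate local state from msg.snapshot
 *     } else if (msg.type === types_1.MsgType.events) {
 *       // apply msg.events in order; publish the new state once msg.caughtUp is true
 *     } else {
 *       // MsgType.timetravel: discard local state and re-subscribe from scratch
 *     }
 *   });
 */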
const stateMsg = (fishId, snapshot) => {
log_1.default.submono.info(fishId, 'picking up from local snapshot', types_1.EventKey.format(snapshot.eventKey));
return {
type: types_1.MsgType.state,
snapshot,
};
};
const timeTravelMsg = (fishId, previousHead, next) => {
log_1.default.submono.info(fishId, 'must time-travel back to:', types_1.EventKey.format(next[0]));
const high = (0, util_1.getInsertionIndex)(next, previousHead, types_1.EventKey.ord.compare) - 1;
return {
type: types_1.MsgType.timetravel,
trigger: next[0],
high: next[high],
};
};
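// Worked example for `high` (hypothetical keys with k1 < k2 < k3 under EventKey.ord, and assuming
// getInsertionIndex returns the position at which previousHead would be inserted to keep `next` sorted):
//
//   next = [k1, k2, k3], previousHead lies between k2 and k3
//   getInsertionIndex(next, previousHead, compare) === 2, so high = next[2 - 1] = k2,
//   i.e. `high` is the latest of the triggering events that is still at or below the previous head.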
//# sourceMappingURL=subscribe_monotonic_emulated.js.map