@codeforbreakfast/eventsourcing-store-postgres
Production-ready PostgreSQL event store with Effect integration - Scalable, ACID-compliant event persistence with type-safe database operations and streaming
470 lines (449 loc) • 27.5 kB
JavaScript
// src/postgres.ts
import { PgClient, PgMigrator } from "@effect/sql-pg";
import { Config, Effect as Effect5, Layer, pipe, Option } from "effect";
// src/migrations/index.ts
import { Effect as Effect4 } from "effect";
// src/migrations/0001_add_events.ts
import { SqlClient } from "@effect/sql";
import { Effect } from "effect";
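// Events are stored one row per event; the composite primary key (stream_id, event_number)
// makes each position within a stream unique, so two writers appending at the same position
// cannot both succeed at the database level.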
var _0001_add_events_default = Effect.flatMap(SqlClient.SqlClient, (sql) => sql`
  CREATE TABLE events (
    stream_id varchar(255) NOT NULL,
    event_number integer NOT NULL,
    event_payload varchar NOT NULL,
    PRIMARY KEY (stream_id, event_number)
  )
`);
// src/migrations/0002_add_notification_trigger.ts
import { SqlClient as SqlClient2 } from "@effect/sql";
import { Effect as Effect2 } from "effect";
var _0002_add_notification_trigger_default = Effect2.flatMap(SqlClient2.SqlClient, (sql) => sql`
  -- Create the notification trigger function
  CREATE OR REPLACE FUNCTION notify_event() RETURNS TRIGGER AS $$
  BEGIN
    PERFORM pg_notify(
      'eventstore_events_' || NEW.stream_id,
      json_build_object(
        'stream_id', NEW.stream_id,
        'event_number', NEW.event_number,
        'event_payload', NEW.event_payload
      )::text
    );
    RETURN NEW;
  END;
  $$ LANGUAGE plpgsql;
  -- Create the trigger on the events table
  DROP TRIGGER IF EXISTS events_notify_trigger ON events;
  CREATE TRIGGER events_notify_trigger
    AFTER INSERT ON events
    FOR EACH ROW EXECUTE FUNCTION notify_event();
`);
// src/migrations/0003_add_global_notification_channel.ts
import { SqlClient as SqlClient3 } from "@effect/sql";
import { Effect as Effect3 } from "effect";
var _0003_add_global_notification_channel_default = Effect3.flatMap(SqlClient3.SqlClient, (sql) => sql`
  -- Update the notification trigger function to also notify on a global channel
  CREATE OR REPLACE FUNCTION notify_event() RETURNS TRIGGER AS $$
  BEGIN
    -- Notify stream-specific channel for subscribe()
    PERFORM pg_notify(
      'eventstore_events_' || NEW.stream_id,
      json_build_object(
        'stream_id', NEW.stream_id,
        'event_number', NEW.event_number,
        'event_payload', NEW.event_payload
      )::text
    );
    -- Notify global channel for subscribeAll()
    PERFORM pg_notify(
      'eventstore_events_all',
      json_build_object(
        'stream_id', NEW.stream_id,
        'event_number', NEW.event_number,
        'event_payload', NEW.event_payload
      )::text
    );
    RETURN NEW;
  END;
  $$ LANGUAGE plpgsql;
`);
// src/migrations/index.ts
var migrations = [
[1, "add_events", Effect4.succeed(_0001_add_events_default)],
[2, "add_notification_trigger", Effect4.succeed(_0002_add_notification_trigger_default)],
[3, "add_global_notification_channel", Effect4.succeed(_0003_add_global_notification_channel_default)]
];
var loader = Effect4.succeed(migrations);
// src/postgres.ts
class PgConfiguration extends Effect5.Tag("PgConfiguration")() {
}
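// PgLive builds a live PgClient layer from the PgConfiguration service;
// maxConnections is only forwarded when it was actually configured.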
var PgLive = pipe(PgConfiguration, Effect5.map((config) => {
const { username, password, database, host, port, maxConnections } = config;
return PgClient.layer({
username,
password,
database,
host,
port,
...maxConnections !== undefined && { maxConnections }
});
}), Layer.unwrapEffect);
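// makePgConfigurationLive reads USERNAME, PASSWORD, DATABASE, HOST, PORT and the optional
// MAX_CONNECTIONS nested under the given prefix; with the default ConfigProvider and the
// "PG" prefix used below, these come from PG_USERNAME, PG_PASSWORD, and so on.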
var makePgConfigurationLive = (prefix) => Layer.effect(PgConfiguration, pipe(Config.nested(Config.all([
Config.string("USERNAME"),
Config.redacted("PASSWORD"),
Config.string("DATABASE"),
Config.string("HOST"),
Config.integer("PORT"),
Config.option(Config.integer("MAX_CONNECTIONS"))
]), prefix), Effect5.map(([username, password, database, host, port, maxConnections]) => ({
username,
password,
database,
host,
port,
...Option.isSome(maxConnections) && { maxConnections: maxConnections.value }
}))));
var PgConfigurationLive = makePgConfigurationLive("PG");
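// PostgresLive runs the migrations above (recorded in the eventstore_migrations table)
// and merges in the PgClient from PgLive, so consumers get a migrated database and a
// connected client from a single layer.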
var PostgresLive = pipe({
loader,
table: "eventstore_migrations"
}, PgMigrator.layer, Layer.provide(PgLive), Layer.provideMerge(PgLive));
// src/sqlEventStore.ts
import { SqlClient as SqlClient4, SqlResolver } from "@effect/sql";
import { Effect as Effect10, Layer as Layer6, Match, Schema as Schema2, Sink, Stream as Stream3, identity, pipe as pipe6 } from "effect";
import {
EventNumber,
EventStreamId as EventStreamId3,
EventStreamPosition as EventStreamPosition2,
eventStoreError as eventStoreError3,
ConcurrencyConflictError
} from "@codeforbreakfast/eventsourcing-store";
// src/connectionManager.ts
import { PgClient as PgClient2 } from "@effect/sql-pg";
import { Context, Duration, Effect as Effect6, Layer as Layer2, Schedule, pipe as pipe2 } from "effect";
import { connectionError } from "@codeforbreakfast/eventsourcing-store";
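// ConnectionManager holds the PgClient connection used for LISTEN/NOTIFY and exposes a
// health check and shutdown hook; withConnectionHealth (below) retries an effect on an
// exponential schedule (100ms base, factor 1.5) while the computed delay stays under 60s.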
class ConnectionManager extends Context.Tag("ConnectionManager")() {
}
var healthCheckQuery = "SELECT 1 AS health_check";
var executeHealthCheck = (listenConnection) => pipe2(listenConnection, (client) => client.unsafe(healthCheckQuery), Effect6.tapError((error) => Effect6.logError("PostgreSQL notification listener health check failed", { error })), Effect6.mapError(connectionError.retryable("health check notification listener")), Effect6.as(undefined));
var executeShutdown = (_listenConnection) => Effect6.logInfo("PostgreSQL notification listener connection cleanup initiated");
var ConnectionManagerLive = Layer2.effect(ConnectionManager, pipe2(PgClient2.PgClient, Effect6.tapError((error) => Effect6.logError("Failed to establish notification listener connection", {
error
})), Effect6.mapError(connectionError.retryable("establish notification listener connection")), Effect6.map((listenConnection) => ({
getListenConnection: Effect6.succeed(listenConnection),
healthCheck: executeHealthCheck(listenConnection),
shutdown: executeShutdown(listenConnection)
}))));
var createRetrySchedule = () => pipe2(Schedule.exponential(Duration.millis(100), 1.5), Schedule.whileOutput((d) => Duration.toMillis(d) < 60000));
var withConnectionHealth = Effect6.retry(createRetrySchedule());
// src/eventStreamTracker.ts
import { Effect as Effect7, HashMap, Layer as Layer3, Option as Option2, SynchronizedRef, pipe as pipe3 } from "effect";
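// EventStreamTracker remembers the highest event number seen per stream so duplicate or
// out-of-order notifications can be dropped; processEvent yields Some(event) only when the
// event is newer than what was last seen for that stream.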
class EventStreamTracker extends Effect7.Tag("EventStreamTracker")() {
}
var getCurrentLastEvent = (lastEvents, streamId) => Option2.getOrElse(HashMap.get(lastEvents, streamId), () => -1);
var processEventWithTracking = (lastEventNumbers) => (position, event) => SynchronizedRef.modify(lastEventNumbers, (lastEvents) => {
const currentLastEvent = getCurrentLastEvent(lastEvents, position.streamId);
return position.eventNumber > currentLastEvent ? [Option2.some(event), HashMap.set(lastEvents, position.streamId, position.eventNumber)] : [Option2.none(), lastEvents];
});
var EventStreamTrackerLive = () => Layer3.effect(EventStreamTracker, pipe3(HashMap.empty(), SynchronizedRef.make, Effect7.map((lastEventNumbers) => ({
processEvent: processEventWithTracking(lastEventNumbers)
}))));
// src/notificationListener.ts
import { PgClient as PgClient3 } from "@effect/sql-pg";
import { Effect as Effect8, Layer as Layer4, Stream, Ref, Queue, Schema, HashSet, pipe as pipe4 } from "effect";
import {
EventStreamId,
eventStoreError
} from "@codeforbreakfast/eventsourcing-store";
var makeChannelName = (streamId) => `eventstore_events_${streamId}`;
var ALL_EVENTS_CHANNEL = "eventstore_events_all";
var decodeNotificationSchema = Schema.decodeUnknown(Schema.Struct({
stream_id: Schema.String,
event_number: Schema.Number,
event_payload: Schema.String
}));
var parseNotificationPayload = (jsonString) => pipe4(jsonString, (str) => Effect8.try({
try: () => JSON.parse(str),
catch: eventStoreError.read(undefined, "Failed to parse notification payload")
}), Effect8.flatMap(decodeNotificationSchema), Effect8.mapError(eventStoreError.read(undefined, "Failed to validate notification payload schema")));
class NotificationListener extends Effect8.Tag("NotificationListener")() {
}
var parseStreamIdAndQueueNotification = (notificationQueue, isAllEvents) => (payload) => pipe4(payload.stream_id, Schema.decode(EventStreamId), Effect8.flatMap((parsedStreamId) => Queue.offer(notificationQueue, {
streamId: parsedStreamId,
payload,
isAllEvents
})), Effect8.mapError(eventStoreError.read(undefined, "Failed to parse stream_id from notification")));
var parseAndQueueStreamNotification = (rawPayload, notificationQueue, isAllEvents) => pipe4(rawPayload, parseNotificationPayload, Effect8.flatMap(parseStreamIdAndQueueNotification(notificationQueue, isAllEvents)));
var processRawNotification = (notificationQueue, channelName, isAllEvents) => (rawPayload) => pipe4(parseAndQueueStreamNotification(rawPayload, notificationQueue, isAllEvents), Effect8.catchAll((error) => Effect8.logError(`Failed to process notification for ${channelName}`, {
error
})));
var startListeningOnChannel = (client, notificationQueue, channelName) => pipe4(channelName, client.listen, Stream.tap(processRawNotification(notificationQueue, channelName, false)), Stream.runDrain, Effect8.fork, Effect8.asVoid);
var activateChannelAndStartListening = (activeChannels, client, notificationQueue, channelName) => pipe4(Ref.update(activeChannels, HashSet.add(channelName)), Effect8.tap(() => startListeningOnChannel(client, notificationQueue, channelName)));
var activateAndListen = (activeChannels, client, notificationQueue, streamId) => {
const channelName = makeChannelName(streamId);
return activateChannelAndStartListening(activeChannels, client, notificationQueue, channelName);
};
var startListenForStream = (activeChannels, client, notificationQueue) => (streamId) => pipe4(activateAndListen(activeChannels, client, notificationQueue, streamId), Effect8.mapError(eventStoreError.subscribe(streamId, "Failed to listen to stream")));
var removeChannelFromActive = (activeChannels, channelName) => pipe4(activeChannels, Ref.update(HashSet.remove(channelName)), Effect8.asVoid);
var removeChannelForStream = (activeChannels, streamId) => {
const channelName = makeChannelName(streamId);
return removeChannelFromActive(activeChannels, channelName);
};
var stopListenForStream = (activeChannels) => (streamId) => pipe4(removeChannelForStream(activeChannels, streamId), Effect8.mapError(eventStoreError.subscribe(streamId, "Failed to unlisten from stream")));
var createNotificationsStream = (notificationQueue) => pipe4(notificationQueue, Queue.take, Stream.repeatEffect, Stream.mapError(eventStoreError.read(undefined, "Failed to read notification queue")));
var startListenerService = pipe4("PostgreSQL notification listener started with LISTEN/NOTIFY support", Effect8.logInfo, Effect8.asVoid);
var clearActiveChannels = Ref.set(HashSet.empty());
var stopListenerService = (activeChannels) => pipe4("PostgreSQL notification listener stopped", Effect8.logInfo, Effect8.andThen(clearActiveChannels(activeChannels)), Effect8.asVoid);
var parseStreamIdFromPayload = (payload) => pipe4(payload.stream_id, Schema.decode(EventStreamId), Effect8.mapError(eventStoreError.read(undefined, "Failed to parse stream_id from notification")));
var queueParsedNotification = (notificationQueue, payload, streamId, isAllEvents) => Queue.offer(notificationQueue, {
streamId,
payload,
isAllEvents
});
var parseAndQueue = (notificationQueue, isAllEvents) => (payload) => pipe4(payload, parseStreamIdFromPayload, Effect8.flatMap((streamId) => queueParsedNotification(notificationQueue, payload, streamId, isAllEvents)));
var processAllEventsNotification = (notificationQueue) => (rawPayload) => pipe4(rawPayload, parseNotificationPayload, Effect8.flatMap(parseAndQueue(notificationQueue, true)), Effect8.catchAll((error) => Effect8.logError(`Failed to process all-events notification`, { error })));
var startListeningOnAllEventsChannel = (client, notificationQueue) => pipe4(ALL_EVENTS_CHANNEL, client.listen, Stream.tap(processAllEventsNotification(notificationQueue)), Stream.runDrain, Effect8.fork, Effect8.asVoid);
var activateAllEventsChannel = (activeChannels, client, notificationQueue) => pipe4(Ref.update(activeChannels, HashSet.add(ALL_EVENTS_CHANNEL)), Effect8.tap(() => startListeningOnAllEventsChannel(client, notificationQueue)));
var listenAllEvents = (activeChannels, client, notificationQueue) => pipe4(activeChannels, Ref.get, Effect8.flatMap((channels) => Effect8.if(HashSet.has(channels, ALL_EVENTS_CHANNEL), {
onTrue: () => Effect8.succeed(undefined),
onFalse: () => activateAllEventsChannel(activeChannels, client, notificationQueue)
})), Effect8.mapError(eventStoreError.subscribe("*", "Failed to listen to all events")));
var unlistenAllEvents = (activeChannels) => pipe4(removeChannelFromActive(activeChannels, ALL_EVENTS_CHANNEL), Effect8.mapError(eventStoreError.subscribe("*", "Failed to unlisten from all events")));
var buildNotificationListener = ({
client,
activeChannels,
notificationQueue
}) => ({
listen: startListenForStream(activeChannels, client, notificationQueue),
unlisten: stopListenForStream(activeChannels),
listenAll: listenAllEvents(activeChannels, client, notificationQueue),
unlistenAll: unlistenAllEvents(activeChannels),
notifications: createNotificationsStream(notificationQueue),
start: startListenerService,
stop: stopListenerService(activeChannels)
});
var createNotificationListenerDependencies = {
client: PgClient3.PgClient,
activeChannels: Ref.make(HashSet.empty()),
notificationQueue: Queue.unbounded()
};
var NotificationListenerLive = Layer4.effect(NotificationListener, pipe4(Effect8.all(createNotificationListenerDependencies), Effect8.map(buildNotificationListener)));
// src/subscriptionManager.ts
import {
Duration as Duration2,
Effect as Effect9,
HashMap as HashMap2,
Layer as Layer5,
Option as Option3,
PubSub,
Schedule as Schedule2,
Stream as Stream2,
SynchronizedRef as SynchronizedRef2,
pipe as pipe5
} from "effect";
import {
eventStoreError as eventStoreError2
} from "@codeforbreakfast/eventsourcing-store";
class SubscriptionManager extends Effect9.Tag("SubscriptionManager")() {
}
var updateHashMapWithPubSub = (streamId, subs) => (pubsub) => {
const data = { pubsub };
return HashMap2.set(subs, streamId, data);
};
var createPubSubAndUpdateHashMap = (streamId, subs) => pipe5(256, PubSub.bounded, Effect9.map(updateHashMapWithPubSub(streamId, subs)), Effect9.runSync);
var getOrCreateSubscription = (streamId) => (subs) => pipe5(subs, HashMap2.get(streamId), Option3.match({
onNone: () => createPubSubAndUpdateHashMap(streamId, subs),
onSome: () => subs
}));
var extractSubscriptionData = (streamId) => (subscriptions) => pipe5(subscriptions, HashMap2.get(streamId), Option3.match({
onNone: () => Effect9.die("Subscription should exist but doesn't"),
onSome: Effect9.succeed
}));
var updateStateWithNewSubscription = (streamId) => (state) => pipe5(state.streams, getOrCreateSubscription(streamId), (streams) => ({
...state,
streams
}));
var getOrCreatePubSub = (ref, streamId) => pipe5(ref, SynchronizedRef2.updateAndGet(updateStateWithNewSubscription(streamId)), Effect9.map((state) => state.streams), Effect9.flatMap(extractSubscriptionData(streamId)));
var updateStateByRemovingStream = (streamId) => (state) => ({
...state,
streams: HashMap2.remove(state.streams, streamId)
});
var removeSubscription = (ref, streamId) => pipe5(ref, SynchronizedRef2.update(updateStateByRemovingStream(streamId)), Effect9.as(undefined));
var publishEventToPubSub = (event, streamId) => (subData) => pipe5(subData.pubsub, PubSub.publish(event), Effect9.tapError((error) => Effect9.logError("Failed to publish event to subscribers", {
error,
streamId
})));
var publishToSubscriptionIfExists = (streamId, event) => (subscriptions) => pipe5(subscriptions, HashMap2.get(streamId), Option3.match({
onNone: () => Effect9.succeed(undefined),
onSome: publishEventToPubSub(event, streamId)
}));
var publishToStream = (ref, streamId, event) => pipe5(ref, SynchronizedRef2.get, Effect9.map((state) => state.streams), Effect9.flatMap(publishToSubscriptionIfExists(streamId, event)));
var createRetrySchedule2 = () => pipe5(Schedule2.exponential(Duration2.millis(100), 1.5), Schedule2.whileOutput((d) => Duration2.toMillis(d) < 30000));
var createStreamFromPubSub = (pubsub) => pipe5(pubsub.pubsub, (p) => Stream2.fromPubSub(p), Stream2.retry(createRetrySchedule2()));
var createSubscriptionStream = (ref) => (streamId) => pipe5(getOrCreatePubSub(ref, streamId), Effect9.map(createStreamFromPubSub), Effect9.mapError(eventStoreError2.subscribe(streamId, "Failed to subscribe to stream")));
var unsubscribeFromStreamWithErrorHandling = (ref) => (streamId) => pipe5(removeSubscription(ref, streamId), Effect9.mapError(eventStoreError2.subscribe(streamId, "Failed to unsubscribe from stream")));
var publishEventWithErrorHandling = (ref) => (streamId, event) => pipe5(publishToStream(ref, streamId, event), Effect9.mapError(eventStoreError2.write(streamId, "Failed to publish event to subscribers")));
var createAllEventsStreamFromPubSub = (pubsub) => Stream2.retry(Stream2.fromPubSub(pubsub), createRetrySchedule2());
var subscribeToAllEventsStream = (ref) => () => pipe5(ref, SynchronizedRef2.get, Effect9.map((state) => createAllEventsStreamFromPubSub(state.allEventsPubSub)), Effect9.mapError(eventStoreError2.subscribe("*", "Failed to subscribe to all events")));
var publishToAllEventsPubSub = (position, event) => (pubsub) => pipe5(pubsub, PubSub.publish({ position, event }), Effect9.tapError((error) => Effect9.logError("Failed to publish to all-events", { error, streamId: position.streamId })));
var publishEventToAllEventsPubSub = (position, event) => (state) => pipe5(state.allEventsPubSub, publishToAllEventsPubSub(position, event));
var publishToAllEventsWithErrorHandling = (ref) => (position, event) => pipe5(ref, SynchronizedRef2.get, Effect9.flatMap(publishEventToAllEventsPubSub(position, event)), Effect9.mapError(eventStoreError2.write(position.streamId, "Failed to publish to all-events subscribers")));
var createSubscriptionManagerService = (ref) => ({
subscribeToStream: createSubscriptionStream(ref),
unsubscribeFromStream: unsubscribeFromStreamWithErrorHandling(ref),
publishEvent: publishEventWithErrorHandling(ref),
subscribeToAllEvents: subscribeToAllEventsStream(ref),
publishToAllEvents: publishToAllEventsWithErrorHandling(ref)
});
var makeSubscriptionManagerState = (allEventsPubSub) => ({
streams: HashMap2.empty(),
allEventsPubSub
});
var createManagerFromState = (allEventsPubSub) => pipe5(allEventsPubSub, makeSubscriptionManagerState, SynchronizedRef2.make, Effect9.map(createSubscriptionManagerService));
var SubscriptionManagerLive = Layer5.effect(SubscriptionManager, pipe5(PubSub.unbounded(), Effect9.flatMap(createManagerFromState)));
// src/sqlEventStore.ts
class EventRowService extends Effect10.Tag("EventRowService")() {
}
class EventRow extends Schema2.Class("EventRow")({
stream_id: EventStreamId3,
event_number: EventNumber,
event_payload: Schema2.String
}) {
}
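// EventRowService wraps the events table behind SQL resolvers: an ordered resolver for
// inserts and grouped resolvers for reading a single stream or every stream.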
var createEventRowResolvers = (sql) => Effect10.all({
  insertEventRow: SqlResolver.ordered("InsertEventRow", {
    Request: EventRow,
    Result: EventRow,
    execute: (requests) => sql`
      INSERT INTO events ${sql.insert(requests)}
      RETURNING events.*
    `
  }),
  selectAllEventsInStream: SqlResolver.grouped("SelectAllEventRowsInStream", {
    Request: EventStreamId3,
    RequestGroupKey: identity,
    Result: EventRow,
    ResultGroupKey: (row) => row.stream_id,
    execute: (ids) => sql`
      SELECT * FROM events
      WHERE ${sql.in("stream_id", ids)}
      ORDER BY event_number
    `
  }),
  selectAllEvents: SqlResolver.grouped("SelectAllEventRows", {
    Request: Schema2.Null,
    RequestGroupKey: identity,
    Result: EventRow,
    ResultGroupKey: () => null,
    execute: () => sql`
      SELECT * FROM events
      ORDER BY stream_id, event_number
    `
  })
});
var buildEventRowServiceInterface = ({
insertEventRow,
selectAllEventsInStream,
selectAllEvents
}) => ({
insert: insertEventRow.execute,
selectAllEventsInStream: selectAllEventsInStream.execute,
selectAllEvents: selectAllEvents.execute
});
var mapResolversToService = (sql) => pipe6(sql, createEventRowResolvers, Effect10.map(buildEventRowServiceInterface), Effect10.mapError(eventStoreError3.write(undefined, "Failed to initialize event row service")));
var EventRowServiceLive = Layer6.effect(EventRowService, pipe6(SqlClient4.SqlClient, Effect10.flatMap(mapResolversToService)));
var EventSubscriptionServicesLive = Layer6.mergeAll(SubscriptionManagerLive, pipe6(EventStreamTrackerLive(), Layer6.provide(ConnectionManagerLive)), pipe6(NotificationListenerLive, Layer6.provide(ConnectionManagerLive)));
var concatStreams = (liveStream) => (historicalStream) => Stream3.concat(historicalStream, liveStream);
var getHistoricalEventsAndConcatWithLive = (eventRows, from) => (liveStream) => pipe6(from.streamId, eventRows.selectAllEventsInStream, Effect10.map((events) => {
const filteredEvents = events.filter((event) => event.event_number >= from.eventNumber).map((event) => event.event_payload);
return Stream3.fromIterable(filteredEvents);
}), Effect10.map(concatStreams(liveStream)));
var bridgeStreamNotification = (subscriptionManager, streamId, payload) => pipe6(subscriptionManager.publishEvent(streamId, payload.event_payload), Effect10.catchAll((error) => Effect10.logError(`Failed to bridge stream notification for stream ${streamId}`, {
error
})));
var bridgeAllEventsNotification = (subscriptionManager, streamId, payload) => pipe6(subscriptionManager.publishToAllEvents({ streamId, eventNumber: payload.event_number }, payload.event_payload), Effect10.catchAll((error) => Effect10.logError(`Failed to bridge all-events notification for stream ${streamId}`, {
error
})));
var bridgeNotificationEvent = (subscriptionManager) => (notification) => pipe6(notification.isAllEvents, Match.value, Match.when(true, () => bridgeAllEventsNotification(subscriptionManager, notification.streamId, notification.payload)), Match.when(false, () => bridgeStreamNotification(subscriptionManager, notification.streamId, notification.payload)), Match.exhaustive);
var consumeNotifications = (notificationListener, subscriptionManager) => pipe6(notificationListener.notifications, Stream3.mapEffect(bridgeNotificationEvent(subscriptionManager)), Stream3.runDrain, Effect10.fork, Effect10.asVoid);
var startNotificationListener = (notificationListener, subscriptionManager) => pipe6(notificationListener.start, Effect10.andThen(consumeNotifications(notificationListener, subscriptionManager)));
var readHistoricalEvents = (eventRows) => (from) => pipe6(from.streamId, eventRows.selectAllEventsInStream, Effect10.map((events) => {
const filteredEvents = events.filter((event) => event.event_number >= from.eventNumber).map((event) => event.event_payload);
return Stream3.fromIterable(filteredEvents);
}), Effect10.map((stream) => Stream3.mapError(stream, eventStoreError3.read(from.streamId, "Failed to read historical events"))), Effect10.mapError(eventStoreError3.read(from.streamId, "Failed to read historical events")));
var subscribeToLiveStream = (subscriptionManager, streamId) => subscriptionManager.subscribeToStream(streamId);
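// subscribe() is backed by subscribeToStreamWithHistory: it starts LISTEN on the stream's
// channel and attaches to its PubSub first, then reads history from the requested position
// and emits it ahead of the live events.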
var subscribeToStreamWithHistory = (eventRows, subscriptionManager, notificationListener) => (from) => pipe6(from.streamId, notificationListener.listen, Effect10.andThen(subscribeToLiveStream(subscriptionManager, from.streamId)), Effect10.flatMap(getHistoricalEventsAndConcatWithLive(eventRows, from)), Effect10.map((stream) => Stream3.mapError(stream, eventStoreError3.read(from.streamId, "Failed to subscribe to stream"))), Effect10.mapError(eventStoreError3.read(from.streamId, "Failed to subscribe to stream")));
var createWriteError = (streamId, error) => pipe6(error, eventStoreError3.write(streamId, "Failed to append event"));
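// append() enforces optimistic concurrency: the expected position must be exactly one past
// the last stored event (or 0 for an empty stream); on a mismatch it fails with
// ConcurrencyConflictError and nothing is inserted.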
var appendEventToStream = (eventRows) => (end, payload) => pipe6(end.streamId, eventRows.selectAllEventsInStream, Effect10.map((events) => events.length === 0 ? -1 : events[events.length - 1]?.event_number), Effect10.flatMap((last) => Effect10.if(end.eventNumber === 0 && last === -1 || last !== undefined && last === end.eventNumber - 1, {
onTrue: () => Effect10.succeed(end),
onFalse: () => Effect10.fail(new ConcurrencyConflictError({
expectedVersion: end.eventNumber,
actualVersion: (last ?? -1) + 1,
streamId: end.streamId
}))
})), Effect10.flatMap((end2) => eventRows.insert({
event_number: end2.eventNumber,
stream_id: end2.streamId,
event_payload: payload
})), Effect10.map((row) => ({
streamId: row.stream_id,
eventNumber: row.event_number + 1
})), Effect10.tapError((error) => Effect10.logError("Error writing to event store", { error })), Effect10.mapError((error) => error instanceof ConcurrencyConflictError ? error : createWriteError(end.streamId, error)), Effect10.flatMap(Schema2.decode(EventStreamPosition2)));
var makeSqlEventStoreWithSubscriptionManager = (subscriptionManager, notificationListener) => {
return pipe6(EventRowService, Effect10.map((eventRowService) => ({
eventRows: eventRowService,
subscriptionManager,
notificationListener
})), Effect10.tap(({ notificationListener: notificationListener2, subscriptionManager: subscriptionManager2 }) => startNotificationListener(notificationListener2, subscriptionManager2)), Effect10.map(({ eventRows, subscriptionManager: subscriptionManager2, notificationListener: notificationListener2 }) => {
const eventStore = {
append: (to) => {
const sink = Sink.foldEffect(to, () => true, appendEventToStream(eventRows));
return sink;
},
read: readHistoricalEvents(eventRows),
subscribe: subscribeToStreamWithHistory(eventRows, subscriptionManager2, notificationListener2),
subscribeAll: () => subscribeToAllStreams(subscriptionManager2, notificationListener2)
};
return eventStore;
}));
};
var subscribeToAllStreams = (subscriptionManager, notificationListener) => pipe6(notificationListener.listenAll, Effect10.andThen(subscriptionManager.subscribeToAllEvents()));
class SqlEventStore extends Effect10.Tag("SqlEventStore")() {
}
var buildSqlEventStore = ({
subscriptionManager,
notificationListener
}) => makeSqlEventStoreWithSubscriptionManager(subscriptionManager, notificationListener);
var getSqlEventStoreDependencies = {
subscriptionManager: SubscriptionManager,
notificationListener: NotificationListener
};
var SqlEventStoreEffect = Layer6.effect(SqlEventStore, pipe6(Effect10.all(getSqlEventStoreDependencies), Effect10.flatMap(buildSqlEventStore)));
var SqlEventStoreLive = pipe6(SqlEventStoreEffect, Layer6.provide(Layer6.mergeAll(EventSubscriptionServicesLive, EventRowServiceLive)));
var sqlEventStore = pipe6(Effect10.all(getSqlEventStoreDependencies), Effect10.flatMap(buildSqlEventStore));
export {
withConnectionHealth,
sqlEventStore,
makeSqlEventStoreWithSubscriptionManager,
makePgConfigurationLive,
makeChannelName,
loader,
SubscriptionManagerLive,
SubscriptionManager,
SqlEventStoreLive,
SqlEventStore,
PostgresLive,
PgLive,
PgConfigurationLive,
PgConfiguration,
NotificationListenerLive,
NotificationListener,
EventSubscriptionServicesLive,
EventStreamTrackerLive,
EventStreamTracker,
EventRowServiceLive,
EventRowService,
ConnectionManagerLive,
ConnectionManager,
ALL_EVENTS_CHANNEL
};
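A minimal usage sketch of the exports above, assuming the PG_* environment variables read by PgConfigurationLive (PG_USERNAME, PG_PASSWORD, PG_DATABASE, PG_HOST, PG_PORT) are set; the layer and program names below are illustrative and not part of this bundle:

// Hypothetical wiring sketch, not shipped with the package.
import { Effect, Layer, Stream } from "effect";
import {
  PgConfigurationLive,
  PostgresLive,
  SqlEventStoreLive,
  SqlEventStore
} from "@codeforbreakfast/eventsourcing-store-postgres";

// PostgresLive supplies the PgClient and runs the migrations; PgConfigurationLive
// supplies its connection settings from PG_* configuration.
const EventStoreLayer = SqlEventStoreLive.pipe(
  Layer.provide(PostgresLive),
  Layer.provide(PgConfigurationLive)
);

// Tail every event written to the store (runs until interrupted).
const program = Effect.flatMap(SqlEventStore, (store) =>
  Effect.flatMap(store.subscribeAll(), (allEvents) =>
    Stream.runForEach(allEvents, ({ position, event }) =>
      Effect.logInfo(`event ${position.eventNumber} on ${position.streamId}: ${event}`)
    )
  )
);

Effect.runPromise(Effect.provide(program, EventStoreLayer));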