@aws-amplify/datastore
AppSyncLocal support for aws-amplify
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.SyncEngine = exports.ControlMessage = void 0;
const tslib_1 = require("tslib");
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
const utils_1 = require("@aws-amplify/core/internals/utils");
const core_1 = require("@aws-amplify/core");
const rxjs_1 = require("rxjs");
const api_graphql_1 = require("@aws-amplify/api-graphql");
const predicates_1 = require("../predicates");
const types_1 = require("../types");
const util_1 = require("../util");
const datastoreConnectivity_1 = tslib_1.__importDefault(require("./datastoreConnectivity"));
const merger_1 = require("./merger");
const outbox_1 = require("./outbox");
const mutation_1 = require("./processors/mutation");
const subscription_1 = require("./processors/subscription");
const sync_1 = require("./processors/sync");
const utils_2 = require("./utils");
const logger = new core_1.ConsoleLogger('DataStore');
const ownSymbol = Symbol('sync');
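// Control messages emitted on the observable returned by SyncEngine.start().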
var ControlMessage;
(function (ControlMessage) {
ControlMessage["SYNC_ENGINE_STORAGE_SUBSCRIBED"] = "storageSubscribed";
ControlMessage["SYNC_ENGINE_SUBSCRIPTIONS_ESTABLISHED"] = "subscriptionsEstablished";
ControlMessage["SYNC_ENGINE_SYNC_QUERIES_STARTED"] = "syncQueriesStarted";
ControlMessage["SYNC_ENGINE_SYNC_QUERIES_READY"] = "syncQueriesReady";
ControlMessage["SYNC_ENGINE_MODEL_SYNCED"] = "modelSynced";
ControlMessage["SYNC_ENGINE_OUTBOX_MUTATION_ENQUEUED"] = "outboxMutationEnqueued";
ControlMessage["SYNC_ENGINE_OUTBOX_MUTATION_PROCESSED"] = "outboxMutationProcessed";
ControlMessage["SYNC_ENGINE_OUTBOX_STATUS"] = "outboxStatus";
ControlMessage["SYNC_ENGINE_NETWORK_STATUS"] = "networkStatus";
ControlMessage["SYNC_ENGINE_READY"] = "ready";
})(ControlMessage || (exports.ControlMessage = ControlMessage = {}));
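/**
 * Orchestrates synchronization between local storage and AppSync: establishes
 * realtime subscriptions, runs base/delta sync queries, drains the mutation
 * outbox, and reacts to connectivity changes.
 *
 * Instances are normally constructed and started by DataStore itself. The
 * sketch below is illustrative only; the `syncEngine` instance and the
 * `fullSyncInterval` value are assumptions, not prescribed usage.
 *
 * @example
 * // Minimal sketch of consuming the control-message stream from start().
 * const subscription = syncEngine
 *   .start({ fullSyncInterval: 24 * 60 * 60 * 1000 }) // assumed 24h interval, in ms
 *   .subscribe({
 *     next: ({ type }) => {
 *       if (type === ControlMessage.SYNC_ENGINE_READY) {
 *         // storage is hydrated; realtime and outbox processing are running
 *       }
 *     },
 *     error: err => console.error('sync engine failed to start', err),
 *   });
 * // later: subscription.unsubscribe(); await syncEngine.stop();
 */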
class SyncEngine {
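// Returns whether the initial sync for the given model has completed
// (set to true in syncQueriesObservable when the model finishes paginating).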
getModelSyncedStatus(modelConstructor) {
return this.modelSyncedStatus.get(modelConstructor);
}
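// Wires up the mutation outbox, model merger, and the sync, subscription,
// and mutation processors that start() coordinates.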
constructor(schema, namespaceResolver, modelClasses, userModelClasses, storage, modelInstanceCreator, conflictHandler, errorHandler, syncPredicates, amplifyConfig = {}, authModeStrategy, amplifyContext, connectivityMonitor) {
this.schema = schema;
this.namespaceResolver = namespaceResolver;
this.modelClasses = modelClasses;
this.userModelClasses = userModelClasses;
this.storage = storage;
this.modelInstanceCreator = modelInstanceCreator;
this.syncPredicates = syncPredicates;
this.amplifyConfig = amplifyConfig;
this.authModeStrategy = authModeStrategy;
this.amplifyContext = amplifyContext;
this.connectivityMonitor = connectivityMonitor;
this.online = false;
this.modelSyncedStatus = new WeakMap();
this.connectionDisrupted = false;
this.runningProcesses = new utils_1.BackgroundProcessManager();
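// waitForSleepState resolves once syncQueriesObservable enters its sleep
// phase; scheduleSync() awaits it before waking the sync loop early.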
this.waitForSleepState = new Promise(resolve => {
this.syncQueriesObservableStartSleeping = resolve;
});
const MutationEventCtor = this.modelClasses
.MutationEvent;
this.outbox = new outbox_1.MutationEventOutbox(this.schema, MutationEventCtor, modelInstanceCreator, ownSymbol);
this.modelMerger = new merger_1.ModelMerger(this.outbox, ownSymbol);
this.syncQueriesProcessor = new sync_1.SyncProcessor(this.schema, this.syncPredicates, this.amplifyConfig, this.authModeStrategy, errorHandler, this.amplifyContext);
this.subscriptionsProcessor = new subscription_1.SubscriptionProcessor(this.schema, this.syncPredicates, this.amplifyConfig, this.authModeStrategy, errorHandler, this.amplifyContext);
this.mutationsProcessor = new mutation_1.MutationProcessor(this.schema, this.storage, this.userModelClasses, this.outbox, this.modelInstanceCreator, MutationEventCtor, this.amplifyConfig, this.authModeStrategy, errorHandler, conflictHandler, this.amplifyContext);
this.datastoreConnectivity =
this.connectivityMonitor || new datastoreConnectivity_1.default();
}
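/**
 * Starts the engine and returns an Observable of ControlMessage events.
 * On connectivity, it establishes GraphQL subscriptions, runs base/delta
 * sync queries, starts the mutation (outbox) processor, and observes local
 * storage to enqueue outgoing mutations.
 */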
start(params) {
return new rxjs_1.Observable(observer => {
logger.log('starting sync engine...');
let subscriptions = [];
this.runningProcesses.add(async () => {
try {
await this.setupModels(params);
}
catch (err) {
observer.error(err);
return;
}
// This is awaited at the bottom of this block, so we don't need to
// register it explicitly with the process manager; it's already contained.
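// doneStarting() is called once a connectivity event has been fully handled;
// failedStarting() rejects when subscriptions or sync queries fail to start.
// The storage observer and the READY emission below both await this promise.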
const startPromise = new Promise((resolve, reject) => {
const doneStarting = resolve;
const failedStarting = reject;
this.datastoreConnectivity.status().subscribe(async ({ online }) => this.runningProcesses.isOpen &&
this.runningProcesses.add(async (onTerminate) => {
// From offline to online
if (online && !this.online) {
this.online = online;
observer.next({
type: ControlMessage.SYNC_ENGINE_NETWORK_STATUS,
data: {
active: this.online,
},
});
this.stopDisruptionListener = this.startDisruptionListener();
// #region GraphQL Subscriptions
const [ctlSubsObservable, dataSubsObservable] = this.subscriptionsProcessor.start();
try {
await new Promise((_resolve, _reject) => {
onTerminate.then(_reject);
const ctlSubsSubscription = ctlSubsObservable.subscribe({
next: msg => {
if (msg === subscription_1.CONTROL_MSG.CONNECTED) {
_resolve();
}
},
error: err => {
_reject(err);
const handleDisconnect = this.disconnectionHandler();
handleDisconnect(err);
},
});
subscriptions.push(ctlSubsSubscription);
});
}
catch (err) {
observer.error(err);
failedStarting();
return;
}
logger.log('Realtime ready');
observer.next({
type: ControlMessage.SYNC_ENGINE_SUBSCRIPTIONS_ESTABLISHED,
});
// #endregion
// #region Base & Sync queries
try {
await new Promise((_resolve, _reject) => {
const syncQuerySubscription = this.syncQueriesObservable().subscribe({
next: message => {
const { type } = message;
if (type ===
ControlMessage.SYNC_ENGINE_SYNC_QUERIES_READY) {
_resolve();
}
observer.next(message);
},
complete: () => {
_resolve();
},
error: error => {
_reject(error);
},
});
if (syncQuerySubscription) {
subscriptions.push(syncQuerySubscription);
}
});
}
catch (error) {
observer.error(error);
failedStarting();
return;
}
// #endregion
// #region process mutations (outbox)
subscriptions.push(this.mutationsProcessor
.start()
.subscribe(({ modelDefinition, model: item, hasMore }) => this.runningProcesses.add(async () => {
const modelConstructor = this.userModelClasses[modelDefinition.name];
const model = this.modelInstanceCreator(modelConstructor, item);
await this.storage.runExclusive(storage => this.modelMerger.merge(storage, model, modelDefinition));
observer.next({
type: ControlMessage.SYNC_ENGINE_OUTBOX_MUTATION_PROCESSED,
data: {
model: modelConstructor,
element: model,
},
});
observer.next({
type: ControlMessage.SYNC_ENGINE_OUTBOX_STATUS,
data: {
isEmpty: !hasMore,
},
});
}, 'mutation processor event')));
// #endregion
// #region Merge subscriptions buffer
subscriptions.push(dataSubsObservable.subscribe(([_transformerMutationType, modelDefinition, item]) => this.runningProcesses.add(async () => {
const modelConstructor = this.userModelClasses[modelDefinition.name];
const model = this.modelInstanceCreator(modelConstructor, item);
await this.storage.runExclusive(storage => this.modelMerger.merge(storage, model, modelDefinition));
}, 'subscription dataSubsObservable event')));
// #endregion
}
else if (!online) {
this.online = online;
observer.next({
type: ControlMessage.SYNC_ENGINE_NETWORK_STATUS,
data: {
active: this.online,
},
});
subscriptions.forEach(sub => {
sub.unsubscribe();
});
subscriptions = [];
}
doneStarting();
}, 'datastore connectivity event'));
});
this.storage
.observe(null, null, ownSymbol)
.pipe((0, rxjs_1.filter)(({ model }) => {
const modelDefinition = this.getModelDefinition(model);
return modelDefinition.syncable === true;
}))
.subscribe({
next: async ({ opType, model, element, condition }) => this.runningProcesses.add(async () => {
const namespace = this.schema.namespaces[this.namespaceResolver(model)];
const MutationEventConstructor = this.modelClasses
.MutationEvent;
const modelDefinition = this.getModelDefinition(model);
const graphQLCondition = (0, utils_2.predicateToGraphQLCondition)(condition, modelDefinition);
const mutationEvent = (0, utils_2.createMutationInstanceFromModelOperation)(namespace.relationships, this.getModelDefinition(model), opType, model, element, graphQLCondition, MutationEventConstructor, this.modelInstanceCreator);
await this.outbox.enqueue(this.storage, mutationEvent);
observer.next({
type: ControlMessage.SYNC_ENGINE_OUTBOX_MUTATION_ENQUEUED,
data: {
model,
element,
},
});
observer.next({
type: ControlMessage.SYNC_ENGINE_OUTBOX_STATUS,
data: {
isEmpty: false,
},
});
await startPromise;
// Set by the this.datastoreConnectivity.status().subscribe() loop
if (this.online) {
this.mutationsProcessor.resume();
}
}, 'storage event'),
});
observer.next({
type: ControlMessage.SYNC_ENGINE_STORAGE_SUBSCRIBED,
});
// peek() returns undefined when the outbox has no queued mutations.
const outboxIsEmpty = (await this.outbox.peek(this.storage)) === undefined;
observer.next({
type: ControlMessage.SYNC_ENGINE_OUTBOX_STATUS,
data: {
isEmpty: outboxIsEmpty,
},
});
await startPromise;
observer.next({
type: ControlMessage.SYNC_ENGINE_READY,
});
}, 'sync start');
});
}
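// Maps each syncable model definition to [namespace, timestamp to sync from];
// a timestamp of 0 forces a full sync once the full-sync interval has elapsed.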
async getModelsMetadataWithNextFullSync(currentTimeStamp) {
const modelLastSync = new Map((await this.runningProcesses.add(() => this.getModelsMetadata(), 'sync/index getModelsMetadataWithNextFullSync')).map(({ namespace, model, lastSync, lastFullSync, fullSyncInterval }) => {
const nextFullSync = lastFullSync + fullSyncInterval;
const syncFrom = !lastFullSync || nextFullSync < currentTimeStamp
? 0 // perform full sync if expired
: lastSync; // perform delta sync
return [
this.schema.namespaces[namespace].models[model],
[namespace, syncFrom],
];
}));
return modelLastSync;
}
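/**
 * Emits sync-query progress: runs base/delta queries per model, merges
 * results into storage, persists per-model sync metadata, and then sleeps
 * until the next full sync is due (or until scheduleSync() wakes it).
 */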
syncQueriesObservable() {
if (!this.online) {
return (0, rxjs_1.of)({}); // TODO(v6): fix this
}
return new rxjs_1.Observable(observer => {
let syncQueriesSubscription;
this.runningProcesses.isOpen &&
this.runningProcesses.add(async (onTerminate) => {
let terminated = false;
while (!observer.closed && !terminated) {
const count = new WeakMap();
const modelLastSync = await this.getModelsMetadataWithNextFullSync(Date.now());
const paginatingModels = new Set(modelLastSync.keys());
let lastFullSyncStartedAt;
let syncInterval;
let start;
let syncDuration;
let lastStartedAt;
await new Promise((resolve, _reject) => {
if (!this.runningProcesses.isOpen)
resolve();
onTerminate.then(() => {
resolve();
});
syncQueriesSubscription = this.syncQueriesProcessor
.start(modelLastSync)
.subscribe({
next: async ({ namespace, modelDefinition, items, done, startedAt, isFullSync, }) => {
const modelConstructor = this.userModelClasses[modelDefinition.name];
if (!count.has(modelConstructor)) {
count.set(modelConstructor, {
new: 0,
updated: 0,
deleted: 0,
});
start = (0, util_1.getNow)();
lastStartedAt =
lastStartedAt === undefined
? startedAt
: Math.max(lastStartedAt, startedAt);
}
/**
* If there are mutations in the outbox for a given id, those need to be
* merged individually. Otherwise, we can merge them in batches.
*/
await this.storage.runExclusive(async (storage) => {
const idsInOutbox = await this.outbox.getModelIds(storage);
const oneByOne = [];
const page = items.filter(item => {
const itemId = (0, utils_2.getIdentifierValue)(modelDefinition, item);
if (!idsInOutbox.has(itemId)) {
return true;
}
oneByOne.push(item);
return false;
});
const opTypeCount = [];
for (const item of oneByOne) {
const opType = await this.modelMerger.merge(storage, item, modelDefinition);
if (opType !== undefined) {
opTypeCount.push([item, opType]);
}
}
opTypeCount.push(...(await this.modelMerger.mergePage(storage, modelConstructor, page, modelDefinition)));
const counts = count.get(modelConstructor);
opTypeCount.forEach(([, opType]) => {
switch (opType) {
case types_1.OpType.INSERT:
counts.new++;
break;
case types_1.OpType.UPDATE:
counts.updated++;
break;
case types_1.OpType.DELETE:
counts.deleted++;
break;
default:
throw new Error(`Invalid opType ${opType}`);
}
});
});
if (done) {
const { name: modelName } = modelDefinition;
// #region update last sync for type
let modelMetadata = await this.getModelMetadata(namespace, modelName);
const { lastFullSync, fullSyncInterval } = modelMetadata;
syncInterval = fullSyncInterval;
lastFullSyncStartedAt =
lastFullSyncStartedAt === undefined
? lastFullSync
: Math.max(lastFullSyncStartedAt, isFullSync ? startedAt : lastFullSync);
modelMetadata = this.modelClasses
.ModelMetadata.copyOf(modelMetadata, draft => {
draft.lastSync = startedAt;
draft.lastFullSync = isFullSync
? startedAt
: modelMetadata.lastFullSync;
});
await this.storage.save(modelMetadata, undefined, ownSymbol);
// #endregion
const counts = count.get(modelConstructor);
this.modelSyncedStatus.set(modelConstructor, true);
observer.next({
type: ControlMessage.SYNC_ENGINE_MODEL_SYNCED,
data: {
model: modelConstructor,
isFullSync,
isDeltaSync: !isFullSync,
counts,
},
});
paginatingModels.delete(modelDefinition);
if (paginatingModels.size === 0) {
syncDuration = (0, util_1.getNow)() - start;
resolve();
observer.next({
type: ControlMessage.SYNC_ENGINE_SYNC_QUERIES_READY,
});
syncQueriesSubscription.unsubscribe();
}
}
},
error: error => {
observer.error(error);
},
});
observer.next({
type: ControlMessage.SYNC_ENGINE_SYNC_QUERIES_STARTED,
data: {
models: Array.from(paginatingModels).map(({ name }) => name),
},
});
});
// null is coerced to 0 in arithmetic, and undefined yields NaN; both
// produce unexpected behavior here.
// If lastFullSyncStartedAt is null, this is the first sync; assume
// lastStartedAt is also the newest full sync.
let msNextFullSync;
if (!lastFullSyncStartedAt) {
msNextFullSync = syncInterval - syncDuration;
}
else {
msNextFullSync =
lastFullSyncStartedAt +
syncInterval -
(lastStartedAt + syncDuration);
}
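// e.g. with a 24h interval: if the last full sync started at t=0 and this
// delta sync started at t=20h and took 5s, the next full sync is due in
// 24h - (20h + 5s), roughly 4h from now.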
logger.debug(`Next fullSync in ${msNextFullSync / 1000} seconds. (${new Date(Date.now() + msNextFullSync)})`);
// TODO: create `BackgroundProcessManager.sleep()` ... but, need to put
// a lot of thought into what that contract looks like to
// support possible use-cases:
//
// 1. non-cancelable
// 2. cancelable, unsleep on exit()
// 3. cancelable, throw Error on exit()
// 4. cancelable, callback first on exit()?
// 5. ... etc. ? ...
//
// TLDR; this is a lot of complexity here for a sleep(),
// but, it's not clear to me yet how to support an
// extensible, centralized cancelable `sleep()` elegantly.
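// Sleep until the next full sync is due. Termination (onTerminate) or
// scheduleSync() (via unsleepSyncQueriesObservable) wakes the loop early.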
await this.runningProcesses.add(async (onRunningProcessTerminate) => {
let unsleep;
const sleep = new Promise(resolve => {
unsleep = resolve;
setTimeout(unsleep, msNextFullSync);
});
onRunningProcessTerminate.then(() => {
terminated = true;
this.syncQueriesObservableStartSleeping();
unsleep();
});
this.unsleepSyncQueriesObservable = unsleep;
this.syncQueriesObservableStartSleeping();
return sleep;
}, 'syncQueriesObservable sleep');
this.unsleepSyncQueriesObservable = null;
this.waitForSleepState = new Promise(resolve => {
this.syncQueriesObservableStartSleeping = resolve;
});
}
}, 'syncQueriesObservable main');
});
}
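// Returns a handler that notifies the connectivity monitor of a socket
// disconnect when AppSync realtime reports a closed or timed-out connection.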
disconnectionHandler() {
return (msg) => {
// This implementation is tied to the AWSAppSyncRealTimeProvider 'Connection closed' and 'Timeout disconnect' messages
if (api_graphql_1.CONTROL_MSG.CONNECTION_CLOSED === msg ||
api_graphql_1.CONTROL_MSG.TIMEOUT_DISCONNECT === msg) {
this.datastoreConnectivity.socketDisconnected();
}
};
}
unsubscribeConnectivity() {
this.datastoreConnectivity.unsubscribe();
}
/**
 * Stops all subscription activities and resolves when all activities report
 * that they're disconnected, done retrying, etc.
 */
async stop() {
logger.debug('stopping sync engine');
/**
* Gracefully disconnecting subscribers first just prevents *more* work
* from entering the pipelines.
*/
this.unsubscribeConnectivity();
/**
* Stop listening for websocket connection disruption
*/
this.stopDisruptionListener && this.stopDisruptionListener();
/**
 * Aggressively shut down any lingering background processes. Some of this
 * might be semi-redundant with unsubscribing; however, unsubscribing doesn't
 * allow us to wait for settling, whereas `stop()` does.
 */
await this.mutationsProcessor.stop();
await this.subscriptionsProcessor.stop();
await this.datastoreConnectivity.stop();
await this.syncQueriesProcessor.stop();
await this.runningProcesses.close();
await this.runningProcesses.open();
logger.debug('sync engine stopped and ready to restart');
}
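/**
 * Ensures a ModelMetadata record exists for every syncable model, updating
 * fullSyncInterval and resetting lastSync/lastFullSync when the model's
 * sync predicate has changed (forcing a base sync on the next cycle).
 */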
async setupModels(params) {
const { fullSyncInterval } = params;
const ModelMetadataConstructor = this.modelClasses
.ModelMetadata;
const models = [];
let savedModel;
Object.values(this.schema.namespaces).forEach(namespace => {
Object.values(namespace.models)
.filter(({ syncable }) => syncable)
.forEach(model => {
models.push([namespace.name, model]);
if (namespace.name === util_1.USER) {
const modelConstructor = this.userModelClasses[model.name];
this.modelSyncedStatus.set(modelConstructor, false);
}
});
});
const promises = models.map(async ([namespace, model]) => {
const modelMetadata = await this.getModelMetadata(namespace, model.name);
const syncPredicate = predicates_1.ModelPredicateCreator.getPredicates(this.syncPredicates.get(model), false);
const lastSyncPredicate = syncPredicate
? JSON.stringify(syncPredicate)
: null;
if (modelMetadata === undefined) {
[[savedModel]] = await this.storage.save(this.modelInstanceCreator(ModelMetadataConstructor, {
model: model.name,
namespace,
lastSync: null,
fullSyncInterval,
lastFullSync: null,
lastSyncPredicate,
}), undefined, ownSymbol);
}
else {
const prevSyncPredicate = modelMetadata.lastSyncPredicate
? modelMetadata.lastSyncPredicate
: null;
const syncPredicateUpdated = prevSyncPredicate !== lastSyncPredicate;
[[savedModel]] = await this.storage.save(ModelMetadataConstructor.copyOf(modelMetadata, draft => {
draft.fullSyncInterval = fullSyncInterval;
// perform a base sync if the syncPredicate changed in between calls to DataStore.start
// ensures that the local store contains all the data specified by the syncExpression
if (syncPredicateUpdated) {
draft.lastSync = null;
draft.lastFullSync = null;
draft.lastSyncPredicate = lastSyncPredicate;
}
}));
}
return savedModel;
});
const result = {};
for (const modelMetadata of await Promise.all(promises)) {
const { model: modelName } = modelMetadata;
result[modelName] = modelMetadata;
}
return result;
}
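// Loads every persisted ModelMetadata record from local storage.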
async getModelsMetadata() {
const ModelMetadataCtor = this.modelClasses
.ModelMetadata;
const modelsMetadata = await this.storage.query(ModelMetadataCtor);
return modelsMetadata;
}
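// Looks up the ModelMetadata record for a single (namespace, model) pair.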
async getModelMetadata(namespace, model) {
const ModelMetadataCtor = this.modelClasses
.ModelMetadata;
const predicate = predicates_1.ModelPredicateCreator.createFromAST(this.schema.namespaces[util_1.SYNC].models[ModelMetadataCtor.name], { and: [{ namespace: { eq: namespace } }, { model: { eq: model } }] });
const [modelMetadata] = await this.storage.query(ModelMetadataCtor, predicate, {
page: 0,
limit: 1,
});
return modelMetadata;
}
getModelDefinition(modelConstructor) {
const namespaceName = this.namespaceResolver(modelConstructor);
const modelDefinition = this.schema.namespaces[namespaceName].models[modelConstructor.name];
return modelDefinition;
}
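/**
 * Schema for the internal "sync" namespace: MutationEvent backs the outbox
 * and ModelMetadata tracks per-model sync bookkeeping. Neither model is
 * synced to the backend (syncable: false).
 */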
static getNamespace() {
const namespace = {
name: util_1.SYNC,
relationships: {},
enums: {
OperationType: {
name: 'OperationType',
values: ['CREATE', 'UPDATE', 'DELETE'],
},
},
nonModels: {},
models: {
MutationEvent: {
name: 'MutationEvent',
pluralName: 'MutationEvents',
syncable: false,
fields: {
id: {
name: 'id',
type: 'ID',
isRequired: true,
isArray: false,
},
model: {
name: 'model',
type: 'String',
isRequired: true,
isArray: false,
},
data: {
name: 'data',
type: 'String',
isRequired: true,
isArray: false,
},
modelId: {
name: 'modelId',
type: 'String',
isRequired: true,
isArray: false,
},
operation: {
name: 'operation',
type: {
enum: 'OperationType',
},
isArray: false,
isRequired: true,
},
condition: {
name: 'condition',
type: 'String',
isArray: false,
isRequired: true,
},
},
},
ModelMetadata: {
name: 'ModelMetadata',
pluralName: 'ModelsMetadata',
syncable: false,
fields: {
id: {
name: 'id',
type: 'ID',
isRequired: true,
isArray: false,
},
namespace: {
name: 'namespace',
type: 'String',
isRequired: true,
isArray: false,
},
model: {
name: 'model',
type: 'String',
isRequired: true,
isArray: false,
},
lastSync: {
name: 'lastSync',
type: 'Int',
isRequired: false,
isArray: false,
},
lastFullSync: {
name: 'lastFullSync',
type: 'Int',
isRequired: false,
isArray: false,
},
fullSyncInterval: {
name: 'fullSyncInterval',
type: 'Int',
isRequired: true,
isArray: false,
},
lastSyncPredicate: {
name: 'lastSyncPredicate',
type: 'String',
isRequired: false,
isArray: false,
},
},
},
},
};
return namespace;
}
/**
 * Listens for websocket connection disruption.
 *
 * A disruption may indicate a period during which messages from AppSync
 * were missed; a sync is triggered on reconnection to retrieve the missed data.
 */
startDisruptionListener() {
return core_1.Hub.listen('api', (data) => {
if (data.source === 'PubSub' &&
data.payload.event === api_graphql_1.CONNECTION_STATE_CHANGE) {
const connectionState = data.payload.data
.connectionState;
switch (connectionState) {
// Do not need to listen for ConnectionDisruptedPendingNetwork
// Normal network reconnection logic will handle the sync
case api_graphql_1.ConnectionState.ConnectionDisrupted:
this.connectionDisrupted = true;
break;
case api_graphql_1.ConnectionState.Connected:
if (this.connectionDisrupted) {
this.scheduleSync();
}
this.connectionDisrupted = false;
break;
}
}
});
}
/**
 * Schedules a sync to start when syncQueriesObservable enters its sleep state,
 * or starts one immediately if syncQueriesObservable is already sleeping.
 */
scheduleSync() {
return (this.runningProcesses.isOpen &&
this.runningProcesses.add(() => this.waitForSleepState.then(() => {
// unsleepSyncQueriesObservable will be set if waitForSleepState has resolved
this.unsleepSyncQueriesObservable();
})));
}
}
exports.SyncEngine = SyncEngine;
//# sourceMappingURL=index.js.map