UNPKG

wallet-storage-client

Version:
223 lines 10.4 kB
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.SyncState = void 0; const _1 = require("."); const index_client_1 = require("../../../index.client"); class SyncState extends _1.EntityBase { constructor(api) { const now = new Date(); super(api || { syncStateId: 0, created_at: now, updated_at: now, userId: 0, storageIdentityKey: '', storageName: '', init: false, refNum: '', status: 'unknown', when: undefined, errorLocal: undefined, errorOther: undefined, satoshis: undefined, syncMap: JSON.stringify((0, _1.createSyncMap)()) }); this.errorLocal = this.api.errorLocal ? JSON.parse(this.api.errorLocal) : undefined; this.errorOther = this.api.errorOther ? JSON.parse(this.api.errorOther) : undefined; this.syncMap = JSON.parse(this.api.syncMap); this.validateSyncMap(this.syncMap); } validateSyncMap(sm) { for (const key of Object.keys(sm)) { const esm = sm[key]; if (typeof esm.maxUpdated_at === 'string') esm.maxUpdated_at = new Date(esm.maxUpdated_at); } } static async fromStorage(storage, userIdentityKey, remoteSettings) { const { user } = (0, index_client_1.verifyTruthy)(await storage.findOrInsertUser(userIdentityKey)); let { syncState: api } = (0, index_client_1.verifyTruthy)(await storage.findOrInsertSyncStateAuth({ userId: user.userId, identityKey: userIdentityKey }, remoteSettings.storageIdentityKey, remoteSettings.storageName)); const ss = new SyncState(api); return ss; } /** * Handles both insert and update based on id value: zero indicates insert. * @param storage * @param notSyncMap if not new and true, excludes updating syncMap in storage. * @param trx */ async updateStorage(storage, notSyncMap, trx) { this.updated_at = new Date(); this.updateApi(notSyncMap && this.id > 0); if (this.id === 0) { await storage.insertSyncState(this.api); } else { const update = { ...this.api }; if (notSyncMap) delete update.syncMap; delete update.created_at; await storage.updateSyncState((0, index_client_1.verifyId)(this.id), update, trx); } } updateApi(notSyncMap) { this.api.errorLocal = this.apiErrorLocal; this.api.errorOther = this.apiErrorOther; if (!notSyncMap) this.api.syncMap = this.apiSyncMap; } // Pass through api properties set created_at(v) { this.api.created_at = v; } get created_at() { return this.api.created_at; } set updated_at(v) { this.api.updated_at = v; } get updated_at() { return this.api.updated_at; } set userId(v) { this.api.userId = v; } get userId() { return this.api.userId; } set storageIdentityKey(v) { this.api.storageIdentityKey = v; } get storageIdentityKey() { return this.api.storageIdentityKey; } set storageName(v) { this.api.storageName = v; } get storageName() { return this.api.storageName; } set init(v) { this.api.init = v; } get init() { return this.api.init; } set refNum(v) { this.api.refNum = v; } get refNum() { return this.api.refNum; } set status(v) { this.api.status = v; } get status() { return this.api.status; } set when(v) { this.api.when = v; } get when() { return this.api.when; } set satoshis(v) { this.api.satoshis = v; } get satoshis() { return this.api.satoshis; } get apiErrorLocal() { return this.errorToString(this.errorLocal); } get apiErrorOther() { return this.errorToString(this.errorOther); } get apiSyncMap() { return JSON.stringify(this.syncMap); } get id() { return this.api.syncStateId; } set id(id) { this.api.syncStateId = id; } get entityName() { return 'table.SyncState'; } get entityTable() { return 'sync_states'; } static mergeIdMap(fromMap, toMap) { for (const [key, value] of Object.entries(fromMap)) { const fromValue = fromMap[key]; const toValue = toMap[key]; if (toValue !== undefined && toValue !== fromValue) throw new index_client_1.sdk.WERR_INVALID_PARAMETER('syncMap', `an unmapped id or the same mapped id. ${key} maps to ${toValue} not equal to ${fromValue}`); if (toValue === undefined) toMap[key] = value; } } /** * Merge additions to the syncMap * @param iSyncMap */ mergeSyncMap(iSyncMap) { SyncState.mergeIdMap(iSyncMap.provenTx.idMap, this.syncMap.provenTx.idMap); SyncState.mergeIdMap(iSyncMap.outputBasket.idMap, this.syncMap.outputBasket.idMap); SyncState.mergeIdMap(iSyncMap.transaction.idMap, this.syncMap.transaction.idMap); SyncState.mergeIdMap(iSyncMap.provenTxReq.idMap, this.syncMap.provenTxReq.idMap); SyncState.mergeIdMap(iSyncMap.txLabel.idMap, this.syncMap.txLabel.idMap); SyncState.mergeIdMap(iSyncMap.output.idMap, this.syncMap.output.idMap); SyncState.mergeIdMap(iSyncMap.outputTag.idMap, this.syncMap.outputTag.idMap); SyncState.mergeIdMap(iSyncMap.certificate.idMap, this.syncMap.certificate.idMap); SyncState.mergeIdMap(iSyncMap.commission.idMap, this.syncMap.commission.idMap); } /** * Eliminate any properties besides code and description */ errorToString(e) { if (!e) return undefined; const es = { code: e.code, description: e.description, stack: e.stack }; return JSON.stringify(es); } equals(ei, syncMap) { return false; } async mergeNew(storage, userId, syncMap, trx) { } async mergeExisting(storage, since, ei, syncMap, trx) { return false; } makeRequestSyncChunkArgs(forIdentityKey, forStorageIdentityKey, maxRoughSize, maxItems) { const a = { identityKey: forIdentityKey, maxRoughSize: maxRoughSize || 20000000, maxItems: maxItems || 1000, offsets: [], since: this.when, fromStorageIdentityKey: this.storageIdentityKey, toStorageIdentityKey: forStorageIdentityKey }; for (const ess of [ this.syncMap.provenTx, this.syncMap.outputBasket, this.syncMap.outputTag, this.syncMap.txLabel, this.syncMap.transaction, this.syncMap.output, this.syncMap.txLabelMap, this.syncMap.outputTagMap, this.syncMap.certificate, this.syncMap.certificateField, this.syncMap.commission, this.syncMap.provenTxReq, ]) { a.offsets.push({ name: ess.entityName, offset: ess.count }); } return a; } async processSyncChunk(writer, args, chunk) { var _a; const mes = [ new _1.MergeEntity(chunk.provenTxs, index_client_1.entity.ProvenTx.mergeFind, this.syncMap.provenTx), new _1.MergeEntity(chunk.outputBaskets, index_client_1.entity.OutputBasket.mergeFind, this.syncMap.outputBasket), new _1.MergeEntity(chunk.outputTags, index_client_1.entity.OutputTag.mergeFind, this.syncMap.outputTag), new _1.MergeEntity(chunk.txLabels, index_client_1.entity.TxLabel.mergeFind, this.syncMap.txLabel), new _1.MergeEntity(chunk.transactions, index_client_1.entity.Transaction.mergeFind, this.syncMap.transaction), new _1.MergeEntity(chunk.outputs, index_client_1.entity.Output.mergeFind, this.syncMap.output), new _1.MergeEntity(chunk.txLabelMaps, index_client_1.entity.TxLabelMap.mergeFind, this.syncMap.txLabelMap), new _1.MergeEntity(chunk.outputTagMaps, index_client_1.entity.OutputTagMap.mergeFind, this.syncMap.outputTagMap), new _1.MergeEntity(chunk.certificates, index_client_1.entity.Certificate.mergeFind, this.syncMap.certificate), new _1.MergeEntity(chunk.certificateFields, index_client_1.entity.CertificateField.mergeFind, this.syncMap.certificateField), new _1.MergeEntity(chunk.commissions, index_client_1.entity.Commission.mergeFind, this.syncMap.commission), new _1.MergeEntity(chunk.provenTxReqs, index_client_1.entity.ProvenTxReq.mergeFind, this.syncMap.provenTxReq), ]; let updates = 0; let inserts = 0; let maxUpdated_at = undefined; let done = true; // Merge User if (chunk.user) { const ei = chunk.user; const { found, eo } = await index_client_1.entity.User.mergeFind(writer, this.userId, ei); if (found) { if (await eo.mergeExisting(writer, args.since, ei)) { maxUpdated_at = (0, index_client_1.maxDate)(maxUpdated_at, ei.updated_at); updates++; } } } // Merge everything else... for (const me of mes) { const r = await me.merge(args.since, writer, this.userId, this.syncMap); // The counts become the offsets for the next chunk. me.esm.count += (((_a = me.stateArray) === null || _a === void 0 ? void 0 : _a.length) || 0); updates += r.updates; inserts += r.inserts; maxUpdated_at = (0, index_client_1.maxDate)(maxUpdated_at, me.esm.maxUpdated_at); // If any entity type either did not report results or if there were at least one, then we aren't done. if (me.stateArray === undefined || me.stateArray.length > 0) done = false; //if (me.stateArray !== undefined && me.stateArray.length > 0) // console.log(`merged ${me.stateArray?.length} ${me.esm.entityName} ${r.inserts} inserted, ${r.updates} updated`); } if (done) { // Next batch starts further in the future with offsets of zero. this.when = maxUpdated_at; for (const me of mes) me.esm.count = 0; } await this.updateStorage(writer, false); return { done, maxUpdated_at, updates, inserts }; } } exports.SyncState = SyncState; //# sourceMappingURL=SyncState.js.map