UNPKG

mongo2mongo

Version:

Sync one MongoDB collection to another MongoDB collection

99 lines (98 loc) 3.45 kB
import _ from 'lodash/fp.js';
import * as mongoChangeStream from 'mongochangestream';

/**
 * Create a syncer that copies one MongoDB collection to another.
 *
 * @param {object} redis - Redis client handed to mongochangestream (used for
 *   resume-token / scan-state bookkeeping — see mongochangestream docs).
 * @param {object} source - Source MongoDB collection.
 * @param {object} destination - Destination MongoDB collection (must support
 *   `bulkWrite`).
 * @param {object} [options] - Options forwarded to mongochangestream.
 * @param {Function} [options.mapper] - Transform applied to each document
 *   before it is written to the destination. Defaults to identity.
 * @returns {object} The mongochangestream sync object, with
 *   `processChangeStream` and `runInitialScan` wrapped to write to
 *   `destination`, plus the shared `emitter`.
 */
export const initSync = (redis, source, destination, options = {}) => {
  const mapper = options.mapper || _.identity;
  // Delegate change-stream and initial-scan plumbing to mongochangestream.
  const sync = mongoChangeStream.initSync(redis, source, options);
  const emitter = sync.emitter;
  const emit = (event, data) => {
    emitter.emit(event, { type: event, ...data });
  };

  /**
   * Translate a batch of change stream events into bulk write operations and
   * apply them to the destination, preserving event order.
   */
  const processChangeStreamRecords = async (docs) => {
    const operations = [];
    for (const doc of docs) {
      if (doc.operationType === 'insert') {
        operations.push({
          insertOne: {
            document: mapper(doc.fullDocument),
          },
        });
      } else if (
        doc.operationType === 'update' ||
        doc.operationType === 'replace'
      ) {
        // fullDocument may be absent (e.g. the doc was deleted before the
        // change-stream lookup ran); fall back to an empty replacement.
        const replacement = doc.fullDocument ? mapper(doc.fullDocument) : {};
        operations.push({
          replaceOne: {
            filter: { _id: doc.documentKey._id },
            replacement,
            upsert: true,
          },
        });
      } else if (doc.operationType === 'delete') {
        operations.push({
          deleteOne: {
            filter: { _id: doc.documentKey._id },
          },
        });
      }
    }
    // FIX: the driver rejects an empty bulk batch. A batch can legitimately
    // contain only unhandled event types (drop/rename/invalidate), which
    // would previously make bulkWrite throw — skip instead.
    if (operations.length === 0) {
      return;
    }
    const result = await destination.bulkWrite(operations, {
      // Operations must be ordered to preserve the event sequence.
      ordered: true,
    });
    // Sum the per-kind success counters reported by bulkWrite.
    const numSuccess = _.flow(
      _.pick([
        'insertedCount',
        'modifiedCount',
        'deletedCount',
        'upsertedCount',
      ]),
      Object.values,
      _.sum
    )(result);
    const numFailed = operations.length - numSuccess;
    emit('process', {
      success: numSuccess,
      fail: numFailed,
      changeStream: true,
    });
  };

  /**
   * Insert a batch of initial-scan records into the destination.
   */
  const processRecords = async (docs) => {
    // FIX: guard against an empty batch — bulkWrite rejects empty arrays.
    if (docs.length === 0) {
      return;
    }
    const operations = docs.map(({ fullDocument }) => ({
      insertOne: { document: mapper(fullDocument) },
    }));
    // Initial-scan inserts are independent, so unordered writes are safe.
    const result = await destination.bulkWrite(operations, { ordered: false });
    const numSuccess = result.insertedCount;
    const numFailed = operations.length - numSuccess;
    emit('process', {
      success: numSuccess,
      fail: numFailed,
      initialScan: true,
    });
  };

  const processChangeStream = (options) =>
    sync.processChangeStream(processChangeStreamRecords, {
      ...options,
      // updateDescription is never read here (updates are applied as whole-
      // document replacements), so strip it from events before any
      // caller-supplied pipeline stages run.
      pipeline: [
        { $unset: ['updateDescription'] },
        ...(options?.pipeline ?? []),
      ],
    });

  const runInitialScan = (options) =>
    sync.runInitialScan(processRecords, options);

  return {
    ...sync,
    /**
     * Process MongoDB change stream for the given collection.
     * `options.batchSize` defaults to 500.
     * `options.timeout` defaults to 30 seconds.
     */
    processChangeStream,
    /**
     * Run initial collection scan. `options.batchSize` defaults to 500.
     * Sorting defaults to `_id`.
     */
    runInitialScan,
    emitter,
  };
};