UNPKG

pouchdb

Version:

PouchDB is a pocket-sized database

1,818 lines (1,606 loc) 301 kB
'use strict';

// Unwrap an interop namespace object: prefer its `default` export when
// present, otherwise use the module object itself.
function _interopDefault (ex) {
  return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex;
}

var Md5 = _interopDefault(require('spark-md5'));
var uuid = require('uuid');
var vuvuzela = _interopDefault(require('vuvuzela'));
var EE = _interopDefault(require('events'));

// True for the binary payload types PouchDB passes through untouched
// (attachment bodies): ArrayBuffer and Blob, when the platform has them.
function isBinaryObject(object) {
  return (typeof ArrayBuffer !== 'undefined' && object instanceof ArrayBuffer) ||
    (typeof Blob !== 'undefined' && object instanceof Blob);
}

/**
 * @template {ArrayBuffer | Blob} T
 * @param {T} object
 * @returns {T} a byte-for-byte copy of the given binary object
 */
function cloneBinaryObject(object) {
  if (object instanceof ArrayBuffer) {
    return object.slice(0);
  }
  // Blob: slice the full range, carrying the MIME type along.
  return object.slice(0, object.size, object.type);
}

// most of this is borrowed from lodash.isPlainObject:
// https://github.com/fis-components/lodash.isplainobject/
// blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js
var funcToString = Function.prototype.toString;
var objectCtorString = funcToString.call(Object);

// A "plain" object is one whose prototype chain ends at Object.prototype
// (or that has no prototype at all).
function isPlainObject(value) {
  var proto = Object.getPrototypeOf(value);
  /* istanbul ignore if */
  if (proto === null) {
    // e.g. Object.create(null); not sure when this happens, but I guess it can
    return true;
  }
  var Ctor = proto.constructor;
  if (typeof Ctor != 'function' || !(Ctor instanceof Ctor)) {
    return false;
  }
  return funcToString.call(Ctor) == objectCtorString;
}

// Deep-copy a value. Primitives pass through; arrays and plain objects are
// copied recursively; Dates are stringified; binary objects are duplicated;
// anything else (class instances, Workers, ...) is returned by reference.
function clone(object) {
  if (!object || typeof object !== 'object') {
    return object;
  }

  if (Array.isArray(object)) {
    var arrayCopy = [];
    for (var idx = 0, count = object.length; idx < count; idx++) {
      arrayCopy[idx] = clone(object[idx]);
    }
    return arrayCopy;
  }

  // special case: to avoid inconsistencies between IndexedDB
  // and other backends, we automatically stringify Dates
  if (object instanceof Date && isFinite(object)) {
    return object.toISOString();
  }

  if (isBinaryObject(object)) {
    return cloneBinaryObject(object);
  }

  if (!isPlainObject(object)) {
    return object; // don't clone objects like Workers
  }

  var objectCopy = {};
  for (var key in object) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(object, key)) {
      var clonedValue = clone(object[key]);
      // drop keys whose cloned value is undefined, mirroring JSON semantics
      if (typeof clonedValue !== 'undefined') {
        objectCopy[key] = clonedValue;
      }
    }
  }
  return objectCopy;
}

// Wrap `fun` so a second invocation throws instead of running twice.
function once(fun) {
  var called = false;
  return function (...args) {
    /* istanbul ignore if */
    if (called) {
      // this is a smoke test and should never actually happen
      throw new Error('once called more than once');
    }
    called = true;
    fun.apply(this, args);
  };
}

// Convert a node-callback-style function into one that supports BOTH a
// trailing callback and a returned Promise.
function toPromise(func) {
  // create the function we will be returning
  return function (...args) {
    // Clone arguments so the callee cannot mutate the caller's objects.
    args = clone(args);
    var self = this;
    // if the last argument is a function, assume it's a callback
    var usedCB = (typeof args[args.length - 1] === 'function') ? args.pop() : false;
    var promise = new Promise(function (fulfill, reject) {
      var resp;
      try {
        // create a callback for this invocation
        var callback = once(function (err, mesg) {
          if (err) {
            reject(err);
          } else {
            fulfill(mesg);
          }
        });
        // apply the function in the orig context
        args.push(callback);
        resp = func.apply(self, args);
        // if the callee itself returned a promise, adopt it
        if (resp && typeof resp.then === 'function') {
          fulfill(resp);
        }
      } catch (e) {
        reject(e);
      }
    });
    // if there is a callback, call it back
    if (usedCB) {
      promise.then(function (result) {
        usedCB(null, result);
      }, usedCB);
    }
    return promise;
  };
}

// Emit 'debug' events on the PouchDB constructor for an API call, and wrap
// the call's callback so the response is logged too. Active only when
// someone is listening for 'debug'.
function logApiCall(self, name, args) {
  /* istanbul ignore if */
  if (self.constructor.listeners('debug').length) {
    var logArgs = ['api', self.name, name];
    for (var i = 0; i < args.length - 1; i++) {
      logArgs.push(args[i]);
    }
    self.constructor.emit('debug', logArgs);

    // override the callback itself to log the response
    var origCallback = args[args.length - 1];
    args[args.length - 1] = function (err, res) {
      var responseArgs = ['api', self.name, name];
      responseArgs = responseArgs.concat(err ? ['error', err] : ['success', res]);
      self.constructor.emit('debug', responseArgs);
      origCallback(err, res);
    };
  }
}

// Wrap an adapter method: reject when the db is closed/destroyed, log the
// call, and defer it onto the task queue until the adapter is ready.
function adapterFun(name, callback) {
  return toPromise(function (...args) {
    if (this._closed) {
      return Promise.reject(new Error('database is closed'));
    }
    if (this._destroyed) {
      return Promise.reject(new Error('database is destroyed'));
    }
    var self = this;
    logApiCall(self, name, args);
    if (!this.taskqueue.isReady) {
      // queue the call; re-dispatch through self[name] once ready
      return new Promise(function (fulfill, reject) {
        self.taskqueue.addTask(function (failed) {
          if (failed) {
            reject(failed);
          } else {
            fulfill(self[name].apply(self, args));
          }
        });
      });
    }
    return callback.apply(this, args);
  });
}

// like underscore/lodash _.pick()
function pick(obj, arr) {
  var res = {};
  arr.forEach(function (prop) {
    if (prop in obj) {
      res[prop] = obj[prop];
    }
  });
  return res;
}

// Most browsers throttle concurrent requests at 6, so it's silly
// to shim _bulk_get by trying to launch potentially hundreds of requests
// and then letting the majority time out. We can handle this ourselves.
var MAX_NUM_CONCURRENT_REQUESTS = 6; function identityFunction(x) { return x; } function formatResultForOpenRevsGet(result) { return [{ ok: result }]; } // shim for P/CouchDB adapters that don't directly implement _bulk_get function bulkGet(db, opts, callback) { var requests = opts.docs; // consolidate into one request per doc if possible var requestsById = new Map(); requests.forEach(function (request) { if (requestsById.has(request.id)) { requestsById.get(request.id).push(request); } else { requestsById.set(request.id, [request]); } }); var numDocs = requestsById.size; var numDone = 0; var perDocResults = new Array(numDocs); function collapseResultsAndFinish() { var results = []; perDocResults.forEach(function (res) { res.docs.forEach(function (info) { results.push({ id: res.id, docs: [info] }); }); }); callback(null, {results}); } function checkDone() { if (++numDone === numDocs) { collapseResultsAndFinish(); } } function gotResult(docIndex, id, docs) { perDocResults[docIndex] = {id, docs}; checkDone(); } var allRequests = []; requestsById.forEach(function (value, key) { allRequests.push(key); }); var i = 0; function nextBatch() { if (i >= allRequests.length) { return; } var upTo = Math.min(i + MAX_NUM_CONCURRENT_REQUESTS, allRequests.length); var batch = allRequests.slice(i, upTo); processBatch(batch, i); i += batch.length; } function processBatch(batch, offset) { batch.forEach(function (docId, j) { var docIdx = offset + j; var docRequests = requestsById.get(docId); // just use the first request as the "template" // TODO: The _bulk_get API allows for more subtle use cases than this, // but for now it is unlikely that there will be a mix of different // "atts_since" or "attachments" in the same request, since it's just // replicate.js that is using this for the moment. // Also, atts_since is aspirational, since we don't support it yet. 
var docOpts = pick(docRequests[0], ['atts_since', 'attachments']); docOpts.open_revs = docRequests.map(function (request) { // rev is optional, open_revs disallowed return request.rev; }); // remove falsey / undefined revisions docOpts.open_revs = docOpts.open_revs.filter(identityFunction); var formatResult = identityFunction; if (docOpts.open_revs.length === 0) { delete docOpts.open_revs; // when fetching only the "winning" leaf, // transform the result so it looks like an open_revs // request formatResult = formatResultForOpenRevsGet; } // globally-supplied options ['revs', 'attachments', 'binary', 'ajax', 'latest'].forEach(function (param) { if (param in opts) { docOpts[param] = opts[param]; } }); db.get(docId, docOpts, function (err, res) { var result; /* istanbul ignore if */ if (err) { result = [{error: err}]; } else { result = formatResult(res); } gotResult(docIdx, docId, result); nextBatch(); }); }); } nextBatch(); } var hasLocal; try { localStorage.setItem('_pouch_check_localstorage', 1); hasLocal = !!localStorage.getItem('_pouch_check_localstorage'); } catch (e) { hasLocal = false; } function hasLocalStorage() { return hasLocal; } const nextTick = typeof queueMicrotask === "function" ? 
queueMicrotask : function nextTick(fn) { Promise.resolve().then(fn); }; class Changes extends EE { constructor() { super(); this._listeners = {}; if (hasLocalStorage()) { addEventListener("storage", (e) => { this.emit(e.key); }); } } addListener(dbName, id, db, opts) { if (this._listeners[id]) { return; } var inprogress = false; var self = this; function eventFunction() { if (!self._listeners[id]) { return; } if (inprogress) { inprogress = 'waiting'; return; } inprogress = true; var changesOpts = pick(opts, [ 'style', 'include_docs', 'attachments', 'conflicts', 'filter', 'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs' ]); function onError() { inprogress = false; } db.changes(changesOpts).on('change', function (c) { if (c.seq > opts.since && !opts.cancelled) { opts.since = c.seq; opts.onChange(c); } }).on('complete', function () { if (inprogress === 'waiting') { nextTick(eventFunction); } inprogress = false; }).on('error', onError); } this._listeners[id] = eventFunction; this.on(dbName, eventFunction); } removeListener(dbName, id) { if (!(id in this._listeners)) { return; } super.removeListener(dbName, this._listeners[id]); delete this._listeners[id]; } notifyLocalWindows(dbName) { //do a useless change on a storage thing //in order to get other windows's listeners to activate if (hasLocalStorage()) { localStorage[dbName] = (localStorage[dbName] === "a") ? 
"b" : "a"; } } notify(dbName) { this.emit(dbName); this.notifyLocalWindows(dbName); } } function guardedConsole(method) { /* istanbul ignore else */ if (typeof console !== 'undefined' && typeof console[method] === 'function') { var args = Array.prototype.slice.call(arguments, 1); console[method].apply(console, args); } } function randomNumber(min, max) { var maxTimeout = 600000; // Hard-coded default of 10 minutes min = parseInt(min, 10) || 0; max = parseInt(max, 10); if (max !== max || max <= min) { max = (min || 1) << 1; //doubling } else { max = max + 1; } // In order to not exceed maxTimeout, pick a random value between half of maxTimeout and maxTimeout if (max > maxTimeout) { min = maxTimeout >> 1; // divide by two max = maxTimeout; } var ratio = Math.random(); var range = max - min; return ~~(range * ratio + min); // ~~ coerces to an int, but fast. } function defaultBackOff(min) { var max = 0; if (!min) { max = 2000; } return randomNumber(min, max); } // designed to give info to browser users, who are disturbed // when they see http errors in the console function explainError(status, str) { guardedConsole('info', 'The above ' + status + ' is totally normal. 
' + str); } class PouchError extends Error { constructor(status, error, reason) { super(); this.status = status; this.name = error; this.message = reason; this.error = true; } toString() { return JSON.stringify({ status: this.status, name: this.name, message: this.message, reason: this.reason }); } } var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect."); var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'"); var MISSING_DOC = new PouchError(404, 'not_found', 'missing'); var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict'); var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string'); var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts'); var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.'); var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open'); var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error'); var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid'); var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid'); var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid'); var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member'); var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request'); var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object'); var DB_MISSING = new PouchError(404, 'not_found', 'Database not found'); var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown'); var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown'); var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown'); var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update 
function'); var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format'); var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.'); var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found'); var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid'); function createError(error, reason) { function CustomPouchError(reason) { // inherit error properties from our parent error manually // so as to allow proper JSON parsing. var names = Object.getOwnPropertyNames(error); for (var i = 0, len = names.length; i < len; i++) { if (typeof error[names[i]] !== 'function') { this[names[i]] = error[names[i]]; } } if (this.stack === undefined) { this.stack = (new Error()).stack; } if (reason !== undefined) { this.reason = reason; } } CustomPouchError.prototype = PouchError.prototype; return new CustomPouchError(reason); } function generateErrorFromResponse(err) { if (typeof err !== 'object') { var data = err; err = UNKNOWN_ERROR; err.data = data; } if ('error' in err && err.error === 'conflict') { err.name = 'conflict'; err.status = 409; } if (!('name' in err)) { err.name = err.error || 'unknown'; } if (!('status' in err)) { err.status = 500; } if (!('message' in err)) { err.message = err.message || err.reason; } if (!('stack' in err)) { err.stack = (new Error()).stack; } return err; } function tryFilter(filter, doc, req) { try { return !filter(doc, req); } catch (err) { var msg = 'Filter function threw: ' + err.toString(); return createError(BAD_REQUEST, msg); } } function filterChange(opts) { var req = {}; var hasFilter = opts.filter && typeof opts.filter === 'function'; req.query = opts.query_params; return function filter(change) { if (!change.doc) { // CSG sends events on the changes feed that don't have documents, // this hack makes a whole lot of existing code robust. 
change.doc = {}; } var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req); if (typeof filterReturn === 'object') { return filterReturn; } if (filterReturn) { return false; } if (!opts.include_docs) { delete change.doc; } else if (!opts.attachments) { for (var att in change.doc._attachments) { /* istanbul ignore else */ if (Object.prototype.hasOwnProperty.call(change.doc._attachments, att)) { change.doc._attachments[att].stub = true; } } } return true; }; } // shim for Function.prototype.name, // Determine id an ID is valid // - invalid IDs begin with an underescore that does not begin '_design' or // '_local' // - any other string value is a valid id // Returns the specific error object for each case function invalidIdError(id) { var err; if (!id) { err = createError(MISSING_ID); } else if (typeof id !== 'string') { err = createError(INVALID_ID); } else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) { err = createError(RESERVED_ID); } if (err) { throw err; } } // Checks if a PouchDB object is "remote" or not. This is function isRemote(db) { if (typeof db._remote === 'boolean') { return db._remote; } /* istanbul ignore next */ if (typeof db.type === 'function') { guardedConsole('warn', 'db.type() is deprecated and will be removed in ' + 'a future version of PouchDB'); return db.type() === 'http'; } /* istanbul ignore next */ return false; } function listenerCount(ee, type) { return 'listenerCount' in ee ? ee.listenerCount(type) : EE.listenerCount(ee, type); } function parseDesignDocFunctionName(s) { if (!s) { return null; } var parts = s.split('/'); if (parts.length === 2) { return parts; } if (parts.length === 1) { return [s, s]; } return null; } function normalizeDesignDocFunctionName(s) { var normalized = parseDesignDocFunctionName(s); return normalized ? 
normalized.join('/') : null; } // originally parseUri 1.2.2, now patched by us // (c) Steven Levithan <stevenlevithan.com> // MIT License var keys = ["source", "protocol", "authority", "userInfo", "user", "password", "host", "port", "relative", "path", "directory", "file", "query", "anchor"]; var qName ="queryKey"; var qParser = /(?:^|&)([^&=]*)=?([^&]*)/g; // use the "loose" parser /* eslint no-useless-escape: 0 */ var parser = /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/; function parseUri(str) { var m = parser.exec(str); var uri = {}; var i = 14; while (i--) { var key = keys[i]; var value = m[i] || ""; var encoded = ['user', 'password'].indexOf(key) !== -1; uri[key] = encoded ? decodeURIComponent(value) : value; } uri[qName] = {}; uri[keys[12]].replace(qParser, function ($0, $1, $2) { if ($1) { uri[qName][$1] = $2; } }); return uri; } // Based on https://github.com/alexdavid/scope-eval v0.0.3 // (source: https://unpkg.com/scope-eval@0.0.3/scope_eval.js) // This is basically just a wrapper around new Function() function scopeEval(source, scope) { var keys = []; var values = []; for (var key in scope) { if (Object.prototype.hasOwnProperty.call(scope, key)) { keys.push(key); values.push(scope[key]); } } keys.push(source); return Function.apply(null, keys).apply(null, values); } // this is essentially the "update sugar" function from daleharvey/pouchdb#1388 // the diffFun tells us what delta to apply to the doc. 
it either returns // the doc, or false if it doesn't need to do an update after all function upsert(db, docId, diffFun) { return db.get(docId) .catch(function (err) { /* istanbul ignore next */ if (err.status !== 404) { throw err; } return {}; }) .then(function (doc) { // the user might change the _rev, so save it for posterity var docRev = doc._rev; var newDoc = diffFun(doc); if (!newDoc) { // if the diffFun returns falsy, we short-circuit as // an optimization return {updated: false, rev: docRev}; } // users aren't allowed to modify these values, // so reset them here newDoc._id = docId; newDoc._rev = docRev; return tryAndPut(db, newDoc, diffFun); }); } function tryAndPut(db, doc, diffFun) { return db.put(doc).then(function (res) { return { updated: true, rev: res.rev }; }, function (err) { /* istanbul ignore next */ if (err.status !== 409) { throw err; } return upsert(db, doc._id, diffFun); }); } var thisAtob = function (str) { return atob(str); }; var thisBtoa = function (str) { return btoa(str); }; // Abstracts constructing a Blob object, so it also works in older // browsers that don't support the native Blob constructor (e.g. // old QtWebKit versions, Android < 4.4). function createBlob(parts, properties) { /* global BlobBuilder,MSBlobBuilder,MozBlobBuilder,WebKitBlobBuilder */ parts = parts || []; properties = properties || {}; try { return new Blob(parts, properties); } catch (e) { if (e.name !== "TypeError") { throw e; } var Builder = typeof BlobBuilder !== 'undefined' ? BlobBuilder : typeof MSBlobBuilder !== 'undefined' ? MSBlobBuilder : typeof MozBlobBuilder !== 'undefined' ? 
MozBlobBuilder : WebKitBlobBuilder; var builder = new Builder(); for (var i = 0; i < parts.length; i += 1) { builder.append(parts[i]); } return builder.getBlob(properties.type); } } // From http://stackoverflow.com/questions/14967647/ (continues on next line) // encode-decode-image-with-base64-breaks-image (2013-04-21) function binaryStringToArrayBuffer(bin) { var length = bin.length; var buf = new ArrayBuffer(length); var arr = new Uint8Array(buf); for (var i = 0; i < length; i++) { arr[i] = bin.charCodeAt(i); } return buf; } function binStringToBluffer(binString, type) { return createBlob([binaryStringToArrayBuffer(binString)], {type}); } function b64ToBluffer(b64, type) { return binStringToBluffer(thisAtob(b64), type); } //Can't find original post, but this is close //http://stackoverflow.com/questions/6965107/ (continues on next line) //converting-between-strings-and-arraybuffers function arrayBufferToBinaryString(buffer) { var binary = ''; var bytes = new Uint8Array(buffer); var length = bytes.byteLength; for (var i = 0; i < length; i++) { binary += String.fromCharCode(bytes[i]); } return binary; } // shim for browsers that don't support it function readAsBinaryString(blob, callback) { var reader = new FileReader(); var hasBinaryString = typeof reader.readAsBinaryString === 'function'; reader.onloadend = function (e) { var result = e.target.result || ''; if (hasBinaryString) { return callback(result); } callback(arrayBufferToBinaryString(result)); }; if (hasBinaryString) { reader.readAsBinaryString(blob); } else { reader.readAsArrayBuffer(blob); } } function blobToBinaryString(blobOrBuffer, callback) { readAsBinaryString(blobOrBuffer, function (bin) { callback(bin); }); } function blobToBase64(blobOrBuffer, callback) { blobToBinaryString(blobOrBuffer, function (base64) { callback(thisBtoa(base64)); }); } // simplified API. 
universal browser support is assumed function readAsArrayBuffer(blob, callback) { var reader = new FileReader(); reader.onloadend = function (e) { var result = e.target.result || new ArrayBuffer(0); callback(result); }; reader.readAsArrayBuffer(blob); } // this is not used in the browser var setImmediateShim = self.setImmediate || self.setTimeout; var MD5_CHUNK_SIZE = 32768; function rawToBase64(raw) { return thisBtoa(raw); } function appendBlob(buffer, blob, start, end, callback) { if (start > 0 || end < blob.size) { // only slice blob if we really need to blob = blob.slice(start, end); } readAsArrayBuffer(blob, function (arrayBuffer) { buffer.append(arrayBuffer); callback(); }); } function appendString(buffer, string, start, end, callback) { if (start > 0 || end < string.length) { // only create a substring if we really need to string = string.substring(start, end); } buffer.appendBinary(string); callback(); } function binaryMd5(data, callback) { var inputIsString = typeof data === 'string'; var len = inputIsString ? data.length : data.size; var chunkSize = Math.min(MD5_CHUNK_SIZE, len); var chunks = Math.ceil(len / chunkSize); var currentChunk = 0; var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer(); var append = inputIsString ? 
appendString : appendBlob; function next() { setImmediateShim(loadNextChunk); } function done() { var raw = buffer.end(true); var base64 = rawToBase64(raw); callback(base64); buffer.destroy(); } function loadNextChunk() { var start = currentChunk * chunkSize; var end = start + chunkSize; currentChunk++; if (currentChunk < chunks) { append(buffer, data, start, end, next); } else { append(buffer, data, start, end, done); } } loadNextChunk(); } function stringMd5(string) { return Md5.hash(string); } /** * Creates a new revision string that does NOT include the revision height * For example '56649f1b0506c6ca9fda0746eb0cacdf' */ function rev(doc, deterministic_revs) { if (!deterministic_revs) { return uuid.v4().replace(/-/g, '').toLowerCase(); } var mutateableDoc = Object.assign({}, doc); delete mutateableDoc._rev_tree; return stringMd5(JSON.stringify(mutateableDoc)); } var uuid$1 = uuid.v4; // mimic old import, only v4 is ever used elsewhere // We fetch all leafs of the revision tree, and sort them based on tree length // and whether they were deleted, undeleted documents with the longest revision // tree (most edits) win // The final sort algorithm is slightly documented in a sidebar here: // http://guide.couchdb.org/draft/conflicts.html function winningRev(metadata) { var winningId; var winningPos; var winningDeleted; var toVisit = metadata.rev_tree.slice(); var node; while ((node = toVisit.pop())) { var tree = node.ids; var branches = tree[2]; var pos = node.pos; if (branches.length) { // non-leaf for (var i = 0, len = branches.length; i < len; i++) { toVisit.push({pos: pos + 1, ids: branches[i]}); } continue; } var deleted = !!tree[1].deleted; var id = tree[0]; // sort by deleted, then pos, then id if (!winningId || (winningDeleted !== deleted ? winningDeleted : winningPos !== pos ? 
winningPos < pos : winningId < id)) { winningId = id; winningPos = pos; winningDeleted = deleted; } } return winningPos + '-' + winningId; } // Pretty much all below can be combined into a higher order function to // traverse revisions // The return value from the callback will be passed as context to all // children of that node function traverseRevTree(revs, callback) { var toVisit = revs.slice(); var node; while ((node = toVisit.pop())) { var pos = node.pos; var tree = node.ids; var branches = tree[2]; var newCtx = callback(branches.length === 0, pos, tree[0], node.ctx, tree[1]); for (var i = 0, len = branches.length; i < len; i++) { toVisit.push({pos: pos + 1, ids: branches[i], ctx: newCtx}); } } } function sortByPos(a, b) { return a.pos - b.pos; } function collectLeaves(revs) { var leaves = []; traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) { if (isLeaf) { leaves.push({rev: pos + "-" + id, pos, opts}); } }); leaves.sort(sortByPos).reverse(); for (var i = 0, len = leaves.length; i < len; i++) { delete leaves[i].pos; } return leaves; } // returns revs of all conflicts that is leaves such that // 1. are not deleted and // 2. are different than winning revision function collectConflicts(metadata) { var win = winningRev(metadata); var leaves = collectLeaves(metadata.rev_tree); var conflicts = []; for (var i = 0, len = leaves.length; i < len; i++) { var leaf = leaves[i]; if (leaf.rev !== win && !leaf.opts.deleted) { conflicts.push(leaf.rev); } } return conflicts; } // compact a tree by marking its non-leafs as missing, // and return a list of revs to delete function compactTree(metadata) { var revs = []; traverseRevTree(metadata.rev_tree, function (isLeaf, pos, revHash, ctx, opts) { if (opts.status === 'available' && !isLeaf) { revs.push(pos + '-' + revHash); opts.status = 'missing'; } }); return revs; } // `findPathToLeaf()` returns an array of revs that goes from the specified // leaf rev to the root of that leaf’s branch. // // eg. 
for this rev tree: // 1-9692 ▶ 2-37aa ▶ 3-df22 ▶ 4-6e94 ▶ 5-df4a ▶ 6-6a3a ▶ 7-57e5 // ┃ ┗━━━━━━▶ 5-8d8c ▶ 6-65e0 // ┗━━━━━━▶ 3-43f6 ▶ 4-a3b4 // // For a `targetRev` of '7-57e5', `findPathToLeaf()` would return ['7-57e5', '6-6a3a', '5-df4a'] // The `revs` argument has the same structure as what `revs_tree` has on e.g. // the IndexedDB representation of the rev tree datastructure. Please refer to // tests/unit/test.purge.js for examples of what these look like. // // This function will throw an error if: // - The requested revision does not exist // - The requested revision is not a leaf function findPathToLeaf(revs, targetRev) { let path = []; const toVisit = revs.slice(); let node; while ((node = toVisit.pop())) { const { pos, ids: tree } = node; const rev = `${pos}-${tree[0]}`; const branches = tree[2]; // just assuming we're already working on the path up towards our desired leaf. path.push(rev); // we've reached the leaf of our dreams, so return the computed path. if (rev === targetRev) { //…unleeeeess if (branches.length !== 0) { throw new Error('The requested revision is not a leaf'); } return path.reverse(); } // this is based on the assumption that after we have a leaf (`branches.length == 0`), we handle the next // branch. this is true for all branches other than the path leading to the winning rev (which is 7-57e5 in // the example above. i've added a reset condition for branching nodes (`branches.length > 1`) as well. 
if (branches.length === 0 || branches.length > 1) { path = []; } // as a next step, we push the branches of this node to `toVisit` for visiting it during the next iteration for (let i = 0, len = branches.length; i < len; i++) { toVisit.push({ pos: pos + 1, ids: branches[i] }); } } if (path.length === 0) { throw new Error('The requested revision does not exist'); } return path.reverse(); } // build up a list of all the paths to the leafs in this revision tree function rootToLeaf(revs) { var paths = []; var toVisit = revs.slice(); var node; while ((node = toVisit.pop())) { var pos = node.pos; var tree = node.ids; var id = tree[0]; var opts = tree[1]; var branches = tree[2]; var isLeaf = branches.length === 0; var history = node.history ? node.history.slice() : []; history.push({id, opts}); if (isLeaf) { paths.push({pos: (pos + 1 - history.length), ids: history}); } for (var i = 0, len = branches.length; i < len; i++) { toVisit.push({pos: pos + 1, ids: branches[i], history}); } } return paths.reverse(); } // for a better overview of what this is doing, read: function sortByPos$1(a, b) { return a.pos - b.pos; } // classic binary search function binarySearch(arr, item, comparator) { var low = 0; var high = arr.length; var mid; while (low < high) { mid = (low + high) >>> 1; if (comparator(arr[mid], item) < 0) { low = mid + 1; } else { high = mid; } } return low; } // assuming the arr is sorted, insert the item in the proper place function insertSorted(arr, item, comparator) { var idx = binarySearch(arr, item, comparator); arr.splice(idx, 0, item); } // Turn a path as a flat array into a tree with a single branch. 
// If any should be stemmed from the beginning of the array, that's passed
// in as the second argument
function pathToTree(path, numStemmed) {
  var root;
  var leaf;
  // walk the flat path, chaining each entry as the single child of the
  // previous one; entries before `numStemmed` are dropped
  for (var i = numStemmed, len = path.length; i < len; i++) {
    var node = path[i];
    var currentLeaf = [node.id, node.opts, []];
    if (leaf) {
      leaf[2].push(currentLeaf);
      leaf = currentLeaf;
    } else {
      root = leaf = currentLeaf;
    }
  }
  return root;
}

// compare the IDs of two trees
function compareTree(a, b) {
  return a[0] < b[0] ? -1 : 1;
}

// Merge two trees together
// The roots of tree1 and tree2 must be the same revision
//
// Mutates `in_tree1` in place and returns it along with a `conflicts` flag:
// false, 'new_leaf' (tree2 extended an existing branch) or 'new_branch'
// (tree2 introduced a sibling branch).
function mergeTree(in_tree1, in_tree2) {
  var queue = [{tree1: in_tree1, tree2: in_tree2}];
  var conflicts = false;
  while (queue.length > 0) {
    var item = queue.pop();
    var tree1 = item.tree1;
    var tree2 = item.tree2;

    // if either side knows the rev body is available, keep it 'available'
    if (tree1[1].status || tree2[1].status) {
      tree1[1].status =
        (tree1[1].status === 'available' ||
          tree2[1].status === 'available') ? 'available' : 'missing';
    }

    for (var i = 0; i < tree2[2].length; i++) {
      if (!tree1[2][0]) {
        // tree1 has no children here; adopt tree2's child wholesale
        conflicts = 'new_leaf';
        tree1[2][0] = tree2[2][i];
        continue;
      }

      var merged = false;
      for (var j = 0; j < tree1[2].length; j++) {
        if (tree1[2][j][0] === tree2[2][i][0]) {
          // same child rev id on both sides — merge one level deeper
          queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
          merged = true;
        }
      }
      if (!merged) {
        // genuinely new sibling branch; keep children sorted by rev id
        conflicts = 'new_branch';
        insertSorted(tree1[2], tree2[2][i], compareTree);
      }
    }
  }
  return {conflicts, tree: in_tree1};
}

// Merge the path (a single-branch tree) into the forest of trees.
// Returns the merged forest plus a conflicts flag; `dontExpand` suppresses
// the expensive root-walking merge used to connect trees rooted at
// different positions (used after stemming).
function doMerge(tree, path, dontExpand) {
  var restree = [];
  var conflicts = false;
  var merged = false;
  var res;

  if (!tree.length) {
    // empty forest: the path becomes the first tree
    return {tree: [path], conflicts: 'new_leaf'};
  }

  for (var i = 0, len = tree.length; i < len; i++) {
    var branch = tree[i];

    if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) {
      // Paths start at the same position and have the same root, so they need
      // merged
      res = mergeTree(branch.ids, path.ids);
      restree.push({pos: branch.pos, ids: res.tree});
      conflicts = conflicts || res.conflicts;
      merged = true;
    } else if (dontExpand !== true) {
      // The paths start at a different position, take the earliest path and
      // traverse up until it is at the same point from root as the path we
      // want to merge. If the keys match we return the longer path with the
      // other merged. After stemming we don't want to expand the trees.
      var t1 = branch.pos < path.pos ? branch : path;
      var t2 = branch.pos < path.pos ? path : branch;
      var diff = t2.pos - t1.pos;

      var candidateParents = [];
      var trees = [];
      trees.push({ids: t1.ids, diff, parent: null, parentIdx: null});
      // find every node in t1 that sits `diff` levels down and matches
      // t2's root id — those are the places t2 can be grafted on
      while (trees.length > 0) {
        var item = trees.pop();
        if (item.diff === 0) {
          if (item.ids[0] === t2.ids[0]) {
            candidateParents.push(item);
          }
          continue;
        }
        var elements = item.ids[2];
        for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) {
          trees.push({
            ids: elements[j],
            diff: item.diff - 1,
            parent: item.ids,
            parentIdx: j
          });
        }
      }

      var el = candidateParents[0];
      if (!el) {
        // no graft point found; keep the branch untouched
        restree.push(branch);
      } else {
        // merge t2 into the matching subtree and splice it back in place
        res = mergeTree(el.ids, t2.ids);
        el.parent[2][el.parentIdx] = res.tree;
        restree.push({pos: t1.pos, ids: t1.ids});
        conflicts = conflicts || res.conflicts;
        merged = true;
      }
    } else {
      restree.push(branch);
    }
  }

  // We didn't find a mergeable tree, so add the path as a new, separate tree
  if (!merged) {
    restree.push(path);
  }

  restree.sort(sortByPos$1);

  return {
    tree: restree,
    conflicts: conflicts || 'internal_node'
  };
}

// To ensure we don't grow the revision tree infinitely, we stem old revisions
function stem(tree, depth) {
  // First we break out the tree into a complete list of root to leaf paths
  var paths = rootToLeaf(tree);
  var stemmedRevs;

  var result;
  for (var i = 0, len = paths.length; i < len; i++) {
    // Then for each path, we cut off the start of the path based on the
    // `depth` to stem to, and generate a new set of flat trees
    var path = paths[i];
    var stemmed = path.ids;
    var node;
    if (stemmed.length > depth) {
      // only do the stemming work if we actually need to stem
      if (!stemmedRevs) {
        stemmedRevs = {}; // avoid allocating this object unnecessarily
      }
      var numStemmed = stemmed.length - depth;
      node = {
        pos: path.pos + numStemmed,
        ids: pathToTree(stemmed, numStemmed)
      };

      // record which revs were cut off so callers can delete their bodies
      for (var s = 0; s < numStemmed; s++) {
        var rev = (path.pos + s) + '-' + stemmed[s].id;
        stemmedRevs[rev] = true;
      }
    } else { // no need to actually stem
      node = {
        pos: path.pos,
        ids: pathToTree(stemmed, 0)
      };
    }

    // Then we remerge all those flat trees together, ensuring that we don't
    // connect trees that would go beyond the depth limit
    if (result) {
      result = doMerge(result, node, true).tree;
    } else {
      result = [node];
    }
  }

  // this is memory-heavy per Chrome profiler, avoid unless we actually stemmed
  if (stemmedRevs) {
    traverseRevTree(result, function (isLeaf, pos, revHash) {
      // some revisions may have been removed in a branch but not in another
      delete stemmedRevs[pos + '-' + revHash];
    });
  }

  return {
    tree: result,
    revs: stemmedRevs ? Object.keys(stemmedRevs) : []
  };
}

// merge `path` into the rev tree, then stem the result to `depth`
function merge(tree, path, depth) {
  var newTree = doMerge(tree, path);
  var stemmed = stem(newTree.tree, depth);
  return {
    tree: stemmed.tree,
    stemmedRevs: stemmed.revs,
    conflicts: newTree.conflicts
  };
}

// return true if a rev exists in the rev tree, false otherwise
function revExists(revs, rev) {
  var toVisit = revs.slice();
  var splitRev = rev.split('-');
  var targetPos = parseInt(splitRev[0], 10);
  var targetId = splitRev[1];

  var node;
  while ((node = toVisit.pop())) {
    if (node.pos === targetPos && node.ids[0] === targetId) {
      return true;
    }
    var branches = node.ids[2];
    for (var i = 0, len = branches.length; i < len; i++) {
      toVisit.push({pos: node.pos + 1, ids: branches[i]});
    }
  }
  return false;
}

// extract the `ids` tree from a rev-tree entry
function getTrees(node) {
  return node.ids;
}

// check if a specific revision of a doc has been deleted
// - metadata: the metadata object from the doc store
// - rev: (optional) the revision to check.
// defaults to winning revision
function isDeleted(metadata, rev) {
  if (!rev) {
    rev = winningRev(metadata);
  }
  // the rev hash is everything after the "pos-" prefix
  var id = rev.substring(rev.indexOf('-') + 1);
  var toVisit = metadata.rev_tree.map(getTrees);

  var tree;
  while ((tree = toVisit.pop())) {
    if (tree[0] === id) {
      return !!tree[1].deleted;
    }
    toVisit = toVisit.concat(tree[2]);
  }
  // NOTE: implicitly returns undefined when the rev is not found in the tree
}

// true for ids in the special "_local/" namespace
function isLocalId(id) {
  return typeof id === 'string' && id.startsWith('_local/');
}

// returns the current leaf node for a given revision
function latest(rev, metadata) {
  var toVisit = metadata.rev_tree.slice();
  var node;
  while ((node = toVisit.pop())) {
    var pos = node.pos;
    var tree = node.ids;
    var id = tree[0];
    var opts = tree[1];
    var branches = tree[2];
    var isLeaf = branches.length === 0;

    // history accumulates the root-to-here ancestry for this traversal
    var history = node.history ? node.history.slice() : [];
    history.push({id, pos, opts});

    if (isLeaf) {
      // if `rev` appears anywhere in this leaf's ancestry, this leaf wins
      for (var i = 0, len = history.length; i < len; i++) {
        var historyNode = history[i];
        var historyRev = historyNode.pos + '-' + historyNode.id;

        if (historyRev === rev) {
          // return the rev of this leaf
          return pos + '-' + id;
        }
      }
    }

    for (var j = 0, l = branches.length; j < l; j++) {
      toVisit.push({pos: pos + 1, ids: branches[j], history});
    }
  }

  /* istanbul ignore next */
  throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev);
}

function tryCatchInChangeListener(self, change, pending, lastSeq) {
  // isolate try/catches to avoid V8 deoptimizations
  try {
    self.emit('change', change, pending, lastSeq);
  } catch (e) {
    guardedConsole('error', 'Error in .on("change", function):', e);
  }
}

// shape a raw doc + metadata pair into a changes-feed entry
function processChange(doc, metadata, opts) {
  var changeList = [{rev: doc._rev}];
  if (opts.style === 'all_docs') {
    // report every leaf rev, not just the winner
    changeList = collectLeaves(metadata.rev_tree)
      .map(function (x) { return {rev: x.rev}; });
  }
  var change = {
    id: metadata.id,
    changes: changeList,
    doc
  };

  if (isDeleted(metadata, doc._rev)) {
    change.deleted = true;
  }
  if (opts.conflicts) {
    change.doc._conflicts = collectConflicts(metadata);
    if (!change.doc._conflicts.length) {
      delete change.doc._conflicts;
    }
  }
  return change;
}

// Event emitter wrapping a db._changes() feed. Also behaves as a thenable
// (then/catch are bound to an internal promise), emits 'change', 'complete',
// 'error' and 'cancel', and cancels itself when the db is destroyed.
class Changes$1 extends EE {
  constructor(db, opts, callback) {
    super();
    this.db = db;
    opts = opts ? clone(opts) : {};
    // `once` ensures completion is only reported a single time
    var complete = opts.complete = once((err, resp) => {
      if (err) {
        // only emit 'error' if somebody is listening, to avoid an
        // unhandled 'error' event crashing the process
        if (listenerCount(this, 'error') > 0) {
          this.emit('error', err);
        }
      } else {
        this.emit('complete', resp);
      }
      this.removeAllListeners();
      db.removeListener('destroyed', onDestroy);
    });
    if (callback) {
      this.on('complete', function (resp) {
        callback(null, resp);
      });
      this.on('error', callback);
    }
    const onDestroy = () => {
      this.cancel();
    };
    db.once('destroyed', onDestroy);

    opts.onChange = (change, pending, lastSeq) => {
      /* istanbul ignore if */
      if (this.isCancelled) {
        return;
      }
      tryCatchInChangeListener(this, change, pending, lastSeq);
    };

    // internal promise backing the thenable interface; note that
    // opts.complete is re-assigned here so the adapter resolves the promise,
    // while the original `complete` above runs via this.then(...) below
    var promise = new Promise(function (fulfill, reject) {
      opts.complete = function (err, res) {
        if (err) {
          reject(err);
        } else {
          fulfill(res);
        }
      };
    });
    this.once('cancel', function () {
      db.removeListener('destroyed', onDestroy);
      opts.complete(null, {status: 'cancelled'});
    });
    this.then = promise.then.bind(promise);
    this['catch'] = promise['catch'].bind(promise);
    this.then(function (result) {
      complete(null, result);
    }, complete);

    // defer starting the feed until the adapter's task queue is ready
    if (!db.taskqueue.isReady) {
      db.taskqueue.addTask((failed) => {
        if (failed) {
          opts.complete(failed);
        } else if (this.isCancelled) {
          this.emit('cancel');
        } else {
          this.validateChanges(opts);
        }
      });
    } else {
      this.validateChanges(opts);
    }
  }

  cancel() {
    this.isCancelled = true;
    if (this.db.taskqueue.isReady) {
      this.emit('cancel');
    }
  }

  validateChanges(opts) {
    var callback = opts.complete;

    /* istanbul ignore else */
    if (PouchDB._changesFilterPlugin) {
      PouchDB._changesFilterPlugin.validate(opts, (err) => {
        if (err) {
          return callback(err);
        }
        this.doChanges(opts);
      });
    } else {
      this.doChanges(opts);
    }
  }

  doChanges(opts) {
    var callback = opts.complete;

    opts = clone(opts);
    if ('live' in opts && !('continuous' in opts)) {
      opts.continuous = opts.live;
    }
    opts.processChange = processChange;

    if (opts.since === 'latest') {
      opts.since = 'now';
    }
    if (!opts.since) {
      opts.since = 0;
    }
    if (opts.since === 'now') {
      // resolve 'now' to the db's current update_seq, then restart
      this.db.info().then((info) => {
        /* istanbul ignore if */
        if (this.isCancelled) {
          callback(null, {status: 'cancelled'});
          return;
        }
        opts.since = info.update_seq;
        this.doChanges(opts);
      }, callback);
      return;
    }

    /* istanbul ignore else */
    if (PouchDB._changesFilterPlugin) {
      PouchDB._changesFilterPlugin.normalize(opts);
      if (PouchDB._changesFilterPlugin.shouldFilter(this, opts)) {
        return PouchDB._changesFilterPlugin.filter(this, opts);
      }
    } else {
      // warn about filter-related options that will be silently ignored
      ['doc_ids', 'filter', 'selector', 'view'].forEach(function (key) {
        if (key in opts) {
          guardedConsole('warn',
            'The "' + key + '" option was passed in to changes/replicate, ' +
            'but pouchdb-changes-filter plugin is not installed, so it ' +
            'was ignored. Please install the plugin to enable filtering.'
          );
        }
      });
    }

    if (!('descending' in opts)) {
      opts.descending = false;
    }

    // 0 and 1 should return 1 document
    opts.limit = opts.limit === 0 ? 1 : opts.limit;
    opts.complete = callback;
    var newPromise = this.db._changes(opts);
    /* istanbul ignore else */
    if (newPromise && typeof newPromise.cancel === 'function') {
      // chain cancellation through to the adapter's own feed
      const cancel = this.cancel;
      this.cancel = (...args) => {
        newPromise.cancel();
        cancel.apply(this, args);
      };
    }
  }
}

/*
 * A generic pouch adapter
 */

// Wrapper for functions that call the bulkdocs api with a single doc,
// if the first result is an error, return an error
function yankError(callback, docId) {
  return function (err, results) {
    if (err || (results[0] && results[0].error)) {
      err = err || results[0];
      err.docId = docId;
      callback(err);
    } else {
      callback(null, results.length ? results[0] : results);
    }
  };
}

// clean docs given to us by the user
function cleanDocs(docs) {
  for (var i = 0; i < docs.length; i++) {
    var doc = docs[i];
    if (doc._deleted) {
      delete doc._attachments; // ignore atts for deleted docs
    } else if (doc._attachments) {
      // filter out extraneous keys from _attachments
      var atts = Object.keys(doc._attachments);
      for (var j = 0; j < atts.length; j++) {
        var att = atts[j];
        doc._attachments[att] = pick(doc._attachments[att],
          ['data', 'digest', 'content_type', 'length', 'revpos', 'stub']);
      }
    }
  }
}

// compare two docs, first by _id then by _rev
function compareByIdThenRev(a, b) {
  if (a._id === b._id) {
    const aStart = a._revisions ? a._revisions.start : 0;
    const bStart = b._revisions ? b._revisions.start : 0;
    return aStart - bStart;
  }
  return a._id < b._id ? -1 : 1;
}

// for every node in a revision tree computes its distance from the closest
// leaf
function computeHeight(revs) {
  var height = {};
  var edges = [];
  traverseRevTree(revs, function (isLeaf, pos, id, prnt) {
    var rev$$1 = pos + "-" + id;
    if (isLeaf) {
      height[rev$$1] = 0;
    }
    if (prnt !== undefined) {
      edges.push({from: prnt, to: rev$$1});
    }
    return rev$$1;
  });

  // process edges leaf-first so a parent's height is min over its children
  edges.reverse();
  edges.forEach(function (edge) {
    if (height[edge.from] === undefined) {
      height[edge.from] = 1 + height[edge.to];
    } else {
      height[edge.from] = Math.min(height[edge.from], 1 + height[edge.to]);
    }
  });
  return height;
}

// apply skip/limit/descending to an explicit `keys` list in-place,
// so the underlying allDocs only has to handle the keys themselves
function allDocsKeysParse(opts) {
  var keys = ('limit' in opts) ?
    opts.keys.slice(opts.skip, opts.limit + opts.skip) :
    (opts.skip > 0) ? opts.keys.slice(opts.skip) : opts.keys;
  opts.keys = keys;
  opts.skip = 0;
  delete opts.limit;
  if (opts.descending) {
    keys.reverse();
    opts.descending = false;
  }
}

// all compaction is done in a queue, to avoid attaching
// too many listeners at once
function doNextCompaction(self) {
  var task = self._compactionQueue[0];
  var opts = task.opts;
  var callback = task.callback;
  // resume from the last compacted seq, if one was recorded
  self.get('_local/compaction').catch(function () {
    return false;
  }).then(function (doc) {
    if (doc && doc.last_seq) {
      opts.last_seq = doc.last_seq;
    }
    self._compact(opts, function (err, res) {
      /* istanbul ignore if */
      if (err) {
        callback(err);
      } else {
        callback(null, res);
      }
      nextTick(function () {
        self._compactionQueue.shift();
        if (self._compactionQueue.length) {
          doNextCompaction(self);
        }
      });
    });
  });
}

// record a purge in the _local/purges doc, creating it on first purge
function appendPurgeSeq(db, docId, rev$$1) {
  return db.get('_local/purges').then(function (doc) {
    const purgeSeq = doc.purgeSeq + 1;
    doc.purges.push({
      docId,
      rev: rev$$1,
      purgeSeq,
    });
    // NOTE(review): no `self` is declared in this function's scope — this
    // resolves to the global `self` (browser/worker) where
    // `purged_infos_limit` is presumably undefined, so this truncation
    // likely never runs; `db.purged_infos_limit` may have been intended.
    // Confirm upstream before changing.
    if (doc.purges.length > self.purged_infos_limit) {
      doc.purges.splice(0, doc.purges.length - self.purged_infos_limit);
    }
    doc.purgeSeq = purgeSeq;
    return doc;
  }).catch(function (err) {
    // 404 means no purge has happened yet; bootstrap the purges doc
    if (err.status !== 404) {
      throw err;
    }
    return {
      _id: '_local/purges',
      purges: [{
        docId,
        rev: rev$$1,
        purgeSeq: 0,
      }],
      purgeSeq: 0,
    };
  }).then(function (doc) {
    return db.put(doc);
  });
}

// returns an error message if the attachment name is invalid, else false
function attachmentNameError(name) {
  if (name.charAt(0) === '_') {
    return name + ' is not a valid attachment name, attachment ' +
      'names cannot start with \'_\'';
  }
  return false;
}

// true when the value cannot be treated as a single document object
function isNotSingleDoc(doc) {
  return doc === null || typeof doc !== 'object' || Array.isArray(doc);
}

// revisions look like "<integer>-<hash>", with no '-' in the hash part
const validRevRegex = /^\d+-[^-]*$/;
function isValidRev(rev$$1) { return typeof rev$$1 === 'string' && validRevR