pouchdb
PouchDB is a pocket-sized database
'use strict';
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var crypto = _interopDefault(require('crypto'));
var nodeFetch = require('node-fetch');
var nodeFetch__default = _interopDefault(nodeFetch);
var fetchCookie = _interopDefault(require('fetch-cookie'));
var uuid = require('uuid');
var levelup = _interopDefault(require('levelup'));
var ltgt = _interopDefault(require('ltgt'));
var Codec = _interopDefault(require('level-codec'));
var ReadableStreamCore = _interopDefault(require('readable-stream'));
var Deque = _interopDefault(require('double-ended-queue'));
var vuvuzela = _interopDefault(require('vuvuzela'));
var fs = _interopDefault(require('fs'));
var path = _interopDefault(require('path'));
var level = _interopDefault(require('level'));
var through2 = require('through2');
var LevelWriteStream = _interopDefault(require('level-write-stream'));
var vm = _interopDefault(require('vm'));
var EE = _interopDefault(require('events'));
function isBinaryObject(object) {
return object instanceof Buffer;
}
var cloneBinaryObject = (buffer) => Buffer.from(buffer);
// most of this is borrowed from lodash.isPlainObject:
// https://github.com/fis-components/lodash.isplainobject/
// blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js
var funcToString = Function.prototype.toString;
var objectCtorString = funcToString.call(Object);
function isPlainObject(value) {
var proto = Object.getPrototypeOf(value);
/* istanbul ignore if */
if (proto === null) { // not sure when this happens, but I guess it can
return true;
}
var Ctor = proto.constructor;
return (typeof Ctor == 'function' &&
Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString);
}
function clone(object) {
var newObject;
var i;
var len;
if (!object || typeof object !== 'object') {
return object;
}
if (Array.isArray(object)) {
newObject = [];
for (i = 0, len = object.length; i < len; i++) {
newObject[i] = clone(object[i]);
}
return newObject;
}
// special case: to avoid inconsistencies between IndexedDB
// and other backends, we automatically stringify Dates
if (object instanceof Date && isFinite(object)) {
return object.toISOString();
}
if (isBinaryObject(object)) {
return cloneBinaryObject(object);
}
if (!isPlainObject(object)) {
return object; // don't clone objects like Workers
}
newObject = {};
for (i in object) {
/* istanbul ignore else */
if (Object.prototype.hasOwnProperty.call(object, i)) {
var value = clone(object[i]);
if (typeof value !== 'undefined') {
newObject[i] = value;
}
}
}
return newObject;
}
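// Example (illustrative sketch): clone() deep-copies plain objects and arrays,
// stringifies finite Dates, copies Buffers, drops undefined values, and passes
// anything else (class instances, functions) through untouched:
//
//   clone({ when: new Date(0), nested: { a: 1 }, missing: undefined });
//   // => { when: '1970-01-01T00:00:00.000Z', nested: { a: 1 } }
//   //    (nested is a fresh copy, not the same object)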
function once(fun) {
var called = false;
return function (...args) {
/* istanbul ignore if */
if (called) {
// this is a smoke test and should never actually happen
throw new Error('once called more than once');
} else {
called = true;
fun.apply(this, args);
}
};
}
function toPromise(func) {
//create the function we will be returning
return function (...args) {
// Clone arguments
args = clone(args);
var self = this;
// if the last argument is a function, assume it's a callback
var usedCB = (typeof args[args.length - 1] === 'function') ? args.pop() : false;
var promise = new Promise(function (fulfill, reject) {
var resp;
try {
var callback = once(function (err, mesg) {
if (err) {
reject(err);
} else {
fulfill(mesg);
}
});
// create a callback for this invocation
// apply the function in the orig context
args.push(callback);
resp = func.apply(self, args);
if (resp && typeof resp.then === 'function') {
fulfill(resp);
}
} catch (e) {
reject(e);
}
});
// if there is a callback, call it back
if (usedCB) {
promise.then(function (result) {
usedCB(null, result);
}, usedCB);
}
return promise;
};
}
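// Example (illustrative sketch, using a made-up `readConfig` function):
// toPromise() lets a Node-style callback API be consumed either way.
//
//   var readConfig = toPromise(function (key, callback) {
//     setTimeout(function () { callback(null, 'value-for-' + key); }, 0);
//   });
//   readConfig('theme').then(function (value) { /* 'value-for-theme' */ });
//   readConfig('theme', function (err, value) { /* same result via callback */ });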
function logApiCall(self, name, args) {
/* istanbul ignore if */
if (self.constructor.listeners('debug').length) {
var logArgs = ['api', self.name, name];
for (var i = 0; i < args.length - 1; i++) {
logArgs.push(args[i]);
}
self.constructor.emit('debug', logArgs);
// override the callback itself to log the response
var origCallback = args[args.length - 1];
args[args.length - 1] = function (err, res) {
var responseArgs = ['api', self.name, name];
responseArgs = responseArgs.concat(
err ? ['error', err] : ['success', res]
);
self.constructor.emit('debug', responseArgs);
origCallback(err, res);
};
}
}
function adapterFun(name, callback) {
return toPromise(function (...args) {
if (this._closed) {
return Promise.reject(new Error('database is closed'));
}
if (this._destroyed) {
return Promise.reject(new Error('database is destroyed'));
}
var self = this;
logApiCall(self, name, args);
if (!this.taskqueue.isReady) {
return new Promise(function (fulfill, reject) {
self.taskqueue.addTask(function (failed) {
if (failed) {
reject(failed);
} else {
fulfill(self[name].apply(self, args));
}
});
});
}
return callback.apply(this, args);
});
}
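// Example (illustrative sketch): adapterFun() is how public methods such as
// put/get below are defined. A hypothetical method would look like:
//
//   this.count = adapterFun('count', function (callback) {
//     this._count(callback);   // `_count` is a made-up adapter internal
//   }).bind(this);
//
// The wrapper queues the call until the adapter's taskqueue is ready, emits
// 'debug' events when a listener is attached, and rejects once the database
// has been closed or destroyed.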
// like underscore/lodash _.pick()
function pick(obj, arr) {
var res = {};
for (var i = 0, len = arr.length; i < len; i++) {
var prop = arr[i];
if (prop in obj) {
res[prop] = obj[prop];
}
}
return res;
}
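// Example: pick({ a: 1, b: 2, c: 3 }, ['a', 'c']) returns { a: 1, c: 3 };
// keys missing from the source object are simply skipped.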
// Most browsers throttle concurrent requests at 6, so it's silly
// to shim _bulk_get by trying to launch potentially hundreds of requests
// and then letting the majority time out. We can handle this ourselves.
var MAX_NUM_CONCURRENT_REQUESTS = 6;
function identityFunction(x) {
return x;
}
function formatResultForOpenRevsGet(result) {
return [{
ok: result
}];
}
// shim for P/CouchDB adapters that don't directly implement _bulk_get
function bulkGet(db, opts, callback) {
var requests = opts.docs;
// consolidate into one request per doc if possible
var requestsById = new Map();
requests.forEach(function (request) {
if (requestsById.has(request.id)) {
requestsById.get(request.id).push(request);
} else {
requestsById.set(request.id, [request]);
}
});
var numDocs = requestsById.size;
var numDone = 0;
var perDocResults = new Array(numDocs);
function collapseResultsAndFinish() {
var results = [];
perDocResults.forEach(function (res) {
res.docs.forEach(function (info) {
results.push({
id: res.id,
docs: [info]
});
});
});
callback(null, {results});
}
function checkDone() {
if (++numDone === numDocs) {
collapseResultsAndFinish();
}
}
function gotResult(docIndex, id, docs) {
perDocResults[docIndex] = {id, docs};
checkDone();
}
var allRequests = [];
requestsById.forEach(function (value, key) {
allRequests.push(key);
});
var i = 0;
function nextBatch() {
if (i >= allRequests.length) {
return;
}
var upTo = Math.min(i + MAX_NUM_CONCURRENT_REQUESTS, allRequests.length);
var batch = allRequests.slice(i, upTo);
processBatch(batch, i);
i += batch.length;
}
function processBatch(batch, offset) {
batch.forEach(function (docId, j) {
var docIdx = offset + j;
var docRequests = requestsById.get(docId);
// just use the first request as the "template"
// TODO: The _bulk_get API allows for more subtle use cases than this,
// but for now it is unlikely that there will be a mix of different
// "atts_since" or "attachments" in the same request, since it's just
// replicate.js that is using this for the moment.
// Also, atts_since is aspirational, since we don't support it yet.
var docOpts = pick(docRequests[0], ['atts_since', 'attachments']);
docOpts.open_revs = docRequests.map(function (request) {
// rev is optional, open_revs disallowed
return request.rev;
});
// remove falsey / undefined revisions
docOpts.open_revs = docOpts.open_revs.filter(identityFunction);
var formatResult = identityFunction;
if (docOpts.open_revs.length === 0) {
delete docOpts.open_revs;
// when fetching only the "winning" leaf,
// transform the result so it looks like an open_revs
// request
formatResult = formatResultForOpenRevsGet;
}
// globally-supplied options
['revs', 'attachments', 'binary', 'ajax', 'latest'].forEach(function (param) {
if (param in opts) {
docOpts[param] = opts[param];
}
});
db.get(docId, docOpts, function (err, res) {
var result;
/* istanbul ignore if */
if (err) {
result = [{error: err}];
} else {
result = formatResult(res);
}
gotResult(docIdx, docId, result);
nextBatch();
});
});
}
nextBatch();
}
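// Example (illustrative sketch, with hypothetical ids/revs): the shim takes the
// same shape of options that _bulk_get would and answers them via db.get():
//
//   bulkGet(db, {
//     docs: [
//       { id: 'mydoc', rev: '1-abc123' },
//       { id: 'otherdoc' }            // no rev: fetch the winning revision
//     ],
//     revs: true
//   }, function (err, res) {
//     // res.results has one entry per requested revision, each shaped like
//     // { id: 'mydoc', docs: [ { ok: { ...doc } } ] }  or
//     // { id: 'mydoc', docs: [ { error: { ... } } ] }
//   });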
// in Node of course this is false
function hasLocalStorage() {
return false;
}
const nextTick = typeof queueMicrotask === "function"
? queueMicrotask
: function nextTick(fn) {
Promise.resolve().then(fn);
};
class Changes extends EE {
constructor() {
super();
this._listeners = {};
if (hasLocalStorage()) {
addEventListener("storage", (e) => {
this.emit(e.key);
});
}
}
addListener(dbName, id, db, opts) {
if (this._listeners[id]) {
return;
}
var inprogress = false;
var self = this;
function eventFunction() {
if (!self._listeners[id]) {
return;
}
if (inprogress) {
inprogress = 'waiting';
return;
}
inprogress = true;
var changesOpts = pick(opts, [
'style', 'include_docs', 'attachments', 'conflicts', 'filter',
'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs'
]);
function onError() {
inprogress = false;
}
db.changes(changesOpts).on('change', function (c) {
if (c.seq > opts.since && !opts.cancelled) {
opts.since = c.seq;
opts.onChange(c);
}
}).on('complete', function () {
if (inprogress === 'waiting') {
nextTick(eventFunction);
}
inprogress = false;
}).on('error', onError);
}
this._listeners[id] = eventFunction;
this.on(dbName, eventFunction);
}
removeListener(dbName, id) {
if (!(id in this._listeners)) {
return;
}
super.removeListener(dbName, this._listeners[id]);
delete this._listeners[id];
}
notifyLocalWindows(dbName) {
//do a useless change on a storage thing
//in order to get other windows' listeners to activate
if (hasLocalStorage()) {
localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
}
}
notify(dbName) {
this.emit(dbName);
this.notifyLocalWindows(dbName);
}
}
function guardedConsole(method) {
/* istanbul ignore else */
if (typeof console !== 'undefined' && typeof console[method] === 'function') {
var args = Array.prototype.slice.call(arguments, 1);
console[method].apply(console, args);
}
}
function randomNumber(min, max) {
var maxTimeout = 600000; // Hard-coded default of 10 minutes
min = parseInt(min, 10) || 0;
max = parseInt(max, 10);
if (max !== max || max <= min) {
max = (min || 1) << 1; //doubling
} else {
max = max + 1;
}
// In order to not exceed maxTimeout, pick a random value between half of maxTimeout and maxTimeout
if (max > maxTimeout) {
min = maxTimeout >> 1; // divide by two
max = maxTimeout;
}
var ratio = Math.random();
var range = max - min;
return ~~(range * ratio + min); // ~~ coerces to an int, but fast.
}
function defaultBackOff(min) {
var max = 0;
if (!min) {
max = 2000;
}
return randomNumber(min, max);
}
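// Example: defaultBackOff() is used as the default retry back-off for
// replication; the values are random within the stated ranges:
//
//   defaultBackOff(0);              // first retry: 0..2000 ms
//   defaultBackOff(4000);           // doubling: 4000..7999 ms
//   defaultBackOff(10 * 60 * 1000); // capped: between 5 and 10 minutes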
// We assume Node users don't need to see this warning
var res = function () {};
class PouchError extends Error {
constructor(status, error, reason) {
super();
this.status = status;
this.name = error;
this.message = reason;
this.error = true;
}
toString() {
return JSON.stringify({
status: this.status,
name: this.name,
message: this.message,
reason: this.reason
});
}
}
var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect.");
var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'");
var MISSING_DOC = new PouchError(404, 'not_found', 'missing');
var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict');
var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string');
var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts');
var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.');
var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open');
var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error');
var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid');
var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid');
var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid');
var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member');
var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request');
var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object');
var DB_MISSING = new PouchError(404, 'not_found', 'Database not found');
var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown');
var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown');
var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown');
var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function');
var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format');
var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.');
var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found');
var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid');
function createError(error, reason) {
function CustomPouchError(reason) {
// inherit error properties from our parent error manually
// so as to allow proper JSON parsing.
var names = Object.getOwnPropertyNames(error);
for (var i = 0, len = names.length; i < len; i++) {
if (typeof error[names[i]] !== 'function') {
this[names[i]] = error[names[i]];
}
}
if (this.stack === undefined) {
this.stack = (new Error()).stack;
}
if (reason !== undefined) {
this.reason = reason;
}
}
CustomPouchError.prototype = PouchError.prototype;
return new CustomPouchError(reason);
}
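// Example: createError() clones one of the constants above into a fresh,
// JSON-serializable error, optionally attaching a more specific reason:
//
//   var err = createError(MISSING_DOC, 'deleted');
//   // err.status === 404, err.name === 'not_found',
//   // err.message === 'missing', err.reason === 'deleted'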
function generateErrorFromResponse(err) {
if (typeof err !== 'object') {
var data = err;
err = UNKNOWN_ERROR;
err.data = data;
}
if ('error' in err && err.error === 'conflict') {
err.name = 'conflict';
err.status = 409;
}
if (!('name' in err)) {
err.name = err.error || 'unknown';
}
if (!('status' in err)) {
err.status = 500;
}
if (!('message' in err)) {
err.message = err.message || err.reason;
}
if (!('stack' in err)) {
err.stack = (new Error()).stack;
}
return err;
}
function tryFilter(filter, doc, req) {
try {
return !filter(doc, req);
} catch (err) {
var msg = 'Filter function threw: ' + err.toString();
return createError(BAD_REQUEST, msg);
}
}
function filterChange(opts) {
var req = {};
var hasFilter = opts.filter && typeof opts.filter === 'function';
req.query = opts.query_params;
return function filter(change) {
if (!change.doc) {
// CSG sends events on the changes feed that don't have documents,
// this hack makes a whole lot of existing code robust.
change.doc = {};
}
var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req);
if (typeof filterReturn === 'object') {
return filterReturn;
}
if (filterReturn) {
return false;
}
if (!opts.include_docs) {
delete change.doc;
} else if (!opts.attachments) {
for (var att in change.doc._attachments) {
/* istanbul ignore else */
if (Object.prototype.hasOwnProperty.call(change.doc._attachments, att)) {
change.doc._attachments[att].stub = true;
}
}
}
return true;
};
}
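// Example (illustrative sketch): filterChange() builds the per-change predicate
// used by the changes feed. With a hypothetical filter function:
//
//   var keep = filterChange({
//     include_docs: true,
//     filter: function (doc) { return doc.type === 'post'; }
//   });
//   keep({ doc: { _id: 'a', type: 'post' } });  // => true  (change passes)
//   keep({ doc: { _id: 'b', type: 'user' } });  // => false (filtered out)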
// shim for Function.prototype.name,
// for browsers that don't support it like IE
/* istanbul ignore next */
function f() {}
var hasName = f.name;
var res$1;
// We don't run coverage in IE
/* istanbul ignore else */
if (hasName) {
res$1 = function (fun) {
return fun.name;
};
} else {
res$1 = function (fun) {
var match = fun.toString().match(/^\s*function\s*(?:(\S+)\s*)?\(/);
if (match && match[1]) {
return match[1];
}
else {
return '';
}
};
}
var functionName = res$1;
// Determine if an ID is valid
// - invalid IDs begin with an underscore that does not begin '_design' or
// '_local'
// - any other string value is a valid id
// Throws the specific error object for each case
function invalidIdError(id) {
var err;
if (!id) {
err = createError(MISSING_ID);
} else if (typeof id !== 'string') {
err = createError(INVALID_ID);
} else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) {
err = createError(RESERVED_ID);
}
if (err) {
throw err;
}
}
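// Example: invalidIdError() throws rather than returning:
//
//   invalidIdError('_mydoc');      // throws RESERVED_ID (plain ids can't start with _)
//   invalidIdError('_design/app'); // ok: _design/ and _local/ ids are allowed
//   invalidIdError('some-doc');    // ok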
// Checks if a PouchDB object is "remote" or not.
function isRemote(db) {
if (typeof db._remote === 'boolean') {
return db._remote;
}
/* istanbul ignore next */
if (typeof db.type === 'function') {
guardedConsole('warn',
'db.type() is deprecated and will be removed in ' +
'a future version of PouchDB');
return db.type() === 'http';
}
/* istanbul ignore next */
return false;
}
function listenerCount(ee, type) {
return 'listenerCount' in ee ? ee.listenerCount(type) :
EE.listenerCount(ee, type);
}
function parseDesignDocFunctionName(s) {
if (!s) {
return null;
}
var parts = s.split('/');
if (parts.length === 2) {
return parts;
}
if (parts.length === 1) {
return [s, s];
}
return null;
}
function normalizeDesignDocFunctionName(s) {
var normalized = parseDesignDocFunctionName(s);
return normalized ? normalized.join('/') : null;
}
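// Example:
//
//   parseDesignDocFunctionName('mydesign/myfilter');  // => ['mydesign', 'myfilter']
//   parseDesignDocFunctionName('myfilter');           // => ['myfilter', 'myfilter']
//   normalizeDesignDocFunctionName('myfilter');       // => 'myfilter/myfilter'
//   normalizeDesignDocFunctionName('a/b/c');          // => null (too many parts)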
// originally parseUri 1.2.2, now patched by us
// (c) Steven Levithan <stevenlevithan.com>
// MIT License
var keys = ["source", "protocol", "authority", "userInfo", "user", "password",
"host", "port", "relative", "path", "directory", "file", "query", "anchor"];
var qName = "queryKey";
var qParser = /(?:^|&)([^&=]*)=?([^&]*)/g;
// use the "loose" parser
/* eslint no-useless-escape: 0 */
var parser = /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/;
function parseUri(str) {
var m = parser.exec(str);
var uri = {};
var i = 14;
while (i--) {
var key = keys[i];
var value = m[i] || "";
var encoded = ['user', 'password'].indexOf(key) !== -1;
uri[key] = encoded ? decodeURIComponent(value) : value;
}
uri[qName] = {};
uri[keys[12]].replace(qParser, function ($0, $1, $2) {
if ($1) {
uri[qName][$1] = $2;
}
});
return uri;
}
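// Example (hypothetical URL): parseUri() splits a CouchDB-style URL into parts,
// roughly like this:
//
//   var uri = parseUri('http://admin:secret@localhost:5984/mydb?include_docs=true#top');
//   // uri.protocol === 'http', uri.host === 'localhost', uri.port === '5984',
//   // uri.user === 'admin', uri.password === 'secret', uri.path === '/mydb',
//   // uri.queryKey.include_docs === 'true', uri.anchor === 'top'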
// this is essentially the "update sugar" function from daleharvey/pouchdb#1388
// the diffFun tells us what delta to apply to the doc. it either returns
// the doc, or false if it doesn't need to do an update after all
function upsert(db, docId, diffFun) {
return db.get(docId)
.catch(function (err) {
/* istanbul ignore next */
if (err.status !== 404) {
throw err;
}
return {};
})
.then(function (doc) {
// the user might change the _rev, so save it for posterity
var docRev = doc._rev;
var newDoc = diffFun(doc);
if (!newDoc) {
// if the diffFun returns falsy, we short-circuit as
// an optimization
return {updated: false, rev: docRev};
}
// users aren't allowed to modify these values,
// so reset them here
newDoc._id = docId;
newDoc._rev = docRev;
return tryAndPut(db, newDoc, diffFun);
});
}
function tryAndPut(db, doc, diffFun) {
return db.put(doc).then(function (res) {
return {
updated: true,
rev: res.rev
};
}, function (err) {
/* istanbul ignore next */
if (err.status !== 409) {
throw err;
}
return upsert(db, doc._id, diffFun);
});
}
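// Example (illustrative sketch, `db` being any PouchDB instance): upsert()
// retries automatically on 409 conflicts, so the diff function may run more
// than once.
//
//   upsert(db, '_local/settings', function (doc) {
//     if (doc.theme === 'dark') {
//       return false;                       // nothing to change, skip the write
//     }
//     doc.theme = 'dark';
//     return doc;
//   }).then(function (res) {
//     // res is { updated: true, rev: '...' } or { updated: false, rev: '...' }
//   });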
function binaryMd5(data, callback) {
var base64 = crypto.createHash('md5').update(data, 'binary').digest('base64');
callback(base64);
}
function stringMd5(string) {
return crypto.createHash('md5').update(string, 'binary').digest('hex');
}
/**
* Creates a new revision string that does NOT include the revision height
* For example '56649f1b0506c6ca9fda0746eb0cacdf'
*/
function rev(doc, deterministic_revs) {
if (!deterministic_revs) {
return uuid.v4().replace(/-/g, '').toLowerCase();
}
var mutateableDoc = Object.assign({}, doc);
delete mutateableDoc._rev_tree;
return stringMd5(JSON.stringify(mutateableDoc));
}
var uuid$1 = uuid.v4; // mimic old import, only v4 is ever used elsewhere
// We fetch all leaves of the revision tree and sort them by tree length and
// deletion status; undeleted documents with the longest revision tree (most
// edits) win
// The final sort algorithm is slightly documented in a sidebar here:
// http://guide.couchdb.org/draft/conflicts.html
function winningRev(metadata) {
var winningId;
var winningPos;
var winningDeleted;
var toVisit = metadata.rev_tree.slice();
var node;
while ((node = toVisit.pop())) {
var tree = node.ids;
var branches = tree[2];
var pos = node.pos;
if (branches.length) { // non-leaf
for (var i = 0, len = branches.length; i < len; i++) {
toVisit.push({pos: pos + 1, ids: branches[i]});
}
continue;
}
var deleted = !!tree[1].deleted;
var id = tree[0];
// sort by deleted, then pos, then id
if (!winningId || (winningDeleted !== deleted ? winningDeleted :
winningPos !== pos ? winningPos < pos : winningId < id)) {
winningId = id;
winningPos = pos;
winningDeleted = deleted;
}
}
return winningPos + '-' + winningId;
}
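// Example (hypothetical hashes): a rev_tree entry is {pos, ids} where ids is
// [hash, opts, branches]. Given one root with an undeleted and a deleted leaf:
//
//   var metadata = {
//     rev_tree: [{
//       pos: 1,
//       ids: ['aaa', { status: 'available' }, [
//         ['bbb', { status: 'available' }, []],                 // leaf 2-bbb
//         ['ccc', { status: 'available', deleted: true }, []]   // leaf 2-ccc (deleted)
//       ]]
//     }]
//   };
//   winningRev(metadata);  // => '2-bbb' (undeleted leaves beat deleted ones)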
// Pretty much all below can be combined into a higher order function to
// traverse revisions
// The return value from the callback will be passed as context to all
// children of that node
function traverseRevTree(revs, callback) {
var toVisit = revs.slice();
var node;
while ((node = toVisit.pop())) {
var pos = node.pos;
var tree = node.ids;
var branches = tree[2];
var newCtx =
callback(branches.length === 0, pos, tree[0], node.ctx, tree[1]);
for (var i = 0, len = branches.length; i < len; i++) {
toVisit.push({pos: pos + 1, ids: branches[i], ctx: newCtx});
}
}
}
function sortByPos(a, b) {
return a.pos - b.pos;
}
function collectLeaves(revs) {
var leaves = [];
traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
if (isLeaf) {
leaves.push({rev: pos + "-" + id, pos, opts});
}
});
leaves.sort(sortByPos).reverse();
for (var i = 0, len = leaves.length; i < len; i++) {
delete leaves[i].pos;
}
return leaves;
}
// returns the revs of all conflicting leaves, i.e. leaves that
// 1. are not deleted and
// 2. are different from the winning revision
function collectConflicts(metadata) {
var win = winningRev(metadata);
var leaves = collectLeaves(metadata.rev_tree);
var conflicts = [];
for (var i = 0, len = leaves.length; i < len; i++) {
var leaf = leaves[i];
if (leaf.rev !== win && !leaf.opts.deleted) {
conflicts.push(leaf.rev);
}
}
return conflicts;
}
// compact a tree by marking its non-leafs as missing,
// and return a list of revs to delete
function compactTree(metadata) {
var revs = [];
traverseRevTree(metadata.rev_tree, function (isLeaf, pos,
revHash, ctx, opts) {
if (opts.status === 'available' && !isLeaf) {
revs.push(pos + '-' + revHash);
opts.status = 'missing';
}
});
return revs;
}
// `findPathToLeaf()` returns an array of revs that goes from the specified
// leaf rev to the root of that leaf’s branch.
//
// eg. for this rev tree:
// 1-9692 ▶ 2-37aa ▶ 3-df22 ▶ 4-6e94 ▶ 5-df4a ▶ 6-6a3a ▶ 7-57e5
// ┃ ┗━━━━━━▶ 5-8d8c ▶ 6-65e0
// ┗━━━━━━▶ 3-43f6 ▶ 4-a3b4
//
// For a `targetRev` of '7-57e5', `findPathToLeaf()` would return ['7-57e5', '6-6a3a', '5-df4a']
// The `revs` argument has the same structure as what `revs_tree` has on e.g.
// the IndexedDB representation of the rev tree datastructure. Please refer to
// tests/unit/test.purge.js for examples of what these look like.
//
// This function will throw an error if:
// - The requested revision does not exist
// - The requested revision is not a leaf
function findPathToLeaf(revs, targetRev) {
let path$$1 = [];
const toVisit = revs.slice();
let node;
while ((node = toVisit.pop())) {
const { pos, ids: tree } = node;
const rev = `${pos}-${tree[0]}`;
const branches = tree[2];
// just assuming we're already working on the path up towards our desired leaf.
path$$1.push(rev);
// we've reached the leaf of our dreams, so return the computed path.
if (rev === targetRev) {
//…unleeeeess
if (branches.length !== 0) {
throw new Error('The requested revision is not a leaf');
}
return path$$1.reverse();
}
// this is based on the assumption that after we have a leaf (`branches.length == 0`), we handle the next
// branch. this is true for all branches other than the path leading to the winning rev (which is 7-57e5 in
// the example above. i've added a reset condition for branching nodes (`branches.length > 1`) as well.
if (branches.length === 0 || branches.length > 1) {
path$$1 = [];
}
// as a next step, we push the branches of this node to `toVisit` for visiting it during the next iteration
for (let i = 0, len = branches.length; i < len; i++) {
toVisit.push({ pos: pos + 1, ids: branches[i] });
}
}
if (path$$1.length === 0) {
throw new Error('The requested revision does not exist');
}
return path$$1.reverse();
}
// build up a list of all the paths to the leaves in this revision tree
function rootToLeaf(revs) {
var paths = [];
var toVisit = revs.slice();
var node;
while ((node = toVisit.pop())) {
var pos = node.pos;
var tree = node.ids;
var id = tree[0];
var opts = tree[1];
var branches = tree[2];
var isLeaf = branches.length === 0;
var history = node.history ? node.history.slice() : [];
history.push({id, opts});
if (isLeaf) {
paths.push({pos: (pos + 1 - history.length), ids: history});
}
for (var i = 0, len = branches.length; i < len; i++) {
toVisit.push({pos: pos + 1, ids: branches[i], history});
}
}
return paths.reverse();
}
// for a better overview of what this is doing, read:
function sortByPos$1(a, b) {
return a.pos - b.pos;
}
// classic binary search
function binarySearch(arr, item, comparator) {
var low = 0;
var high = arr.length;
var mid;
while (low < high) {
mid = (low + high) >>> 1;
if (comparator(arr[mid], item) < 0) {
low = mid + 1;
} else {
high = mid;
}
}
return low;
}
// assuming the arr is sorted, insert the item in the proper place
function insertSorted(arr, item, comparator) {
var idx = binarySearch(arr, item, comparator);
arr.splice(idx, 0, item);
}
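// Example:
//
//   var sorted = [1, 3, 5];
//   insertSorted(sorted, 4, function (a, b) { return a - b; });
//   // sorted is now [1, 3, 4, 5]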
// Turn a path, given as a flat array, into a tree with a single branch.
// The number of revisions to stem from the beginning of the array is passed
// in as the second argument.
function pathToTree(path$$1, numStemmed) {
var root;
var leaf;
for (var i = numStemmed, len = path$$1.length; i < len; i++) {
var node = path$$1[i];
var currentLeaf = [node.id, node.opts, []];
if (leaf) {
leaf[2].push(currentLeaf);
leaf = currentLeaf;
} else {
root = leaf = currentLeaf;
}
}
return root;
}
// compare the IDs of two trees
function compareTree(a, b) {
return a[0] < b[0] ? -1 : 1;
}
// Merge two trees together
// The roots of tree1 and tree2 must be the same revision
function mergeTree(in_tree1, in_tree2) {
var queue = [{tree1: in_tree1, tree2: in_tree2}];
var conflicts = false;
while (queue.length > 0) {
var item = queue.pop();
var tree1 = item.tree1;
var tree2 = item.tree2;
if (tree1[1].status || tree2[1].status) {
tree1[1].status =
(tree1[1].status === 'available' ||
tree2[1].status === 'available') ? 'available' : 'missing';
}
for (var i = 0; i < tree2[2].length; i++) {
if (!tree1[2][0]) {
conflicts = 'new_leaf';
tree1[2][0] = tree2[2][i];
continue;
}
var merged = false;
for (var j = 0; j < tree1[2].length; j++) {
if (tree1[2][j][0] === tree2[2][i][0]) {
queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
merged = true;
}
}
if (!merged) {
conflicts = 'new_branch';
insertSorted(tree1[2], tree2[2][i], compareTree);
}
}
}
return {conflicts, tree: in_tree1};
}
function doMerge(tree, path$$1, dontExpand) {
var restree = [];
var conflicts = false;
var merged = false;
var res;
if (!tree.length) {
return {tree: [path$$1], conflicts: 'new_leaf'};
}
for (var i = 0, len = tree.length; i < len; i++) {
var branch = tree[i];
if (branch.pos === path$$1.pos && branch.ids[0] === path$$1.ids[0]) {
// Paths start at the same position and have the same root, so they need
// to be merged
res = mergeTree(branch.ids, path$$1.ids);
restree.push({pos: branch.pos, ids: res.tree});
conflicts = conflicts || res.conflicts;
merged = true;
} else if (dontExpand !== true) {
// The paths start at a different position: take the earliest path and
// traverse it until it is at the same distance from the root as the path we
// want to merge. If the keys match, we return the longer path with the
// other merged into it. After stemming we don't want to expand the trees.
var t1 = branch.pos < path$$1.pos ? branch : path$$1;
var t2 = branch.pos < path$$1.pos ? path$$1 : branch;
var diff = t2.pos - t1.pos;
var candidateParents = [];
var trees = [];
trees.push({ids: t1.ids, diff, parent: null, parentIdx: null});
while (trees.length > 0) {
var item = trees.pop();
if (item.diff === 0) {
if (item.ids[0] === t2.ids[0]) {
candidateParents.push(item);
}
continue;
}
var elements = item.ids[2];
for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) {
trees.push({
ids: elements[j],
diff: item.diff - 1,
parent: item.ids,
parentIdx: j
});
}
}
var el = candidateParents[0];
if (!el) {
restree.push(branch);
} else {
res = mergeTree(el.ids, t2.ids);
el.parent[2][el.parentIdx] = res.tree;
restree.push({pos: t1.pos, ids: t1.ids});
conflicts = conflicts || res.conflicts;
merged = true;
}
} else {
restree.push(branch);
}
}
// We didn't find a matching branch to merge into, so add the path as a new one
if (!merged) {
restree.push(path$$1);
}
restree.sort(sortByPos$1);
return {
tree: restree,
conflicts: conflicts || 'internal_node'
};
}
// To ensure we don't grow the revision tree infinitely, we stem old revisions
function stem(tree, depth) {
// First we break out the tree into a complete list of root to leaf paths
var paths = rootToLeaf(tree);
var stemmedRevs;
var result;
for (var i = 0, len = paths.length; i < len; i++) {
// Then for each path, we cut off the start of the path based on the
// `depth` to stem to, and generate a new set of flat trees
var path$$1 = paths[i];
var stemmed = path$$1.ids;
var node;
if (stemmed.length > depth) {
// only do the stemming work if we actually need to stem
if (!stemmedRevs) {
stemmedRevs = {}; // avoid allocating this object unnecessarily
}
var numStemmed = stemmed.length - depth;
node = {
pos: path$$1.pos + numStemmed,
ids: pathToTree(stemmed, numStemmed)
};
for (var s = 0; s < numStemmed; s++) {
var rev = (path$$1.pos + s) + '-' + stemmed[s].id;
stemmedRevs[rev] = true;
}
} else { // no need to actually stem
node = {
pos: path$$1.pos,
ids: pathToTree(stemmed, 0)
};
}
// Then we remerge all those flat trees together, ensuring that we don't
// connect trees that would go beyond the depth limit
if (result) {
result = doMerge(result, node, true).tree;
} else {
result = [node];
}
}
// this is memory-heavy per Chrome profiler, avoid unless we actually stemmed
if (stemmedRevs) {
traverseRevTree(result, function (isLeaf, pos, revHash) {
// some revisions may have been removed in a branch but not in another
delete stemmedRevs[pos + '-' + revHash];
});
}
return {
tree: result,
revs: stemmedRevs ? Object.keys(stemmedRevs) : []
};
}
function merge(tree, path$$1, depth) {
var newTree = doMerge(tree, path$$1);
var stemmed = stem(newTree.tree, depth);
return {
tree: stemmed.tree,
stemmedRevs: stemmed.revs,
conflicts: newTree.conflicts
};
}
// return true if a rev exists in the rev tree, false otherwise
function revExists(revs, rev) {
var toVisit = revs.slice();
var splitRev = rev.split('-');
var targetPos = parseInt(splitRev[0], 10);
var targetId = splitRev[1];
var node;
while ((node = toVisit.pop())) {
if (node.pos === targetPos && node.ids[0] === targetId) {
return true;
}
var branches = node.ids[2];
for (var i = 0, len = branches.length; i < len; i++) {
toVisit.push({pos: node.pos + 1, ids: branches[i]});
}
}
return false;
}
function getTrees(node) {
return node.ids;
}
// check if a specific revision of a doc has been deleted
// - metadata: the metadata object from the doc store
// - rev: (optional) the revision to check. defaults to winning revision
function isDeleted(metadata, rev) {
if (!rev) {
rev = winningRev(metadata);
}
var id = rev.substring(rev.indexOf('-') + 1);
var toVisit = metadata.rev_tree.map(getTrees);
var tree;
while ((tree = toVisit.pop())) {
if (tree[0] === id) {
return !!tree[1].deleted;
}
toVisit = toVisit.concat(tree[2]);
}
}
function isLocalId(id) {
return typeof id === 'string' && id.startsWith('_local/');
}
// returns the current leaf node for a given revision
function latest(rev, metadata) {
var toVisit = metadata.rev_tree.slice();
var node;
while ((node = toVisit.pop())) {
var pos = node.pos;
var tree = node.ids;
var id = tree[0];
var opts = tree[1];
var branches = tree[2];
var isLeaf = branches.length === 0;
var history = node.history ? node.history.slice() : [];
history.push({id, pos, opts});
if (isLeaf) {
for (var i = 0, len = history.length; i < len; i++) {
var historyNode = history[i];
var historyRev = historyNode.pos + '-' + historyNode.id;
if (historyRev === rev) {
// return the rev of this leaf
return pos + '-' + id;
}
}
}
for (var j = 0, l = branches.length; j < l; j++) {
toVisit.push({pos: pos + 1, ids: branches[j], history});
}
}
/* istanbul ignore next */
throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev);
}
function tryCatchInChangeListener(self, change, pending, lastSeq) {
// isolate try/catches to avoid V8 deoptimizations
try {
self.emit('change', change, pending, lastSeq);
} catch (e) {
guardedConsole('error', 'Error in .on("change", function):', e);
}
}
function processChange(doc, metadata, opts) {
var changeList = [{rev: doc._rev}];
if (opts.style === 'all_docs') {
changeList = collectLeaves(metadata.rev_tree)
.map(function (x) { return {rev: x.rev}; });
}
var change = {
id: metadata.id,
changes: changeList,
doc
};
if (isDeleted(metadata, doc._rev)) {
change.deleted = true;
}
if (opts.conflicts) {
change.doc._conflicts = collectConflicts(metadata);
if (!change.doc._conflicts.length) {
delete change.doc._conflicts;
}
}
return change;
}
class Changes$1 extends EE {
constructor(db, opts, callback) {
super();
this.db = db;
opts = opts ? clone(opts) : {};
var complete = opts.complete = once((err, resp) => {
if (err) {
if (listenerCount(this, 'error') > 0) {
this.emit('error', err);
}
} else {
this.emit('complete', resp);
}
this.removeAllListeners();
db.removeListener('destroyed', onDestroy);
});
if (callback) {
this.on('complete', function (resp) {
callback(null, resp);
});
this.on('error', callback);
}
const onDestroy = () => {
this.cancel();
};
db.once('destroyed', onDestroy);
opts.onChange = (change, pending, lastSeq) => {
/* istanbul ignore if */
if (this.isCancelled) {
return;
}
tryCatchInChangeListener(this, change, pending, lastSeq);
};
var promise = new Promise(function (fulfill, reject) {
opts.complete = function (err, res$$1) {
if (err) {
reject(err);
} else {
fulfill(res$$1);
}
};
});
this.once('cancel', function () {
db.removeListener('destroyed', onDestroy);
opts.complete(null, {status: 'cancelled'});
});
this.then = promise.then.bind(promise);
this['catch'] = promise['catch'].bind(promise);
this.then(function (result) {
complete(null, result);
}, complete);
if (!db.taskqueue.isReady) {
db.taskqueue.addTask((failed) => {
if (failed) {
opts.complete(failed);
} else if (this.isCancelled) {
this.emit('cancel');
} else {
this.validateChanges(opts);
}
});
} else {
this.validateChanges(opts);
}
}
cancel() {
this.isCancelled = true;
if (this.db.taskqueue.isReady) {
this.emit('cancel');
}
}
validateChanges(opts) {
var callback = opts.complete;
/* istanbul ignore else */
if (PouchDB._changesFilterPlugin) {
PouchDB._changesFilterPlugin.validate(opts, (err) => {
if (err) {
return callback(err);
}
this.doChanges(opts);
});
} else {
this.doChanges(opts);
}
}
doChanges(opts) {
var callback = opts.complete;
opts = clone(opts);
if ('live' in opts && !('continuous' in opts)) {
opts.continuous = opts.live;
}
opts.processChange = processChange;
if (opts.since === 'latest') {
opts.since = 'now';
}
if (!opts.since) {
opts.since = 0;
}
if (opts.since === 'now') {
this.db.info().then((info) => {
/* istanbul ignore if */
if (this.isCancelled) {
callback(null, {status: 'cancelled'});
return;
}
opts.since = info.update_seq;
this.doChanges(opts);
}, callback);
return;
}
/* istanbul ignore else */
if (PouchDB._changesFilterPlugin) {
PouchDB._changesFilterPlugin.normalize(opts);
if (PouchDB._changesFilterPlugin.shouldFilter(this, opts)) {
return PouchDB._changesFilterPlugin.filter(this, opts);
}
} else {
['doc_ids', 'filter', 'selector', 'view'].forEach(function (key) {
if (key in opts) {
guardedConsole('warn',
'The "' + key + '" option was passed in to changes/replicate, ' +
'but pouchdb-changes-filter plugin is not installed, so it ' +
'was ignored. Please install the plugin to enable filtering.'
);
}
});
}
if (!('descending' in opts)) {
opts.descending = false;
}
// 0 and 1 should return 1 document
opts.limit = opts.limit === 0 ? 1 : opts.limit;
opts.complete = callback;
var newPromise = this.db._changes(opts);
/* istanbul ignore else */
if (newPromise && typeof newPromise.cancel === 'function') {
const cancel = this.cancel;
this.cancel = (...args) => {
newPromise.cancel();
cancel.apply(this, args);
};
}
}
}
/*
* A generic pouch adapter
*/
// Wrapper for functions that call the bulkDocs API with a single doc;
// if the first result is an error, pass that error to the callback
function yankError(callback, docId) {
return function (err, results) {
if (err || (results[0] && results[0].error)) {
err = err || results[0];
err.docId = docId;
callback(err);
} else {
callback(null, results.length ? results[0] : results);
}
};
}
// clean docs given to us by the user
function cleanDocs(docs) {
for (var i = 0; i < docs.length; i++) {
var doc = docs[i];
if (doc._deleted) {
delete doc._attachments; // ignore atts for deleted docs
} else if (doc._attachments) {
// filter out extraneous keys from _attachments
var atts = Object.keys(doc._attachments);
for (var j = 0; j < atts.length; j++) {
var att = atts[j];
doc._attachments[att] = pick(doc._attachments[att],
['data', 'digest', 'content_type', 'length', 'revpos', 'stub']);
}
}
}
}
// compare two docs, first by _id then by _rev
function compareByIdThenRev(a, b) {
if (a._id === b._id) {
const aStart = a._revisions ? a._revisions.start : 0;
const bStart = b._revisions ? b._revisions.start : 0;
return aStart - bStart;
}
return a._id < b._id ? -1 : 1;
}
// for every node in a revision tree, compute its distance from the closest
// leaf
function computeHeight(revs) {
var height = {};
var edges = [];
traverseRevTree(revs, function (isLeaf, pos, id, prnt) {
var rev$$1 = pos + "-" + id;
if (isLeaf) {
height[rev$$1] = 0;
}
if (prnt !== undefined) {
edges.push({from: prnt, to: rev$$1});
}
return rev$$1;
});
edges.reverse();
edges.forEach(function (edge) {
if (height[edge.from] === undefined) {
height[edge.from] = 1 + height[edge.to];
} else {
height[edge.from] = Math.min(height[edge.from], 1 + height[edge.to]);
}
});
return height;
}
function allDocsKeysParse(opts) {
var keys = ('limit' in opts) ?
opts.keys.slice(opts.skip, opts.limit + opts.skip) :
(opts.skip > 0) ? opts.keys.slice(opts.skip) : opts.keys;
opts.keys = keys;
opts.skip = 0;
delete opts.limit;
if (opts.descending) {
keys.reverse();
opts.descending = false;
}
}
// all compaction is done in a queue, to avoid attaching
// too many listeners at once
function doNextCompaction(self) {
var task = self._compactionQueue[0];
var opts = task.opts;
var callback = task.callback;
self.get('_local/compaction').catch(function () {
return false;
}).then(function (doc) {
if (doc && doc.last_seq) {
opts.last_seq = doc.last_seq;
}
self._compact(opts, function (err, res$$1) {
/* istanbul ignore if */
if (err) {
callback(err);
} else {
callback(null, res$$1);
}
nextTick(function () {
self._compactionQueue.shift();
if (self._compactionQueue.length) {
doNextCompaction(self);
}
});
});
});
}
function appendPurgeSeq(db, docId, rev$$1) {
return db.get('_local/purges').then(function (doc) {
const purgeSeq = doc.purgeSeq + 1;
doc.purges.push({
docId,
rev: rev$$1,
purgeSeq,
});
// keep at most purged_infos_limit entries in the purge log
if (doc.purges.length > db.purged_infos_limit) {
doc.purges.splice(0, doc.purges.length - db.purged_infos_limit);
}
doc.purgeSeq = purgeSeq;
return doc;
}).catch(function (err) {
if (err.status !== 404) {
throw err;
}
return {
_id: '_local/purges',
purges: [{
docId,
rev: rev$$1,
purgeSeq: 0,
}],
purgeSeq: 0,
};
}).then(function (doc) {
return db.put(doc);
});
}
function attachmentNameError(name) {
if (name.charAt(0) === '_') {
return name + ' is not a valid attachment name, attachment ' +
'names cannot start with \'_\'';
}
return false;
}
function isNotSingleDoc(doc) {
return doc === null || typeof doc !== 'object' || Array.isArray(doc);
}
const validRevRegex = /^\d+-[^-]*$/;
function isValidRev(rev$$1) {
return typeof rev$$1 === 'string' && validRevRegex.test(rev$$1);
}
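// Example:
//
//   isValidRev('3-a1b2c3');          // => true ('<generation>-<hash>')
//   isValidRev('not-a-rev');         // => false
//   isNotSingleDoc([{ _id: 'a' }]);  // => true: arrays must go through bulkDocs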
class AbstractPouchDB extends EE {
_setup() {
this.post = adapterFun('post', function (doc, opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
if (isNotSingleDoc(doc)) {
return callback(createError(NOT_AN_OBJECT));
}
this.bulkDocs({docs: [doc]}, opts, yankError(callback, doc._id));
}).bind(this);
this.put = adapterFun('put', function (doc, opts, cb) {
if (typeof opts === 'function') {
cb = opts;
opts = {};
}
if (isNotSingleDoc(doc)) {
return cb(createError(NOT_AN_OBJECT));
}
invalidIdError(doc._id);
if ('_rev' in doc && !isValidRev(doc._rev)) {
return cb(createError(INVALID_REV));
}
if (isLocalId(doc._id) && typeof this._putLocal === 'function') {
if (doc._deleted) {
return this._removeLocal(doc, cb);
} else {
return this._putLocal(doc, cb);
}
}
const putDoc = (next) => {
if (typeof this._put === 'function' && opts.new_edits !== false) {
this._put(doc, opts, next);
} else {
this.bulkDocs({docs: [doc]}, opts, yankError(next, doc._id));
}
};
if (opts.force && doc._rev) {
transformForceOptionToNewEditsOption();
putDoc(function (err) {
var result = err ? null : {ok: true, id: doc._id, rev: doc._rev};
cb(err, result);
});
} else {
putDoc(cb);
}
function transformForceOptionToNewEditsOption() {
var parts = doc._rev.split('-');
var oldRevId = parts[1];
var oldRevNum = parseInt(parts[0], 10);
var newRevNum = oldRevNum + 1;
var newRevId = rev();
doc._revisions = {
start: newRevNum,
ids: [newRevId, oldRevId]
};
doc._rev = newRevNum + '-' + newRevId;
opts.new_edits = false;
}
}).bind(this);
this.putAttachment = adapterFun('putAttachment', function (docId, attachmentId, rev$$1, blob, type) {
var api = this;
if (typeof type === 'function') {
type = blob;
blob = rev$$1;
rev$$1 = null;
}
// Let's fix in https://github.com/pouchdb/pouchdb/issues/3267
/* istanbul ignore if */
if (typeof type === 'undefined') {
type = blob;
blob = rev$$1;
rev$$1 = null;
}
if (!type) {
guardedConsole('warn', 'Attachment', attachmentId, 'on document', docId, 'is missing content_type');
}
function createAttachment(doc) {
var prevrevpos = '_rev' in doc ? parseInt(doc._rev, 10) : 0;
doc._attachments = doc._attachments || {};
doc._attachments[attachmentId] = {
content_type: type,
data: blob,
revpos: ++prevrevpos
};
return api.put(doc);
}
return api.get(docId).then(function (doc) {
if (doc._rev !== rev$$1) {
throw createError(REV_CONFLICT);
}
return createAttachment(doc);
}, function (err) {
// create new doc
/* istanbul ignore else */
if (err.reason === MISSING_DOC.message) {
return createAttachment({_id: docId});
} else {
throw err;
}
});
}).bind(this);
this.removeAttachment = adapterFun('removeAttachment', function (docId, attachmentId, rev$$1, callback) {
this.get(docId, (err, obj) => {
/* istanbul ignore if */
if (err) {
callback(err);
return;
}
if (obj._rev !== rev$$1) {
callback(createError(REV_CONFLICT));
return;
}
/* istanbul ignore if */
if (!obj._attachments) {
return callback();
}
delete obj._attachments[attachmentId];
if (Object.keys(obj._attachments).length === 0) {
delete obj._attachments;
}
this.put(obj, callback);
});
}).bind(this);
this.remove = adapterFun('remove', function (docOrId, optsOrRev, opts, callback) {
var doc;
if (type