| 'use strict'; |
| |
| function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } |
| |
| var crypto = _interopDefault(require('crypto')); |
| var nodeFetch = require('node-fetch'); |
| var nodeFetch__default = _interopDefault(nodeFetch); |
| var fetchCookie = _interopDefault(require('fetch-cookie')); |
| var uuid = require('uuid'); |
| var levelup = _interopDefault(require('levelup')); |
| var ltgt = _interopDefault(require('ltgt')); |
| var Codec = _interopDefault(require('level-codec')); |
| var ReadableStreamCore = _interopDefault(require('readable-stream')); |
| var Deque = _interopDefault(require('double-ended-queue')); |
| var vuvuzela = _interopDefault(require('vuvuzela')); |
| var fs = _interopDefault(require('fs')); |
| var path = _interopDefault(require('path')); |
| var level = _interopDefault(require('level')); |
| var through2 = require('through2'); |
| var LevelWriteStream = _interopDefault(require('level-write-stream')); |
| var vm = _interopDefault(require('vm')); |
| var EE = _interopDefault(require('events')); |
| |
| function isBinaryObject(object) { |
| return object instanceof Buffer; |
| } |
| |
| var cloneBinaryObject = (buffer) => Buffer.from(buffer); |
| |
| // most of this is borrowed from lodash.isPlainObject: |
| // https://github.com/fis-components/lodash.isplainobject/ |
| // blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js |
| |
| var funcToString = Function.prototype.toString; |
| var objectCtorString = funcToString.call(Object); |
| |
| function isPlainObject(value) { |
| var proto = Object.getPrototypeOf(value); |
| /* istanbul ignore if */ |
| if (proto === null) { // not sure when this happens, but I guess it can |
| return true; |
| } |
| var Ctor = proto.constructor; |
| return (typeof Ctor == 'function' && |
| Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString); |
| } |
| |
| function clone(object) { |
| var newObject; |
| var i; |
| var len; |
| |
| if (!object || typeof object !== 'object') { |
| return object; |
| } |
| |
| if (Array.isArray(object)) { |
| newObject = []; |
| for (i = 0, len = object.length; i < len; i++) { |
| newObject[i] = clone(object[i]); |
| } |
| return newObject; |
| } |
| |
| // special case: to avoid inconsistencies between IndexedDB |
| // and other backends, we automatically stringify Dates |
| if (object instanceof Date && isFinite(object)) { |
| return object.toISOString(); |
| } |
| |
| if (isBinaryObject(object)) { |
| return cloneBinaryObject(object); |
| } |
| |
| if (!isPlainObject(object)) { |
| return object; // don't clone objects like Workers |
| } |
| |
| newObject = {}; |
| for (i in object) { |
| /* istanbul ignore else */ |
| if (Object.prototype.hasOwnProperty.call(object, i)) { |
| var value = clone(object[i]); |
| if (typeof value !== 'undefined') { |
| newObject[i] = value; |
| } |
| } |
| } |
| return newObject; |
| } |
| |
| function once(fun) { |
| var called = false; |
| return function (...args) { |
| /* istanbul ignore if */ |
| if (called) { |
| // this is a smoke test and should never actually happen |
| throw new Error('once called more than once'); |
| } else { |
| called = true; |
| fun.apply(this, args); |
| } |
| }; |
| } |
| |
| function toPromise(func) { |
| //create the function we will be returning |
| return function (...args) { |
| // Clone arguments |
| args = clone(args); |
| var self = this; |
| // if the last argument is a function, assume its a callback |
| var usedCB = (typeof args[args.length - 1] === 'function') ? args.pop() : false; |
| var promise = new Promise(function (fulfill, reject) { |
| var resp; |
| try { |
| var callback = once(function (err, mesg) { |
| if (err) { |
| reject(err); |
| } else { |
| fulfill(mesg); |
| } |
| }); |
| // create a callback for this invocation |
| // apply the function in the orig context |
| args.push(callback); |
| resp = func.apply(self, args); |
| if (resp && typeof resp.then === 'function') { |
| fulfill(resp); |
| } |
| } catch (e) { |
| reject(e); |
| } |
| }); |
| // if there is a callback, call it back |
| if (usedCB) { |
| promise.then(function (result) { |
| usedCB(null, result); |
| }, usedCB); |
| } |
| return promise; |
| }; |
| } |
| |
| function logApiCall(self, name, args) { |
| /* istanbul ignore if */ |
| if (self.constructor.listeners('debug').length) { |
| var logArgs = ['api', self.name, name]; |
| for (var i = 0; i < args.length - 1; i++) { |
| logArgs.push(args[i]); |
| } |
| self.constructor.emit('debug', logArgs); |
| |
| // override the callback itself to log the response |
| var origCallback = args[args.length - 1]; |
| args[args.length - 1] = function (err, res) { |
| var responseArgs = ['api', self.name, name]; |
| responseArgs = responseArgs.concat( |
| err ? ['error', err] : ['success', res] |
| ); |
| self.constructor.emit('debug', responseArgs); |
| origCallback(err, res); |
| }; |
| } |
| } |
| |
| function adapterFun(name, callback) { |
| return toPromise(function (...args) { |
| if (this._closed) { |
| return Promise.reject(new Error('database is closed')); |
| } |
| if (this._destroyed) { |
| return Promise.reject(new Error('database is destroyed')); |
| } |
| var self = this; |
| logApiCall(self, name, args); |
| if (!this.taskqueue.isReady) { |
| return new Promise(function (fulfill, reject) { |
| self.taskqueue.addTask(function (failed) { |
| if (failed) { |
| reject(failed); |
| } else { |
| fulfill(self[name].apply(self, args)); |
| } |
| }); |
| }); |
| } |
| return callback.apply(this, args); |
| }); |
| } |
| |
| // like underscore/lodash _.pick() |
| function pick(obj, arr) { |
| var res = {}; |
| for (var i = 0, len = arr.length; i < len; i++) { |
| var prop = arr[i]; |
| if (prop in obj) { |
| res[prop] = obj[prop]; |
| } |
| } |
| return res; |
| } |
| |
| // Most browsers throttle concurrent requests at 6, so it's silly |
| // to shim _bulk_get by trying to launch potentially hundreds of requests |
| // and then letting the majority time out. We can handle this ourselves. |
| var MAX_NUM_CONCURRENT_REQUESTS = 6; |
| |
| function identityFunction(x) { |
| return x; |
| } |
| |
| function formatResultForOpenRevsGet(result) { |
| return [{ |
| ok: result |
| }]; |
| } |
| |
| // shim for P/CouchDB adapters that don't directly implement _bulk_get |
| function bulkGet(db, opts, callback) { |
| var requests = opts.docs; |
| |
| // consolidate into one request per doc if possible |
| var requestsById = new Map(); |
| requests.forEach(function (request) { |
| if (requestsById.has(request.id)) { |
| requestsById.get(request.id).push(request); |
| } else { |
| requestsById.set(request.id, [request]); |
| } |
| }); |
| |
| var numDocs = requestsById.size; |
| var numDone = 0; |
| var perDocResults = new Array(numDocs); |
| |
| function collapseResultsAndFinish() { |
| var results = []; |
| perDocResults.forEach(function (res) { |
| res.docs.forEach(function (info) { |
| results.push({ |
| id: res.id, |
| docs: [info] |
| }); |
| }); |
| }); |
| callback(null, {results}); |
| } |
| |
| function checkDone() { |
| if (++numDone === numDocs) { |
| collapseResultsAndFinish(); |
| } |
| } |
| |
| function gotResult(docIndex, id, docs) { |
| perDocResults[docIndex] = {id, docs}; |
| checkDone(); |
| } |
| |
| var allRequests = []; |
| requestsById.forEach(function (value, key) { |
| allRequests.push(key); |
| }); |
| |
| var i = 0; |
| |
| function nextBatch() { |
| |
| if (i >= allRequests.length) { |
| return; |
| } |
| |
| var upTo = Math.min(i + MAX_NUM_CONCURRENT_REQUESTS, allRequests.length); |
| var batch = allRequests.slice(i, upTo); |
| processBatch(batch, i); |
| i += batch.length; |
| } |
| |
| function processBatch(batch, offset) { |
| batch.forEach(function (docId, j) { |
| var docIdx = offset + j; |
| var docRequests = requestsById.get(docId); |
| |
| // just use the first request as the "template" |
| // TODO: The _bulk_get API allows for more subtle use cases than this, |
| // but for now it is unlikely that there will be a mix of different |
| // "atts_since" or "attachments" in the same request, since it's just |
| // replicate.js that is using this for the moment. |
| // Also, atts_since is aspirational, since we don't support it yet. |
| var docOpts = pick(docRequests[0], ['atts_since', 'attachments']); |
| docOpts.open_revs = docRequests.map(function (request) { |
| // rev is optional, open_revs disallowed |
| return request.rev; |
| }); |
| |
| // remove falsey / undefined revisions |
| docOpts.open_revs = docOpts.open_revs.filter(identityFunction); |
| |
| var formatResult = identityFunction; |
| |
| if (docOpts.open_revs.length === 0) { |
| delete docOpts.open_revs; |
| |
| // when fetching only the "winning" leaf, |
| // transform the result so it looks like an open_revs |
| // request |
| formatResult = formatResultForOpenRevsGet; |
| } |
| |
| // globally-supplied options |
| ['revs', 'attachments', 'binary', 'ajax', 'latest'].forEach(function (param) { |
| if (param in opts) { |
| docOpts[param] = opts[param]; |
| } |
| }); |
| db.get(docId, docOpts, function (err, res) { |
| var result; |
| /* istanbul ignore if */ |
| if (err) { |
| result = [{error: err}]; |
| } else { |
| result = formatResult(res); |
| } |
| gotResult(docIdx, docId, result); |
| nextBatch(); |
| }); |
| }); |
| } |
| |
| nextBatch(); |
| |
| } |
| |
| // in Node of course this is false |
| function hasLocalStorage() { |
| return false; |
| } |
| |
| const nextTick = typeof queueMicrotask === "function" |
| ? queueMicrotask |
| : function nextTick(fn) { |
| Promise.resolve().then(fn); |
| }; |
| |
| class Changes extends EE { |
| constructor() { |
| super(); |
| |
| this._listeners = {}; |
| |
| if (hasLocalStorage()) { |
| addEventListener("storage", (e) => { |
| this.emit(e.key); |
| }); |
| } |
| } |
| |
| addListener(dbName, id, db, opts) { |
| if (this._listeners[id]) { |
| return; |
| } |
| var inprogress = false; |
| var self = this; |
| function eventFunction() { |
| if (!self._listeners[id]) { |
| return; |
| } |
| if (inprogress) { |
| inprogress = 'waiting'; |
| return; |
| } |
| inprogress = true; |
| var changesOpts = pick(opts, [ |
| 'style', 'include_docs', 'attachments', 'conflicts', 'filter', |
| 'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs' |
| ]); |
| |
| function onError() { |
| inprogress = false; |
| } |
| |
| db.changes(changesOpts).on('change', function (c) { |
| if (c.seq > opts.since && !opts.cancelled) { |
| opts.since = c.seq; |
| opts.onChange(c); |
| } |
| }).on('complete', function () { |
| if (inprogress === 'waiting') { |
| nextTick(eventFunction); |
| } |
| inprogress = false; |
| }).on('error', onError); |
| } |
| this._listeners[id] = eventFunction; |
| this.on(dbName, eventFunction); |
| } |
| |
| removeListener(dbName, id) { |
| if (!(id in this._listeners)) { |
| return; |
| } |
| super.removeListener(dbName, this._listeners[id]); |
| delete this._listeners[id]; |
| } |
| |
| notifyLocalWindows(dbName) { |
| //do a useless change on a storage thing |
| //in order to get other windows's listeners to activate |
| if (hasLocalStorage()) { |
| localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a"; |
| } |
| } |
| |
| notify(dbName) { |
| this.emit(dbName); |
| this.notifyLocalWindows(dbName); |
| } |
| } |
| |
| function guardedConsole(method) { |
| /* istanbul ignore else */ |
| if (typeof console !== 'undefined' && typeof console[method] === 'function') { |
| var args = Array.prototype.slice.call(arguments, 1); |
| console[method].apply(console, args); |
| } |
| } |
| |
| function randomNumber(min, max) { |
| var maxTimeout = 600000; // Hard-coded default of 10 minutes |
| min = parseInt(min, 10) || 0; |
| max = parseInt(max, 10); |
| if (max !== max || max <= min) { |
| max = (min || 1) << 1; //doubling |
| } else { |
| max = max + 1; |
| } |
| // In order to not exceed maxTimeout, pick a random value between half of maxTimeout and maxTimeout |
| if (max > maxTimeout) { |
| min = maxTimeout >> 1; // divide by two |
| max = maxTimeout; |
| } |
| var ratio = Math.random(); |
| var range = max - min; |
| |
| return ~~(range * ratio + min); // ~~ coerces to an int, but fast. |
| } |
| |
| function defaultBackOff(min) { |
| var max = 0; |
| if (!min) { |
| max = 2000; |
| } |
| return randomNumber(min, max); |
| } |
| |
| // We assume Node users don't need to see this warning |
| var res = function () {}; |
| |
| class PouchError extends Error { |
| constructor(status, error, reason) { |
| super(); |
| this.status = status; |
| this.name = error; |
| this.message = reason; |
| this.error = true; |
| } |
| |
| toString() { |
| return JSON.stringify({ |
| status: this.status, |
| name: this.name, |
| message: this.message, |
| reason: this.reason |
| }); |
| } |
| } |
| |
| var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect."); |
| var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'"); |
| var MISSING_DOC = new PouchError(404, 'not_found', 'missing'); |
| var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict'); |
| var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string'); |
| var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts'); |
| var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.'); |
| var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open'); |
| var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error'); |
| var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid'); |
| var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid'); |
| var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid'); |
| var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member'); |
| var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request'); |
| var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object'); |
| var DB_MISSING = new PouchError(404, 'not_found', 'Database not found'); |
| var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown'); |
| var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown'); |
| var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown'); |
| var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function'); |
| var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format'); |
| var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.'); |
| var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found'); |
| var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid'); |
| |
| function createError(error, reason) { |
| function CustomPouchError(reason) { |
| // inherit error properties from our parent error manually |
| // so as to allow proper JSON parsing. |
| var names = Object.getOwnPropertyNames(error); |
| for (var i = 0, len = names.length; i < len; i++) { |
| if (typeof error[names[i]] !== 'function') { |
| this[names[i]] = error[names[i]]; |
| } |
| } |
| |
| if (this.stack === undefined) { |
| this.stack = (new Error()).stack; |
| } |
| |
| if (reason !== undefined) { |
| this.reason = reason; |
| } |
| } |
| CustomPouchError.prototype = PouchError.prototype; |
| return new CustomPouchError(reason); |
| } |
| |
| function generateErrorFromResponse(err) { |
| |
| if (typeof err !== 'object') { |
| var data = err; |
| err = UNKNOWN_ERROR; |
| err.data = data; |
| } |
| |
| if ('error' in err && err.error === 'conflict') { |
| err.name = 'conflict'; |
| err.status = 409; |
| } |
| |
| if (!('name' in err)) { |
| err.name = err.error || 'unknown'; |
| } |
| |
| if (!('status' in err)) { |
| err.status = 500; |
| } |
| |
| if (!('message' in err)) { |
| err.message = err.message || err.reason; |
| } |
| |
| if (!('stack' in err)) { |
| err.stack = (new Error()).stack; |
| } |
| |
| return err; |
| } |
| |
| function tryFilter(filter, doc, req) { |
| try { |
| return !filter(doc, req); |
| } catch (err) { |
| var msg = 'Filter function threw: ' + err.toString(); |
| return createError(BAD_REQUEST, msg); |
| } |
| } |
| |
| function filterChange(opts) { |
| var req = {}; |
| var hasFilter = opts.filter && typeof opts.filter === 'function'; |
| req.query = opts.query_params; |
| |
| return function filter(change) { |
| if (!change.doc) { |
| // CSG sends events on the changes feed that don't have documents, |
| // this hack makes a whole lot of existing code robust. |
| change.doc = {}; |
| } |
| |
| var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req); |
| |
| if (typeof filterReturn === 'object') { |
| return filterReturn; |
| } |
| |
| if (filterReturn) { |
| return false; |
| } |
| |
| if (!opts.include_docs) { |
| delete change.doc; |
| } else if (!opts.attachments) { |
| for (var att in change.doc._attachments) { |
| /* istanbul ignore else */ |
| if (Object.prototype.hasOwnProperty.call(change.doc._attachments, att)) { |
| change.doc._attachments[att].stub = true; |
| } |
| } |
| } |
| return true; |
| }; |
| } |
| |
| // shim for Function.prototype.name, |
| // for browsers that don't support it like IE |
| |
| /* istanbul ignore next */ |
| function f() {} |
| |
| var hasName = f.name; |
| var res$1; |
| |
| // We don't run coverage in IE |
| /* istanbul ignore else */ |
| if (hasName) { |
| res$1 = function (fun) { |
| return fun.name; |
| }; |
| } else { |
| res$1 = function (fun) { |
| var match = fun.toString().match(/^\s*function\s*(?:(\S+)\s*)?\(/); |
| if (match && match[1]) { |
| return match[1]; |
| } |
| else { |
| return ''; |
| } |
| }; |
| } |
| |
| var functionName = res$1; |
| |
| // Determine id an ID is valid |
| // - invalid IDs begin with an underescore that does not begin '_design' or |
| // '_local' |
| // - any other string value is a valid id |
| // Returns the specific error object for each case |
| function invalidIdError(id) { |
| var err; |
| if (!id) { |
| err = createError(MISSING_ID); |
| } else if (typeof id !== 'string') { |
| err = createError(INVALID_ID); |
| } else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) { |
| err = createError(RESERVED_ID); |
| } |
| if (err) { |
| throw err; |
| } |
| } |
| |
| // Checks if a PouchDB object is "remote" or not. This is |
| |
| function isRemote(db) { |
| if (typeof db._remote === 'boolean') { |
| return db._remote; |
| } |
| /* istanbul ignore next */ |
| if (typeof db.type === 'function') { |
| guardedConsole('warn', |
| 'db.type() is deprecated and will be removed in ' + |
| 'a future version of PouchDB'); |
| return db.type() === 'http'; |
| } |
| /* istanbul ignore next */ |
| return false; |
| } |
| |
| function listenerCount(ee, type) { |
| return 'listenerCount' in ee ? ee.listenerCount(type) : |
| EE.listenerCount(ee, type); |
| } |
| |
| function parseDesignDocFunctionName(s) { |
| if (!s) { |
| return null; |
| } |
| var parts = s.split('/'); |
| if (parts.length === 2) { |
| return parts; |
| } |
| if (parts.length === 1) { |
| return [s, s]; |
| } |
| return null; |
| } |
| |
| function normalizeDesignDocFunctionName(s) { |
| var normalized = parseDesignDocFunctionName(s); |
| return normalized ? normalized.join('/') : null; |
| } |
| |
| // originally parseUri 1.2.2, now patched by us |
| // (c) Steven Levithan <stevenlevithan.com> |
| // MIT License |
| var keys = ["source", "protocol", "authority", "userInfo", "user", "password", |
| "host", "port", "relative", "path", "directory", "file", "query", "anchor"]; |
| var qName ="queryKey"; |
| var qParser = /(?:^|&)([^&=]*)=?([^&]*)/g; |
| |
| // use the "loose" parser |
| /* eslint no-useless-escape: 0 */ |
| var parser = /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/; |
| |
| function parseUri(str) { |
| var m = parser.exec(str); |
| var uri = {}; |
| var i = 14; |
| |
| while (i--) { |
| var key = keys[i]; |
| var value = m[i] || ""; |
| var encoded = ['user', 'password'].indexOf(key) !== -1; |
| uri[key] = encoded ? decodeURIComponent(value) : value; |
| } |
| |
| uri[qName] = {}; |
| uri[keys[12]].replace(qParser, function ($0, $1, $2) { |
| if ($1) { |
| uri[qName][$1] = $2; |
| } |
| }); |
| |
| return uri; |
| } |
| |
| // Based on https://github.com/alexdavid/scope-eval v0.0.3 |
| |
| // this is essentially the "update sugar" function from daleharvey/pouchdb#1388 |
| // the diffFun tells us what delta to apply to the doc. it either returns |
| // the doc, or false if it doesn't need to do an update after all |
| function upsert(db, docId, diffFun) { |
| return db.get(docId) |
| .catch(function (err) { |
| /* istanbul ignore next */ |
| if (err.status !== 404) { |
| throw err; |
| } |
| return {}; |
| }) |
| .then(function (doc) { |
| // the user might change the _rev, so save it for posterity |
| var docRev = doc._rev; |
| var newDoc = diffFun(doc); |
| |
| if (!newDoc) { |
| // if the diffFun returns falsy, we short-circuit as |
| // an optimization |
| return {updated: false, rev: docRev}; |
| } |
| |
| // users aren't allowed to modify these values, |
| // so reset them here |
| newDoc._id = docId; |
| newDoc._rev = docRev; |
| return tryAndPut(db, newDoc, diffFun); |
| }); |
| } |
| |
| function tryAndPut(db, doc, diffFun) { |
| return db.put(doc).then(function (res) { |
| return { |
| updated: true, |
| rev: res.rev |
| }; |
| }, function (err) { |
| /* istanbul ignore next */ |
| if (err.status !== 409) { |
| throw err; |
| } |
| return upsert(db, doc._id, diffFun); |
| }); |
| } |
| |
| function binaryMd5(data, callback) { |
| var base64 = crypto.createHash('md5').update(data, 'binary').digest('base64'); |
| callback(base64); |
| } |
| |
| function stringMd5(string) { |
| return crypto.createHash('md5').update(string, 'binary').digest('hex'); |
| } |
| |
| /** |
| * Creates a new revision string that does NOT include the revision height |
| * For example '56649f1b0506c6ca9fda0746eb0cacdf' |
| */ |
| function rev(doc, deterministic_revs) { |
| if (!deterministic_revs) { |
| return uuid.v4().replace(/-/g, '').toLowerCase(); |
| } |
| |
| var mutateableDoc = Object.assign({}, doc); |
| delete mutateableDoc._rev_tree; |
| return stringMd5(JSON.stringify(mutateableDoc)); |
| } |
| |
| var uuid$1 = uuid.v4; // mimic old import, only v4 is ever used elsewhere |
| |
| // We fetch all leafs of the revision tree, and sort them based on tree length |
| // and whether they were deleted, undeleted documents with the longest revision |
| // tree (most edits) win |
| // The final sort algorithm is slightly documented in a sidebar here: |
| // http://guide.couchdb.org/draft/conflicts.html |
| function winningRev(metadata) { |
| var winningId; |
| var winningPos; |
| var winningDeleted; |
| var toVisit = metadata.rev_tree.slice(); |
| var node; |
| while ((node = toVisit.pop())) { |
| var tree = node.ids; |
| var branches = tree[2]; |
| var pos = node.pos; |
| if (branches.length) { // non-leaf |
| for (var i = 0, len = branches.length; i < len; i++) { |
| toVisit.push({pos: pos + 1, ids: branches[i]}); |
| } |
| continue; |
| } |
| var deleted = !!tree[1].deleted; |
| var id = tree[0]; |
| // sort by deleted, then pos, then id |
| if (!winningId || (winningDeleted !== deleted ? winningDeleted : |
| winningPos !== pos ? winningPos < pos : winningId < id)) { |
| winningId = id; |
| winningPos = pos; |
| winningDeleted = deleted; |
| } |
| } |
| |
| return winningPos + '-' + winningId; |
| } |
| |
| // Pretty much all below can be combined into a higher order function to |
| // traverse revisions |
| // The return value from the callback will be passed as context to all |
| // children of that node |
| function traverseRevTree(revs, callback) { |
| var toVisit = revs.slice(); |
| |
| var node; |
| while ((node = toVisit.pop())) { |
| var pos = node.pos; |
| var tree = node.ids; |
| var branches = tree[2]; |
| var newCtx = |
| callback(branches.length === 0, pos, tree[0], node.ctx, tree[1]); |
| for (var i = 0, len = branches.length; i < len; i++) { |
| toVisit.push({pos: pos + 1, ids: branches[i], ctx: newCtx}); |
| } |
| } |
| } |
| |
| function sortByPos(a, b) { |
| return a.pos - b.pos; |
| } |
| |
| function collectLeaves(revs) { |
| var leaves = []; |
| traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) { |
| if (isLeaf) { |
| leaves.push({rev: pos + "-" + id, pos, opts}); |
| } |
| }); |
| leaves.sort(sortByPos).reverse(); |
| for (var i = 0, len = leaves.length; i < len; i++) { |
| delete leaves[i].pos; |
| } |
| return leaves; |
| } |
| |
| // returns revs of all conflicts that is leaves such that |
| // 1. are not deleted and |
| // 2. are different than winning revision |
| function collectConflicts(metadata) { |
| var win = winningRev(metadata); |
| var leaves = collectLeaves(metadata.rev_tree); |
| var conflicts = []; |
| for (var i = 0, len = leaves.length; i < len; i++) { |
| var leaf = leaves[i]; |
| if (leaf.rev !== win && !leaf.opts.deleted) { |
| conflicts.push(leaf.rev); |
| } |
| } |
| return conflicts; |
| } |
| |
| // compact a tree by marking its non-leafs as missing, |
| // and return a list of revs to delete |
| function compactTree(metadata) { |
| var revs = []; |
| traverseRevTree(metadata.rev_tree, function (isLeaf, pos, |
| revHash, ctx, opts) { |
| if (opts.status === 'available' && !isLeaf) { |
| revs.push(pos + '-' + revHash); |
| opts.status = 'missing'; |
| } |
| }); |
| return revs; |
| } |
| |
| // `findPathToLeaf()` returns an array of revs that goes from the specified |
| // leaf rev to the root of that leaf’s branch. |
| // |
| // eg. for this rev tree: |
| // 1-9692 ▶ 2-37aa ▶ 3-df22 ▶ 4-6e94 ▶ 5-df4a ▶ 6-6a3a ▶ 7-57e5 |
| // ┃ ┗━━━━━━▶ 5-8d8c ▶ 6-65e0 |
| // ┗━━━━━━▶ 3-43f6 ▶ 4-a3b4 |
| // |
| // For a `targetRev` of '7-57e5', `findPathToLeaf()` would return ['7-57e5', '6-6a3a', '5-df4a'] |
| // The `revs` argument has the same structure as what `revs_tree` has on e.g. |
| // the IndexedDB representation of the rev tree datastructure. Please refer to |
| // tests/unit/test.purge.js for examples of what these look like. |
| // |
| // This function will throw an error if: |
| // - The requested revision does not exist |
| // - The requested revision is not a leaf |
| function findPathToLeaf(revs, targetRev) { |
| let path$$1 = []; |
| const toVisit = revs.slice(); |
| |
| let node; |
| while ((node = toVisit.pop())) { |
| const { pos, ids: tree } = node; |
| const rev = `${pos}-${tree[0]}`; |
| const branches = tree[2]; |
| |
| // just assuming we're already working on the path up towards our desired leaf. |
| path$$1.push(rev); |
| |
| // we've reached the leaf of our dreams, so return the computed path. |
| if (rev === targetRev) { |
| //…unleeeeess |
| if (branches.length !== 0) { |
| throw new Error('The requested revision is not a leaf'); |
| } |
| return path$$1.reverse(); |
| } |
| |
| // this is based on the assumption that after we have a leaf (`branches.length == 0`), we handle the next |
| // branch. this is true for all branches other than the path leading to the winning rev (which is 7-57e5 in |
| // the example above. i've added a reset condition for branching nodes (`branches.length > 1`) as well. |
| if (branches.length === 0 || branches.length > 1) { |
| path$$1 = []; |
| } |
| |
| // as a next step, we push the branches of this node to `toVisit` for visiting it during the next iteration |
| for (let i = 0, len = branches.length; i < len; i++) { |
| toVisit.push({ pos: pos + 1, ids: branches[i] }); |
| } |
| } |
| if (path$$1.length === 0) { |
| throw new Error('The requested revision does not exist'); |
| } |
| return path$$1.reverse(); |
| } |
| |
| // build up a list of all the paths to the leafs in this revision tree |
| function rootToLeaf(revs) { |
| var paths = []; |
| var toVisit = revs.slice(); |
| var node; |
| while ((node = toVisit.pop())) { |
| var pos = node.pos; |
| var tree = node.ids; |
| var id = tree[0]; |
| var opts = tree[1]; |
| var branches = tree[2]; |
| var isLeaf = branches.length === 0; |
| |
| var history = node.history ? node.history.slice() : []; |
| history.push({id, opts}); |
| if (isLeaf) { |
| paths.push({pos: (pos + 1 - history.length), ids: history}); |
| } |
| for (var i = 0, len = branches.length; i < len; i++) { |
| toVisit.push({pos: pos + 1, ids: branches[i], history}); |
| } |
| } |
| return paths.reverse(); |
| } |
| |
| // for a better overview of what this is doing, read: |
| |
| function sortByPos$1(a, b) { |
| return a.pos - b.pos; |
| } |
| |
| // classic binary search |
| function binarySearch(arr, item, comparator) { |
| var low = 0; |
| var high = arr.length; |
| var mid; |
| while (low < high) { |
| mid = (low + high) >>> 1; |
| if (comparator(arr[mid], item) < 0) { |
| low = mid + 1; |
| } else { |
| high = mid; |
| } |
| } |
| return low; |
| } |
| |
| // assuming the arr is sorted, insert the item in the proper place |
| function insertSorted(arr, item, comparator) { |
| var idx = binarySearch(arr, item, comparator); |
| arr.splice(idx, 0, item); |
| } |
| |
| // Turn a path as a flat array into a tree with a single branch. |
| // If any should be stemmed from the beginning of the array, that's passed |
| // in as the second argument |
| function pathToTree(path$$1, numStemmed) { |
| var root; |
| var leaf; |
| for (var i = numStemmed, len = path$$1.length; i < len; i++) { |
| var node = path$$1[i]; |
| var currentLeaf = [node.id, node.opts, []]; |
| if (leaf) { |
| leaf[2].push(currentLeaf); |
| leaf = currentLeaf; |
| } else { |
| root = leaf = currentLeaf; |
| } |
| } |
| return root; |
| } |
| |
| // compare the IDs of two trees |
| function compareTree(a, b) { |
| return a[0] < b[0] ? -1 : 1; |
| } |
| |
| // Merge two trees together |
| // The roots of tree1 and tree2 must be the same revision |
| function mergeTree(in_tree1, in_tree2) { |
| var queue = [{tree1: in_tree1, tree2: in_tree2}]; |
| var conflicts = false; |
| while (queue.length > 0) { |
| var item = queue.pop(); |
| var tree1 = item.tree1; |
| var tree2 = item.tree2; |
| |
| if (tree1[1].status || tree2[1].status) { |
| tree1[1].status = |
| (tree1[1].status === 'available' || |
| tree2[1].status === 'available') ? 'available' : 'missing'; |
| } |
| |
| for (var i = 0; i < tree2[2].length; i++) { |
| if (!tree1[2][0]) { |
| conflicts = 'new_leaf'; |
| tree1[2][0] = tree2[2][i]; |
| continue; |
| } |
| |
| var merged = false; |
| for (var j = 0; j < tree1[2].length; j++) { |
| if (tree1[2][j][0] === tree2[2][i][0]) { |
| queue.push({tree1: tree1[2][j], tree2: tree2[2][i]}); |
| merged = true; |
| } |
| } |
| if (!merged) { |
| conflicts = 'new_branch'; |
| insertSorted(tree1[2], tree2[2][i], compareTree); |
| } |
| } |
| } |
| return {conflicts, tree: in_tree1}; |
| } |
| |
| function doMerge(tree, path$$1, dontExpand) { |
| var restree = []; |
| var conflicts = false; |
| var merged = false; |
| var res; |
| |
| if (!tree.length) { |
| return {tree: [path$$1], conflicts: 'new_leaf'}; |
| } |
| |
| for (var i = 0, len = tree.length; i < len; i++) { |
| var branch = tree[i]; |
| if (branch.pos === path$$1.pos && branch.ids[0] === path$$1.ids[0]) { |
| // Paths start at the same position and have the same root, so they need |
| // merged |
| res = mergeTree(branch.ids, path$$1.ids); |
| restree.push({pos: branch.pos, ids: res.tree}); |
| conflicts = conflicts || res.conflicts; |
| merged = true; |
| } else if (dontExpand !== true) { |
| // The paths start at a different position, take the earliest path and |
| // traverse up until it as at the same point from root as the path we |
| // want to merge. If the keys match we return the longer path with the |
| // other merged After stemming we don't want to expand the trees |
| |
| var t1 = branch.pos < path$$1.pos ? branch : path$$1; |
| var t2 = branch.pos < path$$1.pos ? path$$1 : branch; |
| var diff = t2.pos - t1.pos; |
| |
| var candidateParents = []; |
| |
| var trees = []; |
| trees.push({ids: t1.ids, diff, parent: null, parentIdx: null}); |
| while (trees.length > 0) { |
| var item = trees.pop(); |
| if (item.diff === 0) { |
| if (item.ids[0] === t2.ids[0]) { |
| candidateParents.push(item); |
| } |
| continue; |
| } |
| var elements = item.ids[2]; |
| for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) { |
| trees.push({ |
| ids: elements[j], |
| diff: item.diff - 1, |
| parent: item.ids, |
| parentIdx: j |
| }); |
| } |
| } |
| |
| var el = candidateParents[0]; |
| |
| if (!el) { |
| restree.push(branch); |
| } else { |
| res = mergeTree(el.ids, t2.ids); |
| el.parent[2][el.parentIdx] = res.tree; |
| restree.push({pos: t1.pos, ids: t1.ids}); |
| conflicts = conflicts || res.conflicts; |
| merged = true; |
| } |
| } else { |
| restree.push(branch); |
| } |
| } |
| |
| // We didnt find |
| if (!merged) { |
| restree.push(path$$1); |
| } |
| |
| restree.sort(sortByPos$1); |
| |
| return { |
| tree: restree, |
| conflicts: conflicts || 'internal_node' |
| }; |
| } |
| |
| // To ensure we don't grow the revision tree infinitely, we stem old revisions |
| function stem(tree, depth) { |
| // First we break out the tree into a complete list of root to leaf paths |
| var paths = rootToLeaf(tree); |
| var stemmedRevs; |
| |
| var result; |
| for (var i = 0, len = paths.length; i < len; i++) { |
| // Then for each path, we cut off the start of the path based on the |
| // `depth` to stem to, and generate a new set of flat trees |
| var path$$1 = paths[i]; |
| var stemmed = path$$1.ids; |
| var node; |
| if (stemmed.length > depth) { |
| // only do the stemming work if we actually need to stem |
| if (!stemmedRevs) { |
| stemmedRevs = {}; // avoid allocating this object unnecessarily |
| } |
| var numStemmed = stemmed.length - depth; |
| node = { |
| pos: path$$1.pos + numStemmed, |
| ids: pathToTree(stemmed, numStemmed) |
| }; |
| |
| for (var s = 0; s < numStemmed; s++) { |
| var rev = (path$$1.pos + s) + '-' + stemmed[s].id; |
| stemmedRevs[rev] = true; |
| } |
| } else { // no need to actually stem |
| node = { |
| pos: path$$1.pos, |
| ids: pathToTree(stemmed, 0) |
| }; |
| } |
| |
| // Then we remerge all those flat trees together, ensuring that we don't |
| // connect trees that would go beyond the depth limit |
| if (result) { |
| result = doMerge(result, node, true).tree; |
| } else { |
| result = [node]; |
| } |
| } |
| |
| // this is memory-heavy per Chrome profiler, avoid unless we actually stemmed |
| if (stemmedRevs) { |
| traverseRevTree(result, function (isLeaf, pos, revHash) { |
| // some revisions may have been removed in a branch but not in another |
| delete stemmedRevs[pos + '-' + revHash]; |
| }); |
| } |
| |
| return { |
| tree: result, |
| revs: stemmedRevs ? Object.keys(stemmedRevs) : [] |
| }; |
| } |
| |
| function merge(tree, path$$1, depth) { |
| var newTree = doMerge(tree, path$$1); |
| var stemmed = stem(newTree.tree, depth); |
| return { |
| tree: stemmed.tree, |
| stemmedRevs: stemmed.revs, |
| conflicts: newTree.conflicts |
| }; |
| } |
| |
| // return true if a rev exists in the rev tree, false otherwise |
| function revExists(revs, rev) { |
| var toVisit = revs.slice(); |
| var splitRev = rev.split('-'); |
| var targetPos = parseInt(splitRev[0], 10); |
| var targetId = splitRev[1]; |
| |
| var node; |
| while ((node = toVisit.pop())) { |
| if (node.pos === targetPos && node.ids[0] === targetId) { |
| return true; |
| } |
| var branches = node.ids[2]; |
| for (var i = 0, len = branches.length; i < len; i++) { |
| toVisit.push({pos: node.pos + 1, ids: branches[i]}); |
| } |
| } |
| return false; |
| } |
| |
| function getTrees(node) { |
| return node.ids; |
| } |
| |
| // check if a specific revision of a doc has been deleted |
| // - metadata: the metadata object from the doc store |
| // - rev: (optional) the revision to check. defaults to winning revision |
| function isDeleted(metadata, rev) { |
| if (!rev) { |
| rev = winningRev(metadata); |
| } |
| var id = rev.substring(rev.indexOf('-') + 1); |
| var toVisit = metadata.rev_tree.map(getTrees); |
| |
| var tree; |
| while ((tree = toVisit.pop())) { |
| if (tree[0] === id) { |
| return !!tree[1].deleted; |
| } |
| toVisit = toVisit.concat(tree[2]); |
| } |
| } |
| |
| function isLocalId(id) { |
| return typeof id === 'string' && id.startsWith('_local/'); |
| } |
| |
| // returns the current leaf node for a given revision |
| function latest(rev, metadata) { |
| var toVisit = metadata.rev_tree.slice(); |
| var node; |
| while ((node = toVisit.pop())) { |
| var pos = node.pos; |
| var tree = node.ids; |
| var id = tree[0]; |
| var opts = tree[1]; |
| var branches = tree[2]; |
| var isLeaf = branches.length === 0; |
| |
| var history = node.history ? node.history.slice() : []; |
| history.push({id, pos, opts}); |
| |
| if (isLeaf) { |
| for (var i = 0, len = history.length; i < len; i++) { |
| var historyNode = history[i]; |
| var historyRev = historyNode.pos + '-' + historyNode.id; |
| |
| if (historyRev === rev) { |
| // return the rev of this leaf |
| return pos + '-' + id; |
| } |
| } |
| } |
| |
| for (var j = 0, l = branches.length; j < l; j++) { |
| toVisit.push({pos: pos + 1, ids: branches[j], history}); |
| } |
| } |
| |
| /* istanbul ignore next */ |
| throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev); |
| } |
| |
| function tryCatchInChangeListener(self, change, pending, lastSeq) { |
| // isolate try/catches to avoid V8 deoptimizations |
| try { |
| self.emit('change', change, pending, lastSeq); |
| } catch (e) { |
| guardedConsole('error', 'Error in .on("change", function):', e); |
| } |
| } |
| |
| function processChange(doc, metadata, opts) { |
| var changeList = [{rev: doc._rev}]; |
| if (opts.style === 'all_docs') { |
| changeList = collectLeaves(metadata.rev_tree) |
| .map(function (x) { return {rev: x.rev}; }); |
| } |
| var change = { |
| id: metadata.id, |
| changes: changeList, |
| doc |
| }; |
| |
| if (isDeleted(metadata, doc._rev)) { |
| change.deleted = true; |
| } |
| if (opts.conflicts) { |
| change.doc._conflicts = collectConflicts(metadata); |
| if (!change.doc._conflicts.length) { |
| delete change.doc._conflicts; |
| } |
| } |
| return change; |
| } |
| |
| class Changes$1 extends EE { |
| constructor(db, opts, callback) { |
| super(); |
| this.db = db; |
| opts = opts ? clone(opts) : {}; |
| var complete = opts.complete = once((err, resp) => { |
| if (err) { |
| if (listenerCount(this, 'error') > 0) { |
| this.emit('error', err); |
| } |
| } else { |
| this.emit('complete', resp); |
| } |
| this.removeAllListeners(); |
| db.removeListener('destroyed', onDestroy); |
| }); |
| if (callback) { |
| this.on('complete', function (resp) { |
| callback(null, resp); |
| }); |
| this.on('error', callback); |
| } |
| const onDestroy = () => { |
| this.cancel(); |
| }; |
| db.once('destroyed', onDestroy); |
| |
| opts.onChange = (change, pending, lastSeq) => { |
| /* istanbul ignore if */ |
| if (this.isCancelled) { |
| return; |
| } |
| tryCatchInChangeListener(this, change, pending, lastSeq); |
| }; |
| |
| var promise = new Promise(function (fulfill, reject) { |
| opts.complete = function (err, res$$1) { |
| if (err) { |
| reject(err); |
| } else { |
| fulfill(res$$1); |
| } |
| }; |
| }); |
| this.once('cancel', function () { |
| db.removeListener('destroyed', onDestroy); |
| opts.complete(null, {status: 'cancelled'}); |
| }); |
| this.then = promise.then.bind(promise); |
| this['catch'] = promise['catch'].bind(promise); |
| this.then(function (result) { |
| complete(null, result); |
| }, complete); |
| |
| |
| |
| if (!db.taskqueue.isReady) { |
| db.taskqueue.addTask((failed) => { |
| if (failed) { |
| opts.complete(failed); |
| } else if (this.isCancelled) { |
| this.emit('cancel'); |
| } else { |
| this.validateChanges(opts); |
| } |
| }); |
| } else { |
| this.validateChanges(opts); |
| } |
| } |
| |
| cancel() { |
| this.isCancelled = true; |
| if (this.db.taskqueue.isReady) { |
| this.emit('cancel'); |
| } |
| } |
| |
| validateChanges(opts) { |
| var callback = opts.complete; |
| |
| /* istanbul ignore else */ |
| if (PouchDB._changesFilterPlugin) { |
| PouchDB._changesFilterPlugin.validate(opts, (err) => { |
| if (err) { |
| return callback(err); |
| } |
| this.doChanges(opts); |
| }); |
| } else { |
| this.doChanges(opts); |
| } |
| } |
| |
| doChanges(opts) { |
| var callback = opts.complete; |
| |
| opts = clone(opts); |
| if ('live' in opts && !('continuous' in opts)) { |
| opts.continuous = opts.live; |
| } |
| opts.processChange = processChange; |
| |
| if (opts.since === 'latest') { |
| opts.since = 'now'; |
| } |
| if (!opts.since) { |
| opts.since = 0; |
| } |
| if (opts.since === 'now') { |
| this.db.info().then((info) => { |
| /* istanbul ignore if */ |
| if (this.isCancelled) { |
| callback(null, {status: 'cancelled'}); |
| return; |
| } |
| opts.since = info.update_seq; |
| this.doChanges(opts); |
| }, callback); |
| return; |
| } |
| |
| /* istanbul ignore else */ |
| if (PouchDB._changesFilterPlugin) { |
| PouchDB._changesFilterPlugin.normalize(opts); |
| if (PouchDB._changesFilterPlugin.shouldFilter(this, opts)) { |
| return PouchDB._changesFilterPlugin.filter(this, opts); |
| } |
| } else { |
| ['doc_ids', 'filter', 'selector', 'view'].forEach(function (key) { |
| if (key in opts) { |
| guardedConsole('warn', |
| 'The "' + key + '" option was passed in to changes/replicate, ' + |
| 'but pouchdb-changes-filter plugin is not installed, so it ' + |
| 'was ignored. Please install the plugin to enable filtering.' |
| ); |
| } |
| }); |
| } |
| |
| if (!('descending' in opts)) { |
| opts.descending = false; |
| } |
| |
| // 0 and 1 should return 1 document |
| opts.limit = opts.limit === 0 ? 1 : opts.limit; |
| opts.complete = callback; |
| var newPromise = this.db._changes(opts); |
| /* istanbul ignore else */ |
| if (newPromise && typeof newPromise.cancel === 'function') { |
| const cancel = this.cancel; |
| this.cancel = (...args) => { |
| newPromise.cancel(); |
| cancel.apply(this, args); |
| }; |
| } |
| } |
| } |
| |
| /* |
| * A generic pouch adapter |
| */ |
| |
| // Wrapper for functions that call the bulkdocs api with a single doc, |
| // if the first result is an error, return an error |
| function yankError(callback, docId) { |
| return function (err, results) { |
| if (err || (results[0] && results[0].error)) { |
| err = err || results[0]; |
| err.docId = docId; |
| callback(err); |
| } else { |
| callback(null, results.length ? results[0] : results); |
| } |
| }; |
| } |
| |
| // clean docs given to us by the user |
| function cleanDocs(docs) { |
| for (var i = 0; i < docs.length; i++) { |
| var doc = docs[i]; |
| if (doc._deleted) { |
| delete doc._attachments; // ignore atts for deleted docs |
| } else if (doc._attachments) { |
| // filter out extraneous keys from _attachments |
| var atts = Object.keys(doc._attachments); |
| for (var j = 0; j < atts.length; j++) { |
| var att = atts[j]; |
| doc._attachments[att] = pick(doc._attachments[att], |
| ['data', 'digest', 'content_type', 'length', 'revpos', 'stub']); |
| } |
| } |
| } |
| } |
| |
| // compare two docs, first by _id then by _rev |
| function compareByIdThenRev(a, b) { |
| if (a._id === b._id) { |
| const aStart = a._revisions ? a._revisions.start : 0; |
| const bStart = b._revisions ? b._revisions.start : 0; |
| return aStart - bStart; |
| } |
| return a._id < b._id ? -1 : 1; |
| } |
| |
| // for every node in a revision tree computes its distance from the closest |
| // leaf |
| function computeHeight(revs) { |
| var height = {}; |
| var edges = []; |
| traverseRevTree(revs, function (isLeaf, pos, id, prnt) { |
| var rev$$1 = pos + "-" + id; |
| if (isLeaf) { |
| height[rev$$1] = 0; |
| } |
| if (prnt !== undefined) { |
| edges.push({from: prnt, to: rev$$1}); |
| } |
| return rev$$1; |
| }); |
| |
| edges.reverse(); |
| edges.forEach(function (edge) { |
| if (height[edge.from] === undefined) { |
| height[edge.from] = 1 + height[edge.to]; |
| } else { |
| height[edge.from] = Math.min(height[edge.from], 1 + height[edge.to]); |
| } |
| }); |
| return height; |
| } |
| |
| function allDocsKeysParse(opts) { |
| var keys = ('limit' in opts) ? |
| opts.keys.slice(opts.skip, opts.limit + opts.skip) : |
| (opts.skip > 0) ? opts.keys.slice(opts.skip) : opts.keys; |
| opts.keys = keys; |
| opts.skip = 0; |
| delete opts.limit; |
| if (opts.descending) { |
| keys.reverse(); |
| opts.descending = false; |
| } |
| } |
| |
| // all compaction is done in a queue, to avoid attaching |
| // too many listeners at once |
| function doNextCompaction(self) { |
| var task = self._compactionQueue[0]; |
| var opts = task.opts; |
| var callback = task.callback; |
| self.get('_local/compaction').catch(function () { |
| return false; |
| }).then(function (doc) { |
| if (doc && doc.last_seq) { |
| opts.last_seq = doc.last_seq; |
| } |
| self._compact(opts, function (err, res$$1) { |
| /* istanbul ignore if */ |
| if (err) { |
| callback(err); |
| } else { |
| callback(null, res$$1); |
| } |
| nextTick(function () { |
| self._compactionQueue.shift(); |
| if (self._compactionQueue.length) { |
| doNextCompaction(self); |
| } |
| }); |
| }); |
| }); |
| } |
| |
| function appendPurgeSeq(db, docId, rev$$1) { |
| return db.get('_local/purges').then(function (doc) { |
| const purgeSeq = doc.purgeSeq + 1; |
| doc.purges.push({ |
| docId, |
| rev: rev$$1, |
| purgeSeq, |
| }); |
| if (doc.purges.length > self.purged_infos_limit) { |
| doc.purges.splice(0, doc.purges.length - self.purged_infos_limit); |
| } |
| doc.purgeSeq = purgeSeq; |
| return doc; |
| }).catch(function (err) { |
| if (err.status !== 404) { |
| throw err; |
| } |
| return { |
| _id: '_local/purges', |
| purges: [{ |
| docId, |
| rev: rev$$1, |
| purgeSeq: 0, |
| }], |
| purgeSeq: 0, |
| }; |
| }).then(function (doc) { |
| return db.put(doc); |
| }); |
| } |
| |
| function attachmentNameError(name) { |
| if (name.charAt(0) === '_') { |
| return name + ' is not a valid attachment name, attachment ' + |
| 'names cannot start with \'_\''; |
| } |
| return false; |
| } |
| |
| function isNotSingleDoc(doc) { |
| return doc === null || typeof doc !== 'object' || Array.isArray(doc); |
| } |
| |
| const validRevRegex = /^\d+-[^-]*$/; |
| function isValidRev(rev$$1) { |
| return typeof rev$$1 === 'string' && validRevRegex.test(rev$$1); |
| } |
| |
| class AbstractPouchDB extends EE { |
| _setup() { |
| this.post = adapterFun('post', function (doc, opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| if (isNotSingleDoc(doc)) { |
| return callback(createError(NOT_AN_OBJECT)); |
| } |
| this.bulkDocs({docs: [doc]}, opts, yankError(callback, doc._id)); |
| }).bind(this); |
| |
| this.put = adapterFun('put', function (doc, opts, cb) { |
| if (typeof opts === 'function') { |
| cb = opts; |
| opts = {}; |
| } |
| if (isNotSingleDoc(doc)) { |
| return cb(createError(NOT_AN_OBJECT)); |
| } |
| invalidIdError(doc._id); |
| if ('_rev' in doc && !isValidRev(doc._rev)) { |
| return cb(createError(INVALID_REV)); |
| } |
| if (isLocalId(doc._id) && typeof this._putLocal === 'function') { |
| if (doc._deleted) { |
| return this._removeLocal(doc, cb); |
| } else { |
| return this._putLocal(doc, cb); |
| } |
| } |
| |
| const putDoc = (next) => { |
| if (typeof this._put === 'function' && opts.new_edits !== false) { |
| this._put(doc, opts, next); |
| } else { |
| this.bulkDocs({docs: [doc]}, opts, yankError(next, doc._id)); |
| } |
| }; |
| |
| if (opts.force && doc._rev) { |
| transformForceOptionToNewEditsOption(); |
| putDoc(function (err) { |
| var result = err ? null : {ok: true, id: doc._id, rev: doc._rev}; |
| cb(err, result); |
| }); |
| } else { |
| putDoc(cb); |
| } |
| |
| function transformForceOptionToNewEditsOption() { |
| var parts = doc._rev.split('-'); |
| var oldRevId = parts[1]; |
| var oldRevNum = parseInt(parts[0], 10); |
| |
| var newRevNum = oldRevNum + 1; |
| var newRevId = rev(); |
| |
| doc._revisions = { |
| start: newRevNum, |
| ids: [newRevId, oldRevId] |
| }; |
| doc._rev = newRevNum + '-' + newRevId; |
| opts.new_edits = false; |
| } |
| }).bind(this); |
| |
| this.putAttachment = adapterFun('putAttachment', function (docId, attachmentId, rev$$1, blob, type) { |
| var api = this; |
| if (typeof type === 'function') { |
| type = blob; |
| blob = rev$$1; |
| rev$$1 = null; |
| } |
| // Lets fix in https://github.com/pouchdb/pouchdb/issues/3267 |
| /* istanbul ignore if */ |
| if (typeof type === 'undefined') { |
| type = blob; |
| blob = rev$$1; |
| rev$$1 = null; |
| } |
| if (!type) { |
| guardedConsole('warn', 'Attachment', attachmentId, 'on document', docId, 'is missing content_type'); |
| } |
| |
| function createAttachment(doc) { |
| var prevrevpos = '_rev' in doc ? parseInt(doc._rev, 10) : 0; |
| doc._attachments = doc._attachments || {}; |
| doc._attachments[attachmentId] = { |
| content_type: type, |
| data: blob, |
| revpos: ++prevrevpos |
| }; |
| return api.put(doc); |
| } |
| |
| return api.get(docId).then(function (doc) { |
| if (doc._rev !== rev$$1) { |
| throw createError(REV_CONFLICT); |
| } |
| |
| return createAttachment(doc); |
| }, function (err) { |
| // create new doc |
| /* istanbul ignore else */ |
| if (err.reason === MISSING_DOC.message) { |
| return createAttachment({_id: docId}); |
| } else { |
| throw err; |
| } |
| }); |
| }).bind(this); |
| |
| this.removeAttachment = adapterFun('removeAttachment', function (docId, attachmentId, rev$$1, callback) { |
| this.get(docId, (err, obj) => { |
| /* istanbul ignore if */ |
| if (err) { |
| callback(err); |
| return; |
| } |
| if (obj._rev !== rev$$1) { |
| callback(createError(REV_CONFLICT)); |
| return; |
| } |
| /* istanbul ignore if */ |
| if (!obj._attachments) { |
| return callback(); |
| } |
| delete obj._attachments[attachmentId]; |
| if (Object.keys(obj._attachments).length === 0) { |
| delete obj._attachments; |
| } |
| this.put(obj, callback); |
| }); |
| }).bind(this); |
| |
| this.remove = adapterFun('remove', function (docOrId, optsOrRev, opts, callback) { |
| var doc; |
| if (typeof optsOrRev === 'string') { |
| // id, rev, opts, callback style |
| doc = { |
| _id: docOrId, |
| _rev: optsOrRev |
| }; |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| } else { |
| // doc, opts, callback style |
| doc = docOrId; |
| if (typeof optsOrRev === 'function') { |
| callback = optsOrRev; |
| opts = {}; |
| } else { |
| callback = opts; |
| opts = optsOrRev; |
| } |
| } |
| opts = opts || {}; |
| opts.was_delete = true; |
| var newDoc = {_id: doc._id, _rev: (doc._rev || opts.rev)}; |
| newDoc._deleted = true; |
| if (isLocalId(newDoc._id) && typeof this._removeLocal === 'function') { |
| return this._removeLocal(doc, callback); |
| } |
| this.bulkDocs({docs: [newDoc]}, opts, yankError(callback, newDoc._id)); |
| }).bind(this); |
| |
| this.revsDiff = adapterFun('revsDiff', function (req, opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| var ids = Object.keys(req); |
| |
| if (!ids.length) { |
| return callback(null, {}); |
| } |
| |
| var count = 0; |
| var missing = new Map(); |
| |
| function addToMissing(id, revId) { |
| if (!missing.has(id)) { |
| missing.set(id, {missing: []}); |
| } |
| missing.get(id).missing.push(revId); |
| } |
| |
| function processDoc(id, rev_tree) { |
| // Is this fast enough? Maybe we should switch to a set simulated by a map |
| var missingForId = req[id].slice(0); |
| traverseRevTree(rev_tree, function (isLeaf, pos, revHash, ctx, |
| opts) { |
| var rev$$1 = pos + '-' + revHash; |
| var idx = missingForId.indexOf(rev$$1); |
| if (idx === -1) { |
| return; |
| } |
| |
| missingForId.splice(idx, 1); |
| /* istanbul ignore if */ |
| if (opts.status !== 'available') { |
| addToMissing(id, rev$$1); |
| } |
| }); |
| |
| // Traversing the tree is synchronous, so now `missingForId` contains |
| // revisions that were not found in the tree |
| missingForId.forEach(function (rev$$1) { |
| addToMissing(id, rev$$1); |
| }); |
| } |
| |
| ids.forEach(function (id) { |
| this._getRevisionTree(id, function (err, rev_tree) { |
| if (err && err.status === 404 && err.message === 'missing') { |
| missing.set(id, {missing: req[id]}); |
| } else if (err) { |
| /* istanbul ignore next */ |
| return callback(err); |
| } else { |
| processDoc(id, rev_tree); |
| } |
| |
| if (++count === ids.length) { |
| // convert LazyMap to object |
| var missingObj = {}; |
| missing.forEach(function (value, key) { |
| missingObj[key] = value; |
| }); |
| return callback(null, missingObj); |
| } |
| }); |
| }, this); |
| }).bind(this); |
| |
| // _bulk_get API for faster replication, as described in |
| // https://github.com/apache/couchdb-chttpd/pull/33 |
| // At the "abstract" level, it will just run multiple get()s in |
| // parallel, because this isn't much of a performance cost |
| // for local databases (except the cost of multiple transactions, which is |
| // small). The http adapter overrides this in order |
| // to do a more efficient single HTTP request. |
| this.bulkGet = adapterFun('bulkGet', function (opts, callback) { |
| bulkGet(this, opts, callback); |
| }).bind(this); |
| |
| // compact one document and fire callback |
| // by compacting we mean removing all revisions which |
| // are further from the leaf in revision tree than max_height |
| this.compactDocument = adapterFun('compactDocument', function (docId, maxHeight, callback) { |
| this._getRevisionTree(docId, (err, revTree) => { |
| /* istanbul ignore if */ |
| if (err) { |
| return callback(err); |
| } |
| var height = computeHeight(revTree); |
| var candidates = []; |
| var revs = []; |
| Object.keys(height).forEach(function (rev$$1) { |
| if (height[rev$$1] > maxHeight) { |
| candidates.push(rev$$1); |
| } |
| }); |
| |
| traverseRevTree(revTree, function (isLeaf, pos, revHash, ctx, opts) { |
| var rev$$1 = pos + '-' + revHash; |
| if (opts.status === 'available' && candidates.indexOf(rev$$1) !== -1) { |
| revs.push(rev$$1); |
| } |
| }); |
| this._doCompaction(docId, revs, callback); |
| }); |
| }).bind(this); |
| |
| // compact the whole database using single document |
| // compaction |
| this.compact = adapterFun('compact', function (opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| |
| opts = opts || {}; |
| |
| this._compactionQueue = this._compactionQueue || []; |
| this._compactionQueue.push({opts, callback}); |
| if (this._compactionQueue.length === 1) { |
| doNextCompaction(this); |
| } |
| }).bind(this); |
| |
| /* Begin api wrappers. Specific functionality to storage belongs in the _[method] */ |
| this.get = adapterFun('get', function (id, opts, cb) { |
| if (typeof opts === 'function') { |
| cb = opts; |
| opts = {}; |
| } |
| opts = opts || {}; |
| if (typeof id !== 'string') { |
| return cb(createError(INVALID_ID)); |
| } |
| if (isLocalId(id) && typeof this._getLocal === 'function') { |
| return this._getLocal(id, cb); |
| } |
| var leaves = []; |
| |
| const finishOpenRevs = () => { |
| var result = []; |
| var count = leaves.length; |
| /* istanbul ignore if */ |
| if (!count) { |
| return cb(null, result); |
| } |
| |
| // order with open_revs is unspecified |
| leaves.forEach((leaf) => { |
| this.get(id, { |
| rev: leaf, |
| revs: opts.revs, |
| latest: opts.latest, |
| attachments: opts.attachments, |
| binary: opts.binary |
| }, function (err, doc) { |
| if (!err) { |
| // using latest=true can produce duplicates |
| var existing; |
| for (var i = 0, l = result.length; i < l; i++) { |
| if (result[i].ok && result[i].ok._rev === doc._rev) { |
| existing = true; |
| break; |
| } |
| } |
| if (!existing) { |
| result.push({ok: doc}); |
| } |
| } else { |
| result.push({missing: leaf}); |
| } |
| count--; |
| if (!count) { |
| cb(null, result); |
| } |
| }); |
| }); |
| }; |
| |
| if (opts.open_revs) { |
| if (opts.open_revs === "all") { |
| this._getRevisionTree(id, function (err, rev_tree) { |
| /* istanbul ignore if */ |
| if (err) { |
| return cb(err); |
| } |
| leaves = collectLeaves(rev_tree).map(function (leaf) { |
| return leaf.rev; |
| }); |
| finishOpenRevs(); |
| }); |
| } else { |
| if (Array.isArray(opts.open_revs)) { |
| leaves = opts.open_revs; |
| for (var i = 0; i < leaves.length; i++) { |
| var l = leaves[i]; |
| // looks like it's the only thing couchdb checks |
| if (!isValidRev(l)) { |
| return cb(createError(INVALID_REV)); |
| } |
| } |
| finishOpenRevs(); |
| } else { |
| return cb(createError(UNKNOWN_ERROR, 'function_clause')); |
| } |
| } |
| return; // open_revs does not like other options |
| } |
| |
| return this._get(id, opts, (err, result) => { |
| if (err) { |
| err.docId = id; |
| return cb(err); |
| } |
| |
| var doc = result.doc; |
| var metadata = result.metadata; |
| var ctx = result.ctx; |
| |
| if (opts.conflicts) { |
| var conflicts = collectConflicts(metadata); |
| if (conflicts.length) { |
| doc._conflicts = conflicts; |
| } |
| } |
| |
| if (isDeleted(metadata, doc._rev)) { |
| doc._deleted = true; |
| } |
| |
| if (opts.revs || opts.revs_info) { |
| var splittedRev = doc._rev.split('-'); |
| var revNo = parseInt(splittedRev[0], 10); |
| var revHash = splittedRev[1]; |
| |
| var paths = rootToLeaf(metadata.rev_tree); |
| var path$$1 = null; |
| |
| for (var i = 0; i < paths.length; i++) { |
| var currentPath = paths[i]; |
| const hashIndex = currentPath.ids.findIndex(x => x.id === revHash); |
| var hashFoundAtRevPos = hashIndex === (revNo - 1); |
| |
| if (hashFoundAtRevPos || (!path$$1 && hashIndex !== -1)) { |
| path$$1 = currentPath; |
| } |
| } |
| |
| /* istanbul ignore if */ |
| if (!path$$1) { |
| err = new Error('invalid rev tree'); |
| err.docId = id; |
| return cb(err); |
| } |
| |
| const pathId = doc._rev.split('-')[1]; |
| const indexOfRev = path$$1.ids.findIndex(x => x.id === pathId) + 1; |
| var howMany = path$$1.ids.length - indexOfRev; |
| path$$1.ids.splice(indexOfRev, howMany); |
| path$$1.ids.reverse(); |
| |
| if (opts.revs) { |
| doc._revisions = { |
| start: (path$$1.pos + path$$1.ids.length) - 1, |
| ids: path$$1.ids.map(function (rev$$1) { |
| return rev$$1.id; |
| }) |
| }; |
| } |
| if (opts.revs_info) { |
| var pos = path$$1.pos + path$$1.ids.length; |
| doc._revs_info = path$$1.ids.map(function (rev$$1) { |
| pos--; |
| return { |
| rev: pos + '-' + rev$$1.id, |
| status: rev$$1.opts.status |
| }; |
| }); |
| } |
| } |
| |
| if (opts.attachments && doc._attachments) { |
| var attachments = doc._attachments; |
| var count = Object.keys(attachments).length; |
| if (count === 0) { |
| return cb(null, doc); |
| } |
| Object.keys(attachments).forEach((key) => { |
| this._getAttachment(doc._id, key, attachments[key], { |
| binary: opts.binary, |
| metadata, |
| ctx |
| }, function (err, data) { |
| var att = doc._attachments[key]; |
| att.data = data; |
| delete att.stub; |
| delete att.length; |
| if (!--count) { |
| cb(null, doc); |
| } |
| }); |
| }); |
| } else { |
| if (doc._attachments) { |
| for (var key in doc._attachments) { |
| /* istanbul ignore else */ |
| if (Object.prototype.hasOwnProperty.call(doc._attachments, key)) { |
| doc._attachments[key].stub = true; |
| } |
| } |
| } |
| cb(null, doc); |
| } |
| }); |
| }).bind(this); |
| |
| // TODO: I don't like this, it forces an extra read for every |
| // attachment read and enforces a confusing api between |
| // adapter.js and the adapter implementation |
| this.getAttachment = adapterFun('getAttachment', function (docId, attachmentId, opts, callback) { |
| if (opts instanceof Function) { |
| callback = opts; |
| opts = {}; |
| } |
| this._get(docId, opts, (err, res$$1) => { |
| if (err) { |
| return callback(err); |
| } |
| if (res$$1.doc._attachments && res$$1.doc._attachments[attachmentId]) { |
| opts.ctx = res$$1.ctx; |
| opts.binary = true; |
| opts.metadata = res$$1.metadata; |
| this._getAttachment(docId, attachmentId, |
| res$$1.doc._attachments[attachmentId], opts, callback); |
| } else { |
| return callback(createError(MISSING_DOC)); |
| } |
| }); |
| }).bind(this); |
| |
| this.allDocs = adapterFun('allDocs', function (opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| opts.skip = typeof opts.skip !== 'undefined' ? opts.skip : 0; |
| if (opts.start_key) { |
| opts.startkey = opts.start_key; |
| } |
| if (opts.end_key) { |
| opts.endkey = opts.end_key; |
| } |
| if ('keys' in opts) { |
| if (!Array.isArray(opts.keys)) { |
| return callback(new TypeError('options.keys must be an array')); |
| } |
| var incompatibleOpt = |
| ['startkey', 'endkey', 'key'].filter(function (incompatibleOpt) { |
| return incompatibleOpt in opts; |
| })[0]; |
| if (incompatibleOpt) { |
| callback(createError(QUERY_PARSE_ERROR, |
| 'Query parameter `' + incompatibleOpt + |
| '` is not compatible with multi-get' |
| )); |
| return; |
| } |
| if (!isRemote(this)) { |
| allDocsKeysParse(opts); |
| if (opts.keys.length === 0) { |
| return this._allDocs({limit: 0}, callback); |
| } |
| } |
| } |
| |
| return this._allDocs(opts, callback); |
| }).bind(this); |
| |
| this.close = adapterFun('close', function (callback) { |
| this._closed = true; |
| this.emit('closed'); |
| return this._close(callback); |
| }).bind(this); |
| |
| this.info = adapterFun('info', function (callback) { |
| this._info((err, info) => { |
| if (err) { |
| return callback(err); |
| } |
| // assume we know better than the adapter, unless it informs us |
| info.db_name = info.db_name || this.name; |
| info.auto_compaction = !!(this.auto_compaction && !isRemote(this)); |
| info.adapter = this.adapter; |
| callback(null, info); |
| }); |
| }).bind(this); |
| |
| this.id = adapterFun('id', function (callback) { |
| return this._id(callback); |
| }).bind(this); |
| |
| this.bulkDocs = adapterFun('bulkDocs', function (req, opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| |
| opts = opts || {}; |
| |
| if (Array.isArray(req)) { |
| req = { |
| docs: req |
| }; |
| } |
| |
| if (!req || !req.docs || !Array.isArray(req.docs)) { |
| return callback(createError(MISSING_BULK_DOCS)); |
| } |
| |
| for (var i = 0; i < req.docs.length; ++i) { |
| const doc = req.docs[i]; |
| if (isNotSingleDoc(doc)) { |
| return callback(createError(NOT_AN_OBJECT)); |
| } |
| if ('_rev' in doc && !isValidRev(doc._rev)) { |
| return callback(createError(INVALID_REV)); |
| } |
| } |
| |
| var attachmentError; |
| req.docs.forEach(function (doc) { |
| if (doc._attachments) { |
| Object.keys(doc._attachments).forEach(function (name) { |
| attachmentError = attachmentError || attachmentNameError(name); |
| if (!doc._attachments[name].content_type) { |
| guardedConsole('warn', 'Attachment', name, 'on document', doc._id, 'is missing content_type'); |
| } |
| }); |
| } |
| }); |
| |
| if (attachmentError) { |
| return callback(createError(BAD_REQUEST, attachmentError)); |
| } |
| |
| if (!('new_edits' in opts)) { |
| if ('new_edits' in req) { |
| opts.new_edits = req.new_edits; |
| } else { |
| opts.new_edits = true; |
| } |
| } |
| |
| var adapter = this; |
| if (!opts.new_edits && !isRemote(adapter)) { |
| // ensure revisions of the same doc are sorted, so that |
| // the local adapter processes them correctly (#2935) |
| req.docs.sort(compareByIdThenRev); |
| } |
| |
| cleanDocs(req.docs); |
| |
| // in the case of conflicts, we want to return the _ids to the user |
| // however, the underlying adapter may destroy the docs array, so |
| // create a copy here |
| var ids = req.docs.map(function (doc) { |
| return doc._id; |
| }); |
| |
| this._bulkDocs(req, opts, function (err, res$$1) { |
| if (err) { |
| return callback(err); |
| } |
| if (!opts.new_edits) { |
| // this is what couch does when new_edits is false |
| res$$1 = res$$1.filter(function (x) { |
| return x.error; |
| }); |
| } |
| // add ids for error/conflict responses (not required for CouchDB) |
| if (!isRemote(adapter)) { |
| for (var i = 0, l = res$$1.length; i < l; i++) { |
| res$$1[i].id = res$$1[i].id || ids[i]; |
| } |
| } |
| |
| callback(null, res$$1); |
| }); |
| }).bind(this); |
| |
| this.registerDependentDatabase = adapterFun('registerDependentDatabase', function (dependentDb, callback) { |
| var dbOptions = clone(this.__opts); |
| if (this.__opts.view_adapter) { |
| dbOptions.adapter = this.__opts.view_adapter; |
| } |
| |
| var depDB = new this.constructor(dependentDb, dbOptions); |
| |
| function diffFun(doc) { |
| doc.dependentDbs = doc.dependentDbs || {}; |
| if (doc.dependentDbs[dependentDb]) { |
| return false; // no update required |
| } |
| doc.dependentDbs[dependentDb] = true; |
| return doc; |
| } |
| upsert(this, '_local/_pouch_dependentDbs', diffFun).then(function () { |
| callback(null, {db: depDB}); |
| }).catch(callback); |
| }).bind(this); |
| |
| this.destroy = adapterFun('destroy', function (opts, callback) { |
| |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| |
| var usePrefix = 'use_prefix' in this ? this.use_prefix : true; |
| |
| const destroyDb = () => { |
| // call destroy method of the particular adaptor |
| this._destroy(opts, (err, resp) => { |
| if (err) { |
| return callback(err); |
| } |
| this._destroyed = true; |
| this.emit('destroyed'); |
| callback(null, resp || { 'ok': true }); |
| }); |
| }; |
| |
| if (isRemote(this)) { |
| // no need to check for dependent DBs if it's a remote DB |
| return destroyDb(); |
| } |
| |
| this.get('_local/_pouch_dependentDbs', (err, localDoc) => { |
| if (err) { |
| /* istanbul ignore if */ |
| if (err.status !== 404) { |
| return callback(err); |
| } else { // no dependencies |
| return destroyDb(); |
| } |
| } |
| var dependentDbs = localDoc.dependentDbs; |
| var PouchDB = this.constructor; |
| var deletedMap = Object.keys(dependentDbs).map((name) => { |
| // use_prefix is only false in the browser |
| /* istanbul ignore next */ |
| var trueName = usePrefix ? |
| name.replace(new RegExp('^' + PouchDB.prefix), '') : name; |
| return new PouchDB(trueName, this.__opts).destroy(); |
| }); |
| Promise.all(deletedMap).then(destroyDb, callback); |
| }); |
| }).bind(this); |
| } |
| |
| _compact(opts, callback) { |
| var changesOpts = { |
| return_docs: false, |
| last_seq: opts.last_seq || 0, |
| since: opts.last_seq || 0 |
| }; |
| var promises = []; |
| |
| var taskId; |
| var compactedDocs = 0; |
| |
| const onChange = (row) => { |
| this.activeTasks.update(taskId, { |
| completed_items: ++compactedDocs |
| }); |
| promises.push(this.compactDocument(row.id, 0)); |
| }; |
| const onError = (err) => { |
| this.activeTasks.remove(taskId, err); |
| callback(err); |
| }; |
| const onComplete = (resp) => { |
| var lastSeq = resp.last_seq; |
| Promise.all(promises).then(() => { |
| return upsert(this, '_local/compaction', (doc) => { |
| if (!doc.last_seq || doc.last_seq < lastSeq) { |
| doc.last_seq = lastSeq; |
| return doc; |
| } |
| return false; // somebody else got here first, don't update |
| }); |
| }).then(() => { |
| this.activeTasks.remove(taskId); |
| callback(null, {ok: true}); |
| }).catch(onError); |
| }; |
| |
| this.info().then((info) => { |
| taskId = this.activeTasks.add({ |
| name: 'database_compaction', |
| total_items: info.update_seq - changesOpts.last_seq, |
| }); |
| |
| this.changes(changesOpts) |
| .on('change', onChange) |
| .on('complete', onComplete) |
| .on('error', onError); |
| }); |
| } |
| |
| changes(opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| |
| opts = opts || {}; |
| |
| // By default set return_docs to false if the caller has opts.live = true, |
| // this will prevent us from collecting the set of changes indefinitely |
| // resulting in growing memory |
| opts.return_docs = ('return_docs' in opts) ? opts.return_docs : !opts.live; |
| |
| return new Changes$1(this, opts, callback); |
| } |
| |
| type() { |
| return (typeof this._type === 'function') ? this._type() : this.adapter; |
| } |
| } |
| |
| // The abstract purge implementation expects a doc id and the rev of a leaf node in that doc. |
| // It will return errors if the rev doesn’t exist or isn’t a leaf. |
| AbstractPouchDB.prototype.purge = adapterFun('_purge', function (docId, rev$$1, callback) { |
| if (typeof this._purge === 'undefined') { |
| return callback(createError(UNKNOWN_ERROR, 'Purge is not implemented in the ' + this.adapter + ' adapter.')); |
| } |
| var self = this; |
| |
| self._getRevisionTree(docId, (error, revs) => { |
| if (error) { |
| return callback(error); |
| } |
| if (!revs) { |
| return callback(createError(MISSING_DOC)); |
| } |
| let path$$1; |
| try { |
| path$$1 = findPathToLeaf(revs, rev$$1); |
| } catch (error) { |
| return callback(error.message || error); |
| } |
| self._purge(docId, path$$1, (error, result) => { |
| if (error) { |
| return callback(error); |
| } else { |
| appendPurgeSeq(self, docId, rev$$1).then(function () { |
| return callback(null, result); |
| }); |
| } |
| }); |
| }); |
| }); |
| |
| class TaskQueue { |
| constructor() { |
| this.isReady = false; |
| this.failed = false; |
| this.queue = []; |
| } |
| |
| execute() { |
| var fun; |
| if (this.failed) { |
| while ((fun = this.queue.shift())) { |
| fun(this.failed); |
| } |
| } else { |
| while ((fun = this.queue.shift())) { |
| fun(); |
| } |
| } |
| } |
| |
| fail(err) { |
| this.failed = err; |
| this.execute(); |
| } |
| |
| ready(db) { |
| this.isReady = true; |
| this.db = db; |
| this.execute(); |
| } |
| |
| addTask(fun) { |
| this.queue.push(fun); |
| if (this.failed) { |
| this.execute(); |
| } |
| } |
| } |
| |
| function parseAdapter(name, opts) { |
| var match = name.match(/([a-z-]*):\/\/(.*)/); |
| if (match) { |
| // the http adapter expects the fully qualified name |
| return { |
| name: /https?/.test(match[1]) ? match[1] + '://' + match[2] : match[2], |
| adapter: match[1] |
| }; |
| } |
| |
| var adapters = PouchDB.adapters; |
| var preferredAdapters = PouchDB.preferredAdapters; |
| var prefix = PouchDB.prefix; |
| var adapterName = opts.adapter; |
| |
| if (!adapterName) { // automatically determine adapter |
| for (var i = 0; i < preferredAdapters.length; ++i) { |
| adapterName = preferredAdapters[i]; |
| // check for browsers that have been upgraded from websql-only to websql+idb |
| /* istanbul ignore if */ |
| if (adapterName === 'idb' && 'websql' in adapters && |
| hasLocalStorage() && localStorage['_pouch__websqldb_' + prefix + name]) { |
| // log it, because this can be confusing during development |
| guardedConsole('log', 'PouchDB is downgrading "' + name + '" to WebSQL to' + |
| ' avoid data loss, because it was already opened with WebSQL.'); |
| continue; // keep using websql to avoid user data loss |
| } |
| break; |
| } |
| } |
| |
| var adapter = adapters[adapterName]; |
| |
| // if adapter is invalid, then an error will be thrown later |
| var usePrefix = (adapter && 'use_prefix' in adapter) ? |
| adapter.use_prefix : true; |
| |
| return { |
| name: usePrefix ? (prefix + name) : name, |
| adapter: adapterName |
| }; |
| } |
| |
| function inherits(A, B) { |
| A.prototype = Object.create(B.prototype, { |
| constructor: { value: A } |
| }); |
| } |
| |
| function createClass(parent, init) { |
| let klass = function (...args) { |
| if (!(this instanceof klass)) { |
| return new klass(...args); |
| } |
| init.apply(this, args); |
| }; |
| inherits(klass, parent); |
| return klass; |
| } |
| |
| // OK, so here's the deal. Consider this code: |
| // var db1 = new PouchDB('foo'); |
| // var db2 = new PouchDB('foo'); |
| // db1.destroy(); |
| // ^ these two both need to emit 'destroyed' events, |
| // as well as the PouchDB constructor itself. |
| // So we have one db object (whichever one got destroy() called on it) |
| // responsible for emitting the initial event, which then gets emitted |
| // by the constructor, which then broadcasts it to any other dbs |
| // that may have been created with the same name. |
| function prepareForDestruction(self) { |
| |
| function onDestroyed(from_constructor) { |
| self.removeListener('closed', onClosed); |
| if (!from_constructor) { |
| self.constructor.emit('destroyed', self.name); |
| } |
| } |
| |
| function onClosed() { |
| self.removeListener('destroyed', onDestroyed); |
| self.constructor.emit('unref', self); |
| } |
| |
| self.once('destroyed', onDestroyed); |
| self.once('closed', onClosed); |
| self.constructor.emit('ref', self); |
| } |
| |
| class PouchInternal extends AbstractPouchDB { |
| constructor(name, opts) { |
| super(); |
| this._setup(name, opts); |
| } |
| |
| _setup(name, opts) { |
| super._setup(); |
| opts = opts || {}; |
| |
| if (name && typeof name === 'object') { |
| opts = name; |
| name = opts.name; |
| delete opts.name; |
| } |
| |
| if (opts.deterministic_revs === undefined) { |
| opts.deterministic_revs = true; |
| } |
| |
| this.__opts = opts = clone(opts); |
| |
| this.auto_compaction = opts.auto_compaction; |
| this.purged_infos_limit = opts.purged_infos_limit || 1000; |
| this.prefix = PouchDB.prefix; |
| |
| if (typeof name !== 'string') { |
| throw new Error('Missing/invalid DB name'); |
| } |
| |
| var prefixedName = (opts.prefix || '') + name; |
| var backend = parseAdapter(prefixedName, opts); |
| |
| opts.name = backend.name; |
| opts.adapter = opts.adapter || backend.adapter; |
| |
| this.name = name; |
| this._adapter = opts.adapter; |
| PouchDB.emit('debug', ['adapter', 'Picked adapter: ', opts.adapter]); |
| |
| if (!PouchDB.adapters[opts.adapter] || |
| !PouchDB.adapters[opts.adapter].valid()) { |
| throw new Error('Invalid Adapter: ' + opts.adapter); |
| } |
| |
| if (opts.view_adapter) { |
| if (!PouchDB.adapters[opts.view_adapter] || |
| !PouchDB.adapters[opts.view_adapter].valid()) { |
| throw new Error('Invalid View Adapter: ' + opts.view_adapter); |
| } |
| } |
| |
| this.taskqueue = new TaskQueue(); |
| |
| this.adapter = opts.adapter; |
| |
| PouchDB.adapters[opts.adapter].call(this, opts, (err) => { |
| if (err) { |
| return this.taskqueue.fail(err); |
| } |
| prepareForDestruction(this); |
| |
| this.emit('created', this); |
| PouchDB.emit('created', this.name); |
| this.taskqueue.ready(this); |
| }); |
| } |
| } |
| |
| const PouchDB = createClass(PouchInternal, function (name, opts) { |
| PouchInternal.prototype._setup.call(this, name, opts); |
| }); |
| |
| var fetch = fetchCookie(nodeFetch__default); |
| |
| class ActiveTasks { |
| constructor() { |
| this.tasks = {}; |
| } |
| |
| list() { |
| return Object.values(this.tasks); |
| } |
| |
| add(task) { |
| const id = uuid.v4(); |
| this.tasks[id] = { |
| id, |
| name: task.name, |
| total_items: task.total_items, |
| created_at: new Date().toJSON() |
| }; |
| return id; |
| } |
| |
| get(id) { |
| return this.tasks[id]; |
| } |
| |
| /* eslint-disable no-unused-vars */ |
| remove(id, reason) { |
| delete this.tasks[id]; |
| return this.tasks; |
| } |
| |
| update(id, updatedTask) { |
| const task = this.tasks[id]; |
| if (typeof task !== 'undefined') { |
| const mergedTask = { |
| id: task.id, |
| name: task.name, |
| created_at: task.created_at, |
| total_items: updatedTask.total_items || task.total_items, |
| completed_items: updatedTask.completed_items || task.completed_items, |
| updated_at: new Date().toJSON() |
| }; |
| this.tasks[id] = mergedTask; |
| } |
| return this.tasks; |
| } |
| } |
| |
| PouchDB.adapters = {}; |
| PouchDB.preferredAdapters = []; |
| |
| PouchDB.prefix = '_pouch_'; |
| |
| var eventEmitter = new EE(); |
| |
| function setUpEventEmitter(Pouch) { |
| Object.keys(EE.prototype).forEach(function (key) { |
| if (typeof EE.prototype[key] === 'function') { |
| Pouch[key] = eventEmitter[key].bind(eventEmitter); |
| } |
| }); |
| |
| // these are created in constructor.js, and allow us to notify each DB with |
| // the same name that it was destroyed, via the constructor object |
| var destructListeners = Pouch._destructionListeners = new Map(); |
| |
| Pouch.on('ref', function onConstructorRef(db) { |
| if (!destructListeners.has(db.name)) { |
| destructListeners.set(db.name, []); |
| } |
| destructListeners.get(db.name).push(db); |
| }); |
| |
| Pouch.on('unref', function onConstructorUnref(db) { |
| if (!destructListeners.has(db.name)) { |
| return; |
| } |
| var dbList = destructListeners.get(db.name); |
| var pos = dbList.indexOf(db); |
| if (pos < 0) { |
| /* istanbul ignore next */ |
| return; |
| } |
| dbList.splice(pos, 1); |
| if (dbList.length > 1) { |
| /* istanbul ignore next */ |
| destructListeners.set(db.name, dbList); |
| } else { |
| destructListeners.delete(db.name); |
| } |
| }); |
| |
| Pouch.on('destroyed', function onConstructorDestroyed(name) { |
| if (!destructListeners.has(name)) { |
| return; |
| } |
| var dbList = destructListeners.get(name); |
| destructListeners.delete(name); |
| dbList.forEach(function (db) { |
| db.emit('destroyed',true); |
| }); |
| }); |
| } |
| |
| setUpEventEmitter(PouchDB); |
| |
| PouchDB.adapter = function (id, obj, addToPreferredAdapters) { |
| /* istanbul ignore else */ |
| if (obj.valid()) { |
| PouchDB.adapters[id] = obj; |
| if (addToPreferredAdapters) { |
| PouchDB.preferredAdapters.push(id); |
| } |
| } |
| }; |
| |
| PouchDB.plugin = function (obj) { |
| if (typeof obj === 'function') { // function style for plugins |
| obj(PouchDB); |
| } else if (typeof obj !== 'object' || Object.keys(obj).length === 0) { |
| throw new Error('Invalid plugin: got "' + obj + '", expected an object or a function'); |
| } else { |
| Object.keys(obj).forEach(function (id) { // object style for plugins |
| PouchDB.prototype[id] = obj[id]; |
| }); |
| } |
| if (this.__defaults) { |
| PouchDB.__defaults = Object.assign({}, this.__defaults); |
| } |
| return PouchDB; |
| }; |
| |
| PouchDB.defaults = function (defaultOpts) { |
| let PouchWithDefaults = createClass(PouchDB, function (name, opts) { |
| opts = opts || {}; |
| |
| if (name && typeof name === 'object') { |
| opts = name; |
| name = opts.name; |
| delete opts.name; |
| } |
| |
| opts = Object.assign({}, PouchWithDefaults.__defaults, opts); |
| PouchDB.call(this, name, opts); |
| }); |
| |
| PouchWithDefaults.preferredAdapters = PouchDB.preferredAdapters.slice(); |
| Object.keys(PouchDB).forEach(function (key) { |
| if (!(key in PouchWithDefaults)) { |
| PouchWithDefaults[key] = PouchDB[key]; |
| } |
| }); |
| |
| // make default options transitive |
| // https://github.com/pouchdb/pouchdb/issues/5922 |
| PouchWithDefaults.__defaults = Object.assign({}, this.__defaults, defaultOpts); |
| |
| return PouchWithDefaults; |
| }; |
| |
| PouchDB.fetch = function (url, opts) { |
| return fetch(url, opts); |
| }; |
| |
| PouchDB.prototype.activeTasks = PouchDB.activeTasks = new ActiveTasks(); |
| |
| // managed automatically by set-version.js |
| var version = "9.0.0"; |
| |
| // this would just be "return doc[field]", but fields |
| // can be "deep" due to dot notation |
| function getFieldFromDoc(doc, parsedField) { |
| var value = doc; |
| for (var i = 0, len = parsedField.length; i < len; i++) { |
| var key = parsedField[i]; |
| value = value[key]; |
| if (!value) { |
| break; |
| } |
| } |
| return value; |
| } |
| |
| function compare(left, right) { |
| return left < right ? -1 : left > right ? 1 : 0; |
| } |
| |
| // Converts a string in dot notation to an array of its components, with backslash escaping |
| function parseField(fieldName) { |
| // fields may be deep (e.g. "foo.bar.baz"), so parse |
| var fields = []; |
| var current = ''; |
| for (var i = 0, len = fieldName.length; i < len; i++) { |
| var ch = fieldName[i]; |
| if (i > 0 && fieldName[i - 1] === '\\' && (ch === '$' || ch === '.')) { |
| // escaped delimiter |
| current = current.substring(0, current.length - 1) + ch; |
| } else if (ch === '.') { |
| // When `.` is not escaped (above), it is a field delimiter |
| fields.push(current); |
| current = ''; |
| } else { // normal character |
| current += ch; |
| } |
| } |
| fields.push(current); |
| return fields; |
| } |
| |
| var combinationFields = ['$or', '$nor', '$not']; |
| function isCombinationalField(field) { |
| return combinationFields.indexOf(field) > -1; |
| } |
| |
| function getKey(obj) { |
| return Object.keys(obj)[0]; |
| } |
| |
| function getValue(obj) { |
| return obj[getKey(obj)]; |
| } |
| |
| |
| // flatten an array of selectors joined by an $and operator |
| function mergeAndedSelectors(selectors) { |
| |
| // sort to ensure that e.g. if the user specified |
| // $and: [{$gt: 'a'}, {$gt: 'b'}], then it's collapsed into |
| // just {$gt: 'b'} |
| var res$$1 = {}; |
| var first = {$or: true, $nor: true}; |
| |
| selectors.forEach(function (selector) { |
| Object.keys(selector).forEach(function (field) { |
| var matcher = selector[field]; |
| if (typeof matcher !== 'object') { |
| matcher = {$eq: matcher}; |
| } |
| |
| if (isCombinationalField(field)) { |
| // or, nor |
| if (matcher instanceof Array) { |
| if (first[field]) { |
| first[field] = false; |
| res$$1[field] = matcher; |
| return; |
| } |
| |
| var entries = []; |
| res$$1[field].forEach(function (existing) { |
| Object.keys(matcher).forEach(function (key) { |
| var m = matcher[key]; |
| var longest = Math.max(Object.keys(existing).length, Object.keys(m).length); |
| var merged = mergeAndedSelectors([existing, m]); |
| if (Object.keys(merged).length <= longest) { |
| // we have a situation like: (a :{$eq :1} || ...) && (a {$eq: 2} || ...) |
| // merging would produce a $eq 2 when actually we shouldn't ever match against these merged conditions |
| // merged should always contain more values to be valid |
| return; |
| } |
| entries.push(merged); |
| }); |
| }); |
| res$$1[field] = entries; |
| } else { |
| // not |
| res$$1[field] = mergeAndedSelectors([matcher]); |
| } |
| } else { |
| var fieldMatchers = res$$1[field] = res$$1[field] || {}; |
| Object.keys(matcher).forEach(function (operator) { |
| var value = matcher[operator]; |
| |
| if (operator === '$gt' || operator === '$gte') { |
| return mergeGtGte(operator, value, fieldMatchers); |
| } else if (operator === '$lt' || operator === '$lte') { |
| return mergeLtLte(operator, value, fieldMatchers); |
| } else if (operator === '$ne') { |
| return mergeNe(value, fieldMatchers); |
| } else if (operator === '$eq') { |
| return mergeEq(value, fieldMatchers); |
| } else if (operator === "$regex") { |
| return mergeRegex(value, fieldMatchers); |
| } |
| fieldMatchers[operator] = value; |
| }); |
| } |
| }); |
| }); |
| |
| return res$$1; |
| } |
| |
| |
| |
| // collapse logically equivalent gt/gte values |
| function mergeGtGte(operator, value, fieldMatchers) { |
| if (typeof fieldMatchers.$eq !== 'undefined') { |
| return; // do nothing |
| } |
| if (typeof fieldMatchers.$gte !== 'undefined') { |
| if (operator === '$gte') { |
| if (value > fieldMatchers.$gte) { // more specificity |
| fieldMatchers.$gte = value; |
| } |
| } else { // operator === '$gt' |
| if (value >= fieldMatchers.$gte) { // more specificity |
| delete fieldMatchers.$gte; |
| fieldMatchers.$gt = value; |
| } |
| } |
| } else if (typeof fieldMatchers.$gt !== 'undefined') { |
| if (operator === '$gte') { |
| if (value > fieldMatchers.$gt) { // more specificity |
| delete fieldMatchers.$gt; |
| fieldMatchers.$gte = value; |
| } |
| } else { // operator === '$gt' |
| if (value > fieldMatchers.$gt) { // more specificity |
| fieldMatchers.$gt = value; |
| } |
| } |
| } else { |
| fieldMatchers[operator] = value; |
| } |
| } |
| |
| // collapse logically equivalent lt/lte values |
| function mergeLtLte(operator, value, fieldMatchers) { |
| if (typeof fieldMatchers.$eq !== 'undefined') { |
| return; // do nothing |
| } |
| if (typeof fieldMatchers.$lte !== 'undefined') { |
| if (operator === '$lte') { |
| if (value < fieldMatchers.$lte) { // more specificity |
| fieldMatchers.$lte = value; |
| } |
| } else { // operator === '$gt' |
| if (value <= fieldMatchers.$lte) { // more specificity |
| delete fieldMatchers.$lte; |
| fieldMatchers.$lt = value; |
| } |
| } |
| } else if (typeof fieldMatchers.$lt !== 'undefined') { |
| if (operator === '$lte') { |
| if (value < fieldMatchers.$lt) { // more specificity |
| delete fieldMatchers.$lt; |
| fieldMatchers.$lte = value; |
| } |
| } else { // operator === '$gt' |
| if (value < fieldMatchers.$lt) { // more specificity |
| fieldMatchers.$lt = value; |
| } |
| } |
| } else { |
| fieldMatchers[operator] = value; |
| } |
| } |
| |
| // combine $ne values into one array |
| function mergeNe(value, fieldMatchers) { |
| if ('$ne' in fieldMatchers) { |
| // there are many things this could "not" be |
| fieldMatchers.$ne.push(value); |
| } else { // doesn't exist yet |
| fieldMatchers.$ne = [value]; |
| } |
| } |
| |
| // add $eq into the mix |
| function mergeEq(value, fieldMatchers) { |
| // these all have less specificity than the $eq |
| // TODO: check for user errors here |
| delete fieldMatchers.$gt; |
| delete fieldMatchers.$gte; |
| delete fieldMatchers.$lt; |
| delete fieldMatchers.$lte; |
| delete fieldMatchers.$ne; |
| fieldMatchers.$eq = value; |
| } |
| |
| // combine $regex values into one array |
| function mergeRegex(value, fieldMatchers) { |
| if ('$regex' in fieldMatchers) { |
| // a value could match multiple regexes |
| fieldMatchers.$regex.push(value); |
| } else { // doesn't exist yet |
| fieldMatchers.$regex = [value]; |
| } |
| } |
| |
| //#7458: execute function mergeAndedSelectors on nested $and |
| function mergeAndedSelectorsNested(obj) { |
| for (var prop in obj) { |
| if (Array.isArray(obj)) { |
| for (var i in obj) { |
| if (obj[i]['$and']) { |
| obj[i] = mergeAndedSelectors(obj[i]['$and']); |
| } |
| } |
| } |
| var value = obj[prop]; |
| if (typeof value === 'object') { |
| mergeAndedSelectorsNested(value); // <- recursive call |
| } |
| } |
| return obj; |
| } |
| |
| //#7458: determine id $and is present in selector (at any level) |
| function isAndInSelector(obj, isAnd) { |
| for (var prop in obj) { |
| if (prop === '$and') { |
| isAnd = true; |
| } |
| var value = obj[prop]; |
| if (typeof value === 'object') { |
| isAnd = isAndInSelector(value, isAnd); // <- recursive call |
| } |
| } |
| return isAnd; |
| } |
| |
| // |
| // normalize the selector |
| // |
| function massageSelector(input) { |
| var result = clone(input); |
| |
| //#7458: if $and is present in selector (at any level) merge nested $and |
| if (isAndInSelector(result, false)) { |
| result = mergeAndedSelectorsNested(result); |
| if ('$and' in result) { |
| result = mergeAndedSelectors(result['$and']); |
| } |
| } |
| |
| ['$or', '$nor'].forEach(function (orOrNor) { |
| if (orOrNor in result) { |
| // message each individual selector |
| // e.g. {foo: 'bar'} becomes {foo: {$eq: 'bar'}} |
| result[orOrNor].forEach(function (subSelector) { |
| var fields = Object.keys(subSelector); |
| for (var i = 0; i < fields.length; i++) { |
| var field = fields[i]; |
| var matcher = subSelector[field]; |
| if (typeof matcher !== 'object' || matcher === null) { |
| subSelector[field] = {$eq: matcher}; |
| } |
| } |
| }); |
| } |
| }); |
| |
| if ('$not' in result) { |
| //This feels a little like forcing, but it will work for now, |
| //I would like to come back to this and make the merging of selectors a little more generic |
| result['$not'] = mergeAndedSelectors([result['$not']]); |
| } |
| |
| var fields = Object.keys(result); |
| |
| for (var i = 0; i < fields.length; i++) { |
| var field = fields[i]; |
| var matcher = result[field]; |
| |
| if (typeof matcher !== 'object' || matcher === null) { |
| matcher = {$eq: matcher}; |
| } |
| result[field] = matcher; |
| } |
| |
| normalizeArrayOperators(result); |
| |
| return result; |
| } |
| |
| // |
| // The $ne and $regex values must be placed in an array because these operators can be used multiple times on the same field. |
| // When $and is used, mergeAndedSelectors takes care of putting some of them into arrays, otherwise it's done here. |
| // |
| function normalizeArrayOperators(selector) { |
| Object.keys(selector).forEach(function (field) { |
| var matcher = selector[field]; |
| |
| if (Array.isArray(matcher)) { |
| matcher.forEach(function (matcherItem) { |
| if (matcherItem && typeof matcherItem === 'object') { |
| normalizeArrayOperators(matcherItem); |
| } |
| }); |
| } else if (field === '$ne') { |
| selector.$ne = [matcher]; |
| } else if (field === '$regex') { |
| selector.$regex = [matcher]; |
| } else if (matcher && typeof matcher === 'object') { |
| normalizeArrayOperators(matcher); |
| } |
| }); |
| } |
| |
| function pad(str, padWith, upToLength) { |
| var padding = ''; |
| var targetLength = upToLength - str.length; |
| /* istanbul ignore next */ |
| while (padding.length < targetLength) { |
| padding += padWith; |
| } |
| return padding; |
| } |
| |
| function padLeft(str, padWith, upToLength) { |
| var padding = pad(str, padWith, upToLength); |
| return padding + str; |
| } |
| |
| var MIN_MAGNITUDE = -324; // verified by -Number.MIN_VALUE |
| var MAGNITUDE_DIGITS = 3; // ditto |
| var SEP = ''; // set to '_' for easier debugging |
| |
| function collate(a, b) { |
| |
| if (a === b) { |
| return 0; |
| } |
| |
| a = normalizeKey(a); |
| b = normalizeKey(b); |
| |
| var ai = collationIndex(a); |
| var bi = collationIndex(b); |
| if ((ai - bi) !== 0) { |
| return ai - bi; |
| } |
| switch (typeof a) { |
| case 'number': |
| return a - b; |
| case 'boolean': |
| return a < b ? -1 : 1; |
| case 'string': |
| return stringCollate(a, b); |
| } |
| return Array.isArray(a) ? arrayCollate(a, b) : objectCollate(a, b); |
| } |
| |
| // couch considers null/NaN/Infinity/-Infinity === undefined, |
| // for the purposes of mapreduce indexes. also, dates get stringified. |
| function normalizeKey(key) { |
| switch (typeof key) { |
| case 'undefined': |
| return null; |
| case 'number': |
| if (key === Infinity || key === -Infinity || isNaN(key)) { |
| return null; |
| } |
| return key; |
| case 'object': |
| var origKey = key; |
| if (Array.isArray(key)) { |
| var len = key.length; |
| key = new Array(len); |
| for (var i = 0; i < len; i++) { |
| key[i] = normalizeKey(origKey[i]); |
| } |
| /* istanbul ignore next */ |
| } else if (key instanceof Date) { |
| return key.toJSON(); |
| } else if (key !== null) { // generic object |
| key = {}; |
| for (var k in origKey) { |
| if (Object.prototype.hasOwnProperty.call(origKey, k)) { |
| var val = origKey[k]; |
| if (typeof val !== 'undefined') { |
| key[k] = normalizeKey(val); |
| } |
| } |
| } |
| } |
| } |
| return key; |
| } |
| |
| function indexify(key) { |
| if (key !== null) { |
| switch (typeof key) { |
| case 'boolean': |
| return key ? 1 : 0; |
| case 'number': |
| return numToIndexableString(key); |
| case 'string': |
| // We've to be sure that key does not contain \u0000 |
| // Do order-preserving replacements: |
| // 0 -> 1, 1 |
| // 1 -> 1, 2 |
| // 2 -> 2, 2 |
| /* eslint-disable no-control-regex */ |
| return key |
| .replace(/\u0002/g, '\u0002\u0002') |
| .replace(/\u0001/g, '\u0001\u0002') |
| .replace(/\u0000/g, '\u0001\u0001'); |
| /* eslint-enable no-control-regex */ |
| case 'object': |
| var isArray = Array.isArray(key); |
| var arr = isArray ? key : Object.keys(key); |
| var i = -1; |
| var len = arr.length; |
| var result = ''; |
| if (isArray) { |
| while (++i < len) { |
| result += toIndexableString(arr[i]); |
| } |
| } else { |
| while (++i < len) { |
| var objKey = arr[i]; |
| result += toIndexableString(objKey) + |
| toIndexableString(key[objKey]); |
| } |
| } |
| return result; |
| } |
| } |
| return ''; |
| } |
| |
| // convert the given key to a string that would be appropriate |
| // for lexical sorting, e.g. within a database, where the |
| // sorting is the same given by the collate() function. |
| function toIndexableString(key) { |
| var zero = '\u0000'; |
| key = normalizeKey(key); |
| return collationIndex(key) + SEP + indexify(key) + zero; |
| } |
| |
| function parseNumber(str, i) { |
| var originalIdx = i; |
| var num; |
| var zero = str[i] === '1'; |
| if (zero) { |
| num = 0; |
| i++; |
| } else { |
| var neg = str[i] === '0'; |
| i++; |
| var numAsString = ''; |
| var magAsString = str.substring(i, i + MAGNITUDE_DIGITS); |
| var magnitude = parseInt(magAsString, 10) + MIN_MAGNITUDE; |
| /* istanbul ignore next */ |
| if (neg) { |
| magnitude = -magnitude; |
| } |
| i += MAGNITUDE_DIGITS; |
| while (true) { |
| var ch = str[i]; |
| if (ch === '\u0000') { |
| break; |
| } else { |
| numAsString += ch; |
| } |
| i++; |
| } |
| numAsString = numAsString.split('.'); |
| if (numAsString.length === 1) { |
| num = parseInt(numAsString, 10); |
| } else { |
| /* istanbul ignore next */ |
| num = parseFloat(numAsString[0] + '.' + numAsString[1]); |
| } |
| /* istanbul ignore next */ |
| if (neg) { |
| num = num - 10; |
| } |
| /* istanbul ignore next */ |
| if (magnitude !== 0) { |
| // parseFloat is more reliable than pow due to rounding errors |
| // e.g. Number.MAX_VALUE would return Infinity if we did |
| // num * Math.pow(10, magnitude); |
| num = parseFloat(num + 'e' + magnitude); |
| } |
| } |
| return {num, length : i - originalIdx}; |
| } |
| |
| // move up the stack while parsing |
| // this function moved outside of parseIndexableString for performance |
| function pop(stack, metaStack) { |
| var obj = stack.pop(); |
| |
| if (metaStack.length) { |
| var lastMetaElement = metaStack[metaStack.length - 1]; |
| if (obj === lastMetaElement.element) { |
| // popping a meta-element, e.g. an object whose value is another object |
| metaStack.pop(); |
| lastMetaElement = metaStack[metaStack.length - 1]; |
| } |
| var element = lastMetaElement.element; |
| var lastElementIndex = lastMetaElement.index; |
| if (Array.isArray(element)) { |
| element.push(obj); |
| } else if (lastElementIndex === stack.length - 2) { // obj with key+value |
| var key = stack.pop(); |
| element[key] = obj; |
| } else { |
| stack.push(obj); // obj with key only |
| } |
| } |
| } |
| |
| function parseIndexableString(str) { |
| var stack = []; |
| var metaStack = []; // stack for arrays and objects |
| var i = 0; |
| |
| /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/ |
| while (true) { |
| var collationIndex = str[i++]; |
| if (collationIndex === '\u0000') { |
| if (stack.length === 1) { |
| return stack.pop(); |
| } else { |
| pop(stack, metaStack); |
| continue; |
| } |
| } |
| switch (collationIndex) { |
| case '1': |
| stack.push(null); |
| break; |
| case '2': |
| stack.push(str[i] === '1'); |
| i++; |
| break; |
| case '3': |
| var parsedNum = parseNumber(str, i); |
| stack.push(parsedNum.num); |
| i += parsedNum.length; |
| break; |
| case '4': |
| var parsedStr = ''; |
| /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/ |
| while (true) { |
| var ch = str[i]; |
| if (ch === '\u0000') { |
| break; |
| } |
| parsedStr += ch; |
| i++; |
| } |
| // perform the reverse of the order-preserving replacement |
| // algorithm (see above) |
| /* eslint-disable no-control-regex */ |
| parsedStr = parsedStr.replace(/\u0001\u0001/g, '\u0000') |
| .replace(/\u0001\u0002/g, '\u0001') |
| .replace(/\u0002\u0002/g, '\u0002'); |
| /* eslint-enable no-control-regex */ |
| stack.push(parsedStr); |
| break; |
| case '5': |
| var arrayElement = { element: [], index: stack.length }; |
| stack.push(arrayElement.element); |
| metaStack.push(arrayElement); |
| break; |
| case '6': |
| var objElement = { element: {}, index: stack.length }; |
| stack.push(objElement.element); |
| metaStack.push(objElement); |
| break; |
| /* istanbul ignore next */ |
| default: |
| throw new Error( |
| 'bad collationIndex or unexpectedly reached end of input: ' + |
| collationIndex); |
| } |
| } |
| } |
| |
| function arrayCollate(a, b) { |
| var len = Math.min(a.length, b.length); |
| for (var i = 0; i < len; i++) { |
| var sort = collate(a[i], b[i]); |
| if (sort !== 0) { |
| return sort; |
| } |
| } |
| return (a.length === b.length) ? 0 : |
| (a.length > b.length) ? 1 : -1; |
| } |
| function stringCollate(a, b) { |
| // See: https://github.com/daleharvey/pouchdb/issues/40 |
| // This is incompatible with the CouchDB implementation, but its the |
| // best we can do for now |
| return (a === b) ? 0 : ((a > b) ? 1 : -1); |
| } |
| function objectCollate(a, b) { |
| var ak = Object.keys(a), bk = Object.keys(b); |
| var len = Math.min(ak.length, bk.length); |
| for (var i = 0; i < len; i++) { |
| // First sort the keys |
| var sort = collate(ak[i], bk[i]); |
| if (sort !== 0) { |
| return sort; |
| } |
| // if the keys are equal sort the values |
| sort = collate(a[ak[i]], b[bk[i]]); |
| if (sort !== 0) { |
| return sort; |
| } |
| |
| } |
| return (ak.length === bk.length) ? 0 : |
| (ak.length > bk.length) ? 1 : -1; |
| } |
| // The collation is defined by erlangs ordered terms |
| // the atoms null, true, false come first, then numbers, strings, |
| // arrays, then objects |
| // null/undefined/NaN/Infinity/-Infinity are all considered null |
| function collationIndex(x) { |
| var id = ['boolean', 'number', 'string', 'object']; |
| var idx = id.indexOf(typeof x); |
| //false if -1 otherwise true, but fast!!!!1 |
| if (~idx) { |
| if (x === null) { |
| return 1; |
| } |
| if (Array.isArray(x)) { |
| return 5; |
| } |
| return idx < 3 ? (idx + 2) : (idx + 3); |
| } |
| /* istanbul ignore next */ |
| if (Array.isArray(x)) { |
| return 5; |
| } |
| } |
| |
| // conversion: |
| // x yyy zz...zz |
| // x = 0 for negative, 1 for 0, 2 for positive |
| // y = exponent (for negative numbers negated) moved so that it's >= 0 |
| // z = mantisse |
| function numToIndexableString(num) { |
| |
| if (num === 0) { |
| return '1'; |
| } |
| |
| // convert number to exponential format for easier and |
| // more succinct string sorting |
| var expFormat = num.toExponential().split(/e\+?/); |
| var magnitude = parseInt(expFormat[1], 10); |
| |
| var neg = num < 0; |
| |
| var result = neg ? '0' : '2'; |
| |
| // first sort by magnitude |
| // it's easier if all magnitudes are positive |
| var magForComparison = ((neg ? -magnitude : magnitude) - MIN_MAGNITUDE); |
| var magString = padLeft((magForComparison).toString(), '0', MAGNITUDE_DIGITS); |
| |
| result += SEP + magString; |
| |
| // then sort by the factor |
| var factor = Math.abs(parseFloat(expFormat[0])); // [1..10) |
| /* istanbul ignore next */ |
| if (neg) { // for negative reverse ordering |
| factor = 10 - factor; |
| } |
| |
| var factorStr = factor.toFixed(20); |
| |
| // strip zeros from the end |
| factorStr = factorStr.replace(/\.?0+$/, ''); |
| |
| result += SEP + factorStr; |
| |
| return result; |
| } |
| |
| // create a comparator based on the sort object |
| function createFieldSorter(sort) { |
| |
| function getFieldValuesAsArray(doc) { |
| return sort.map(function (sorting) { |
| var fieldName = getKey(sorting); |
| var parsedField = parseField(fieldName); |
| var docFieldValue = getFieldFromDoc(doc, parsedField); |
| return docFieldValue; |
| }); |
| } |
| |
| return function (aRow, bRow) { |
| var aFieldValues = getFieldValuesAsArray(aRow.doc); |
| var bFieldValues = getFieldValuesAsArray(bRow.doc); |
| var collation = collate(aFieldValues, bFieldValues); |
| if (collation !== 0) { |
| return collation; |
| } |
| // this is what mango seems to do |
| return compare(aRow.doc._id, bRow.doc._id); |
| }; |
| } |
| |
| function filterInMemoryFields(rows, requestDef, inMemoryFields) { |
| rows = rows.filter(function (row) { |
| return rowFilter(row.doc, requestDef.selector, inMemoryFields); |
| }); |
| |
| if (requestDef.sort) { |
| // in-memory sort |
| var fieldSorter = createFieldSorter(requestDef.sort); |
| rows = rows.sort(fieldSorter); |
| if (typeof requestDef.sort[0] !== 'string' && |
| getValue(requestDef.sort[0]) === 'desc') { |
| rows = rows.reverse(); |
| } |
| } |
| |
| if ('limit' in requestDef || 'skip' in requestDef) { |
| // have to do the limit in-memory |
| var skip = requestDef.skip || 0; |
| var limit = ('limit' in requestDef ? requestDef.limit : rows.length) + skip; |
| rows = rows.slice(skip, limit); |
| } |
| return rows; |
| } |
| |
| function rowFilter(doc, selector, inMemoryFields) { |
| return inMemoryFields.every(function (field) { |
| var matcher = selector[field]; |
| var parsedField = parseField(field); |
| var docFieldValue = getFieldFromDoc(doc, parsedField); |
| if (isCombinationalField(field)) { |
| return matchCominationalSelector(field, matcher, doc); |
| } |
| |
| return matchSelector(matcher, doc, parsedField, docFieldValue); |
| }); |
| } |
| |
| function matchSelector(matcher, doc, parsedField, docFieldValue) { |
| if (!matcher) { |
| // no filtering necessary; this field is just needed for sorting |
| return true; |
| } |
| |
| // is matcher an object, if so continue recursion |
| if (typeof matcher === 'object') { |
| return Object.keys(matcher).every(function (maybeUserOperator) { |
| var userValue = matcher[ maybeUserOperator ]; |
| // explicit operator |
| if (maybeUserOperator.indexOf("$") === 0) { |
| return match(maybeUserOperator, doc, userValue, parsedField, docFieldValue); |
| } else { |
| var subParsedField = parseField(maybeUserOperator); |
| |
| if ( |
| docFieldValue === undefined && |
| typeof userValue !== "object" && |
| subParsedField.length > 0 |
| ) { |
| // the field does not exist, return or getFieldFromDoc will throw |
| return false; |
| } |
| |
| var subDocFieldValue = getFieldFromDoc(docFieldValue, subParsedField); |
| |
| if (typeof userValue === "object") { |
| // field value is an object that might contain more operators |
| return matchSelector(userValue, doc, parsedField, subDocFieldValue); |
| } |
| |
| // implicit operator |
| return match("$eq", doc, userValue, subParsedField, subDocFieldValue); |
| } |
| }); |
| } |
| |
| // no more depth, No need to recurse further |
| return matcher === docFieldValue; |
| } |
| |
| function matchCominationalSelector(field, matcher, doc) { |
| |
| if (field === '$or') { |
| return matcher.some(function (orMatchers) { |
| return rowFilter(doc, orMatchers, Object.keys(orMatchers)); |
| }); |
| } |
| |
| if (field === '$not') { |
| return !rowFilter(doc, matcher, Object.keys(matcher)); |
| } |
| |
| //`$nor` |
| return !matcher.find(function (orMatchers) { |
| return rowFilter(doc, orMatchers, Object.keys(orMatchers)); |
| }); |
| |
| } |
| |
| function match(userOperator, doc, userValue, parsedField, docFieldValue) { |
| if (!matchers[userOperator]) { |
| /* istanbul ignore next */ |
| throw new Error('unknown operator "' + userOperator + |
| '" - should be one of $eq, $lte, $lt, $gt, $gte, $exists, $ne, $in, ' + |
| '$nin, $size, $mod, $regex, $elemMatch, $type, $allMatch or $all'); |
| } |
| return matchers[userOperator](doc, userValue, parsedField, docFieldValue); |
| } |
| |
| function fieldExists(docFieldValue) { |
| return typeof docFieldValue !== 'undefined' && docFieldValue !== null; |
| } |
| |
| function fieldIsNotUndefined(docFieldValue) { |
| return typeof docFieldValue !== 'undefined'; |
| } |
| |
| function modField(docFieldValue, userValue) { |
| if (typeof docFieldValue !== "number" || |
| parseInt(docFieldValue, 10) !== docFieldValue) { |
| return false; |
| } |
| |
| var divisor = userValue[0]; |
| var mod = userValue[1]; |
| |
| return docFieldValue % divisor === mod; |
| } |
| |
| function arrayContainsValue(docFieldValue, userValue) { |
| return userValue.some(function (val) { |
| if (docFieldValue instanceof Array) { |
| return docFieldValue.some(function (docFieldValueItem) { |
| return collate(val, docFieldValueItem) === 0; |
| }); |
| } |
| |
| return collate(val, docFieldValue) === 0; |
| }); |
| } |
| |
| function arrayContainsAllValues(docFieldValue, userValue) { |
| return userValue.every(function (val) { |
| return docFieldValue.some(function (docFieldValueItem) { |
| return collate(val, docFieldValueItem) === 0; |
| }); |
| }); |
| } |
| |
| function arraySize(docFieldValue, userValue) { |
| return docFieldValue.length === userValue; |
| } |
| |
| function regexMatch(docFieldValue, userValue) { |
| var re = new RegExp(userValue); |
| |
| return re.test(docFieldValue); |
| } |
| |
| function typeMatch(docFieldValue, userValue) { |
| |
| switch (userValue) { |
| case 'null': |
| return docFieldValue === null; |
| case 'boolean': |
| return typeof (docFieldValue) === 'boolean'; |
| case 'number': |
| return typeof (docFieldValue) === 'number'; |
| case 'string': |
| return typeof (docFieldValue) === 'string'; |
| case 'array': |
| return docFieldValue instanceof Array; |
| case 'object': |
| return ({}).toString.call(docFieldValue) === '[object Object]'; |
| } |
| } |
| |
| var matchers = { |
| |
| '$elemMatch': function (doc, userValue, parsedField, docFieldValue) { |
| if (!Array.isArray(docFieldValue)) { |
| return false; |
| } |
| |
| if (docFieldValue.length === 0) { |
| return false; |
| } |
| |
| if (typeof docFieldValue[0] === 'object' && docFieldValue[0] !== null) { |
| return docFieldValue.some(function (val) { |
| return rowFilter(val, userValue, Object.keys(userValue)); |
| }); |
| } |
| |
| return docFieldValue.some(function (val) { |
| return matchSelector(userValue, doc, parsedField, val); |
| }); |
| }, |
| |
| '$allMatch': function (doc, userValue, parsedField, docFieldValue) { |
| if (!Array.isArray(docFieldValue)) { |
| return false; |
| } |
| |
| /* istanbul ignore next */ |
| if (docFieldValue.length === 0) { |
| return false; |
| } |
| |
| if (typeof docFieldValue[0] === 'object' && docFieldValue[0] !== null) { |
| return docFieldValue.every(function (val) { |
| return rowFilter(val, userValue, Object.keys(userValue)); |
| }); |
| } |
| |
| return docFieldValue.every(function (val) { |
| return matchSelector(userValue, doc, parsedField, val); |
| }); |
| }, |
| |
| '$eq': function (doc, userValue, parsedField, docFieldValue) { |
| return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) === 0; |
| }, |
| |
| '$gte': function (doc, userValue, parsedField, docFieldValue) { |
| return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) >= 0; |
| }, |
| |
| '$gt': function (doc, userValue, parsedField, docFieldValue) { |
| return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) > 0; |
| }, |
| |
| '$lte': function (doc, userValue, parsedField, docFieldValue) { |
| return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) <= 0; |
| }, |
| |
| '$lt': function (doc, userValue, parsedField, docFieldValue) { |
| return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) < 0; |
| }, |
| |
| '$exists': function (doc, userValue, parsedField, docFieldValue) { |
| //a field that is null is still considered to exist |
| if (userValue) { |
| return fieldIsNotUndefined(docFieldValue); |
| } |
| |
| return !fieldIsNotUndefined(docFieldValue); |
| }, |
| |
| '$mod': function (doc, userValue, parsedField, docFieldValue) { |
| return fieldExists(docFieldValue) && modField(docFieldValue, userValue); |
| }, |
| |
| '$ne': function (doc, userValue, parsedField, docFieldValue) { |
| return userValue.every(function (neValue) { |
| return collate(docFieldValue, neValue) !== 0; |
| }); |
| }, |
| '$in': function (doc, userValue, parsedField, docFieldValue) { |
| return fieldExists(docFieldValue) && arrayContainsValue(docFieldValue, userValue); |
| }, |
| |
| '$nin': function (doc, userValue, parsedField, docFieldValue) { |
| return fieldExists(docFieldValue) && !arrayContainsValue(docFieldValue, userValue); |
| }, |
| |
| '$size': function (doc, userValue, parsedField, docFieldValue) { |
| return fieldExists(docFieldValue) && |
| Array.isArray(docFieldValue) && |
| arraySize(docFieldValue, userValue); |
| }, |
| |
| '$all': function (doc, userValue, parsedField, docFieldValue) { |
| return Array.isArray(docFieldValue) && arrayContainsAllValues(docFieldValue, userValue); |
| }, |
| |
| '$regex': function (doc, userValue, parsedField, docFieldValue) { |
| return fieldExists(docFieldValue) && |
| typeof docFieldValue == "string" && |
| userValue.every(function (regexValue) { |
| return regexMatch(docFieldValue, regexValue); |
| }); |
| }, |
| |
| '$type': function (doc, userValue, parsedField, docFieldValue) { |
| return typeMatch(docFieldValue, userValue); |
| } |
| }; |
| |
| // return true if the given doc matches the supplied selector |
| function matchesSelector(doc, selector) { |
| /* istanbul ignore if */ |
| if (typeof selector !== 'object') { |
| // match the CouchDB error message |
| throw new Error('Selector error: expected a JSON object'); |
| } |
| |
| selector = massageSelector(selector); |
| var row = { |
| doc |
| }; |
| |
| var rowsMatched = filterInMemoryFields([row], { selector }, Object.keys(selector)); |
| return rowsMatched && rowsMatched.length === 1; |
| } |
| |
| function evalFilter(input) { |
| var code = '(function() {\n"use strict";\nreturn ' + input + '\n})()'; |
| |
| return vm.runInNewContext(code); |
| } |
| |
| function evalView(input) { |
| var code = [ |
| '"use strict";', |
| 'var emitted = false;', |
| 'var emit = function (a, b) {', |
| ' emitted = true;', |
| '};', |
| 'var view = ' + input + ';', |
| 'view(doc);', |
| 'if (emitted) {', |
| ' return true;', |
| '}' |
| ].join('\n'); |
| |
| return vm.runInNewContext('(function(doc) {\n' + code + '\n})'); |
| } |
| |
| function validate(opts, callback) { |
| if (opts.selector) { |
| if (opts.filter && opts.filter !== '_selector') { |
| var filterName = typeof opts.filter === 'string' ? |
| opts.filter : 'function'; |
| return callback(new Error('selector invalid for filter "' + filterName + '"')); |
| } |
| } |
| callback(); |
| } |
| |
| function normalize(opts) { |
| if (opts.view && !opts.filter) { |
| opts.filter = '_view'; |
| } |
| |
| if (opts.selector && !opts.filter) { |
| opts.filter = '_selector'; |
| } |
| |
| if (opts.filter && typeof opts.filter === 'string') { |
| if (opts.filter === '_view') { |
| opts.view = normalizeDesignDocFunctionName(opts.view); |
| } else { |
| opts.filter = normalizeDesignDocFunctionName(opts.filter); |
| } |
| } |
| } |
| |
| function shouldFilter(changesHandler, opts) { |
| return opts.filter && typeof opts.filter === 'string' && |
| !opts.doc_ids && !isRemote(changesHandler.db); |
| } |
| |
| function filter(changesHandler, opts) { |
| var callback = opts.complete; |
| if (opts.filter === '_view') { |
| if (!opts.view || typeof opts.view !== 'string') { |
| var err = createError(BAD_REQUEST, |
| '`view` filter parameter not found or invalid.'); |
| return callback(err); |
| } |
| // fetch a view from a design doc, make it behave like a filter |
| var viewName = parseDesignDocFunctionName(opts.view); |
| changesHandler.db.get('_design/' + viewName[0], function (err, ddoc) { |
| /* istanbul ignore if */ |
| if (changesHandler.isCancelled) { |
| return callback(null, {status: 'cancelled'}); |
| } |
| /* istanbul ignore next */ |
| if (err) { |
| return callback(generateErrorFromResponse(err)); |
| } |
| var mapFun = ddoc && ddoc.views && ddoc.views[viewName[1]] && |
| ddoc.views[viewName[1]].map; |
| if (!mapFun) { |
| return callback(createError(MISSING_DOC, |
| (ddoc.views ? 'missing json key: ' + viewName[1] : |
| 'missing json key: views'))); |
| } |
| opts.filter = evalView(mapFun); |
| changesHandler.doChanges(opts); |
| }); |
| } else if (opts.selector) { |
| opts.filter = function (doc) { |
| return matchesSelector(doc, opts.selector); |
| }; |
| changesHandler.doChanges(opts); |
| } else { |
| // fetch a filter from a design doc |
| var filterName = parseDesignDocFunctionName(opts.filter); |
| changesHandler.db.get('_design/' + filterName[0], function (err, ddoc) { |
| /* istanbul ignore if */ |
| if (changesHandler.isCancelled) { |
| return callback(null, {status: 'cancelled'}); |
| } |
| /* istanbul ignore next */ |
| if (err) { |
| return callback(generateErrorFromResponse(err)); |
| } |
| var filterFun = ddoc && ddoc.filters && ddoc.filters[filterName[1]]; |
| if (!filterFun) { |
| return callback(createError(MISSING_DOC, |
| ((ddoc && ddoc.filters) ? 'missing json key: ' + filterName[1] |
| : 'missing json key: filters'))); |
| } |
| opts.filter = evalFilter(filterFun); |
| changesHandler.doChanges(opts); |
| }); |
| } |
| } |
| |
| function applyChangesFilterPlugin(PouchDB) { |
| PouchDB._changesFilterPlugin = { |
| validate, |
| normalize, |
| shouldFilter, |
| filter |
| }; |
| } |
| |
| // TODO: remove from pouchdb-core (breaking) |
| PouchDB.plugin(applyChangesFilterPlugin); |
| |
| PouchDB.version = version; |
| |
| function isFunction(f) { |
| return 'function' === typeof f; |
| } |
| |
| function getPrefix(db) { |
| if (isFunction(db.prefix)) { |
| return db.prefix(); |
| } |
| return db; |
| } |
| |
| function clone$1(_obj) { |
| var obj = {}; |
| for (var k in _obj) { |
| obj[k] = _obj[k]; |
| } |
| return obj; |
| } |
| |
| function nut(db, precodec, codec) { |
| function encodePrefix(prefix, key, opts1, opts2) { |
| return precodec.encode([ prefix, codec.encodeKey(key, opts1, opts2 ) ]); |
| } |
| |
| function addEncodings(op, prefix) { |
| if (prefix && prefix.options) { |
| op.keyEncoding = |
| op.keyEncoding || prefix.options.keyEncoding; |
| op.valueEncoding = |
| op.valueEncoding || prefix.options.valueEncoding; |
| } |
| return op; |
| } |
| |
| db.open(function () { /* no-op */}); |
| |
| return { |
| apply: function (ops, opts, cb) { |
| opts = opts || {}; |
| |
| var batch = []; |
| var i = -1; |
| var len = ops.length; |
| |
| while (++i < len) { |
| var op = ops[i]; |
| addEncodings(op, op.prefix); |
| op.prefix = getPrefix(op.prefix); |
| batch.push({ |
| key: encodePrefix(op.prefix, op.key, opts, op), |
| value: op.type !== 'del' && codec.encodeValue(op.value, opts, op), |
| type: op.type |
| }); |
| } |
| db.db.batch(batch, opts, cb); |
| }, |
| get: function (key, prefix, opts, cb) { |
| opts.asBuffer = codec.valueAsBuffer(opts); |
| return db.db.get( |
| encodePrefix(prefix, key, opts), |
| opts, |
| function (err, value) { |
| if (err) { |
| cb(err); |
| } else { |
| cb(null, codec.decodeValue(value, opts)); |
| } |
| } |
| ); |
| }, |
| createDecoder: function (opts) { |
| return function (key, value) { |
| return { |
| key: codec.decodeKey(precodec.decode(key)[1], opts), |
| value: codec.decodeValue(value, opts) |
| }; |
| }; |
| }, |
| isClosed: function isClosed() { |
| return db.isClosed(); |
| }, |
| close: function close(cb) { |
| return db.close(cb); |
| }, |
| iterator: function (_opts) { |
| var opts = clone$1(_opts || {}); |
| var prefix = _opts.prefix || []; |
| |
| function encodeKey(key) { |
| return encodePrefix(prefix, key, opts, {}); |
| } |
| |
| ltgt.toLtgt(_opts, opts, encodeKey, precodec.lowerBound, precodec.upperBound); |
| |
| // if these legacy values are in the options, remove them |
| |
| opts.prefix = null; |
| |
| //************************************************ |
| //hard coded defaults, for now... |
| //TODO: pull defaults and encoding out of levelup. |
| opts.keyAsBuffer = opts.valueAsBuffer = false; |
| //************************************************ |
| |
| |
| //this is vital, otherwise limit: undefined will |
| //create an empty stream. |
| /* istanbul ignore next */ |
| if ('number' !== typeof opts.limit) { |
| opts.limit = -1; |
| } |
| |
| opts.keyAsBuffer = precodec.buffer; |
| opts.valueAsBuffer = codec.valueAsBuffer(opts); |
| |
| function wrapIterator(iterator) { |
| return { |
| next: function (cb) { |
| return iterator.next(cb); |
| }, |
| end: function (cb) { |
| iterator.end(cb); |
| } |
| }; |
| } |
| |
| return wrapIterator(db.db.iterator(opts)); |
| } |
| }; |
| } |
| |
| class NotFoundError extends Error { |
| constructor() { |
| super(); |
| this.name = 'NotFoundError'; |
| } |
| } |
| |
| var EventEmitter = EE.EventEmitter; |
| var version$1 = "6.5.4"; |
| |
| var NOT_FOUND_ERROR = new NotFoundError(); |
| |
| var sublevel = function (nut, prefix, createStream, options) { |
| var emitter = new EventEmitter(); |
| emitter.sublevels = {}; |
| emitter.options = options; |
| |
| emitter.version = version$1; |
| |
| emitter.methods = {}; |
| prefix = prefix || []; |
| |
| function mergeOpts(opts) { |
| var o = {}; |
| var k; |
| if (options) { |
| for (k in options) { |
| if (typeof options[k] !== 'undefined') { |
| o[k] = options[k]; |
| } |
| } |
| } |
| if (opts) { |
| for (k in opts) { |
| if (typeof opts[k] !== 'undefined') { |
| o[k] = opts[k]; |
| } |
| } |
| } |
| return o; |
| } |
| |
| emitter.put = function (key, value, opts, cb) { |
| if ('function' === typeof opts) { |
| cb = opts; |
| opts = {}; |
| } |
| |
| nut.apply([{ |
| key, value, |
| prefix: prefix.slice(), type: 'put' |
| }], mergeOpts(opts), function (err) { |
| /* istanbul ignore next */ |
| if (err) { |
| return cb(err); |
| } |
| emitter.emit('put', key, value); |
| cb(null); |
| }); |
| }; |
| |
| emitter.prefix = function () { |
| return prefix.slice(); |
| }; |
| |
| emitter.batch = function (ops, opts, cb) { |
| if ('function' === typeof opts) { |
| cb = opts; |
| opts = {}; |
| } |
| |
| ops = ops.map(function (op) { |
| return { |
| key: op.key, |
| value: op.value, |
| prefix: op.prefix || prefix, |
| keyEncoding: op.keyEncoding, // * |
| valueEncoding: op.valueEncoding, // * (TODO: encodings on sublevel) |
| type: op.type |
| }; |
| }); |
| |
| nut.apply(ops, mergeOpts(opts), function (err) { |
| /* istanbul ignore next */ |
| if (err) { |
| return cb(err); |
| } |
| emitter.emit('batch', ops); |
| cb(null); |
| }); |
| }; |
| |
| emitter.get = function (key, opts, cb) { |
| /* istanbul ignore else */ |
| if ('function' === typeof opts) { |
| cb = opts; |
| opts = {}; |
| } |
| nut.get(key, prefix, mergeOpts(opts), function (err, value) { |
| if (err) { |
| cb(NOT_FOUND_ERROR); |
| } else { |
| cb(null, value); |
| } |
| }); |
| }; |
| |
| emitter.sublevel = function (name, opts) { |
| return emitter.sublevels[name] = |
| emitter.sublevels[name] || sublevel(nut, prefix.concat(name), createStream, mergeOpts(opts)); |
| }; |
| |
| emitter.readStream = emitter.createReadStream = function (opts) { |
| opts = mergeOpts(opts); |
| opts.prefix = prefix; |
| var stream; |
| var it = nut.iterator(opts); |
| |
| stream = createStream(opts, nut.createDecoder(opts)); |
| stream.setIterator(it); |
| |
| return stream; |
| }; |
| |
| emitter.close = function (cb) { |
| nut.close(cb); |
| }; |
| |
| emitter.isOpen = nut.isOpen; |
| emitter.isClosed = nut.isClosed; |
| |
| return emitter; |
| }; |
| |
| /* Copyright (c) 2012-2014 LevelUP contributors |
| * See list at <https://github.com/rvagg/node-levelup#contributing> |
| * MIT License <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> |
| */ |
| |
| var Readable = ReadableStreamCore.Readable; |
| |
| function createClass$1(parent, init) { |
| let klass = function (...args) { |
| if (!(this instanceof klass)) { |
| return new klass(...args); |
| } |
| init.apply(this, args); |
| }; |
| klass.prototype = Object.create(parent.prototype, { |
| constructor: { value: klass } |
| }); |
| return klass; |
| } |
| |
| class ReadStreamInternal extends Readable { |
| constructor(options, makeData) { |
| super({ objectMode: true, highWaterMark: options.highWaterMark }); |
| this._setup(options, makeData); |
| } |
| |
| _setup(options, makeData) { |
| super.constructor({ objectMode: true, highWaterMark: options.highWaterMark }); |
| |
| // purely to keep `db` around until we're done so it's not GCed if the user doesn't keep a ref |
| this._waiting = false; |
| this._options = options; |
| this._makeData = makeData; |
| } |
| |
| setIterator(it) { |
| this._iterator = it; |
| /* istanbul ignore if */ |
| if (this._destroyed) { |
| return it.end(function () {}); |
| } |
| /* istanbul ignore if */ |
| if (this._waiting) { |
| this._waiting = false; |
| return this._read(); |
| } |
| return this; |
| } |
| |
| _cleanup(err) { |
| if (this._destroyed) { |
| return; |
| } |
| |
| this._destroyed = true; |
| |
| var self = this; |
| /* istanbul ignore if */ |
| if (err && err.message !== 'iterator has ended') { |
| self.emit('error', err); |
| } |
| |
| /* istanbul ignore else */ |
| if (self._iterator) { |
| self._iterator.end(function () { |
| self._iterator = null; |
| self.emit('close'); |
| }); |
| } else { |
| self.emit('close'); |
| } |
| } |
| |
| destroy() { |
| this._cleanup(); |
| } |
| |
| _read() { |
| var self = this; |
| /* istanbul ignore if */ |
| if (self._destroyed) { |
| return; |
| } |
| /* istanbul ignore if */ |
| if (!self._iterator) { |
| return this._waiting = true; |
| } |
| |
| self._iterator.next(function (err, key, value) { |
| if (err || (key === undefined && value === undefined)) { |
| if (!err && !self._destroyed) { |
| self.push(null); |
| } |
| return self._cleanup(err); |
| } |
| |
| |
| value = self._makeData(key, value); |
| if (!self._destroyed) { |
| self.push(value); |
| } |
| }); |
| } |
| } |
| |
| const ReadStream = createClass$1(ReadStreamInternal, function (options, makeData) { |
| ReadStreamInternal.prototype._setup.call(this, options, makeData); |
| }); |
| |
| var precodec = { |
| encode: function (decodedKey) { |
| return '\xff' + decodedKey[0] + '\xff' + decodedKey[1]; |
| }, |
| decode: function (encodedKeyAsBuffer) { |
| var str = encodedKeyAsBuffer.toString(); |
| var idx = str.indexOf('\xff', 1); |
| return [str.substring(1, idx), str.substring(idx + 1)]; |
| }, |
| lowerBound: '\x00', |
| upperBound: '\xff' |
| }; |
| |
| var codec = new Codec(); |
| |
| function sublevelPouch(db) { |
| return sublevel(nut(db, precodec, codec), [], ReadStream, db.options); |
| } |
| |
| function allDocsKeysQuery(api, opts) { |
| var keys = opts.keys; |
| var finalResults = { |
| offset: opts.skip |
| }; |
| return Promise.all(keys.map(function (key) { |
| var subOpts = Object.assign({key, deleted: 'ok'}, opts); |
| ['limit', 'skip', 'keys'].forEach(function (optKey) { |
| delete subOpts[optKey]; |
| }); |
| return new Promise(function (resolve, reject) { |
| api._allDocs(subOpts, function (err, res) { |
| /* istanbul ignore if */ |
| if (err) { |
| return reject(err); |
| } |
| /* istanbul ignore if */ |
| if (opts.update_seq && res.update_seq !== undefined) { |
| finalResults.update_seq = res.update_seq; |
| } |
| finalResults.total_rows = res.total_rows; |
| resolve(res.rows[0] || {key, error: 'not_found'}); |
| }); |
| }); |
| })).then(function (results) { |
| finalResults.rows = results; |
| return finalResults; |
| }); |
| } |
| |
| function thisAtob(str) { |
| var base64 = Buffer.from(str, 'base64'); |
| // Node.js will just skip the characters it can't decode instead of |
| // throwing an exception |
| if (base64.toString('base64') !== str) { |
| throw new Error("attachment is not a valid base64 string"); |
| } |
| return base64.toString('binary'); |
| } |
| |
| function thisBtoa(str) { |
| return Buffer.from(str, 'binary').toString('base64'); |
| } |
| |
| function typedBuffer(binString, buffType, type) { |
| // buffType is either 'binary' or 'base64' |
| const buff = Buffer.from(binString, buffType); |
| buff.type = type; // non-standard, but used for consistency with the browser |
| return buff; |
| } |
| |
| function b64ToBluffer(b64, type) { |
| return typedBuffer(b64, 'base64', type); |
| } |
| |
| // From http://stackoverflow.com/questions/14967647/ (continues on next line) |
| |
| function binStringToBluffer(binString, type) { |
| return typedBuffer(binString, 'binary', type); |
| } |
| |
| // This function is unused in Node |
| |
| function blobToBase64(blobOrBuffer, callback) { |
| callback(blobOrBuffer.toString('base64')); |
| } |
| |
| // not used in Node, but here for completeness |
| |
| // simplified API. universal browser support is assumed |
| |
| //Can't find original post, but this is close |
| |
| function toObject(array) { |
| return array.reduce(function (obj, item) { |
| obj[item] = true; |
| return obj; |
| }, {}); |
| } |
| // List of top level reserved words for doc |
| var reservedWords = toObject([ |
| '_id', |
| '_rev', |
| '_access', |
| '_attachments', |
| '_deleted', |
| '_revisions', |
| '_revs_info', |
| '_conflicts', |
| '_deleted_conflicts', |
| '_local_seq', |
| '_rev_tree', |
| // replication documents |
| '_replication_id', |
| '_replication_state', |
| '_replication_state_time', |
| '_replication_state_reason', |
| '_replication_stats', |
| // Specific to Couchbase Sync Gateway |
| '_removed' |
| ]); |
| |
| // List of reserved words that should end up in the document |
| var dataWords = toObject([ |
| '_access', |
| '_attachments', |
| // replication documents |
| '_replication_id', |
| '_replication_state', |
| '_replication_state_time', |
| '_replication_state_reason', |
| '_replication_stats' |
| ]); |
| |
| function parseRevisionInfo(rev$$1) { |
| if (!/^\d+-/.test(rev$$1)) { |
| return createError(INVALID_REV); |
| } |
| var idx = rev$$1.indexOf('-'); |
| var left = rev$$1.substring(0, idx); |
| var right = rev$$1.substring(idx + 1); |
| return { |
| prefix: parseInt(left, 10), |
| id: right |
| }; |
| } |
| |
| function makeRevTreeFromRevisions(revisions, opts) { |
| var pos = revisions.start - revisions.ids.length + 1; |
| |
| var revisionIds = revisions.ids; |
| var ids = [revisionIds[0], opts, []]; |
| |
| for (var i = 1, len = revisionIds.length; i < len; i++) { |
| ids = [revisionIds[i], {status: 'missing'}, [ids]]; |
| } |
| |
| return [{ |
| pos, |
| ids |
| }]; |
| } |
| |
| // Preprocess documents, parse their revisions, assign an id and a |
| // revision for new writes that are missing them, etc |
| function parseDoc(doc, newEdits, dbOpts) { |
| if (!dbOpts) { |
| dbOpts = { |
| deterministic_revs: true |
| }; |
| } |
| |
| var nRevNum; |
| var newRevId; |
| var revInfo; |
| var opts = {status: 'available'}; |
| if (doc._deleted) { |
| opts.deleted = true; |
| } |
| |
| if (newEdits) { |
| if (!doc._id) { |
| doc._id = uuid$1(); |
| } |
| newRevId = rev(doc, dbOpts.deterministic_revs); |
| if (doc._rev) { |
| revInfo = parseRevisionInfo(doc._rev); |
| if (revInfo.error) { |
| return revInfo; |
| } |
| doc._rev_tree = [{ |
| pos: revInfo.prefix, |
| ids: [revInfo.id, {status: 'missing'}, [[newRevId, opts, []]]] |
| }]; |
| nRevNum = revInfo.prefix + 1; |
| } else { |
| doc._rev_tree = [{ |
| pos: 1, |
| ids : [newRevId, opts, []] |
| }]; |
| nRevNum = 1; |
| } |
| } else { |
| if (doc._revisions) { |
| doc._rev_tree = makeRevTreeFromRevisions(doc._revisions, opts); |
| nRevNum = doc._revisions.start; |
| newRevId = doc._revisions.ids[0]; |
| } |
| if (!doc._rev_tree) { |
| revInfo = parseRevisionInfo(doc._rev); |
| if (revInfo.error) { |
| return revInfo; |
| } |
| nRevNum = revInfo.prefix; |
| newRevId = revInfo.id; |
| doc._rev_tree = [{ |
| pos: nRevNum, |
| ids: [newRevId, opts, []] |
| }]; |
| } |
| } |
| |
| invalidIdError(doc._id); |
| |
| doc._rev = nRevNum + '-' + newRevId; |
| |
| var result = {metadata : {}, data : {}}; |
| for (var key in doc) { |
| /* istanbul ignore else */ |
| if (Object.prototype.hasOwnProperty.call(doc, key)) { |
| var specialKey = key[0] === '_'; |
| if (specialKey && !reservedWords[key]) { |
| var error = createError(DOC_VALIDATION, key); |
| error.message = DOC_VALIDATION.message + ': ' + key; |
| throw error; |
| } else if (specialKey && !dataWords[key]) { |
| result.metadata[key.slice(1)] = doc[key]; |
| } else { |
| result.data[key] = doc[key]; |
| } |
| } |
| } |
| return result; |
| } |
| |
| function updateDoc(revLimit, prev, docInfo, results, |
| i, cb, writeDoc, newEdits) { |
| |
| if (revExists(prev.rev_tree, docInfo.metadata.rev) && !newEdits) { |
| results[i] = docInfo; |
| return cb(); |
| } |
| |
| // sometimes this is pre-calculated. historically not always |
| var previousWinningRev = prev.winningRev || winningRev(prev); |
| var previouslyDeleted = 'deleted' in prev ? prev.deleted : |
| isDeleted(prev, previousWinningRev); |
| var deleted = 'deleted' in docInfo.metadata ? docInfo.metadata.deleted : |
| isDeleted(docInfo.metadata); |
| var isRoot = /^1-/.test(docInfo.metadata.rev); |
| |
| if (previouslyDeleted && !deleted && newEdits && isRoot) { |
| var newDoc = docInfo.data; |
| newDoc._rev = previousWinningRev; |
| newDoc._id = docInfo.metadata.id; |
| docInfo = parseDoc(newDoc, newEdits); |
| } |
| |
| var merged = merge(prev.rev_tree, docInfo.metadata.rev_tree[0], revLimit); |
| |
| var inConflict = newEdits && (( |
| (previouslyDeleted && deleted && merged.conflicts !== 'new_leaf') || |
| (!previouslyDeleted && merged.conflicts !== 'new_leaf') || |
| (previouslyDeleted && !deleted && merged.conflicts === 'new_branch'))); |
| |
| if (inConflict) { |
| var err = createError(REV_CONFLICT); |
| results[i] = err; |
| return cb(); |
| } |
| |
| var newRev = docInfo.metadata.rev; |
| docInfo.metadata.rev_tree = merged.tree; |
| docInfo.stemmedRevs = merged.stemmedRevs || []; |
| /* istanbul ignore else */ |
| if (prev.rev_map) { |
| docInfo.metadata.rev_map = prev.rev_map; // used only by leveldb |
| } |
| |
| // recalculate |
| var winningRev$$1 = winningRev(docInfo.metadata); |
| var winningRevIsDeleted = isDeleted(docInfo.metadata, winningRev$$1); |
| |
| // calculate the total number of documents that were added/removed, |
| // from the perspective of total_rows/doc_count |
| var delta = (previouslyDeleted === winningRevIsDeleted) ? 0 : |
| previouslyDeleted < winningRevIsDeleted ? -1 : 1; |
| |
| var newRevIsDeleted; |
| if (newRev === winningRev$$1) { |
| // if the new rev is the same as the winning rev, we can reuse that value |
| newRevIsDeleted = winningRevIsDeleted; |
| } else { |
| // if they're not the same, then we need to recalculate |
| newRevIsDeleted = isDeleted(docInfo.metadata, newRev); |
| } |
| |
| writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted, |
| true, delta, i, cb); |
| } |
| |
| function rootIsMissing(docInfo) { |
| return docInfo.metadata.rev_tree[0].ids[1].status === 'missing'; |
| } |
| |
| function processDocs(revLimit, docInfos, api, fetchedDocs, tx, results, |
| writeDoc, opts, overallCallback) { |
| |
| // Default to 1000 locally |
| revLimit = revLimit || 1000; |
| |
| function insertDoc(docInfo, resultsIdx, callback) { |
| // Cant insert new deleted documents |
| var winningRev$$1 = winningRev(docInfo.metadata); |
| var deleted = isDeleted(docInfo.metadata, winningRev$$1); |
| if ('was_delete' in opts && deleted) { |
| results[resultsIdx] = createError(MISSING_DOC, 'deleted'); |
| return callback(); |
| } |
| |
| // 4712 - detect whether a new document was inserted with a _rev |
| var inConflict = newEdits && rootIsMissing(docInfo); |
| |
| if (inConflict) { |
| var err = createError(REV_CONFLICT); |
| results[resultsIdx] = err; |
| return callback(); |
| } |
| |
| var delta = deleted ? 0 : 1; |
| |
| writeDoc(docInfo, winningRev$$1, deleted, deleted, false, |
| delta, resultsIdx, callback); |
| } |
| |
| var newEdits = opts.new_edits; |
| var idsToDocs = new Map(); |
| |
| var docsDone = 0; |
| var docsToDo = docInfos.length; |
| |
| function checkAllDocsDone() { |
| if (++docsDone === docsToDo && overallCallback) { |
| overallCallback(); |
| } |
| } |
| |
| docInfos.forEach(function (currentDoc, resultsIdx) { |
| |
| if (currentDoc._id && isLocalId(currentDoc._id)) { |
| var fun = currentDoc._deleted ? '_removeLocal' : '_putLocal'; |
| api[fun](currentDoc, {ctx: tx}, function (err, res) { |
| results[resultsIdx] = err || res; |
| checkAllDocsDone(); |
| }); |
| return; |
| } |
| |
| var id = currentDoc.metadata.id; |
| if (idsToDocs.has(id)) { |
| docsToDo--; // duplicate |
| idsToDocs.get(id).push([currentDoc, resultsIdx]); |
| } else { |
| idsToDocs.set(id, [[currentDoc, resultsIdx]]); |
| } |
| }); |
| |
| // in the case of new_edits, the user can provide multiple docs |
| // with the same id. these need to be processed sequentially |
| idsToDocs.forEach(function (docs, id) { |
| var numDone = 0; |
| |
| function docWritten() { |
| if (++numDone < docs.length) { |
| nextDoc(); |
| } else { |
| checkAllDocsDone(); |
| } |
| } |
| function nextDoc() { |
| var value = docs[numDone]; |
| var currentDoc = value[0]; |
| var resultsIdx = value[1]; |
| |
| if (fetchedDocs.has(id)) { |
| updateDoc(revLimit, fetchedDocs.get(id), currentDoc, results, |
| resultsIdx, docWritten, writeDoc, newEdits); |
| } else { |
| // Ensure stemming applies to new writes as well |
| var merged = merge([], currentDoc.metadata.rev_tree[0], revLimit); |
| currentDoc.metadata.rev_tree = merged.tree; |
| currentDoc.stemmedRevs = merged.stemmedRevs || []; |
| insertDoc(currentDoc, resultsIdx, docWritten); |
| } |
| } |
| nextDoc(); |
| }); |
| } |
| |
| function safeJsonParse(str) { |
| // This try/catch guards against stack overflow errors. |
| // JSON.parse() is faster than vuvuzela.parse() but vuvuzela |
| // cannot overflow. |
| try { |
| return JSON.parse(str); |
| } catch (e) { |
| /* istanbul ignore next */ |
| return vuvuzela.parse(str); |
| } |
| } |
| |
| function safeJsonStringify(json) { |
| try { |
| return JSON.stringify(json); |
| } catch (e) { |
| /* istanbul ignore next */ |
| return vuvuzela.stringify(json); |
| } |
| } |
| |
| function readAsBlobOrBuffer(storedObject, type) { |
| // In Node, we've stored a buffer |
| storedObject.type = type; // non-standard, but used for consistency |
| return storedObject; |
| } |
| |
| // in Node, we store the buffer directly |
| function prepareAttachmentForStorage(attData, cb) { |
| cb(attData); |
| } |
| |
| function createEmptyBlobOrBuffer(type) { |
| return typedBuffer('', 'binary', type); |
| } |
| |
| // similar to an idb or websql transaction object |
| |
| function getCacheFor(transaction, store) { |
| var prefix = store.prefix()[0]; |
| var cache = transaction._cache; |
| var subCache = cache.get(prefix); |
| if (!subCache) { |
| subCache = new Map(); |
| cache.set(prefix, subCache); |
| } |
| return subCache; |
| } |
| |
| class LevelTransaction { |
| constructor() { |
| this._batch = []; |
| this._cache = new Map(); |
| } |
| |
| get(store, key, callback) { |
| var cache = getCacheFor(this, store); |
| var exists = cache.get(key); |
| if (exists) { |
| return nextTick(function () { |
| callback(null, exists); |
| }); |
| } else if (exists === null) { // deleted marker |
| /* istanbul ignore next */ |
| return nextTick(function () { |
| callback({name: 'NotFoundError'}); |
| }); |
| } |
| store.get(key, function (err, res$$1) { |
| if (err) { |
| /* istanbul ignore else */ |
| if (err.name === 'NotFoundError') { |
| cache.set(key, null); |
| } |
| return callback(err); |
| } |
| cache.set(key, res$$1); |
| callback(null, res$$1); |
| }); |
| } |
| |
| batch(batch) { |
| for (var i = 0, len = batch.length; i < len; i++) { |
| var operation = batch[i]; |
| |
| var cache = getCacheFor(this, operation.prefix); |
| |
| if (operation.type === 'put') { |
| cache.set(operation.key, operation.value); |
| } else { |
| cache.set(operation.key, null); |
| } |
| } |
| this._batch = this._batch.concat(batch); |
| } |
| |
| execute(db, callback) { |
| var keys = new Set(); |
| var uniqBatches = []; |
| |
| // remove duplicates; last one wins |
| for (var i = this._batch.length - 1; i >= 0; i--) { |
| var operation = this._batch[i]; |
| var lookupKey = operation.prefix.prefix()[0] + '\xff' + operation.key; |
| if (keys.has(lookupKey)) { |
| continue; |
| } |
| keys.add(lookupKey); |
| uniqBatches.push(operation); |
| } |
| |
| db.batch(uniqBatches, callback); |
| } |
| } |
| |
| var DOC_STORE = 'document-store'; |
| var BY_SEQ_STORE = 'by-sequence'; |
| var ATTACHMENT_STORE = 'attach-store'; |
| var BINARY_STORE = 'attach-binary-store'; |
| var LOCAL_STORE = 'local-store'; |
| var META_STORE = 'meta-store'; |
| |
| // leveldb barks if we try to open a db multiple times |
| // so we cache opened connections here for initstore() |
| var dbStores = new Map(); |
| |
| // store the value of update_seq in the by-sequence store the key name will |
| // never conflict, since the keys in the by-sequence store are integers |
| var UPDATE_SEQ_KEY = '_local_last_update_seq'; |
| var DOC_COUNT_KEY = '_local_doc_count'; |
| var UUID_KEY = '_local_uuid'; |
| |
| var MD5_PREFIX = 'md5-'; |
| |
| var safeJsonEncoding = { |
| encode: safeJsonStringify, |
| decode: safeJsonParse, |
| buffer: false, |
| type: 'cheap-json' |
| }; |
| |
| var levelChanges = new Changes(); |
| |
| // winningRev and deleted are performance-killers, but |
| // in newer versions of PouchDB, they are cached on the metadata |
| function getWinningRev(metadata) { |
| return 'winningRev' in metadata ? |
| metadata.winningRev : winningRev(metadata); |
| } |
| |
| function getIsDeleted(metadata, winningRev$$1) { |
| return 'deleted' in metadata ? |
| metadata.deleted : isDeleted(metadata, winningRev$$1); |
| } |
| |
| function fetchAttachment(att, stores, opts) { |
| var type = att.content_type; |
| return new Promise(function (resolve, reject) { |
| stores.binaryStore.get(att.digest, function (err, buffer) { |
| var data; |
| if (err) { |
| /* istanbul ignore if */ |
| if (err.name !== 'NotFoundError') { |
| return reject(err); |
| } else { |
| // empty |
| if (!opts.binary) { |
| data = ''; |
| } else { |
| data = binStringToBluffer('', type); |
| } |
| } |
| } else { // non-empty |
| if (opts.binary) { |
| data = readAsBlobOrBuffer(buffer, type); |
| } else { |
| data = buffer.toString('base64'); |
| } |
| } |
| delete att.stub; |
| delete att.length; |
| att.data = data; |
| resolve(); |
| }); |
| }); |
| } |
| |
| function fetchAttachments(results, stores, opts) { |
| var atts = []; |
| results.forEach(function (row) { |
| if (!(row.doc && row.doc._attachments)) { |
| return; |
| } |
| var attNames = Object.keys(row.doc._attachments); |
| attNames.forEach(function (attName) { |
| var att = row.doc._attachments[attName]; |
| if (!('data' in att)) { |
| atts.push(att); |
| } |
| }); |
| }); |
| |
| return Promise.all(atts.map(function (att) { |
| return fetchAttachment(att, stores, opts); |
| })); |
| } |
| |
| function LevelPouch(opts, callback) { |
| opts = clone(opts); |
| var api = this; |
| var instanceId; |
| var stores = {}; |
| var revLimit = opts.revs_limit; |
| var db; |
| var name = opts.name; |
| // TODO: this is undocumented and unused probably |
| /* istanbul ignore else */ |
| if (typeof opts.createIfMissing === 'undefined') { |
| opts.createIfMissing = true; |
| } |
| |
| var leveldown = opts.db; |
| |
| var dbStore; |
| var leveldownName = functionName(leveldown); |
| if (dbStores.has(leveldownName)) { |
| dbStore = dbStores.get(leveldownName); |
| } else { |
| dbStore = new Map(); |
| dbStores.set(leveldownName, dbStore); |
| } |
| if (dbStore.has(name)) { |
| db = dbStore.get(name); |
| afterDBCreated(); |
| } else { |
| dbStore.set(name, sublevelPouch(levelup(leveldown(name), opts, function (err) { |
| /* istanbul ignore if */ |
| if (err) { |
| dbStore.delete(name); |
| return callback(err); |
| } |
| db = dbStore.get(name); |
| db._docCount = -1; |
| db._queue = new Deque(); |
| /* istanbul ignore else */ |
| if (typeof opts.migrate === 'object') { // migration for leveldown |
| opts.migrate.doMigrationOne(name, db, afterDBCreated); |
| } else { |
| afterDBCreated(); |
| } |
| }))); |
| } |
| |
| function afterDBCreated() { |
| stores.docStore = db.sublevel(DOC_STORE, {valueEncoding: safeJsonEncoding}); |
| stores.bySeqStore = db.sublevel(BY_SEQ_STORE, {valueEncoding: 'json'}); |
| stores.attachmentStore = |
| db.sublevel(ATTACHMENT_STORE, {valueEncoding: 'json'}); |
| stores.binaryStore = db.sublevel(BINARY_STORE, {valueEncoding: 'binary'}); |
| stores.localStore = db.sublevel(LOCAL_STORE, {valueEncoding: 'json'}); |
| stores.metaStore = db.sublevel(META_STORE, {valueEncoding: 'json'}); |
| /* istanbul ignore else */ |
| if (typeof opts.migrate === 'object') { // migration for leveldown |
| opts.migrate.doMigrationTwo(db, stores, afterLastMigration); |
| } else { |
| afterLastMigration(); |
| } |
| } |
| |
| function afterLastMigration() { |
| stores.metaStore.get(UPDATE_SEQ_KEY, function (err, value) { |
| if (typeof db._updateSeq === 'undefined') { |
| db._updateSeq = value || 0; |
| } |
| stores.metaStore.get(DOC_COUNT_KEY, function (err, value) { |
| db._docCount = !err ? value : 0; |
| stores.metaStore.get(UUID_KEY, function (err, value) { |
| instanceId = !err ? value : uuid$1(); |
| stores.metaStore.put(UUID_KEY, instanceId, function () { |
| nextTick(function () { |
| callback(null, api); |
| }); |
| }); |
| }); |
| }); |
| }); |
| } |
| |
| function countDocs(callback) { |
| /* istanbul ignore if */ |
| if (db.isClosed()) { |
| return callback(new Error('database is closed')); |
| } |
| return callback(null, db._docCount); // use cached value |
| } |
| |
| api._remote = false; |
| /* istanbul ignore next */ |
| api.type = function () { |
| return 'leveldb'; |
| }; |
| |
| api._id = function (callback) { |
| callback(null, instanceId); |
| }; |
| |
| api._info = function (callback) { |
| var res$$1 = { |
| doc_count: db._docCount, |
| update_seq: db._updateSeq, |
| backend_adapter: functionName(leveldown) |
| }; |
| return nextTick(function () { |
| callback(null, res$$1); |
| }); |
| }; |
| |
| function tryCode(fun, args) { |
| try { |
| fun.apply(null, args); |
| } catch (err) { |
| args[args.length - 1](err); |
| } |
| } |
| |
| function executeNext() { |
| var firstTask = db._queue.peekFront(); |
| |
| if (firstTask.type === 'read') { |
| runReadOperation(firstTask); |
| } else { // write, only do one at a time |
| runWriteOperation(firstTask); |
| } |
| } |
| |
| function runReadOperation(firstTask) { |
| // do multiple reads at once simultaneously, because it's safe |
| |
| var readTasks = [firstTask]; |
| var i = 1; |
| var nextTask = db._queue.get(i); |
| while (typeof nextTask !== 'undefined' && nextTask.type === 'read') { |
| readTasks.push(nextTask); |
| i++; |
| nextTask = db._queue.get(i); |
| } |
| |
| var numDone = 0; |
| |
| readTasks.forEach(function (readTask) { |
| var args = readTask.args; |
| var callback = args[args.length - 1]; |
| args[args.length - 1] = function (...cbArgs) { |
| callback.apply(null, cbArgs); |
| if (++numDone === readTasks.length) { |
| nextTick(function () { |
| // all read tasks have finished |
| readTasks.forEach(function () { |
| db._queue.shift(); |
| }); |
| if (db._queue.length) { |
| executeNext(); |
| } |
| }); |
| } |
| }; |
| tryCode(readTask.fun, args); |
| }); |
| } |
| |
| function runWriteOperation(firstTask) { |
| var args = firstTask.args; |
| var callback = args[args.length - 1]; |
| args[args.length - 1] = function (...cbArgs) { |
| callback.apply(null, cbArgs); |
| nextTick(function () { |
| db._queue.shift(); |
| if (db._queue.length) { |
| executeNext(); |
| } |
| }); |
| }; |
| tryCode(firstTask.fun, args); |
| } |
| |
| // all read/write operations to the database are done in a queue, |
| // similar to how websql/idb works. this avoids problems such |
| // as e.g. compaction needing to have a lock on the database while |
| // it updates stuff. in the future we can revisit this. |
| function writeLock(fun) { |
| return function (...args) { |
| db._queue.push({ |
| fun, |
| args, |
| type: 'write' |
| }); |
| |
| if (db._queue.length === 1) { |
| nextTick(executeNext); |
| } |
| }; |
| } |
| |
| // same as the writelock, but multiple can run at once |
| function readLock(fun) { |
| return function (...args) { |
| db._queue.push({ |
| fun, |
| args, |
| type: 'read' |
| }); |
| |
| if (db._queue.length === 1) { |
| nextTick(executeNext); |
| } |
| }; |
| } |
| |
| function formatSeq(n) { |
| return ('0000000000000000' + n).slice(-16); |
| } |
| |
| function parseSeq(s) { |
| return parseInt(s, 10); |
| } |
| |
| api._get = readLock(function (id, opts, callback) { |
| opts = clone(opts); |
| |
| stores.docStore.get(id, function (err, metadata) { |
| |
| if (err || !metadata) { |
| return callback(createError(MISSING_DOC, 'missing')); |
| } |
| |
| var rev$$1; |
| if (!opts.rev) { |
| rev$$1 = getWinningRev(metadata); |
| var deleted = getIsDeleted(metadata, rev$$1); |
| if (deleted) { |
| return callback(createError(MISSING_DOC, "deleted")); |
| } |
| } else { |
| rev$$1 = opts.latest ? latest(opts.rev, metadata) : opts.rev; |
| } |
| |
| var seq = metadata.rev_map[rev$$1]; |
| |
| stores.bySeqStore.get(formatSeq(seq), function (err, doc) { |
| if (!doc) { |
| return callback(createError(MISSING_DOC)); |
| } |
| /* istanbul ignore if */ |
| if ('_id' in doc && doc._id !== metadata.id) { |
| // this failing implies something very wrong |
| return callback(new Error('wrong doc returned')); |
| } |
| doc._id = metadata.id; |
| if ('_rev' in doc) { |
| /* istanbul ignore if */ |
| if (doc._rev !== rev$$1) { |
| // this failing implies something very wrong |
| return callback(new Error('wrong doc returned')); |
| } |
| } else { |
| // we didn't always store this |
| doc._rev = rev$$1; |
| } |
| return callback(null, {doc, metadata}); |
| }); |
| }); |
| }); |
| |
| // not technically part of the spec, but if putAttachment has its own |
| // method... |
| api._getAttachment = function (docId, attachId, attachment, opts, callback) { |
| var digest = attachment.digest; |
| var type = attachment.content_type; |
| |
| stores.binaryStore.get(digest, function (err, attach) { |
| if (err) { |
| /* istanbul ignore if */ |
| if (err.name !== 'NotFoundError') { |
| return callback(err); |
| } |
| // Empty attachment |
| return callback(null, opts.binary ? createEmptyBlobOrBuffer(type) : ''); |
| } |
| |
| if (opts.binary) { |
| callback(null, readAsBlobOrBuffer(attach, type)); |
| } else { |
| callback(null, attach.toString('base64')); |
| } |
| }); |
| }; |
| |
| api._bulkDocs = writeLock(function (req, opts, callback) { |
| var newEdits = opts.new_edits; |
| var results = new Array(req.docs.length); |
| var fetchedDocs = new Map(); |
| var stemmedRevs = new Map(); |
| |
| var txn = new LevelTransaction(); |
| var docCountDelta = 0; |
| var newUpdateSeq = db._updateSeq; |
| |
| // parse the docs and give each a sequence number |
| var userDocs = req.docs; |
| var docInfos = userDocs.map(function (doc) { |
| if (doc._id && isLocalId(doc._id)) { |
| return doc; |
| } |
| var newDoc = parseDoc(doc, newEdits, api.__opts); |
| |
| if (newDoc.metadata && !newDoc.metadata.rev_map) { |
| newDoc.metadata.rev_map = {}; |
| } |
| |
| return newDoc; |
| }); |
| var infoErrors = docInfos.filter(function (doc) { |
| return doc.error; |
| }); |
| |
| if (infoErrors.length) { |
| return callback(infoErrors[0]); |
| } |
| |
| // verify any stub attachments as a precondition test |
| |
| function verifyAttachment(digest, callback) { |
| txn.get(stores.attachmentStore, digest, function (levelErr) { |
| if (levelErr) { |
| var err = createError(MISSING_STUB, |
| 'unknown stub attachment with digest ' + |
| digest); |
| callback(err); |
| } else { |
| callback(); |
| } |
| }); |
| } |
| |
| function verifyAttachments(finish) { |
| var digests = []; |
| userDocs.forEach(function (doc) { |
| if (doc && doc._attachments) { |
| Object.keys(doc._attachments).forEach(function (filename) { |
| var att = doc._attachments[filename]; |
| if (att.stub) { |
| digests.push(att.digest); |
| } |
| }); |
| } |
| }); |
| if (!digests.length) { |
| return finish(); |
| } |
| var numDone = 0; |
| var err; |
| |
| digests.forEach(function (digest) { |
| verifyAttachment(digest, function (attErr) { |
| if (attErr && !err) { |
| err = attErr; |
| } |
| |
| if (++numDone === digests.length) { |
| finish(err); |
| } |
| }); |
| }); |
| } |
| |
| function fetchExistingDocs(finish) { |
| var numDone = 0; |
| var overallErr; |
| function checkDone() { |
| if (++numDone === userDocs.length) { |
| return finish(overallErr); |
| } |
| } |
| |
| userDocs.forEach(function (doc) { |
| if (doc._id && isLocalId(doc._id)) { |
| // skip local docs |
| return checkDone(); |
| } |
| txn.get(stores.docStore, doc._id, function (err, info) { |
| if (err) { |
| /* istanbul ignore if */ |
| if (err.name !== 'NotFoundError') { |
| overallErr = err; |
| } |
| } else { |
| fetchedDocs.set(doc._id, info); |
| } |
| checkDone(); |
| }); |
| }); |
| } |
| |
| function compact(revsMap, callback) { |
| var promise = Promise.resolve(); |
| revsMap.forEach(function (revs, docId) { |
| // TODO: parallelize, for now need to be sequential to |
| // pass orphaned attachment tests |
| promise = promise.then(function () { |
| return new Promise(function (resolve, reject) { |
| api._doCompactionNoLock(docId, revs, {ctx: txn}, function (err) { |
| /* istanbul ignore if */ |
| if (err) { |
| return reject(err); |
| } |
| resolve(); |
| }); |
| }); |
| }); |
| }); |
| |
| promise.then(function () { |
| callback(); |
| }, callback); |
| } |
| |
| function autoCompact(callback) { |
| var revsMap = new Map(); |
| fetchedDocs.forEach(function (metadata, docId) { |
| revsMap.set(docId, compactTree(metadata)); |
| }); |
| compact(revsMap, callback); |
| } |
| |
| function finish() { |
| compact(stemmedRevs, function (error) { |
| /* istanbul ignore if */ |
| if (error) { |
| complete(error); |
| } |
| if (api.auto_compaction) { |
| return autoCompact(complete); |
| } |
| complete(); |
| }); |
| } |
| |
| function writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted, |
| isUpdate, delta, resultsIdx, callback2) { |
| docCountDelta += delta; |
| |
| var err = null; |
| var recv = 0; |
| |
| docInfo.metadata.winningRev = winningRev$$1; |
| docInfo.metadata.deleted = winningRevIsDeleted; |
| |
| docInfo.data._id = docInfo.metadata.id; |
| docInfo.data._rev = docInfo.metadata.rev; |
| |
| if (newRevIsDeleted) { |
| docInfo.data._deleted = true; |
| } |
| |
| if (docInfo.stemmedRevs.length) { |
| stemmedRevs.set(docInfo.metadata.id, docInfo.stemmedRevs); |
| } |
| |
| var attachments = docInfo.data._attachments ? |
| Object.keys(docInfo.data._attachments) : |
| []; |
| |
| function attachmentSaved(attachmentErr) { |
| recv++; |
| if (!err) { |
| /* istanbul ignore if */ |
| if (attachmentErr) { |
| err = attachmentErr; |
| callback2(err); |
| } else if (recv === attachments.length) { |
| finish(); |
| } |
| } |
| } |
| |
| function onMD5Load(doc, key, data, attachmentSaved) { |
| return function (result) { |
| saveAttachment(doc, MD5_PREFIX + result, key, data, attachmentSaved); |
| }; |
| } |
| |
| function doMD5(doc, key, attachmentSaved) { |
| return function (data) { |
| binaryMd5(data, onMD5Load(doc, key, data, attachmentSaved)); |
| }; |
| } |
| |
| for (var i = 0; i < attachments.length; i++) { |
| var key = attachments[i]; |
| var att = docInfo.data._attachments[key]; |
| |
| if (att.stub) { |
| // still need to update the refs mapping |
| var id = docInfo.data._id; |
| var rev$$1 = docInfo.data._rev; |
| saveAttachmentRefs(id, rev$$1, att.digest, attachmentSaved); |
| continue; |
| } |
| var data; |
| if (typeof att.data === 'string') { |
| // input is assumed to be a base64 string |
| try { |
| data = thisAtob(att.data); |
| } catch (e) { |
| callback(createError(BAD_ARG, |
| 'Attachment is not a valid base64 string')); |
| return; |
| } |
| doMD5(docInfo, key, attachmentSaved)(data); |
| } else { |
| prepareAttachmentForStorage(att.data, |
| doMD5(docInfo, key, attachmentSaved)); |
| } |
| } |
| |
| function finish() { |
| var seq = docInfo.metadata.rev_map[docInfo.metadata.rev]; |
| /* istanbul ignore if */ |
| if (seq) { |
| // check that there aren't any existing revisions with the same |
| // revision id, else we shouldn't do anything |
| return callback2(); |
| } |
| seq = ++newUpdateSeq; |
| docInfo.metadata.rev_map[docInfo.metadata.rev] = |
| docInfo.metadata.seq = seq; |
| var seqKey = formatSeq(seq); |
| var batch = [{ |
| key: seqKey, |
| value: docInfo.data, |
| prefix: stores.bySeqStore, |
| type: 'put' |
| }, { |
| key: docInfo.metadata.id, |
| value: docInfo.metadata, |
| prefix: stores.docStore, |
| type: 'put' |
| }]; |
| txn.batch(batch); |
| results[resultsIdx] = { |
| ok: true, |
| id: docInfo.metadata.id, |
| rev: docInfo.metadata.rev |
| }; |
| fetchedDocs.set(docInfo.metadata.id, docInfo.metadata); |
| callback2(); |
| } |
| |
| if (!attachments.length) { |
| finish(); |
| } |
| } |
| |
| // attachments are queued per-digest, otherwise the refs could be |
| // overwritten by concurrent writes in the same bulkDocs session |
| var attachmentQueues = {}; |
| |
| function saveAttachmentRefs(id, rev$$1, digest, callback) { |
| |
| function fetchAtt() { |
| return new Promise(function (resolve, reject) { |
| txn.get(stores.attachmentStore, digest, function (err, oldAtt) { |
| /* istanbul ignore if */ |
| if (err && err.name !== 'NotFoundError') { |
| return reject(err); |
| } |
| resolve(oldAtt); |
| }); |
| }); |
| } |
| |
| function saveAtt(oldAtt) { |
| var ref = [id, rev$$1].join('@'); |
| var newAtt = {}; |
| |
| if (oldAtt) { |
| if (oldAtt.refs) { |
| // only update references if this attachment already has them |
| // since we cannot migrate old style attachments here without |
| // doing a full db scan for references |
| newAtt.refs = oldAtt.refs; |
| newAtt.refs[ref] = true; |
| } |
| } else { |
| newAtt.refs = {}; |
| newAtt.refs[ref] = true; |
| } |
| |
| return new Promise(function (resolve) { |
| txn.batch([{ |
| type: 'put', |
| prefix: stores.attachmentStore, |
| key: digest, |
| value: newAtt |
| }]); |
| resolve(!oldAtt); |
| }); |
| } |
| |
| // put attachments in a per-digest queue, to avoid two docs with the same |
| // attachment overwriting each other |
| var queue = attachmentQueues[digest] || Promise.resolve(); |
| attachmentQueues[digest] = queue.then(function () { |
| return fetchAtt().then(saveAtt).then(function (isNewAttachment) { |
| callback(null, isNewAttachment); |
| }, callback); |
| }); |
| } |
| |
| function saveAttachment(docInfo, digest, key, data, callback) { |
| var att = docInfo.data._attachments[key]; |
| delete att.data; |
| att.digest = digest; |
| att.length = data.length; |
| var id = docInfo.metadata.id; |
| var rev$$1 = docInfo.metadata.rev; |
| att.revpos = parseInt(rev$$1, 10); |
| |
| saveAttachmentRefs(id, rev$$1, digest, function (err, isNewAttachment) { |
| /* istanbul ignore if */ |
| if (err) { |
| return callback(err); |
| } |
| // do not try to store empty attachments |
| if (data.length === 0) { |
| return callback(err); |
| } |
| if (!isNewAttachment) { |
| // small optimization - don't bother writing it again |
| return callback(err); |
| } |
| txn.batch([{ |
| type: 'put', |
| prefix: stores.binaryStore, |
| key: digest, |
| value: Buffer.from(data, 'binary') |
| }]); |
| callback(); |
| }); |
| } |
| |
| function complete(err) { |
| /* istanbul ignore if */ |
| if (err) { |
| return nextTick(function () { |
| callback(err); |
| }); |
| } |
| txn.batch([ |
| { |
| prefix: stores.metaStore, |
| type: 'put', |
| key: UPDATE_SEQ_KEY, |
| value: newUpdateSeq |
| }, |
| { |
| prefix: stores.metaStore, |
| type: 'put', |
| key: DOC_COUNT_KEY, |
| value: db._docCount + docCountDelta |
| } |
| ]); |
| txn.execute(db, function (err) { |
| /* istanbul ignore if */ |
| if (err) { |
| return callback(err); |
| } |
| db._docCount += docCountDelta; |
| db._updateSeq = newUpdateSeq; |
| levelChanges.notify(name); |
| nextTick(function () { |
| callback(null, results); |
| }); |
| }); |
| } |
| |
| if (!docInfos.length) { |
| return callback(null, []); |
| } |
| |
| verifyAttachments(function (err) { |
| if (err) { |
| return callback(err); |
| } |
| fetchExistingDocs(function (err) { |
| /* istanbul ignore if */ |
| if (err) { |
| return callback(err); |
| } |
| processDocs(revLimit, docInfos, api, fetchedDocs, txn, results, |
| writeDoc, opts, finish); |
| }); |
| }); |
| }); |
| api._allDocs = function (opts, callback) { |
| if ('keys' in opts) { |
| return allDocsKeysQuery(this, opts); |
| } |
| return readLock(function (opts, callback) { |
| opts = clone(opts); |
| countDocs(function (err, docCount) { |
| /* istanbul ignore if */ |
| if (err) { |
| return callback(err); |
| } |
| var readstreamOpts = {}; |
| var skip = opts.skip || 0; |
| if (opts.startkey) { |
| readstreamOpts.gte = opts.startkey; |
| } |
| if (opts.endkey) { |
| readstreamOpts.lte = opts.endkey; |
| } |
| if (opts.key) { |
| readstreamOpts.gte = readstreamOpts.lte = opts.key; |
| } |
| if (opts.descending) { |
| readstreamOpts.reverse = true; |
| // switch start and ends |
| var tmp = readstreamOpts.lte; |
| readstreamOpts.lte = readstreamOpts.gte; |
| readstreamOpts.gte = tmp; |
| } |
| var limit; |
| if (typeof opts.limit === 'number') { |
| limit = opts.limit; |
| } |
| if (limit === 0 || |
| ('gte' in readstreamOpts && 'lte' in readstreamOpts && |
| readstreamOpts.gte > readstreamOpts.lte)) { |
| // should return 0 results when start is greater than end. |
| // normally level would "fix" this for us by reversing the order, |
| // so short-circuit instead |
| var returnVal = { |
| total_rows: docCount, |
| offset: opts.skip, |
| rows: [] |
| }; |
| /* istanbul ignore if */ |
| if (opts.update_seq) { |
| returnVal.update_seq = db._updateSeq; |
| } |
| return callback(null, returnVal); |
| } |
| var results = []; |
| var docstream = stores.docStore.readStream(readstreamOpts); |
| |
| var throughStream = through2.obj(function (entry, _, next) { |
| var metadata = entry.value; |
| // winningRev and deleted are performance-killers, but |
| // in newer versions of PouchDB, they are cached on the metadata |
| var winningRev$$1 = getWinningRev(metadata); |
| var deleted = getIsDeleted(metadata, winningRev$$1); |
| if (!deleted) { |
| if (skip-- > 0) { |
| next(); |
| return; |
| } else if (typeof limit === 'number' && limit-- <= 0) { |
| docstream.unpipe(); |
| docstream.destroy(); |
| next(); |
| return; |
| } |
| } else if (opts.deleted !== 'ok') { |
| next(); |
| return; |
| } |
| function allDocsInner(data) { |
| var doc = { |
| id: metadata.id, |
| key: metadata.id, |
| value: { |
| rev: winningRev$$1 |
| } |
| }; |
| if (opts.include_docs) { |
| doc.doc = data; |
| doc.doc._rev = doc.value.rev; |
| if (opts.conflicts) { |
| var conflicts = collectConflicts(metadata); |
| if (conflicts.length) { |
| doc.doc._conflicts = conflicts; |
| } |
| } |
| for (var att in doc.doc._attachments) { |
| if (Object.prototype.hasOwnProperty.call(doc.doc._attachments, att)) { |
| doc.doc._attachments[att].stub = true; |
| } |
| } |
| } |
| if (opts.inclusive_end === false && metadata.id === opts.endkey) { |
| return next(); |
| } else if (deleted) { |
| if (opts.deleted === 'ok') { |
| doc.value.deleted = true; |
| doc.doc = null; |
| } else { |
| /* istanbul ignore next */ |
| return next(); |
| } |
| } |
| results.push(doc); |
| next(); |
| } |
| if (opts.include_docs) { |
| var seq = metadata.rev_map[winningRev$$1]; |
| stores.bySeqStore.get(formatSeq(seq), function (err, data) { |
| allDocsInner(data); |
| }); |
| } |
| else { |
| allDocsInner(); |
| } |
| }, function (next) { |
| Promise.resolve().then(function () { |
| if (opts.include_docs && opts.attachments) { |
| return fetchAttachments(results, stores, opts); |
| } |
| }).then(function () { |
| var returnVal = { |
| total_rows: docCount, |
| offset: opts.skip, |
| rows: results |
| }; |
| |
| /* istanbul ignore if */ |
| if (opts.update_seq) { |
| returnVal.update_seq = db._updateSeq; |
| } |
| callback(null, returnVal); |
| }, callback); |
| next(); |
| }).on('unpipe', function () { |
| throughStream.end(); |
| }); |
| |
| docstream.on('error', callback); |
| |
| docstream.pipe(throughStream); |
| }); |
| })(opts, callback); |
| }; |
| |
| api._changes = function (opts) { |
| opts = clone(opts); |
| |
| if (opts.continuous) { |
| var id = name + ':' + uuid$1(); |
| levelChanges.addListener(name, id, api, opts); |
| levelChanges.notify(name); |
| return { |
| cancel: function () { |
| levelChanges.removeListener(name, id); |
| } |
| }; |
| } |
| |
| var descending = opts.descending; |
| var results = []; |
| var lastSeq = opts.since || 0; |
| var called = 0; |
| var streamOpts = { |
| reverse: descending |
| }; |
| var limit; |
| if ('limit' in opts && opts.limit > 0) { |
| limit = opts.limit; |
| } |
| if (!streamOpts.reverse) { |
| streamOpts.start = formatSeq(opts.since || 0); |
| } |
| |
| var docIds = opts.doc_ids && new Set(opts.doc_ids); |
| var filter = filterChange(opts); |
| var docIdsToMetadata = new Map(); |
| |
| function complete() { |
| opts.done = true; |
| if (opts.return_docs && opts.limit) { |
| /* istanbul ignore if */ |
| if (opts.limit < results.length) { |
| results.length = opts.limit; |
| } |
| } |
| changeStream.unpipe(throughStream); |
| changeStream.destroy(); |
| if (!opts.continuous && !opts.cancelled) { |
| if (opts.include_docs && opts.attachments && opts.return_docs) { |
| fetchAttachments(results, stores, opts).then(function () { |
| opts.complete(null, {results, last_seq: lastSeq}); |
| }); |
| } else { |
| opts.complete(null, {results, last_seq: lastSeq}); |
| } |
| } |
| } |
| var changeStream = stores.bySeqStore.readStream(streamOpts); |
| var throughStream = through2.obj(function (data, _, next) { |
| if (limit && called >= limit) { |
| complete(); |
| return next(); |
| } |
| if (opts.cancelled || opts.done) { |
| return next(); |
| } |
| |
| var seq = parseSeq(data.key); |
| var doc = data.value; |
| |
| if (seq === opts.since && !descending) { |
| // couchdb ignores `since` if descending=true |
| return next(); |
| } |
| |
| if (docIds && !docIds.has(doc._id)) { |
| return next(); |
| } |
| |
| var metadata; |
| |
| function onGetMetadata(metadata) { |
| var winningRev$$1 = getWinningRev(metadata); |
| |
| function onGetWinningDoc(winningDoc) { |
| |
| var change = opts.processChange(winningDoc, metadata, opts); |
| change.seq = metadata.seq; |
| |
| var filtered = filter(change); |
| if (typeof filtered === 'object') { |
| return opts.complete(filtered); |
| } |
| |
| if (filtered) { |
| called++; |
| |
| if (opts.attachments && opts.include_docs) { |
| // fetch attachment immediately for the benefit |
| // of live listeners |
| fetchAttachments([change], stores, opts).then(function () { |
| opts.onChange(change); |
| }); |
| } else { |
| opts.onChange(change); |
| } |
| |
| if (opts.return_docs) { |
| results.push(change); |
| } |
| } |
| next(); |
| } |
| |
| if (metadata.seq !== seq) { |
| // some other seq is later |
| return next(); |
| } |
| |
| lastSeq = seq; |
| |
| if (winningRev$$1 === doc._rev) { |
| return onGetWinningDoc(doc); |
| } |
| |
| // fetch the winner |
| |
| var winningSeq = metadata.rev_map[winningRev$$1]; |
| |
| stores.bySeqStore.get(formatSeq(winningSeq), function (err, doc) { |
| onGetWinningDoc(doc); |
| }); |
| } |
| |
| metadata = docIdsToMetadata.get(doc._id); |
| if (metadata) { // cached |
| return onGetMetadata(metadata); |
| } |
| // metadata not cached, have to go fetch it |
| stores.docStore.get(doc._id, function (err, metadata) { |
| /* istanbul ignore if */ |
| if (opts.cancelled || opts.done || db.isClosed() || |
| isLocalId(metadata.id)) { |
| return next(); |
| } |
| docIdsToMetadata.set(doc._id, metadata); |
| onGetMetadata(metadata); |
| }); |
| }, function (next) { |
| if (opts.cancelled) { |
| return next(); |
| } |
| if (opts.return_docs && opts.limit) { |
| /* istanbul ignore if */ |
| if (opts.limit < results.length) { |
| results.length = opts.limit; |
| } |
| } |
| |
| next(); |
| }).on('unpipe', function () { |
| throughStream.end(); |
| complete(); |
| }); |
| changeStream.pipe(throughStream); |
| return { |
| cancel: function () { |
| opts.cancelled = true; |
| complete(); |
| } |
| }; |
| }; |
| |
| api._close = function (callback) { |
| /* istanbul ignore if */ |
| if (db.isClosed()) { |
| return callback(createError(NOT_OPEN)); |
| } |
| db.close(function (err) { |
| /* istanbul ignore if */ |
| if (err) { |
| callback(err); |
| } else { |
| dbStore.delete(name); |
| |
| var adapterName = functionName(leveldown); |
| var adapterStore = dbStores.get(adapterName); |
| var viewNamePrefix = PouchDB.prefix + name + "-mrview-"; |
| var keys = [...adapterStore.keys()].filter(k => k.includes(viewNamePrefix)); |
| keys.forEach(key => { |
| var eventEmitter = adapterStore.get(key); |
| eventEmitter.removeAllListeners(); |
| eventEmitter.close(); |
| adapterStore.delete(key); |
| }); |
| |
| callback(); |
| } |
| }); |
| }; |
| |
| api._getRevisionTree = function (docId, callback) { |
| stores.docStore.get(docId, function (err, metadata) { |
| if (err) { |
| callback(createError(MISSING_DOC)); |
| } else { |
| callback(null, metadata.rev_tree); |
| } |
| }); |
| }; |
| |
| api._doCompaction = writeLock(function (docId, revs, opts, callback) { |
| api._doCompactionNoLock(docId, revs, opts, callback); |
| }); |
| |
| // the NoLock version is for use by bulkDocs |
| api._doCompactionNoLock = function (docId, revs, opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| |
| if (!revs.length) { |
| return callback(); |
| } |
| var txn = opts.ctx || new LevelTransaction(); |
| |
| txn.get(stores.docStore, docId, function (err, metadata) { |
| /* istanbul ignore if */ |
| if (err) { |
| return callback(err); |
| } |
| var seqs = revs.map(function (rev$$1) { |
| var seq = metadata.rev_map[rev$$1]; |
| delete metadata.rev_map[rev$$1]; |
| return seq; |
| }); |
| traverseRevTree(metadata.rev_tree, function (isLeaf, pos, |
| revHash, ctx, opts) { |
| var rev$$1 = pos + '-' + revHash; |
| if (revs.indexOf(rev$$1) !== -1) { |
| opts.status = 'missing'; |
| } |
| }); |
| |
| var batch = []; |
| batch.push({ |
| key: metadata.id, |
| value: metadata, |
| type: 'put', |
| prefix: stores.docStore |
| }); |
| |
| var digestMap = {}; |
| var numDone = 0; |
| var overallErr; |
| function checkDone(err) { |
| /* istanbul ignore if */ |
| if (err) { |
| overallErr = err; |
| } |
| if (++numDone === revs.length) { // done |
| /* istanbul ignore if */ |
| if (overallErr) { |
| return callback(overallErr); |
| } |
| deleteOrphanedAttachments(); |
| } |
| } |
| |
| function finish(err) { |
| /* istanbul ignore if */ |
| if (err) { |
| return callback(err); |
| } |
| txn.batch(batch); |
| if (opts.ctx) { |
| // don't execute immediately |
| return callback(); |
| } |
| txn.execute(db, callback); |
| } |
| |
| function deleteOrphanedAttachments() { |
| var possiblyOrphanedAttachments = Object.keys(digestMap); |
| if (!possiblyOrphanedAttachments.length) { |
| return finish(); |
| } |
| var numDone = 0; |
| var overallErr; |
| function checkDone(err) { |
| /* istanbul ignore if */ |
| if (err) { |
| overallErr = err; |
| } |
| if (++numDone === possiblyOrphanedAttachments.length) { |
| finish(overallErr); |
| } |
| } |
| var refsToDelete = new Map(); |
| revs.forEach(function (rev$$1) { |
| refsToDelete.set(docId + '@' + rev$$1, true); |
| }); |
| possiblyOrphanedAttachments.forEach(function (digest) { |
| txn.get(stores.attachmentStore, digest, function (err, attData) { |
| /* istanbul ignore if */ |
| if (err) { |
| if (err.name === 'NotFoundError') { |
| return checkDone(); |
| } else { |
| return checkDone(err); |
| } |
| } |
| var refs = Object.keys(attData.refs || {}).filter(function (ref) { |
| return !refsToDelete.has(ref); |
| }); |
| var newRefs = {}; |
| refs.forEach(function (ref) { |
| newRefs[ref] = true; |
| }); |
| if (refs.length) { // not orphaned |
| batch.push({ |
| key: digest, |
| type: 'put', |
| value: {refs: newRefs}, |
| prefix: stores.attachmentStore |
| }); |
| } else { // orphaned, can safely delete |
| batch = batch.concat([{ |
| key: digest, |
| type: 'del', |
| prefix: stores.attachmentStore |
| }, { |
| key: digest, |
| type: 'del', |
| prefix: stores.binaryStore |
| }]); |
| } |
| checkDone(); |
| }); |
| }); |
| } |
| |
| seqs.forEach(function (seq) { |
| batch.push({ |
| key: formatSeq(seq), |
| type: 'del', |
| prefix: stores.bySeqStore |
| }); |
| txn.get(stores.bySeqStore, formatSeq(seq), function (err, doc) { |
| /* istanbul ignore if */ |
| if (err) { |
| if (err.name === 'NotFoundError') { |
| return checkDone(); |
| } else { |
| return checkDone(err); |
| } |
| } |
| var atts = Object.keys(doc._attachments || {}); |
| atts.forEach(function (attName) { |
| var digest = doc._attachments[attName].digest; |
| digestMap[digest] = true; |
| }); |
| checkDone(); |
| }); |
| }); |
| }); |
| }; |
| |
| api._getLocal = function (id, callback) { |
| stores.localStore.get(id, function (err, doc) { |
| if (err) { |
| callback(createError(MISSING_DOC)); |
| } else { |
| callback(null, doc); |
| } |
| }); |
| }; |
| |
| api._putLocal = function (doc, opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| if (opts.ctx) { |
| api._putLocalNoLock(doc, opts, callback); |
| } else { |
| api._putLocalWithLock(doc, opts, callback); |
| } |
| }; |
| |
| api._putLocalWithLock = writeLock(function (doc, opts, callback) { |
| api._putLocalNoLock(doc, opts, callback); |
| }); |
| |
| // the NoLock version is for use by bulkDocs |
| api._putLocalNoLock = function (doc, opts, callback) { |
| delete doc._revisions; // ignore this, trust the rev |
| var oldRev = doc._rev; |
| var id = doc._id; |
| |
| var txn = opts.ctx || new LevelTransaction(); |
| |
| txn.get(stores.localStore, id, function (err, resp) { |
| if (err && oldRev) { |
| return callback(createError(REV_CONFLICT)); |
| } |
| if (resp && resp._rev !== oldRev) { |
| return callback(createError(REV_CONFLICT)); |
| } |
| doc._rev = |
| oldRev ? '0-' + (parseInt(oldRev.split('-')[1], 10) + 1) : '0-1'; |
| var batch = [ |
| { |
| type: 'put', |
| prefix: stores.localStore, |
| key: id, |
| value: doc |
| } |
| ]; |
| |
| txn.batch(batch); |
| var ret = {ok: true, id: doc._id, rev: doc._rev}; |
| |
| if (opts.ctx) { |
| // don't execute immediately |
| return callback(null, ret); |
| } |
| txn.execute(db, function (err) { |
| /* istanbul ignore if */ |
| if (err) { |
| return callback(err); |
| } |
| callback(null, ret); |
| }); |
| }); |
| }; |
| |
| api._removeLocal = function (doc, opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| if (opts.ctx) { |
| api._removeLocalNoLock(doc, opts, callback); |
| } else { |
| api._removeLocalWithLock(doc, opts, callback); |
| } |
| }; |
| |
| api._removeLocalWithLock = writeLock(function (doc, opts, callback) { |
| api._removeLocalNoLock(doc, opts, callback); |
| }); |
| |
| // the NoLock version is for use by bulkDocs |
| api._removeLocalNoLock = function (doc, opts, callback) { |
| var txn = opts.ctx || new LevelTransaction(); |
| txn.get(stores.localStore, doc._id, function (err, resp) { |
| if (err) { |
| /* istanbul ignore if */ |
| if (err.name !== 'NotFoundError') { |
| return callback(err); |
| } else { |
| return callback(createError(MISSING_DOC)); |
| } |
| } |
| if (resp._rev !== doc._rev) { |
| return callback(createError(REV_CONFLICT)); |
| } |
| txn.batch([{ |
| prefix: stores.localStore, |
| type: 'del', |
| key: doc._id |
| }]); |
| var ret = {ok: true, id: doc._id, rev: '0-0'}; |
| if (opts.ctx) { |
| // don't execute immediately |
| return callback(null, ret); |
| } |
| txn.execute(db, function (err) { |
| /* istanbul ignore if */ |
| if (err) { |
| return callback(err); |
| } |
| callback(null, ret); |
| }); |
| }); |
| }; |
| |
| // close and delete open leveldb stores |
| api._destroy = function (opts, callback) { |
| var dbStore; |
| var leveldownName = functionName(leveldown); |
| /* istanbul ignore else */ |
| if (dbStores.has(leveldownName)) { |
| dbStore = dbStores.get(leveldownName); |
| } else { |
| return callDestroy(name, callback); |
| } |
| |
| /* istanbul ignore else */ |
| if (dbStore.has(name)) { |
| levelChanges.removeAllListeners(name); |
| |
| dbStore.get(name).close(function () { |
| dbStore.delete(name); |
| callDestroy(name, callback); |
| }); |
| } else { |
| callDestroy(name, callback); |
| } |
| }; |
| function callDestroy(name, cb) { |
| // May not exist if leveldown is backed by memory adapter |
| /* istanbul ignore else */ |
| if ('destroy' in leveldown) { |
| leveldown.destroy(name, cb); |
| } else { |
| cb(null); |
| } |
| } |
| } |
| |
| // require leveldown. provide verbose output on error as it is the default |
| // nodejs adapter, which we do not provide for the user |
| /* istanbul ignore next */ |
| var requireLeveldown = function () { |
| try { |
| return require('leveldown'); |
| } catch (err) { |
| /* eslint no-ex-assign: 0*/ |
| err = err || 'leveldown import error'; |
| if (err.code === 'MODULE_NOT_FOUND') { |
| // handle leveldown not installed case |
| return new Error([ |
| 'the \'leveldown\' package is not available. install it, or,', |
| 'specify another storage backend using the \'db\' option' |
| ].join(' ')); |
| } else if (err.message && err.message.match('Module version mismatch')) { |
| // handle common user environment error |
| return new Error([ |
| err.message, |
| 'This generally implies that leveldown was built with a different', |
| 'version of node than that which is running now. You may try', |
| 'fully removing and reinstalling PouchDB or leveldown to resolve.' |
| ].join(' ')); |
| } |
| // handle general internal nodejs require error |
| return new Error(err.toString() + ': unable to import leveldown'); |
| } |
| }; |
| |
| var stores = [ |
| 'document-store', |
| 'by-sequence', |
| 'attach-store', |
| 'attach-binary-store' |
| ]; |
| function formatSeq(n) { |
| return ('0000000000000000' + n).slice(-16); |
| } |
| var UPDATE_SEQ_KEY$1 = '_local_last_update_seq'; |
| var DOC_COUNT_KEY$1 = '_local_doc_count'; |
| var UUID_KEY$1 = '_local_uuid'; |
| |
| var doMigrationOne = function (name, db, callback) { |
| // local require to prevent crashing if leveldown isn't installed. |
| var leveldown = require("leveldown"); |
| |
| var base = path.resolve(name); |
| function move(store, index, cb) { |
| var storePath = path.join(base, store); |
| var opts; |
| if (index === 3) { |
| opts = { |
| valueEncoding: 'binary' |
| }; |
| } else { |
| opts = { |
| valueEncoding: 'json' |
| }; |
| } |
| var sub = db.sublevel(store, opts); |
| var orig = level(storePath, opts); |
| var from = orig.createReadStream(); |
| var writeStream = new LevelWriteStream(sub); |
| var to = writeStream(); |
| from.on('end', function () { |
| orig.close(function (err) { |
| cb(err, storePath); |
| }); |
| }); |
| from.pipe(to); |
| } |
| fs.unlink(base + '.uuid', function (err) { |
| if (err) { |
| return callback(); |
| } |
| var todo = 4; |
| var done = []; |
| stores.forEach(function (store, i) { |
| move(store, i, function (err, storePath) { |
| /* istanbul ignore if */ |
| if (err) { |
| return callback(err); |
| } |
| done.push(storePath); |
| if (!(--todo)) { |
| done.forEach(function (item) { |
| leveldown.destroy(item, function () { |
| if (++todo === done.length) { |
| fs.rmdir(base, callback); |
| } |
| }); |
| }); |
| } |
| }); |
| }); |
| }); |
| }; |
| var doMigrationTwo = function (db, stores, callback) { |
| var batches = []; |
| stores.bySeqStore.get(UUID_KEY$1, function (err, value) { |
| if (err) { |
| // no uuid key, so don't need to migrate; |
| return callback(); |
| } |
| batches.push({ |
| key: UUID_KEY$1, |
| value, |
| prefix: stores.metaStore, |
| type: 'put', |
| valueEncoding: 'json' |
| }); |
| batches.push({ |
| key: UUID_KEY$1, |
| prefix: stores.bySeqStore, |
| type: 'del' |
| }); |
| stores.bySeqStore.get(DOC_COUNT_KEY$1, function (err, value) { |
| if (value) { |
| // if no doc count key, |
| // just skip |
| // we can live with this |
| batches.push({ |
| key: DOC_COUNT_KEY$1, |
| value, |
| prefix: stores.metaStore, |
| type: 'put', |
| valueEncoding: 'json' |
| }); |
| batches.push({ |
| key: DOC_COUNT_KEY$1, |
| prefix: stores.bySeqStore, |
| type: 'del' |
| }); |
| } |
| stores.bySeqStore.get(UPDATE_SEQ_KEY$1, function (err, value) { |
| if (value) { |
| // if no UPDATE_SEQ_KEY |
| // just skip |
| // we've gone to far to stop. |
| batches.push({ |
| key: UPDATE_SEQ_KEY$1, |
| value, |
| prefix: stores.metaStore, |
| type: 'put', |
| valueEncoding: 'json' |
| }); |
| batches.push({ |
| key: UPDATE_SEQ_KEY$1, |
| prefix: stores.bySeqStore, |
| type: 'del' |
| }); |
| } |
| var deletedSeqs = {}; |
| stores.docStore.createReadStream({ |
| startKey: '_', |
| endKey: '_\xFF' |
| }).pipe(through2.obj(function (ch, _, next) { |
| if (!isLocalId(ch.key)) { |
| return next(); |
| } |
| batches.push({ |
| key: ch.key, |
| prefix: stores.docStore, |
| type: 'del' |
| }); |
| var winner = winningRev(ch.value); |
| Object.keys(ch.value.rev_map).forEach(function (key) { |
| if (key !== 'winner') { |
| this.push(formatSeq(ch.value.rev_map[key])); |
| } |
| }, this); |
| var winningSeq = ch.value.rev_map[winner]; |
| stores.bySeqStore.get(formatSeq(winningSeq), function (err, value) { |
| if (!err) { |
| batches.push({ |
| key: ch.key, |
| value, |
| prefix: stores.localStore, |
| type: 'put', |
| valueEncoding: 'json' |
| }); |
| } |
| next(); |
| }); |
| |
| })).pipe(through2.obj(function (seq, _, next) { |
| /* istanbul ignore if */ |
| if (deletedSeqs[seq]) { |
| return next(); |
| } |
| deletedSeqs[seq] = true; |
| stores.bySeqStore.get(seq, function (err, resp) { |
| /* istanbul ignore if */ |
| if (err || !isLocalId(resp._id)) { |
| return next(); |
| } |
| batches.push({ |
| key: seq, |
| prefix: stores.bySeqStore, |
| type: 'del' |
| }); |
| next(); |
| }); |
| }, function () { |
| db.batch(batches, callback); |
| })); |
| }); |
| }); |
| }); |
| |
| }; |
| |
| var migrate = { |
| doMigrationOne, |
| doMigrationTwo |
| }; |
| |
| function LevelDownPouch(opts, callback) { |
| |
| // Users can pass in their own leveldown alternative here, in which case |
| // it overrides the default one. (This is in addition to the custom builds.) |
| var leveldown = opts.db; |
| |
| /* istanbul ignore else */ |
| if (!leveldown) { |
| leveldown = requireLeveldown(); |
| |
| /* istanbul ignore if */ |
| if (leveldown instanceof Error) { |
| return callback(leveldown); |
| } |
| } |
| |
| var _opts = Object.assign({ |
| db: leveldown, |
| migrate |
| }, opts); |
| |
| LevelPouch.call(this, _opts, callback); |
| } |
| |
| // overrides for normal LevelDB behavior on Node |
| LevelDownPouch.valid = function () { |
| return true; |
| }; |
| LevelDownPouch.use_prefix = false; |
| |
| function LevelPouch$1 (PouchDB) { |
| PouchDB.adapter('leveldb', LevelDownPouch, true); |
| } |
| |
| // dead simple promise pool, inspired by https://github.com/timdp/es6-promise-pool |
| // but much smaller in code size. limits the number of concurrent promises that are executed |
| |
| |
| function pool(promiseFactories, limit) { |
| return new Promise(function (resolve, reject) { |
| var running = 0; |
| var current = 0; |
| var done = 0; |
| var len = promiseFactories.length; |
| var err; |
| |
| function runNext() { |
| running++; |
| promiseFactories[current++]().then(onSuccess, onError); |
| } |
| |
| function doNext() { |
| if (++done === len) { |
| /* istanbul ignore if */ |
| if (err) { |
| reject(err); |
| } else { |
| resolve(); |
| } |
| } else { |
| runNextBatch(); |
| } |
| } |
| |
| function onSuccess() { |
| running--; |
| doNext(); |
| } |
| |
| /* istanbul ignore next */ |
| function onError(thisErr) { |
| running--; |
| err = err || thisErr; |
| doNext(); |
| } |
| |
| function runNextBatch() { |
| while (running < limit && current < len) { |
| runNext(); |
| } |
| } |
| |
| runNextBatch(); |
| }); |
| } |
| |
| const CHANGES_BATCH_SIZE = 25; |
| const MAX_SIMULTANEOUS_REVS = 50; |
| const CHANGES_TIMEOUT_BUFFER = 5000; |
| const DEFAULT_HEARTBEAT = 10000; |
| |
| const supportsBulkGetMap = {}; |
| |
| function readAttachmentsAsBlobOrBuffer(row) { |
| const doc = row.doc || row.ok; |
| const atts = doc && doc._attachments; |
| if (!atts) { |
| return; |
| } |
| Object.keys(atts).forEach(function (filename) { |
| const att = atts[filename]; |
| att.data = b64ToBluffer(att.data, att.content_type); |
| }); |
| } |
| |
| function encodeDocId(id) { |
| if (/^_design/.test(id)) { |
| return '_design/' + encodeURIComponent(id.slice(8)); |
| } |
| if (id.startsWith('_local/')) { |
| return '_local/' + encodeURIComponent(id.slice(7)); |
| } |
| return encodeURIComponent(id); |
| } |
| |
| function preprocessAttachments$1(doc) { |
| if (!doc._attachments || !Object.keys(doc._attachments)) { |
| return Promise.resolve(); |
| } |
| |
| return Promise.all(Object.keys(doc._attachments).map(function (key) { |
| const attachment = doc._attachments[key]; |
| if (attachment.data && typeof attachment.data !== 'string') { |
| return new Promise(function (resolve) { |
| blobToBase64(attachment.data, resolve); |
| }).then(function (b64) { |
| attachment.data = b64; |
| }); |
| } |
| })); |
| } |
| |
| function hasUrlPrefix(opts) { |
| if (!opts.prefix) { |
| return false; |
| } |
| const protocol = parseUri(opts.prefix).protocol; |
| return protocol === 'http' || protocol === 'https'; |
| } |
| |
| // Get all the information you possibly can about the URI given by name and |
| // return it as a suitable object. |
| function getHost(name, opts) { |
| // encode db name if opts.prefix is a url (#5574) |
| if (hasUrlPrefix(opts)) { |
| const dbName = opts.name.substr(opts.prefix.length); |
| // Ensure prefix has a trailing slash |
| const prefix = opts.prefix.replace(/\/?$/, '/'); |
| name = prefix + encodeURIComponent(dbName); |
| } |
| |
| const uri = parseUri(name); |
| if (uri.user || uri.password) { |
| uri.auth = {username: uri.user, password: uri.password}; |
| } |
| |
| // Split the path part of the URI into parts using '/' as the delimiter |
| // after removing any leading '/' and any trailing '/' |
| const parts = uri.path.replace(/(^\/|\/$)/g, '').split('/'); |
| |
| uri.db = parts.pop(); |
| // Prevent double encoding of URI component |
| if (uri.db.indexOf('%') === -1) { |
| uri.db = encodeURIComponent(uri.db); |
| } |
| |
| uri.path = parts.join('/'); |
| |
| return uri; |
| } |
| |
| // Generate a URL with the host data given by opts and the given path |
| function genDBUrl(opts, path$$1) { |
| return genUrl(opts, opts.db + '/' + path$$1); |
| } |
| |
| // Generate a URL with the host data given by opts and the given path |
| function genUrl(opts, path$$1) { |
| // If the host already has a path, then we need to have a path delimiter |
| // Otherwise, the path delimiter is the empty string |
| const pathDel = !opts.path ? '' : '/'; |
| |
| // If the host already has a path, then we need to have a path delimiter |
| // Otherwise, the path delimiter is the empty string |
| return opts.protocol + '://' + opts.host + |
| (opts.port ? (':' + opts.port) : '') + |
| '/' + opts.path + pathDel + path$$1; |
| } |
| |
| function paramsToStr(params) { |
| const paramKeys = Object.keys(params); |
| if (paramKeys.length === 0) { |
| return ''; |
| } |
| |
| return '?' + paramKeys.map(key => key + '=' + encodeURIComponent(params[key])).join('&'); |
| } |
| |
| function shouldCacheBust(opts) { |
| const ua = (typeof navigator !== 'undefined' && navigator.userAgent) ? |
| navigator.userAgent.toLowerCase() : ''; |
| const isIE = ua.indexOf('msie') !== -1; |
| const isTrident = ua.indexOf('trident') !== -1; |
| const isEdge = ua.indexOf('edge') !== -1; |
| const isGET = !('method' in opts) || opts.method === 'GET'; |
| return (isIE || isTrident || isEdge) && isGET; |
| } |
| |
| // Implements the PouchDB API for dealing with CouchDB instances over HTTP |
| function HttpPouch(opts, callback) { |
| |
| // The functions that will be publicly available for HttpPouch |
| const api = this; |
| |
| const host = getHost(opts.name, opts); |
| const dbUrl = genDBUrl(host, ''); |
| |
| opts = clone(opts); |
| |
| const ourFetch = async function (url, options) { |
| |
| options = options || {}; |
| options.headers = options.headers || new nodeFetch.Headers(); |
| |
| options.credentials = 'include'; |
| |
| if (opts.auth || host.auth) { |
| const nAuth = opts.auth || host.auth; |
| const str = nAuth.username + ':' + nAuth.password; |
| const token = thisBtoa(unescape(encodeURIComponent(str))); |
| options.headers.set('Authorization', 'Basic ' + token); |
| } |
| |
| const headers = opts.headers || {}; |
| Object.keys(headers).forEach(function (key) { |
| options.headers.append(key, headers[key]); |
| }); |
| |
| /* istanbul ignore if */ |
| if (shouldCacheBust(options)) { |
| url += (url.indexOf('?') === -1 ? '?' : '&') + '_nonce=' + Date.now(); |
| } |
| |
| const fetchFun = opts.fetch || fetch; |
| return await fetchFun(url, options); |
| }; |
| |
| function adapterFun$$1(name, fun) { |
| return adapterFun(name, function (...args) { |
| setup().then(function () { |
| return fun.apply(this, args); |
| }).catch(function (e) { |
| const callback = args.pop(); |
| callback(e); |
| }); |
| }).bind(api); |
| } |
| |
| async function fetchJSON(url, options) { |
| |
| const result = {}; |
| |
| options = options || {}; |
| options.headers = options.headers || new nodeFetch.Headers(); |
| |
| if (!options.headers.get('Content-Type')) { |
| options.headers.set('Content-Type', 'application/json'); |
| } |
| if (!options.headers.get('Accept')) { |
| options.headers.set('Accept', 'application/json'); |
| } |
| |
| const response = await ourFetch(url, options); |
| result.ok = response.ok; |
| result.status = response.status; |
| const json = await response.json(); |
| |
| result.data = json; |
| if (!result.ok) { |
| result.data.status = result.status; |
| const err = generateErrorFromResponse(result.data); |
| throw err; |
| } |
| |
| if (Array.isArray(result.data)) { |
| result.data = result.data.map(function (v) { |
| if (v.error || v.missing) { |
| return generateErrorFromResponse(v); |
| } else { |
| return v; |
| } |
| }); |
| } |
| |
| return result; |
| } |
| |
| let setupPromise; |
| |
| async function setup() { |
| if (opts.skip_setup) { |
| return Promise.resolve(); |
| } |
| |
| // If there is a setup in process or previous successful setup |
| // done then we will use that |
| // If previous setups have been rejected we will try again |
| if (setupPromise) { |
| return setupPromise; |
| } |
| |
| setupPromise = fetchJSON(dbUrl).catch(function (err) { |
| if (err && err.status && err.status === 404) { |
| return fetchJSON(dbUrl, {method: 'PUT'}); |
| } else { |
| return Promise.reject(err); |
| } |
| }).catch(function (err) { |
| // If we try to create a database that already exists, skipped in |
| // istanbul since its catching a race condition. |
| /* istanbul ignore if */ |
| if (err && err.status && err.status === 412) { |
| return true; |
| } |
| return Promise.reject(err); |
| }); |
| |
| setupPromise.catch(function () { |
| setupPromise = null; |
| }); |
| |
| return setupPromise; |
| } |
| |
| nextTick(function () { |
| callback(null, api); |
| }); |
| |
| api._remote = true; |
| |
| /* istanbul ignore next */ |
| api.type = function () { |
| return 'http'; |
| }; |
| |
| api.id = adapterFun$$1('id', async function (callback) { |
| let result; |
| try { |
| const response = await ourFetch(genUrl(host, '')); |
| result = await response.json(); |
| } catch (err) { |
| result = {}; |
| } |
| |
| // Bad response or missing `uuid` should not prevent ID generation. |
| const uuid$$1 = (result && result.uuid) ? (result.uuid + host.db) : genDBUrl(host, ''); |
| callback(null, uuid$$1); |
| }); |
| |
| // Sends a POST request to the host calling the couchdb _compact function |
| // version: The version of CouchDB it is running |
| api.compact = adapterFun$$1('compact', async function (opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| opts = clone(opts); |
| |
| await fetchJSON(genDBUrl(host, '_compact'), {method: 'POST'}); |
| |
| function ping() { |
| api.info(function (err, res$$1) { |
| // CouchDB may send a "compact_running:true" if it's |
| // already compacting. PouchDB Server doesn't. |
| /* istanbul ignore else */ |
| if (res$$1 && !res$$1.compact_running) { |
| callback(null, {ok: true}); |
| } else { |
| setTimeout(ping, opts.interval || 200); |
| } |
| }); |
| } |
| // Ping the http if it's finished compaction |
| ping(); |
| }); |
| |
| api.bulkGet = adapterFun('bulkGet', function (opts, callback) { |
| const self = this; |
| |
| async function doBulkGet(cb) { |
| const params = {}; |
| if (opts.revs) { |
| params.revs = true; |
| } |
| if (opts.attachments) { |
| /* istanbul ignore next */ |
| params.attachments = true; |
| } |
| if (opts.latest) { |
| params.latest = true; |
| } |
| try { |
| const result = await fetchJSON(genDBUrl(host, '_bulk_get' + paramsToStr(params)), { |
| method: 'POST', |
| body: JSON.stringify({ docs: opts.docs}) |
| }); |
| |
| if (opts.attachments && opts.binary) { |
| result.data.results.forEach(function (res$$1) { |
| res$$1.docs.forEach(readAttachmentsAsBlobOrBuffer); |
| }); |
| } |
| cb(null, result.data); |
| } catch (error) { |
| cb(error); |
| } |
| } |
| |
| /* istanbul ignore next */ |
| function doBulkGetShim() { |
| // avoid "url too long error" by splitting up into multiple requests |
| const batchSize = MAX_SIMULTANEOUS_REVS; |
| const numBatches = Math.ceil(opts.docs.length / batchSize); |
| let numDone = 0; |
| const results = new Array(numBatches); |
| |
| function onResult(batchNum) { |
| return function (err, res$$1) { |
| // err is impossible because shim returns a list of errs in that case |
| results[batchNum] = res$$1.results; |
| if (++numDone === numBatches) { |
| callback(null, {results: results.flat()}); |
| } |
| }; |
| } |
| |
| for (let i = 0; i < numBatches; i++) { |
| const subOpts = pick(opts, ['revs', 'attachments', 'binary', 'latest']); |
| subOpts.docs = opts.docs.slice(i * batchSize, |
| Math.min(opts.docs.length, (i + 1) * batchSize)); |
| bulkGet(self, subOpts, onResult(i)); |
| } |
| } |
| |
| // mark the whole database as either supporting or not supporting _bulk_get |
| const dbUrl = genUrl(host, ''); |
| const supportsBulkGet = supportsBulkGetMap[dbUrl]; |
| |
| /* istanbul ignore next */ |
| if (typeof supportsBulkGet !== 'boolean') { |
| // check if this database supports _bulk_get |
| doBulkGet(function (err, res$$1) { |
| if (err) { |
| supportsBulkGetMap[dbUrl] = false; |
| res( |
| err.status, |
| 'PouchDB is just detecting if the remote ' + |
| 'supports the _bulk_get API.' |
| ); |
| doBulkGetShim(); |
| } else { |
| supportsBulkGetMap[dbUrl] = true; |
| callback(null, res$$1); |
| } |
| }); |
| } else if (supportsBulkGet) { |
| doBulkGet(callback); |
| } else { |
| doBulkGetShim(); |
| } |
| }); |
| |
| // Calls GET on the host, which gets back a JSON string containing |
| // couchdb: A welcome string |
| // version: The version of CouchDB it is running |
| api._info = async function (callback) { |
| try { |
| await setup(); |
| const response = await ourFetch(genDBUrl(host, '')); |
| const info = await response.json(); |
| info.host = genDBUrl(host, ''); |
| callback(null, info); |
| } catch (err) { |
| callback(err); |
| } |
| }; |
| |
| api.fetch = async function (path$$1, options) { |
| await setup(); |
| const url = path$$1.substring(0, 1) === '/' ? |
| genUrl(host, path$$1.substring(1)) : |
| genDBUrl(host, path$$1); |
| return ourFetch(url, options); |
| }; |
| |
| // Get the document with the given id from the database given by host. |
| // The id could be solely the _id in the database, or it may be a |
| // _design/ID or _local/ID path |
| api.get = adapterFun$$1('get', async function (id, opts, callback) { |
| // If no options were given, set the callback to the second parameter |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| opts = clone(opts); |
| |
| // List of parameters to add to the GET request |
| const params = {}; |
| |
| if (opts.revs) { |
| params.revs = true; |
| } |
| |
| if (opts.revs_info) { |
| params.revs_info = true; |
| } |
| |
| if (opts.latest) { |
| params.latest = true; |
| } |
| |
| if (opts.open_revs) { |
| if (opts.open_revs !== "all") { |
| opts.open_revs = JSON.stringify(opts.open_revs); |
| } |
| params.open_revs = opts.open_revs; |
| } |
| |
| if (opts.rev) { |
| params.rev = opts.rev; |
| } |
| |
| if (opts.conflicts) { |
| params.conflicts = opts.conflicts; |
| } |
| |
| /* istanbul ignore if */ |
| if (opts.update_seq) { |
| params.update_seq = opts.update_seq; |
| } |
| |
| id = encodeDocId(id); |
| |
| function fetchAttachments(doc) { |
| const atts = doc._attachments; |
| const filenames = atts && Object.keys(atts); |
| if (!atts || !filenames.length) { |
| return; |
| } |
| // we fetch these manually in separate XHRs, because |
| // Sync Gateway would normally send it back as multipart/mixed, |
| // which we cannot parse. Also, this is more efficient than |
| // receiving attachments as base64-encoded strings. |
| async function fetchData(filename) { |
| const att = atts[filename]; |
| const path$$1 = encodeDocId(doc._id) + '/' + encodeAttachmentId(filename) + |
| '?rev=' + doc._rev; |
| |
| const response = await ourFetch(genDBUrl(host, path$$1)); |
| |
| let blob; |
| if ('buffer' in response) { |
| blob = await response.buffer(); |
| } else { |
| /* istanbul ignore next */ |
| blob = await response.blob(); |
| } |
| |
| let data; |
| if (opts.binary) { |
| const typeFieldDescriptor = Object.getOwnPropertyDescriptor(blob.__proto__, 'type'); |
| if (!typeFieldDescriptor || typeFieldDescriptor.set) { |
| blob.type = att.content_type; |
| } |
| data = blob; |
| } else { |
| data = await new Promise(function (resolve) { |
| blobToBase64(blob, resolve); |
| }); |
| } |
| |
| delete att.stub; |
| delete att.length; |
| att.data = data; |
| } |
| |
| const promiseFactories = filenames.map(function (filename) { |
| return function () { |
| return fetchData(filename); |
| }; |
| }); |
| |
| // This limits the number of parallel xhr requests to 5 any time |
| // to avoid issues with maximum browser request limits |
| return pool(promiseFactories, 5); |
| } |
| |
| function fetchAllAttachments(docOrDocs) { |
| if (Array.isArray(docOrDocs)) { |
| return Promise.all(docOrDocs.map(function (doc) { |
| if (doc.ok) { |
| return fetchAttachments(doc.ok); |
| } |
| })); |
| } |
| return fetchAttachments(docOrDocs); |
| } |
| |
| const url = genDBUrl(host, id + paramsToStr(params)); |
| try { |
| const res$$1 = await fetchJSON(url); |
| if (opts.attachments) { |
| await fetchAllAttachments(res$$1.data); |
| } |
| callback(null, res$$1.data); |
| } catch (error) { |
| error.docId = id; |
| callback(error); |
| } |
| }); |
| |
| |
| // Delete the document given by doc from the database given by host. |
| api.remove = adapterFun$$1('remove', async function (docOrId, optsOrRev, opts, cb) { |
| let doc; |
| if (typeof optsOrRev === 'string') { |
| // id, rev, opts, callback style |
| doc = { |
| _id: docOrId, |
| _rev: optsOrRev |
| }; |
| if (typeof opts === 'function') { |
| cb = opts; |
| opts = {}; |
| } |
| } else { |
| // doc, opts, callback style |
| doc = docOrId; |
| if (typeof optsOrRev === 'function') { |
| cb = optsOrRev; |
| opts = {}; |
| } else { |
| cb = opts; |
| opts = optsOrRev; |
| } |
| } |
| |
| const rev$$1 = (doc._rev || opts.rev); |
| const url = genDBUrl(host, encodeDocId(doc._id)) + '?rev=' + rev$$1; |
| |
| try { |
| const result = await fetchJSON(url, {method: 'DELETE'}); |
| cb(null, result.data); |
| } catch (error) { |
| cb(error); |
| } |
| }); |
| |
| function encodeAttachmentId(attachmentId) { |
| return attachmentId.split("/").map(encodeURIComponent).join("/"); |
| } |
| |
| // Get the attachment |
| api.getAttachment = adapterFun$$1('getAttachment', async function (docId, attachmentId, |
| opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| const params = opts.rev ? ('?rev=' + opts.rev) : ''; |
| const url = genDBUrl(host, encodeDocId(docId)) + '/' + |
| encodeAttachmentId(attachmentId) + params; |
| let contentType; |
| try { |
| const response = await ourFetch(url, {method: 'GET'}); |
| |
| if (!response.ok) { |
| throw response; |
| } |
| |
| contentType = response.headers.get('content-type'); |
| let blob; |
| if (typeof process !== 'undefined' && !process.browser && typeof response.buffer === 'function') { |
| blob = await response.buffer(); |
| } else { |
| /* istanbul ignore next */ |
| blob = await response.blob(); |
| } |
| |
| // TODO: also remove |
| if (typeof process !== 'undefined' && !process.browser) { |
| const typeFieldDescriptor = Object.getOwnPropertyDescriptor(blob.__proto__, 'type'); |
| if (!typeFieldDescriptor || typeFieldDescriptor.set) { |
| blob.type = contentType; |
| } |
| } |
| callback(null, blob); |
| } catch (err) { |
| callback(err); |
| } |
| }); |
| |
| // Remove the attachment given by the id and rev |
| api.removeAttachment = adapterFun$$1('removeAttachment', async function ( |
| docId, |
| attachmentId, |
| rev$$1, |
| callback, |
| ) { |
| const url = genDBUrl(host, encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId)) + '?rev=' + rev$$1; |
| |
| try { |
| const result = await fetchJSON(url, {method: 'DELETE'}); |
| callback(null, result.data); |
| } catch (error) { |
| callback(error); |
| } |
| }); |
| |
| // Add the attachment given by blob and its contentType property |
| // to the document with the given id, the revision given by rev, and |
| // add it to the database given by host. |
| api.putAttachment = adapterFun$$1('putAttachment', async function ( |
| docId, |
| attachmentId, |
| rev$$1, |
| blob, |
| type, |
| callback, |
| ) { |
| if (typeof type === 'function') { |
| callback = type; |
| type = blob; |
| blob = rev$$1; |
| rev$$1 = null; |
| } |
| const id = encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId); |
| let url = genDBUrl(host, id); |
| if (rev$$1) { |
| url += '?rev=' + rev$$1; |
| } |
| |
| if (typeof blob === 'string') { |
| // input is assumed to be a base64 string |
| let binary; |
| try { |
| binary = thisAtob(blob); |
| } catch (err) { |
| return callback(createError(BAD_ARG, |
| 'Attachment is not a valid base64 string')); |
| } |
| blob = binary ? binStringToBluffer(binary, type) : ''; |
| } |
| |
| try { |
| // Add the attachment |
| const result = await fetchJSON(url, { |
| headers: new nodeFetch.Headers({'Content-Type': type}), |
| method: 'PUT', |
| body: blob |
| }); |
| callback(null, result.data); |
| } catch (error) { |
| callback(error); |
| } |
| }); |
| |
| // Update/create multiple documents given by req in the database |
| // given by host. |
| api._bulkDocs = async function (req, opts, callback) { |
| // If new_edits=false then it prevents the database from creating |
| // new revision numbers for the documents. Instead it just uses |
| // the old ones. This is used in database replication. |
| req.new_edits = opts.new_edits; |
| |
| try { |
| await setup(); |
| await Promise.all(req.docs.map(preprocessAttachments$1)); |
| |
| // Update/create the documents |
| const result = await fetchJSON(genDBUrl(host, '_bulk_docs'), { |
| method: 'POST', |
| body: JSON.stringify(req) |
| }); |
| callback(null, result.data); |
| } catch (error) { |
| callback(error); |
| } |
| }; |
| |
| // Update/create document |
| api._put = async function (doc, opts, callback) { |
| try { |
| await setup(); |
| await preprocessAttachments$1(doc); |
| |
| const result = await fetchJSON(genDBUrl(host, encodeDocId(doc._id)), { |
| method: 'PUT', |
| body: JSON.stringify(doc) |
| }); |
| callback(null, result.data); |
| } catch (error) { |
| error.docId = doc && doc._id; |
| callback(error); |
| } |
| }; |
| |
| |
| // Get a listing of the documents in the database given |
| // by host and ordered by increasing id. |
| api.allDocs = adapterFun$$1('allDocs', async function (opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| opts = clone(opts); |
| |
| // List of parameters to add to the GET request |
| const params = {}; |
| let body; |
| let method = 'GET'; |
| |
| if (opts.conflicts) { |
| params.conflicts = true; |
| } |
| |
| /* istanbul ignore if */ |
| if (opts.update_seq) { |
| params.update_seq = true; |
| } |
| |
| if (opts.descending) { |
| params.descending = true; |
| } |
| |
| if (opts.include_docs) { |
| params.include_docs = true; |
| } |
| |
| // added in CouchDB 1.6.0 |
| if (opts.attachments) { |
| params.attachments = true; |
| } |
| |
| if (opts.key) { |
| params.key = JSON.stringify(opts.key); |
| } |
| |
| if (opts.start_key) { |
| opts.startkey = opts.start_key; |
| } |
| |
| if (opts.startkey) { |
| params.startkey = JSON.stringify(opts.startkey); |
| } |
| |
| if (opts.end_key) { |
| opts.endkey = opts.end_key; |
| } |
| |
| if (opts.endkey) { |
| params.endkey = JSON.stringify(opts.endkey); |
| } |
| |
| if (typeof opts.inclusive_end !== 'undefined') { |
| params.inclusive_end = !!opts.inclusive_end; |
| } |
| |
| if (typeof opts.limit !== 'undefined') { |
| params.limit = opts.limit; |
| } |
| |
| if (typeof opts.skip !== 'undefined') { |
| params.skip = opts.skip; |
| } |
| |
| const paramStr = paramsToStr(params); |
| |
| if (typeof opts.keys !== 'undefined') { |
| method = 'POST'; |
| body = {keys: opts.keys}; |
| } |
| |
| try { |
| const result = await fetchJSON(genDBUrl(host, '_all_docs' + paramStr), { |
| method, |
| body: JSON.stringify(body) |
| }); |
| if (opts.include_docs && opts.attachments && opts.binary) { |
| result.data.rows.forEach(readAttachmentsAsBlobOrBuffer); |
| } |
| callback(null, result.data); |
| } catch (error) { |
| callback(error); |
| } |
| }); |
| |
| // Get a list of changes made to documents in the database given by host. |
| // TODO According to the README, there should be two other methods here, |
| // api.changes.addListener and api.changes.removeListener. |
| api._changes = function (opts) { |
| |
| // We internally page the results of a changes request, this means |
| // if there is a large set of changes to be returned we can start |
| // processing them quicker instead of waiting on the entire |
| // set of changes to return and attempting to process them at once |
| const batchSize = 'batch_size' in opts ? opts.batch_size : CHANGES_BATCH_SIZE; |
| |
| opts = clone(opts); |
| |
| if (opts.continuous && !('heartbeat' in opts)) { |
| opts.heartbeat = DEFAULT_HEARTBEAT; |
| } |
| |
| let requestTimeout = ('timeout' in opts) ? opts.timeout : 30 * 1000; |
| |
| // ensure CHANGES_TIMEOUT_BUFFER applies |
| if ('timeout' in opts && opts.timeout && |
| (requestTimeout - opts.timeout) < CHANGES_TIMEOUT_BUFFER) { |
| requestTimeout = opts.timeout + CHANGES_TIMEOUT_BUFFER; |
| } |
| |
| /* istanbul ignore if */ |
| if ('heartbeat' in opts && opts.heartbeat && |
| (requestTimeout - opts.heartbeat) < CHANGES_TIMEOUT_BUFFER) { |
| requestTimeout = opts.heartbeat + CHANGES_TIMEOUT_BUFFER; |
| } |
| |
| const params = {}; |
| if ('timeout' in opts && opts.timeout) { |
| params.timeout = opts.timeout; |
| } |
| |
| const limit = (typeof opts.limit !== 'undefined') ? opts.limit : false; |
| let leftToFetch = limit; |
| |
| if (opts.style) { |
| params.style = opts.style; |
| } |
| |
| if (opts.include_docs || opts.filter && typeof opts.filter === 'function') { |
| params.include_docs = true; |
| } |
| |
| if (opts.attachments) { |
| params.attachments = true; |
| } |
| |
| if (opts.continuous) { |
| params.feed = 'longpoll'; |
| } |
| |
| if (opts.seq_interval) { |
| params.seq_interval = opts.seq_interval; |
| } |
| |
| if (opts.conflicts) { |
| params.conflicts = true; |
| } |
| |
| if (opts.descending) { |
| params.descending = true; |
| } |
| |
| /* istanbul ignore if */ |
| if (opts.update_seq) { |
| params.update_seq = true; |
| } |
| |
| if ('heartbeat' in opts) { |
| // If the heartbeat value is false, it disables the default heartbeat |
| if (opts.heartbeat) { |
| params.heartbeat = opts.heartbeat; |
| } |
| } |
| |
| if (opts.filter && typeof opts.filter === 'string') { |
| params.filter = opts.filter; |
| } |
| |
| if (opts.view && typeof opts.view === 'string') { |
| params.filter = '_view'; |
| params.view = opts.view; |
| } |
| |
| // If opts.query_params exists, pass it through to the changes request. |
| // These parameters may be used by the filter on the source database. |
| if (opts.query_params && typeof opts.query_params === 'object') { |
| for (const param_name in opts.query_params) { |
| /* istanbul ignore else */ |
| if (Object.prototype.hasOwnProperty.call(opts.query_params, param_name)) { |
| params[param_name] = opts.query_params[param_name]; |
| } |
| } |
| } |
| |
| let method = 'GET'; |
| let body; |
| |
| if (opts.doc_ids) { |
| // set this automagically for the user; it's annoying that couchdb |
| // requires both a "filter" and a "doc_ids" param. |
| params.filter = '_doc_ids'; |
| method = 'POST'; |
| body = {doc_ids: opts.doc_ids }; |
| } |
| /* istanbul ignore next */ |
| else if (opts.selector) { |
| // set this automagically for the user, similar to above |
| params.filter = '_selector'; |
| method = 'POST'; |
| body = {selector: opts.selector }; |
| } |
| |
| const controller = new AbortController(); |
| let lastFetchedSeq; |
| |
| // Get all the changes starting with the one immediately after the |
| // sequence number given by since. |
| const fetchData = async function (since, callback) { |
| if (opts.aborted) { |
| return; |
| } |
| params.since = since; |
| // "since" can be any kind of json object in Cloudant/CouchDB 2.x |
| /* istanbul ignore next */ |
| if (typeof params.since === "object") { |
| params.since = JSON.stringify(params.since); |
| } |
| |
| if (opts.descending) { |
| if (limit) { |
| params.limit = leftToFetch; |
| } |
| } else { |
| params.limit = (!limit || leftToFetch > batchSize) ? |
| batchSize : leftToFetch; |
| } |
| |
| // Set the options for the ajax call |
| const url = genDBUrl(host, '_changes' + paramsToStr(params)); |
| const fetchOpts = { |
| signal: controller.signal, |
| method, |
| body: JSON.stringify(body) |
| }; |
| lastFetchedSeq = since; |
| |
| /* istanbul ignore if */ |
| if (opts.aborted) { |
| return; |
| } |
| |
| // Get the changes |
| try { |
| await setup(); |
| const result = await fetchJSON(url, fetchOpts); |
| callback(null, result.data); |
| } catch (error) { |
| callback(error); |
| } |
| }; |
| |
| // If opts.since exists, get all the changes from the sequence |
| // number given by opts.since. Otherwise, get all the changes |
| // from the sequence number 0. |
| const results = {results: []}; |
| |
| const fetched = function (err, res$$1) { |
| if (opts.aborted) { |
| return; |
| } |
| let raw_results_length = 0; |
| // If the result of the ajax call (res) contains changes (res.results) |
| if (res$$1 && res$$1.results) { |
| raw_results_length = res$$1.results.length; |
| results.last_seq = res$$1.last_seq; |
| let pending = null; |
| let lastSeq = null; |
| // Attach 'pending' property if server supports it (CouchDB 2.0+) |
| /* istanbul ignore if */ |
| if (typeof res$$1.pending === 'number') { |
| pending = res$$1.pending; |
| } |
| if (typeof results.last_seq === 'string' || typeof results.last_seq === 'number') { |
| lastSeq = results.last_seq; |
| } |
| // For each change |
| const req = {}; |
| req.query = opts.query_params; |
| res$$1.results = res$$1.results.filter(function (c) { |
| leftToFetch--; |
| const ret = filterChange(opts)(c); |
| if (ret) { |
| if (opts.include_docs && opts.attachments && opts.binary) { |
| readAttachmentsAsBlobOrBuffer(c); |
| } |
| if (opts.return_docs) { |
| results.results.push(c); |
| } |
| opts.onChange(c, pending, lastSeq); |
| } |
| return ret; |
| }); |
| } else if (err) { |
| // In case of an error, stop listening for changes and call |
| // opts.complete |
| opts.aborted = true; |
| opts.complete(err); |
| return; |
| } |
| |
| // The changes feed may have timed out with no results |
| // if so reuse last update sequence |
| if (res$$1 && res$$1.last_seq) { |
| lastFetchedSeq = res$$1.last_seq; |
| } |
| |
| const finished = (limit && leftToFetch <= 0) || |
| (res$$1 && raw_results_length < batchSize) || |
| (opts.descending); |
| |
| if ((opts.continuous && !(limit && leftToFetch <= 0)) || !finished) { |
| // Queue a call to fetch again with the newest sequence number |
| nextTick(function () { fetchData(lastFetchedSeq, fetched); }); |
| } else { |
| // We're done, call the callback |
| opts.complete(null, results); |
| } |
| }; |
| |
| fetchData(opts.since || 0, fetched); |
| |
| // Return a method to cancel this method from processing any more |
| return { |
| cancel: function () { |
| opts.aborted = true; |
| controller.abort(); |
| } |
| }; |
| }; |
| |
| // Given a set of document/revision IDs (given by req), tets the subset of |
| // those that do NOT correspond to revisions stored in the database. |
| // See http://wiki.apache.org/couchdb/HttpPostRevsDiff |
| api.revsDiff = adapterFun$$1('revsDiff', async function (req, opts, callback) { |
| // If no options were given, set the callback to be the second parameter |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| |
| try { |
| // Get the missing document/revision IDs |
| const result = await fetchJSON(genDBUrl(host, '_revs_diff'), { |
| method: 'POST', |
| body: JSON.stringify(req) |
| }); |
| callback(null, result.data); |
| } catch (error) { |
| callback(error); |
| } |
| }); |
| |
| api._close = function (callback) { |
| callback(); |
| }; |
| |
| api._destroy = async function (options, callback) { |
| try { |
| const json = await fetchJSON(genDBUrl(host, ''), {method: 'DELETE'}); |
| callback(null, json); |
| } catch (error) { |
| if (error.status === 404) { |
| callback(null, {ok: true}); |
| } else { |
| callback(error); |
| } |
| } |
| }; |
| } |
| |
| // HttpPouch is a valid adapter. |
| HttpPouch.valid = function () { |
| return true; |
| }; |
| |
| function HttpPouch$1 (PouchDB) { |
| PouchDB.adapter('http', HttpPouch, false); |
| PouchDB.adapter('https', HttpPouch, false); |
| } |
| |
| class QueryParseError extends Error { |
| constructor(message) { |
| super(); |
| this.status = 400; |
| this.name = 'query_parse_error'; |
| this.message = message; |
| this.error = true; |
| try { |
| Error.captureStackTrace(this, QueryParseError); |
| } catch (e) {} |
| } |
| } |
| |
| class NotFoundError$1 extends Error { |
| constructor(message) { |
| super(); |
| this.status = 404; |
| this.name = 'not_found'; |
| this.message = message; |
| this.error = true; |
| try { |
| Error.captureStackTrace(this, NotFoundError$1); |
| } catch (e) {} |
| } |
| } |
| |
| class BuiltInError extends Error { |
| constructor(message) { |
| super(); |
| this.status = 500; |
| this.name = 'invalid_value'; |
| this.message = message; |
| this.error = true; |
| try { |
| Error.captureStackTrace(this, BuiltInError); |
| } catch (e) {} |
| } |
| } |
| |
| function promisedCallback(promise, callback) { |
| if (callback) { |
| promise.then(function (res$$1) { |
| nextTick(function () { |
| callback(null, res$$1); |
| }); |
| }, function (reason) { |
| nextTick(function () { |
| callback(reason); |
| }); |
| }); |
| } |
| return promise; |
| } |
| |
| function callbackify(fun) { |
| return function (...args) { |
| var cb = args.pop(); |
| var promise = fun.apply(this, args); |
| if (typeof cb === 'function') { |
| promisedCallback(promise, cb); |
| } |
| return promise; |
| }; |
| } |
| |
| // Promise finally util similar to Q.finally |
| function fin(promise, finalPromiseFactory) { |
| return promise.then(function (res$$1) { |
| return finalPromiseFactory().then(function () { |
| return res$$1; |
| }); |
| }, function (reason) { |
| return finalPromiseFactory().then(function () { |
| throw reason; |
| }); |
| }); |
| } |
| |
| function sequentialize(queue, promiseFactory) { |
| return function () { |
| var args = arguments; |
| var that = this; |
| return queue.add(function () { |
| return promiseFactory.apply(that, args); |
| }); |
| }; |
| } |
| |
| // uniq an array of strings, order not guaranteed |
| // similar to underscore/lodash _.uniq |
| function uniq(arr) { |
| var theSet = new Set(arr); |
| var result = new Array(theSet.size); |
| var index = -1; |
| theSet.forEach(function (value) { |
| result[++index] = value; |
| }); |
| return result; |
| } |
| |
| function mapToKeysArray(map) { |
| var result = new Array(map.size); |
| var index = -1; |
| map.forEach(function (value, key) { |
| result[++index] = key; |
| }); |
| return result; |
| } |
| |
| function createBuiltInError(name) { |
| var message = 'builtin ' + name + |
| ' function requires map values to be numbers' + |
| ' or number arrays'; |
| return new BuiltInError(message); |
| } |
| |
| function sum(values) { |
| var result = 0; |
| for (var i = 0, len = values.length; i < len; i++) { |
| var num = values[i]; |
| if (typeof num !== 'number') { |
| if (Array.isArray(num)) { |
| // lists of numbers are also allowed, sum them separately |
| result = typeof result === 'number' ? [result] : result; |
| for (var j = 0, jLen = num.length; j < jLen; j++) { |
| var jNum = num[j]; |
| if (typeof jNum !== 'number') { |
| throw createBuiltInError('_sum'); |
| } else if (typeof result[j] === 'undefined') { |
| result.push(jNum); |
| } else { |
| result[j] += jNum; |
| } |
| } |
| } else { // not array/number |
| throw createBuiltInError('_sum'); |
| } |
| } else if (typeof result === 'number') { |
| result += num; |
| } else { // add number to array |
| result[0] += num; |
| } |
| } |
| return result; |
| } |
| |
| // Inside of 'vm' for Node, we need a way to translate a pseudo-error |
| // back into a real error once it's out of the VM. |
| function createBuiltInErrorInVm(name) { |
| return { |
| builtInError: true, |
| name |
| }; |
| } |
| |
| function convertToTrueError(err) { |
| return createBuiltInError(err.name); |
| } |
| |
| function isBuiltInError(obj) { |
| return obj && obj.builtInError; |
| } |
| |
| // All of this vm hullaballoo is to be able to run arbitrary code in a sandbox |
| // for security reasons. |
| function evalFunctionInVm(func, emit) { |
| return function (arg1, arg2, arg3) { |
| var code = '(function() {"use strict";' + |
| 'var createBuiltInError = ' + createBuiltInErrorInVm.toString() + ';' + |
| 'var sum = ' + sum.toString() + ';' + |
| 'var log = function () {};' + |
| 'var isArray = Array.isArray;' + |
| 'var toJSON = JSON.parse;' + |
| 'var __emitteds__ = [];' + |
| 'var emit = function (key, value) {__emitteds__.push([key, value]);};' + |
| 'var __result__ = (' + |
| func.replace(/;\s*$/, '') + ')' + '(' + |
| JSON.stringify(arg1) + ',' + |
| JSON.stringify(arg2) + ',' + |
| JSON.stringify(arg3) + ');' + |
| 'return {result: __result__, emitteds: __emitteds__};' + |
| '})()'; |
| |
| var output = vm.runInNewContext(code); |
| |
| output.emitteds.forEach(function (emitted) { |
| emit(emitted[0], emitted[1]); |
| }); |
| if (isBuiltInError(output.result)) { |
| output.result = convertToTrueError(output.result); |
| } |
| return output.result; |
| }; |
| } |
| |
| var log = guardedConsole.bind(null, 'log'); |
| var toJSON = JSON.parse; |
| |
| // The "stringify, then execute in a VM" strategy totally breaks Istanbul due |
| // to missing __coverage global objects. As a solution, export different |
| // code during coverage testing and during regular execution. |
| // Note that this doesn't get shipped to consumers because Rollup replaces it |
| // with rollup-plugin-replace, so false is replaced with `false` |
| var evalFunc; |
| /* istanbul ignore else */ |
| { |
| evalFunc = evalFunctionInVm; |
| } |
| |
| var evalFunction = evalFunc; |
| |
| /* |
| * Simple task queue to sequentialize actions. Assumes |
| * callbacks will eventually fire (once). |
| */ |
| |
| class TaskQueue$1 { |
| constructor() { |
| this.promise = Promise.resolve(); |
| } |
| |
| add(promiseFactory) { |
| this.promise = this.promise |
| // just recover |
| .catch(() => { }) |
| .then(() => promiseFactory()); |
| return this.promise; |
| } |
| |
| finish() { |
| return this.promise; |
| } |
| } |
| |
| function stringify(input) { |
| if (!input) { |
| return 'undefined'; // backwards compat for empty reduce |
| } |
| // for backwards compat with mapreduce, functions/strings are stringified |
| // as-is. everything else is JSON-stringified. |
| switch (typeof input) { |
| case 'function': |
| // e.g. a mapreduce map |
| return input.toString(); |
| case 'string': |
| // e.g. a mapreduce built-in _reduce function |
| return input.toString(); |
| default: |
| // e.g. a JSON object in the case of mango queries |
| return JSON.stringify(input); |
| } |
| } |
| |
| /* create a string signature for a view so we can cache it and uniq it */ |
| function createViewSignature(mapFun, reduceFun) { |
| // the "undefined" part is for backwards compatibility |
| return stringify(mapFun) + stringify(reduceFun) + 'undefined'; |
| } |
| |
| async function createView(sourceDB, viewName, mapFun, reduceFun, temporary, localDocName) { |
| const viewSignature = createViewSignature(mapFun, reduceFun); |
| |
| let cachedViews; |
| if (!temporary) { |
| // cache this to ensure we don't try to update the same view twice |
| cachedViews = sourceDB._cachedViews = sourceDB._cachedViews || {}; |
| if (cachedViews[viewSignature]) { |
| return cachedViews[viewSignature]; |
| } |
| } |
| |
| const promiseForView = sourceDB.info().then(async function (info) { |
| const depDbName = info.db_name + '-mrview-' + |
| (temporary ? 'temp' : stringMd5(viewSignature)); |
| |
| // save the view name in the source db so it can be cleaned up if necessary |
| // (e.g. when the _design doc is deleted, remove all associated view data) |
| function diffFunction(doc) { |
| doc.views = doc.views || {}; |
| let fullViewName = viewName; |
| if (fullViewName.indexOf('/') === -1) { |
| fullViewName = viewName + '/' + viewName; |
| } |
| const depDbs = doc.views[fullViewName] = doc.views[fullViewName] || {}; |
| /* istanbul ignore if */ |
| if (depDbs[depDbName]) { |
| return; // no update necessary |
| } |
| depDbs[depDbName] = true; |
| return doc; |
| } |
| await upsert(sourceDB, '_local/' + localDocName, diffFunction); |
| const res$$1 = await sourceDB.registerDependentDatabase(depDbName); |
| const db = res$$1.db; |
| db.auto_compaction = true; |
| const view = { |
| name: depDbName, |
| db, |
| sourceDB, |
| adapter: sourceDB.adapter, |
| mapFun, |
| reduceFun |
| }; |
| |
| let lastSeqDoc; |
| try { |
| lastSeqDoc = await view.db.get('_local/lastSeq'); |
| } catch (err) { |
| /* istanbul ignore if */ |
| if (err.status !== 404) { |
| throw err; |
| } |
| } |
| |
| view.seq = lastSeqDoc ? lastSeqDoc.seq : 0; |
| if (cachedViews) { |
| view.db.once('destroyed', function () { |
| delete cachedViews[viewSignature]; |
| }); |
| } |
| return view; |
| }); |
| |
| if (cachedViews) { |
| cachedViews[viewSignature] = promiseForView; |
| } |
| return promiseForView; |
| } |
| |
| const persistentQueues = {}; |
| const tempViewQueue = new TaskQueue$1(); |
| const CHANGES_BATCH_SIZE$1 = 50; |
| |
| function parseViewName(name) { |
| // can be either 'ddocname/viewname' or just 'viewname' |
| // (where the ddoc name is the same) |
| return name.indexOf('/') === -1 ? [name, name] : name.split('/'); |
| } |
| |
| function isGenOne(changes) { |
| // only return true if the current change is 1- |
| // and there are no other leafs |
| return changes.length === 1 && /^1-/.test(changes[0].rev); |
| } |
| |
| function emitError(db, e, data) { |
| try { |
| db.emit('error', e); |
| } catch (err) { |
| guardedConsole('error', |
| 'The user\'s map/reduce function threw an uncaught error.\n' + |
| 'You can debug this error by doing:\n' + |
| 'myDatabase.on(\'error\', function (err) { debugger; });\n' + |
| 'Please double-check your map/reduce function.'); |
| guardedConsole('error', e, data); |
| } |
| } |
| |
| /** |
| * Returns an "abstract" mapreduce object of the form: |
| * |
| * { |
| * query: queryFun, |
| * viewCleanup: viewCleanupFun |
| * } |
| * |
| * Arguments are: |
| * |
| * localDoc: string |
| * This is for the local doc that gets saved in order to track the |
| * "dependent" DBs and clean them up for viewCleanup. It should be |
| * unique, so that indexer plugins don't collide with each other. |
| * mapper: function (mapFunDef, emit) |
| * Returns a map function based on the mapFunDef, which in the case of |
| * normal map/reduce is just the de-stringified function, but may be |
| * something else, such as an object in the case of pouchdb-find. |
| * reducer: function (reduceFunDef) |
| * Ditto, but for reducing. Modules don't have to support reducing |
| * (e.g. pouchdb-find). |
| * ddocValidator: function (ddoc, viewName) |
| * Throws an error if the ddoc or viewName is not valid. |
| * This could be a way to communicate to the user that the configuration for the |
| * indexer is invalid. |
| */ |
| function createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator) { |
| |
| function tryMap(db, fun, doc) { |
| // emit an event if there was an error thrown by a map function. |
| // putting try/catches in a single function also avoids deoptimizations. |
| try { |
| fun(doc); |
| } catch (e) { |
| emitError(db, e, {fun, doc}); |
| } |
| } |
| |
| function tryReduce(db, fun, keys, values, rereduce) { |
| // same as above, but returning the result or an error. there are two separate |
| // functions to avoid extra memory allocations since the tryCode() case is used |
| // for custom map functions (common) vs this function, which is only used for |
| // custom reduce functions (rare) |
| try { |
| return {output : fun(keys, values, rereduce)}; |
| } catch (e) { |
| emitError(db, e, {fun, keys, values, rereduce}); |
| return {error: e}; |
| } |
| } |
| |
| function sortByKeyThenValue(x, y) { |
| const keyCompare = collate(x.key, y.key); |
| return keyCompare !== 0 ? keyCompare : collate(x.value, y.value); |
| } |
| |
| function sliceResults(results, limit, skip) { |
| skip = skip || 0; |
| if (typeof limit === 'number') { |
| return results.slice(skip, limit + skip); |
| } else if (skip > 0) { |
| return results.slice(skip); |
| } |
| return results; |
| } |
| |
| function rowToDocId(row) { |
| const val = row.value; |
| // Users can explicitly specify a joined doc _id, or it |
| // defaults to the doc _id that emitted the key/value. |
| const docId = (val && typeof val === 'object' && val._id) || row.id; |
| return docId; |
| } |
| |
| function readAttachmentsAsBlobOrBuffer(res$$1) { |
| for (const row of res$$1.rows) { |
| const atts = row.doc && row.doc._attachments; |
| if (!atts) { |
| continue; |
| } |
| for (const filename of Object.keys(atts)) { |
| const att = atts[filename]; |
| atts[filename].data = b64ToBluffer(att.data, att.content_type); |
| } |
| } |
| } |
| |
| function postprocessAttachments(opts) { |
| return function (res$$1) { |
| if (opts.include_docs && opts.attachments && opts.binary) { |
| readAttachmentsAsBlobOrBuffer(res$$1); |
| } |
| return res$$1; |
| }; |
| } |
| |
| function addHttpParam(paramName, opts, params, asJson) { |
| // add an http param from opts to params, optionally json-encoded |
| let val = opts[paramName]; |
| if (typeof val !== 'undefined') { |
| if (asJson) { |
| val = encodeURIComponent(JSON.stringify(val)); |
| } |
| params.push(paramName + '=' + val); |
| } |
| } |
| |
| function coerceInteger(integerCandidate) { |
| if (typeof integerCandidate !== 'undefined') { |
| const asNumber = Number(integerCandidate); |
| // prevents e.g. '1foo' or '1.1' being coerced to 1 |
| if (!isNaN(asNumber) && asNumber === parseInt(integerCandidate, 10)) { |
| return asNumber; |
| } else { |
| return integerCandidate; |
| } |
| } |
| } |
| |
| function coerceOptions(opts) { |
| opts.group_level = coerceInteger(opts.group_level); |
| opts.limit = coerceInteger(opts.limit); |
| opts.skip = coerceInteger(opts.skip); |
| return opts; |
| } |
| |
| function checkPositiveInteger(number) { |
| if (number) { |
| if (typeof number !== 'number') { |
| return new QueryParseError(`Invalid value for integer: "${number}"`); |
| } |
| if (number < 0) { |
| return new QueryParseError(`Invalid value for positive integer: "${number}"`); |
| } |
| } |
| } |
| |
| function checkQueryParseError(options, fun) { |
| const startkeyName = options.descending ? 'endkey' : 'startkey'; |
| const endkeyName = options.descending ? 'startkey' : 'endkey'; |
| |
| if (typeof options[startkeyName] !== 'undefined' && |
| typeof options[endkeyName] !== 'undefined' && |
| collate(options[startkeyName], options[endkeyName]) > 0) { |
| throw new QueryParseError('No rows can match your key range, ' + |
| 'reverse your start_key and end_key or set {descending : true}'); |
| } else if (fun.reduce && options.reduce !== false) { |
| if (options.include_docs) { |
| throw new QueryParseError('{include_docs:true} is invalid for reduce'); |
| } else if (options.keys && options.keys.length > 1 && |
| !options.group && !options.group_level) { |
| throw new QueryParseError('Multi-key fetches for reduce views must use ' + |
| '{group: true}'); |
| } |
| } |
| for (const optionName of ['group_level', 'limit', 'skip']) { |
| const error = checkPositiveInteger(options[optionName]); |
| if (error) { |
| throw error; |
| } |
| } |
| } |
| |
| async function httpQuery(db, fun, opts) { |
| // List of parameters to add to the PUT request |
| let params = []; |
| let body; |
| let method = 'GET'; |
| let ok; |
| |
| // If opts.reduce exists and is defined, then add it to the list |
| // of parameters. |
| // If reduce=false then the results are that of only the map function |
| // not the final result of map and reduce. |
| addHttpParam('reduce', opts, params); |
| addHttpParam('include_docs', opts, params); |
| addHttpParam('attachments', opts, params); |
| addHttpParam('limit', opts, params); |
| addHttpParam('descending', opts, params); |
| addHttpParam('group', opts, params); |
| addHttpParam('group_level', opts, params); |
| addHttpParam('skip', opts, params); |
| addHttpParam('stale', opts, params); |
| addHttpParam('conflicts', opts, params); |
| addHttpParam('startkey', opts, params, true); |
| addHttpParam('start_key', opts, params, true); |
| addHttpParam('endkey', opts, params, true); |
| addHttpParam('end_key', opts, params, true); |
| addHttpParam('inclusive_end', opts, params); |
| addHttpParam('key', opts, params, true); |
| addHttpParam('update_seq', opts, params); |
| |
| // Format the list of parameters into a valid URI query string |
| params = params.join('&'); |
| params = params === '' ? '' : '?' + params; |
| |
| // If keys are supplied, issue a POST to circumvent GET query string limits |
| // see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options |
| if (typeof opts.keys !== 'undefined') { |
| const MAX_URL_LENGTH = 2000; |
| // according to http://stackoverflow.com/a/417184/680742, |
| // the de facto URL length limit is 2000 characters |
| |
| const keysAsString = `keys=${encodeURIComponent(JSON.stringify(opts.keys))}`; |
| if (keysAsString.length + params.length + 1 <= MAX_URL_LENGTH) { |
| // If the keys are short enough, do a GET. we do this to work around |
| // Safari not understanding 304s on POSTs (see pouchdb/pouchdb#1239) |
| params += (params[0] === '?' ? '&' : '?') + keysAsString; |
| } else { |
| method = 'POST'; |
| if (typeof fun === 'string') { |
| body = {keys: opts.keys}; |
| } else { // fun is {map : mapfun}, so append to this |
| fun.keys = opts.keys; |
| } |
| } |
| } |
| |
| // We are referencing a query defined in the design doc |
| if (typeof fun === 'string') { |
| const parts = parseViewName(fun); |
| |
| const response = await db.fetch('_design/' + parts[0] + '/_view/' + parts[1] + params, { |
| headers: new nodeFetch.Headers({'Content-Type': 'application/json'}), |
| method, |
| body: JSON.stringify(body) |
| }); |
| ok = response.ok; |
| // status = response.status; |
| const result = await response.json(); |
| |
| if (!ok) { |
| result.status = response.status; |
| throw generateErrorFromResponse(result); |
| } |
| |
| // fail the entire request if the result contains an error |
| for (const row of result.rows) { |
| /* istanbul ignore if */ |
| if (row.value && row.value.error && row.value.error === "builtin_reduce_error") { |
| throw new Error(row.reason); |
| } |
| } |
| |
| return new Promise(function (resolve) { |
| resolve(result); |
| }).then(postprocessAttachments(opts)); |
| } |
| |
| // We are using a temporary view, terrible for performance, good for testing |
| body = body || {}; |
| for (const key of Object.keys(fun)) { |
| if (Array.isArray(fun[key])) { |
| body[key] = fun[key]; |
| } else { |
| body[key] = fun[key].toString(); |
| } |
| } |
| |
| const response = await db.fetch('_temp_view' + params, { |
| headers: new nodeFetch.Headers({'Content-Type': 'application/json'}), |
| method: 'POST', |
| body: JSON.stringify(body) |
| }); |
| |
| ok = response.ok; |
| // status = response.status; |
| const result = await response.json(); |
| if (!ok) { |
| result.status = response.status; |
| throw generateErrorFromResponse(result); |
| } |
| |
| return new Promise(function (resolve) { |
| resolve(result); |
| }).then(postprocessAttachments(opts)); |
| } |
| |
| // custom adapters can define their own api._query |
| // and override the default behavior |
| /* istanbul ignore next */ |
| function customQuery(db, fun, opts) { |
| return new Promise(function (resolve, reject) { |
| db._query(fun, opts, function (err, res$$1) { |
| if (err) { |
| return reject(err); |
| } |
| resolve(res$$1); |
| }); |
| }); |
| } |
| |
| // custom adapters can define their own api._viewCleanup |
| // and override the default behavior |
| /* istanbul ignore next */ |
| function customViewCleanup(db) { |
| return new Promise(function (resolve, reject) { |
| db._viewCleanup(function (err, res$$1) { |
| if (err) { |
| return reject(err); |
| } |
| resolve(res$$1); |
| }); |
| }); |
| } |
| |
| function defaultsTo(value) { |
| return function (reason) { |
| /* istanbul ignore else */ |
| if (reason.status === 404) { |
| return value; |
| } else { |
| throw reason; |
| } |
| }; |
| } |
| |
| // returns a promise for a list of docs to update, based on the input docId. |
| // the order doesn't matter, because post-3.2.0, bulkDocs |
| // is an atomic operation in all three adapters. |
| async function getDocsToPersist(docId, view, docIdsToChangesAndEmits) { |
| const metaDocId = '_local/doc_' + docId; |
| const defaultMetaDoc = {_id: metaDocId, keys: []}; |
| const docData = docIdsToChangesAndEmits.get(docId); |
| const indexableKeysToKeyValues = docData[0]; |
| const changes = docData[1]; |
| |
| function getMetaDoc() { |
| if (isGenOne(changes)) { |
| // generation 1, so we can safely assume initial state |
| // for performance reasons (avoids unnecessary GETs) |
| return Promise.resolve(defaultMetaDoc); |
| } |
| return view.db.get(metaDocId).catch(defaultsTo(defaultMetaDoc)); |
| } |
| |
| function getKeyValueDocs(metaDoc) { |
| if (!metaDoc.keys.length) { |
| // no keys, no need for a lookup |
| return Promise.resolve({rows: []}); |
| } |
| return view.db.allDocs({ |
| keys: metaDoc.keys, |
| include_docs: true |
| }); |
| } |
| |
| function processKeyValueDocs(metaDoc, kvDocsRes) { |
| const kvDocs = []; |
| const oldKeys = new Set(); |
| |
| for (const row of kvDocsRes.rows) { |
| const doc = row.doc; |
| if (!doc) { // deleted |
| continue; |
| } |
| kvDocs.push(doc); |
| oldKeys.add(doc._id); |
| doc._deleted = !indexableKeysToKeyValues.has(doc._id); |
| if (!doc._deleted) { |
| const keyValue = indexableKeysToKeyValues.get(doc._id); |
| if ('value' in keyValue) { |
| doc.value = keyValue.value; |
| } |
| } |
| } |
| const newKeys = mapToKeysArray(indexableKeysToKeyValues); |
| for (const key of newKeys) { |
| if (!oldKeys.has(key)) { |
| // new doc |
| const kvDoc = { |
| _id: key |
| }; |
| const keyValue = indexableKeysToKeyValues.get(key); |
| if ('value' in keyValue) { |
| kvDoc.value = keyValue.value; |
| } |
| kvDocs.push(kvDoc); |
| } |
| } |
| metaDoc.keys = uniq(newKeys.concat(metaDoc.keys)); |
| kvDocs.push(metaDoc); |
| |
| return kvDocs; |
| } |
| |
| const metaDoc = await getMetaDoc(); |
| const keyValueDocs = await getKeyValueDocs(metaDoc); |
| return processKeyValueDocs(metaDoc, keyValueDocs); |
| } |
| |
| function updatePurgeSeq(view) { |
| // with this approach, we just assume to have processed all missing purges and write the latest |
| // purgeSeq into the _local/purgeSeq doc. |
| return view.sourceDB.get('_local/purges').then(function (res$$1) { |
| const purgeSeq = res$$1.purgeSeq; |
| return view.db.get('_local/purgeSeq').then(function (res$$1) { |
| return res$$1._rev; |
| }) |
| .catch(defaultsTo(undefined)) |
| .then(function (rev$$1) { |
| return view.db.put({ |
| _id: '_local/purgeSeq', |
| _rev: rev$$1, |
| purgeSeq, |
| }); |
| }); |
| }).catch(function (err) { |
| if (err.status !== 404) { |
| throw err; |
| } |
| }); |
| } |
| |
| // updates all emitted key/value docs and metaDocs in the mrview database |
| // for the given batch of documents from the source database |
| function saveKeyValues(view, docIdsToChangesAndEmits, seq) { |
| var seqDocId = '_local/lastSeq'; |
| return view.db.get(seqDocId) |
| .catch(defaultsTo({_id: seqDocId, seq: 0})) |
| .then(function (lastSeqDoc) { |
| var docIds = mapToKeysArray(docIdsToChangesAndEmits); |
| return Promise.all(docIds.map(function (docId) { |
| return getDocsToPersist(docId, view, docIdsToChangesAndEmits); |
| })).then(function (listOfDocsToPersist) { |
| var docsToPersist = listOfDocsToPersist.flat(); |
| lastSeqDoc.seq = seq; |
| docsToPersist.push(lastSeqDoc); |
| // write all docs in a single operation, update the seq once |
| return view.db.bulkDocs({docs : docsToPersist}); |
| }) |
| // TODO: this should be placed somewhere else, probably? we're querying both docs twice |
| // (first time when getting the actual purges). |
| .then(() => updatePurgeSeq(view)); |
| }); |
| } |
| |
| function getQueue(view) { |
| const viewName = typeof view === 'string' ? view : view.name; |
| let queue = persistentQueues[viewName]; |
| if (!queue) { |
| queue = persistentQueues[viewName] = new TaskQueue$1(); |
| } |
| return queue; |
| } |
| |
| async function updateView(view, opts) { |
| return sequentialize(getQueue(view), function () { |
| return updateViewInQueue(view, opts); |
| })(); |
| } |
| |
| async function updateViewInQueue(view, opts) { |
| // bind the emit function once |
| let mapResults; |
| let doc; |
| let taskId; |
| |
| function emit(key, value) { |
| const output = {id: doc._id, key: normalizeKey(key)}; |
| // Don't explicitly store the value unless it's defined and non-null. |
| // This saves on storage space, because often people don't use it. |
| if (typeof value !== 'undefined' && value !== null) { |
| output.value = normalizeKey(value); |
| } |
| mapResults.push(output); |
| } |
| |
| const mapFun = mapper(view.mapFun, emit); |
| |
| let currentSeq = view.seq || 0; |
| |
| function createTask() { |
| return view.sourceDB.info().then(function (info) { |
| taskId = view.sourceDB.activeTasks.add({ |
| name: 'view_indexing', |
| total_items: info.update_seq - currentSeq, |
| }); |
| }); |
| } |
| |
| function processChange(docIdsToChangesAndEmits, seq) { |
| return function () { |
| return saveKeyValues(view, docIdsToChangesAndEmits, seq); |
| }; |
| } |
| |
| let indexed_docs = 0; |
| const progress = { |
| view: view.name, |
| indexed_docs |
| }; |
| view.sourceDB.emit('indexing', progress); |
| |
| const queue = new TaskQueue$1(); |
| |
| async function processNextBatch() { |
| const response = await view.sourceDB.changes({ |
| return_docs: true, |
| conflicts: true, |
| include_docs: true, |
| style: 'all_docs', |
| since: currentSeq, |
| limit: opts.changes_batch_size |
| }); |
| const purges = await getRecentPurges(); |
| return processBatch(response, purges); |
| } |
| |
| function getRecentPurges() { |
| return view.db.get('_local/purgeSeq').then(function (res$$1) { |
| return res$$1.purgeSeq; |
| }) |
| .catch(defaultsTo(-1)) |
| .then(function (purgeSeq) { |
| return view.sourceDB.get('_local/purges').then(function (res$$1) { |
| const recentPurges = res$$1.purges.filter(function (purge, index) { |
| return index > purgeSeq; |
| }).map((purge) => purge.docId); |
| |
| const uniquePurges = recentPurges.filter(function (docId, index) { |
| return recentPurges.indexOf(docId) === index; |
| }); |
| |
| return Promise.all(uniquePurges.map(function (docId) { |
| return view.sourceDB.get(docId).then(function (doc) { |
| return { docId, doc }; |
| }) |
| .catch(defaultsTo({ docId })); |
| })); |
| }) |
| .catch(defaultsTo([])); |
| }); |
| } |
| |
| function processBatch(response, purges) { |
| const results = response.results; |
| if (!results.length && !purges.length) { |
| return; |
| } |
| |
| for (const purge of purges) { |
| const index = results.findIndex(function (change) { |
| return change.id === purge.docId; |
| }); |
| if (index < 0) { |
| // mimic a db.remove() on the changes feed |
| const entry = { |
| _id: purge.docId, |
| doc: { |
| _id: purge.docId, |
| _deleted: 1, |
| }, |
| changes: [], |
| }; |
| |
| if (purge.doc) { |
| // update with new winning rev after purge |
| entry.doc = purge.doc; |
| entry.changes.push({ rev: purge.doc._rev }); |
| } |
| |
| results.push(entry); |
| } |
| } |
| |
| const docIdsToChangesAndEmits = createDocIdsToChangesAndEmits(results); |
| |
| queue.add(processChange(docIdsToChangesAndEmits, currentSeq)); |
| |
| indexed_docs = indexed_docs + results.length; |
| const progress = { |
| view: view.name, |
| last_seq: response.last_seq, |
| results_count: results.length, |
| indexed_docs |
| }; |
| view.sourceDB.emit('indexing', progress); |
| view.sourceDB.activeTasks.update(taskId, {completed_items: indexed_docs}); |
| |
| if (results.length < opts.changes_batch_size) { |
| return; |
| } |
| return processNextBatch(); |
| } |
| |
| function createDocIdsToChangesAndEmits(results) { |
| const docIdsToChangesAndEmits = new Map(); |
| for (const change of results) { |
| if (change.doc._id[0] !== '_') { |
| mapResults = []; |
| doc = change.doc; |
| |
| if (!doc._deleted) { |
| tryMap(view.sourceDB, mapFun, doc); |
| } |
| mapResults.sort(sortByKeyThenValue); |
| |
| const indexableKeysToKeyValues = createIndexableKeysToKeyValues(mapResults); |
| docIdsToChangesAndEmits.set(change.doc._id, [ |
| indexableKeysToKeyValues, |
| change.changes |
| ]); |
| } |
| currentSeq = change.seq; |
| } |
| return docIdsToChangesAndEmits; |
| } |
| |
| function createIndexableKeysToKeyValues(mapResults) { |
| const indexableKeysToKeyValues = new Map(); |
| let lastKey; |
| for (let i = 0, len = mapResults.length; i < len; i++) { |
| const emittedKeyValue = mapResults[i]; |
| const complexKey = [emittedKeyValue.key, emittedKeyValue.id]; |
| if (i > 0 && collate(emittedKeyValue.key, lastKey) === 0) { |
| complexKey.push(i); // dup key+id, so make it unique |
| } |
| indexableKeysToKeyValues.set(toIndexableString(complexKey), emittedKeyValue); |
| lastKey = emittedKeyValue.key; |
| } |
| return indexableKeysToKeyValues; |
| } |
| |
| try { |
| await createTask(); |
| await processNextBatch(); |
| await queue.finish(); |
| view.seq = currentSeq; |
| view.sourceDB.activeTasks.remove(taskId); |
| } catch (error) { |
| view.sourceDB.activeTasks.remove(taskId, error); |
| } |
| } |
| |
| function reduceView(view, results, options) { |
| if (options.group_level === 0) { |
| delete options.group_level; |
| } |
| |
| const shouldGroup = options.group || options.group_level; |
| const reduceFun = reducer(view.reduceFun); |
| const groups = []; |
| const lvl = isNaN(options.group_level) |
| ? Number.POSITIVE_INFINITY |
| : options.group_level; |
| |
| for (const result of results) { |
| const last = groups[groups.length - 1]; |
| let groupKey = shouldGroup ? result.key : null; |
| |
| // only set group_level for array keys |
| if (shouldGroup && Array.isArray(groupKey)) { |
| groupKey = groupKey.slice(0, lvl); |
| } |
| |
| if (last && collate(last.groupKey, groupKey) === 0) { |
| last.keys.push([result.key, result.id]); |
| last.values.push(result.value); |
| continue; |
| } |
| groups.push({ |
| keys: [[result.key, result.id]], |
| values: [result.value], |
| groupKey |
| }); |
| } |
| |
| results = []; |
| for (const group of groups) { |
| const reduceTry = tryReduce(view.sourceDB, reduceFun, group.keys, group.values, false); |
| if (reduceTry.error && reduceTry.error instanceof BuiltInError) { |
| // CouchDB returns an error if a built-in errors out |
| throw reduceTry.error; |
| } |
| results.push({ |
| // CouchDB just sets the value to null if a non-built-in errors out |
| value: reduceTry.error ? null : reduceTry.output, |
| key: group.groupKey |
| }); |
| } |
| // no total_rows/offset when reducing |
| return { rows: sliceResults(results, options.limit, options.skip) }; |
| } |
| |
| function queryView(view, opts) { |
| return sequentialize(getQueue(view), function () { |
| return queryViewInQueue(view, opts); |
| })(); |
| } |
| |
| async function queryViewInQueue(view, opts) { |
| let totalRows; |
| const shouldReduce = view.reduceFun && opts.reduce !== false; |
| const skip = opts.skip || 0; |
| if (typeof opts.keys !== 'undefined' && !opts.keys.length) { |
| // equivalent query |
| opts.limit = 0; |
| delete opts.keys; |
| } |
| |
| async function fetchFromView(viewOpts) { |
| viewOpts.include_docs = true; |
| const res$$1 = await view.db.allDocs(viewOpts); |
| totalRows = res$$1.total_rows; |
| |
| return res$$1.rows.map(function (result) { |
| // implicit migration - in older versions of PouchDB, |
| // we explicitly stored the doc as {id: ..., key: ..., value: ...} |
| // this is tested in a migration test |
| /* istanbul ignore next */ |
| if ('value' in result.doc && typeof result.doc.value === 'object' && |
| result.doc.value !== null) { |
| const keys = Object.keys(result.doc.value).sort(); |
| // this detection method is not perfect, but it's unlikely the user |
| // emitted a value which was an object with these 3 exact keys |
| const expectedKeys = ['id', 'key', 'value']; |
| if (!(keys < expectedKeys || keys > expectedKeys)) { |
| return result.doc.value; |
| } |
| } |
| |
| const parsedKeyAndDocId = parseIndexableString(result.doc._id); |
| return { |
| key: parsedKeyAndDocId[0], |
| id: parsedKeyAndDocId[1], |
| value: ('value' in result.doc ? result.doc.value : null) |
| }; |
| }); |
| } |
| |
| async function onMapResultsReady(rows) { |
| let finalResults; |
| if (shouldReduce) { |
| finalResults = reduceView(view, rows, opts); |
| } else if (typeof opts.keys === 'undefined') { |
| finalResults = { |
| total_rows: totalRows, |
| offset: skip, |
| rows |
| }; |
| } else { |
| // support limit, skip for keys query |
| finalResults = { |
| total_rows: totalRows, |
| offset: skip, |
| rows: sliceResults(rows,opts.limit,opts.skip) |
| }; |
| } |
| /* istanbul ignore if */ |
| if (opts.update_seq) { |
| finalResults.update_seq = view.seq; |
| } |
| if (opts.include_docs) { |
| const docIds = uniq(rows.map(rowToDocId)); |
| |
| const allDocsRes = await view.sourceDB.allDocs({ |
| keys: docIds, |
| include_docs: true, |
| conflicts: opts.conflicts, |
| attachments: opts.attachments, |
| binary: opts.binary |
| }); |
| const docIdsToDocs = new Map(); |
| for (const row of allDocsRes.rows) { |
| docIdsToDocs.set(row.id, row.doc); |
| } |
| for (const row of rows) { |
| const docId = rowToDocId(row); |
| const doc = docIdsToDocs.get(docId); |
| if (doc) { |
| row.doc = doc; |
| } |
| } |
| } |
| return finalResults; |
| } |
| |
| if (typeof opts.keys !== 'undefined') { |
| const keys = opts.keys; |
| const fetchPromises = keys.map(function (key) { |
| const viewOpts = { |
| startkey : toIndexableString([key]), |
| endkey : toIndexableString([key, {}]) |
| }; |
| /* istanbul ignore if */ |
| if (opts.update_seq) { |
| viewOpts.update_seq = true; |
| } |
| return fetchFromView(viewOpts); |
| }); |
| const result = await Promise.all(fetchPromises); |
| const flattenedResult = result.flat(); |
| return onMapResultsReady(flattenedResult); |
| } else { // normal query, no 'keys' |
| const viewOpts = { |
| descending : opts.descending |
| }; |
| /* istanbul ignore if */ |
| if (opts.update_seq) { |
| viewOpts.update_seq = true; |
| } |
| let startkey; |
| let endkey; |
| if ('start_key' in opts) { |
| startkey = opts.start_key; |
| } |
| if ('startkey' in opts) { |
| startkey = opts.startkey; |
| } |
| if ('end_key' in opts) { |
| endkey = opts.end_key; |
| } |
| if ('endkey' in opts) { |
| endkey = opts.endkey; |
| } |
| if (typeof startkey !== 'undefined') { |
| viewOpts.startkey = opts.descending ? |
| toIndexableString([startkey, {}]) : |
| toIndexableString([startkey]); |
| } |
| if (typeof endkey !== 'undefined') { |
| let inclusiveEnd = opts.inclusive_end !== false; |
| if (opts.descending) { |
| inclusiveEnd = !inclusiveEnd; |
| } |
| |
| viewOpts.endkey = toIndexableString( |
| inclusiveEnd ? [endkey, {}] : [endkey]); |
| } |
| if (typeof opts.key !== 'undefined') { |
| const keyStart = toIndexableString([opts.key]); |
| const keyEnd = toIndexableString([opts.key, {}]); |
| if (viewOpts.descending) { |
| viewOpts.endkey = keyStart; |
| viewOpts.startkey = keyEnd; |
| } else { |
| viewOpts.startkey = keyStart; |
| viewOpts.endkey = keyEnd; |
| } |
| } |
| if (!shouldReduce) { |
| if (typeof opts.limit === 'number') { |
| viewOpts.limit = opts.limit; |
| } |
| viewOpts.skip = skip; |
| } |
| |
| const result = await fetchFromView(viewOpts); |
| return onMapResultsReady(result); |
| } |
| } |
| |
| async function httpViewCleanup(db) { |
| const response = await db.fetch('_view_cleanup', { |
| headers: new nodeFetch.Headers({'Content-Type': 'application/json'}), |
| method: 'POST' |
| }); |
| return response.json(); |
| } |
| |
| async function localViewCleanup(db) { |
| try { |
| const metaDoc = await db.get('_local/' + localDocName); |
| const docsToViews = new Map(); |
| |
| for (const fullViewName of Object.keys(metaDoc.views)) { |
| const parts = parseViewName(fullViewName); |
| const designDocName = '_design/' + parts[0]; |
| const viewName = parts[1]; |
| let views = docsToViews.get(designDocName); |
| if (!views) { |
| views = new Set(); |
| docsToViews.set(designDocName, views); |
| } |
| views.add(viewName); |
| } |
| const opts = { |
| keys : mapToKeysArray(docsToViews), |
| include_docs : true |
| }; |
| |
| const res$$1 = await db.allDocs(opts); |
| const viewsToStatus = {}; |
| for (const row of res$$1.rows) { |
| const ddocName = row.key.substring(8); // cuts off '_design/' |
| for (const viewName of docsToViews.get(row.key)) { |
| let fullViewName = ddocName + '/' + viewName; |
| /* istanbul ignore if */ |
| if (!metaDoc.views[fullViewName]) { |
| // new format, without slashes, to support PouchDB 2.2.0 |
| // migration test in pouchdb's browser.migration.js verifies this |
| fullViewName = viewName; |
| } |
| const viewDBNames = Object.keys(metaDoc.views[fullViewName]); |
| // design doc deleted, or view function nonexistent |
| const statusIsGood = row.doc && row.doc.views && |
| row.doc.views[viewName]; |
| for (const viewDBName of viewDBNames) { |
| viewsToStatus[viewDBName] = viewsToStatus[viewDBName] || statusIsGood; |
| } |
| } |
| } |
| |
| const dbsToDelete = Object.keys(viewsToStatus) |
| .filter(function (viewDBName) { return !viewsToStatus[viewDBName]; }); |
| |
| const destroyPromises = dbsToDelete.map(function (viewDBName) { |
| return sequentialize(getQueue(viewDBName), function () { |
| return new db.constructor(viewDBName, db.__opts).destroy(); |
| })(); |
| }); |
| |
| return Promise.all(destroyPromises).then(function () { |
| return {ok: true}; |
| }); |
| } catch (err) { |
| if (err.status === 404) { |
| return {ok: true}; |
| } else { |
| throw err; |
| } |
| } |
| } |
| |
| async function queryPromised(db, fun, opts) { |
| /* istanbul ignore next */ |
| if (typeof db._query === 'function') { |
| return customQuery(db, fun, opts); |
| } |
| if (isRemote(db)) { |
| return httpQuery(db, fun, opts); |
| } |
| |
| const updateViewOpts = { |
| changes_batch_size: db.__opts.view_update_changes_batch_size || CHANGES_BATCH_SIZE$1 |
| }; |
| |
| if (typeof fun !== 'string') { |
| // temp_view |
| checkQueryParseError(opts, fun); |
| |
| tempViewQueue.add(async function () { |
| const view = await createView( |
| /* sourceDB */ db, |
| /* viewName */ 'temp_view/temp_view', |
| /* mapFun */ fun.map, |
| /* reduceFun */ fun.reduce, |
| /* temporary */ true, |
| /* localDocName */ localDocName); |
| |
| return fin(updateView(view, updateViewOpts).then( |
| function () { return queryView(view, opts); }), |
| function () { return view.db.destroy(); } |
| ); |
| }); |
| return tempViewQueue.finish(); |
| } else { |
| // persistent view |
| const fullViewName = fun; |
| const parts = parseViewName(fullViewName); |
| const designDocName = parts[0]; |
| const viewName = parts[1]; |
| |
| const doc = await db.get('_design/' + designDocName); |
| fun = doc.views && doc.views[viewName]; |
| |
| if (!fun) { |
| // basic validator; it's assumed that every subclass would want this |
| throw new NotFoundError$1(`ddoc ${doc._id} has no view named ${viewName}`); |
| } |
| |
| ddocValidator(doc, viewName); |
| checkQueryParseError(opts, fun); |
| |
| const view = await createView( |
| /* sourceDB */ db, |
| /* viewName */ fullViewName, |
| /* mapFun */ fun.map, |
| /* reduceFun */ fun.reduce, |
| /* temporary */ false, |
| /* localDocName */ localDocName); |
| |
| if (opts.stale === 'ok' || opts.stale === 'update_after') { |
| if (opts.stale === 'update_after') { |
| nextTick(function () { |
| updateView(view, updateViewOpts); |
| }); |
| } |
| return queryView(view, opts); |
| } else { // stale not ok |
| await updateView(view, updateViewOpts); |
| return queryView(view, opts); |
| } |
| } |
| } |
| |
| function abstractQuery(fun, opts, callback) { |
| const db = this; |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| opts = opts ? coerceOptions(opts) : {}; |
| |
| if (typeof fun === 'function') { |
| fun = {map : fun}; |
| } |
| |
| const promise = Promise.resolve().then(function () { |
| return queryPromised(db, fun, opts); |
| }); |
| promisedCallback(promise, callback); |
| return promise; |
| } |
| |
| const abstractViewCleanup = callbackify(function () { |
| const db = this; |
| /* istanbul ignore next */ |
| if (typeof db._viewCleanup === 'function') { |
| return customViewCleanup(db); |
| } |
| if (isRemote(db)) { |
| return httpViewCleanup(db); |
| } |
| return localViewCleanup(db); |
| }); |
| |
| return { |
| query: abstractQuery, |
| viewCleanup: abstractViewCleanup |
| }; |
| } |
| |
| var builtInReduce = { |
| _sum: function (keys, values) { |
| return sum(values); |
| }, |
| |
| _count: function (keys, values) { |
| return values.length; |
| }, |
| |
| _stats: function (keys, values) { |
| // no need to implement rereduce=true, because Pouch |
| // will never call it |
| function sumsqr(values) { |
| var _sumsqr = 0; |
| for (var i = 0, len = values.length; i < len; i++) { |
| var num = values[i]; |
| _sumsqr += (num * num); |
| } |
| return _sumsqr; |
| } |
| return { |
| sum : sum(values), |
| min : Math.min.apply(null, values), |
| max : Math.max.apply(null, values), |
| count : values.length, |
| sumsqr : sumsqr(values) |
| }; |
| } |
| }; |
| |
| function getBuiltIn(reduceFunString) { |
| if (/^_sum/.test(reduceFunString)) { |
| return builtInReduce._sum; |
| } else if (/^_count/.test(reduceFunString)) { |
| return builtInReduce._count; |
| } else if (/^_stats/.test(reduceFunString)) { |
| return builtInReduce._stats; |
| } else if (/^_/.test(reduceFunString)) { |
| throw new Error(reduceFunString + ' is not a supported reduce function.'); |
| } |
| } |
| |
| function mapper(mapFun, emit) { |
| // for temp_views one can use emit(doc, emit), see #38 |
| if (typeof mapFun === "function" && mapFun.length === 2) { |
| var origMap = mapFun; |
| return function (doc) { |
| return origMap(doc, emit); |
| }; |
| } else { |
| return evalFunction(mapFun.toString(), emit); |
| } |
| } |
| |
| function reducer(reduceFun) { |
| var reduceFunString = reduceFun.toString(); |
| var builtIn = getBuiltIn(reduceFunString); |
| if (builtIn) { |
| return builtIn; |
| } else { |
| return evalFunction(reduceFunString); |
| } |
| } |
| |
| function ddocValidator(ddoc, viewName) { |
| var fun = ddoc.views && ddoc.views[viewName]; |
| if (typeof fun.map !== 'string') { |
| throw new NotFoundError$1('ddoc ' + ddoc._id + ' has no string view named ' + |
| viewName + ', instead found object of type: ' + typeof fun.map); |
| } |
| } |
| |
| var localDocName = 'mrviews'; |
| var abstract = createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator); |
| |
| function query(fun, opts, callback) { |
| return abstract.query.call(this, fun, opts, callback); |
| } |
| |
| function viewCleanup(callback) { |
| return abstract.viewCleanup.call(this, callback); |
| } |
| |
| var mapreduce = { |
| query, |
| viewCleanup |
| }; |
| |
| function fileHasChanged(localDoc, remoteDoc, filename) { |
| return !localDoc._attachments || |
| !localDoc._attachments[filename] || |
| localDoc._attachments[filename].digest !== remoteDoc._attachments[filename].digest; |
| } |
| |
| function getDocAttachments(db, doc) { |
| var filenames = Object.keys(doc._attachments); |
| return Promise.all(filenames.map(function (filename) { |
| return db.getAttachment(doc._id, filename, {rev: doc._rev}); |
| })); |
| } |
| |
| function getDocAttachmentsFromTargetOrSource(target, src, doc) { |
| var doCheckForLocalAttachments = isRemote(src) && !isRemote(target); |
| var filenames = Object.keys(doc._attachments); |
| |
| if (!doCheckForLocalAttachments) { |
| return getDocAttachments(src, doc); |
| } |
| |
| return target.get(doc._id).then(function (localDoc) { |
| return Promise.all(filenames.map(function (filename) { |
| if (fileHasChanged(localDoc, doc, filename)) { |
| return src.getAttachment(doc._id, filename); |
| } |
| |
| return target.getAttachment(localDoc._id, filename); |
| })); |
| }).catch(function (error) { |
| /* istanbul ignore if */ |
| if (error.status !== 404) { |
| throw error; |
| } |
| |
| return getDocAttachments(src, doc); |
| }); |
| } |
| |
| function createBulkGetOpts(diffs) { |
| var requests = []; |
| Object.keys(diffs).forEach(function (id) { |
| var missingRevs = diffs[id].missing; |
| missingRevs.forEach(function (missingRev) { |
| requests.push({ |
| id, |
| rev: missingRev |
| }); |
| }); |
| }); |
| |
| return { |
| docs: requests, |
| revs: true, |
| latest: true |
| }; |
| } |
| |
| // |
| // Fetch all the documents from the src as described in the "diffs", |
| // which is a mapping of docs IDs to revisions. If the state ever |
| // changes to "cancelled", then the returned promise will be rejected. |
| // Else it will be resolved with a list of fetched documents. |
| // |
| function getDocs(src, target, diffs, state) { |
| diffs = clone(diffs); // we do not need to modify this |
| |
| var resultDocs = [], |
| ok = true; |
| |
| function getAllDocs() { |
| |
| var bulkGetOpts = createBulkGetOpts(diffs); |
| |
| if (!bulkGetOpts.docs.length) { // optimization: skip empty requests |
| return; |
| } |
| |
| return src.bulkGet(bulkGetOpts).then(function (bulkGetResponse) { |
| /* istanbul ignore if */ |
| if (state.cancelled) { |
| throw new Error('cancelled'); |
| } |
| return Promise.all(bulkGetResponse.results.map(function (bulkGetInfo) { |
| return Promise.all(bulkGetInfo.docs.map(function (doc) { |
| var remoteDoc = doc.ok; |
| |
| if (doc.error) { |
| // when AUTO_COMPACTION is set, docs can be returned which look |
| // like this: {"missing":"1-7c3ac256b693c462af8442f992b83696"} |
| ok = false; |
| } |
| |
| if (!remoteDoc || !remoteDoc._attachments) { |
| return remoteDoc; |
| } |
| |
| return getDocAttachmentsFromTargetOrSource(target, src, remoteDoc).then((attachments) => { |
| var filenames = Object.keys(remoteDoc._attachments); |
| attachments.forEach(function (attachment, i) { |
| var att = remoteDoc._attachments[filenames[i]]; |
| delete att.stub; |
| delete att.length; |
| att.data = attachment; |
| }); |
| |
| return remoteDoc; |
| }); |
| })); |
| })) |
| |
| .then(function (results) { |
| resultDocs = resultDocs.concat(results.flat().filter(Boolean)); |
| }); |
| }); |
| } |
| |
| function returnResult() { |
| return { ok, docs:resultDocs }; |
| } |
| |
| return Promise.resolve() |
| .then(getAllDocs) |
| .then(returnResult); |
| } |
| |
| var CHECKPOINT_VERSION = 1; |
| var REPLICATOR = "pouchdb"; |
| // This is an arbitrary number to limit the |
| // amount of replication history we save in the checkpoint. |
| // If we save too much, the checkpoint docs will become very big, |
| // if we save fewer, we'll run a greater risk of having to |
| // read all the changes from 0 when checkpoint PUTs fail |
| // CouchDB 2.0 has a more involved history pruning, |
| // but let's go for the simple version for now. |
| var CHECKPOINT_HISTORY_SIZE = 5; |
| var LOWEST_SEQ = 0; |
| |
| function updateCheckpoint(db, id, checkpoint, session, returnValue) { |
| return db.get(id).catch(function (err) { |
| if (err.status === 404) { |
| if (db.adapter === 'http' || db.adapter === 'https') ; |
| return { |
| session_id: session, |
| _id: id, |
| history: [], |
| replicator: REPLICATOR, |
| version: CHECKPOINT_VERSION |
| }; |
| } |
| throw err; |
| }).then(function (doc) { |
| if (returnValue.cancelled) { |
| return; |
| } |
| |
| // if the checkpoint has not changed, do not update |
| if (doc.last_seq === checkpoint) { |
| return; |
| } |
| |
| // Filter out current entry for this replication |
| doc.history = (doc.history || []).filter(function (item) { |
| return item.session_id !== session; |
| }); |
| |
| // Add the latest checkpoint to history |
| doc.history.unshift({ |
| last_seq: checkpoint, |
| session_id: session |
| }); |
| |
| // Just take the last pieces in history, to |
| // avoid really big checkpoint docs. |
| // see comment on history size above |
| doc.history = doc.history.slice(0, CHECKPOINT_HISTORY_SIZE); |
| |
| doc.version = CHECKPOINT_VERSION; |
| doc.replicator = REPLICATOR; |
| |
| doc.session_id = session; |
| doc.last_seq = checkpoint; |
| |
| return db.put(doc).catch(function (err) { |
| if (err.status === 409) { |
| // retry; someone is trying to write a checkpoint simultaneously |
| return updateCheckpoint(db, id, checkpoint, session, returnValue); |
| } |
| throw err; |
| }); |
| }); |
| } |
| |
| class CheckpointerInternal { |
| constructor(src, target, id, returnValue, opts = { |
| writeSourceCheckpoint: true, |
| writeTargetCheckpoint: true, |
| }) { |
| this.src = src; |
| this.target = target; |
| this.id = id; |
| this.returnValue = returnValue; |
| this.opts = opts; |
| |
| if (typeof opts.writeSourceCheckpoint === "undefined") { |
| opts.writeSourceCheckpoint = true; |
| } |
| |
| if (typeof opts.writeTargetCheckpoint === "undefined") { |
| opts.writeTargetCheckpoint = true; |
| } |
| } |
| |
| writeCheckpoint(checkpoint, session) { |
| var self = this; |
| return this.updateTarget(checkpoint, session).then(function () { |
| return self.updateSource(checkpoint, session); |
| }); |
| } |
| |
| updateTarget(checkpoint, session) { |
| if (this.opts.writeTargetCheckpoint) { |
| return updateCheckpoint(this.target, this.id, checkpoint, |
| session, this.returnValue); |
| } else { |
| return Promise.resolve(true); |
| } |
| } |
| |
| updateSource(checkpoint, session) { |
| if (this.opts.writeSourceCheckpoint) { |
| var self = this; |
| return updateCheckpoint(this.src, this.id, checkpoint, |
| session, this.returnValue) |
| .catch(function (err) { |
| if (isForbiddenError(err)) { |
| self.opts.writeSourceCheckpoint = false; |
| return true; |
| } |
| throw err; |
| }); |
| } else { |
| return Promise.resolve(true); |
| } |
| } |
| |
| getCheckpoint() { |
| var self = this; |
| |
| if (!self.opts.writeSourceCheckpoint && !self.opts.writeTargetCheckpoint) { |
| return Promise.resolve(LOWEST_SEQ); |
| } |
| |
| if (self.opts && self.opts.writeSourceCheckpoint && !self.opts.writeTargetCheckpoint) { |
| return self.src.get(self.id).then(function (sourceDoc) { |
| return sourceDoc.last_seq || LOWEST_SEQ; |
| }).catch(function (err) { |
| /* istanbul ignore if */ |
| if (err.status !== 404) { |
| throw err; |
| } |
| return LOWEST_SEQ; |
| }); |
| } |
| |
| return self.target.get(self.id).then(function (targetDoc) { |
| if (self.opts && self.opts.writeTargetCheckpoint && !self.opts.writeSourceCheckpoint) { |
| return targetDoc.last_seq || LOWEST_SEQ; |
| } |
| |
| return self.src.get(self.id).then(function (sourceDoc) { |
| // Since we can't migrate an old version doc to a new one |
| // (no session id), we just go with the lowest seq in this case |
| /* istanbul ignore if */ |
| if (targetDoc.version !== sourceDoc.version) { |
| return LOWEST_SEQ; |
| } |
| |
| var version; |
| if (targetDoc.version) { |
| version = targetDoc.version.toString(); |
| } else { |
| version = "undefined"; |
| } |
| |
| if (version in comparisons) { |
| return comparisons[version](targetDoc, sourceDoc); |
| } |
| /* istanbul ignore next */ |
| return LOWEST_SEQ; |
| }, function (err) { |
| if (err.status === 404 && targetDoc.last_seq) { |
| return self.src.put({ |
| _id: self.id, |
| last_seq: LOWEST_SEQ |
| }).then(function () { |
| return LOWEST_SEQ; |
| }, function (err) { |
| if (isForbiddenError(err)) { |
| self.opts.writeSourceCheckpoint = false; |
| return targetDoc.last_seq; |
| } |
| /* istanbul ignore next */ |
| return LOWEST_SEQ; |
| }); |
| } |
| throw err; |
| }); |
| }).catch(function (err) { |
| if (err.status !== 404) { |
| throw err; |
| } |
| return LOWEST_SEQ; |
| }); |
| } |
| } |
| |
| var comparisons = { |
| "undefined": function (targetDoc, sourceDoc) { |
| // This is the previous comparison function |
| if (collate(targetDoc.last_seq, sourceDoc.last_seq) === 0) { |
| return sourceDoc.last_seq; |
| } |
| /* istanbul ignore next */ |
| return 0; |
| }, |
| "1": function (targetDoc, sourceDoc) { |
| // This is the comparison function ported from CouchDB |
| return compareReplicationLogs(sourceDoc, targetDoc).last_seq; |
| } |
| }; |
| |
| // This checkpoint comparison is ported from CouchDBs source |
| // they come from here: |
| // https://github.com/apache/couchdb-couch-replicator/blob/master/src/couch_replicator.erl#L863-L906 |
| |
| function compareReplicationLogs(srcDoc, tgtDoc) { |
| if (srcDoc.session_id === tgtDoc.session_id) { |
| return { |
| last_seq: srcDoc.last_seq, |
| history: srcDoc.history |
| }; |
| } |
| |
| return compareReplicationHistory(srcDoc.history, tgtDoc.history); |
| } |
| |
| function compareReplicationHistory(sourceHistory, targetHistory) { |
| // the erlang loop via function arguments is not so easy to repeat in JS |
| // therefore, doing this as recursion |
| var S = sourceHistory[0]; |
| var sourceRest = sourceHistory.slice(1); |
| var T = targetHistory[0]; |
| var targetRest = targetHistory.slice(1); |
| |
| if (!S || targetHistory.length === 0) { |
| return { |
| last_seq: LOWEST_SEQ, |
| history: [] |
| }; |
| } |
| |
| var sourceId = S.session_id; |
| /* istanbul ignore if */ |
| if (hasSessionId(sourceId, targetHistory)) { |
| return { |
| last_seq: S.last_seq, |
| history: sourceHistory |
| }; |
| } |
| |
| var targetId = T.session_id; |
| if (hasSessionId(targetId, sourceRest)) { |
| return { |
| last_seq: T.last_seq, |
| history: targetRest |
| }; |
| } |
| |
| return compareReplicationHistory(sourceRest, targetRest); |
| } |
| |
| function hasSessionId(sessionId, history) { |
| var props = history[0]; |
| var rest = history.slice(1); |
| |
| if (!sessionId || history.length === 0) { |
| return false; |
| } |
| |
| if (sessionId === props.session_id) { |
| return true; |
| } |
| |
| return hasSessionId(sessionId, rest); |
| } |
| |
| function isForbiddenError(err) { |
| return typeof err.status === 'number' && Math.floor(err.status / 100) === 4; |
| } |
| |
| function Checkpointer(src, target, id, returnValue, opts) { |
| if (!(this instanceof CheckpointerInternal)) { |
| return new CheckpointerInternal(src, target, id, returnValue, opts); |
| } |
| return Checkpointer; |
| } |
| |
| var STARTING_BACK_OFF = 0; |
| |
| function backOff(opts, returnValue, error, callback) { |
| if (opts.retry === false) { |
| returnValue.emit('error', error); |
| returnValue.removeAllListeners(); |
| return; |
| } |
| /* istanbul ignore if */ |
| if (typeof opts.back_off_function !== 'function') { |
| opts.back_off_function = defaultBackOff; |
| } |
| returnValue.emit('requestError', error); |
| if (returnValue.state === 'active' || returnValue.state === 'pending') { |
| returnValue.emit('paused', error); |
| returnValue.state = 'stopped'; |
| var backOffSet = function backoffTimeSet() { |
| opts.current_back_off = STARTING_BACK_OFF; |
| }; |
| var removeBackOffSetter = function removeBackOffTimeSet() { |
| returnValue.removeListener('active', backOffSet); |
| }; |
| returnValue.once('paused', removeBackOffSetter); |
| returnValue.once('active', backOffSet); |
| } |
| |
| opts.current_back_off = opts.current_back_off || STARTING_BACK_OFF; |
| opts.current_back_off = opts.back_off_function(opts.current_back_off); |
| setTimeout(callback, opts.current_back_off); |
| } |
| |
| function sortObjectPropertiesByKey(queryParams) { |
| return Object.keys(queryParams).sort(collate).reduce(function (result, key) { |
| result[key] = queryParams[key]; |
| return result; |
| }, {}); |
| } |
| |
| // Generate a unique id particular to this replication. |
| // Not guaranteed to align perfectly with CouchDB's rep ids. |
| function generateReplicationId(src, target, opts) { |
| var docIds = opts.doc_ids ? opts.doc_ids.sort(collate) : ''; |
| var filterFun = opts.filter ? opts.filter.toString() : ''; |
| var queryParams = ''; |
| var filterViewName = ''; |
| var selector = ''; |
| |
| // possibility for checkpoints to be lost here as behaviour of |
| // JSON.stringify is not stable (see #6226) |
| /* istanbul ignore if */ |
| if (opts.selector) { |
| selector = JSON.stringify(opts.selector); |
| } |
| |
| if (opts.filter && opts.query_params) { |
| queryParams = JSON.stringify(sortObjectPropertiesByKey(opts.query_params)); |
| } |
| |
| if (opts.filter && opts.filter === '_view') { |
| filterViewName = opts.view.toString(); |
| } |
| |
| return Promise.all([src.id(), target.id()]).then(function (res) { |
| var queryData = res[0] + res[1] + filterFun + filterViewName + |
| queryParams + docIds + selector; |
| return new Promise(function (resolve) { |
| binaryMd5(queryData, resolve); |
| }); |
| }).then(function (md5sum) { |
| // can't use straight-up md5 alphabet, because |
| // the char '/' is interpreted as being for attachments, |
| // and + is also not url-safe |
| md5sum = md5sum.replace(/\//g, '.').replace(/\+/g, '_'); |
| return '_local/' + md5sum; |
| }); |
| } |
| |
| function replicate(src, target, opts, returnValue, result) { |
| var batches = []; // list of batches to be processed |
| var currentBatch; // the batch currently being processed |
| var pendingBatch = { |
| seq: 0, |
| changes: [], |
| docs: [] |
| }; // next batch, not yet ready to be processed |
| var writingCheckpoint = false; // true while checkpoint is being written |
| var changesCompleted = false; // true when all changes received |
| var replicationCompleted = false; // true when replication has completed |
| // initial_last_seq is the state of the source db before |
| // replication started, and it is _not_ updated during |
| // replication or used anywhere else, as opposed to last_seq |
| var initial_last_seq = 0; |
| var last_seq = 0; |
| var continuous = opts.continuous || opts.live || false; |
| var batch_size = opts.batch_size || 100; |
| var batches_limit = opts.batches_limit || 10; |
| var style = opts.style || 'all_docs'; |
| var changesPending = false; // true while src.changes is running |
| var doc_ids = opts.doc_ids; |
| var selector = opts.selector; |
| var repId; |
| var checkpointer; |
| var changedDocs = []; |
| // Like couchdb, every replication gets a unique session id |
| var session = uuid$1(); |
| var taskId; |
| |
| result = result || { |
| ok: true, |
| start_time: new Date().toISOString(), |
| docs_read: 0, |
| docs_written: 0, |
| doc_write_failures: 0, |
| errors: [] |
| }; |
| |
| var changesOpts = {}; |
| returnValue.ready(src, target); |
| |
| function initCheckpointer() { |
| if (checkpointer) { |
| return Promise.resolve(); |
| } |
| return generateReplicationId(src, target, opts).then(function (res$$1) { |
| repId = res$$1; |
| |
| var checkpointOpts = {}; |
| if (opts.checkpoint === false) { |
| checkpointOpts = { writeSourceCheckpoint: false, writeTargetCheckpoint: false }; |
| } else if (opts.checkpoint === 'source') { |
| checkpointOpts = { writeSourceCheckpoint: true, writeTargetCheckpoint: false }; |
| } else if (opts.checkpoint === 'target') { |
| checkpointOpts = { writeSourceCheckpoint: false, writeTargetCheckpoint: true }; |
| } else { |
| checkpointOpts = { writeSourceCheckpoint: true, writeTargetCheckpoint: true }; |
| } |
| |
| checkpointer = new Checkpointer(src, target, repId, returnValue, checkpointOpts); |
| }); |
| } |
| |
| function writeDocs() { |
| changedDocs = []; |
| |
| if (currentBatch.docs.length === 0) { |
| return; |
| } |
| var docs = currentBatch.docs; |
| var bulkOpts = {timeout: opts.timeout}; |
| return target.bulkDocs({docs, new_edits: false}, bulkOpts).then(function (res$$1) { |
| /* istanbul ignore if */ |
| if (returnValue.cancelled) { |
| completeReplication(); |
| throw new Error('cancelled'); |
| } |
| |
| // `res` doesn't include full documents (which live in `docs`), so we create a map of |
| // (id -> error), and check for errors while iterating over `docs` |
| var errorsById = Object.create(null); |
| res$$1.forEach(function (res$$1) { |
| if (res$$1.error) { |
| errorsById[res$$1.id] = res$$1; |
| } |
| }); |
| |
| var errorsNo = Object.keys(errorsById).length; |
| result.doc_write_failures += errorsNo; |
| result.docs_written += docs.length - errorsNo; |
| |
| docs.forEach(function (doc) { |
| var error = errorsById[doc._id]; |
| if (error) { |
| result.errors.push(error); |
| // Normalize error name. i.e. 'Unauthorized' -> 'unauthorized' (eg Sync Gateway) |
| var errorName = (error.name || '').toLowerCase(); |
| if (errorName === 'unauthorized' || errorName === 'forbidden') { |
| returnValue.emit('denied', clone(error)); |
| } else { |
| throw error; |
| } |
| } else { |
| changedDocs.push(doc); |
| } |
| }); |
| |
| }, function (err) { |
| result.doc_write_failures += docs.length; |
| throw err; |
| }); |
| } |
| |
| function finishBatch() { |
| if (currentBatch.error) { |
| throw new Error('There was a problem getting docs.'); |
| } |
| result.last_seq = last_seq = currentBatch.seq; |
| var outResult = clone(result); |
| if (changedDocs.length) { |
| outResult.docs = changedDocs; |
| // Attach 'pending' property if server supports it (CouchDB 2.0+) |
| /* istanbul ignore if */ |
| if (typeof currentBatch.pending === 'number') { |
| outResult.pending = currentBatch.pending; |
| delete currentBatch.pending; |
| } |
| returnValue.emit('change', outResult); |
| } |
| writingCheckpoint = true; |
| |
| src.info().then(function (info) { |
| var task = src.activeTasks.get(taskId); |
| if (!currentBatch || !task) { |
| return; |
| } |
| |
| var completed = task.completed_items || 0; |
| var total_items = parseInt(info.update_seq, 10) - parseInt(initial_last_seq, 10); |
| src.activeTasks.update(taskId, { |
| completed_items: completed + currentBatch.changes.length, |
| total_items |
| }); |
| }); |
| |
| return checkpointer.writeCheckpoint(currentBatch.seq, |
| session).then(function () { |
| returnValue.emit('checkpoint', { 'checkpoint': currentBatch.seq }); |
| writingCheckpoint = false; |
| /* istanbul ignore if */ |
| if (returnValue.cancelled) { |
| completeReplication(); |
| throw new Error('cancelled'); |
| } |
| currentBatch = undefined; |
| getChanges(); |
| }).catch(function (err) { |
| onCheckpointError(err); |
| throw err; |
| }); |
| } |
| |
| function getDiffs() { |
| var diff = {}; |
| currentBatch.changes.forEach(function (change) { |
| returnValue.emit('checkpoint', { 'revs_diff': change }); |
| // Couchbase Sync Gateway emits these, but we can ignore them |
| /* istanbul ignore if */ |
| if (change.id === "_user/") { |
| return; |
| } |
| diff[change.id] = change.changes.map(function (x) { |
| return x.rev; |
| }); |
| }); |
| return target.revsDiff(diff).then(function (diffs) { |
| /* istanbul ignore if */ |
| if (returnValue.cancelled) { |
| completeReplication(); |
| throw new Error('cancelled'); |
| } |
| // currentBatch.diffs elements are deleted as the documents are written |
| currentBatch.diffs = diffs; |
| }); |
| } |
| |
| function getBatchDocs() { |
| return getDocs(src, target, currentBatch.diffs, returnValue).then(function (got) { |
| currentBatch.error = !got.ok; |
| got.docs.forEach(function (doc) { |
| delete currentBatch.diffs[doc._id]; |
| result.docs_read++; |
| currentBatch.docs.push(doc); |
| }); |
| }); |
| } |
| |
| function startNextBatch() { |
| if (returnValue.cancelled || currentBatch) { |
| return; |
| } |
| if (batches.length === 0) { |
| processPendingBatch(true); |
| return; |
| } |
| currentBatch = batches.shift(); |
| returnValue.emit('checkpoint', { 'start_next_batch': currentBatch.seq }); |
| getDiffs() |
| .then(getBatchDocs) |
| .then(writeDocs) |
| .then(finishBatch) |
| .then(startNextBatch) |
| .catch(function (err) { |
| abortReplication('batch processing terminated with error', err); |
| }); |
| } |
| |
| |
| function processPendingBatch(immediate) { |
| if (pendingBatch.changes.length === 0) { |
| if (batches.length === 0 && !currentBatch) { |
| if ((continuous && changesOpts.live) || changesCompleted) { |
| returnValue.state = 'pending'; |
| returnValue.emit('paused'); |
| } |
| if (changesCompleted) { |
| completeReplication(); |
| } |
| } |
| return; |
| } |
| if ( |
| immediate || |
| changesCompleted || |
| pendingBatch.changes.length >= batch_size |
| ) { |
| batches.push(pendingBatch); |
| pendingBatch = { |
| seq: 0, |
| changes: [], |
| docs: [] |
| }; |
| if (returnValue.state === 'pending' || returnValue.state === 'stopped') { |
| returnValue.state = 'active'; |
| returnValue.emit('active'); |
| } |
| startNextBatch(); |
| } |
| } |
| |
| |
| function abortReplication(reason, err) { |
| if (replicationCompleted) { |
| return; |
| } |
| if (!err.message) { |
| err.message = reason; |
| } |
| result.ok = false; |
| result.status = 'aborting'; |
| batches = []; |
| pendingBatch = { |
| seq: 0, |
| changes: [], |
| docs: [] |
| }; |
| completeReplication(err); |
| } |
| |
| |
| function completeReplication(fatalError) { |
| if (replicationCompleted) { |
| return; |
| } |
| /* istanbul ignore if */ |
| if (returnValue.cancelled) { |
| result.status = 'cancelled'; |
| if (writingCheckpoint) { |
| return; |
| } |
| } |
| result.status = result.status || 'complete'; |
| result.end_time = new Date().toISOString(); |
| result.last_seq = last_seq; |
| replicationCompleted = true; |
| |
| src.activeTasks.remove(taskId, fatalError); |
| |
| if (fatalError) { |
| // need to extend the error because Firefox considers ".result" read-only |
| fatalError = createError(fatalError); |
| fatalError.result = result; |
| |
| // Normalize error name. i.e. 'Unauthorized' -> 'unauthorized' (eg Sync Gateway) |
| var errorName = (fatalError.name || '').toLowerCase(); |
| if (errorName === 'unauthorized' || errorName === 'forbidden') { |
| returnValue.emit('error', fatalError); |
| returnValue.removeAllListeners(); |
| } else { |
| backOff(opts, returnValue, fatalError, function () { |
| replicate(src, target, opts, returnValue); |
| }); |
| } |
| } else { |
| returnValue.emit('complete', result); |
| returnValue.removeAllListeners(); |
| } |
| } |
| |
| function onChange(change, pending, lastSeq) { |
| /* istanbul ignore if */ |
| if (returnValue.cancelled) { |
| return completeReplication(); |
| } |
| // Attach 'pending' property if server supports it (CouchDB 2.0+) |
| /* istanbul ignore if */ |
| if (typeof pending === 'number') { |
| pendingBatch.pending = pending; |
| } |
| |
| var filter = filterChange(opts)(change); |
| if (!filter) { |
| // update processed items count by 1 |
| var task = src.activeTasks.get(taskId); |
| if (task) { |
| // we can assume that task exists here? shouldn't be deleted by here. |
| var completed = task.completed_items || 0; |
| src.activeTasks.update(taskId, {completed_items: ++completed}); |
| } |
| return; |
| } |
| pendingBatch.seq = change.seq || lastSeq; |
| pendingBatch.changes.push(change); |
| returnValue.emit('checkpoint', { 'pending_batch': pendingBatch.seq }); |
| nextTick(function () { |
| processPendingBatch(batches.length === 0 && changesOpts.live); |
| }); |
| } |
| |
| |
| function onChangesComplete(changes) { |
| changesPending = false; |
| /* istanbul ignore if */ |
| if (returnValue.cancelled) { |
| return completeReplication(); |
| } |
| |
| // if no results were returned then we're done, |
| // else fetch more |
| if (changes.results.length > 0) { |
| changesOpts.since = changes.results[changes.results.length - 1].seq; |
| getChanges(); |
| processPendingBatch(true); |
| } else { |
| |
| var complete = function () { |
| if (continuous) { |
| changesOpts.live = true; |
| getChanges(); |
| } else { |
| changesCompleted = true; |
| } |
| processPendingBatch(true); |
| }; |
| |
| // update the checkpoint so we start from the right seq next time |
| if (!currentBatch && changes.results.length === 0) { |
| writingCheckpoint = true; |
| checkpointer.writeCheckpoint(changes.last_seq, |
| session).then(function () { |
| writingCheckpoint = false; |
| result.last_seq = last_seq = changes.last_seq; |
| if (returnValue.cancelled) { |
| completeReplication(); |
| throw new Error('cancelled'); |
| } else { |
| complete(); |
| } |
| }) |
| .catch(onCheckpointError); |
| } else { |
| complete(); |
| } |
| } |
| } |
| |
| |
| function onChangesError(err) { |
| changesPending = false; |
| /* istanbul ignore if */ |
| if (returnValue.cancelled) { |
| return completeReplication(); |
| } |
| abortReplication('changes rejected', err); |
| } |
| |
| |
| function getChanges() { |
| if (!( |
| !changesPending && |
| !changesCompleted && |
| batches.length < batches_limit |
| )) { |
| return; |
| } |
| changesPending = true; |
| function abortChanges() { |
| changes.cancel(); |
| } |
| function removeListener() { |
| returnValue.removeListener('cancel', abortChanges); |
| } |
| |
| if (returnValue._changes) { // remove old changes() and listeners |
| returnValue.removeListener('cancel', returnValue._abortChanges); |
| returnValue._changes.cancel(); |
| } |
| returnValue.once('cancel', abortChanges); |
| |
| var changes = src.changes(changesOpts) |
| .on('change', onChange); |
| changes.then(removeListener, removeListener); |
| changes.then(onChangesComplete) |
| .catch(onChangesError); |
| |
| if (opts.retry) { |
| // save for later so we can cancel if necessary |
| returnValue._changes = changes; |
| returnValue._abortChanges = abortChanges; |
| } |
| } |
| |
| function createTask(checkpoint) { |
| return src.info().then(function (info) { |
| var total_items = typeof opts.since === 'undefined' ? |
| parseInt(info.update_seq, 10) - parseInt(checkpoint, 10) : |
| parseInt(info.update_seq, 10); |
| |
| taskId = src.activeTasks.add({ |
| name: `${continuous ? 'continuous ' : ''}replication from ${info.db_name}` , |
| total_items, |
| }); |
| |
| return checkpoint; |
| }); |
| } |
| |
| function startChanges() { |
| initCheckpointer().then(function () { |
| /* istanbul ignore if */ |
| if (returnValue.cancelled) { |
| completeReplication(); |
| return; |
| } |
| return checkpointer.getCheckpoint().then(createTask).then(function (checkpoint) { |
| last_seq = checkpoint; |
| initial_last_seq = checkpoint; |
| changesOpts = { |
| since: last_seq, |
| limit: batch_size, |
| batch_size, |
| style, |
| doc_ids, |
| selector, |
| return_docs: true // required so we know when we're done |
| }; |
| if (opts.filter) { |
| if (typeof opts.filter !== 'string') { |
| // required for the client-side filter in onChange |
| changesOpts.include_docs = true; |
| } else { // ddoc filter |
| changesOpts.filter = opts.filter; |
| } |
| } |
| if ('heartbeat' in opts) { |
| changesOpts.heartbeat = opts.heartbeat; |
| } |
| if ('timeout' in opts) { |
| changesOpts.timeout = opts.timeout; |
| } |
| if (opts.query_params) { |
| changesOpts.query_params = opts.query_params; |
| } |
| if (opts.view) { |
| changesOpts.view = opts.view; |
| } |
| getChanges(); |
| }); |
| }).catch(function (err) { |
| abortReplication('getCheckpoint rejected with ', err); |
| }); |
| } |
| |
| /* istanbul ignore next */ |
| function onCheckpointError(err) { |
| writingCheckpoint = false; |
| abortReplication('writeCheckpoint completed with error', err); |
| } |
| |
| /* istanbul ignore if */ |
| if (returnValue.cancelled) { // cancelled immediately |
| completeReplication(); |
| return; |
| } |
| |
| if (!returnValue._addedListeners) { |
| returnValue.once('cancel', completeReplication); |
| |
| if (typeof opts.complete === 'function') { |
| returnValue.once('error', opts.complete); |
| returnValue.once('complete', function (result) { |
| opts.complete(null, result); |
| }); |
| } |
| returnValue._addedListeners = true; |
| } |
| |
| if (typeof opts.since === 'undefined') { |
| startChanges(); |
| } else { |
| initCheckpointer().then(function () { |
| writingCheckpoint = true; |
| return checkpointer.writeCheckpoint(opts.since, session); |
| }).then(function () { |
| writingCheckpoint = false; |
| /* istanbul ignore if */ |
| if (returnValue.cancelled) { |
| completeReplication(); |
| return; |
| } |
| last_seq = opts.since; |
| startChanges(); |
| }).catch(onCheckpointError); |
| } |
| } |
| |
| // We create a basic promise so the caller can cancel the replication possibly |
| // before we have actually started listening to changes etc |
| class Replication extends EE { |
| constructor() { |
| super(); |
| this.cancelled = false; |
| this.state = 'pending'; |
| const promise = new Promise((fulfill, reject) => { |
| this.once('complete', fulfill); |
| this.once('error', reject); |
| }); |
| this.then = function (resolve, reject) { |
| return promise.then(resolve, reject); |
| }; |
| this.catch = function (reject) { |
| return promise.catch(reject); |
| }; |
| // As we allow error handling via "error" event as well, |
| // put a stub in here so that rejecting never throws UnhandledError. |
| this.catch(function () {}); |
| } |
| |
| cancel() { |
| this.cancelled = true; |
| this.state = 'cancelled'; |
| this.emit('cancel'); |
| } |
| |
| ready(src, target) { |
| if (this._readyCalled) { |
| return; |
| } |
| this._readyCalled = true; |
| |
| const onDestroy = () => { |
| this.cancel(); |
| }; |
| src.once('destroyed', onDestroy); |
| target.once('destroyed', onDestroy); |
| function cleanup() { |
| src.removeListener('destroyed', onDestroy); |
| target.removeListener('destroyed', onDestroy); |
| } |
| this.once('complete', cleanup); |
| this.once('error', cleanup); |
| } |
| } |
| |
| function toPouch(db, opts) { |
| var PouchConstructor = opts.PouchConstructor; |
| if (typeof db === 'string') { |
| return new PouchConstructor(db, opts); |
| } else { |
| return db; |
| } |
| } |
| |
| function replicateWrapper(src, target, opts, callback) { |
| |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| if (typeof opts === 'undefined') { |
| opts = {}; |
| } |
| |
| if (opts.doc_ids && !Array.isArray(opts.doc_ids)) { |
| throw createError(BAD_REQUEST, |
| "`doc_ids` filter parameter is not a list."); |
| } |
| |
| opts.complete = callback; |
| opts = clone(opts); |
| opts.continuous = opts.continuous || opts.live; |
| opts.retry = ('retry' in opts) ? opts.retry : false; |
| opts.PouchConstructor = opts.PouchConstructor || this; |
| var replicateRet = new Replication(opts); |
| var srcPouch = toPouch(src, opts); |
| var targetPouch = toPouch(target, opts); |
| replicate(srcPouch, targetPouch, opts, replicateRet); |
| return replicateRet; |
| } |
| |
| function sync(src, target, opts, callback) { |
| if (typeof opts === 'function') { |
| callback = opts; |
| opts = {}; |
| } |
| if (typeof opts === 'undefined') { |
| opts = {}; |
| } |
| opts = clone(opts); |
| opts.PouchConstructor = opts.PouchConstructor || this; |
| src = toPouch(src, opts); |
| target = toPouch(target, opts); |
| return new Sync(src, target, opts, callback); |
| } |
| |
| class Sync extends EE { |
| constructor(src, target, opts, callback) { |
| super(); |
| this.canceled = false; |
| |
| const optsPush = opts.push ? Object.assign({}, opts, opts.push) : opts; |
| const optsPull = opts.pull ? Object.assign({}, opts, opts.pull) : opts; |
| |
| this.push = replicateWrapper(src, target, optsPush); |
| this.pull = replicateWrapper(target, src, optsPull); |
| |
| this.pushPaused = true; |
| this.pullPaused = true; |
| |
| const pullChange = (change) => { |
| this.emit('change', { |
| direction: 'pull', |
| change |
| }); |
| }; |
| const pushChange = (change) => { |
| this.emit('change', { |
| direction: 'push', |
| change |
| }); |
| }; |
| const pushDenied = (doc) => { |
| this.emit('denied', { |
| direction: 'push', |
| doc |
| }); |
| }; |
| const pullDenied = (doc) => { |
| this.emit('denied', { |
| direction: 'pull', |
| doc |
| }); |
| }; |
| const pushPaused = () => { |
| this.pushPaused = true; |
| /* istanbul ignore if */ |
| if (this.pullPaused) { |
| this.emit('paused'); |
| } |
| }; |
| const pullPaused = () => { |
| this.pullPaused = true; |
| /* istanbul ignore if */ |
| if (this.pushPaused) { |
| this.emit('paused'); |
| } |
| }; |
| const pushActive = () => { |
| this.pushPaused = false; |
| /* istanbul ignore if */ |
| if (this.pullPaused) { |
| this.emit('active', { |
| direction: 'push' |
| }); |
| } |
| }; |
| const pullActive = () => { |
| this.pullPaused = false; |
| /* istanbul ignore if */ |
| if (this.pushPaused) { |
| this.emit('active', { |
| direction: 'pull' |
| }); |
| } |
| }; |
| |
| let removed = {}; |
| |
| const removeAll = (type) => { // type is 'push' or 'pull' |
| return (event, func) => { |
| const isChange = event === 'change' && |
| (func === pullChange || func === pushChange); |
| const isDenied = event === 'denied' && |
| (func === pullDenied || func === pushDenied); |
| const isPaused = event === 'paused' && |
| (func === pullPaused || func === pushPaused); |
| const isActive = event === 'active' && |
| (func === pullActive || func === pushActive); |
| |
| if (isChange || isDenied || isPaused || isActive) { |
| if (!(event in removed)) { |
| removed[event] = {}; |
| } |
| removed[event][type] = true; |
| if (Object.keys(removed[event]).length === 2) { |
| // both push and pull have asked to be removed |
| this.removeAllListeners(event); |
| } |
| } |
| }; |
| }; |
| |
| if (opts.live) { |
| this.push.on('complete', this.pull.cancel.bind(this.pull)); |
| this.pull.on('complete', this.push.cancel.bind(this.push)); |
| } |
| |
| function addOneListener(ee, event, listener) { |
| if (ee.listeners(event).indexOf(listener) == -1) { |
| ee.on(event, listener); |
| } |
| } |
| |
| this.on('newListener', function (event) { |
| if (event === 'change') { |
| addOneListener(this.pull, 'change', pullChange); |
| addOneListener(this.push, 'change', pushChange); |
| } else if (event === 'denied') { |
| addOneListener(this.pull, 'denied', pullDenied); |
| addOneListener(this.push, 'denied', pushDenied); |
| } else if (event === 'active') { |
| addOneListener(this.pull, 'active', pullActive); |
| addOneListener(this.push, 'active', pushActive); |
| } else if (event === 'paused') { |
| addOneListener(this.pull, 'paused', pullPaused); |
| addOneListener(this.push, 'paused', pushPaused); |
| } |
| }); |
| |
| this.on('removeListener', function (event) { |
| if (event === 'change') { |
| this.pull.removeListener('change', pullChange); |
| this.push.removeListener('change', pushChange); |
| } else if (event === 'denied') { |
| this.pull.removeListener('denied', pullDenied); |
| this.push.removeListener('denied', pushDenied); |
| } else if (event === 'active') { |
| this.pull.removeListener('active', pullActive); |
| this.push.removeListener('active', pushActive); |
| } else if (event === 'paused') { |
| this.pull.removeListener('paused', pullPaused); |
| this.push.removeListener('paused', pushPaused); |
| } |
| }); |
| |
| this.pull.on('removeListener', removeAll('pull')); |
| this.push.on('removeListener', removeAll('push')); |
| |
| const promise = Promise.all([ |
| this.push, |
| this.pull |
| ]).then((resp) => { |
| const out = { |
| push: resp[0], |
| pull: resp[1] |
| }; |
| this.emit('complete', out); |
| if (callback) { |
| callback(null, out); |
| } |
| this.removeAllListeners(); |
| return out; |
| }, (err) => { |
| this.cancel(); |
| if (callback) { |
| // if there's a callback, then the callback can receive |
| // the error event |
| callback(err); |
| } else { |
| // if there's no callback, then we're safe to emit an error |
| // event, which would otherwise throw an unhandled error |
| // due to 'error' being a special event in EventEmitters |
| this.emit('error', err); |
| } |
| this.removeAllListeners(); |
| if (callback) { |
| // no sense throwing if we're already emitting an 'error' event |
| throw err; |
| } |
| }); |
| |
| this.then = function (success, err) { |
| return promise.then(success, err); |
| }; |
| |
| this.catch = function (err) { |
| return promise.catch(err); |
| }; |
| } |
| |
| cancel() { |
| if (!this.canceled) { |
| this.canceled = true; |
| this.push.cancel(); |
| this.pull.cancel(); |
| } |
| } |
| } |
| |
| function replication(PouchDB) { |
| PouchDB.replicate = replicateWrapper; |
| PouchDB.sync = sync; |
| |
| Object.defineProperty(PouchDB.prototype, 'replicate', { |
| get: function () { |
| var self = this; |
| if (typeof this.replicateMethods === 'undefined') { |
| this.replicateMethods = { |
| from: function (other, opts, callback) { |
| return self.constructor.replicate(other, self, opts, callback); |
| }, |
| to: function (other, opts, callback) { |
| return self.constructor.replicate(self, other, opts, callback); |
| } |
| }; |
| } |
| return this.replicateMethods; |
| } |
| }); |
| |
| PouchDB.prototype.sync = function (dbName, opts, callback) { |
| return this.constructor.sync(this, dbName, opts, callback); |
| }; |
| } |
| |
| PouchDB.plugin(LevelPouch$1) |
| .plugin(HttpPouch$1) |
| .plugin(mapreduce) |
| .plugin(replication); |
| |
| // Pull from src because pouchdb-node/pouchdb-browser themselves |
| |
| module.exports = PouchDB; |