
Commit

lint
alxndrsn committed Mar 25, 2024
1 parent 32ab14a commit 9f3399c
Showing 2 changed files with 15 additions and 14 deletions.
21 changes: 10 additions & 11 deletions packages/node_modules/pouchdb-adapter-indexeddb/src/allDocs.js
@@ -26,7 +26,7 @@ function allDocsKeys(keys, docStore, allDocsInner) {
   });
 }
 
-function createKeyRange(start, end, inclusiveEnd, key, descending, inclusiveStart=true) {
+function createKeyRange(start, end, inclusiveStart, inclusiveEnd, key, descending) {
   try {
     if (key) {
       return IDBKeyRange.only([0, key]);
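For context on the reordered signature: inclusiveStart and inclusiveEnd say whether the range bounds are closed, which IndexedDB expresses through the inverse lowerOpen/upperOpen flags of IDBKeyRange.bound. A minimal sketch of that mapping, as an illustration only (the full body of createKeyRange is not shown in this hunk, and handling of descending is omitted):

// Illustration only, not PouchDB's implementation: map inclusive-start/end
// flags onto IDBKeyRange, returning an { error } object instead of throwing,
// which matches the keyRange.error checks at the call sites below.
function exampleKeyRange(start, end, inclusiveStart, inclusiveEnd, key) {
  try {
    if (key) {
      return IDBKeyRange.only([0, key]);
    }
    return IDBKeyRange.bound(start, end, !inclusiveStart, !inclusiveEnd);
  } catch (err) {
    // e.g. a DataError when start is ordered after end
    return { error: err };
  }
}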
@@ -112,7 +112,7 @@ export default function (txn, metadata, opts, callback) {
     return allDocsKeys(keys, docStore, allDocsInner);
   }
 
-  let keyRange = createKeyRange([0, start], [0, end], inclusiveEnd, key, descending);
+  let keyRange = createKeyRange([0, start], [0, end], true, inclusiveEnd, key, descending);
   if (keyRange.error) {
     return handleKeyRangeError(opts, metadata, keyRange.error, callback);
   }
@@ -200,7 +200,7 @@ export default function (txn, metadata, opts, callback) {
       limitKey = descending ? keyRange.lower : keyRange.upper;
     }
 
-    keyRange = createKeyRange(firstKey, limitKey, inclusiveEnd, key, descending);
+    keyRange = createKeyRange(firstKey, limitKey, true, inclusiveEnd, key, descending);
     if (keyRange.error) {
       txn.txn.abort();
       return handleKeyRangeError(opts, metadata, keyRange.error, callback);
@@ -214,8 +214,9 @@ export default function (txn, metadata, opts, callback) {
     // There is a risk here with getting all results into memory - if they have multiple
     // revs, then we risk loading loads of extra data which is then discarded. This is
     // reduced by batching. This also loads unused data when include_docs is false.
-
-    // TODO the tests which cover this will only actually test it if batch size < 5ish
+    //
+    // Current batch size is quite arbitrary, but seems like (1) more than a typical
+    // result size, and (2) not so big it's likely to cause issues.
     const batchSize = 100;
 
     fetchNextBatch();
@@ -233,15 +234,13 @@ export default function (txn, metadata, opts, callback) {
       }
 
       if (batch.length >= batchSize) {
-        const lastSeenKey = [0, batch[batch.length-1].id];
+        const lastSeenKey = [ 0, batch[batch.length-1].id ];
         const startKey = descending ? keyRange.upper : lastSeenKey;
         const endKey = descending ? lastSeenKey : keyRange.upper;
         if (startKey[1] !== endKey[1]) {
-          // TODO unclear why
-          const _incEnd = descending ? false : inclusiveEnd;
-          const _incStart = descending ? true : false;
-          // TODO move inclusiveStart arg to be more consistent
-          keyRange = createKeyRange(startKey, endKey, _incEnd, key, descending, _incStart);
+          const incEnd = descending ? false : inclusiveEnd;
+          const incStart = descending ? true : false;
+          keyRange = createKeyRange(startKey, endKey, incStart, incEnd, key, descending);
           return fetchNextBatch();
         }
       }
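The comment about memory risk and the key-range continuation above amount to a cursor-batching pattern: read at most batchSize rows, then reopen the cursor just past the last key seen. A self-contained sketch of that pattern against a plain IndexedDB object store, offered as an illustration (fetchAllInBatches and its parameters are hypothetical names, not PouchDB APIs):

// Illustrative sketch, not PouchDB code: stream an object store in batches so
// the full result set never has to sit in memory at once.
function fetchAllInBatches(store, batchSize, onBatch, onDone) {
  let lastKey = null;

  function nextBatch() {
    // After the first batch, use an exclusive lower bound so the last row
    // already handed to onBatch is not re-read.
    const range = lastKey === null ? null : IDBKeyRange.lowerBound(lastKey, true);
    const rows = [];
    const req = store.openCursor(range);
    req.onsuccess = function () {
      const cursor = req.result;
      if (cursor && rows.length < batchSize) {
        rows.push(cursor.value);
        lastKey = cursor.key;
        cursor.continue();
        return;
      }
      // onBatch must do its work synchronously here, otherwise the IndexedDB
      // transaction auto-commits before the next openCursor call.
      onBatch(rows);
      if (cursor) {
        nextBatch();   // more rows remain past this batch
      } else {
        onDone();      // cursor exhausted
      }
    };
  }

  nextBatch();
}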
8 changes: 5 additions & 3 deletions tests/integration/test.all_docs.js
@@ -464,16 +464,18 @@ adapters.forEach(function (adapter) {
       var db = new PouchDB(dbs.name);
 
       const docs = [];
-      for (let i=0; i<1000; ++i) docs.push({ _id:i.toString().padStart(5, '0') });
+      for (let i=0; i<1000; ++i) {
+        docs.push({ _id:i.toString().padStart(5, '0') });
+      }
 
       db.bulkDocs({docs}).then(function (res) {
         const deletes = [];
-        for(let i=300; i<400; ++i) {
+        for (let i=300; i<400; ++i) {
           docs[i]._deleted = true;
           docs[i]._rev = res[i].rev;
           deletes.push(db.remove(docs[i]));
         }
-        for(let i=700; i<800; ++i) {
+        for (let i=700; i<800; ++i) {
           docs[i]._deleted = true;
           docs[i]._rev = res[i].rev;
           deletes.push(db.remove(docs[i]));
