Let's say I have 100 documents (note: the total number of documents will definitely increase). Getting all 100 documents at once would cause a performance issue, so I figured I need to do infinite scroll and display, say, 15 documents on first load. Every time the infinite scroll is triggered, it should get another 15 documents.
Basically, `docArr.length = 100`:

- First load: grab `docArr[0]` to `docArr[14]`
- Trigger infinite scroll: grab `docArr[15]` to `docArr[29]`
- and so on, until `docArr[99]` is grabbed too
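In plain array terms, the behaviour I'm after is roughly this (just an illustration, not my actual PouchDB code):

```js
// Illustration only: paging a plain in-memory array in chunks of 15
const pageSize = 15;
let offset = 0;

function nextPage(docArr) {
  const page = docArr.slice(offset, offset + pageSize); // [0..14], then [15..29], ...
  offset += page.length;
  return page; // empty array once everything has been grabbed
}
```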
In the PouchDB FAQ on pagination, it's stated that `startkey`, `endkey`, `limit` and `skip` need to be used.
My document ids are in the format `doc-1`. So I have `doc-1`, `doc-2`, `doc-3`, etc.
I followed the suggested method, so my code is:
```js
var options = {limit: 15};

function fetchNextPage() {
  pouch.allDocs(options, function (err, response) {
    if (response && response.rows.length > 0) {
      let id_num = response.rows[response.rows.length - 1].id.split('-').pop();
      // on first trigger, the line above should get '15'
      id_num = Number(id_num) + 1;
      let id = 'doc-' + id_num;
      // on first trigger, id = 'doc-16'
      options.startkey = id;
      options.skip = 1;
    }
    // handle err or response
  });
}
```
However, when I do it this way and trigger the infinite scroll, I only get the first 15 documents (from element 0 to element 14) repeatedly, even after I increase the id (which I use as the `startkey`).

How do I get the next 15 documents every time I trigger the infinite scroll (i.e. do pagination)?
I am not 100% clear on what is going on with the OP's code, but the assumption that the `response.rows` of the first query to `_all_docs` is ordered like this
rows index | id |
---|---|
0 | doc-1 |
1 | doc-2 |
2 | doc-3 |
3 | doc-4 |
4 | doc-5 |
5 | doc-6 |
6 | doc-7 |
7 | doc-8 |
8 | doc-9 |
9 | doc-10 |
10 | doc-11 |
11 | doc-12 |
12 | doc-13 |
13 | doc-14 |
14 | doc-15 |
is wrong. Document ids are strings and as such follow the collation rules documented in the CouchDB documentation, 3.2.2.5 Collation Specification.
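As a quick illustration (my own sketch, not part of the OP's code): for plain ASCII ids of this shape, JavaScript's default lexicographic string sort happens to produce the same relative order as the database's collation, which makes the problem easy to see:

```js
// Rough illustration: lexicographic ordering of string ids.
// (CouchDB/PouchDB use ICU-style collation, but for simple ASCII ids
// like these the relative order works out the same.)
const ids = ['doc-1', 'doc-2', 'doc-10', 'doc-11', 'doc-100'];
console.log(ids.sort());
// => [ 'doc-1', 'doc-10', 'doc-100', 'doc-11', 'doc-2' ]
```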
This should be the actual sequence of document ids returned by the initial call to `_all_docs`, given the format of the document ids specified by the OP:
rows index | id |
---|---|
0 | doc-1 |
1 | doc-10 |
2 | doc-100 |
3 | doc-11 |
4 | doc-12 |
5 | doc-13 |
6 | doc-14 |
7 | doc-15 |
8 | doc-16 |
9 | doc-17 |
10 | doc-18 |
11 | doc-19 |
12 | doc-2 |
13 | doc-20 |
14 | doc-21 |
Hence the calculation of the next set of results is flawed:

```js
id_num = Number(id_num) + 1
let id = 'doc-' + id_num
options.startkey = id;
```
Calculating the next id is a bad idea and wholly unnecessary. Rather, use the id of the last document returned, e.g.
```js
let result = await db.allDocs(options);
if (result.rows.length) {
  // do something with results
  // get next page of docs?
  if (result.rows.length === options.limit) {
    options.startkey = result.rows.pop().id;
    options.skip = 1;
    // repeat
  }
}
```
This snippet uses `setTimeout` to grab 15 documents per second, logs the document ids to the console, and exits when the number of documents returned is less than the `limit`, which signals the end of the results.
```js
// canned test documents
function getDocsToInstall() {
  let docs = [];
  for (let i = 1; i < 101; i++) {
    docs.push({
      _id: `doc-${i}`
    });
  }
  return docs;
}

let db;

// init db instance
async function initDb() {
  db = new PouchDB('test', {
    adapter: 'memory'
  });
  await db.bulkDocs(getDocsToInstall());
}

function update(result) {
  console.log(result.rows.map(d => d.id).join(','));
}

initDb().then(async () => {
  let options = {
    limit: 15,
    include_docs: false,
    reduce: false
  };

  async function timerFn() {
    let result = await db.allDocs(options);
    if (result.rows.length) {
      update(result);
      // get next page of docs
      if (result.rows.length === options.limit) {
        options.startkey = result.rows.pop().id;
        options.skip = 1;
        // repeat
        setTimeout(timerFn, 1000);
      } else {
        console.log("All docs processed 👍");
      }
    }
  }
  timerFn();
});
```
```html
<script src="https://cdn.jsdelivr.net/npm/pouchdb@7.1.1/dist/pouchdb.min.js"></script>
<script src="https://github.com/pouchdb/pouchdb/releases/download/7.1.1/pouchdb.memory.min.js"></script>
```