Bug 1588329 - Centralize array chunking in PlacesUtils.chunkArray. r=mak

Differential Revision: https://phabricator.services.mozilla.com/D49072

--HG--
extra : moz-landing-system : lando
This commit is contained in:
Lina Cambridge 2019-10-15 21:23:07 +00:00
parent 9d9b081407
commit a0d2e56628
8 changed files with 88 additions and 89 deletions

View File

@ -38,7 +38,7 @@ const { SerializableSet, Svc, Utils } = ChromeUtils.import(
// Lazily resolve Sync/Places dependencies on this module's global; each
// module is only loaded the first time the corresponding property is read.
XPCOMUtils.defineLazyModuleGetters(this, {
fxAccounts: "resource://gre/modules/FxAccounts.jsm",
OS: "resource://gre/modules/osfile.jsm",
PlacesSyncUtils: "resource://gre/modules/PlacesSyncUtils.jsm",
PlacesUtils: "resource://gre/modules/PlacesUtils.jsm",
});
function ensureDirectory(path) {
@ -1310,52 +1310,54 @@ SyncEngine.prototype = {
// `getBatched` includes the list of IDs as a query parameter, so we need to fetch
// records in chunks to avoid exceeding URI length limits.
for (let [, ids] of PlacesSyncUtils.chunkArray(
idsToBackfill,
this.guidFetchBatchSize
)) {
backfilledItems.ids = ids;
if (this.guidFetchBatchSize) {
for (let ids of PlacesUtils.chunkArray(
idsToBackfill,
this.guidFetchBatchSize
)) {
backfilledItems.ids = ids;
let { response, records } = await backfilledItems.getBatched(
this.downloadBatchSize
);
if (!response.success) {
response.failureCode = ENGINE_DOWNLOAD_FAIL;
throw response;
}
let backfilledRecordsToApply = [];
let failedInBackfill = [];
await Async.yieldingForEach(records, async record => {
let { shouldApply, error } = await this._maybeReconcile(record);
if (error) {
failedInBackfill.push(record.id);
count.failed++;
return;
let { response, records } = await backfilledItems.getBatched(
this.downloadBatchSize
);
if (!response.success) {
response.failureCode = ENGINE_DOWNLOAD_FAIL;
throw response;
}
if (!shouldApply) {
count.reconciled++;
return;
let backfilledRecordsToApply = [];
let failedInBackfill = [];
await Async.yieldingForEach(records, async record => {
let { shouldApply, error } = await this._maybeReconcile(record);
if (error) {
failedInBackfill.push(record.id);
count.failed++;
return;
}
if (!shouldApply) {
count.reconciled++;
return;
}
backfilledRecordsToApply.push(record);
});
let failedToApply = await this._applyRecords(backfilledRecordsToApply);
failedInBackfill.push(...failedToApply);
count.failed += failedToApply.length;
count.applied += backfilledRecordsToApply.length;
this.toFetch = Utils.setDeleteAll(this.toFetch, ids);
this.previousFailed = Utils.setAddAll(
this.previousFailed,
failedInBackfill
);
if (lastSync < this.lastModified) {
lastSync = this.lastModified;
await this.setLastSync(lastSync);
}
backfilledRecordsToApply.push(record);
});
let failedToApply = await this._applyRecords(backfilledRecordsToApply);
failedInBackfill.push(...failedToApply);
count.failed += failedToApply.length;
count.applied += backfilledRecordsToApply.length;
this.toFetch = Utils.setDeleteAll(this.toFetch, ids);
this.previousFailed = Utils.setAddAll(
this.previousFailed,
failedInBackfill
);
if (lastSync < this.lastModified) {
lastSync = this.lastModified;
await this.setLastSync(lastSync);
}
}

View File

@ -1347,10 +1347,7 @@ BufferedBookmarksStore.prototype = {
async applyIncomingBatch(records) {
let buf = await this.ensureOpenMirror();
for (let [, chunk] of PlacesSyncUtils.chunkArray(
records,
this._batchChunkSize
)) {
for (let chunk of PlacesUtils.chunkArray(records, this._batchChunkSize)) {
await buf.store(chunk);
}
// Array of failed records.

View File

@ -812,10 +812,7 @@ add_task(async function test_processIncoming_previousFailed() {
let sortedRecords = records.sort((a, b) => (a.id > b.id ? 1 : -1));
let recordsToApply = [],
recordsToFail = [];
let chunks = Array.from(
PlacesSyncUtils.chunkArray(sortedRecords, 2),
([, chunk]) => chunk
);
let chunks = Array.from(PlacesUtils.chunkArray(sortedRecords, 2));
for (let i = 0; i < chunks.length; i++) {
(i % 2 === 0 ? recordsToFail : recordsToApply).push(...chunks[i]);
}

View File

@ -2217,7 +2217,10 @@ function insertBookmarkTree(items, source, parent, urls, lastAddedForParent) {
);
// Remove stale tombstones for new items.
for (let chunk of chunkArray(items, SQLITE_MAX_VARIABLE_NUMBER)) {
for (let chunk of PlacesUtils.chunkArray(
items,
SQLITE_MAX_VARIABLE_NUMBER
)) {
await db.executeCached(
`DELETE FROM moz_bookmarks_deleted WHERE guid IN (${new Array(
chunk.length
@ -2625,7 +2628,10 @@ function removeBookmarks(items, options) {
}
}
for (let chunk of chunkArray(items, SQLITE_MAX_VARIABLE_NUMBER)) {
for (let chunk of PlacesUtils.chunkArray(
items,
SQLITE_MAX_VARIABLE_NUMBER
)) {
// We don't go through the annotations service for this cause otherwise
// we'd get a pointless onItemChanged notification and it would also
// set lastModified to an unexpected value.
@ -3390,17 +3396,6 @@ function adjustSeparatorsSyncCounter(
);
}
/**
 * Yields `array` in slices of no more than `chunkLength` elements.
 *
 * @param {Array} array An array containing zero or more elements.
 * @param {number} chunkLength The maximum number of elements in each chunk.
 * @yields {Array} A chunk of the array. If the array fits in one chunk, the
 *          array itself is yielded, not a copy.
 * @throws {TypeError} If `chunkLength` is not a positive integer.
 */
function* chunkArray(array, chunkLength) {
  // A non-positive chunk length would make the loop below yield empty
  // slices forever (`startIndex += chunkLength` never advances); fail fast
  // instead of hanging.
  if (chunkLength <= 0 || !Number.isInteger(chunkLength)) {
    throw new TypeError("Chunk length must be a positive integer");
  }
  if (array.length <= chunkLength) {
    yield array;
    return;
  }
  let startIndex = 0;
  while (startIndex < array.length) {
    yield array.slice(startIndex, (startIndex += chunkLength));
  }
}
/**
* Generates a list of "?" SQL bindings based on input array length.
* @param {array} values an array of values.

View File

@ -26,27 +26,7 @@ ChromeUtils.defineModuleGetter(
* `nsINavBookmarksService`, with special handling for
* tags, keywords, synced annotations, and missing parents.
*/
var PlacesSyncUtils = {
  /**
   * Generator that walks `array` and yields it in fixed-size chunks,
   * together with the offset at which each chunk starts.
   *
   * @param array
   *        The array to split up.
   * @param chunkLength
   *        The maximum number of elements per chunk.
   * @yields {Array}
   *         A `[startIndex, chunk]` pair, where `chunk` holds up to
   *         `chunkLength` elements. Yields nothing for an empty array or a
   *         non-positive chunk length.
   */
  *chunkArray(array, chunkLength) {
    if (chunkLength <= 0 || !array.length) {
      return;
    }
    for (let offset = 0; offset < array.length; offset += chunkLength) {
      yield [offset, array.slice(offset, offset + chunkLength)];
    }
  },
};
var PlacesSyncUtils = {};
const { SOURCE_SYNC } = Ci.nsINavBookmarksService;
@ -258,7 +238,7 @@ const HistorySyncUtils = (PlacesSyncUtils.history = Object.freeze({
// aren't stored in the database.
let db = await PlacesUtils.promiseDBConnection();
let nonSyncableGuids = [];
for (let [, chunk] of PlacesSyncUtils.chunkArray(
for (let chunk of PlacesUtils.chunkArray(
guids,
SQLITE_MAX_VARIABLE_NUMBER
)) {

View File

@ -1907,6 +1907,32 @@ var PlacesUtils = {
return rootItem;
},
/**
* Returns a generator that iterates over `array` and yields slices of no
* more than `chunkLength` elements at a time.
*
* @param {Array} array An array containing zero or more elements.
* @param {number} chunkLength The maximum number of elements in each chunk.
* @yields {Array} A chunk of the array.
* @throws if `chunkLength` is negative or not an integer.
*/
*chunkArray(array, chunkLength) {
if (chunkLength <= 0 || !Number.isInteger(chunkLength)) {
throw new TypeError("Chunk length must be a positive integer");
}
if (!array.length) {
return;
}
if (array.length <= chunkLength) {
yield array;
return;
}
let startIndex = 0;
while (startIndex < array.length) {
yield array.slice(startIndex, (startIndex += chunkLength));
}
},
};
XPCOMUtils.defineLazyGetter(PlacesUtils, "history", function() {

View File

@ -1005,7 +1005,8 @@ class SyncedBookmarksMirror {
let children = record.children;
if (children && Array.isArray(children)) {
for (let [offset, chunk] of PlacesSyncUtils.chunkArray(
let offset = 0;
for (let chunk of PlacesUtils.chunkArray(
children,
SQLITE_MAX_VARIABLE_NUMBER - 1
)) {
@ -1029,6 +1030,7 @@ class SyncedBookmarksMirror {
VALUES ${valuesFragment}`,
[guid, ...chunk.map(PlacesSyncUtils.bookmarks.recordIdToGuid)]
);
offset += chunk.length;
}
}
}

View File

@ -171,7 +171,7 @@ function inspectChangeRecords(changeRecords) {
async function promiseManyDatesAdded(guids) {
let datesAdded = new Map();
let db = await PlacesUtils.promiseDBConnection();
for (let [, chunk] of PlacesSyncUtils.chunkArray(guids, 100)) {
for (let chunk of PlacesUtils.chunkArray(guids, 100)) {
let rows = await db.executeCached(
`
SELECT guid, dateAdded FROM moz_bookmarks