Bug 1864896: Autofix unused function arguments (services). r=markh,sync-reviewers

Differential Revision: https://phabricator.services.mozilla.com/D202986
Author: Dave Townsend
Date: 2024-03-02 15:26:03 +00:00
parent 5a36ecfb41
commit 6e02473120
75 changed files with 195 additions and 232 deletions
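
Every hunk below applies the same mechanical fix: parameters that a function declares but never reads are dropped from its signature (or, in a destructuring parameter, replaced by an elision). A minimal sketch of the pattern, adapted from the hunks in this commit but with simplified, illustrative bodies (it is not the automated tooling that produced the change):

// Before: `data` is declared but never read.
const observerBefore = {
  observe(subject, topic, data) {
    console.log("Event received " + topic);
  },
};

// After: the unused trailing parameter is dropped. Callers need no change,
// because extra arguments passed to a JavaScript function are simply ignored.
const observerAfter = {
  observe(subject, topic) {
    console.log("Event received " + topic);
  },
};

// Functions that read `arguments` can lose all of their named parameters;
// `arguments` still receives every value that was passed.
const _ = function () {
  console.log(Array.from(arguments).join(" "));
};

// In a destructuring parameter, an unused leading element becomes a hole.
const events = [[0, "uptake", "event"]];
const mapped = events.map(([, category, method]) => ({ category, method }));

observerBefore.observe({}, "example-topic", "ignored");
observerAfter.observe({}, "example-topic", "ignored");
_("Hello", "World");
console.log(mapped);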


@ -139,11 +139,11 @@ export var Authentication = {
let mainWindow = Services.wm.getMostRecentWindow("navigator:browser");
let newtab = mainWindow.gBrowser.addWebTab(uri);
let win = mainWindow.gBrowser.getBrowserForTab(newtab);
win.addEventListener("load", function (e) {
win.addEventListener("load", function () {
LOG("load");
});
win.addEventListener("loadstart", function (e) {
win.addEventListener("loadstart", function () {
LOG("loadstart");
});
@ -299,7 +299,7 @@ export var Sync = {
await this.wipeLogs();
},
observe(subject, topic, data) {
observe(subject, topic) {
LOG("Event received " + topic);
},


@ -289,7 +289,7 @@ class Watchdog {
}
}
observe(subject, topic, data) {
observe(subject, topic) {
if (topic == "timer-callback") {
this.abortReason = "timeout";
} else if (topic == "quit-application") {


@ -162,7 +162,7 @@ Intl.prototype = {
Services.prefs.removeObserver("intl.accept_languages", this);
},
observe(subject, topic, data) {
observe() {
this.readPref();
},


@ -51,6 +51,6 @@ export function initTestLogging(level) {
return logStats;
}
export function getTestLogger(component) {
export function getTestLogger() {
return Log.repository.getLogger("Testing");
}


@ -91,7 +91,7 @@ function do_check_throws_message(aFunc, aResult) {
* @usage _("Hello World") -> prints "Hello World"
* @usage _(1, 2, 3) -> prints "1 2 3"
*/
var _ = function (some, debug, text, to) {
var _ = function () {
print(Array.from(arguments).join(" "));
};
@ -192,7 +192,7 @@ var PACSystemSettings = {
// each test gets a completely fresh setup.
mainThreadOnly: true,
PACURI: null,
getProxyForURI: function getProxyForURI(aURI) {
getProxyForURI: function getProxyForURI() {
throw Components.Exception("", Cr.NS_ERROR_NOT_IMPLEMENTED);
},
};
@ -221,7 +221,7 @@ function getUptakeTelemetrySnapshot(component, source) {
return (
parentEvents
// Transform raw event data to objects.
.map(([i, category, method, object, value, extras]) => {
.map(([, category, method, object, value, extras]) => {
return { category, method, object, value, extras };
})
// Keep only for the specified component and source.


@ -40,7 +40,7 @@ add_test(function test_delay() {
const delay = 100;
let that = {};
let t0 = Date.now();
function callback(timer) {
function callback() {
// Difference should be ~2*delay, but hard to predict on all platforms,
// particularly Windows XP.
Assert.ok(Date.now() - t0 > delay);
@ -57,7 +57,7 @@ add_test(function test_clear() {
const delay = 0;
let that = {};
CommonUtils.namedTimer(
function callback(timer) {
function callback() {
do_throw("Shouldn't fire!");
},
delay,


@ -25,7 +25,7 @@ WeaveCrypto.prototype = {
"nsISupportsWeakReference",
]),
observe(subject, topic, data) {
observe(subject, topic) {
let self = this._self;
self.log("Observed " + topic + " topic.");
if (topic == "nsPref:changed") {


@ -73,6 +73,6 @@ addResourceAlias();
* @usage _("Hello World") -> prints "Hello World"
* @usage _(1, 2, 3) -> prints "1 2 3"
*/
var _ = function (some, debug, text, to) {
var _ = function () {
print(Array.from(arguments).join(" "));
};


@ -498,7 +498,7 @@ FxAccountsClient.prototype = {
*/
accountExists(email) {
return this.signIn(email, "").then(
cantHappen => {
() => {
throw new Error("How did I sign in with an empty password?");
},
expectedError => {


@ -52,7 +52,7 @@ FxAccountsProfile.prototype = {
// making another request to determine if it is fresh or not.
PROFILE_FRESHNESS_THRESHOLD: 120000, // 2 minutes
observe(subject, topic, data) {
observe(subject, topic) {
// If we get a profile change notification from our webchannel it means
// the user has just changed their profile via the web, so we want to
// ignore our "freshness threshold"


@ -49,7 +49,7 @@ MockStorage.prototype = Object.freeze({
getOAuthTokens() {
return Promise.resolve(null);
},
setOAuthTokens(contents) {
setOAuthTokens() {
return Promise.resolve();
},
});


@ -120,7 +120,7 @@ function MockFxAccountsClient() {
// mock calls up to the auth server to determine whether the
// user account has been verified
this.recoveryEmailStatus = async function (sessionToken) {
this.recoveryEmailStatus = async function () {
// simulate a call to /recovery_email/status
return {
email: this._email,
@ -139,7 +139,7 @@ function MockFxAccountsClient() {
return !this._deletedOnServer;
};
this.accountKeys = function (keyFetchToken) {
this.accountKeys = function () {
return new Promise(resolve => {
do_timeout(50, () => {
resolve({
@ -188,7 +188,7 @@ Object.setPrototypeOf(
* mock the now() method, so that we can simulate the passing of
* time and verify that signatures expire correctly.
*/
function MockFxAccounts(credentials = null) {
function MockFxAccounts() {
let result = new FxAccounts({
VERIFICATION_POLL_TIMEOUT_INITIAL: 100, // 100ms
@ -453,10 +453,10 @@ add_test(function test_polling_timeout() {
fxa.setSignedInUser(test_user).then(() => {
p.then(
success => {
() => {
do_throw("this should not succeed");
},
fail => {
() => {
removeObserver();
fxa.signOut().then(run_next_test);
}
@ -471,7 +471,7 @@ add_task(async function test_onverified_once() {
let numNotifications = 0;
function observe(aSubject, aTopic, aData) {
function observe() {
numNotifications += 1;
}
Services.obs.addObserver(observe, ONVERIFIED_NOTIFICATION);
@ -971,17 +971,17 @@ add_test(function test_fetchAndUnwrapAndDeriveKeys_no_token() {
makeObserver(ONLOGOUT_NOTIFICATION, function () {
log.debug("test_fetchAndUnwrapKeys_no_token observed logout");
fxa._internal.getUserAccountData().then(user2 => {
fxa._internal.getUserAccountData().then(() => {
fxa._internal.abortExistingFlow().then(run_next_test);
});
});
fxa
.setSignedInUser(user)
.then(user2 => {
.then(() => {
return fxa.keys._fetchAndUnwrapAndDeriveKeys();
})
.catch(error => {
.catch(() => {
log.info("setSignedInUser correctly rejected");
});
});
@ -1272,11 +1272,7 @@ add_task(async function test_getOAuthTokenCachedScopeNormalization() {
let numOAuthTokenCalls = 0;
let client = fxa._internal.fxAccountsClient;
client.accessTokenWithSessionToken = async (
_sessionTokenHex,
_clientId,
scopeString
) => {
client.accessTokenWithSessionToken = async (_sessionTokenHex, _clientId) => {
numOAuthTokenCalls++;
return MOCK_TOKEN_RESPONSE;
};


@ -80,7 +80,7 @@ function MockFxAccountsClient(device) {
// mock calls up to the auth server to determine whether the
// user account has been verified
this.recoveryEmailStatus = function (sessionToken) {
this.recoveryEmailStatus = function () {
// simulate a call to /recovery_email/status
return Promise.resolve({
email: this._email,
@ -104,8 +104,7 @@ function MockFxAccountsClient(device) {
return Promise.resolve(!!uid && !this._deletedOnServer);
};
this.registerDevice = (st, name, type) =>
Promise.resolve({ id: device.id, name });
this.registerDevice = (st, name) => Promise.resolve({ id: device.id, name });
this.updateDevice = (st, id, name) => Promise.resolve({ id, name });
this.signOut = () => Promise.resolve({});
this.getDeviceList = st =>
@ -655,7 +654,7 @@ add_task(async function test_verification_updates_registration() {
};
});
fxa._internal.checkEmailStatus = async function (sessionToken) {
fxa._internal.checkEmailStatus = async function () {
credentials.verified = true;
return credentials;
};
@ -792,7 +791,7 @@ add_task(async function test_refreshDeviceList() {
};
const deviceListUpdateObserver = {
count: 0,
observe(subject, topic, data) {
observe() {
this.count++;
},
};


@ -174,7 +174,7 @@ add_task(async function test_sendtab_receive() {
const fxai = FxaInternalMock();
const sendTab = new SendTab(commands, fxai);
sendTab._encrypt = (bytes, device) => {
sendTab._encrypt = bytes => {
return bytes;
};
sendTab._decrypt = bytes => {
@ -387,7 +387,7 @@ add_task(async function test_commands_handleCommands() {
},
};
const commands = new FxAccountsCommands(fxAccounts);
commands.sendTab.handle = (sender, data, reason) => {
commands.sendTab.handle = () => {
return {
title: "testTitle",
uri: "https://testURI",
@ -436,7 +436,7 @@ add_task(async function test_commands_handleCommands_invalid_tab() {
},
};
const commands = new FxAccountsCommands(fxAccounts);
commands.sendTab.handle = (sender, data, reason) => {
commands.sendTab.handle = () => {
return {
title: "badUriTab",
uri: "file://path/to/pdf",


@ -94,7 +94,7 @@ Object.setPrototypeOf(
FxAccountsClient.prototype
);
function MockFxAccounts(device = {}) {
function MockFxAccounts() {
return new FxAccounts({
fxAccountsClient: new MockFxAccountsClient(),
newAccountState(credentials) {


@ -61,7 +61,7 @@ const fxAccounts = {
},
_internal: {
keys: {
getKeyForScope(scope) {
getKeyForScope() {
return {
kid: "123456",
k: KSYNC,


@ -142,10 +142,10 @@ add_test(function fetchAndCacheProfile_always_bumps_cachedAt() {
profile._cachedAt = 12345;
return profile._fetchAndCacheProfile().then(
result => {
() => {
do_throw("Should not succeed");
},
err => {
() => {
Assert.notEqual(profile._cachedAt, 12345, "cachedAt has been bumped");
run_next_test();
}
@ -164,7 +164,7 @@ add_test(function fetchAndCacheProfile_sendsETag() {
};
let profile = CreateFxAccountsProfile(fxa, client);
return profile._fetchAndCacheProfile().then(result => {
return profile._fetchAndCacheProfile().then(() => {
run_next_test();
});
});
@ -282,7 +282,7 @@ add_test(function fetchAndCacheProfile_alreadyCached() {
};
let profile = CreateFxAccountsProfile(fxa, client);
profile._cacheProfile = function (toCache) {
profile._cacheProfile = function () {
do_throw("This method should not be called.");
};
@ -614,7 +614,7 @@ add_test(function getProfile_has_cached_fetch_deleted() {
// instead of checking this in a mocked "save" function, just check after the
// observer
makeObserver(ON_PROFILE_CHANGE_NOTIFICATION, function (subject, topic, data) {
makeObserver(ON_PROFILE_CHANGE_NOTIFICATION, function () {
profile.getProfile().then(profileData => {
Assert.equal(null, profileData.avatar);
run_next_test();


@ -39,7 +39,7 @@ let mockResponse = function (response) {
Request.ifNoneMatchSet = true;
}
},
async dispatch(method, payload) {
async dispatch() {
this.response = response;
return this.response;
},
@ -74,7 +74,7 @@ let mockResponseError = function (error) {
return function () {
return {
setHeader() {},
async dispatch(method, payload) {
async dispatch() {
throw error;
},
};
@ -221,7 +221,7 @@ add_test(function server401ResponseThenSuccess() {
let numRequests = 0;
let numAuthHeaders = 0;
// Like mockResponse but we want access to headers etc.
client._Request = function (requestUri) {
client._Request = function () {
return {
setHeader(name, value) {
if (name == "Authorization") {
@ -229,7 +229,7 @@ add_test(function server401ResponseThenSuccess() {
Assert.equal(value, "Bearer " + lastToken);
}
},
async dispatch(method, payload) {
async dispatch() {
this.response = responses[numRequests];
++numRequests;
return this.response;
@ -283,7 +283,7 @@ add_test(function server401ResponsePersists() {
let numRequests = 0;
let numAuthHeaders = 0;
client._Request = function (requestUri) {
client._Request = function () {
return {
setHeader(name, value) {
if (name == "Authorization") {
@ -291,7 +291,7 @@ add_test(function server401ResponsePersists() {
Assert.equal(value, "Bearer " + lastToken);
}
},
async dispatch(method, payload) {
async dispatch() {
this.response = response;
++numRequests;
return this.response;


@ -179,7 +179,7 @@ add_test(function observePushTopicDeviceConnected() {
return this;
},
};
let obs = (subject, topic, data) => {
let obs = (subject, topic) => {
Services.obs.removeObserver(obs, topic);
run_next_test();
};
@ -392,7 +392,7 @@ add_test(function observePushTopicProfileUpdated() {
return this;
},
};
let obs = (subject, topic, data) => {
let obs = (subject, topic) => {
Services.obs.removeObserver(obs, topic);
run_next_test();
};


@ -62,7 +62,7 @@ MockedSecureStorage.prototype = {
// "TypeError: this.STORAGE_LOCKED is not a constructor"
STORAGE_LOCKED: function () {},
/* eslint-enable object-shorthand */
async get(uid, email) {
async get() {
this.fetchCount++;
if (this.locked) {
throw new this.STORAGE_LOCKED();


@ -202,7 +202,7 @@ add_test(function test_error_message_remove_profile_path() {
const toTest = Object.keys(errors).length;
for (const key in errors) {
let error = errors[key];
channel._channel.send = (message, context) => {
channel._channel.send = message => {
equal(
message.data.error.message,
error.expected,
@ -403,7 +403,7 @@ add_test(function test_fxa_status_message() {
});
channel._channel = {
send(response, sendingContext) {
send(response) {
Assert.equal(response.command, "fxaccounts:fxa_status");
Assert.equal(response.messageId, 123);
@ -513,7 +513,7 @@ add_task(async function test_helpers_login_set_previous_account_name_hash() {
let helpers = new FxAccountsWebChannelHelpers({
fxAccounts: {
_internal: {
setSignedInUser(accountData) {
setSignedInUser() {
return new Promise(resolve => {
// previously signed in user preference is updated.
Assert.equal(
@ -554,7 +554,7 @@ add_task(
let helpers = new FxAccountsWebChannelHelpers({
fxAccounts: {
_internal: {
setSignedInUser(accountData) {
setSignedInUser() {
return new Promise(resolve => {
// previously signed in user preference should not be updated.
Assert.equal(


@ -108,7 +108,7 @@ function executeIDB(db, storeNames, mode, callback, desc) {
desc || "execute()"
)
);
transaction.oncomplete = event => resolve(result);
transaction.oncomplete = () => resolve(result);
// Simplify access to a single datastore:
if (stores.length == 1) {
stores = stores[0];


@ -34,7 +34,7 @@ ChromeUtils.defineLazyGetter(lazy, "console", () => lazy.Utils.log);
function cacheProxy(target) {
const cache = new Map();
return new Proxy(target, {
get(target, prop, receiver) {
get(target, prop) {
if (!cache.has(prop)) {
cache.set(prop, target[prop]);
}


@ -15,7 +15,7 @@ RemoteSettingsTimer.prototype = {
contractID: "@mozilla.org/services/settings;1",
// By default, this timer fires once every 24 hours. See the "services.settings.poll_interval" pref.
notify(timer) {
notify() {
lazy.RemoteSettings.pollChanges({ trigger: "timer" }).catch(e =>
console.error(e)
);


@ -68,9 +68,9 @@ async function clear_state() {
downloader = new Downloader("main", "some-collection");
const dummyCacheImpl = {
get: async attachmentId => {},
set: async (attachmentId, attachment) => {},
delete: async attachmentId => {},
get: async () => {},
set: async () => {},
delete: async () => {},
};
// The download() method requires a cacheImpl, but the Downloader
// class does not have one. Define a dummy no-op one.
@ -388,7 +388,7 @@ async function doTestDownloadCacheImpl({ simulateCorruption }) {
throw new Error("Simulation of corrupted cache (write)");
}
},
async delete(attachmentId) {},
async delete() {},
};
Object.defineProperty(downloader, "cacheImpl", { value: cacheImpl });


@ -545,7 +545,7 @@ add_task(async function test_get_does_not_verify_signature_if_load_dump() {
let called;
clientWithDump._verifier = {
async asyncVerifyContentSignature(serialized, signature) {
async asyncVerifyContentSignature() {
called = true;
return true;
},
@ -583,7 +583,7 @@ add_task(
const backup = clientWithDump._verifier;
let callCount = 0;
clientWithDump._verifier = {
async asyncVerifyContentSignature(serialized, signature) {
async asyncVerifyContentSignature() {
callCount++;
return true;
},
@ -640,7 +640,7 @@ add_task(
let called;
clientWithDump._verifier = {
async asyncVerifyContentSignature(serialized, signature) {
async asyncVerifyContentSignature() {
called = true;
return true;
},
@ -1174,7 +1174,7 @@ add_task(clear_state);
add_task(async function test_sync_event_is_not_sent_from_get_when_no_dump() {
let called = false;
client.on("sync", e => {
client.on("sync", () => {
called = true;
});


@ -188,7 +188,7 @@ add_task(async function test_check_success() {
// Ensure that the remote-settings:changes-poll-end notification works
let notificationObserved = false;
const observer = {
observe(aSubject, aTopic, aData) {
observe() {
Services.obs.removeObserver(this, "remote-settings:changes-poll-end");
notificationObserved = true;
},
@ -258,7 +258,7 @@ add_task(async function test_update_timer_interface() {
await new Promise(resolve => {
const e = "remote-settings:changes-poll-end";
const changesPolledObserver = {
observe(aSubject, aTopic, aData) {
observe() {
Services.obs.removeObserver(this, e);
resolve();
},
@ -288,7 +288,7 @@ add_task(async function test_check_up_to_date() {
// Ensure that the remote-settings:changes-poll-end notification is sent.
let notificationObserved = false;
const observer = {
observe(aSubject, aTopic, aData) {
observe() {
Services.obs.removeObserver(this, "remote-settings:changes-poll-end");
notificationObserved = true;
},
@ -686,7 +686,7 @@ add_task(async function test_server_error() {
let notificationObserved = false;
const observer = {
observe(aSubject, aTopic, aData) {
observe() {
Services.obs.removeObserver(this, "remote-settings:changes-poll-end");
notificationObserved = true;
},
@ -807,7 +807,7 @@ add_task(async function test_client_error() {
let notificationsObserved = [];
const observer = {
observe(aSubject, aTopic, aData) {
observe(aSubject, aTopic) {
Services.obs.removeObserver(this, aTopic);
notificationsObserved.push([aTopic, aSubject.wrappedJSObject]);
},
@ -935,7 +935,7 @@ add_task(
// Wait for the "sync-broken-error" notification.
let notificationObserved = false;
const observer = {
observe(aSubject, aTopic, aData) {
observe() {
notificationObserved = true;
},
};


@ -487,7 +487,7 @@ add_task(async function test_check_synchronization_with_signatures() {
);
let syncEventSent = false;
client.on("sync", ({ data }) => {
client.on("sync", () => {
syncEventSent = true;
});
@ -542,7 +542,7 @@ add_task(async function test_check_synchronization_with_signatures() {
registerHandlers(badSigGoodOldResponses);
syncEventSent = false;
client.on("sync", ({ data }) => {
client.on("sync", () => {
syncEventSent = true;
});
@ -783,7 +783,7 @@ add_task(async function test_check_synchronization_with_signatures() {
const sigCalls = [];
let i = 0;
client._verifier = {
async asyncVerifyContentSignature(serialized, signature) {
async asyncVerifyContentSignature(serialized) {
sigCalls.push(serialized);
console.log(`verify call ${i}`);
return [


@ -82,8 +82,8 @@ add_task(async function test_throws_error_if_worker_fails_async() {
// should be reported to the caller.
await new Promise((resolve, reject) => {
const request = indexedDB.deleteDatabase("remote-settings");
request.onsuccess = event => resolve();
request.onblocked = event => reject(new Error("Cannot delete DB"));
request.onsuccess = () => resolve();
request.onblocked = () => reject(new Error("Cannot delete DB"));
request.onerror = event => reject(event.target.error);
});
let error;


@ -41,7 +41,7 @@ add_task(async function test_shutdown_abort_after_start() {
const request = store
.index("cid")
.openCursor(IDBKeyRange.only("foopydoo/foo"));
request.onsuccess = event => {
request.onsuccess = () => {
makeRequest();
};
}
@ -74,7 +74,7 @@ add_task(async function test_shutdown_immediate_abort() {
let request = store
.index("cid")
.openCursor(IDBKeyRange.only("foopydoo/foo"));
request.onsuccess = event => {
request.onsuccess = () => {
// Abort immediately.
Database._shutdownHandler();
request = store


@ -156,7 +156,7 @@ AboutWeaveLog.prototype = {
"nsISupportsWeakReference",
]),
getURIFlags(aURI) {
getURIFlags() {
return 0;
},


@ -29,7 +29,7 @@ export function FakeFilesystemService(contents) {
self.fakeContents["weave/" + filePath + ".json"] = JSON.stringify(json);
};
Utils.jsonLoad = async function jsonLoad(filePath, that) {
Utils.jsonLoad = async function jsonLoad(filePath) {
let obj;
let json = self.fakeContents["weave/" + filePath + ".json"];
if (json) {
@ -38,14 +38,14 @@ export function FakeFilesystemService(contents) {
return obj;
};
Utils.jsonMove = function jsonMove(aFrom, aTo, that) {
Utils.jsonMove = function jsonMove(aFrom, aTo) {
const fromPath = "weave/" + aFrom + ".json";
self.fakeContents["weave/" + aTo + ".json"] = self.fakeContents[fromPath];
delete self.fakeContents[fromPath];
return Promise.resolve();
};
Utils.jsonRemove = function jsonRemove(filePath, that) {
Utils.jsonRemove = function jsonRemove(filePath) {
delete self.fakeContents["weave/" + filePath + ".json"];
return Promise.resolve();
};
@ -79,19 +79,17 @@ export function FakeCryptoService() {
delete Weave.Crypto; // get rid of the getter first
Weave.Crypto = this;
RawCryptoWrapper.prototype.ciphertextHMAC = function ciphertextHMAC(
keyBundle
) {
RawCryptoWrapper.prototype.ciphertextHMAC = function ciphertextHMAC() {
return fakeSHA256HMAC(this.ciphertext);
};
}
FakeCryptoService.prototype = {
async encrypt(clearText, symmetricKey, iv) {
async encrypt(clearText) {
return clearText;
},
async decrypt(cipherText, symmetricKey, iv) {
async decrypt(cipherText) {
return cipherText;
},
@ -104,7 +102,7 @@ FakeCryptoService.prototype = {
return btoa("fake-fake-fake-random-iv");
},
expandData: function expandData(data, len) {
expandData: function expandData(data) {
return data;
},


@ -23,7 +23,7 @@ export var initializeIdentityWithTokenServerResponse = function (response) {
}
// A mock request object.
function MockRESTRequest(url) {}
function MockRESTRequest() {}
MockRESTRequest.prototype = {
_log: requestLog,
setHeader() {},


@ -87,7 +87,7 @@ const UIStateInternal = {
this._initialized = false;
},
observe(subject, topic, data) {
observe(subject, topic) {
switch (topic) {
case "weave:service:sync:start":
this.toggleSyncActivity(true);


@ -43,7 +43,7 @@ class BridgedStore {
this._batchChunkSize = 500;
}
async applyIncomingBatch(records, countTelemetry) {
async applyIncomingBatch(records) {
for (let chunk of lazy.PlacesUtils.chunkArray(
records,
this._batchChunkSize


@ -114,13 +114,13 @@ export class CollectionValidator {
// Return whether or not a server item should be present on the client. Expected
// to be overridden.
clientUnderstands(item) {
clientUnderstands() {
return true;
}
// Return whether or not a client item should be present on the server. Expected
// to be overridden
async syncedByClient(item) {
async syncedByClient() {
return true;
}


@ -113,12 +113,12 @@ Tracker.prototype = {
},
// Also unsupported.
async addChangedID(id, when) {
async addChangedID() {
throw new TypeError("Can't add changed ID to this tracker");
},
// Ditto.
async removeChangedID(...ids) {
async removeChangedID() {
throw new TypeError("Can't remove changed IDs from this tracker");
},
@ -155,7 +155,7 @@ Tracker.prototype = {
// Override these in your subclasses.
onStart() {},
onStop() {},
async observe(subject, topic, data) {},
async observe() {},
engineIsEnabled() {
if (!this.engine) {
@ -437,7 +437,7 @@ Store.prototype = {
* @param record
* The store record to create an item from
*/
async create(record) {
async create() {
throw new Error("override create in a subclass");
},
@ -450,7 +450,7 @@ Store.prototype = {
* @param record
* The store record to delete an item from
*/
async remove(record) {
async remove() {
throw new Error("override remove in a subclass");
},
@ -463,7 +463,7 @@ Store.prototype = {
* @param record
* The record to use to update an item from
*/
async update(record) {
async update() {
throw new Error("override update in a subclass");
},
@ -477,7 +477,7 @@ Store.prototype = {
* string record ID
* @return boolean indicating whether record exists locally
*/
async itemExists(id) {
async itemExists() {
throw new Error("override itemExists in a subclass");
},
@ -495,7 +495,7 @@ Store.prototype = {
* constructor for the newly-created record.
* @return record type for this engine
*/
async createRecord(id, collection) {
async createRecord() {
throw new Error("override createRecord in a subclass");
},
@ -507,7 +507,7 @@ Store.prototype = {
* @param newID
* string new record ID
*/
async changeItemID(oldID, newID) {
async changeItemID() {
throw new Error("override changeItemID in a subclass");
},
@ -1040,7 +1040,7 @@ SyncEngine.prototype = {
* Note: Overriding engines must take resyncs into account -- score will not
* be cleared.
*/
shouldSkipSync(syncReason) {
shouldSkipSync() {
return false;
},
@ -1550,7 +1550,7 @@ SyncEngine.prototype = {
// Indicates whether an incoming item should be deleted from the server at
// the end of the sync. Engines can override this method to clean up records
// that shouldn't be on the server.
_shouldDeleteRemotely(remoteItem) {
_shouldDeleteRemotely() {
return false;
},
@ -1560,7 +1560,7 @@ SyncEngine.prototype = {
*
* @return GUID of the similar item; falsy otherwise
*/
async _findDupe(item) {
async _findDupe() {
// By default, assume there's no dupe items for the engine
},
@ -1568,7 +1568,7 @@ SyncEngine.prototype = {
* Called before a remote record is discarded due to failed reconciliation.
* Used by bookmark sync to merge folder child orders.
*/
beforeRecordDiscard(localRecord, remoteRecord, remoteIsNewer) {},
beforeRecordDiscard() {},
// Called when the server has a record marked as deleted, but locally we've
// changed it more recently than the deletion. If we return false, the
@ -1576,7 +1576,7 @@ SyncEngine.prototype = {
// record to the server -- any extra work that's needed as part of this
// process should be done at this point (such as mark the record's parent
// for reuploading in the case of bookmarks).
async _shouldReviveRemotelyDeletedRecord(remoteItem) {
async _shouldReviveRemotelyDeletedRecord() {
return true;
},
@ -1948,7 +1948,7 @@ SyncEngine.prototype = {
}
},
async _onRecordsWritten(succeeded, failed, serverModifiedTime) {
async _onRecordsWritten() {
// Implement this method to take specific actions against successfully
// uploaded records and failed records.
},


@ -513,7 +513,7 @@ BookmarksEngine.prototype = {
await this._apply();
},
async _reconcile(item) {
async _reconcile() {
return true;
},
@ -752,7 +752,7 @@ BookmarksStore.prototype = {
});
},
async applyIncomingBatch(records, countTelemetry) {
async applyIncomingBatch(records) {
let buf = await this.ensureOpenMirror();
for (let chunk of lazy.PlacesUtils.chunkArray(
records,
@ -921,11 +921,11 @@ Object.setPrototypeOf(BookmarksTracker.prototype, Tracker.prototype);
class BookmarksChangeset extends Changeset {
// Only `_reconcile` calls `getModifiedTimestamp` and `has`, and the engine
// does its own reconciliation.
getModifiedTimestamp(id) {
getModifiedTimestamp() {
throw new Error("Don't use timestamps to resolve bookmark conflicts");
}
has(id) {
has() {
throw new Error("Don't use the changeset to resolve bookmark conflicts");
}


@ -1107,7 +1107,7 @@ ClientsTracker.prototype = {
Svc.Obs.remove("fxaccounts:new_device_id", this.asyncObserver);
},
async observe(subject, topic, data) {
async observe(subject, topic) {
switch (topic) {
case "nsPref:changed":
this._log.debug("client.name preference changed");


@ -124,7 +124,7 @@ ExtensionStorageEngineBridge.prototype = {
},
_takeMigrationInfo() {
return new Promise((resolve, reject) => {
return new Promise(resolve => {
this.component
.QueryInterface(Ci.mozIExtensionStorageArea)
.takeMigrationInfo({
@ -291,7 +291,7 @@ ExtensionStorageTracker.prototype = {
lazy.Svc.Obs.remove("ext.storage.sync-changed", this.asyncObserver);
},
async observe(subject, topic, data) {
async observe(subject, topic) {
if (this.ignoreAll) {
return;
}


@ -189,7 +189,7 @@ FormStore.prototype = {
await this._processChange(change);
},
async update(record) {
async update() {
this._log.trace("Ignoring form record update request!");
},


@ -386,7 +386,7 @@ PrefStore.prototype = {
return allprefs;
},
async changeItemID(oldID, newID) {
async changeItemID() {
this._log.trace("PrefStore GUID is constant!");
},
@ -406,11 +406,11 @@ PrefStore.prototype = {
return record;
},
async create(record) {
async create() {
this._log.trace("Ignoring create request");
},
async remove(record) {
async remove() {
this._log.trace("Ignoring remove request");
},


@ -430,7 +430,7 @@ export const TabProvider = {
.then(iconData => {
thisTab.icon = iconData.uri.spec;
})
.catch(ex => {
.catch(() => {
log.trace(
`Failed to fetch favicon for ${url}`,
thisTab.urlHistory[0]
@ -503,7 +503,7 @@ TabTracker.prototype = {
}
},
async observe(subject, topic, data) {
async observe(subject, topic) {
switch (topic) {
case "domwindowopened":
let onLoad = () => {


@ -182,7 +182,7 @@ RawCryptoWrapper.prototype = {
* @param {Cleartext} outgoingCleartext The cleartext to upload.
* @returns {String} The serialized cleartext.
*/
transformBeforeEncrypt(outgoingCleartext) {
transformBeforeEncrypt() {
throw new TypeError("Override to stringify outgoing records");
},
@ -194,7 +194,7 @@ RawCryptoWrapper.prototype = {
* @param {String} incomingCleartext The decrypted cleartext string.
* @returns {Cleartext} The parsed cleartext.
*/
transformAfterDecrypt(incomingCleartext) {
transformAfterDecrypt() {
throw new TypeError("Override to parse incoming records");
},
@ -527,7 +527,7 @@ CollectionKeyManager.prototype = {
/**
* Create a WBO for the current keys.
*/
asWBO(collection, id) {
asWBO() {
return this._makeWBO(this._collections, this._default);
},


@ -164,7 +164,7 @@ SyncAuthManager.prototype = {
this._token = null;
},
async observe(subject, topic, data) {
async observe(subject, topic) {
this._log.debug("observed " + topic);
if (!this.username) {
this._log.info("Sync is not configured, so ignoring the notification");
@ -276,7 +276,7 @@ SyncAuthManager.prototype = {
* allows us to avoid a network request for when we actually need the
* migration info.
*/
prefetchMigrationSentinel(service) {
prefetchMigrationSentinel() {
// nothing to do here until we decide to migrate away from FxA.
},


@ -530,8 +530,8 @@ async function sync_engine_and_validate_telem(
// Returns a promise that resolves once the specified observer notification
// has fired.
function promiseOneObserver(topic, callback) {
return new Promise((resolve, reject) => {
function promiseOneObserver(topic) {
return new Promise(resolve => {
let observer = function (subject, data) {
Svc.Obs.remove(topic, observer);
resolve({ subject, data });


@ -687,8 +687,8 @@ function track_collections_helper() {
* prototype, and override as appropriate.
*/
var SyncServerCallback = {
onCollectionDeleted: function onCollectionDeleted(user, collection) {},
onItemDeleted: function onItemDeleted(user, collection, wboID) {},
onCollectionDeleted: function onCollectionDeleted() {},
onItemDeleted: function onItemDeleted() {},
/**
* Called at the top of every request.
@ -699,7 +699,7 @@ var SyncServerCallback = {
* must be taken to not screw with the response body or headers that may
* conflict with normal operation of this server.
*/
onRequest: function onRequest(request, response) {},
onRequest: function onRequest() {},
};
/**


@ -119,7 +119,7 @@ add_task(async function test_source_uri_rewrite() {
let installCalled = false;
Object.getPrototypeOf(AddonUtils).installAddonFromSearchResult =
async function testInstallAddon(addon, metadata) {
async function testInstallAddon(addon) {
Assert.equal(
SERVER_ADDRESS + "/require.xpi?src=sync",
addon.sourceURI.spec


@ -49,7 +49,7 @@ function getDummyServerAndClient() {
add_task(async function test_valid() {
let { server, client } = getDummyServerAndClient();
let validator = new AddonValidator({
_findDupe(item) {
_findDupe() {
return null;
},
isAddonSyncable(item) {


@ -778,7 +778,7 @@ add_task(async function test_onFaviconChanged() {
iconURI,
true,
PlacesUtils.favicons.FAVICON_LOAD_NON_PRIVATE,
(uri, dataLen, data, mimeType) => {
() => {
resolve();
},
Services.scriptSecurityManager.getSystemPrincipal()


@ -779,7 +779,7 @@ add_task(async function test_filter_duplicate_names() {
// Check that a subsequent Sync doesn't report anything as being processed.
let counts;
Svc.Obs.add("weave:engine:sync:applied", function observe(subject, data) {
Svc.Obs.add("weave:engine:sync:applied", function observe(subject) {
Svc.Obs.remove("weave:engine:sync:applied", observe);
counts = subject;
});


@ -79,7 +79,7 @@ add_task(async function testOldMeta() {
let declinedEngines = new DeclinedEngines(Service);
function onNotDeclined(subject, topic, data) {
function onNotDeclined(subject) {
Observers.remove("weave:engines:notdeclined", onNotDeclined);
Assert.ok(
subject.undecided.has("actual"),
@ -129,7 +129,7 @@ add_task(async function testDeclinedMeta() {
let declinedEngines = new DeclinedEngines(Service);
function onNotDeclined(subject, topic, data) {
function onNotDeclined(subject) {
Observers.remove("weave:engines:notdeclined", onNotDeclined);
Assert.ok(
subject.undecided.has("actual"),


@ -37,7 +37,7 @@ add_task(async function test_processIncoming_abort() {
);
meta_global.payload.engines = { rotary: { version: engine.version, syncID } };
_("Fake applyIncoming to abort.");
engine._store.applyIncoming = async function (record) {
engine._store.applyIncoming = async function () {
let ex = {
code: SyncEngine.prototype.eEngineAbortApplyIncoming,
cause: "Nooo",


@ -286,13 +286,10 @@ add_task(async function test_info_collections_login_server_maintenance_error() {
await configureIdentity({ username: "broken.info" }, server);
let backoffInterval;
Svc.Obs.add(
"weave:service:backoff:interval",
function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
}
);
Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@ -318,13 +315,10 @@ add_task(async function test_meta_global_login_server_maintenance_error() {
await configureIdentity({ username: "broken.meta" }, server);
let backoffInterval;
Svc.Obs.add(
"weave:service:backoff:interval",
function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
}
);
Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);


@ -74,13 +74,10 @@ add_task(async function test_crypto_keys_login_server_maintenance_error() {
Service.collectionKeys.clear();
let backoffInterval;
Svc.Obs.add(
"weave:service:backoff:interval",
function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
}
);
Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@ -178,13 +175,10 @@ add_task(
await configureIdentity({ username: "broken.info" }, server);
let backoffInterval;
Svc.Obs.add(
"weave:service:backoff:interval",
function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
}
);
Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@ -215,13 +209,10 @@ add_task(
await configureIdentity({ username: "broken.meta" }, server);
let backoffInterval;
Svc.Obs.add(
"weave:service:backoff:interval",
function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
}
);
Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@ -254,13 +245,10 @@ add_task(
Service.collectionKeys.clear();
let backoffInterval;
Svc.Obs.add(
"weave:service:backoff:interval",
function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
}
);
Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@ -291,13 +279,10 @@ add_task(
await configureIdentity({ username: "broken.keys" }, server);
let backoffInterval;
Svc.Obs.add(
"weave:service:backoff:interval",
function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
}
);
Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@ -328,13 +313,10 @@ add_task(
await configureIdentity({ username: "broken.wipe" }, server);
let backoffInterval;
Svc.Obs.add(
"weave:service:backoff:interval",
function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
}
);
Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@ -368,13 +350,10 @@ add_task(
engine.enabled = true;
let backoffInterval;
Svc.Obs.add(
"weave:service:backoff:interval",
function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
}
);
Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);


@ -82,7 +82,7 @@ function readFile(file, callback) {
uri: NetUtil.newURI(file),
loadUsingSystemPrincipal: true,
},
function (inputStream, statusCode, request) {
function (inputStream, statusCode) {
let data = NetUtil.readInputStreamToString(
inputStream,
inputStream.available()


@ -46,7 +46,7 @@ function prepareServer(cbAfterTokenFetch) {
// A server callback to ensure we don't accidentally hit the wrong endpoint
// after a node reassignment.
let callback = {
onRequest(req, resp) {
onRequest(req) {
let full = `${req.scheme}://${req.host}:${req.port}${req.path}`;
let expected = config.fxaccount.token.endpoint;
Assert.ok(


@ -16,13 +16,13 @@ XPCOMUtils.defineLazyServiceGetter(
"mozIAsyncHistory"
);
async function rawAddVisit(id, uri, visitPRTime, transitionType) {
return new Promise((resolve, reject) => {
return new Promise(resolve => {
let results = [];
let handler = {
handleResult(result) {
results.push(result);
},
handleError(resultCode, placeInfo) {
handleError(resultCode) {
do_throw(`updatePlaces gave error ${resultCode}!`);
},
handleCompletion(count) {


@ -17,7 +17,7 @@ const TIMESTAMP3 = (Date.now() - 123894) * 1000;
function promiseOnVisitObserved() {
return new Promise(res => {
let listener = new PlacesWeakCallbackWrapper(events => {
let listener = new PlacesWeakCallbackWrapper(() => {
PlacesObservers.removeListener(["page-visited"], listener);
res();
});


@ -160,7 +160,7 @@ add_task(async function test_dont_track_expiration() {
let scorePromise = promiseOneObserver("weave:engine:score:updated");
// Observe expiration.
Services.obs.addObserver(function onExpiration(aSubject, aTopic, aData) {
Services.obs.addObserver(function onExpiration(aSubject, aTopic) {
Services.obs.removeObserver(onExpiration, aTopic);
// Remove the remaining page to update its score.
PlacesUtils.history.remove(uriToRemove);


@ -171,7 +171,7 @@ add_task(async function hmac_error_during_node_reassignment() {
}
let onSyncFinished = function () {};
let obs = {
observe: function observe(subject, topic, data) {
observe: function observe(subject, topic) {
switch (topic) {
case "weave:service:sync:error":
onSyncError();


@ -160,7 +160,7 @@ add_task(async function test_storage_request() {
async function deleteWBONotExists() {
let req = localRequest(server, keysURL);
server.callback.onItemDeleted = function (username, collection, wboID) {
server.callback.onItemDeleted = function () {
do_throw("onItemDeleted should not have been called.");
};


@ -51,7 +51,7 @@ add_task(async function setup() {
// Don't remove stale clients when syncing. This is a test-only workaround
// that lets us add clients directly to the store, without losing them on
// the next sync.
clientsEngine._removeRemoteClient = async id => {};
clientsEngine._removeRemoteClient = async () => {};
});
add_task(async function test_successful_sync_adjustSyncInterval() {


@ -480,7 +480,7 @@ add_task(async function test_post_override_content_type() {
add_task(async function test_weave_backoff() {
_("X-Weave-Backoff header notifies observer");
let backoffInterval;
function onBackoff(subject, data) {
function onBackoff(subject) {
backoffInterval = subject;
}
Observers.add("weave:service:backoff:interval", onBackoff);


@ -48,7 +48,7 @@ add_task(async function run_test() {
Svc.PrefBranch.setIntPref("lastPing", Math.floor(Date.now() / 1000));
let threw = false;
Svc.Obs.add("weave:service:sync:error", function (subject, data) {
Svc.Obs.add("weave:service:sync:error", function () {
threw = true;
});


@ -78,13 +78,10 @@ add_task(async function test_verifyLogin() {
Service._updateCachedURLs();
Assert.ok(!Service.status.enforceBackoff);
let backoffInterval;
Svc.Obs.add(
"weave:service:backoff:interval",
function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
}
);
Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
Assert.equal(false, await Service.verifyLogin());
Assert.ok(Service.status.enforceBackoff);
Assert.equal(backoffInterval, 42);


@ -1002,7 +1002,7 @@ function mockTokenServer(func) {
requestLog.addAppender(new Log.DumpAppender());
requestLog.level = Log.Level.Trace;
}
function MockRESTRequest(url) {}
function MockRESTRequest() {}
MockRESTRequest.prototype = {
_log: requestLog,
setHeader() {},


@ -87,7 +87,7 @@ let MockClientsEngine = {
return tabsEngine.clients[id].fxaDeviceId;
},
getClientType(id) {
getClientType() {
return "desktop";
},
};


@ -92,7 +92,7 @@ add_task(async function setup() {
// Don't remove stale clients when syncing. This is a test-only workaround
// that lets us add clients directly to the store, without losing them on
// the next sync.
clientsEngine._removeRemoteClient = async id => {};
clientsEngine._removeRemoteClient = async () => {};
await Service.engineManager.clear();
validate_all_future_pings();


@ -179,7 +179,7 @@ add_task(async function test_tab_quickWrite_telemetry() {
let telem = get_sync_test_telemetry();
telem.payloads = [];
let oldSubmit = telem.submit;
let submitPromise = new Promise((resolve, reject) => {
let submitPromise = new Promise(resolve => {
telem.submit = function (ping) {
telem.submit = oldSubmit;
resolve(ping);


@ -314,7 +314,7 @@ add_task(async function test_syncError() {
function observeUIUpdate() {
return new Promise(resolve => {
let obs = (aSubject, aTopic, aData) => {
let obs = (aSubject, aTopic) => {
Services.obs.removeObserver(obs, aTopic);
const state = UIState.get();
resolve(state);


@ -602,7 +602,7 @@ class ServerRecordInspection {
await lazy.Async.yieldingForEach(
this.liveRecords,
(record, i) => {
record => {
if (!seen.has(record.id)) {
// We intentionally don't record the parentid here, since we only record
// that if the record refers to a parent that doesn't exist, which we


@ -44,7 +44,7 @@ export var BrowserTabs = {
// Wait for the tab to load.
await new Promise(resolve => {
let mm = browser.ownerGlobal.messageManager;
mm.addMessageListener("tps:loadEvent", function onLoad(msg) {
mm.addMessageListener("tps:loadEvent", function onLoad() {
mm.removeMessageListener("tps:loadEvent", onLoad);
resolve();
});


@ -16,7 +16,7 @@ export var BrowserWindows = {
* @param aPrivate The private option.
* @return nothing
*/
Add(aPrivate, fn) {
Add(aPrivate) {
return new Promise(resolve => {
let mainWindow = Services.wm.getMostRecentWindow("navigator:browser");
let win = mainWindow.OpenBrowserWindow({ private: aPrivate });


@ -168,7 +168,7 @@ export var TPS = {
"nsISupportsWeakReference",
]),
observe: function TPS__observe(subject, topic, data) {
observe: function TPS__observe(subject, topic) {
try {
lazy.Logger.logInfo("----------event observed: " + topic);