Bug 1309481 - Remove leftover code specific to Logins from JSONFile.jsm and add tests; r=Paolo
MozReview-Commit-ID: DjEPCN2PXA1

--HG--
extra : rebase_source : d1150d6a0cbc94282d6120f38c094fb58eed9d5e
This commit is contained in:
parent 685b64a883
commit aa29135011
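After this change, consumers of JSONFile.jsm interact with the object only through load(), ensureDataReady(), the data getter and saveSoon(), while _save() and the raw _data object stay internal, and login-specific defaults move into LoginStore's dataPostProcessor. A minimal usage sketch, modeled on the new test file further down; the file name, the nextId default and the items property are illustrative assumptions, and the snippet presumes an xpcshell test scope where Cu and add_task are defined:

Cu.import("resource://gre/modules/JSONFile.jsm");
Cu.import("resource://gre/modules/osfile.jsm");

add_task(function* example_usage() {
  let store = new JSONFile({
    // Illustrative profile-relative path; real callers choose their own file.
    path: OS.Path.join(OS.Constants.Path.profileDir, "example.json"),
    dataPostProcessor(data) {
      // Consumer-specific defaults now live here rather than inside JSONFile.
      if (data.nextId === undefined) {
        data.nextId = 1;
      }
      return data;
    },
  });

  yield store.load();        // or store.ensureDataReady() on the synchronous path
  store.data.items = [];     // mutate the raw data synchronously
  store.saveSoon();          // schedules the internal _save() through a DeferredTask
});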
@@ -97,6 +97,10 @@ LoginStore.prototype.constructor = LoginStore;
  * Synchronously work on the data just loaded into memory.
  */
 LoginStore.prototype._dataPostProcessor = function(data) {
+  if (data.nextId === undefined) {
+    data.nextId = 1;
+  }
+
   // Create any arrays that are not present in the saved file.
   if (!data.logins) {
     data.logins = [];
@@ -121,7 +125,7 @@ LoginStore.prototype._dataPostProcessor = function(data) {
  * Migrates disabled hosts to the permission manager.
  */
 LoginStore.prototype._migrateDisabledHosts = function (data) {
-  for (let host of this.data.disabledHosts) {
+  for (let host of data.disabledHosts) {
     try {
       let uri = Services.io.newURI(host, null, null);
       Services.perms.add(uri, PERMISSION_SAVE_LOGINS, Services.perms.DENY_ACTION);
@@ -130,5 +134,5 @@ LoginStore.prototype._migrateDisabledHosts = function (data) {
     }
   }

-  delete this.data.disabledHosts;
+  delete data.disabledHosts;
 };
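To make the effect of the three LoginStore.jsm hunks above concrete, here is an illustrative before/after of the object handed to the post-processor; the input literal is invented for the example and store stands for a LoginStore instance:

// Object as parsed from an older logins.json on disk (illustrative):
let data = { disabledHosts: ["http://www.example.org"] };

data = store._dataPostProcessor(data);

// Expected shape afterwards, per the hunks above:
//   data.nextId === 1                     // defaulted by the new nextId block
//   Array.isArray(data.logins) === true   // missing arrays are created
//   ("disabledHosts" in data) === false   // each host is migrated to the
//                                         // permission manager, then deleted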
@@ -97,7 +97,7 @@ this.LoginManagerStorage_json.prototype = {
    */
   terminate() {
     this._store._saver.disarm();
-    return this._store.save();
+    return this._store._save();
   },

   addLogin(login) {
@@ -50,7 +50,7 @@ add_task(function* test_save_reload()

   storeForSave.data.disabledHosts.push("http://www.example.org");

-  yield storeForSave.save();
+  yield storeForSave._save();

   // Test the asynchronous initialization path.
   let storeForLoad = new LoginStore(storeForSave.path);
@@ -100,7 +100,7 @@ add_task(function* test_save_empty()
   let createdFile = yield OS.File.open(store.path, { create: true });
   yield createdFile.close();

-  yield store.save();
+  yield store._save();

   do_check_true(yield OS.File.exists(store.path));
 });
@@ -78,7 +78,8 @@ const kSaveDelayMs = 1500;
  *        - dataPostProcessor: Function triggered when data is just loaded. The
  *                             data object will be passed as the first argument
  *                             and should be returned no matter it's modified or
- *                             not.
+ *                             not. Its failure leads to the failure of load()
+ *                             and ensureDataReady().
  *        - saveDelayMs: Number indicating the delay (in milliseconds) between a
  *                       change to the data and the related save operation. The
  *                       default value will be applied if omitted.
@@ -93,7 +94,7 @@ function JSONFile(config) {
   if (config.saveDelayMs === undefined) {
     config.saveDelayMs = kSaveDelayMs;
   }
-  this._saver = new DeferredTask(() => this.save(), config.saveDelayMs);
+  this._saver = new DeferredTask(() => this._save(), config.saveDelayMs);

   AsyncShutdown.profileBeforeChange.addBlocker("JSON store: writing data",
                                                () => this._saver.finalize());
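The two configuration options touched above can be exercised together. A short sketch, where the path, the 100 ms delay and the entries default are assumptions made for illustration; only the option names come from the documented config object:

let store = new JSONFile({
  path: "/tmp/example.json",   // illustrative path
  saveDelayMs: 100,            // override the kSaveDelayMs default of 1500 ms
  dataPostProcessor(data) {
    if (!data.entries) {
      data.entries = [];
    }
    return data;               // the processor must return the object to use
  },
});
// After store.load(), each store.saveSoon() arms the DeferredTask created above,
// which calls the internal _save() once the delay elapses; the
// profileBeforeChange blocker finalizes any pending save at shutdown.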
@@ -105,82 +106,98 @@ JSONFile.prototype = {
    */
   path: "",

-  /**
-   * Serializable object containing the data. This is populated directly with
-   * the data loaded from the file, and is saved without modifications.
-   *
-   * This contains one property for each list.
-   */
-  data: null,
-
   /**
    * True when data has been loaded.
    */
   dataReady: false,

+  /**
+   * DeferredTask that handles the save operation.
+   */
+  _saver: null,
+
+  /**
+   * Internal data object.
+   */
+  _data: null,
+
+  /**
+   * Serializable object containing the data. This is populated directly with
+   * the data loaded from the file, and is saved without modifications.
+   *
+   * The raw data should be manipulated synchronously, without waiting for the
+   * event loop or for promise resolution, so that the saved file is always
+   * consistent.
+   */
+  get data() {
+    if (!this.dataReady) {
+      throw new Error("Data is not ready.");
+    }
+    return this._data;
+  },
+
   /**
    * Loads persistent data from the file to memory.
    *
    * @return {Promise}
    * @resolves When the operation finished successfully.
-   * @rejects JavaScript exception.
+   * @rejects JavaScript exception when dataPostProcessor fails. It never fails
+   *          if there is no dataPostProcessor.
    */
-  load() {
-    return Task.spawn(function* () {
-      try {
-        let bytes = yield OS.File.read(this.path);
+  load: Task.async(function* () {
+    let data = {};

-        // If synchronous loading happened in the meantime, exit now.
-        if (this.dataReady) {
-          return;
-        }
+    try {
+      let bytes = yield OS.File.read(this.path);

-        this.data = JSON.parse(gTextDecoder.decode(bytes));
-      } catch (ex) {
-        // If an exception occurred because the file did not exist, we should
-        // just start with new data. Other errors may indicate that the file is
-        // corrupt, thus we move it to a backup location before allowing it to
-        // be overwritten by an empty file.
-        if (!(ex instanceof OS.File.Error && ex.becauseNoSuchFile)) {
-          Cu.reportError(ex);
-
-          // Move the original file to a backup location, ignoring errors.
-          try {
-            let openInfo = yield OS.File.openUnique(this.path + ".corrupt",
-                                                    { humanReadable: true });
-            yield openInfo.file.close();
-            yield OS.File.move(this.path, openInfo.path);
-          } catch (e2) {
-            Cu.reportError(e2);
-          }
-        }
-
-        // In some rare cases it's possible for logins to have been added to
-        // our database between the call to OS.File.read and when we've been
-        // notified that there was a problem with it. In that case, leave the
-        // synchronously-added data alone. See bug 1029128, comment 4.
-        if (this.dataReady) {
-          return;
-        }
-
-        // In any case, initialize a new object to host the data.
-        this.data = {
-          nextId: 1,
-        };
+      // If synchronous loading happened in the meantime, exit now.
+      if (this.dataReady) {
+        return;
       }

-      this._processLoadedData();
-    }.bind(this));
-  },
+      data = JSON.parse(gTextDecoder.decode(bytes));
+    } catch (ex) {
+      // If an exception occurred because the file did not exist, we should
+      // just start with new data. Other errors may indicate that the file is
+      // corrupt, thus we move it to a backup location before allowing it to
+      // be overwritten by an empty file.
+      if (!(ex instanceof OS.File.Error && ex.becauseNoSuchFile)) {
+        Cu.reportError(ex);
+
+        // Move the original file to a backup location, ignoring errors.
+        try {
+          let openInfo = yield OS.File.openUnique(this.path + ".corrupt",
+                                                  { humanReadable: true });
+          yield openInfo.file.close();
+          yield OS.File.move(this.path, openInfo.path);
+        } catch (e2) {
+          Cu.reportError(e2);
+        }
+      }
+
+      // In some rare cases it's possible for data to have been added to
+      // our database between the call to OS.File.read and when we've been
+      // notified that there was a problem with it. In that case, leave the
+      // synchronously-added data alone.
+      if (this.dataReady) {
+        return;
+      }
+    }
+
+    this._processLoadedData(data);
+  }),

   /**
-   * Loads persistent data from the file to memory, synchronously.
+   * Loads persistent data from the file to memory, synchronously. An exception
+   * can be thrown only if dataPostProcessor exists and fails.
    */
   ensureDataReady() {
     if (this.dataReady) {
       return;
     }

+    let data = {};
+
     try {
       // This reads the file and automatically detects the UTF-8 encoding.
       let inputStream = new FileInputStream(new FileUtils.File(this.path),
@@ -188,8 +205,7 @@ JSONFile.prototype = {
                                             FileUtils.PERMS_FILE, 0);
       try {
         let json = Cc["@mozilla.org/dom/json;1"].createInstance(Ci.nsIJSON);
-        this.data = json.decodeFromStream(inputStream,
-                                          inputStream.available());
+        data = json.decodeFromStream(inputStream, inputStream.available());
       } finally {
         inputStream.close();
       }
@@ -214,24 +230,9 @@ JSONFile.prototype = {
           Cu.reportError(e2);
         }
       }
-
-      // In any case, initialize a new object to host the data.
-      this.data = {
-        nextId: 1,
-      };
     }

-    this._processLoadedData();
-  },
-
-  /**
-   * Synchronously work on the data just loaded into memory.
-   */
-  _processLoadedData() {
-    if (this._dataPostProcessor) {
-      this.data = this._dataPostProcessor(this.data);
-    }
-    this.dataReady = true;
+    this._processLoadedData(data);
   },

   /**
@@ -241,11 +242,6 @@ JSONFile.prototype = {
     return this._saver.arm();
   },

-  /**
-   * DeferredTask that handles the save operation.
-   */
-  _saver: null,
-
   /**
    * Saves persistent data from memory to the file.
    *
@@ -255,12 +251,18 @@ JSONFile.prototype = {
    * @resolves When the operation finished successfully.
    * @rejects JavaScript exception.
    */
-  save() {
-    return Task.spawn(function* () {
-      // Create or overwrite the file.
-      let bytes = gTextEncoder.encode(JSON.stringify(this.data));
-      yield OS.File.writeAtomic(this.path, bytes,
-                                { tmpPath: this.path + ".tmp" });
-    }.bind(this));
+  _save: Task.async(function* () {
+    // Create or overwrite the file.
+    let bytes = gTextEncoder.encode(JSON.stringify(this._data));
+    yield OS.File.writeAtomic(this.path, bytes,
+                              { tmpPath: this.path + ".tmp" });
+  }),
+
+  /**
+   * Synchronously work on the data just loaded into memory.
+   */
+  _processLoadedData(data) {
+    this._data = this._dataPostProcessor ? this._dataPostProcessor(data) : data;
+    this.dataReady = true;
   },
 };
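One behavioural consequence of the _data/data-getter split above is that reading .data before the store is initialized now throws instead of returning null. A short sketch with an assumed path:

let store = new JSONFile({ path: "/tmp/example.json" });  // illustrative path

try {
  let snapshot = store.data;   // throws Error("Data is not ready.")
} catch (ex) {
  // Callers must run load() or ensureDataReady() first.
}

store.ensureDataReady();       // synchronous initialization path
store.data.count = 1;          // the raw object now lives in the internal _data
store.saveSoon();              // public API; _save() stays internal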
toolkit/modules/tests/xpcshell/test_JSONFile.js (new file, 244 lines)
@@ -0,0 +1,244 @@
/**
 * Tests the JSONFile object.
 */

"use strict";

////////////////////////////////////////////////////////////////////////////////
//// Globals

const { classes: Cc, interfaces: Ci, utils: Cu, results: Cr } = Components;

Cu.import("resource://gre/modules/XPCOMUtils.jsm");

XPCOMUtils.defineLazyModuleGetter(this, "DownloadPaths",
                                  "resource://gre/modules/DownloadPaths.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "FileUtils",
                                  "resource://gre/modules/FileUtils.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "OS",
                                  "resource://gre/modules/osfile.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "JSONFile",
                                  "resource://gre/modules/JSONFile.jsm");

let gFileCounter = Math.floor(Math.random() * 1000000);

/**
 * Returns a reference to a temporary file, that is guaranteed not to exist, and
 * to have never been created before.
 *
 * @param aLeafName
 *        Suggested leaf name for the file to be created.
 *
 * @return nsIFile pointing to a non-existent file in a temporary directory.
 *
 * @note It is not enough to delete the file if it exists, or to delete the file
 *       after calling nsIFile.createUnique, because on Windows the delete
 *       operation in the file system may still be pending, preventing a new
 *       file with the same name to be created.
 */
function getTempFile(aLeafName)
{
  // Prepend a serial number to the extension in the suggested leaf name.
  let [base, ext] = DownloadPaths.splitBaseNameAndExtension(aLeafName);
  let leafName = base + "-" + gFileCounter + ext;
  gFileCounter++;

  // Get a file reference under the temporary directory for this test file.
  let file = FileUtils.getFile("TmpD", [leafName]);
  do_check_false(file.exists());

  do_register_cleanup(function () {
    if (file.exists()) {
      file.remove(false);
    }
  });

  return file;
}

const TEST_STORE_FILE_NAME = "test-store.json";

const TEST_DATA = {
  number: 123,
  string: "test",
  object: {
    prop1: 1,
    prop2: 2,
  },
};

////////////////////////////////////////////////////////////////////////////////
//// Tests

add_task(function* test_save_reload()
{
  let storeForSave = new JSONFile({
    path: getTempFile(TEST_STORE_FILE_NAME).path,
  });

  yield storeForSave.load();

  do_check_true(storeForSave.dataReady);
  do_check_matches(storeForSave.data, {});

  Object.assign(storeForSave.data, TEST_DATA);

  yield new Promise((resolve) => {
    let save = storeForSave._save.bind(storeForSave);
    storeForSave._save = () => {
      save();
      resolve();
    };
    storeForSave.saveSoon();
  });

  let storeForLoad = new JSONFile({
    path: storeForSave.path,
  });

  yield storeForLoad.load();

  Assert.deepEqual(storeForLoad.data, TEST_DATA);
});

add_task(function* test_load_sync()
{
  let storeForSave = new JSONFile({
    path: getTempFile(TEST_STORE_FILE_NAME).path
  });
  yield storeForSave.load();
  Object.assign(storeForSave.data, TEST_DATA);
  yield storeForSave._save();

  let storeForLoad = new JSONFile({
    path: storeForSave.path,
  });
  storeForLoad.ensureDataReady();

  Assert.deepEqual(storeForLoad.data, TEST_DATA);
});

add_task(function* test_load_with_dataPostProcessor()
{
  let storeForSave = new JSONFile({
    path: getTempFile(TEST_STORE_FILE_NAME).path
  });
  yield storeForSave.load();
  Object.assign(storeForSave.data, TEST_DATA);
  yield storeForSave._save();

  let random = Math.random();
  let storeForLoad = new JSONFile({
    path: storeForSave.path,
    dataPostProcessor: (data) => {
      Assert.deepEqual(data, TEST_DATA);

      data.test = random;
      return data;
    },
  });

  yield storeForLoad.load();

  do_check_eq(storeForLoad.data.test, random);
});

add_task(function* test_load_with_dataPostProcessor_fails()
{
  let store = new JSONFile({
    path: getTempFile(TEST_STORE_FILE_NAME).path,
    dataPostProcessor: () => {
      throw new Error("dataPostProcessor fails.");
    },
  });

  yield Assert.rejects(store.load(), /dataPostProcessor fails\./);

  do_check_false(store.dataReady);
});

add_task(function* test_load_sync_with_dataPostProcessor_fails()
{
  let store = new JSONFile({
    path: getTempFile(TEST_STORE_FILE_NAME).path,
    dataPostProcessor: () => {
      throw new Error("dataPostProcessor fails.");
    },
  });

  Assert.throws(() => store.ensureDataReady(), /dataPostProcessor fails\./);

  do_check_false(store.dataReady);
});

/**
 * Loads data from a string in a predefined format. The purpose of this test is
 * to verify that the JSON format used in previous versions can be loaded.
 */
add_task(function* test_load_string_predefined()
{
  let store = new JSONFile({
    path: getTempFile(TEST_STORE_FILE_NAME).path,
  });

  let string =
    "{\"number\":123,\"string\":\"test\",\"object\":{\"prop1\":1,\"prop2\":2}}";

  yield OS.File.writeAtomic(store.path, new TextEncoder().encode(string),
                            { tmpPath: store.path + ".tmp" });

  yield store.load();

  Assert.deepEqual(store.data, TEST_DATA);
});

/**
 * Loads data from a malformed JSON string.
 */
add_task(function* test_load_string_malformed()
{
  let store = new JSONFile({
    path: getTempFile(TEST_STORE_FILE_NAME).path,
  });

  let string = "{\"number\":123,\"string\":\"test\",\"object\":{\"prop1\":1,";

  yield OS.File.writeAtomic(store.path, new TextEncoder().encode(string),
                            { tmpPath: store.path + ".tmp" });

  yield store.load();

  // A backup file should have been created.
  do_check_true(yield OS.File.exists(store.path + ".corrupt"));
  yield OS.File.remove(store.path + ".corrupt");

  // The store should be ready to accept new data.
  do_check_true(store.dataReady);
  do_check_matches(store.data, {});
});

/**
 * Loads data from a malformed JSON string, using the synchronous initialization
 * path.
 */
add_task(function* test_load_string_malformed_sync()
{
  let store = new JSONFile({
    path: getTempFile(TEST_STORE_FILE_NAME).path,
  });

  let string = "{\"number\":123,\"string\":\"test\",\"object\":{\"prop1\":1,";

  yield OS.File.writeAtomic(store.path, new TextEncoder().encode(string),
                            { tmpPath: store.path + ".tmp" });

  store.ensureDataReady();

  // A backup file should have been created.
  do_check_true(yield OS.File.exists(store.path + ".corrupt"));
  yield OS.File.remove(store.path + ".corrupt");

  // The store should be ready to accept new data.
  do_check_true(store.dataReady);
  do_check_matches(store.data, {});
});
@@ -27,6 +27,8 @@ skip-if = toolkit == 'android'
 [test_Integration.js]
 [test_jsesc.js]
 skip-if = toolkit == 'android'
+[test_JSONFile.js]
+skip-if = toolkit == 'android'
 [test_Log.js]
 skip-if = toolkit == 'android'
 [test_MatchPattern.js]