2007-12-11 05:38:53 +00:00
|
|
|
/* ***** BEGIN LICENSE BLOCK *****
|
|
|
|
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
|
|
|
*
|
|
|
|
* The contents of this file are subject to the Mozilla Public License Version
|
|
|
|
* 1.1 (the "License"); you may not use this file except in compliance with
|
|
|
|
* the License. You may obtain a copy of the License at
|
|
|
|
* http://www.mozilla.org/MPL/
|
|
|
|
*
|
|
|
|
* Software distributed under the License is distributed on an "AS IS" basis,
|
|
|
|
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
|
|
|
* for the specific language governing rights and limitations under the
|
|
|
|
* License.
|
|
|
|
*
|
|
|
|
* The Original Code is Bookmarks Sync.
|
|
|
|
*
|
|
|
|
* The Initial Developer of the Original Code is Mozilla.
|
|
|
|
* Portions created by the Initial Developer are Copyright (C) 2007
|
|
|
|
* the Initial Developer. All Rights Reserved.
|
|
|
|
*
|
|
|
|
* Contributor(s):
|
|
|
|
* Dan Mills <thunder@mozilla.com>
|
2008-06-03 18:32:59 +00:00
|
|
|
* Myk Melez <myk@mozilla.org>
|
2007-12-11 05:38:53 +00:00
|
|
|
*
|
|
|
|
* Alternatively, the contents of this file may be used under the terms of
|
|
|
|
* either the GNU General Public License Version 2 or later (the "GPL"), or
|
|
|
|
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
|
|
|
* in which case the provisions of the GPL or the LGPL are applicable instead
|
|
|
|
* of those above. If you wish to allow use of your version of this file only
|
|
|
|
* under the terms of either the GPL or the LGPL, and not to allow others to
|
|
|
|
* use your version of this file under the terms of the MPL, indicate your
|
|
|
|
* decision by deleting the provisions above and replace them with the notice
|
|
|
|
* and other provisions required by the GPL or the LGPL. If you do not delete
|
|
|
|
* the provisions above, a recipient may use your version of this file under
|
|
|
|
* the terms of any one of the MPL, the GPL or the LGPL.
|
|
|
|
*
|
|
|
|
* ***** END LICENSE BLOCK ***** */
|
|
|
|
|
2009-01-06 21:54:18 +00:00
|
|
|
const EXPORTED_SYMBOLS = ['Engines', 'Engine', 'SyncEngine'];
|
2007-12-11 05:38:53 +00:00
|
|
|
|
|
|
|
const Cc = Components.classes;
|
|
|
|
const Ci = Components.interfaces;
|
|
|
|
const Cr = Components.results;
|
|
|
|
const Cu = Components.utils;
|
|
|
|
|
2009-02-22 08:04:58 +00:00
|
|
|
Cu.import("resource://weave/ext/Observers.js");
|
2009-07-14 21:01:26 +00:00
|
|
|
Cu.import("resource://weave/ext/Sync.js");
|
2007-12-11 05:38:53 +00:00
|
|
|
Cu.import("resource://weave/log4moz.js");
|
|
|
|
Cu.import("resource://weave/constants.js");
|
|
|
|
Cu.import("resource://weave/util.js");
|
2008-11-03 23:00:38 +00:00
|
|
|
Cu.import("resource://weave/resource.js");
|
2008-03-19 22:17:04 +00:00
|
|
|
Cu.import("resource://weave/identity.js");
|
2007-12-11 05:38:53 +00:00
|
|
|
Cu.import("resource://weave/stores.js");
|
2008-05-22 22:58:29 +00:00
|
|
|
Cu.import("resource://weave/trackers.js");
|
2007-12-11 05:38:53 +00:00
|
|
|
|
2008-11-08 10:00:33 +00:00
|
|
|
Cu.import("resource://weave/base_records/wbo.js");
|
2008-11-20 00:20:25 +00:00
|
|
|
Cu.import("resource://weave/base_records/keys.js");
|
2008-11-08 10:00:33 +00:00
|
|
|
Cu.import("resource://weave/base_records/crypto.js");
|
2008-11-20 00:20:25 +00:00
|
|
|
Cu.import("resource://weave/base_records/collection.js");
|
2008-11-08 10:00:33 +00:00
|
|
|
|
2008-04-15 01:53:35 +00:00
|
|
|
// Singleton service, holds registered engines
|
|
|
|
|
|
|
|
// Lazily create the EngineManagerSvc singleton the first time the exported
// `Engines` symbol is accessed
Utils.lazy(this, 'Engines', EngineManagerSvc);
|
|
|
|
|
|
|
|
// Manager that holds every registered engine instance, keyed by engine name.
function EngineManagerSvc() {
  this._engines = {};

  // Per-service logger; its level comes from a pref, defaulting to Debug
  let level = Svc.Prefs.get("log.logger.service.engines", "Debug");
  this._log = Log4Moz.repository.getLogger("Service.Engines");
  this._log.level = Log4Moz.Level[level];
}
|
2008-04-15 01:53:35 +00:00
|
|
|
EngineManagerSvc.prototype = {
  /**
   * Get the engine registered under the given name, or an array of engines
   * when given an array of names. Unknown names return undefined (single
   * case) or are logged and skipped (array case).
   */
  get: function EngMgr_get(name) {
    // Return an array of engines if we have an array of names
    if (Utils.isArray(name)) {
      let engines = [];
      name.forEach(function(name) {
        let engine = this.get(name);
        if (engine)
          engines.push(engine);
      }, this);
      return engines;
    }

    let engine = this._engines[name];
    if (!engine)
      this._log.debug("Could not get engine: " + name);
    return engine;
  },

  // Return every registered engine instance
  getAll: function EngMgr_getAll() {
    // Read this._engines (the old code read the global Engines singleton,
    // which breaks for any other instance and before the lazy singleton
    // is created)
    let engines = [];
    for (let name in this._engines)
      engines.push(this._engines[name]);
    return engines;
  },

  // Return only the engines whose pref marks them enabled
  getEnabled: function EngMgr_getEnabled() {
    return this.getAll().filter(function(engine) {
      return engine.enabled;
    });
  },

  /**
   * Register an Engine to the service. Alternatively, give an array of engine
   * objects to register.
   *
   * @param engineObject
   *        Engine object used to get an instance of the engine
   * @return The engine object if anything failed
   */
  register: function EngMgr_register(engineObject) {
    if (Utils.isArray(engineObject))
      return engineObject.map(this.register, this);

    try {
      let name = engineObject.prototype.name;
      if (name in this._engines)
        this._log.error("Engine '" + name + "' is already registered!");
      else
        this._engines[name] = new engineObject();
    }
    catch(ex) {
      let mesg = ex.message ? ex.message : ex;
      // Dig the engine name out step by step; any part may be missing
      let name = engineObject || "";
      name = name.prototype || "";
      name = name.name || "";

      let out = "Could not initialize engine '" + name + "': " + mesg;
      dump(out);
      this._log.error(out);

      return engineObject;
    }
  },

  // Remove an engine, given either its name or the Engine instance itself
  unregister: function EngMgr_unregister(val) {
    let name = val;
    if (val instanceof Engine)
      name = val.name;
    delete this._engines[name];
  }
};
|
|
|
|
|
2009-01-06 21:54:18 +00:00
|
|
|
// Base class for all sync engines; subclasses override name, _storeObj, etc.
function Engine() { this._init(); }
|
2007-12-15 02:07:25 +00:00
|
|
|
Engine.prototype = {
  // Identifier used for prefs and logging; subclasses must override
  name: "engine",
  _displayName: "Boring Engine",
  description: "An engine example - it doesn't actually sync anything",
  logName: "Engine",

  // _storeObj and _trackerObj should be overridden in subclasses
  _storeObj: Store,
  _trackerObj: Tracker,

  // Name of the pref fragment that toggles this engine
  get prefName() {
    return this.name;
  },
  get enabled() {
    return Svc.Prefs.get("engine." + this.prefName, null);
  },
  set enabled(val) {
    Svc.Prefs.set("engine." + this.prefName, !!val);
  },

  // Sync urgency score, maintained by the tracker
  get score() {
    return this._tracker.score;
  },

  // Lazily instantiate the store the first time it's needed
  get _store() {
    if (!this.__store)
      this.__store = new this._storeObj();
    return this.__store;
  },

  // Lazily instantiate the tracker the first time it's needed
  get _tracker() {
    if (!this.__tracker)
      this.__tracker = new this._trackerObj();
    return this.__tracker;
  },

  // Localized display name when available; fall back to the hardcoded one
  get displayName() {
    try {
      return Str.engines.get(this.name);
    } catch (e) {}

    return this._displayName;
  },

  _init: function Engine__init() {
    this._notify = Utils.notify("weave:engine:");
    this._log = Log4Moz.repository.getLogger("Engine." + this.logName);
    let level = Svc.Prefs.get("log.logger.engine." + this.name, "Debug");
    this._log.level = Log4Moz.Level[level];

    this._tracker; // initialize tracker to load previously changed IDs
    this._log.debug("Engine initialized");
  },

  /**
   * Run the subclass's _sync, temporarily wrapping every method with a timer
   * and logging per-method timing stats afterwards.
   *
   * @throws if the subclass does not implement _sync
   */
  sync: function Engine_sync() {
    if (!this._sync)
      throw "engine does not implement _sync method";

    let times = {};
    let wrapped = {};
    // Find functions in any point of the prototype chain
    for (let _name in this) {
      let name = _name;

      // Ignore certain constructors/functions
      if (name.search(/^_(.+Obj|notify)$/) == 0)
        continue;

      // Only track functions but skip the constructors
      if (typeof this[name] == "function") {
        times[name] = [];
        wrapped[name] = this[name];

        // Wrap the original function with a start/stop timer
        this[name] = function() {
          let start = Date.now();
          try {
            return wrapped[name].apply(this, arguments);
          }
          finally {
            times[name].push(Date.now() - start);
          }
        };
      }
    }

    try {
      this._notify("sync", this.name, this._sync)();
    }
    finally {
      // Restore original unwrapped functionality
      for (let name in wrapped)
        this[name] = wrapped[name];

      let stats = {};
      for (let name in times) {
        let time = times[name];
        // Figure out stats on the times unless there's nothing
        let num = time.length;
        if (num == 0)
          continue;

        // Track the min/max/sum of the values
        let stat = {
          num: num,
          sum: 0
        };
        time.forEach(function(val) {
          if (stat.min == null || val < stat.min)
            stat.min = val;
          if (stat.max == null || val > stat.max)
            stat.max = val;
          stat.sum += val;
        });

        stat.avg = Number((stat.sum / num).toFixed(2));
        stats[name] = stat;
      }

      stats.toString = function() {
        let sums = [];
        for (let name in this) {
          let stat = this[name];
          if (stat.sum != null)
            sums.push(name.replace(/^_/, "") + " " + stat.sum);
        }

        // Order certain functions first before any other random ones
        let nameOrder = ["sync", "processIncoming", "uploadOutgoing",
                         "syncStartup", "syncFinish"];
        let getPos = function(str) {
          let pos = nameOrder.indexOf(str.split(" ")[0]);
          return pos != -1 ? pos : Infinity;
        };
        // Numeric comparator: the old one returned a boolean, which
        // Array.sort mishandles (it never yields a negative value)
        let order = function(a, b) {
          let pa = getPos(a);
          let pb = getPos(b);
          if (pa == pb)
            return 0;
          return pa < pb ? -1 : 1;
        };

        return "Total (ms): " + sums.sort(order).join(", ");
      };

      this._log.info(stats);
    }
  },

  // Delete this engine's data from the server (subclass supplies _wipeServer)
  wipeServer: function Engine_wipeServer() {
    if (!this._wipeServer)
      throw "engine does not implement _wipeServer method";
    this._notify("wipe-server", this.name, this._wipeServer)();
  },

  /**
   * Get rid of any local meta-data
   */
  resetClient: function Engine_resetClient() {
    if (!this._resetClient)
      throw "engine does not implement _resetClient method";

    this._notify("reset-client", this.name, this._resetClient)();
  },

  // Reset meta-data, then delete all local user data
  _wipeClient: function Engine__wipeClient() {
    this.resetClient();
    this._log.debug("Deleting all local data");
    this._store.wipe();
  },

  wipeClient: function Engine_wipeClient() {
    this._notify("wipe-client", this.name, this._wipeClient)();
  }
};
|
|
|
|
|
2009-01-06 21:54:18 +00:00
|
|
|
// Engine subclass implementing the generic record-based sync algorithm
function SyncEngine() { this._init(); }
|
2008-12-05 08:39:54 +00:00
|
|
|
SyncEngine.prototype = {
  __proto__: Engine.prototype,

  // Wire record type for this engine; subclasses may override
  _recordObj: CryptoWrapper,

  _init: function _init() {
    Engine.prototype._init.call(this);
    this.loadToFetch();
  },

  // Base URL of the user's storage node
  get storageURL() {
    return Svc.Prefs.get("clusterURL") + Svc.Prefs.get("storageAPI") +
      "/" + ID.get("WeaveID").username + "/storage/";
  },

  // Collection URL holding this engine's records
  get engineURL() {
    return this.storageURL + this.name;
  },

  // URL of this engine's crypto meta record
  get cryptoMetaURL() {
    return this.storageURL + "crypto/" + this.name;
  },

  // Timestamp of the last successful sync (stored as a string pref to
  // preserve floating point precision)
  get lastSync() {
    return parseFloat(Svc.Prefs.get(this.name + ".lastSync", "0"));
  },
  set lastSync(value) {
    // Reset the pref in-case it's a number instead of a string
    Svc.Prefs.reset(this.name + ".lastSync");
    // Store the value as a string to keep floating point precision
    Svc.Prefs.set(this.name + ".lastSync", value.toString());
  },

  resetLastSync: function SyncEngine_resetLastSync() {
    this._log.debug("Resetting " + this.name + " last sync time");
    Svc.Prefs.reset(this.name + ".lastSync");
    Svc.Prefs.set(this.name + ".lastSync", "0");
  },

  // Backlog of record ids still to be downloaded; persisted to disk
  get toFetch() {
    return this._toFetch;
  },
  set toFetch(val) {
    this._toFetch = val;
    Utils.jsonSave("toFetch/" + this.name, this, val);
  },

  loadToFetch: function loadToFetch() {
    // Initialize to empty if there's no file
    this._toFetch = [];
    Utils.jsonLoad("toFetch/" + this.name, this, Utils.bind2(this,
      function(o) {
        this._toFetch = o;
      }));
  },

  // Create a new record by querying the store, and add the engine metadata
  _createRecord: function SyncEngine__createRecord(id) {
    return this._store.createRecord(id, this.cryptoMetaURL);
  },

  // Any setup that needs to happen at the beginning of each sync.
  // Makes sure crypto records and keys are all set-up
  _syncStartup: function SyncEngine__syncStartup() {
    this._log.debug("Ensuring server crypto records are there");

    // Try getting/unwrapping the crypto record
    let meta = CryptoMetas.get(this.cryptoMetaURL);
    if (meta) {
      try {
        let pubkey = PubKeys.getDefaultKey();
        let privkey = PrivKeys.get(pubkey.privateKeyUri);
        meta.getKey(privkey, ID.get("WeaveCryptoID"));
      }
      catch(ex) {
        // Remove traces of this bad cryptometa
        this._log.debug("Purging bad data after failed unwrap crypto: " + ex);
        CryptoMetas.del(this.cryptoMetaURL);
        meta = null;

        // Remove any potentially tainted data
        new Resource(this.engineURL).delete();
      }
    }

    // Generate a new crypto record
    if (!meta) {
      let symkey = Svc.Crypto.generateRandomKey();
      let pubkey = PubKeys.getDefaultKey();
      meta = new CryptoMeta(this.cryptoMetaURL);
      meta.generateIV();
      meta.addUnwrappedKey(pubkey, symkey);
      let res = new Resource(meta.uri);
      let resp = res.put(meta);
      if (!resp.success) {
        this._log.debug("Metarecord upload fail:" + resp);
        resp.failureCode = ENGINE_METARECORD_UPLOAD_FAIL;
        throw resp;
      }

      // Cache the crypto meta that we just put on the server
      CryptoMetas.set(meta.uri, meta);
    }

    // first sync special case: upload all items
    // NOTE: we use a backdoor (of sorts) to the tracker so it
    // won't save to disk this list over and over
    if (!this.lastSync) {
      this._log.info("First sync, uploading all items");
      this._tracker.clearChangedIDs();
      for (let id in this._store.getAllIDs())
        this._tracker.changedIDs[id] = true;
    }

    let outnum = 0;
    for (let id in this._tracker.changedIDs)
      outnum++;
    this._log.info(outnum + " outgoing items pre-reconciliation");

    // Keep track of what to delete at the end of sync
    this._delete = {};
  },

  // Download and apply incoming (server) records
  _processIncoming: function SyncEngine__processIncoming() {
    this._log.debug("Downloading & applying server changes");

    // Figure out how many total items to fetch this sync; do less on mobile
    let fetchNum = 1500;
    if (Svc.Prefs.get("client.type") == "mobile")
      fetchNum /= 10;

    // enable cache, and keep only the first few items. Otherwise (when
    // we have more outgoing items than can fit in the cache), we will
    // keep rotating items in and out, perpetually getting cache misses
    this._store.cache.enabled = true;
    this._store.cache.fifo = false; // filo
    this._store.cache.clear();

    let newitems = new Collection(this.engineURL, this._recordObj);
    newitems.newer = this.lastSync;
    newitems.full = true;
    newitems.sort = "index";
    newitems.limit = fetchNum;

    let count = {applied: 0, reconciled: 0};
    let handled = [];
    newitems.recordHandler = Utils.bind2(this, function(item) {
      // Grab a later last modified if possible
      if (this.lastModified == null || item.modified > this.lastModified)
        this.lastModified = item.modified;

      // Remember which records were processed
      handled.push(item.id);

      try {
        item.decrypt(ID.get("WeaveCryptoID"));
        if (this._reconcile(item)) {
          count.applied++;
          this._tracker.ignoreAll = true;
          this._store.applyIncoming(item);
        } else {
          count.reconciled++;
          this._log.trace("Skipping reconciled incoming item " + item.id);
        }
      }
      catch(ex) {
        this._log.warn("Error processing record: " + Utils.exceptionStr(ex));
      }
      this._tracker.ignoreAll = false;
      Sync.sleep(0);
    });

    // Only bother getting data from the server if there's new things
    if (this.lastModified == null || this.lastModified > this.lastSync) {
      let resp = newitems.get();
      if (!resp.success) {
        resp.failureCode = ENGINE_DOWNLOAD_FAIL;
        throw resp;
      }

      // Subtract out the number of items we just got
      fetchNum -= handled.length;
    }

    // Check if we got the maximum that we requested; get the rest if so
    if (handled.length == newitems.limit) {
      let guidColl = new Collection(this.engineURL);
      guidColl.newer = this.lastSync;
      guidColl.sort = "index";

      let guids = guidColl.get();
      if (!guids.success)
        throw guids;

      // Figure out which guids weren't just fetched then remove any guids
      // that were already waiting and prepend the new ones
      let extra = Utils.arraySub(guids.obj, handled);
      if (extra.length > 0)
        this.toFetch = extra.concat(Utils.arraySub(this.toFetch, extra));
    }

    // Process any backlog of GUIDs if we haven't fetched too many this sync
    while (this.toFetch.length > 0 && fetchNum > 0) {
      // Reuse the original query, but get rid of the restricting params
      newitems.limit = 0;
      newitems.newer = 0;

      // Get the first bunch of records and save the rest for later
      let minFetch = Math.min(150, this.toFetch.length, fetchNum);
      newitems.ids = this.toFetch.slice(0, minFetch);
      this.toFetch = this.toFetch.slice(minFetch);
      fetchNum -= minFetch;

      // Reuse the existing record handler set earlier
      let resp = newitems.get();
      if (!resp.success) {
        resp.failureCode = ENGINE_DOWNLOAD_FAIL;
        throw resp;
      }
    }

    if (this.lastSync < this.lastModified)
      this.lastSync = this.lastModified;

    this._log.info(["Records:", count.applied, "applied,", count.reconciled,
      "reconciled,", this.toFetch.length, "left to fetch"].join(" "));

    // try to free some memory
    this._store.cache.clear();
  },

  /**
   * Find a GUID of an item that is a duplicate of the incoming item but
   * happens to have a different GUID
   *
   * @return GUID of the similar item; falsy otherwise
   */
  _findDupe: function _findDupe(item) {
    // By default, assume there's no dupe items for the engine
  },

  // Compare the incoming record against the locally-generated one for the
  // same id; true when parent, deleted flag and cleartext all match
  _isEqual: function SyncEngine__isEqual(item) {
    let local = this._createRecord(item.id);
    if (this._log.level <= Log4Moz.Level.Trace)
      this._log.trace("Local record: " + local);
    if (item.parentid == local.parentid &&
        item.deleted == local.deleted &&
        Utils.deepEquals(item.cleartext, local.cleartext)) {
      this._log.trace("Local record is the same");
      return true;
    } else {
      this._log.trace("Local record is different");
      return false;
    }
  },

  // Queue an id for server-side deletion at the end of sync and stop
  // tracking it locally
  _deleteId: function _deleteId(id) {
    this._tracker.removeChangedID(id);

    // Remember this id to delete at the end of sync
    if (this._delete.ids == null)
      this._delete.ids = [id];
    else
      this._delete.ids.push(id);
  },

  // Merge an incoming item with its local duplicate; the lower id wins
  _handleDupe: function _handleDupe(item, dupeId) {
    // The local dupe is the lower id, so pretend the incoming is for it
    if (dupeId < item.id) {
      this._deleteId(item.id);
      item.id = dupeId;
      this._tracker.changedIDs[dupeId] = true;
    }
    // The incoming item has the lower id, so change the dupe to it
    else {
      this._store.changeItemID(dupeId, item.id);
      this._deleteId(dupeId);
    }

    this._store.cache.clear(); // because parentid refs will be wrong
  },

  // Reconciliation has three steps:
  // 1) Check for the same item (same ID) on both the incoming and outgoing
  //    queues. This means the same item was modified on this profile and
  //    another at the same time. In this case, this client wins (which really
  //    means, the last profile you sync wins).
  // 2) Check if the incoming item's ID exists locally. In that case it's an
  //    update and we should not try a similarity check (step 3)
  // 3) Check if any incoming & outgoing items are actually the same, even
  //    though they have different IDs. This happens when the same item is
  //    added on two different machines at the same time. It's also the common
  //    case when syncing for the first time two machines that already have
  //    the same bookmarks. In this case we change the IDs to match.
  _reconcile: function SyncEngine__reconcile(item) {
    if (this._log.level <= Log4Moz.Level.Trace)
      this._log.trace("Incoming: " + item);

    // Step 1: Check for conflicts
    //         If same as local record, do not upload
    this._log.trace("Reconcile step 1");
    if (item.id in this._tracker.changedIDs) {
      if (this._isEqual(item))
        this._tracker.removeChangedID(item.id);
      return false;
    }

    // Step 2: Check for updates
    //         If different from local record, apply server update
    this._log.trace("Reconcile step 2");
    if (this._store.itemExists(item.id))
      return !this._isEqual(item);

    // If the incoming item has been deleted, skip step 3
    this._log.trace("Reconcile step 2.5");
    if (item.deleted)
      return true;

    // Step 3: Check for similar items
    this._log.trace("Reconcile step 3");
    let dupeId = this._findDupe(item);
    if (dupeId)
      this._handleDupe(item, dupeId);

    // Apply the incoming item (now that the dupe is the right id)
    return true;
  },

  // Upload outgoing records
  _uploadOutgoing: function SyncEngine__uploadOutgoing() {
    let outnum = 0;
    for (let id in this._tracker.changedIDs)
      outnum++;
    if (outnum) {
      this._log.debug("Preparing " + outnum + " outgoing records");

      // collection we'll upload
      let up = new Collection(this.engineURL);
      let count = 0;

      // Upload what we've got so far in the collection
      let doUpload = Utils.bind2(this, function(desc) {
        this._log.info("Uploading " + desc + " of " + outnum + " records");
        let resp = up.post();
        if (!resp.success) {
          this._log.debug("Uploading records failed: " + resp);
          resp.failureCode = ENGINE_UPLOAD_FAIL;
          throw resp;
        }

        // Record the modified time of the upload
        let modified = resp.headers["X-Weave-Timestamp"];
        if (modified > this.lastSync)
          this.lastSync = modified;

        up.clearRecords();
      });

      // don't cache the outgoing items, we won't need them later
      this._store.cache.enabled = false;

      for (let id in this._tracker.changedIDs) {
        let out = this._createRecord(id);
        if (this._log.level <= Log4Moz.Level.Trace)
          this._log.trace("Outgoing: " + out);

        out.encrypt(ID.get("WeaveCryptoID"));
        up.pushData(out);

        // Partial upload
        if ((++count % MAX_UPLOAD_RECORDS) == 0)
          doUpload((count - MAX_UPLOAD_RECORDS) + " - " + count + " out");

        Sync.sleep(0);
      }

      // Final upload
      if (count % MAX_UPLOAD_RECORDS > 0)
        doUpload(count >= MAX_UPLOAD_RECORDS ? "last batch" : "all");

      this._store.cache.enabled = true;
    }
    this._tracker.clearChangedIDs();
  },

  // Any cleanup necessary.
  // Reset the tracker score and push queued server-side deletes
  _syncFinish: function SyncEngine__syncFinish() {
    this._log.trace("Finishing up sync");
    this._tracker.resetScore();

    let doDelete = Utils.bind2(this, function(key, val) {
      let coll = new Collection(this.engineURL, this._recordObj);
      coll[key] = val;
      coll.delete();
    });

    for (let key in this._delete) {
      let val = this._delete[key];
      // Remove the key for future uses
      delete this._delete[key];

      // Send a simple delete for the property
      if (key != "ids" || val.length <= 100)
        doDelete(key, val);
      else {
        // For many ids, split into chunks of at most 100
        while (val.length > 0) {
          doDelete(key, val.slice(0, 100));
          val = val.slice(100);
        }
      }
    }
  },

  // Full sync: startup, download/apply, upload, cleanup
  _sync: function SyncEngine__sync() {
    try {
      this._syncStartup();
      Observers.notify("weave:engine:sync:status", "process-incoming");
      this._processIncoming();
      Observers.notify("weave:engine:sync:status", "upload-outgoing");
      this._uploadOutgoing();
      this._syncFinish();
    }
    catch (e) {
      this._log.warn("Sync failed");
      throw e;
    }
  },

  // Delete this engine's data and crypto meta from the server
  _wipeServer: function SyncEngine__wipeServer() {
    new Resource(this.engineURL).delete();
    new Resource(this.cryptoMetaURL).delete();
  },

  // Fetch the newest record on the server and check whether the current
  // keys can decrypt it
  _testDecrypt: function _testDecrypt() {
    // Report failure even if there's nothing to decrypt
    let canDecrypt = false;

    // Fetch the most recently uploaded record and try to decrypt it
    let test = new Collection(this.engineURL, this._recordObj);
    test.limit = 1;
    test.sort = "newest";
    test.full = true;
    test.recordHandler = function(record) {
      record.decrypt(ID.get("WeaveCryptoID"));
      canDecrypt = true;
    };

    // Any failure fetching/decrypting will just result in false
    try {
      this._log.trace("Trying to decrypt a record from the server..");
      test.get();
    }
    catch(ex) {
      this._log.debug("Failed test decrypt: " + Utils.exceptionStr(ex));
    }

    return canDecrypt;
  },

  // Forget the last sync time and any download backlog
  _resetClient: function SyncEngine__resetClient() {
    this.resetLastSync();
    this.toFetch = [];
  }
};
|