/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

this.EXPORTED_SYMBOLS = [
  "EngineManager",
  "Engine",
  "SyncEngine",
  "Tracker",
  "Store"
];

const {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;

Cu.import("resource://services-common/async.js");
Cu.import("resource://gre/modules/Log.jsm");
Cu.import("resource://services-common/observers.js");
Cu.import("resource://services-common/utils.js");
Cu.import("resource://services-sync/constants.js");
Cu.import("resource://services-sync/identity.js");
Cu.import("resource://services-sync/record.js");
Cu.import("resource://services-sync/resource.js");
Cu.import("resource://services-sync/util.js");

/*
 * Trackers are associated with a single engine and deal with
 * listening for changes to their particular data type.
 *
 * There are two things they keep track of:
 * 1) A score, indicating how urgently the engine wants to sync.
 * 2) A list of IDs for all the changed items that need to be synced.
 */
this.Tracker = function Tracker(name, engine) {
  if (!engine) {
    throw new Error("Tracker must be associated with an Engine instance.");
  }

  name = name || "Unnamed";
  this.name = this.file = name.toLowerCase();
  this.engine = engine;

  this._log = Log.repository.getLogger("Sync.Tracker." + name);
  let level = Svc.Prefs.get("log.logger.engine." + this.name, "Debug");
  this._log.level = Log.Level[level];

  this._score = 0;
  this._ignored = [];
  this.ignoreAll = false;
  this.changedIDs = {};
  this.loadChangedIDs();

  Svc.Obs.add("weave:engine:start-tracking", this);
  Svc.Obs.add("weave:engine:stop-tracking", this);
};

Tracker.prototype = {
  /*
   * The score can be read as often as desired to decide which engines to sync.
   *
   * Valid values for score:
   *  -1: Do not sync unless the user specifically requests it (almost disabled)
   *   0: Nothing has changed
   * 100: Please sync me ASAP!
   *
   * Setting it to other values should (but doesn't currently) throw an exception.
   */
  get score() {
    return this._score;
  },

  set score(value) {
    this._score = value;
    Observers.notify("weave:engine:score:updated", this.name);
  },

  // Should be called by the service every time a sync has been done for an engine.
  resetScore: function () {
    this._score = 0;
  },

  persistChangedIDs: true,

  /**
   * Persist changedIDs to disk at a later date.
   * Optionally pass a callback to be invoked when the write has occurred.
   */
  saveChangedIDs: function (cb) {
    if (!this.persistChangedIDs) {
      this._log.debug("Not saving changedIDs.");
      return;
    }
    Utils.namedTimer(function () {
      this._log.debug("Saving changed IDs to " + this.file);
      Utils.jsonSave("changes/" + this.file, this, this.changedIDs, cb);
    }, 1000, this, "_lazySave");
  },

  loadChangedIDs: function (cb) {
    Utils.jsonLoad("changes/" + this.file, this, function(json) {
      if (json && (typeof(json) == "object")) {
        this.changedIDs = json;
      } else {
        this._log.warn("Changed IDs file " + this.file + " contains non-object value.");
        json = null;
      }
      if (cb) {
        cb.call(this, json);
      }
    });
  },

  // Ignore/unignore specific IDs. Useful for ignoring items that are
  // being processed, or that shouldn't be synced.
  // Note: the ignore list is not persisted to disk.

  ignoreID: function (id) {
    this.unignoreID(id);
    this._ignored.push(id);
  },

  unignoreID: function (id) {
    let index = this._ignored.indexOf(id);
    if (index != -1)
      this._ignored.splice(index, 1);
  },

  addChangedID: function (id, when) {
    if (!id) {
      this._log.warn("Attempted to add undefined ID to tracker");
      return false;
    }

    // _ignored is an array, so test membership with indexOf.
    if (this.ignoreAll || this._ignored.indexOf(id) != -1) {
      return false;
    }

    // Default to the current time in seconds if no time is provided.
    if (when == null) {
      when = Math.floor(Date.now() / 1000);
    }

    // Add/update the entry if we have a newer time.
    if ((this.changedIDs[id] || -Infinity) < when) {
      this._log.trace("Adding changed ID: " + id + ", " + when);
      this.changedIDs[id] = when;
      this.saveChangedIDs(this.onSavedChangedIDs);
    }

    return true;
  },

  removeChangedID: function (id) {
    if (!id) {
      this._log.warn("Attempted to remove undefined ID from tracker");
      return false;
    }
    if (this.ignoreAll || this._ignored.indexOf(id) != -1)
      return false;
    if (this.changedIDs[id] != null) {
      this._log.trace("Removing changed ID " + id);
      delete this.changedIDs[id];
      this.saveChangedIDs();
    }
    return true;
  },

  clearChangedIDs: function () {
    this._log.trace("Clearing changed ID list");
    this.changedIDs = {};
    this.saveChangedIDs();
  },

  _isTracking: false,

  // Override these in your subclasses.
  startTracking: function () {
  },

  stopTracking: function () {
  },

  engineIsEnabled: function () {
    if (!this.engine) {
      // Can't tell -- we must be running in a test!
      return true;
    }
    return this.engine.enabled;
  },

  onEngineEnabledChanged: function (engineEnabled) {
    if (engineEnabled == this._isTracking) {
      return;
    }

    if (engineEnabled) {
      this.startTracking();
      this._isTracking = true;
    } else {
      this.stopTracking();
      this._isTracking = false;
      this.clearChangedIDs();
    }
  },

  observe: function (subject, topic, data) {
    switch (topic) {
      case "weave:engine:start-tracking":
        if (!this.engineIsEnabled()) {
          return;
        }
        this._log.trace("Got start-tracking.");
        if (!this._isTracking) {
          this.startTracking();
          this._isTracking = true;
        }
        return;
      case "weave:engine:stop-tracking":
        this._log.trace("Got stop-tracking.");
        if (this._isTracking) {
          this.stopTracking();
          this._isTracking = false;
        }
        return;
    }
  }
};
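
// Illustrative sketch (not part of this module): a minimal Tracker subclass.
// The observer topic "example-item-changed" and the use of the GUID as the
// notification data are hypothetical; real trackers listen to their own data
// sources (Places, the login manager, etc.).
//
//   function ExampleTracker(name, engine) {
//     Tracker.call(this, name, engine);
//   }
//   ExampleTracker.prototype = {
//     __proto__: Tracker.prototype,
//
//     startTracking: function () {
//       Svc.Obs.add("example-item-changed", this);
//     },
//
//     stopTracking: function () {
//       Svc.Obs.remove("example-item-changed", this);
//     },
//
//     observe: function (subject, topic, data) {
//       Tracker.prototype.observe.call(this, subject, topic, data);
//       if (topic == "example-item-changed" && !this.ignoreAll) {
//         // 'data' is assumed to carry the changed item's GUID.
//         if (this.addChangedID(data)) {
//           // Bump the score so the scheduler knows we want to sync soon.
//           this.score += SCORE_INCREMENT_SMALL;
//         }
//       }
//     }
//   };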

/**
 * The Store serves as the interface between Sync and stored data.
 *
 * The name "store" is a slight misnomer because it doesn't actually "store"
 * anything. Instead, it serves as a gateway to something that actually does
 * the "storing."
 *
 * The store is responsible for record management inside an engine. It tells
 * Sync what items are available for Sync, converts items to and from Sync's
 * record format, and applies records from Sync into changes on the underlying
 * store.
 *
 * Store implementations require a number of functions to be implemented. These
 * are all documented below.
 *
 * For stores that deal with many records or which have expensive store access
 * routines, it is highly recommended to implement a custom applyIncomingBatch
 * and/or applyIncoming function on top of the basic APIs.
 */

this.Store = function Store(name, engine) {
  if (!engine) {
    throw new Error("Store must be associated with an Engine instance.");
  }

  name = name || "Unnamed";
  this.name = name.toLowerCase();
  this.engine = engine;

  this._log = Log.repository.getLogger("Sync.Store." + name);
  let level = Svc.Prefs.get("log.logger.engine." + this.name, "Debug");
  this._log.level = Log.Level[level];

  XPCOMUtils.defineLazyGetter(this, "_timer", function() {
    return Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
  });
}
Store.prototype = {

  _sleep: function _sleep(delay) {
    let cb = Async.makeSyncCallback();
    this._timer.initWithCallback(cb, delay, Ci.nsITimer.TYPE_ONE_SHOT);
    Async.waitForSyncCallback(cb);
  },

  /**
   * Apply multiple incoming records against the store.
   *
   * This is called with a set of incoming records to process. The function
   * should look at each record, reconcile it with the current local state, and
   * make the local changes required to bring its state into alignment with the
   * record.
   *
   * The default implementation simply iterates over all records and calls
   * applyIncoming(). Store implementations may override this function
   * if desired.
   *
   * @param records Array of records to apply
   * @return Array of record IDs which did not apply cleanly
   */
  applyIncomingBatch: function (records) {
    let failed = [];
    for each (let record in records) {
      try {
        this.applyIncoming(record);
      } catch (ex if (ex.code == Engine.prototype.eEngineAbortApplyIncoming)) {
        // This kind of exception should have a 'cause' attribute, which is an
        // originating exception.
        // ex.cause will carry its stack with it when rethrown.
        throw ex.cause;
      } catch (ex) {
        this._log.warn("Failed to apply incoming record " + record.id);
        this._log.warn("Encountered exception: " + Utils.exceptionStr(ex));
        failed.push(record.id);
      }
    }
    return failed;
  },

  /**
   * Apply a single record against the store.
   *
   * This takes a single record and makes the local changes required so the
   * local state matches what's in the record.
   *
   * The default implementation calls one of remove(), create(), or update()
   * depending on the state obtained from the store itself. Store
   * implementations may override this function if desired.
   *
   * @param record
   *        Record to apply
   */
  applyIncoming: function (record) {
    if (record.deleted)
      this.remove(record);
    else if (!this.itemExists(record.id))
      this.create(record);
    else
      this.update(record);
  },

  // Override these in derived objects.

  /**
   * Create an item in the store from a record.
   *
   * This is called by the default implementation of applyIncoming(). If using
   * applyIncomingBatch(), this won't be called unless your store calls it.
   *
   * @param record
   *        The store record to create an item from
   */
  create: function (record) {
    throw "override create in a subclass";
  },

  /**
   * Remove an item from the store based on a record.
   *
   * This is called by the default implementation of applyIncoming(). If using
   * applyIncomingBatch(), this won't be called unless your store calls it.
   *
   * @param record
   *        The store record describing the item to delete
   */
  remove: function (record) {
    throw "override remove in a subclass";
  },

  /**
   * Update an item from a record.
   *
   * This is called by the default implementation of applyIncoming(). If using
   * applyIncomingBatch(), this won't be called unless your store calls it.
   *
   * @param record
   *        The record to update the local item from
   */
  update: function (record) {
    throw "override update in a subclass";
  },

  /**
   * Determine whether a record with the specified ID exists.
   *
   * Takes a string record ID and returns a boolean saying whether the record
   * exists.
   *
   * @param id
   *        string record ID
   * @return boolean indicating whether record exists locally
   */
  itemExists: function (id) {
    throw "override itemExists in a subclass";
  },

  /**
   * Create a record from the specified ID.
   *
   * If the ID is known, the record should be populated with metadata from
   * the store. If the ID is not known, the record should be created with the
   * deleted field set to true.
   *
   * @param id
   *        string record ID
   * @param collection
   *        Collection to add record to. This is typically passed into the
   *        constructor for the newly-created record.
   * @return record type for this engine
   */
  createRecord: function (id, collection) {
    throw "override createRecord in a subclass";
  },

  /**
   * Change the ID of a record.
   *
   * @param oldID
   *        string old/current record ID
   * @param newID
   *        string new record ID
   */
  changeItemID: function (oldID, newID) {
    throw "override changeItemID in a subclass";
  },

  /**
   * Obtain the set of all known record IDs.
   *
   * @return Object with ID strings as keys and values of true. The values
   *         are ignored.
   */
  getAllIDs: function () {
    throw "override getAllIDs in a subclass";
  },

  /**
   * Wipe all data in the store.
   *
   * This function is called during remote wipes or when replacing local data
   * with remote data.
   *
   * This function should delete all local data that the store is managing. It
   * can be thought of as clearing out all state and restoring the "new
   * browser" state.
   */
  wipe: function () {
    throw "override wipe in a subclass";
  }
};
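
// Illustrative sketch (not part of this module): a minimal Store subclass
// backed by a plain in-memory object. Real stores talk to Places, the login
// manager, etc., and large stores should also override applyIncomingBatch()
// for efficiency. The record payload shape used here is hypothetical.
//
//   function ExampleStore(name, engine) {
//     Store.call(this, name, engine);
//     this._items = {};
//   }
//   ExampleStore.prototype = {
//     __proto__: Store.prototype,
//
//     itemExists: function (id) {
//       return id in this._items;
//     },
//
//     createRecord: function (id, collection) {
//       let record = new CryptoWrapper(collection, id);
//       if (id in this._items) {
//         record.cleartext = this._items[id];
//       } else {
//         record.deleted = true;
//       }
//       return record;
//     },
//
//     changeItemID: function (oldID, newID) {
//       this._items[newID] = this._items[oldID];
//       delete this._items[oldID];
//     },
//
//     getAllIDs: function () {
//       let ids = {};
//       for (let id in this._items) {
//         ids[id] = true;
//       }
//       return ids;
//     },
//
//     create: function (record) {
//       this._items[record.id] = record.cleartext;
//     },
//
//     update: function (record) {
//       this._items[record.id] = record.cleartext;
//     },
//
//     remove: function (record) {
//       delete this._items[record.id];
//     },
//
//     wipe: function () {
//       this._items = {};
//     }
//   };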

this.EngineManager = function EngineManager(service) {
  this.service = service;

  this._engines = {};

  // This will be populated by Service on startup.
  this._declined = new Set();
  this._log = Log.repository.getLogger("Sync.EngineManager");
  this._log.level = Log.Level[Svc.Prefs.get("log.logger.service.engines", "Debug")];
}
EngineManager.prototype = {
  get: function (name) {
    // Return an array of engines if we have an array of names
    if (Array.isArray(name)) {
      let engines = [];
      name.forEach(function(name) {
        let engine = this.get(name);
        if (engine) {
          engines.push(engine);
        }
      }, this);
      return engines;
    }

    let engine = this._engines[name];
    if (!engine) {
      this._log.debug("Could not get engine: " + name);
      if (Object.keys) {
        this._log.debug("Engines are: " + JSON.stringify(Object.keys(this._engines)));
      }
    }
    return engine;
  },

  getAll: function () {
    return [engine for ([name, engine] in Iterator(this._engines))];
  },

  /**
   * N.B., does not pay attention to the declined list.
   */
  getEnabled: function () {
    return this.getAll().filter(function(engine) engine.enabled);
  },

  get enabledEngineNames() {
    return [e.name for each (e in this.getEnabled())];
  },

  persistDeclined: function () {
    Svc.Prefs.set("declinedEngines", [...this._declined].join(","));
  },

  /**
   * Returns an array.
   */
  getDeclined: function () {
    return [...this._declined];
  },

  setDeclined: function (engines) {
    this._declined = new Set(engines);
    this.persistDeclined();
  },

  isDeclined: function (engineName) {
    return this._declined.has(engineName);
  },

  /**
   * Accepts a Set or an array.
   */
  decline: function (engines) {
    for (let e of engines) {
      this._declined.add(e);
    }
    this.persistDeclined();
  },

  undecline: function (engines) {
    for (let e of engines) {
      this._declined.delete(e);
    }
    this.persistDeclined();
  },

  /**
   * Mark any non-enabled engines as declined.
   *
   * This is useful after initial customization during setup.
   */
  declineDisabled: function () {
    for (let e of this.getAll()) {
      if (!e.enabled) {
        this._log.debug("Declining disabled engine " + e.name);
        this._declined.add(e.name);
      }
    }
    this.persistDeclined();
  },

  /**
   * Register an Engine to the service. Alternatively, give an array of engine
   * objects to register.
   *
   * @param engineObject
   *        Engine object used to get an instance of the engine
   * @return The engine object if registration failed
   */
  register: function (engineObject) {
    if (Array.isArray(engineObject)) {
      return engineObject.map(this.register, this);
    }

    try {
      let engine = new engineObject(this.service);
      let name = engine.name;
      if (name in this._engines) {
        this._log.error("Engine '" + name + "' is already registered!");
      } else {
        this._engines[name] = engine;
      }
    } catch (ex) {
      this._log.error(CommonUtils.exceptionStr(ex));

      let mesg = ex.message ? ex.message : ex;
      let name = engineObject || "";
      name = name.prototype || "";
      name = name.name || "";

      let out = "Could not initialize engine '" + name + "': " + mesg;
      this._log.error(out);

      return engineObject;
    }
  },

  unregister: function (val) {
    let name = val;
    if (val instanceof Engine) {
      name = val.name;
    }
    delete this._engines[name];
  },

  clear: function () {
    for (let name in this._engines) {
      delete this._engines[name];
    }
  },
};
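
// Illustrative sketch (not part of this module): how a consumer such as the
// Sync service might drive an EngineManager. The engine constructors named
// here are hypothetical stand-ins for registered Engine subclasses.
//
//   let manager = new EngineManager(service);
//   manager.register([ExampleEngine, AnotherEngine]);
//   manager.setDeclined(["another"]);        // the user opted out of one
//   for (let engine of manager.getEnabled()) {
//     // getEnabled() ignores the declined list, so filter explicitly.
//     if (!manager.isDeclined(engine.name)) {
//       engine.sync();
//     }
//   }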

this.Engine = function Engine(name, service) {
  if (!service) {
    throw new Error("Engine must be associated with a Service instance.");
  }

  this.Name = name || "Unnamed";
  this.name = name.toLowerCase();
  this.service = service;

  this._notify = Utils.notify("weave:engine:");
  this._log = Log.repository.getLogger("Sync.Engine." + this.Name);
  let level = Svc.Prefs.get("log.logger.engine." + this.name, "Debug");
  this._log.level = Log.Level[level];

  this._tracker; // initialize tracker to load previously changed IDs
  this._log.debug("Engine initialized");
}
Engine.prototype = {
  // _storeObj and _trackerObj should be overridden in subclasses.
  _storeObj: Store,
  _trackerObj: Tracker,

  // Local 'constant'.
  // Signal to the engine that processing further records is pointless.
  eEngineAbortApplyIncoming: "error.engine.abort.applyincoming",

  get prefName() this.name,
  get enabled() {
    return Svc.Prefs.get("engine." + this.prefName, false);
  },

  set enabled(val) {
    Svc.Prefs.set("engine." + this.prefName, !!val);
    this._tracker.onEngineEnabledChanged(val);
  },

  get score() this._tracker.score,

  get _store() {
    let store = new this._storeObj(this.Name, this);
    this.__defineGetter__("_store", function() store);
    return store;
  },

  get _tracker() {
    let tracker = new this._trackerObj(this.Name, this);
    this.__defineGetter__("_tracker", function() tracker);
    return tracker;
  },

  sync: function () {
    if (!this.enabled) {
      return;
    }

    if (!this._sync) {
      throw "engine does not implement _sync method";
    }

    this._notify("sync", this.name, this._sync)();
  },

  /**
   * Get rid of any local meta-data.
   */
  resetClient: function () {
    if (!this._resetClient) {
      throw "engine does not implement _resetClient method";
    }

    this._notify("reset-client", this.name, this._resetClient)();
  },

  _wipeClient: function () {
    this.resetClient();
    this._log.debug("Deleting all local data");
    this._tracker.ignoreAll = true;
    this._store.wipe();
    this._tracker.ignoreAll = false;
    this._tracker.clearChangedIDs();
  },

  wipeClient: function () {
    this._notify("wipe-client", this.name, this._wipeClient)();
  }
};

this.SyncEngine = function SyncEngine(name, service) {
  Engine.call(this, name || "SyncEngine", service);

  this.loadToFetch();
  this.loadPreviousFailed();
}

// Enumeration to define approaches to handling bad records.
// Attached to the constructor to allow use as a kind of static enumeration.
SyncEngine.kRecoveryStrategy = {
  ignore: "ignore",
  retry: "retry",
  error: "error"
};

SyncEngine.prototype = {
  __proto__: Engine.prototype,
  _recordObj: CryptoWrapper,
  version: 1,

  // How many records to pull in a single sync. This is primarily to avoid very
  // long first syncs against profiles with many history records.
  downloadLimit: null,

  // How many records to pull at one time when specifying IDs. This is to avoid
  // URI length limitations.
  guidFetchBatchSize: DEFAULT_GUID_FETCH_BATCH_SIZE,
  mobileGUIDFetchBatchSize: DEFAULT_MOBILE_GUID_FETCH_BATCH_SIZE,

  // How many records to process in a single batch.
  applyIncomingBatchSize: DEFAULT_STORE_BATCH_SIZE,

  get storageURL() this.service.storageURL,

  get engineURL() this.storageURL + this.name,

  get cryptoKeysURL() this.storageURL + "crypto/keys",

  get metaURL() this.storageURL + "meta/global",

  get syncID() {
    // Generate a random syncID if we don't have one
    let syncID = Svc.Prefs.get(this.name + ".syncID", "");
    return syncID == "" ?
           this.syncID = Utils.makeGUID() : syncID;
  },
  set syncID(value) {
    Svc.Prefs.set(this.name + ".syncID", value);
  },

  /*
   * lastSync is a timestamp in server time.
   */
  get lastSync() {
    return parseFloat(Svc.Prefs.get(this.name + ".lastSync", "0"));
  },
  set lastSync(value) {
    // Reset the pref in case it's a number instead of a string
    Svc.Prefs.reset(this.name + ".lastSync");
    // Store the value as a string to keep floating point precision
    Svc.Prefs.set(this.name + ".lastSync", value.toString());
  },
  resetLastSync: function () {
    this._log.debug("Resetting " + this.name + " last sync time");
    Svc.Prefs.reset(this.name + ".lastSync");
    Svc.Prefs.set(this.name + ".lastSync", "0");
    this.lastSyncLocal = 0;
  },

  get toFetch() this._toFetch,
  set toFetch(val) {
    let cb = (error) => this._log.error(Utils.exceptionStr(error));
    // Coerce the array to a string for more efficient comparison.
    if (val + "" == this._toFetch) {
      return;
    }
    this._toFetch = val;
    Utils.namedTimer(function () {
      Utils.jsonSave("toFetch/" + this.name, this, val, cb);
    }, 0, this, "_toFetchDelay");
  },

  loadToFetch: function () {
    // Initialize to empty if there's no file.
    this._toFetch = [];
    Utils.jsonLoad("toFetch/" + this.name, this, function(toFetch) {
      if (toFetch) {
        this._toFetch = toFetch;
      }
    });
  },

  get previousFailed() this._previousFailed,
  set previousFailed(val) {
    let cb = (error) => this._log.error(Utils.exceptionStr(error));
    // Coerce the array to a string for more efficient comparison.
    if (val + "" == this._previousFailed) {
      return;
    }
    this._previousFailed = val;
    Utils.namedTimer(function () {
      Utils.jsonSave("failed/" + this.name, this, val, cb);
    }, 0, this, "_previousFailedDelay");
  },

  loadPreviousFailed: function () {
    // Initialize to empty if there's no file.
    this._previousFailed = [];
    Utils.jsonLoad("failed/" + this.name, this, function(previousFailed) {
      if (previousFailed) {
        this._previousFailed = previousFailed;
      }
    });
  },

  /*
   * lastSyncLocal is a timestamp in local time.
   */
  get lastSyncLocal() {
    return parseInt(Svc.Prefs.get(this.name + ".lastSyncLocal", "0"), 10);
  },
  set lastSyncLocal(value) {
    // Store as a string because a pref can only store C longs as numbers.
    Svc.Prefs.set(this.name + ".lastSyncLocal", value.toString());
  },

  /*
   * Returns a mapping of IDs -> changed timestamp. Engine implementations
   * can override this method to bypass the tracker for certain or all
   * changed items.
   */
  getChangedIDs: function () {
    return this._tracker.changedIDs;
  },
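
  // Illustrative sketch (not part of this module): an engine that bypasses
  // the tracker can override getChangedIDs(), e.g. to diff against state kept
  // by its own store. The idsModifiedSince() helper below is hypothetical.
  //
  //   getChangedIDs: function () {
  //     let changed = {};
  //     for (let id of this._store.idsModifiedSince(this.lastSync)) {
  //       changed[id] = Math.floor(Date.now() / 1000);
  //     }
  //     return changed;
  //   },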

  // Create a new record using the store and add in crypto fields.
  _createRecord: function (id) {
    let record = this._store.createRecord(id, this.name);
    record.id = id;
    record.collection = this.name;
    return record;
  },

  // Any setup that needs to happen at the beginning of each sync.
  _syncStartup: function () {

    // Determine if we need to wipe on outdated versions
    let metaGlobal = this.service.recordManager.get(this.metaURL);
    let engines = metaGlobal.payload.engines || {};
    let engineData = engines[this.name] || {};

    let needsWipe = false;

    // Assume missing versions are 0 and wipe the server
    if ((engineData.version || 0) < this.version) {
      this._log.debug("Old engine data: " + [engineData.version, this.version]);

      // Prepare to clear the server and upload everything
      needsWipe = true;
      this.syncID = "";

      // Set the newer version and newly generated syncID
      engineData.version = this.version;
      engineData.syncID = this.syncID;

      // Put the new data back into meta/global and mark for upload
      engines[this.name] = engineData;
      metaGlobal.payload.engines = engines;
      metaGlobal.changed = true;
    }
    // Don't sync this engine if the server has newer data
    else if (engineData.version > this.version) {
      let error = new String("New data: " + [engineData.version, this.version]);
      error.failureCode = VERSION_OUT_OF_DATE;
      throw error;
    }
    // Changes to syncID mean we'll need to upload everything
    else if (engineData.syncID != this.syncID) {
      this._log.debug("Engine syncIDs: " + [engineData.syncID, this.syncID]);
      this.syncID = engineData.syncID;
      this._resetClient();
    }

    // Delete any existing data and reupload on bad version or missing meta.
    // No crypto component here...? We could regenerate per-collection keys...
    if (needsWipe) {
      this.wipeServer();
    }

    // Save objects that need to be uploaded in this._modified. We also save
    // the timestamp of this fetch in this.lastSyncLocal. As we successfully
    // upload objects we remove them from this._modified. If an error occurs
    // or any objects fail to upload, they will remain in this._modified. At
    // the end of a sync, or after an error, we add all objects remaining in
    // this._modified to the tracker.
    this.lastSyncLocal = Date.now();
    if (this.lastSync) {
      this._modified = this.getChangedIDs();
    } else {
      // Mark all items to be uploaded, but treat them as changed from long ago
      this._log.debug("First sync, uploading all items");
      this._modified = {};
      for (let id in this._store.getAllIDs()) {
        this._modified[id] = 0;
      }
    }
    // Clear the tracker now. If the sync fails we'll add the ones we failed
    // to upload back.
    this._tracker.clearChangedIDs();

    this._log.info(Object.keys(this._modified).length +
                   " outgoing items pre-reconciliation");

    // Keep track of what to delete at the end of sync
    this._delete = {};
  },

  /**
   * A tiny abstraction to make it easier to test incoming record
   * application.
   */
  _itemSource: function () {
    return new Collection(this.engineURL, this._recordObj, this.service);
  },

  /**
   * Process incoming records.
   * In the most awful and untestable way possible.
   * This now accepts something that makes testing vaguely less impossible.
   */
  _processIncoming: function (newitems) {
    this._log.trace("Downloading & applying server changes");

    // Figure out how many total items to fetch this sync; do less on mobile.
    let batchSize = this.downloadLimit || Infinity;
    let isMobile = (Svc.Prefs.get("client.type") == "mobile");

    if (!newitems) {
      newitems = this._itemSource();
    }

    if (isMobile) {
      batchSize = MOBILE_BATCH_SIZE;
    }
    newitems.newer = this.lastSync;
    newitems.full = true;
    newitems.limit = batchSize;

    // applied    => number of items that should be applied.
    // failed     => number of items that failed in this sync.
    // newFailed  => number of items that failed for the first time in this sync.
    // reconciled => number of items that were reconciled.
    let count = {applied: 0, failed: 0, newFailed: 0, reconciled: 0};
    let handled = [];
    let applyBatch = [];
    let failed = [];
    let failedInPreviousSync = this.previousFailed;
    let fetchBatch = Utils.arrayUnion(this.toFetch, failedInPreviousSync);
    // Reset previousFailed for each sync since previously failed items may not fail again.
    this.previousFailed = [];

    // Used (via exceptions) to allow the record handler/reconciliation/etc.
    // methods to signal that they would like processing of incoming records to
    // cease.
    let aborting = undefined;

    function doApplyBatch() {
      this._tracker.ignoreAll = true;
      try {
        failed = failed.concat(this._store.applyIncomingBatch(applyBatch));
      } catch (ex) {
        // Catch any error that escapes from applyIncomingBatch. At present
        // those will all be abort events.
        this._log.warn("Got exception " + Utils.exceptionStr(ex) +
                       ", aborting processIncoming.");
        aborting = ex;
      }
      this._tracker.ignoreAll = false;
      applyBatch = [];
    }

    function doApplyBatchAndPersistFailed() {
      // Apply remaining batch.
      if (applyBatch.length) {
        doApplyBatch.call(this);
      }
      // Persist failed items so we refetch them.
      if (failed.length) {
        this.previousFailed = Utils.arrayUnion(failed, this.previousFailed);
        count.failed += failed.length;
        this._log.debug("Records that failed to apply: " + failed);
        failed = [];
      }
    }

    let key = this.service.collectionKeys.keyForCollection(this.name);

    // Not binding this method to 'this' for performance reasons. It gets
    // called for every incoming record.
    let self = this;

    newitems.recordHandler = function(item) {
      if (aborting) {
        return;
      }

      // Grab a later last modified if possible
      if (self.lastModified == null || item.modified > self.lastModified)
        self.lastModified = item.modified;

      // Track the collection for the WBO.
      item.collection = self.name;

      // Remember which records were processed
      handled.push(item.id);

      try {
        try {
          item.decrypt(key);
        } catch (ex if Utils.isHMACMismatch(ex)) {
          let strategy = self.handleHMACMismatch(item, true);
          if (strategy == SyncEngine.kRecoveryStrategy.retry) {
            // You only get one retry.
            try {
              // Try decrypting again, typically because we've got new keys.
              self._log.info("Trying decrypt again...");
              key = self.service.collectionKeys.keyForCollection(self.name);
              item.decrypt(key);
              strategy = null;
            } catch (ex if Utils.isHMACMismatch(ex)) {
              strategy = self.handleHMACMismatch(item, false);
            }
          }

          switch (strategy) {
            case null:
              // Retry succeeded! No further handling.
              break;
            case SyncEngine.kRecoveryStrategy.retry:
              self._log.debug("Ignoring second retry suggestion.");
              // Fall through to error case.
            case SyncEngine.kRecoveryStrategy.error:
              self._log.warn("Error decrypting record: " + Utils.exceptionStr(ex));
              failed.push(item.id);
              return;
            case SyncEngine.kRecoveryStrategy.ignore:
              self._log.debug("Ignoring record " + item.id +
                              " with bad HMAC: already handled.");
              return;
          }
        }
      } catch (ex) {
        self._log.warn("Error decrypting record: " + Utils.exceptionStr(ex));
        failed.push(item.id);
        return;
      }

      let shouldApply;
      try {
        shouldApply = self._reconcile(item);
      } catch (ex if (ex.code == Engine.prototype.eEngineAbortApplyIncoming)) {
        self._log.warn("Reconciliation failed: aborting incoming processing.");
        failed.push(item.id);
        aborting = ex.cause;
      } catch (ex) {
        self._log.warn("Failed to reconcile incoming record " + item.id);
        self._log.warn("Encountered exception: " + Utils.exceptionStr(ex));
        failed.push(item.id);
        return;
      }

      if (shouldApply) {
        count.applied++;
        applyBatch.push(item);
      } else {
        count.reconciled++;
        self._log.trace("Skipping reconciled incoming item " + item.id);
      }

      if (applyBatch.length == self.applyIncomingBatchSize) {
        doApplyBatch.call(self);
      }
      self._store._sleep(0);
    };

    // Only bother getting data from the server if there are new things
    if (this.lastModified == null || this.lastModified > this.lastSync) {
      let resp = newitems.get();
      doApplyBatchAndPersistFailed.call(this);
      if (!resp.success) {
        resp.failureCode = ENGINE_DOWNLOAD_FAIL;
        throw resp;
      }

      if (aborting) {
        throw aborting;
      }
    }

    // Mobile: check if we got the maximum that we requested; get the rest if so.
    if (handled.length == newitems.limit) {
      let guidColl = new Collection(this.engineURL, null, this.service);

      // Sort and limit so that on mobile we only get the last X records.
      guidColl.limit = this.downloadLimit;
      guidColl.newer = this.lastSync;

      // index: Orders by the sortindex descending (highest weight first).
      guidColl.sort = "index";

      let guids = guidColl.get();
      if (!guids.success)
        throw guids;

      // Figure out which guids weren't just fetched, then remove any guids that
      // were already waiting and prepend the new ones
      let extra = Utils.arraySub(guids.obj, handled);
      if (extra.length > 0) {
        fetchBatch = Utils.arrayUnion(extra, fetchBatch);
        this.toFetch = Utils.arrayUnion(extra, this.toFetch);
      }
    }

    // Fast-forward the lastSync timestamp since we have stored the
    // remaining items in toFetch.
    if (this.lastSync < this.lastModified) {
      this.lastSync = this.lastModified;
    }

    // Process any backlog of GUIDs.
    // At this point we impose an upper limit on the number of items to fetch
    // in a single request, even for desktop, to avoid hitting URI limits.
    batchSize = isMobile ? this.mobileGUIDFetchBatchSize :
                           this.guidFetchBatchSize;

    while (fetchBatch.length && !aborting) {
      // Reuse the original query, but get rid of the restricting params
      // and batch remaining records.
      newitems.limit = 0;
      newitems.newer = 0;
      newitems.ids = fetchBatch.slice(0, batchSize);

      // Reuse the existing record handler set earlier
      let resp = newitems.get();
      if (!resp.success) {
        resp.failureCode = ENGINE_DOWNLOAD_FAIL;
        throw resp;
      }

      // This batch was successfully applied. Not using
      // doApplyBatchAndPersistFailed() here to avoid writing toFetch twice.
      fetchBatch = fetchBatch.slice(batchSize);
      this.toFetch = Utils.arraySub(this.toFetch, newitems.ids);
      this.previousFailed = Utils.arrayUnion(this.previousFailed, failed);
      if (failed.length) {
        count.failed += failed.length;
        this._log.debug("Records that failed to apply: " + failed);
      }
      failed = [];

      if (aborting) {
        throw aborting;
      }

      if (this.lastSync < this.lastModified) {
        this.lastSync = this.lastModified;
      }
    }

    // Apply remaining items.
    doApplyBatchAndPersistFailed.call(this);

    count.newFailed = Utils.arraySub(this.previousFailed, failedInPreviousSync).length;
    count.succeeded = Math.max(0, count.applied - count.failed);
    this._log.info(["Records:",
                    count.applied, "applied,",
                    count.succeeded, "successfully,",
                    count.failed, "failed to apply,",
                    count.newFailed, "newly failed to apply,",
                    count.reconciled, "reconciled."].join(" "));
    Observers.notify("weave:engine:sync:applied", count, this.name);
  },

  /**
   * Find the GUID of an item that is a duplicate of the incoming item but
   * happens to have a different GUID.
   *
   * @return GUID of the similar item; falsy otherwise
   */
  _findDupe: function (item) {
    // By default, assume there are no duplicate items for the engine.
  },
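
  // Illustrative sketch (not part of this module): an engine whose items have
  // a natural key (say, a URL) can override _findDupe() so that an incoming
  // record with a new GUID is matched to the existing local item. The
  // guidForURL() store helper is hypothetical.
  //
  //   _findDupe: function (item) {
  //     return this._store.guidForURL(item.cleartext.url);
  //   },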

  _deleteId: function (id) {
    this._tracker.removeChangedID(id);

    // Remember this id to delete at the end of sync
    if (this._delete.ids == null)
      this._delete.ids = [id];
    else
      this._delete.ids.push(id);
  },

  /**
   * Reconcile incoming record with local state.
   *
   * This function essentially determines whether to apply an incoming record.
   *
   * @param item
   *        Record from server to be tested for application.
   * @return boolean
   *         Truthy if incoming record should be applied. False if not.
   */
  _reconcile: function (item) {
    if (this._log.level <= Log.Level.Trace) {
      this._log.trace("Incoming: " + item);
    }

    // We start reconciling by collecting a bunch of state. We do this here
    // because some state may change during the course of this function and we
    // need to operate on the original values.
    let existsLocally = this._store.itemExists(item.id);
    let locallyModified = item.id in this._modified;

    // TODO Handle clock drift better. Tracked in bug 721181.
    let remoteAge = AsyncResource.serverTime - item.modified;
    let localAge = locallyModified ?
      (Date.now() / 1000 - this._modified[item.id]) : null;
    let remoteIsNewer = remoteAge < localAge;

    this._log.trace("Reconciling " + item.id + ". exists=" +
                    existsLocally + "; modified=" + locallyModified +
                    "; local age=" + localAge + "; incoming age=" +
                    remoteAge);

    // We handle deletions first so subsequent logic doesn't have to check
    // deleted flags.
    if (item.deleted) {
      // If the item doesn't exist locally, there is nothing for us to do. We
      // can't check for duplicates because the incoming record has no data
      // which can be used for duplicate detection.
      if (!existsLocally) {
        this._log.trace("Ignoring incoming item because it was deleted and " +
                        "the item does not exist locally.");
        return false;
      }

      // We decide whether to process the deletion by comparing the record
      // ages. If the item is not modified locally, the remote side wins and
      // the deletion is processed. If it is modified locally, we take the
      // newer record.
      if (!locallyModified) {
        this._log.trace("Applying incoming delete because the local item " +
                        "exists and isn't modified.");
        return true;
      }

      // TODO As part of bug 720592, determine whether we should do more here.
      // In the case where the local changes are newer, it is quite possible
      // that the local client will restore data a remote client had tried to
      // delete. There might be a good reason for that delete and it might be
      // unexpected for this client to restore that data.
      this._log.trace("Incoming record is deleted but we had local changes. " +
                      "Applying the youngest record.");
      return remoteIsNewer;
    }

    // At this point the incoming record is not for a deletion and must have
    // data. If the incoming record does not exist locally, we check for a local
    // duplicate existing under a different ID. The default implementation of
    // _findDupe() is empty, so engines have to opt in to this functionality.
    //
    // If we find a duplicate, we change the local ID to the incoming ID and we
    // refresh the metadata collected above. See bug 710448 for the history
    // of this logic.
    if (!existsLocally) {
      let dupeID = this._findDupe(item);
      if (dupeID) {
        this._log.trace("Local item " + dupeID + " is a duplicate for " +
                        "incoming item " + item.id);

        // The local, duplicate ID is always deleted on the server.
        this._deleteId(dupeID);

        // The current API contract does not mandate that the ID returned by
        // _findDupe() actually exists. Therefore, we have to perform this
        // check.
        existsLocally = this._store.itemExists(dupeID);

        // We unconditionally change the item's ID in case the engine knows of
        // an item but doesn't expose it through itemExists. If the API
        // contract were stronger, this could be changed.
        this._log.debug("Switching local ID to incoming: " + dupeID + " -> " +
                        item.id);
        this._store.changeItemID(dupeID, item.id);

        // If the local item was modified, we carry its metadata forward so
        // appropriate reconciling can be performed.
        if (dupeID in this._modified) {
          locallyModified = true;
          localAge = Date.now() / 1000 - this._modified[dupeID];
          remoteIsNewer = remoteAge < localAge;

          this._modified[item.id] = this._modified[dupeID];
          delete this._modified[dupeID];
        } else {
          locallyModified = false;
          localAge = null;
        }

        this._log.debug("Local item after duplication: age=" + localAge +
                        "; modified=" + locallyModified + "; exists=" +
                        existsLocally);
      } else {
        this._log.trace("No duplicate found for incoming item: " + item.id);
      }
    }

    // At this point we've performed duplicate detection. But, nothing here
    // should depend on duplicate detection as the above should have updated
    // state seamlessly.

    if (!existsLocally) {
      // If the item doesn't exist locally and we have no local modifications
      // to the item (implying that it was not deleted), always apply the remote
      // item.
      if (!locallyModified) {
        this._log.trace("Applying incoming because local item does not exist " +
                        "and was not deleted.");
        return true;
      }

      // If the item was modified locally but isn't present, it must have
      // been deleted. If the incoming record is younger, we restore from
      // that record.
      if (remoteIsNewer) {
        this._log.trace("Applying incoming because local item was deleted " +
                        "before the incoming item was changed.");
        delete this._modified[item.id];
        return true;
      }

      this._log.trace("Ignoring incoming item because the local item's " +
                      "deletion is newer.");
      return false;
    }

    // If the remote and local records are the same, there is nothing to be
    // done, so we don't do anything. In the ideal world, this logic wouldn't
    // be here and the engine would take a record and apply it. The reason we
    // want to defer this logic is because it would avoid a redundant and
    // possibly expensive dip into the storage layer to query item state.
    // This should get addressed in the async rewrite, so we ignore it for now.
    let localRecord = this._createRecord(item.id);
    let recordsEqual = Utils.deepEquals(item.cleartext,
                                        localRecord.cleartext);

    // If the records are the same, we don't need to do anything. This does
    // potentially throw away a local modification time. But, if the records
    // are the same, does it matter?
    if (recordsEqual) {
      this._log.trace("Ignoring incoming item because the local item is " +
                      "identical.");

      delete this._modified[item.id];
      return false;
    }

    // At this point the records are different.

    // If we have no local modifications, always take the server record.
    if (!locallyModified) {
      this._log.trace("Applying incoming record because no local conflicts.");
      return true;
    }

    // At this point, records are different and the local record is modified.
    // We resolve conflicts by record age, where the newest one wins. This does
    // result in data loss and should be handled by giving the engine an
    // opportunity to merge the records. Bug 720592 tracks this feature.
    this._log.warn("DATA LOSS: Both local and remote changes to record: " +
                   item.id);
    return remoteIsNewer;
  },

  // Upload outgoing records.
  _uploadOutgoing: function () {
    this._log.trace("Uploading local changes to server.");

    let modifiedIDs = Object.keys(this._modified);
    if (modifiedIDs.length) {
      this._log.trace("Preparing " + modifiedIDs.length +
                      " outgoing records");

      // Collection we'll upload
      let up = new Collection(this.engineURL, null, this.service);
      let count = 0;

      // Upload what we've got so far in the collection
      let doUpload = Utils.bind2(this, function(desc) {
        this._log.info("Uploading " + desc + " of " + modifiedIDs.length +
                       " records");
        let resp = up.post();
        if (!resp.success) {
          this._log.debug("Uploading records failed: " + resp);
          resp.failureCode = ENGINE_UPLOAD_FAIL;
          throw resp;
        }

        // Update server timestamp from the upload.
        let modified = resp.headers["x-weave-timestamp"];
        if (modified > this.lastSync)
          this.lastSync = modified;

        let failed_ids = Object.keys(resp.obj.failed);
        if (failed_ids.length)
          this._log.debug("Records that will be uploaded again because "
                          + "the server couldn't store them: "
                          + failed_ids.join(", "));

        // Clear successfully uploaded objects.
        for each (let id in resp.obj.success) {
          delete this._modified[id];
        }

        up.clearRecords();
      });

      for each (let id in modifiedIDs) {
        try {
          let out = this._createRecord(id);
          if (this._log.level <= Log.Level.Trace)
            this._log.trace("Outgoing: " + out);

          out.encrypt(this.service.collectionKeys.keyForCollection(this.name));
          up.pushData(out);
        }
        catch(ex) {
          this._log.warn("Error creating record: " + Utils.exceptionStr(ex));
        }

        // Partial upload
        if ((++count % MAX_UPLOAD_RECORDS) == 0)
          doUpload((count - MAX_UPLOAD_RECORDS) + " - " + count + " out");

        this._store._sleep(0);
      }

      // Final upload
      if (count % MAX_UPLOAD_RECORDS > 0)
        doUpload(count >= MAX_UPLOAD_RECORDS ?
"last batch" : "all"); michael@0: } michael@0: }, michael@0: michael@0: // Any cleanup necessary. michael@0: // Save the current snapshot so as to calculate changes at next sync michael@0: _syncFinish: function () { michael@0: this._log.trace("Finishing up sync"); michael@0: this._tracker.resetScore(); michael@0: michael@0: let doDelete = Utils.bind2(this, function(key, val) { michael@0: let coll = new Collection(this.engineURL, this._recordObj, this.service); michael@0: coll[key] = val; michael@0: coll.delete(); michael@0: }); michael@0: michael@0: for (let [key, val] in Iterator(this._delete)) { michael@0: // Remove the key for future uses michael@0: delete this._delete[key]; michael@0: michael@0: // Send a simple delete for the property michael@0: if (key != "ids" || val.length <= 100) michael@0: doDelete(key, val); michael@0: else { michael@0: // For many ids, split into chunks of at most 100 michael@0: while (val.length > 0) { michael@0: doDelete(key, val.slice(0, 100)); michael@0: val = val.slice(100); michael@0: } michael@0: } michael@0: } michael@0: }, michael@0: michael@0: _syncCleanup: function () { michael@0: if (!this._modified) { michael@0: return; michael@0: } michael@0: michael@0: // Mark failed WBOs as changed again so they are reuploaded next time. michael@0: for (let [id, when] in Iterator(this._modified)) { michael@0: this._tracker.addChangedID(id, when); michael@0: } michael@0: this._modified = {}; michael@0: }, michael@0: michael@0: _sync: function () { michael@0: try { michael@0: this._syncStartup(); michael@0: Observers.notify("weave:engine:sync:status", "process-incoming"); michael@0: this._processIncoming(); michael@0: Observers.notify("weave:engine:sync:status", "upload-outgoing"); michael@0: this._uploadOutgoing(); michael@0: this._syncFinish(); michael@0: } finally { michael@0: this._syncCleanup(); michael@0: } michael@0: }, michael@0: michael@0: canDecrypt: function () { michael@0: // Report failure even if there's nothing to decrypt michael@0: let canDecrypt = false; michael@0: michael@0: // Fetch the most recently uploaded record and try to decrypt it michael@0: let test = new Collection(this.engineURL, this._recordObj, this.service); michael@0: test.limit = 1; michael@0: test.sort = "newest"; michael@0: test.full = true; michael@0: michael@0: let key = this.service.collectionKeys.keyForCollection(this.name); michael@0: test.recordHandler = function recordHandler(record) { michael@0: record.decrypt(key); michael@0: canDecrypt = true; michael@0: }.bind(this); michael@0: michael@0: // Any failure fetching/decrypting will just result in false michael@0: try { michael@0: this._log.trace("Trying to decrypt a record from the server.."); michael@0: test.get(); michael@0: } michael@0: catch(ex) { michael@0: this._log.debug("Failed test decrypt: " + Utils.exceptionStr(ex)); michael@0: } michael@0: michael@0: return canDecrypt; michael@0: }, michael@0: michael@0: _resetClient: function () { michael@0: this.resetLastSync(); michael@0: this.previousFailed = []; michael@0: this.toFetch = []; michael@0: }, michael@0: michael@0: wipeServer: function () { michael@0: let response = this.service.resource(this.engineURL).delete(); michael@0: if (response.status != 200 && response.status != 404) { michael@0: throw response; michael@0: } michael@0: this._resetClient(); michael@0: }, michael@0: michael@0: removeClientData: function () { michael@0: // Implement this method in engines that store client specific data michael@0: // on the server. 
  },

  /*
   * Decide on (and partially effect) an error-handling strategy.
   *
   * Asks the Service to respond to an HMAC error, which might result in keys
   * being downloaded. That call returns true if it took an action which might
   * allow a retry to occur.
   *
   * If `mayRetry` is truthy, and the Service suggests a retry,
   * handleHMACMismatch returns kRecoveryStrategy.retry. Otherwise, it returns
   * kRecoveryStrategy.error.
   *
   * Subclasses of SyncEngine can override this method to allow for different
   * behavior -- e.g., to delete and ignore erroneous entries.
   *
   * All return values will be part of the kRecoveryStrategy enumeration.
   */
  handleHMACMismatch: function (item, mayRetry) {
    // By default we either try again, or bail out noisily.
    return (this.service.handleHMACEvent() && mayRetry) ?
           SyncEngine.kRecoveryStrategy.retry :
           SyncEngine.kRecoveryStrategy.error;
  }
};
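
// Illustrative sketch (not part of this module): wiring the pieces together.
// A concrete engine pairs a SyncEngine subclass with its record, store and
// tracker types, then gets registered with the service's EngineManager.
// ExampleRecord, ExampleStore and ExampleTracker are hypothetical.
//
//   function ExampleEngine(service) {
//     SyncEngine.call(this, "Example", service);
//   }
//   ExampleEngine.prototype = {
//     __proto__: SyncEngine.prototype,
//     _recordObj: ExampleRecord,
//     _storeObj: ExampleStore,
//     _trackerObj: ExampleTracker,
//     version: 1
//   };
//
//   // Typically done once at startup by the Sync service:
//   //   service.engineManager.register(ExampleEngine);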