author    | wolfbeast <mcwerewolf@gmail.com> | 2018-10-06 06:57:51 +0200
committer | wolfbeast <mcwerewolf@gmail.com> | 2018-10-06 06:57:51 +0200
commit    | 0c47c83e1b3b7d95681a43fbb0de0e17b2cd5b25 (patch)
tree      | c321601f04cbfd02fb6e12878e745dc49a612c86 /services
parent    | 8860eddcee1417483cafd114f3a9ec127e0f1f74 (diff)
Import Tycho weave client
Diffstat (limited to 'services')
183 files changed, 7272 insertions, 16759 deletions
diff --git a/services/sync/Makefile.in b/services/sync/Makefile.in
new file mode 100644
index 000000000..e86ee160f
--- /dev/null
+++ b/services/sync/Makefile.in
@@ -0,0 +1,16 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Definitions used by constants.js.
+weave_version := 1.40.0
+weave_id := {340c2bbc-ce74-4362-90b5-7c26312808ef}
+
+# Preprocess files.
+SYNC_PP := modules/constants.js
+SYNC_PP_FLAGS := \
+  -Dweave_version=$(weave_version) \
+  -Dweave_id='$(weave_id)'
+SYNC_PP_PATH = $(FINAL_TARGET)/modules/services-sync
+PP_TARGETS += SYNC_PP
+
diff --git a/services/sync/SyncComponents.manifest b/services/sync/SyncComponents.manifest
index c58286277..f6b631f5d 100644
--- a/services/sync/SyncComponents.manifest
+++ b/services/sync/SyncComponents.manifest
@@ -1,22 +1,28 @@
-# WeaveService has to restrict its registration for the app-startup category
-# to the specific list of apps that use it so it doesn't get loaded in xpcshell.
-# Thus we restrict it to these apps:
+# WebappRT doesn't need these instructions, and they don't necessarily work
+# with it, but it does use a GRE directory that the GRE shares with Firefox,
+# so in order to prevent the instructions from being processed for WebappRT,
+# we need to restrict them to the applications that depend on them, i.e.:
 #
 # b2g:            {3c2e2abc-06d4-11e1-ac3b-374f68613e61}
-# basilisk:       {ec8030f7-c20a-464f-9b0e-13a3a9e97384}
-# pale moon:      {8de7fcbb-c55c-4fbe-bfc5-fc555c87dbc4}
+# browser:        {8de7fcbb-c55c-4fbe-bfc5-fc555c87dbc4}
 # mobile/android: {aa3c5121-dab2-40e2-81ca-7ea25febc110}
 # mobile/xul:     {a23983c0-fd0e-11dc-95ff-0800200c9a66}
 # suite (comm):   {92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}
-# graphene:       {d1bfe7d9-c01e-4237-998b-7b5f960a4314}
+#
+# In theory we should do this for all these instructions, but in practice it is
+# sufficient to do it for the app-startup one, and the file is simpler that way.
 
 # Weave.js
 component {74b89fb0-f200-4ae8-a3ec-dd164117f6de} Weave.js
 contract @mozilla.org/weave/service;1 {74b89fb0-f200-4ae8-a3ec-dd164117f6de}
-category app-startup WeaveService service,@mozilla.org/weave/service;1 application={3c2e2abc-06d4-11e1-ac3b-374f68613e61} application={ec8030f7-c20a-464f-9b0e-13a3a9e97384} application={8de7fcbb-c55c-4fbe-bfc5-fc555c87dbc4} application={aa3c5121-dab2-40e2-81ca-7ea25febc110} application={a23983c0-fd0e-11dc-95ff-0800200c9a66} application={92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a} application={99bceaaa-e3c6-48c1-b981-ef9b46b67d60} application={d1bfe7d9-c01e-4237-998b-7b5f960a4314}
+category app-startup WeaveService service,@mozilla.org/weave/service;1 application={3c2e2abc-06d4-11e1-ac3b-374f68613e61} application={8de7fcbb-c55c-4fbe-bfc5-fc555c87dbc4} application={aa3c5121-dab2-40e2-81ca-7ea25febc110} application={a23983c0-fd0e-11dc-95ff-0800200c9a66} application={92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}
 component {d28f8a0b-95da-48f4-b712-caf37097be41} Weave.js
 contract @mozilla.org/network/protocol/about;1?what=sync-log {d28f8a0b-95da-48f4-b712-caf37097be41}
 
 # Register resource aliases
 # (Note, for tests these are also set up in addResourceAlias)
 resource services-sync resource://gre/modules/services-sync/
+
+#ifdef MOZ_SERVICES_HEALTHREPORT
+category healthreport-js-provider-default SyncProvider resource://services-sync/healthreport.jsm
+#endif
diff --git a/services/sync/Weave.js b/services/sync/Weave.js
index de131d08a..d99c217c0 100644
--- a/services/sync/Weave.js
+++ b/services/sync/Weave.js
@@ -72,6 +72,13 @@ WeaveService.prototype = {
                                          Ci.nsISupportsWeakReference]),
 
   ensureLoaded: function () {
+    // XXX: We don't support FxA, so prevent migrator calls
+    // to the Sync server from this module! Don't load it.
+    // If we are loaded and not using FxA, load the migration module.
+    //if (!this.fxAccountsEnabled) {
+    //  Cu.import("resource://services-sync/FxaMigrator.jsm");
+    //}
+
     Components.utils.import("resource://services-sync/main.js");
 
     // Side-effect of accessing the service is that it is instantiated.
@@ -96,11 +103,14 @@ WeaveService.prototype = {
    * Whether Firefox Accounts is enabled.
    *
    * @return bool
+   *
+   * This function is currently always returning false because we don't support
+   * the use of FxA/Sync-1.5 but do want to keep the code "just in case".
    */
   get fxAccountsEnabled() {
-#ifdef MC_PALEMOON
+    // Early exit: FxA not supported.
     return false;
-#else
+
     try {
       // Old sync guarantees '@' will never appear in the username while FxA
       // uses the FxA email address - so '@' is the flag we use.
@@ -109,7 +119,6 @@ WeaveService.prototype = {
     } catch (_) {
       return true; // No username == only allow FxA to be configured.
     }
-#endif
   },
 
   /**
@@ -123,7 +132,8 @@ WeaveService.prototype = {
    */
   get enabled() {
     let prefs = Services.prefs.getBranch(SYNC_PREFS_BRANCH);
-    return prefs.prefHasUserValue("username");
+    return prefs.prefHasUserValue("username") &&
+           prefs.prefHasUserValue("clusterURL");
   },
 
   observe: function (subject, topic, data) {
@@ -183,13 +193,10 @@ AboutWeaveLog.prototype = {
     channel.originalURI = aURI;
 
     // Ensure that the about page has the same privileges as a regular directory
-    // view. That way links to files can be opened. make sure we use the correct
-    // origin attributes when creating the principal for accessing the
-    // about:sync-log data.
+    // view. That way links to files can be opened.
let ssm = Cc["@mozilla.org/scriptsecuritymanager;1"] .getService(Ci.nsIScriptSecurityManager); - let principal = ssm.createCodebasePrincipal(uri, aLoadInfo.originAttributes); - + let principal = ssm.getNoAppCodebasePrincipal(uri); channel.owner = principal; return channel; } diff --git a/services/sync/locales/en-US/errors.properties b/services/sync/locales/en-US/errors.properties index f67f5ea1c..e51eb422c 100644 --- a/services/sync/locales/en-US/errors.properties +++ b/services/sync/locales/en-US/errors.properties @@ -12,7 +12,7 @@ error.login.reason.server = Server incorrectly configured error.sync.failed_partial = One or more data types could not be synced # LOCALIZATION NOTE (error.sync.reason.serverMaintenance): We removed the extraneous period from this string -error.sync.reason.serverMaintenance = Sync server maintenance is underway, syncing will resume automatically +error.sync.reason.serverMaintenance = Sync server maintenance is underway; syncing will resume automatically invalid-captcha = Incorrect words, try again weak-password = Use a stronger password @@ -20,8 +20,8 @@ weak-password = Use a stronger password # this is the fallback, if we hit an error we didn't bother to localize error.reason.unknown = Unknown error -change.password.pwSameAsPassword = Password can’t match current password -change.password.pwSameAsUsername = Password can’t match your user name -change.password.pwSameAsEmail = Password can’t match your email address +change.password.pwSameAsPassword = Password can't match current password +change.password.pwSameAsUsername = Password can't match your user name +change.password.pwSameAsEmail = Password can't match your email address change.password.mismatch = The passwords entered do not match change.password.tooShort = The password entered is too short diff --git a/services/sync/locales/en-US/sync.properties b/services/sync/locales/en-US/sync.properties index a1a6f76b2..f605221b6 100644 --- a/services/sync/locales/en-US/sync.properties +++ b/services/sync/locales/en-US/sync.properties @@ -3,14 +3,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # %1: the user name (Ed), %2: the app name (Firefox), %3: the operating system (Android) -client.name2 = %1$S’s %2$S on %3$S +client.name2 = %1$S's %2$S on %3$S # %S is the date and time at which the last sync successfully completed lastSync2.label = Last sync: %S -# signInToSync.description is the tooltip for the Sync buttons when Sync is -# not configured. -signInToSync.description = Sign In To Sync +mobile.label = Mobile Bookmarks + +remote.pending.label = Remote tabs are being synced… +remote.missing2.label = Sync your other devices again to access their tabs +remote.opened.label = All remote tabs are already open +remote.notification.label = Recent desktop tabs will be available once they sync error.login.title = Error While Signing In error.login.description = Sync encountered an error while connecting: %1$S. Please try again. @@ -31,16 +34,15 @@ error.sync.tryAgainButton.label = Sync Now error.sync.tryAgainButton.accesskey = S warning.sync.quota.label = Approaching Server Quota warning.sync.quota.description = You are approaching the server quota. Please review which data to sync. +error.sync.quota.label = Server Quota Exceeded +error.sync.quota.description = Sync failed because it exceeded the server quota. Please review which data to sync. 
 error.sync.viewQuotaButton.label = View Quota
 error.sync.viewQuotaButton.accesskey = V
 
 warning.sync.eol.label = Service Shutting Down
-# %1: the app name (Basilisk)
+# %1: the app name (Firefox)
 warning.sync.eol.description = Your Sync service is shutting down soon. Upgrade %1$S to keep syncing.
 error.sync.eol.label = Service Unavailable
-# %1: the app name (Basilisk)
+# %1: the app name (Firefox)
 error.sync.eol.description = Your Sync service is no longer available. You need to upgrade %1$S to keep syncing.
 sync.eol.learnMore.label = Learn more
 sync.eol.learnMore.accesskey = L
-
-syncnow.label = Sync Now
-syncing2.label = Syncing…
diff --git a/services/sync/locales/moz.build b/services/sync/locales/moz.build
index aac3a838c..3bbe67297 100644
--- a/services/sync/locales/moz.build
+++ b/services/sync/locales/moz.build
@@ -1,4 +1,4 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
 # vim: set filetype=python:
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
diff --git a/services/sync/modules-testing/fakeservices.js b/services/sync/modules-testing/fakeservices.js
index 2895736df..0e265937b 100644
--- a/services/sync/modules-testing/fakeservices.js
+++ b/services/sync/modules-testing/fakeservices.js
@@ -11,31 +11,17 @@ this.EXPORTED_SYMBOLS = [
   "fakeSHA256HMAC",
 ];
 
-var {utils: Cu} = Components;
+const {utils: Cu} = Components;
 
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/util.js");
 
-var btoa = Cu.import("resource://gre/modules/Log.jsm").btoa;
+let btoa = Cu.import("resource://gre/modules/Log.jsm").btoa;
 
 this.FakeFilesystemService = function FakeFilesystemService(contents) {
   this.fakeContents = contents;
   let self = this;
 
-  // Save away the unmocked versions of the functions we replace here for tests
-  // that really want the originals. As this may be called many times per test,
-  // we must be careful to not replace them with ones we previously replaced.
-  // (And WTF are we bothering with these mocks in the first place? Is the
-  // performance of the filesystem *really* such that it outweighs the downside
-  // of not running our real JSON functions in the tests? Eg, these mocks don't
-  // always throw exceptions when the real ones do. Anyway...)
-  for (let name of ["jsonSave", "jsonLoad", "jsonMove", "jsonRemove"]) {
-    let origName = "_real_" + name;
-    if (!Utils[origName]) {
-      Utils[origName] = Utils[name];
-    }
-  }
-
   Utils.jsonSave = function jsonSave(filePath, that, obj, callback) {
     let json = typeof obj == "function" ? obj.call(that) : obj;
     self.fakeContents["weave/" + filePath + ".json"] = JSON.stringify(json);
@@ -50,18 +36,6 @@ this.FakeFilesystemService = function FakeFilesystemService(contents) {
     }
     cb.call(that, obj);
   };
-
-  Utils.jsonMove = function jsonMove(aFrom, aTo, that) {
-    const fromPath = "weave/" + aFrom + ".json";
-    self.fakeContents["weave/" + aTo + ".json"] = self.fakeContents[fromPath];
-    delete self.fakeContents[fromPath];
-    return Promise.resolve();
-  };
-
-  Utils.jsonRemove = function jsonRemove(filePath, that) {
-    delete self.fakeContents["weave/" + filePath + ".json"];
-    return Promise.resolve();
-  };
 };
 
 this.fakeSHA256HMAC = function fakeSHA256HMAC(message) {
@@ -76,9 +50,7 @@ this.FakeGUIDService = function FakeGUIDService() {
   let latestGUID = 0;
 
   Utils.makeGUID = function makeGUID() {
-    // ensure that this always returns a unique 12 character string
-    let nextGUID = "fake-guid-" + String(latestGUID++).padStart(2, "0");
-    return nextGUID.slice(nextGUID.length-12, nextGUID.length);
+    return "fake-guid-" + latestGUID++;
   };
 }
diff --git a/services/sync/modules-testing/fxa_utils.js b/services/sync/modules-testing/fxa_utils.js
index 70aa17b03..4c622660a 100644
--- a/services/sync/modules-testing/fxa_utils.js
+++ b/services/sync/modules-testing/fxa_utils.js
@@ -4,7 +4,7 @@ this.EXPORTED_SYMBOLS = [
   "initializeIdentityWithTokenServerResponse",
 ];
 
-var {utils: Cu} = Components;
+const {utils: Cu} = Components;
 
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/main.js");
diff --git a/services/sync/modules-testing/rotaryengine.js b/services/sync/modules-testing/rotaryengine.js
index 9d3bf723d..d86c1a75e 100644
--- a/services/sync/modules-testing/rotaryengine.js
+++ b/services/sync/modules-testing/rotaryengine.js
@@ -11,7 +11,7 @@ this.EXPORTED_SYMBOLS = [
   "RotaryTracker",
 ];
 
-var {utils: Cu} = Components;
+const {utils: Cu} = Components;
 
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/record.js");
@@ -32,8 +32,8 @@ RotaryRecord.prototype = {
 };
 Utils.deferGetSet(RotaryRecord, "cleartext", ["denomination"]);
 
-this.RotaryStore = function RotaryStore(name, engine) {
-  Store.call(this, name, engine);
+this.RotaryStore = function RotaryStore(engine) {
+  Store.call(this, "Rotary", engine);
   this.items = {};
 }
 RotaryStore.prototype = {
@@ -88,8 +88,8 @@ RotaryStore.prototype = {
   }
 };
 
-this.RotaryTracker = function RotaryTracker(name, engine) {
-  Tracker.call(this, name, engine);
+this.RotaryTracker = function RotaryTracker(engine) {
+  Tracker.call(this, "Rotary", engine);
 }
 RotaryTracker.prototype = {
   __proto__: Tracker.prototype
@@ -115,7 +115,7 @@ RotaryEngine.prototype = {
       return "DUPE_LOCAL";
     }
 
-    for (let [id, value] of Object.entries(this._store.items)) {
+    for (let [id, value] in Iterator(this._store.items)) {
       if (item.denomination == value) {
         return id;
       }
diff --git a/services/sync/modules-testing/utils.js b/services/sync/modules-testing/utils.js
index 261c2bb21..faea8fb04 100644
--- a/services/sync/modules-testing/utils.js
+++ b/services/sync/modules-testing/utils.js
@@ -7,23 +7,18 @@ this.EXPORTED_SYMBOLS = [
   "btoa", // It comes from a module import.
   "encryptPayload",
-  "isConfiguredWithLegacyIdentity",
   "ensureLegacyIdentityManager",
   "setBasicCredentials",
   "makeIdentityConfig",
-  "makeFxAccountsInternalMock",
   "configureFxAccountIdentity",
   "configureIdentity",
   "SyncTestingInfrastructure",
   "waitForZeroTimer",
   "Promise", // from a module import
   "add_identity_test",
-  "MockFxaStorageManager",
-  "AccountState", // from a module import
-  "sumHistogram",
 ];
 
-var {utils: Cu} = Components;
+const {utils: Cu} = Components;
 
 Cu.import("resource://services-sync/status.js");
 Cu.import("resource://services-sync/identity.js");
@@ -34,49 +29,8 @@
 Cu.import("resource://services-sync/browserid_identity.js");
 Cu.import("resource://testing-common/services/common/logging.js");
 Cu.import("resource://testing-common/services/sync/fakeservices.js");
 Cu.import("resource://gre/modules/FxAccounts.jsm");
-Cu.import("resource://gre/modules/FxAccountsClient.jsm");
 Cu.import("resource://gre/modules/FxAccountsCommon.js");
 Cu.import("resource://gre/modules/Promise.jsm");
-Cu.import("resource://gre/modules/Services.jsm");
-
-// and grab non-exported stuff via a backstage pass.
-const {AccountState} = Cu.import("resource://gre/modules/FxAccounts.jsm", {});
-
-// A mock "storage manager" for FxAccounts that doesn't actually write anywhere.
-function MockFxaStorageManager() {
-}
-
-MockFxaStorageManager.prototype = {
-  promiseInitialized: Promise.resolve(),
-
-  initialize(accountData) {
-    this.accountData = accountData;
-  },
-
-  finalize() {
-    return Promise.resolve();
-  },
-
-  getAccountData() {
-    return Promise.resolve(this.accountData);
-  },
-
-  updateAccountData(updatedFields) {
-    for (let [name, value] of Object.entries(updatedFields)) {
-      if (value == null) {
-        delete this.accountData[name];
-      } else {
-        this.accountData[name] = value;
-      }
-    }
-    return Promise.resolve();
-  },
-
-  deleteAccountData() {
-    this.accountData = null;
-    return Promise.resolve();
-  }
-}
 
 /**
  * First wait >100ms (nsITimers can take up to that much time to fire, so
@@ -97,18 +51,6 @@ this.waitForZeroTimer = function waitForZeroTimer(callback) {
 }
 
 /**
- * Return true if Sync is configured with the "legacy" identity provider.
- */
-this.isConfiguredWithLegacyIdentity = function() {
-  let ns = {};
-  Cu.import("resource://services-sync/service.js", ns);
-
-  // We can't use instanceof as BrowserIDManager (the "other" identity) inherits
-  // from IdentityManager so that would return true - so check the prototype.
-  return Object.getPrototypeOf(ns.Service.identity) === IdentityManager.prototype;
-}
-
-/**
  * Ensure Sync is configured with the "legacy" identity provider.
  */
 this.ensureLegacyIdentityManager = function() {
@@ -145,15 +87,14 @@ this.makeIdentityConfig = function(overrides) {
       kA: 'kA',
       kB: 'kB',
       sessionToken: 'sessionToken',
-      uid: "a".repeat(32),
+      uid: 'user_uid',
       verified: true,
     },
     token: {
-      endpoint: null,
+      endpoint: Svc.Prefs.get("tokenServerURI"),
       duration: 300,
       id: "id",
       key: "key",
-      hashed_fxa_uid: "f".repeat(32), // used during telemetry validation
      // uid will be set to the username.
     }
   },
@@ -181,47 +122,27 @@
   return result;
 }
 
-this.makeFxAccountsInternalMock = function(config) {
-  return {
-    newAccountState(credentials) {
-      // We only expect this to be called with null indicating the (mock)
-      // storage should be read.
-      if (credentials) {
-        throw new Error("Not expecting to have credentials passed");
-      }
-      let storageManager = new MockFxaStorageManager();
-      storageManager.initialize(config.fxaccount.user);
-      let accountState = new AccountState(storageManager);
-      return accountState;
-    },
-    _getAssertion(audience) {
-      return Promise.resolve("assertion");
-    },
-  };
-};
-
 // Configure an instance of an FxAccount identity provider with the specified
 // config (or the default config if not specified).
 this.configureFxAccountIdentity = function(authService,
-                                           config = makeIdentityConfig(),
-                                           fxaInternal = makeFxAccountsInternalMock(config)) {
+                                           config = makeIdentityConfig()) {
+  let MockInternal = {};
+  let fxa = new FxAccounts(MockInternal);
+
   // until we get better test infrastructure for bid_identity, we set the
   // signedin user's "email" to the username, simply as many tests rely on this.
   config.fxaccount.user.email = config.username;
-
-  let fxa = new FxAccounts(fxaInternal);
-
-  let MockFxAccountsClient = function() {
-    FxAccountsClient.apply(this);
+  fxa.internal.currentAccountState.signedInUser = {
+    version: DATA_FORMAT_VERSION,
+    accountData: config.fxaccount.user
   };
-  MockFxAccountsClient.prototype = {
-    __proto__: FxAccountsClient.prototype,
-    accountStatus() {
-      return Promise.resolve(true);
-    }
+  fxa.internal.currentAccountState.getCertificate = function(data, keyPair, mustBeValidUntil) {
+    this.cert = {
+      validUntil: fxa.internal.now() + CERT_LIFETIME,
+      cert: "certificate",
+    };
+    return Promise.resolve(this.cert.cert);
   };
-  let mockFxAClient = new MockFxAccountsClient();
-  fxa.internal._fxAccountsClient = mockFxAClient;
 
   let mockTSC = { // TokenServerClient
     getTokenFromBrowserIDAssertion: function(uri, assertion, cb) {
@@ -233,7 +154,7 @@
   authService._tokenServerClient = mockTSC;
   // Set the "account" of the browserId manager to be the "email" of the
   // logged in user of the mockFXA service.
-  authService._signedInUser = config.fxaccount.user;
+  authService._signedInUser = fxa.internal.currentAccountState.signedInUser.accountData;
   authService._account = config.fxaccount.user.email;
 }
 
@@ -320,7 +241,7 @@ this.add_identity_test = function(test, testFunction) {
   let ns = {};
   Cu.import("resource://services-sync/service.js", ns);
   // one task for the "old" identity manager.
-  test.add_task(function* () {
+  test.add_task(function() {
     note("sync");
     let oldIdentity = Status._authManager;
     ensureLegacyIdentityManager();
@@ -328,7 +249,7 @@
     Status.__authManager = ns.Service.identity = oldIdentity;
   });
   // another task for the FxAccounts identity manager.
-  test.add_task(function* () {
+  test.add_task(function() {
     note("FxAccounts");
     let oldIdentity = Status._authManager;
     Status.__authManager = ns.Service.identity = new BrowserIDManager();
@@ -336,15 +257,3 @@
     Status.__authManager = ns.Service.identity = oldIdentity;
   });
 }
-
-this.sumHistogram = function(name, options = {}) {
-  let histogram = options.key ? Services.telemetry.getKeyedHistogramById(name) :
-                                Services.telemetry.getHistogramById(name);
-  let snapshot = histogram.snapshot(options.key);
-  let sum = -Infinity;
-  if (snapshot) {
-    sum = snapshot.sum;
-  }
-  histogram.clear();
-  return sum;
-}
diff --git a/services/sync/modules/FxaMigrator.jsm b/services/sync/modules/FxaMigrator.jsm
new file mode 100644
index 000000000..605ee5d7f
--- /dev/null
+++ b/services/sync/modules/FxaMigrator.jsm
@@ -0,0 +1,546 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/ + +"use strict;" + +const {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components; + +Cu.import("resource://gre/modules/XPCOMUtils.jsm"); +Cu.import("resource://gre/modules/Log.jsm"); +Cu.import("resource://gre/modules/Services.jsm"); +Cu.import("resource://gre/modules/Task.jsm"); + +XPCOMUtils.defineLazyModuleGetter(this, "fxAccounts", + "resource://gre/modules/FxAccounts.jsm"); + +XPCOMUtils.defineLazyGetter(this, "WeaveService", function() { + return Cc["@mozilla.org/weave/service;1"] + .getService(Components.interfaces.nsISupports) + .wrappedJSObject; +}); + +XPCOMUtils.defineLazyModuleGetter(this, "Weave", + "resource://services-sync/main.js"); + +// FxAccountsCommon.js doesn't use a "namespace", so create one here. +let fxAccountsCommon = {}; +Cu.import("resource://gre/modules/FxAccountsCommon.js", fxAccountsCommon); + +// We send this notification whenever the "user" migration state changes. +const OBSERVER_STATE_CHANGE_TOPIC = "fxa-migration:state-changed"; +// We also send the state notification when we *receive* this. This allows +// consumers to avoid loading this module until it receives a notification +// from us (which may never happen if there's no migration to do) +const OBSERVER_STATE_REQUEST_TOPIC = "fxa-migration:state-request"; + +// We send this notification whenever the migration is paused waiting for +// something internal to complete. +const OBSERVER_INTERNAL_STATE_CHANGE_TOPIC = "fxa-migration:internal-state-changed"; + +// We use this notification so Sync's healthreport module can record telemetry +// (actually via "health report") for us. +const OBSERVER_INTERNAL_TELEMETRY_TOPIC = "fxa-migration:internal-telemetry"; + +const OBSERVER_TOPICS = [ + "xpcom-shutdown", + "weave:service:sync:start", + "weave:service:sync:finish", + "weave:service:sync:error", + "weave:eol", + OBSERVER_STATE_REQUEST_TOPIC, + fxAccountsCommon.ONLOGIN_NOTIFICATION, + fxAccountsCommon.ONLOGOUT_NOTIFICATION, + fxAccountsCommon.ONVERIFIED_NOTIFICATION, +]; + +// A list of preference names we write to the migration sentinel. We only +// write ones that have a user-set value. +const FXA_SENTINEL_PREFS = [ + "identity.fxaccounts.auth.uri", + "identity.fxaccounts.remote.force_auth.uri", + "identity.fxaccounts.remote.signup.uri", + "identity.fxaccounts.remote.signin.uri", + "identity.fxaccounts.settings.uri", + "services.sync.tokenServerURI", +]; + +function Migrator() { + // Leave the log-level as Debug - Sync will setup log appenders such that + // these messages generally will not be seen unless other log related + // prefs are set. + this.log.level = Log.Level.Debug; + + this._nextUserStatePromise = Promise.resolve(); + + for (let topic of OBSERVER_TOPICS) { + Services.obs.addObserver(this, topic, false); + } + // ._state is an optimization so we avoid sending redundant observer + // notifications when the state hasn't actually changed. + this._state = null; +} + +Migrator.prototype = { + log: Log.repository.getLogger("Sync.SyncMigration"), + + // What user action is necessary to push the migration forward? + // A |null| state means there is nothing to do. Note that a null state implies + // either. (a) no migration is necessary or (b) that the migrator module is + // waiting for something outside of the user's control - eg, sync to complete, + // the migration sentinel to be uploaded, etc. In most cases the wait will be + // short, but edge cases (eg, no network, sync bugs that prevent it stopping + // until shutdown) may require a significantly longer wait. 
+  STATE_USER_FXA: "waiting for user to be signed in to FxA",
+  STATE_USER_FXA_VERIFIED: "waiting for a verified FxA user",
+
+  // What internal state are we at? This is primarily used for FHR reporting so
+  // we can determine why exactly we might be stalled.
+  STATE_INTERNAL_WAITING_SYNC_COMPLETE: "waiting for sync to complete",
+  STATE_INTERNAL_WAITING_WRITE_SENTINEL: "waiting for sentinel to be written",
+  STATE_INTERNAL_WAITING_START_OVER: "waiting for sync to reset itself",
+  STATE_INTERNAL_COMPLETE: "migration complete",
+
+  // Flags for the telemetry we record. The UI will call a helper to record
+  // the fact some UI was interacted with.
+  TELEMETRY_ACCEPTED: "accepted",
+  TELEMETRY_DECLINED: "declined",
+  TELEMETRY_UNLINKED: "unlinked",
+
+  finalize() {
+    for (let topic of OBSERVER_TOPICS) {
+      Services.obs.removeObserver(this, topic);
+    }
+  },
+
+  observe(subject, topic, data) {
+    this.log.debug("observed " + topic);
+    switch (topic) {
+      case "xpcom-shutdown":
+        this.finalize();
+        break;
+
+      case OBSERVER_STATE_REQUEST_TOPIC:
+        // someone has requested the state - send it.
+        this._queueCurrentUserState(true);
+        break;
+
+      default:
+        // some other observer that may affect our state has fired, so update.
+        this._queueCurrentUserState().then(
+          () => this.log.debug("update state from observer " + topic + " complete")
+        ).catch(err => {
+          let msg = "Failed to handle topic " + topic + ": " + err;
+          Cu.reportError(msg);
+          this.log.error(msg);
+        });
+    }
+  },
+
+  // Try and move to a state where we are blocked on a user action.
+  // This needs to be restartable, and the states may, in edge-cases, end
+  // up going backwards (eg, user logs out while we are waiting to be told
+  // about verification)
+  // This is called by our observer notifications - so if there is already
+  // a promise in-flight, it's possible we will miss something important - so
+  // we wait for the in-flight one to complete then fire another (ie, this
+  // is effectively a queue of promises)
+  _queueCurrentUserState(forceObserver = false) {
+    return this._nextUserStatePromise = this._nextUserStatePromise.then(
+      () => this._promiseCurrentUserState(forceObserver),
+      err => {
+        let msg = "Failed to determine the current user state: " + err;
+        Cu.reportError(msg);
+        this.log.error(msg);
+        return this._promiseCurrentUserState(forceObserver)
+      }
+    );
+  },
+
+  _promiseCurrentUserState: Task.async(function* (forceObserver) {
+    this.log.trace("starting _promiseCurrentUserState");
+    let update = (newState, email=null) => {
+      this.log.info("Migration state: '${state}' => '${newState}'",
+                    {state: this._state, newState: newState});
+      if (forceObserver || newState !== this._state) {
+        this._state = newState;
+        let subject = Cc["@mozilla.org/supports-string;1"]
+                      .createInstance(Ci.nsISupportsString);
+        subject.data = email || "";
+        Services.obs.notifyObservers(subject, OBSERVER_STATE_CHANGE_TOPIC, newState);
+      }
+      return newState;
+    }
+
+    // If we have no sync user, or are already using an FxA account we must
+    // be done.
+    if (WeaveService.fxAccountsEnabled) {
+      // should not be necessary, but if we somehow ended up with FxA enabled
+      // and sync blocked it would be bad - so better safe than sorry.
+      this.log.debug("FxA enabled - there's nothing to do!")
+      this._unblockSync();
+      return update(null);
+    }
+
+    // so we need to migrate - let's see how far along we are.
+    // If sync isn't in EOL mode, then we are still waiting for the server
+    // to offer the migration process - so no user action necessary.
+    let isEOL = false;
+    try {
+      isEOL = !!Services.prefs.getCharPref("services.sync.errorhandler.alert.mode");
+    } catch (e) {}
+
+    if (!isEOL) {
+      return update(null);
+    }
+
+    // So we are in EOL mode - have we a user?
+    let fxauser = yield fxAccounts.getSignedInUser();
+    if (!fxauser) {
+      // See if there is a migration sentinel so we can send the email
+      // address that was used on a different device for this account (ie, if
+      // this is a "join the party" migration rather than the first)
+      let sentinel = yield this._getSyncMigrationSentinel();
+      return update(this.STATE_USER_FXA, sentinel && sentinel.email);
+    }
+    if (!fxauser.verified) {
+      return update(this.STATE_USER_FXA_VERIFIED, fxauser.email);
+    }
+
+    // So we just have housekeeping to do - we aren't blocked on a user, so
+    // reflect that.
+    this.log.info("No next user state - doing some housekeeping");
+    update(null);
+
+    // We need to disable sync from automatically starting,
+    // and if we are currently syncing wait for it to complete.
+    this._blockSync();
+
+    // Are we currently syncing?
+    if (Weave.Service._locked) {
+      // our observers will kick us further along when complete.
+      this.log.info("waiting for sync to complete")
+      Services.obs.notifyObservers(null, OBSERVER_INTERNAL_STATE_CHANGE_TOPIC,
+                                   this.STATE_INTERNAL_WAITING_SYNC_COMPLETE);
+      return null;
+    }
+
+    // Write the migration sentinel if necessary.
+    Services.obs.notifyObservers(null, OBSERVER_INTERNAL_STATE_CHANGE_TOPIC,
+                                 this.STATE_INTERNAL_WAITING_WRITE_SENTINEL);
+    yield this._setMigrationSentinelIfNecessary();
+
+    // Get the list of enabled engines to we can restore that state.
+    let enginePrefs = this._getEngineEnabledPrefs();
+
+    // Must be ready to perform the actual migration.
+    this.log.info("Performing final sync migration steps");
+    // Do the actual migration. We setup one observer for when the new identity
+    // is about to be initialized so we can reset some key preferences - but
+    // there's no promise associated with this.
+    let observeStartOverIdentity;
+    Services.obs.addObserver(observeStartOverIdentity = () => {
+      this.log.info("observed that startOver is about to re-initialize the identity");
+      Services.obs.removeObserver(observeStartOverIdentity, "weave:service:start-over:init-identity");
+      // We've now reset all sync prefs - set the engine related prefs back to
+      // what they were.
+      for (let [prefName, prefType, prefVal] of enginePrefs) {
+        this.log.debug("Restoring pref ${prefName} (type=${prefType}) to ${prefVal}",
+                       {prefName, prefType, prefVal});
+        switch (prefType) {
+          case Services.prefs.PREF_BOOL:
+            Services.prefs.setBoolPref(prefName, prefVal);
+            break;
+          case Services.prefs.PREF_STRING:
+            Services.prefs.setCharPref(prefName, prefVal);
+            break;
+          default:
+            // _getEngineEnabledPrefs doesn't return any other type...
+            Cu.reportError("unknown engine pref type for " + prefName + ": " + prefType);
+        }
+      }
+    }, "weave:service:start-over:init-identity", false);
+
+    // And another observer for the startOver being fully complete - the only
+    // reason for this is so we can wait until everything is fully reset.
+    let startOverComplete = new Promise((resolve, reject) => {
+      let observe;
+      Services.obs.addObserver(observe = () => {
+        this.log.info("observed that startOver is complete");
+        Services.obs.removeObserver(observe, "weave:service:start-over:finish");
+        resolve();
+      }, "weave:service:start-over:finish", false);
+    });
+
+    Weave.Service.startOver();
+    // need to wait for an observer.
+    Services.obs.notifyObservers(null, OBSERVER_INTERNAL_STATE_CHANGE_TOPIC,
+                                 this.STATE_INTERNAL_WAITING_START_OVER);
+    yield startOverComplete;
+    // observer fired, now kick things off with the FxA user.
+    this.log.info("scheduling initial FxA sync.");
+    // Note we technically don't need to unblockSync as by now all sync prefs
+    // have been reset - but it doesn't hurt.
+    this._unblockSync();
+    Weave.Service.scheduler.scheduleNextSync(0);
+
+    // Tell the front end that migration is now complete -- Sync is now
+    // configured with an FxA user.
+    forceObserver = true;
+    this.log.info("Migration complete");
+    update(null);
+
+    Services.obs.notifyObservers(null, OBSERVER_INTERNAL_STATE_CHANGE_TOPIC,
+                                 this.STATE_INTERNAL_COMPLETE);
+    return null;
+  }),
+
+  /* Return an object with the preferences we care about */
+  _getSentinelPrefs() {
+    let result = {};
+    for (let pref of FXA_SENTINEL_PREFS) {
+      if (Services.prefs.prefHasUserValue(pref)) {
+        result[pref] = Services.prefs.getCharPref(pref);
+      }
+    }
+    return result;
+  },
+
+  /* Apply any preferences we've obtained from the sentinel */
+  _applySentinelPrefs(savedPrefs) {
+    for (let pref of FXA_SENTINEL_PREFS) {
+      if (savedPrefs[pref]) {
+        Services.prefs.setCharPref(pref, savedPrefs[pref]);
+      }
+    }
+  },
+
+  /* Ask sync to upload the migration sentinel */
+  _setSyncMigrationSentinel: Task.async(function* () {
+    yield WeaveService.whenLoaded();
+    let signedInUser = yield fxAccounts.getSignedInUser();
+    let sentinel = {
+      email: signedInUser.email,
+      uid: signedInUser.uid,
+      verified: signedInUser.verified,
+      prefs: this._getSentinelPrefs(),
+    };
+    yield Weave.Service.setFxAMigrationSentinel(sentinel);
+  }),
+
+  /* Ask sync to upload the migration sentinal if we (or any other linked device)
+     haven't previously written one.
+   */
+  _setMigrationSentinelIfNecessary: Task.async(function* () {
+    if (!(yield this._getSyncMigrationSentinel())) {
+      this.log.info("writing the migration sentinel");
+      yield this._setSyncMigrationSentinel();
+    }
+  }),
+
+  /* Ask sync to return a migration sentinel if one exists, otherwise return null */
+  _getSyncMigrationSentinel: Task.async(function* () {
+    yield WeaveService.whenLoaded();
+    let sentinel = yield Weave.Service.getFxAMigrationSentinel();
+    this.log.debug("got migration sentinel ${}", sentinel);
+    return sentinel;
+  }),
+
+  _getDefaultAccountName: Task.async(function* (sentinel) {
+    // Requires looking to see if other devices have written a migration
+    // sentinel (eg, see _haveSynchedMigrationSentinel), and if not, see if
+    // the legacy account name appears to be a valid email address (via the
+    // services.sync.account pref), otherwise return null.
+    // NOTE: Sync does all this synchronously via nested event loops, but we
+    // expose a promise to make future migration to an async-sync easier.
+    if (sentinel && sentinel.email) {
+      this.log.info("defaultAccountName found via sentinel: ${}", sentinel.email);
+      return sentinel.email;
+    }
+    // No previous migrations, so check the existing account name.
+    let account = Weave.Service.identity.account;
+    if (account && account.contains("@")) {
+      this.log.info("defaultAccountName found via legacy account name: {}", account);
+      return account;
+    }
+    this.log.info("defaultAccountName could not find an account");
+    return null;
+  }),
+
+  // Prevent sync from automatically starting
+  _blockSync() {
+    Weave.Service.scheduler.blockSync();
+  },
+
+  _unblockSync() {
+    Weave.Service.scheduler.unblockSync();
+  },
+
+  /* Return a list of [prefName, prefType, prefVal] for all engine related
+     preferences.
+   */
+  _getEngineEnabledPrefs() {
+    let result = [];
+    for (let engine of Weave.Service.engineManager.getAll()) {
+      let prefName = "services.sync.engine." + engine.prefName;
+      let prefVal;
+      try {
+        prefVal = Services.prefs.getBoolPref(prefName);
+        result.push([prefName, Services.prefs.PREF_BOOL, prefVal]);
+      } catch (ex) {} /* just skip this pref */
+    }
+    // and the declined list.
+    try {
+      let prefName = "services.sync.declinedEngines";
+      let prefVal = Services.prefs.getCharPref(prefName);
+      result.push([prefName, Services.prefs.PREF_STRING, prefVal]);
+    } catch (ex) {}
+    return result;
+  },
+
+  /* return true if all engines are enabled, false otherwise. */
+  _allEnginesEnabled() {
+    return Weave.Service.engineManager.getAll().every(e => e.enabled);
+  },
+
+  /*
+   * Some helpers for the UI to try and move to the next state.
+   */
+
+  // Open a UI for the user to create a Firefox Account. This should only be
+  // called while we are in the STATE_USER_FXA state. When the user completes
+  // the creation we'll see an ONLOGIN_NOTIFICATION notification from FxA and
+  // we'll move to either the STATE_USER_FXA_VERIFIED state or we'll just
+  // complete the migration if they login as an already verified user.
+  createFxAccount: Task.async(function* (win) {
+    let {url, options} = yield this.getFxAccountCreationOptions();
+    win.switchToTabHavingURI(url, true, options);
+    // An FxA observer will fire when the user completes this, which will
+    // cause us to move to the next "user blocked" state and notify via our
+    // observer notification.
+  }),
+
+  // Returns an object with properties "url" and "options", suitable for
+  // opening FxAccounts to create/signin to FxA suitable for the migration
+  // state. The caller of this is responsible for the actual opening of the
+  // page.
+  // This should only be called while we are in the STATE_USER_FXA state. When
+  // the user completes the creation we'll see an ONLOGIN_NOTIFICATION
+  // notification from FxA and we'll move to either the STATE_USER_FXA_VERIFIED
+  // state or we'll just complete the migration if they login as an already
+  // verified user.
+  getFxAccountCreationOptions: Task.async(function* (win) {
+    // warn if we aren't in the expected state - but go ahead anyway!
+    if (this._state != this.STATE_USER_FXA) {
+      this.log.warn("getFxAccountCreationOptions called in an unexpected state: ${}", this._state);
+    }
+    // We need to obtain the sentinel and apply any prefs that might be
+    // specified *before* attempting to setup FxA as the prefs might
+    // specify custom servers etc.
+    let sentinel = yield this._getSyncMigrationSentinel();
+    if (sentinel && sentinel.prefs) {
+      this._applySentinelPrefs(sentinel.prefs);
+    }
+    // If we already have a sentinel then we assume the user has previously
+    // created the specified account, so just ask to sign-in.
+    let action = sentinel ? "signin" : "signup";
+    // See if we can find a default account name to use.
+    let email = yield this._getDefaultAccountName(sentinel);
+    let tail = email ? "&email=" + encodeURIComponent(email) : "";
+    // A special flag so server-side metrics can tell this is part of migration.
+    tail += "&migration=sync11";
+    // We want to ask FxA to offer a "Customize Sync" checkbox iff any engines
+    // are disabled.
+    let customize = !this._allEnginesEnabled();
+    tail += "&customizeSync=" + customize;
+
+    // We assume the caller of this is going to actually use it, so record
+    // telemetry now.
+    this.recordTelemetry(this.TELEMETRY_ACCEPTED);
+    return {
+      url: "about:accounts?action=" + action + tail,
+      options: {ignoreFragment: true, replaceQueryString: true}
+    };
+  }),
+
+  // Ask the FxA servers to re-send a verification mail for the currently
+  // logged in user. This should only be called while we are in the
+  // STATE_USER_FXA_VERIFIED state. When the user clicks on the link in
+  // the mail we should see an ONVERIFIED_NOTIFICATION which will cause us
+  // to complete the migration.
+  resendVerificationMail: Task.async(function * (win) {
+    // warn if we aren't in the expected state - but go ahead anyway!
+    if (this._state != this.STATE_USER_FXA_VERIFIED) {
+      this.log.warn("resendVerificationMail called in an unexpected state: ${}", this._state);
+    }
+    let ok = true;
+    try {
+      yield fxAccounts.resendVerificationEmail();
+    } catch (ex) {
+      this.log.error("Failed to resend verification mail: ${}", ex);
+      ok = false;
+    }
+    this.recordTelemetry(this.TELEMETRY_ACCEPTED);
+    let fxauser = yield fxAccounts.getSignedInUser();
+    let sb = Services.strings.createBundle("chrome://browser/locale/accounts.properties");
+
+    let heading = ok ?
+                  sb.formatStringFromName("verificationSentHeading", [fxauser.email], 1) :
+                  sb.GetStringFromName("verificationNotSentHeading");
+    let title = sb.GetStringFromName(ok ? "verificationSentTitle" : "verificationNotSentTitle");
+    let description = sb.GetStringFromName(ok ? "verificationSentDescription"
+                                              : "verificationNotSentDescription");
+
+    let factory = Cc["@mozilla.org/prompter;1"]
+                  .getService(Ci.nsIPromptFactory);
+    let prompt = factory.getPrompt(win, Ci.nsIPrompt);
+    let bag = prompt.QueryInterface(Ci.nsIWritablePropertyBag2);
+    bag.setPropertyAsBool("allowTabModal", true);
+
+    prompt.alert(title, heading + "\n\n" + description);
+  }),
+
+  // "forget" about the current Firefox account. This should only be called
+  // while we are in the STATE_USER_FXA_VERIFIED state. After this we will
+  // see an ONLOGOUT_NOTIFICATION, which will cause the migrator to return back
+  // to the STATE_USER_FXA state, from where they can choose a different account.
+  forgetFxAccount: Task.async(function * () {
+    // warn if we aren't in the expected state - but go ahead anyway!
+    if (this._state != this.STATE_USER_FXA_VERIFIED) {
+      this.log.warn("forgetFxAccount called in an unexpected state: ${}", this._state);
+    }
+    return fxAccounts.signOut();
+  }),
+
+  recordTelemetry(flag) {
+    // Note the value is the telemetry field name - but this is an
+    // implementation detail which could be changed later.
+    switch (flag) {
+      case this.TELEMETRY_ACCEPTED:
+      case this.TELEMETRY_UNLINKED:
+      case this.TELEMETRY_DECLINED:
+        Services.obs.notifyObservers(null, OBSERVER_INTERNAL_TELEMETRY_TOPIC, flag);
+        break;
+      default:
+        throw new Error("Unexpected telemetry flag: " + flag);
+    }
+  },
+
+  get learnMoreLink() {
+    try {
+      var url = Services.prefs.getCharPref("app.support.baseURL");
+    } catch (err) {
+      return null;
+    }
+    url += "sync-upgrade";
+    let sb = Services.strings.createBundle("chrome://weave/locale/services/sync.properties");
+    return {
+      text: sb.GetStringFromName("sync.eol.learnMore.label"),
+      href: Services.urlFormatter.formatURL(url),
+    };
+  },
+};
+
+// We expose a singleton
+this.EXPORTED_SYMBOLS = ["fxaMigrator"];
+let fxaMigrator = new Migrator();
diff --git a/services/sync/modules/SyncedTabs.jsm b/services/sync/modules/SyncedTabs.jsm
deleted file mode 100644
index 1a69e3564..000000000
--- a/services/sync/modules/SyncedTabs.jsm
+++ /dev/null
@@ -1,301 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-"use strict";
-
-this.EXPORTED_SYMBOLS = ["SyncedTabs"];
-
-
-const { classes: Cc, interfaces: Ci, results: Cr, utils: Cu } = Components;
-
-Cu.import("resource://gre/modules/Services.jsm");
-Cu.import("resource://gre/modules/XPCOMUtils.jsm");
-Cu.import("resource://gre/modules/Task.jsm");
-Cu.import("resource://gre/modules/Log.jsm");
-Cu.import("resource://gre/modules/PlacesUtils.jsm", this);
-Cu.import("resource://services-sync/main.js");
-Cu.import("resource://gre/modules/Preferences.jsm");
-
-// The Sync XPCOM service
-XPCOMUtils.defineLazyGetter(this, "weaveXPCService", function() {
-  return Cc["@mozilla.org/weave/service;1"]
-         .getService(Ci.nsISupports)
-         .wrappedJSObject;
-});
-
-// from MDN...
-function escapeRegExp(string) {
-  return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
-}
-
-// A topic we fire whenever we have new tabs available. This might be due
-// to a request made by this module to refresh the tab list, or as the result
-// of a regularly scheduled sync. The intent is that consumers just listen
-// for this notification and update their UI in response.
-const TOPIC_TABS_CHANGED = "services.sync.tabs.changed";
-
-// The interval, in seconds, before which we consider the existing list
-// of tabs "fresh enough" and don't force a new sync.
-const TABS_FRESH_ENOUGH_INTERVAL = 30;
-
-let log = Log.repository.getLogger("Sync.RemoteTabs");
-// A new scope to do the logging thang...
-(function() {
-  let level = Preferences.get("services.sync.log.logger.tabs");
-  if (level) {
-    let appender = new Log.DumpAppender();
-    log.level = appender.level = Log.Level[level] || Log.Level.Debug;
-    log.addAppender(appender);
-  }
-})();
-
-
-// A private singleton that does the work.
-let SyncedTabsInternal = {
-  /* Make a "tab" record. Returns a promise */
-  _makeTab: Task.async(function* (client, tab, url, showRemoteIcons) {
-    let icon;
-    if (showRemoteIcons) {
-      icon = tab.icon;
-    }
-    if (!icon) {
-      try {
-        icon = (yield PlacesUtils.promiseFaviconLinkUrl(url)).spec;
-      } catch (ex) { /* no favicon avaiable */ }
-    }
-    if (!icon) {
-      icon = "";
-    }
-    return {
-      type: "tab",
-      title: tab.title || url,
-      url,
-      icon,
-      client: client.id,
-      lastUsed: tab.lastUsed,
-    };
-  }),
-
-  /* Make a "client" record. Returns a promise for consistency with _makeTab */
-  _makeClient: Task.async(function* (client) {
-    return {
-      id: client.id,
-      type: "client",
-      name: Weave.Service.clientsEngine.getClientName(client.id),
-      isMobile: Weave.Service.clientsEngine.isMobile(client.id),
-      lastModified: client.lastModified * 1000, // sec to ms
-      tabs: []
-    };
-  }),
-
-  _tabMatchesFilter(tab, filter) {
-    let reFilter = new RegExp(escapeRegExp(filter), "i");
-    return tab.url.match(reFilter) || tab.title.match(reFilter);
-  },
-
-  getTabClients: Task.async(function* (filter) {
-    log.info("Generating tab list with filter", filter);
-    let result = [];
-
-    // If Sync isn't ready, don't try and get anything.
-    if (!weaveXPCService.ready) {
-      log.debug("Sync isn't yet ready, so returning an empty tab list");
-      return result;
-    }
-
-    // A boolean that controls whether we should show the icon from the remote tab.
-    const showRemoteIcons = Preferences.get("services.sync.syncedTabs.showRemoteIcons", true);
-
-    let engine = Weave.Service.engineManager.get("tabs");
-
-    let seenURLs = new Set();
-    let parentIndex = 0;
-    let ntabs = 0;
-
-    for (let [guid, client] of Object.entries(engine.getAllClients())) {
-      if (!Weave.Service.clientsEngine.remoteClientExists(client.id)) {
-        continue;
-      }
-      let clientRepr = yield this._makeClient(client);
-      log.debug("Processing client", clientRepr);
-
-      for (let tab of client.tabs) {
-        let url = tab.urlHistory[0];
-        log.debug("remote tab", url);
-        // Note there are some issues with tracking "seen" tabs, including:
-        // * We really can't return the entire urlHistory record as we are
-        //   only checking the first entry - others might be different.
-        // * We don't update the |lastUsed| timestamp to reflect the
-        //   most-recently-seen time.
-        // In a followup we should consider simply dropping this |seenUrls|
-        // check and return duplicate records - it seems the user will be more
-        // confused by tabs not showing up on a device (because it was detected
-        // as a dupe so it only appears on a different device) than being
-        // confused by seeing the same tab on different clients.
-        if (!url || seenURLs.has(url)) {
-          continue;
-        }
-        let tabRepr = yield this._makeTab(client, tab, url, showRemoteIcons);
-        if (filter && !this._tabMatchesFilter(tabRepr, filter)) {
-          continue;
-        }
-        seenURLs.add(url);
-        clientRepr.tabs.push(tabRepr);
-      }
-      // We return all clients, even those without tabs - the consumer should
-      // filter it if they care.
-      ntabs += clientRepr.tabs.length;
-      result.push(clientRepr);
-    }
-    log.info(`Final tab list has ${result.length} clients with ${ntabs} tabs.`);
-    return result;
-  }),
-
-  syncTabs(force) {
-    if (!force) {
-      // Don't bother refetching tabs if we already did so recently
-      let lastFetch = Preferences.get("services.sync.lastTabFetch", 0);
-      let now = Math.floor(Date.now() / 1000);
-      if (now - lastFetch < TABS_FRESH_ENOUGH_INTERVAL) {
-        log.info("_refetchTabs was done recently, do not doing it again");
-        return Promise.resolve(false);
-      }
-    }
-
-    // If Sync isn't configured don't try and sync, else we will get reports
-    // of a login failure.
-    if (Weave.Status.checkSetup() == Weave.CLIENT_NOT_CONFIGURED) {
-      log.info("Sync client is not configured, so not attempting a tab sync");
-      return Promise.resolve(false);
-    }
-    // Ask Sync to just do the tabs engine if it can.
-    // Sync is currently synchronous, so do it after an event-loop spin to help
-    // keep the UI responsive.
-    return new Promise((resolve, reject) => {
-      Services.tm.currentThread.dispatch(() => {
-        try {
-          log.info("Doing a tab sync.");
-          Weave.Service.sync(["tabs"]);
-          resolve(true);
-        } catch (ex) {
-          log.error("Sync failed", ex);
-          reject(ex);
-        };
-      }, Ci.nsIThread.DISPATCH_NORMAL);
-    });
-  },
-
-  observe(subject, topic, data) {
-    log.trace(`observed topic=${topic}, data=${data}, subject=${subject}`);
-    switch (topic) {
-      case "weave:engine:sync:finish":
-        if (data != "tabs") {
-          return;
-        }
-        // The tabs engine just finished syncing
-        // Set our lastTabFetch pref here so it tracks both explicit sync calls
-        // and normally scheduled ones.
-        Preferences.set("services.sync.lastTabFetch", Math.floor(Date.now() / 1000));
-        Services.obs.notifyObservers(null, TOPIC_TABS_CHANGED, null);
-        break;
-      case "weave:service:start-over":
-        // start-over needs to notify so consumers find no tabs.
-        Preferences.reset("services.sync.lastTabFetch");
-        Services.obs.notifyObservers(null, TOPIC_TABS_CHANGED, null);
-        break;
-      case "nsPref:changed":
-        Services.obs.notifyObservers(null, TOPIC_TABS_CHANGED, null);
-        break;
-      default:
-        break;
-    }
-  },
-
-  // Returns true if Sync is configured to Sync tabs, false otherwise
-  get isConfiguredToSyncTabs() {
-    if (!weaveXPCService.ready) {
-      log.debug("Sync isn't yet ready; assuming tab engine is enabled");
-      return true;
-    }
-
-    let engine = Weave.Service.engineManager.get("tabs");
-    return engine && engine.enabled;
-  },
-
-  get hasSyncedThisSession() {
-    let engine = Weave.Service.engineManager.get("tabs");
-    return engine && engine.hasSyncedThisSession;
-  },
-};
-
-Services.obs.addObserver(SyncedTabsInternal, "weave:engine:sync:finish", false);
-Services.obs.addObserver(SyncedTabsInternal, "weave:service:start-over", false);
-// Observe the pref the indicates the state of the tabs engine has changed.
-// This will force consumers to re-evaluate the state of sync and update
-// accordingly.
-Services.prefs.addObserver("services.sync.engine.tabs", SyncedTabsInternal, false);
-
-// The public interface.
-this.SyncedTabs = {
-  // A mock-point for tests.
-  _internal: SyncedTabsInternal,
-
-  // We make the topic for the observer notification public.
-  TOPIC_TABS_CHANGED,
-
-  // Returns true if Sync is configured to Sync tabs, false otherwise
-  get isConfiguredToSyncTabs() {
-    return this._internal.isConfiguredToSyncTabs;
-  },
-
-  // Returns true if a tab sync has completed once this session. If this
-  // returns false, then getting back no clients/tabs possibly just means we
-  // are waiting for that first sync to complete.
-  get hasSyncedThisSession() {
-    return this._internal.hasSyncedThisSession;
-  },
-
-  // Return a promise that resolves with an array of client records, each with
-  // a .tabs array. Note that part of the contract for this module is that the
-  // returned objects are not shared between invocations, so callers are free
-  // to mutate the returned objects (eg, sort, truncate) however they see fit.
-  getTabClients(query) {
-    return this._internal.getTabClients(query);
-  },
-
-  // Starts a background request to start syncing tabs. Returns a promise that
-  // resolves when the sync is complete, but there's no resolved value -
-  // callers should be listening for TOPIC_TABS_CHANGED.
-  // If |force| is true we always sync. If false, we only sync if the most
-  // recent sync wasn't "recently".
-  syncTabs(force) {
-    return this._internal.syncTabs(force);
-  },
-
-  sortTabClientsByLastUsed(clients, maxTabs = Infinity) {
-    // First sort and filter the list of tabs for each client. Note that
-    // this module promises that the objects it returns are never
-    // shared, so we are free to mutate those objects directly.
-    for (let client of clients) {
-      let tabs = client.tabs;
-      tabs.sort((a, b) => b.lastUsed - a.lastUsed);
-      if (Number.isFinite(maxTabs)) {
-        client.tabs = tabs.slice(0, maxTabs);
-      }
-    }
-    // Now sort the clients - the clients are sorted in the order of the
-    // most recent tab for that client (ie, it is important the tabs for
-    // each client are already sorted.)
-    clients.sort((a, b) => {
-      if (a.tabs.length == 0) {
-        return 1; // b comes first.
-      }
-      if (b.tabs.length == 0) {
-        return -1; // a comes first.
-      }
-      return b.tabs[0].lastUsed - a.tabs[0].lastUsed;
-    });
-  },
-};
-
diff --git a/services/sync/modules/addonsreconciler.js b/services/sync/modules/addonsreconciler.js
index a60fc8d56..2e838e885 100644
--- a/services/sync/modules/addonsreconciler.js
+++ b/services/sync/modules/addonsreconciler.js
@@ -17,7 +17,7 @@
 
 "use strict";
 
-var Cu = Components.utils;
+const Cu = Components.utils;
 
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/util.js");
@@ -218,12 +218,11 @@ AddonsReconciler.prototype = {
         }
 
         this._addons = json.addons;
-        for (let id in this._addons) {
-          let record = this._addons[id];
+        for each (let record in this._addons) {
           record.modified = new Date(record.modified);
         }
 
-        for (let [time, change, id] of json.changes) {
+        for each (let [time, change, id] in json.changes) {
           this._changes.push([new Date(time), change, id]);
         }
 
@@ -247,9 +246,9 @@ AddonsReconciler.prototype = {
     let file = path || DEFAULT_STATE_FILE;
     let state = {version: 1, addons: {}, changes: []};
 
-    for (let [id, record] of Object.entries(this._addons)) {
+    for (let [id, record] in Iterator(this._addons)) {
       state.addons[id] = {};
-      for (let [k, v] of Object.entries(record)) {
+      for (let [k, v] in Iterator(record)) {
         if (k == "modified") {
           state.addons[id][k] = v.getTime();
         }
@@ -259,7 +258,7 @@ AddonsReconciler.prototype = {
       }
     }
 
-    for (let [time, change, id] of this._changes) {
+    for each (let [time, change, id] in this._changes) {
       state.changes.push([time.getTime(), change, id]);
     }
 
@@ -351,14 +350,14 @@ AddonsReconciler.prototype = {
     AddonManager.getAllAddons(function (addons) {
       let ids = {};
 
-      for (let addon of addons) {
+      for each (let addon in addons) {
         ids[addon.id] = true;
         this.rectifyStateFromAddon(addon);
       }
 
       // Look for locally-defined add-ons that no longer exist and update their
      // record.
-      for (let [id, addon] of Object.entries(this._addons)) {
+      for (let [id, addon] in Iterator(this._addons)) {
        if (id in ids) {
          continue;
        }
@@ -374,7 +373,7 @@ AddonsReconciler.prototype = {
           }
 
           let installFound = false;
-          for (let install of installs) {
+          for each (let install in installs) {
             if (install.addon && install.addon.id == id &&
                 install.state == AddonManager.STATE_INSTALLED) {
@@ -417,7 +416,7 @@ AddonsReconciler.prototype = {
    *        Addon instance being updated.
   */
  rectifyStateFromAddon: function rectifyStateFromAddon(addon) {
-    this._log.debug(`Rectifying state for addon ${addon.name} (version=${addon.version}, id=${addon.id})`);
+    this._log.debug("Rectifying state for addon: " + addon.id);
     this._ensureStateLoaded();
 
     let id = addon.id;
@@ -434,8 +433,7 @@ AddonsReconciler.prototype = {
         modified: now,
         type: addon.type,
         scope: addon.scope,
-        foreignInstall: addon.foreignInstall,
-        isSyncable: addon.isSyncable,
+        foreignInstall: addon.foreignInstall
       };
       this._addons[id] = record;
       this._log.debug("Adding change because add-on not present locally: " +
@@ -445,7 +443,6 @@ AddonsReconciler.prototype = {
     }
 
     let record = this._addons[id];
-    record.isSyncable = addon.isSyncable;
 
     if (!record.installed) {
       // It is possible the record is marked as uninstalled because an
@@ -486,11 +483,12 @@ AddonsReconciler.prototype = {
       this._log.info("Change recorded for " + state.id);
       this._changes.push([date, change, state.id]);
 
-      for (let listener of this._listeners) {
+      for each (let listener in this._listeners) {
         try {
           listener.changeListener.call(listener, date, change, state);
         } catch (ex) {
-          this._log.warn("Exception calling change listener", ex);
+          this._log.warn("Exception calling change listener: " +
+                         Utils.exceptionStr(ex));
         }
       }
     },
@@ -556,8 +554,7 @@ AddonsReconciler.prototype = {
   * @return Object on success on null on failure.
   */
  getAddonStateFromSyncGUID: function getAddonStateFromSyncGUID(guid) {
-    for (let id in this.addons) {
-      let addon = this.addons[id];
+    for each (let addon in this.addons) {
      if (addon.guid == guid) {
        return addon;
      }
@@ -636,7 +633,7 @@ AddonsReconciler.prototype = {
        }
      }
    } catch (ex) {
-      this._log.warn("Exception", ex);
+      this._log.warn("Exception: " + Utils.exceptionStr(ex));
    }
  },
diff --git a/services/sync/modules/addonutils.js b/services/sync/modules/addonutils.js
index 95da6be0a..54b441b9e 100644
--- a/services/sync/modules/addonutils.js
+++ b/services/sync/modules/addonutils.js
@@ -6,7 +6,7 @@
 
 this.EXPORTED_SYMBOLS = ["AddonUtils"];
 
-var {interfaces: Ci, utils: Cu} = Components;
+const {interfaces: Ci, utils: Cu} = Components;
 
 Cu.import("resource://gre/modules/XPCOMUtils.jsm");
 Cu.import("resource://gre/modules/Log.jsm");
@@ -38,10 +38,21 @@ AddonUtilsInternal.prototype = {
    *        Function to be called with result of operation.
    */
   getInstallFromSearchResult:
-    function getInstallFromSearchResult(addon, cb) {
+    function getInstallFromSearchResult(addon, cb, requireSecureURI=true) {
 
     this._log.debug("Obtaining install for " + addon.id);
 
+    // Verify that the source URI uses TLS. We don't allow installs from
+    // insecure sources for security reasons. The Addon Manager ensures that
+    // cert validation, etc is performed.
+    if (requireSecureURI) {
+      let scheme = addon.sourceURI.scheme;
+      if (scheme != "https") {
+        cb(new Error("Insecure source URI scheme: " + scheme), addon.install);
+        return;
+      }
+    }
+
     // We should theoretically be able to obtain (and use) addon.install if
     // it is available. However, the addon.sourceURI rewriting won't be
     // reflected in the AddonInstall, so we can't use it. If we ever get rid
@@ -69,6 +80,8 @@ AddonUtilsInternal.prototype = {
    *   syncGUID - Sync GUID to use for the new add-on.
    *   enabled - Boolean indicating whether the add-on should be enabled upon
    *             install.
+   *   requireSecureURI - Boolean indicating whether to require a secure
+   *     URI to install from. This defaults to true.
   *
   * When complete it calls a callback with 2 arguments, error and result.
* @@ -92,6 +105,10 @@ AddonUtilsInternal.prototype = {
     function installAddonFromSearchResult(addon, options, cb) {
     this._log.info("Trying to install add-on from search result: " + addon.id);
 
+    if (options.requireSecureURI === undefined) {
+      options.requireSecureURI = true;
+    }
+
     this.getInstallFromSearchResult(addon, function onResult(error, install) {
       if (error) {
         cb(error, null);
@@ -147,10 +164,10 @@ AddonUtilsInternal.prototype = {
         install.install();
       }
       catch (ex) {
-        this._log.error("Error installing add-on", ex);
+        this._log.error("Error installing add-on: " + Utils.exceptionStr(ex));
         cb(ex, null);
       }
-    }.bind(this));
+    }.bind(this), options.requireSecureURI);
   },
 
   /**
@@ -231,7 +248,7 @@ AddonUtilsInternal.prototype = {
     }
 
     let ids = [];
-    for (let addon of installs) {
+    for each (let addon in installs) {
       ids.push(addon.id);
     }
 
@@ -244,7 +261,6 @@ AddonUtilsInternal.prototype = {
       installedIDs: [],
       installs: [],
       addons: [],
-      skipped: [],
       errors: []
     };
 
@@ -282,21 +298,15 @@ AddonUtilsInternal.prototype = {
     // server-side metrics aren't skewed (bug 708134). The server should
     // ideally send proper URLs, but this solution was deemed too
     // complicated at the time the functionality was implemented.
-    for (let addon of addons) {
-      // Find the specified options for this addon.
-      let options;
-      for (let install of installs) {
-        if (install.id == addon.id) {
-          options = install;
-          break;
-        }
-      }
-      if (!this.canInstallAddon(addon, options)) {
-        ourResult.skipped.push(addon.id);
+    for each (let addon in addons) {
+      // sourceURI presence isn't enforced by AddonRepository. So, we skip
+      // add-ons without a sourceURI.
+      if (!addon.sourceURI) {
+        this._log.info("Skipping install of add-on because missing " +
+                       "sourceURI: " + addon.id);
         continue;
       }
 
-      // We can go ahead and attempt to install it.
       toInstall.push(addon);
 
       // We should always be able to QI the nsIURI to nsIURL. If not, we
@@ -332,9 +342,9 @@ AddonUtilsInternal.prototype = {
 
     // Start all the installs asynchronously. They will report back to us
     // as they finish, eventually triggering the global callback.
-    for (let addon of toInstall) {
+    for each (let addon in toInstall) {
       let options = {};
-      for (let install of installs) {
+      for each (let install in installs) {
         if (install.id == addon.id) {
           options = install;
           break;
@@ -353,51 +363,9 @@ AddonUtilsInternal.prototype = {
   },
 
   /**
-   * Returns true if we are able to install the specified addon, false
-   * otherwise. It is expected that this will log the reason if it returns
-   * false.
-   *
-   * @param addon
-   *        (Addon) Add-on instance to check.
-   * @param options
-   *        (object) The options specified for this addon. See installAddons()
-   *        for the valid elements.
-   */
-  canInstallAddon(addon, options) {
-    // sourceURI presence isn't enforced by AddonRepository. So, we skip
-    // add-ons without a sourceURI.
-    if (!addon.sourceURI) {
-      this._log.info("Skipping install of add-on because missing " +
-                     "sourceURI: " + addon.id);
-      return false;
-    }
-    // Verify that the source URI uses TLS. We don't allow installs from
-    // insecure sources for security reasons. The Addon Manager ensures
-    // that cert validation etc is performed.
-    // (We should also consider just dropping this entirely and calling
-    // XPIProvider.isInstallAllowed, but that has additional semantics we might
-    // need to think through...)
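The getInstallFromSearchResult change above moves the TLS requirement onto the install path: unless a caller explicitly opts out, any add-on whose sourceURI is not https is rejected before an install is attempted. A hedged sketch of that gate in isolation; `addon` and `cb` are invented stand-ins rather than real AddonRepository objects:

function checkSourceURI(addon, cb, requireSecureURI = true) {
  // Mirrors the scheme check added above: refuse non-TLS sources unless
  // the caller explicitly disabled the requirement.
  if (requireSecureURI) {
    const scheme = addon.sourceURI.scheme;
    if (scheme !== "https") {
      cb(new Error("Insecure source URI scheme: " + scheme), null);
      return false;
    }
  }
  return true;
}

// installAddonFromSearchResult defaults options.requireSecureURI to true,
// so only an explicit `requireSecureURI: false` permits plain-HTTP sources:
checkSourceURI({ sourceURI: { scheme: "http" } },
               err => console.log(err.message)); // Insecure source URI scheme: http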
-    let requireSecureURI = true;
-    if (options && options.requireSecureURI !== undefined) {
-      requireSecureURI = options.requireSecureURI;
-    }
-
-    if (requireSecureURI) {
-      let scheme = addon.sourceURI.scheme;
-      if (scheme != "https") {
-        this._log.info(`Skipping install of add-on "${addon.id}" because sourceURI's scheme of "${scheme}" is not trusted`);
-        return false;
-      }
-    }
-    this._log.info(`Add-on "${addon.id}" is able to be installed`);
-    return true;
-  },
-
-
-  /**
    * Update the user disabled flag for an add-on.
    *
-   * The supplied callback will be called when the operation is
+   * The supplied callback will be called when the operation is
    * complete. If the new flag matches the existing or if the add-on
    * isn't currently active, the function will fire the callback
    * immediately. Else, the callback is invoked when the AddonManager
diff --git a/services/sync/modules/bookmark_validator.js b/services/sync/modules/bookmark_validator.js
deleted file mode 100644
index 2a94ba043..000000000
--- a/services/sync/modules/bookmark_validator.js
+++ /dev/null
@@ -1,784 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-"use strict";
-
-const Cu = Components.utils;
-
-Cu.import("resource://gre/modules/PlacesUtils.jsm");
-Cu.import("resource://gre/modules/PlacesSyncUtils.jsm");
-Cu.import("resource://gre/modules/Task.jsm");
-Cu.import("resource://gre/modules/XPCOMUtils.jsm");
-
-
-this.EXPORTED_SYMBOLS = ["BookmarkValidator", "BookmarkProblemData"];
-
-const LEFT_PANE_ROOT_ANNO = "PlacesOrganizer/OrganizerFolder";
-const LEFT_PANE_QUERY_ANNO = "PlacesOrganizer/OrganizerQuery";
-
-// Indicates if a local bookmark tree node should be excluded from syncing.
-function isNodeIgnored(treeNode) {
-  return treeNode.annos && treeNode.annos.some(anno => anno.name == LEFT_PANE_ROOT_ANNO ||
-                                                       anno.name == LEFT_PANE_QUERY_ANNO);
-}
-const BOOKMARK_VALIDATOR_VERSION = 1;
-
-/**
- * Result of bookmark validation. Contains the following fields which describe
- * server-side problems unless otherwise specified.
- *
- * - missingIDs (number): # of objects with missing ids
- * - duplicates (array of ids): ids seen more than once
- * - parentChildMismatches (array of {parent: parentid, child: childid}):
- *   instances where the child's parentid and the parent's children array
- *   do not match
- * - cycles (array of array of ids). List of cycles found in the server-side tree.
- * - clientCycles (array of array of ids). List of cycles found in the client-side tree.
- * - orphans (array of {id: string, parent: string}): List of nodes with
- *   either no parentid, or where the parent could not be found.
- * - missingChildren (array of {parent: id, child: id}):
- *   List of parent/children where the child id couldn't be found
- * - deletedChildren (array of { parent: id, child: id }):
- *   List of parent/children where child id was a deleted item (but still showed up
- *   in the children array)
- * - multipleParents (array of {child: id, parents: array of ids}):
- *   List of children that were part of multiple parent arrays
- * - deletedParents (array of ids) : List of records that aren't deleted but
- *   had deleted parents
- * - childrenOnNonFolder (array of ids): list of non-folders that still have
- *   children arrays
- * - duplicateChildren (array of ids): list of records who have the same
- *   child listed multiple times in their children array
- * - parentNotFolder (array of ids): list of records that have parents that
- *   aren't folders
- * - rootOnServer (boolean): true if the root came from the server
- * - badClientRoots (array of ids): Contains any client-side root ids where
- *   the root is missing or isn't a (direct) child of the places root.
- *
- * - clientMissing: Array of ids on the server missing from the client
- * - serverMissing: Array of ids on the client missing from the server
- * - serverDeleted: Array of ids on the client that the server had marked as deleted.
- * - serverUnexpected: Array of ids that appear on the server but shouldn't
- *   because the client attempts to never upload them.
- * - differences: Array of {id: string, differences: string array} recording
- *   the non-structural properties that are different between the client and server
- * - structuralDifferences: As above, but contains the items where the differences were
- *   structural, that is, they contained childGUIDs or parentid
- */
-class BookmarkProblemData {
-  constructor() {
-    this.rootOnServer = false;
-    this.missingIDs = 0;
-
-    this.duplicates = [];
-    this.parentChildMismatches = [];
-    this.cycles = [];
-    this.clientCycles = [];
-    this.orphans = [];
-    this.missingChildren = [];
-    this.deletedChildren = [];
-    this.multipleParents = [];
-    this.deletedParents = [];
-    this.childrenOnNonFolder = [];
-    this.duplicateChildren = [];
-    this.parentNotFolder = [];
-
-    this.badClientRoots = [];
-    this.clientMissing = [];
-    this.serverMissing = [];
-    this.serverDeleted = [];
-    this.serverUnexpected = [];
-    this.differences = [];
-    this.structuralDifferences = [];
-  }
-
-  /**
-   * Convert ("difference", [{ differences: ["tags", "name"] }, { differences: ["name"] }]) into
-   * [{ name: "difference:tags", count: 1}, { name: "difference:name", count: 2 }], etc.
-   */
-  _summarizeDifferences(prefix, diffs) {
-    let diffCounts = new Map();
-    for (let { differences } of diffs) {
-      for (let type of differences) {
-        let name = prefix + ":" + type;
-        let count = diffCounts.get(name) || 0;
-        diffCounts.set(name, count + 1);
-      }
-    }
-    return [...diffCounts].map(([name, count]) => ({ name, count }));
-  }
-
-  /**
-   * Produce a list summarizing problems found. Each entry contains {name, count},
-   * where name is the field name for the problem, and count is the number of times
-   * the problem was encountered.
-   *
-   * Validation has failed unless all counts are 0.
-   *
-   * If the `full` argument is truthy, we also include information about which
-   * properties we saw structural differences in. Currently, this means either
-   * "sdiff:parentid" and "sdiff:childGUIDs" may be present.
- */ - getSummary(full) { - let result = [ - { name: "clientMissing", count: this.clientMissing.length }, - { name: "serverMissing", count: this.serverMissing.length }, - { name: "serverDeleted", count: this.serverDeleted.length }, - { name: "serverUnexpected", count: this.serverUnexpected.length }, - - { name: "structuralDifferences", count: this.structuralDifferences.length }, - { name: "differences", count: this.differences.length }, - - { name: "missingIDs", count: this.missingIDs }, - { name: "rootOnServer", count: this.rootOnServer ? 1 : 0 }, - - { name: "duplicates", count: this.duplicates.length }, - { name: "parentChildMismatches", count: this.parentChildMismatches.length }, - { name: "cycles", count: this.cycles.length }, - { name: "clientCycles", count: this.clientCycles.length }, - { name: "badClientRoots", count: this.badClientRoots.length }, - { name: "orphans", count: this.orphans.length }, - { name: "missingChildren", count: this.missingChildren.length }, - { name: "deletedChildren", count: this.deletedChildren.length }, - { name: "multipleParents", count: this.multipleParents.length }, - { name: "deletedParents", count: this.deletedParents.length }, - { name: "childrenOnNonFolder", count: this.childrenOnNonFolder.length }, - { name: "duplicateChildren", count: this.duplicateChildren.length }, - { name: "parentNotFolder", count: this.parentNotFolder.length }, - ]; - if (full) { - let structural = this._summarizeDifferences("sdiff", this.structuralDifferences); - result.push.apply(result, structural); - } - return result; - } -} - -// Defined lazily to avoid initializing PlacesUtils.bookmarks too soon. -XPCOMUtils.defineLazyGetter(this, "SYNCED_ROOTS", () => [ - PlacesUtils.bookmarks.menuGuid, - PlacesUtils.bookmarks.toolbarGuid, - PlacesUtils.bookmarks.unfiledGuid, - PlacesUtils.bookmarks.mobileGuid, -]); - -class BookmarkValidator { - - _followQueries(recordMap) { - for (let [guid, entry] of recordMap) { - if (entry.type !== "query" && (!entry.bmkUri || !entry.bmkUri.startsWith("place:"))) { - continue; - } - // Might be worth trying to parse the place: query instead so that this - // works "automatically" with things like aboutsync. - let queryNodeParent = PlacesUtils.getFolderContents(entry, false, true); - if (!queryNodeParent || !queryNodeParent.root.hasChildren) { - continue; - } - queryNodeParent = queryNodeParent.root; - let queryNode = null; - let numSiblings = 0; - let containerWasOpen = queryNodeParent.containerOpen; - queryNodeParent.containerOpen = true; - try { - try { - numSiblings = queryNodeParent.childCount; - } catch (e) { - // This throws when we can't actually get the children. This is the - // case for history containers, tag queries, ... - continue; - } - for (let i = 0; i < numSiblings && !queryNode; ++i) { - let child = queryNodeParent.getChild(i); - if (child && child.bookmarkGuid && child.bookmarkGuid === guid) { - queryNode = child; - } - } - } finally { - queryNodeParent.containerOpen = containerWasOpen; - } - if (!queryNode) { - continue; - } - - let concreteId = PlacesUtils.getConcreteItemGuid(queryNode); - if (!concreteId) { - continue; - } - let concreteItem = recordMap.get(concreteId); - if (!concreteItem) { - continue; - } - entry.concrete = concreteItem; - } - } - - createClientRecordsFromTree(clientTree) { - // Iterate over the treeNode, converting it to something more similar to what - // the server stores. 
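The _summarizeDifferences helper removed above folds per-record difference lists into {name, count} pairs, exactly as its doc comment illustrates. A standalone, runnable restatement with the same sample input:

function summarizeDifferences(prefix, diffs) {
  const diffCounts = new Map();
  for (const { differences } of diffs) {
    for (const type of differences) {
      const name = prefix + ":" + type;
      diffCounts.set(name, (diffCounts.get(name) || 0) + 1);
    }
  }
  return [...diffCounts].map(([name, count]) => ({ name, count }));
}

console.log(summarizeDifferences("difference", [
  { differences: ["tags", "name"] },
  { differences: ["name"] },
]));
// [ { name: "difference:tags", count: 1 },
//   { name: "difference:name", count: 2 } ]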
- let records = []; - let recordsByGuid = new Map(); - let syncedRoots = SYNCED_ROOTS; - function traverse(treeNode, synced) { - if (!synced) { - synced = syncedRoots.includes(treeNode.guid); - } else if (isNodeIgnored(treeNode)) { - synced = false; - } - let guid = PlacesSyncUtils.bookmarks.guidToSyncId(treeNode.guid); - let itemType = 'item'; - treeNode.ignored = !synced; - treeNode.id = guid; - switch (treeNode.type) { - case PlacesUtils.TYPE_X_MOZ_PLACE: - let query = null; - if (treeNode.annos && treeNode.uri.startsWith("place:")) { - query = treeNode.annos.find(({name}) => - name === PlacesSyncUtils.bookmarks.SMART_BOOKMARKS_ANNO); - } - if (query && query.value) { - itemType = 'query'; - } else { - itemType = 'bookmark'; - } - break; - case PlacesUtils.TYPE_X_MOZ_PLACE_CONTAINER: - let isLivemark = false; - if (treeNode.annos) { - for (let anno of treeNode.annos) { - if (anno.name === PlacesUtils.LMANNO_FEEDURI) { - isLivemark = true; - treeNode.feedUri = anno.value; - } else if (anno.name === PlacesUtils.LMANNO_SITEURI) { - isLivemark = true; - treeNode.siteUri = anno.value; - } - } - } - itemType = isLivemark ? "livemark" : "folder"; - break; - case PlacesUtils.TYPE_X_MOZ_PLACE_SEPARATOR: - itemType = 'separator'; - break; - } - - if (treeNode.tags) { - treeNode.tags = treeNode.tags.split(","); - } else { - treeNode.tags = []; - } - treeNode.type = itemType; - treeNode.pos = treeNode.index; - treeNode.bmkUri = treeNode.uri; - records.push(treeNode); - // We want to use the "real" guid here. - recordsByGuid.set(treeNode.guid, treeNode); - if (treeNode.type === 'folder') { - treeNode.childGUIDs = []; - if (!treeNode.children) { - treeNode.children = []; - } - for (let child of treeNode.children) { - traverse(child, synced); - child.parent = treeNode; - child.parentid = guid; - treeNode.childGUIDs.push(child.guid); - } - } - } - traverse(clientTree, false); - clientTree.id = 'places'; - this._followQueries(recordsByGuid); - return records; - } - - /** - * Process the server-side list. Mainly this builds the records into a tree, - * but it also records information about problems, and produces arrays of the - * deleted and non-deleted nodes. - * - * Returns an object containing: - * - records:Array of non-deleted records. Each record contains the following - * properties - * - childGUIDs (array of strings, only present if type is 'folder'): the - * list of child GUIDs stored on the server. - * - children (array of records, only present if type is 'folder'): - * each record has these same properties. This may differ in content - * from what you may expect from the childGUIDs list, as it won't - * contain any records that could not be found. - * - parent (record): The parent to this record. - * - Unchanged properties send down from the server: id, title, type, - * parentName, parentid, bmkURI, keyword, tags, pos, queryId, loadInSidebar - * - root: Root of the server-side bookmark tree. Has the same properties as - * above. - * - deletedRecords: As above, but only contains items that the server sent - * where it also sent indication that the item should be deleted. - * - problemData: a BookmarkProblemData object, with the caveat that - * the fields describing client/server relationship will not have been filled - * out yet. 
- */
-  inspectServerRecords(serverRecords) {
-    let deletedItemIds = new Set();
-    let idToRecord = new Map();
-    let deletedRecords = [];
-
-    let folders = [];
-    let problems = [];
-
-    let problemData = new BookmarkProblemData();
-
-    let resultRecords = [];
-
-    for (let record of serverRecords) {
-      if (!record.id) {
-        ++problemData.missingIDs;
-        continue;
-      }
-      if (record.deleted) {
-        deletedItemIds.add(record.id);
-      } else {
-        if (idToRecord.has(record.id)) {
-          problemData.duplicates.push(record.id);
-          continue;
-        }
-      }
-      idToRecord.set(record.id, record);
-
-      if (record.children) {
-        if (record.type !== "folder") {
-          // Due to implementation details in engines/bookmarks.js, (Livemark
-          // subclassing BookmarkFolder) Livemarks will have a children array,
-          // but it should still be empty.
-          if (!record.children.length) {
-            continue;
-          }
-          // Otherwise we mark it as an error and still try to resolve the children
-          problemData.childrenOnNonFolder.push(record.id);
-        }
-        folders.push(record);
-
-        if (new Set(record.children).size !== record.children.length) {
-          problemData.duplicateChildren.push(record.id)
-        }
-
-        // The children array stores special guids as their local guid values,
-        // e.g. 'menu________' instead of 'menu', but all other parts of the
-        // serverside bookmark info stores it as the special value ('menu').
-        record.childGUIDs = record.children;
-        record.children = record.children.map(childID => {
-          return PlacesSyncUtils.bookmarks.guidToSyncId(childID);
-        });
-      }
-    }
-
-    for (let deletedId of deletedItemIds) {
-      let record = idToRecord.get(deletedId);
-      if (record && !record.isDeleted) {
-        deletedRecords.push(record);
-        record.isDeleted = true;
-      }
-    }
-
-    let root = idToRecord.get('places');
-
-    if (!root) {
-      // Fabricate a root. We want to remember that it's fake so that we can
-      // avoid complaining about stuff like it missing its childGUIDs later.
-      root = { id: 'places', children: [], type: 'folder', title: '', fake: true };
-      resultRecords.push(root);
-      idToRecord.set('places', root);
-    } else {
-      problemData.rootOnServer = true;
-    }
-
-    // Build the tree, find orphans, and record most problems having to do with
-    // the tree structure.
-    for (let [id, record] of idToRecord) {
-      if (record === root) {
-        continue;
-      }
-
-      if (record.isDeleted) {
-        continue;
-      }
-
-      let parentID = record.parentid;
-      if (!parentID) {
-        problemData.orphans.push({id: record.id, parent: parentID});
-        continue;
-      }
-
-      let parent = idToRecord.get(parentID);
-      if (!parent) {
-        problemData.orphans.push({id: record.id, parent: parentID});
-        continue;
-      }
-
-      if (parent.type !== 'folder') {
-        problemData.parentNotFolder.push(record.id);
-        if (!parent.children) {
-          parent.children = [];
-        }
-        if (!parent.childGUIDs) {
-          parent.childGUIDs = [];
-        }
-      }
-
-      if (!record.isDeleted) {
-        resultRecords.push(record);
-      }
-
-      record.parent = parent;
-      if (parent !== root || problemData.rootOnServer) {
-        let childIndex = parent.children.indexOf(id);
-        if (childIndex < 0) {
-          problemData.parentChildMismatches.push({parent: parent.id, child: record.id});
-        } else {
-          parent.children[childIndex] = record;
-        }
-      } else {
-        parent.children.push(record);
-      }
-
-      if (parent.isDeleted && !record.isDeleted) {
-        problemData.deletedParents.push(record.id);
-      }
-
-      // We used to check if the parentName on the server matches the actual
-      // local parent name, but given this is used only for de-duping a record
-      // the first time it is seen and expensive to keep up-to-date, we decided
-      // to just stop recording it.
See bug 1276969 for more.
-    }
-
-    // Check that we aren't missing any children.
-    for (let folder of folders) {
-      folder.unfilteredChildren = folder.children;
-      folder.children = [];
-      for (let ci = 0; ci < folder.unfilteredChildren.length; ++ci) {
-        let child = folder.unfilteredChildren[ci];
-        let childObject;
-        if (typeof child == "string") {
-          // This can happen when the parent refers to a child that has a different
-          // parentid, or if it refers to a missing or deleted child. It shouldn't
-          // be possible with totally valid bookmarks.
-          childObject = idToRecord.get(child);
-          if (!childObject) {
-            problemData.missingChildren.push({parent: folder.id, child});
-          } else {
-            folder.unfilteredChildren[ci] = childObject;
-            if (childObject.isDeleted) {
-              problemData.deletedChildren.push({ parent: folder.id, child });
-            }
-          }
-        } else {
-          childObject = child;
-        }
-
-        if (!childObject) {
-          continue;
-        }
-
-        if (childObject.parentid === folder.id) {
-          folder.children.push(childObject);
-          continue;
-        }
-
-        // The child is very probably in multiple `children` arrays --
-        // see if we already have a problem record about it.
-        let currentProblemRecord = problemData.multipleParents.find(pr =>
-          pr.child === child);
-
-        if (currentProblemRecord) {
-          currentProblemRecord.parents.push(folder.id);
-          continue;
-        }
-
-        let otherParent = idToRecord.get(childObject.parentid);
-        // it's really an ... orphan ... sort of.
-        if (!otherParent) {
-          // if we never end up adding to this parent's list, we filter it out after this loop.
-          problemData.multipleParents.push({
-            child,
-            parents: [folder.id]
-          });
-          if (!problemData.orphans.some(r => r.id === child)) {
-            problemData.orphans.push({
-              id: child,
-              parent: childObject.parentid
-            });
-          }
-          continue;
-        }
-
-        if (otherParent.isDeleted) {
-          if (!problemData.deletedParents.includes(child)) {
-            problemData.deletedParents.push(child);
-          }
-          continue;
-        }
-
-        if (otherParent.childGUIDs && !otherParent.childGUIDs.includes(child)) {
-          if (!problemData.parentChildMismatches.some(r => r.child === child)) {
-            // Might not be possible to get here.
-            problemData.parentChildMismatches.push({ child, parent: folder.id });
-          }
-        }
-
-        problemData.multipleParents.push({
-          child,
-          parents: [childObject.parentid, folder.id]
-        });
-      }
-    }
-    problemData.multipleParents = problemData.multipleParents.filter(record =>
-      record.parents.length >= 2);
-
-    problemData.cycles = this._detectCycles(resultRecords);
-
-    return {
-      deletedRecords,
-      records: resultRecords,
-      problemData,
-      root,
-    };
-  }
-
-  // helper for inspectServerRecords
-  _detectCycles(records) {
-    // currentPath and pathLookup contain the same data. pathLookup is faster to
-    // query, but currentPath gives us the order of traversal that we need in
-    // order to report the members of the cycles.
-    let pathLookup = new Set();
-    let currentPath = [];
-    let cycles = [];
-    let seenEver = new Set();
-    const traverse = node => {
-      if (pathLookup.has(node)) {
-        let cycleStart = currentPath.lastIndexOf(node);
-        let cyclePath = currentPath.slice(cycleStart).map(n => n.id);
-        cycles.push(cyclePath);
-        return;
-      } else if (seenEver.has(node)) {
-        // If we're checking the server, this is a problem, but it should already be reported.
-        // On the client, this could happen due to including `node.concrete` in the child list.
-        return;
-      }
-      seenEver.add(node);
-      let children = node.children || [];
-      if (node.concrete) {
-        children.push(node.concrete);
-      }
-      if (children) {
-        pathLookup.add(node);
-        currentPath.push(node);
-        for (let child of children) {
-          traverse(child);
-        }
-        currentPath.pop();
-        pathLookup.delete(node);
-      }
-    };
-    for (let record of records) {
-      if (!seenEver.has(record)) {
-        traverse(record);
-      }
-    }
-
-    return cycles;
-  }
-
-  // Perform client-side sanity checking that doesn't involve server data
-  _validateClient(problemData, clientRecords) {
-    problemData.clientCycles = this._detectCycles(clientRecords);
-    for (let rootGUID of SYNCED_ROOTS) {
-      let record = clientRecords.find(record =>
-        record.guid === rootGUID);
-      if (!record || record.parentid !== "places") {
-        problemData.badClientRoots.push(rootGUID);
-      }
-    }
-  }
-
-  /**
-   * Compare the list of server records with the client tree.
-   *
-   * Returns the same data as described in the inspectServerRecords comment,
-   * with the following additional fields.
-   * - clientRecords: an array of client records in a similar format to
-   *   the .records (ie, server records) entry.
-   * - problemData is the same as for inspectServerRecords, except all properties
-   *   will be filled out.
-   */
-  compareServerWithClient(serverRecords, clientTree) {
-
-    let clientRecords = this.createClientRecordsFromTree(clientTree);
-    let inspectionInfo = this.inspectServerRecords(serverRecords);
-    inspectionInfo.clientRecords = clientRecords;
-
-    // Mainly do this to remove deleted items and normalize child guids.
-    serverRecords = inspectionInfo.records;
-    let problemData = inspectionInfo.problemData;
-
-    this._validateClient(problemData, clientRecords);
-
-    let matches = [];
-
-    let allRecords = new Map();
-    let serverDeletedLookup = new Set(inspectionInfo.deletedRecords.map(r => r.id));
-
-    for (let sr of serverRecords) {
-      if (sr.fake) {
-        continue;
-      }
-      allRecords.set(sr.id, {client: null, server: sr});
-    }
-
-    for (let cr of clientRecords) {
-      let unified = allRecords.get(cr.id);
-      if (!unified) {
-        allRecords.set(cr.id, {client: cr, server: null});
-      } else {
-        unified.client = cr;
-      }
-    }
-
-
-    for (let [id, {client, server}] of allRecords) {
-      if (!client && server) {
-        problemData.clientMissing.push(id);
-        continue;
-      }
-      if (!server && client) {
-        if (serverDeletedLookup.has(id)) {
-          problemData.serverDeleted.push(id);
-        } else if (!client.ignored && client.id != "places") {
-          problemData.serverMissing.push(id);
-        }
-        continue;
-      }
-      if (server && client && client.ignored) {
-        problemData.serverUnexpected.push(id);
-      }
-      let differences = [];
-      let structuralDifferences = [];
-
-      // Don't bother comparing titles of roots. It's okay if locally it's
-      // "Mobile Bookmarks", but the server thinks it's "mobile".
-      // TODO: We probably should be handling other localized bookmarks (e.g.
-      // default bookmarks) here as well, see bug 1316041.
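_detectCycles above is a depth-first traversal that keeps the current path twice: a Set for O(1) "am I revisiting this node?" checks, and an array to report the cycle's members in traversal order. A self-contained sketch of the same approach, with invented {id, children} nodes:

function detectCycles(records) {
  const pathLookup = new Set(); // fast membership test for the current path
  const currentPath = [];       // ordered path, used to report cycle members
  const seenEver = new Set();
  const cycles = [];
  const traverse = node => {
    if (pathLookup.has(node)) {
      cycles.push(currentPath.slice(currentPath.lastIndexOf(node)).map(n => n.id));
      return;
    }
    if (seenEver.has(node)) {
      return; // reached via another parent; not a cycle by itself
    }
    seenEver.add(node);
    pathLookup.add(node);
    currentPath.push(node);
    for (const child of node.children || []) {
      traverse(child);
    }
    currentPath.pop();
    pathLookup.delete(node);
  };
  for (const record of records) {
    if (!seenEver.has(record)) {
      traverse(record);
    }
  }
  return cycles;
}

// A two-node loop reports a single cycle:
const a = { id: "a", children: [] };
const b = { id: "b", children: [a] };
a.children.push(b);
console.log(detectCycles([a, b])); // [ [ "a", "b" ] ]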
- if (!SYNCED_ROOTS.includes(client.guid)) { - // We want to treat undefined, null and an empty string as identical - if ((client.title || "") !== (server.title || "")) { - differences.push("title"); - } - } - - if (client.parentid || server.parentid) { - if (client.parentid !== server.parentid) { - structuralDifferences.push('parentid'); - } - } - - if (client.tags || server.tags) { - let cl = client.tags || []; - let sl = server.tags || []; - if (cl.length !== sl.length || !cl.every((tag, i) => sl.indexOf(tag) >= 0)) { - differences.push('tags'); - } - } - - let sameType = client.type === server.type; - if (!sameType) { - if (server.type === "query" && client.type === "bookmark" && client.bmkUri.startsWith("place:")) { - sameType = true; - } - } - - - if (!sameType) { - differences.push('type'); - } else { - switch (server.type) { - case 'bookmark': - case 'query': - if (server.bmkUri !== client.bmkUri) { - differences.push('bmkUri'); - } - break; - case "livemark": - if (server.feedUri != client.feedUri) { - differences.push("feedUri"); - } - if (server.siteUri != client.siteUri) { - differences.push("siteUri"); - } - break; - case 'folder': - if (server.id === 'places' && !problemData.rootOnServer) { - // It's the fabricated places root. It won't have the GUIDs, but - // it doesn't matter. - break; - } - if (client.childGUIDs || server.childGUIDs) { - let cl = client.childGUIDs || []; - let sl = server.childGUIDs || []; - if (cl.length !== sl.length || !cl.every((id, i) => sl[i] === id)) { - structuralDifferences.push('childGUIDs'); - } - } - break; - } - } - - if (differences.length) { - problemData.differences.push({id, differences}); - } - if (structuralDifferences.length) { - problemData.structuralDifferences.push({ id, differences: structuralDifferences }); - } - } - return inspectionInfo; - } - - _getServerState(engine) { - let collection = engine.itemSource(); - let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name); - collection.full = true; - let items = []; - collection.recordHandler = function(item) { - item.decrypt(collectionKey); - items.push(item.cleartext); - }; - let resp = collection.getBatched(); - if (!resp.success) { - throw resp; - } - return items; - } - - validate(engine) { - let self = this; - return Task.spawn(function*() { - let start = Date.now(); - let clientTree = yield PlacesUtils.promiseBookmarksTree("", { - includeItemIds: true - }); - let serverState = self._getServerState(engine); - let serverRecordCount = serverState.length; - let result = self.compareServerWithClient(serverState, clientTree); - let end = Date.now(); - let duration = end-start; - return { - duration, - version: self.version, - problems: result.problemData, - recordCount: serverRecordCount - }; - }); - } - -}; - -BookmarkValidator.prototype.version = BOOKMARK_VALIDATOR_VERSION; - diff --git a/services/sync/modules/browserid_identity.js b/services/sync/modules/browserid_identity.js index db3821518..bc8ea6b30 100644 --- a/services/sync/modules/browserid_identity.js +++ b/services/sync/modules/browserid_identity.js @@ -4,9 +4,9 @@ "use strict"; -this.EXPORTED_SYMBOLS = ["BrowserIDManager", "AuthenticationError"]; +this.EXPORTED_SYMBOLS = ["BrowserIDManager"]; -var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components; +const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components; Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://services-common/async.js"); @@ -39,13 +39,12 @@ XPCOMUtils.defineLazyGetter(this, 'log', 
function() { }); // FxAccountsCommon.js doesn't use a "namespace", so create one here. -var fxAccountsCommon = {}; +let fxAccountsCommon = {}; Cu.import("resource://gre/modules/FxAccountsCommon.js", fxAccountsCommon); const OBSERVER_TOPICS = [ fxAccountsCommon.ONLOGIN_NOTIFICATION, fxAccountsCommon.ONLOGOUT_NOTIFICATION, - fxAccountsCommon.ON_ACCOUNT_STATE_CHANGE_NOTIFICATION, ]; const PREF_SYNC_SHOW_CUSTOMIZATION = "services.sync-setup.ui.showCustomizationDialog"; @@ -66,9 +65,8 @@ function deriveKeyBundle(kB) { some other error object (which should do the right thing when toString() is called on it) */ -function AuthenticationError(details, source) { +function AuthenticationError(details) { this.details = details; - this.source = source; } AuthenticationError.prototype = { @@ -106,6 +104,12 @@ this.BrowserIDManager.prototype = { // we don't consider the lack of a keybundle as a failure state. _shouldHaveSyncKeyBundle: false, + get readyToAuthenticate() { + // We are finished initializing when we *should* have a sync key bundle, + // although we might not actually have one due to auth failures etc. + return this._shouldHaveSyncKeyBundle; + }, + get needsCustomization() { try { return Services.prefs.getBoolPref(PREF_SYNC_SHOW_CUSTOMIZATION); @@ -114,34 +118,11 @@ this.BrowserIDManager.prototype = { } }, - hashedUID() { - if (!this._token) { - throw new Error("hashedUID: Don't have token"); - } - return this._token.hashed_fxa_uid - }, - - deviceID() { - return this._signedInUser && this._signedInUser.deviceId; - }, - initialize: function() { for (let topic of OBSERVER_TOPICS) { Services.obs.addObserver(this, topic, false); } - // and a background fetch of account data just so we can set this.account, - // so we have a username available before we've actually done a login. - // XXX - this is actually a hack just for tests and really shouldn't be - // necessary. Also, you'd think it would be safe to allow this.account to - // be set to null when there's no user logged in, but argue with the test - // suite, not with me :) - this._fxaService.getSignedInUser().then(accountData => { - if (accountData) { - this.account = accountData.email; - } - }).catch(err => { - // As above, this is only for tests so it is safe to ignore. - }); + return this.initializeWithCurrentIdentity(); }, /** @@ -149,7 +130,7 @@ this.BrowserIDManager.prototype = { * the user is logged in, or is rejected if the login attempt has failed. */ ensureLoggedIn: function() { - if (!this._shouldHaveSyncKeyBundle && this.whenReadyToAuthenticate) { + if (!this._shouldHaveSyncKeyBundle) { // We are already in the process of logging in. return this.whenReadyToAuthenticate.promise; } @@ -163,7 +144,7 @@ this.BrowserIDManager.prototype = { // re-entering of credentials by the user is necessary we don't take any // further action - an observer will fire when the user does that. if (Weave.Status.login == LOGIN_FAILED_LOGIN_REJECTED) { - return Promise.reject(new Error("User needs to re-authenticate")); + return Promise.reject(); } // So - we've a previous auth problem and aren't currently attempting to @@ -179,6 +160,7 @@ this.BrowserIDManager.prototype = { } this.resetCredentials(); this._signedInUser = null; + return Promise.resolve(); }, offerSyncOptions: function () { @@ -202,7 +184,7 @@ this.BrowserIDManager.prototype = { // Reset the world before we do anything async. 
this.whenReadyToAuthenticate = Promise.defer(); - this.whenReadyToAuthenticate.promise.catch(err => { + this.whenReadyToAuthenticate.promise.then(null, (err) => { this._log.error("Could not authenticate", err); }); @@ -258,14 +240,14 @@ this.BrowserIDManager.prototype = { Services.obs.notifyObservers(null, "weave:service:setup-complete", null); Weave.Utils.nextTick(Weave.Service.sync, Weave.Service); } - }).catch(authErr => { - // report what failed... - this._log.error("Background fetch for key bundle failed", authErr); + }).then(null, err => { this._shouldHaveSyncKeyBundle = true; // but we probably don't have one... - this.whenReadyToAuthenticate.reject(authErr); + this.whenReadyToAuthenticate.reject(err); + // report what failed... + this._log.error("Background fetch for key bundle failed", err); }); // and we are done - the fetch continues on in the background... - }).catch(err => { + }).then(null, err => { this._log.error("Processing logged in account", err); }); }, @@ -301,8 +283,7 @@ this.BrowserIDManager.prototype = { // reauth with the server - in that case we will also get here, but // should have the same identity. // initializeWithCurrentIdentity will throw and log if these constraints - // aren't met (indirectly, via _updateSignedInUser()), so just go ahead - // and do the init. + // aren't met, so just go ahead and do the init. this.initializeWithCurrentIdentity(true); break; @@ -311,13 +292,6 @@ this.BrowserIDManager.prototype = { // startOver will cause this instance to be thrown away, so there's // nothing else to do. break; - - case fxAccountsCommon.ON_ACCOUNT_STATE_CHANGE_NOTIFICATION: - // throw away token and fetch a new one - this.resetCredentials(); - this._ensureValidToken().catch(err => - this._log.error("Error while fetching a new token", err)); - break; } }, @@ -413,9 +387,6 @@ this.BrowserIDManager.prototype = { resetCredentials: function() { this.resetSyncKey(); this._token = null; - // The cluster URL comes from the token, so resetting it to empty will - // force Sync to not accidentally use a value from an earlier token. - Weave.Service.clusterURL = null; }, /** @@ -503,12 +474,7 @@ this.BrowserIDManager.prototype = { // If we still can't get keys it probably means the user authenticated // without unlocking the MP or cleared the saved logins, so we've now // lost them - the user will need to reauth before continuing. - let result; - if (this._canFetchKeys()) { - result = STATUS_OK; - } else { - result = LOGIN_FAILED_LOGIN_REJECTED; - } + let result = this._canFetchKeys() ? STATUS_OK : LOGIN_FAILED_LOGIN_REJECTED; log.debug("unlockAndVerifyAuthState re-fetched credentials and is returning", result); return result; } @@ -540,27 +506,14 @@ this.BrowserIDManager.prototype = { return true; }, - // Get our tokenServerURL - a private helper. Returns a string. - get _tokenServerUrl() { - // We used to support services.sync.tokenServerURI but this was a - // pain-point for people using non-default servers as Sync may auto-reset - // all services.sync prefs. So if that still exists, it wins. - let url = Svc.Prefs.get("tokenServerURI"); // Svc.Prefs "root" is services.sync - if (!url) { - url = Services.prefs.getCharPref("identity.sync.tokenserver.uri"); - } - while (url.endsWith("/")) { // trailing slashes cause problems... - url = url.slice(0, -1); - } - return url; - }, - // Refresh the sync token for our user. Returns a promise that resolves // with a token (which may be null in one sad edge-case), or rejects with an // error. 
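One behavioral nuance in this hunk: the removed _tokenServerUrl getter stripped every trailing slash in a while loop, whereas the replacement code in _fetchTokenForUser below trims at most one. A small sketch of the difference, with an invented URL:

function stripAllTrailingSlashes(url) {
  while (url.endsWith("/")) { // removed getter: loops until none remain
    url = url.slice(0, -1);
  }
  return url;
}

function stripOneTrailingSlash(url) {
  if (url.endsWith("/")) {    // replacement code: removes at most one
    url = url.slice(0, -1);
  }
  return url;
}

console.log(stripAllTrailingSlashes("https://token.example.com/1.0//")); // .../1.0
console.log(stripOneTrailingSlash("https://token.example.com/1.0//"));   // .../1.0/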
_fetchTokenForUser: function() { - // tokenServerURI is mis-named - convention is uri means nsISomething... - let tokenServerURI = this._tokenServerUrl; + let tokenServerURI = Svc.Prefs.get("tokenServerURI"); + if (tokenServerURI.endsWith("/")) { // trailing slashes cause problems... + tokenServerURI = tokenServerURI.slice(0, -1); + } let log = this._log; let client = this._tokenServerClient; let fxa = this._fxaService; @@ -589,7 +542,7 @@ this.BrowserIDManager.prototype = { ); } - let getToken = assertion => { + let getToken = (tokenServerURI, assertion) => { log.debug("Getting a token"); let deferred = Promise.defer(); let cb = function (err, token) { @@ -617,18 +570,7 @@ this.BrowserIDManager.prototype = { return fxa.whenVerified(this._signedInUser) .then(() => maybeFetchKeys()) .then(() => getAssertion()) - .then(assertion => getToken(assertion)) - .catch(err => { - // If we get a 401 fetching the token it may be that our certificate - // needs to be regenerated. - if (!err.response || err.response.status !== 401) { - return Promise.reject(err); - } - log.warn("Token server returned 401, refreshing certificate and retrying token fetch"); - return fxa.invalidateCertificate() - .then(() => getAssertion()) - .then(assertion => getToken(assertion)) - }) + .then(assertion => getToken(tokenServerURI, assertion)) .then(token => { // TODO: Make it be only 80% of the duration, so refresh the token // before it actually expires. This is to avoid sync storage errors @@ -640,18 +582,15 @@ this.BrowserIDManager.prototype = { } return token; }) - .catch(err => { + .then(null, err => { // TODO: unify these errors - we need to handle errors thrown by // both tokenserverclient and hawkclient. // A tokenserver error thrown based on a bad response. if (err.response && err.response.status === 401) { - err = new AuthenticationError(err, "tokenserver"); + err = new AuthenticationError(err); // A hawkclient error. } else if (err.code && err.code === 401) { - err = new AuthenticationError(err, "hawkclient"); - // An FxAccounts.jsm error. - } else if (err.message == fxAccountsCommon.ERROR_AUTH_ERROR) { - err = new AuthenticationError(err, "fxaccounts"); + err = new AuthenticationError(err); } // TODO: write tests to make sure that different auth error cases are handled here @@ -673,6 +612,7 @@ this.BrowserIDManager.prototype = { // that there is no authentication dance still under way. this._shouldHaveSyncKeyBundle = true; Weave.Status.login = this._authFailureReason; + Services.obs.notifyObservers(null, "weave:ui:login:error", null); throw err; }); }, @@ -684,19 +624,12 @@ this.BrowserIDManager.prototype = { this._log.debug("_ensureValidToken already has one"); return Promise.resolve(); } - const notifyStateChanged = - () => Services.obs.notifyObservers(null, "weave:service:login:change", null); // reset this._token as a safety net to reduce the possibility of us // repeatedly attempting to use an invalid token if _fetchTokenForUser throws. this._token = null; return this._fetchTokenForUser().then( token => { this._token = token; - notifyStateChanged(); - }, - error => { - notifyStateChanged(); - throw error } ); }, @@ -719,16 +652,9 @@ this.BrowserIDManager.prototype = { _getAuthenticationHeader: function(httpObject, method) { let cb = Async.makeSpinningCallback(); this._ensureValidToken().then(cb, cb); - // Note that in failure states we return null, causing the request to be - // made without authorization headers, thereby presumably causing a 401, - // which causes Sync to log out. 
If we throw, this may not happen as
-    // expected.
     try {
       cb.wait();
     } catch (ex) {
-      if (Async.isShutdownException(ex)) {
-        throw ex;
-      }
       this._log.error("Failed to fetch a token for authentication", ex);
       return null;
     }
@@ -764,17 +690,8 @@ this.BrowserIDManager.prototype = {
 
   createClusterManager: function(service) {
     return new BrowserIDClusterManager(service);
-  },
+  }
 
-  // Tell Sync what the login status should be if it saw a 401 fetching
-  // info/collections as part of login verification (typically immediately
-  // after login.)
-  // In our case, it almost certainly means a transient error fetching a token
-  // (and hitting this will cause us to logout, which will correctly handle an
-  // authoritative login issue.)
-  loginStatusFromVerification404() {
-    return LOGIN_FAILED_NETWORK_ERROR;
-  },
 };
 
 /* An implementation of the ClusterManager for this identity
@@ -820,7 +737,7 @@ BrowserIDClusterManager.prototype = {
       // it's likely a 401 was received using the existing token - in which
       // case we just discard the existing token and fetch a new one.
       if (this.service.clusterURL) {
-        log.debug("_findCluster has a pre-existing clusterURL, so discarding the current token");
+        log.debug("_findCluster found existing clusterURL, so discarding the current token");
         this.identity._token = null;
       }
       return this.identity._ensureValidToken();
diff --git a/services/sync/modules/collection_validator.js b/services/sync/modules/collection_validator.js
deleted file mode 100644
index 41141bba3..000000000
--- a/services/sync/modules/collection_validator.js
+++ /dev/null
@@ -1,204 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-"use strict";
-
-const Cu = Components.utils;
-
-Cu.import("resource://services-sync/record.js");
-Cu.import("resource://services-sync/main.js");
-
-this.EXPORTED_SYMBOLS = ["CollectionValidator", "CollectionProblemData"];
-
-class CollectionProblemData {
-  constructor() {
-    this.missingIDs = 0;
-    this.duplicates = [];
-    this.clientMissing = [];
-    this.serverMissing = [];
-    this.serverDeleted = [];
-    this.serverUnexpected = [];
-    this.differences = [];
-  }
-
-  /**
-   * Produce a list summarizing problems found. Each entry contains {name, count},
-   * where name is the field name for the problem, and count is the number of times
-   * the problem was encountered.
-   *
-   * Validation has failed unless all counts are 0.
-   */
-  getSummary() {
-    return [
-      { name: "clientMissing", count: this.clientMissing.length },
-      { name: "serverMissing", count: this.serverMissing.length },
-      { name: "serverDeleted", count: this.serverDeleted.length },
-      { name: "serverUnexpected", count: this.serverUnexpected.length },
-      { name: "differences", count: this.differences.length },
-      { name: "missingIDs", count: this.missingIDs },
-      { name: "duplicates", count: this.duplicates.length }
-    ];
-  }
-}
-
-class CollectionValidator {
-  // Construct a generic collection validator. This is intended to be called by
-  // subclasses.
-  // - name: Name of the engine
-  // - idProp: Property that identifies a record. That is, if a client and server
-  //   record have the same value for the idProp property, they should be
-  //   compared against each other.
- // - props: Array of properties that should be compared - constructor(name, idProp, props) { - this.name = name; - this.props = props; - this.idProp = idProp; - } - - // Should a custom ProblemData type be needed, return it here. - emptyProblemData() { - return new CollectionProblemData(); - } - - getServerItems(engine) { - let collection = engine.itemSource(); - let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name); - collection.full = true; - let items = []; - collection.recordHandler = function(item) { - item.decrypt(collectionKey); - items.push(item.cleartext); - }; - let resp = collection.getBatched(); - if (!resp.success) { - throw resp; - } - return items; - } - - // Should return a promise that resolves to an array of client items. - getClientItems() { - return Promise.reject("Must implement"); - } - - // Turn the client item into something that can be compared with the server item, - // and is also safe to mutate. - normalizeClientItem(item) { - return Cu.cloneInto(item, {}); - } - - // Turn the server item into something that can be easily compared with the client - // items. - normalizeServerItem(item) { - return item; - } - - // Return whether or not a server item should be present on the client. Expected - // to be overridden. - clientUnderstands(item) { - return true; - } - - // Return whether or not a client item should be present on the server. Expected - // to be overridden - syncedByClient(item) { - return true; - } - - // Compare the server item and the client item, and return a list of property - // names that are different. Can be overridden if needed. - getDifferences(client, server) { - let differences = []; - for (let prop of this.props) { - let clientProp = client[prop]; - let serverProp = server[prop]; - if ((clientProp || "") !== (serverProp || "")) { - differences.push(prop); - } - } - return differences; - } - - // Returns an object containing - // problemData: an instance of the class returned by emptyProblemData(), - // clientRecords: Normalized client records - // records: Normalized server records, - // deletedRecords: Array of ids that were marked as deleted by the server. 
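The pairing logic documented above (and implemented in compareClientWithServer, shown below) matches client and server records by idProp and then diffs the listed props. A simplified, runnable sketch of that core pairing; it omits the deleted/understood/shouldSync handling and all data here is invented:

function compareByIdProp(clientItems, serverItems, idProp, props) {
  const all = new Map();
  for (const server of serverItems) {
    all.set(server[idProp], { server, client: null });
  }
  for (const client of clientItems) {
    const pair = all.get(client[idProp]) || { server: null, client: null };
    pair.client = client;
    all.set(client[idProp], pair);
  }
  const problems = { clientMissing: [], serverMissing: [], differences: [] };
  for (const [id, { client, server }] of all) {
    if (!client) { problems.clientMissing.push(id); continue; }
    if (!server) { problems.serverMissing.push(id); continue; }
    // Treat undefined, null and "" as identical, as the real validator does.
    const differences = props.filter(p => (client[p] || "") !== (server[p] || ""));
    if (differences.length) {
      problems.differences.push({ id, differences });
    }
  }
  return problems;
}

console.log(compareByIdProp(
  [{ id: "rec1", name: "q", value: "sync" }],
  [{ id: "rec1", name: "q", value: "stale" },
   { id: "rec2", name: "x", value: "y" }],
  "id", ["name", "value"]
));
// { clientMissing: ["rec2"], serverMissing: [],
//   differences: [ { id: "rec1", differences: ["value"] } ] }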
- compareClientWithServer(clientItems, serverItems) { - clientItems = clientItems.map(item => this.normalizeClientItem(item)); - serverItems = serverItems.map(item => this.normalizeServerItem(item)); - let problems = this.emptyProblemData(); - let seenServer = new Map(); - let serverDeleted = new Set(); - let allRecords = new Map(); - - for (let record of serverItems) { - let id = record[this.idProp]; - if (!id) { - ++problems.missingIDs; - continue; - } - if (record.deleted) { - serverDeleted.add(record); - } else { - let possibleDupe = seenServer.get(id); - if (possibleDupe) { - problems.duplicates.push(id); - } else { - seenServer.set(id, record); - allRecords.set(id, { server: record, client: null, }); - } - record.understood = this.clientUnderstands(record); - } - } - - let recordPairs = []; - let seenClient = new Map(); - for (let record of clientItems) { - let id = record[this.idProp]; - record.shouldSync = this.syncedByClient(record); - seenClient.set(id, record); - let combined = allRecords.get(id); - if (combined) { - combined.client = record; - } else { - allRecords.set(id, { client: record, server: null }); - } - } - - for (let [id, { server, client }] of allRecords) { - if (!client && !server) { - throw new Error("Impossible: no client or server record for " + id); - } else if (server && !client) { - if (server.understood) { - problems.clientMissing.push(id); - } - } else if (client && !server) { - if (client.shouldSync) { - problems.serverMissing.push(id); - } - } else { - if (!client.shouldSync) { - if (!problems.serverUnexpected.includes(id)) { - problems.serverUnexpected.push(id); - } - continue; - } - let differences = this.getDifferences(client, server); - if (differences && differences.length) { - problems.differences.push({ id, differences }); - } - } - } - return { - problemData: problems, - clientRecords: clientItems, - records: serverItems, - deletedRecords: [...serverDeleted] - }; - } -} - -// Default to 0, some engines may override. -CollectionValidator.prototype.version = 0; diff --git a/services/sync/modules/constants.js b/services/sync/modules/constants.js index f70bbd61c..c8d66d921 100644 --- a/services/sync/modules/constants.js +++ b/services/sync/modules/constants.js @@ -4,8 +4,7 @@ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // Process each item in the "constants hash" to add to "global" and give a name -this.EXPORTED_SYMBOLS = []; -for (let [key, val] of Object.entries({ +this.EXPORTED_SYMBOLS = [((this[key] = val), key) for ([key, val] in Iterator({ WEAVE_VERSION: "@weave_version@", @@ -45,7 +44,7 @@ MAX_IGNORE_ERROR_COUNT: 5, // Backoff intervals MINIMUM_BACKOFF_INTERVAL: 15 * 60 * 1000, // 15 minutes -MAXIMUM_BACKOFF_INTERVAL: 8 * 60 * 60 * 1000, // 8 hours +MAXIMUM_BACKOFF_INTERVAL: 8 * 60 * 60 * 1000, // 8 hours // HMAC event handling timeout. // 10 minutes: a compromise between the multi-desktop sync interval @@ -76,10 +75,6 @@ PASSWORDS_STORE_BATCH_SIZE: 50, // same as MOBILE_BATCH_SIZE ADDONS_STORE_BATCH_SIZE: 1000000, // process all addons at once APPS_STORE_BATCH_SIZE: 50, // same as MOBILE_BATCH_SIZE -// Default batch size for download batching -// (how many records are fetched at a time from the server when batching is used). -DEFAULT_DOWNLOAD_BATCH_SIZE: 1000, - // score thresholds for early syncs SINGLE_USER_THRESHOLD: 1000, MULTI_DEVICE_THRESHOLD: 300, @@ -98,16 +93,13 @@ SCORE_UPDATE_DELAY: 100, // observed spurious idle/back events and short enough to pre-empt user activity. 
IDLE_OBSERVER_BACK_DELAY: 100, -// Max number of records or bytes to upload in a single POST - we'll do multiple POSTS if either -// MAX_UPLOAD_RECORDS or MAX_UPLOAD_BYTES is hit) +// Number of records to upload in a single POST (multiple POSTS if exceeded) +// FIXME: Record size limit is 256k (new cluster), so this can be quite large! +// (Bug 569295) MAX_UPLOAD_RECORDS: 100, -MAX_UPLOAD_BYTES: 1024 * 1023, // just under 1MB MAX_HISTORY_UPLOAD: 5000, MAX_HISTORY_DOWNLOAD: 5000, -// TTL of the message sent to another device when sending a tab -NOTIFY_TAB_SENT_TTL_SECS: 1 * 3600, // 1 hour - // Top-level statuses: STATUS_OK: "success.status_ok", SYNC_FAILED: "error.sync.failed", @@ -130,6 +122,7 @@ LOGIN_FAILED_NETWORK_ERROR: "error.login.reason.network", LOGIN_FAILED_SERVER_ERROR: "error.login.reason.server", LOGIN_FAILED_INVALID_PASSPHRASE: "error.login.reason.recoverykey", LOGIN_FAILED_LOGIN_REJECTED: "error.login.reason.account", +LOGIN_FAILED_NOT_READY: "error.login.reason.initializing", // sync failure status codes METARECORD_DOWNLOAD_FAIL: "error.sync.reason.metarecord_download_fail", @@ -152,8 +145,6 @@ ENGINE_UNKNOWN_FAIL: "error.engine.reason.unknown_fail", ENGINE_APPLY_FAIL: "error.engine.reason.apply_fail", ENGINE_METARECORD_DOWNLOAD_FAIL: "error.engine.reason.metarecord_download_fail", ENGINE_METARECORD_UPLOAD_FAIL: "error.engine.reason.metarecord_upload_fail", -// an upload failure where the batch was interrupted with a 412 -ENGINE_BATCH_INTERRUPTED: "error.engine.reason.batch_interrupted", JPAKE_ERROR_CHANNEL: "jpake.error.channel", JPAKE_ERROR_NETWORK: "jpake.error.network", @@ -181,7 +172,7 @@ kSyncBackoffNotMet: "Trying to sync before the server said it kFirstSyncChoiceNotMade: "User has not selected an action for first sync", // Application IDs -FIREFOX_ID: "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}", +FIREFOX_ID: "{8de7fcbb-c55c-4fbe-bfc5-fc555c87dbc4}", FENNEC_ID: "{a23983c0-fd0e-11dc-95ff-0800200c9a66}", SEAMONKEY_ID: "{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}", TEST_HARNESS_ID: "xuth@mozilla.org", @@ -189,10 +180,6 @@ TEST_HARNESS_ID: "xuth@mozilla.org", MIN_PP_LENGTH: 12, MIN_PASS_LENGTH: 8, -DEVICE_TYPE_DESKTOP: "desktop", -DEVICE_TYPE_MOBILE: "mobile", +LOG_DATE_FORMAT: "%Y-%m-%d %H:%M:%S", -})) { - this[key] = val; - this.EXPORTED_SYMBOLS.push(key); -} +}))]; diff --git a/services/sync/modules/engines.js b/services/sync/modules/engines.js index 1eaa1863a..eabddae2b 100644 --- a/services/sync/modules/engines.js +++ b/services/sync/modules/engines.js @@ -7,24 +7,21 @@ this.EXPORTED_SYMBOLS = [ "Engine", "SyncEngine", "Tracker", - "Store", - "Changeset" + "Store" ]; -var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components; +const {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components; Cu.import("resource://services-common/async.js"); Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://services-common/observers.js"); +Cu.import("resource://services-common/utils.js"); Cu.import("resource://services-sync/constants.js"); Cu.import("resource://services-sync/identity.js"); Cu.import("resource://services-sync/record.js"); Cu.import("resource://services-sync/resource.js"); Cu.import("resource://services-sync/util.js"); -XPCOMUtils.defineLazyModuleGetter(this, "fxAccounts", - "resource://gre/modules/FxAccounts.jsm"); - /* * Trackers are associated with a single engine and deal with * listening for changes to their particular data type. 
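The constants.js hunk above rewrites the export boilerplate: every key in the constants hash becomes both a module global and an entry in EXPORTED_SYMBOLS. The removed code spelled this out as a standard loop; the imported file compresses it into a legacy, Gecko-only array comprehension. A sketch of the standards-based pattern, with `moduleGlobal` standing in for the JSM's `this`:

const moduleGlobal = {};
const constants = {
  WEAVE_VERSION: "@weave_version@",
  MAX_UPLOAD_RECORDS: 100,
};

moduleGlobal.EXPORTED_SYMBOLS = [];
for (const [key, val] of Object.entries(constants)) {
  moduleGlobal[key] = val;                 // define the constant itself
  moduleGlobal.EXPORTED_SYMBOLS.push(key); // and export its name
}
console.log(moduleGlobal.EXPORTED_SYMBOLS); // ["WEAVE_VERSION", "MAX_UPLOAD_RECORDS"]

// The imported one-liner equivalent (non-standard, pre-ES6 Gecko only):
//   this.EXPORTED_SYMBOLS = [((this[key] = val), key)
//                            for ([key, val] in Iterator({ ... }))];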
@@ -107,7 +104,7 @@ Tracker.prototype = {
     Utils.jsonLoad("changes/" + this.file, this, function(json) {
       if (json && (typeof(json) == "object")) {
         this.changedIDs = json;
-      } else if (json !== null) {
+      } else {
         this._log.warn("Changed IDs file " + this.file +
                        " contains non-object value.");
         json = null;
       }
@@ -132,30 +129,26 @@ Tracker.prototype = {
     this._ignored.splice(index, 1);
   },
 
-  _saveChangedID(id, when) {
-    this._log.trace(`Adding changed ID: ${id}, ${JSON.stringify(when)}`);
-    this.changedIDs[id] = when;
-    this.saveChangedIDs(this.onSavedChangedIDs);
-  },
-
   addChangedID: function (id, when) {
     if (!id) {
       this._log.warn("Attempted to add undefined ID to tracker");
       return false;
     }
 
-    if (this.ignoreAll || this._ignored.includes(id)) {
+    if (this.ignoreAll || (id in this._ignored)) {
       return false;
     }
 
     // Default to the current time in seconds if no time is provided.
     if (when == null) {
-      when = this._now();
+      when = Math.floor(Date.now() / 1000);
     }
 
     // Add/update the entry if we have a newer time.
     if ((this.changedIDs[id] || -Infinity) < when) {
-      this._saveChangedID(id, when);
+      this._log.trace("Adding changed ID: " + id + ", " + when);
+      this.changedIDs[id] = when;
+      this.saveChangedIDs(this.onSavedChangedIDs);
     }
 
     return true;
@@ -166,9 +159,8 @@ Tracker.prototype = {
       this._log.warn("Attempted to remove undefined ID from tracker");
       return false;
     }
-    if (this.ignoreAll || this._ignored.includes(id)) {
+    if (this.ignoreAll || (id in this._ignored))
       return false;
-    }
     if (this.changedIDs[id] != null) {
       this._log.trace("Removing changed ID " + id);
       delete this.changedIDs[id];
@@ -183,10 +175,6 @@ Tracker.prototype = {
     this.saveChangedIDs();
   },
 
-  _now() {
-    return Date.now() / 1000;
-  },
-
   _isTracking: false,
 
   // Override these in your subclasses.
@@ -311,21 +299,17 @@ Store.prototype = {
    */
   applyIncomingBatch: function (records) {
     let failed = [];
-    for (let record of records) {
+    for each (let record in records) {
       try {
         this.applyIncoming(record);
+      } catch (ex if (ex.code == Engine.prototype.eEngineAbortApplyIncoming)) {
+        // This kind of exception should have a 'cause' attribute, which is an
+        // originating exception.
+        // ex.cause will carry its stack with it when rethrown.
+        throw ex.cause;
       } catch (ex) {
-        if (ex.code == Engine.prototype.eEngineAbortApplyIncoming) {
-          // This kind of exception should have a 'cause' attribute, which is an
-          // originating exception.
-          // ex.cause will carry its stack with it when rethrown.
-          throw ex.cause;
-        }
-        if (Async.isShutdownException(ex)) {
-          throw ex;
-        }
-        this._log.warn("Failed to apply incoming record " + record.id, ex);
-        this.engine._noteApplyFailure();
+        this._log.warn("Failed to apply incoming record " + record.id);
+        this._log.warn("Encountered exception: " + Utils.exceptionStr(ex));
         failed.push(record.id);
       }
     };
@@ -499,11 +483,7 @@ EngineManager.prototype = {
   },
 
   getAll: function () {
-    let engines = [];
-    for (let [, engine] of Object.entries(this._engines)) {
-      engines.push(engine);
-    }
-    return engines;
+    return [engine for ([name, engine] in Iterator(this._engines))];
   },
 
   /**
@@ -516,7 +496,7 @@ EngineManager.prototype = {
   },
 
   get enabledEngineNames() {
-    return this.getEnabled().map(e => e.name);
+    return [e.name for each (e in this.getEnabled())];
   },
 
   persistDeclined: function () {
@@ -593,11 +573,16 @@ EngineManager.prototype = {
         this._engines[name] = engine;
       }
     } catch (ex) {
+      this._log.error(CommonUtils.exceptionStr(ex));
+
+      let mesg = ex.message ?
ex.message : ex; let name = engineObject || ""; name = name.prototype || ""; name = name.name || ""; - this._log.error(`Could not initialize engine ${name}`, ex); + let out = "Could not initialize engine '" + name + "': " + mesg; + this._log.error(out); + return engineObject; } }, @@ -643,17 +628,16 @@ Engine.prototype = { // Signal to the engine that processing further records is pointless. eEngineAbortApplyIncoming: "error.engine.abort.applyincoming", - // Should we keep syncing if we find a record that cannot be uploaded (ever)? - // If this is false, we'll throw, otherwise, we'll ignore the record and - // continue. This currently can only happen due to the record being larger - // than the record upload limit. - allowSkippedRecord: true, - get prefName() { return this.name; }, get enabled() { + // XXX: Disable non-functional add-ons syncing for the time being + // This check can go away when add-on syncing is addressed + if (this.prefName == "addons") + return false; + return Svc.Prefs.get("engine." + this.prefName, false); }, @@ -711,15 +695,6 @@ Engine.prototype = { wipeClient: function () { this._notify("wipe-client", this.name, this._wipeClient)(); - }, - - /** - * If one exists, initialize and return a validator for this engine (which - * must have a `validate(engine)` method that returns a promise to an object - * with a getSummary method). Otherwise return null. - */ - getValidator: function () { - return null; } }; @@ -813,11 +788,7 @@ SyncEngine.prototype = { return this._toFetch; }, set toFetch(val) { - let cb = (error) => { - if (error) { - this._log.error("Failed to read JSON records to fetch", error); - } - } + let cb = (error) => this._log.error(Utils.exceptionStr(error)); // Coerce the array to a string for more efficient comparison. if (val + "" == this._toFetch) { return; @@ -842,13 +813,7 @@ SyncEngine.prototype = { return this._previousFailed; }, set previousFailed(val) { - let cb = (error) => { - if (error) { - this._log.error("Failed to set previousFailed", error); - } else { - this._log.debug("Successfully wrote previousFailed."); - } - } + let cb = (error) => this._log.error(Utils.exceptionStr(error)); // Coerce the array to a string for more efficient comparison. if (val + "" == this._previousFailed) { return; @@ -881,8 +846,9 @@ SyncEngine.prototype = { }, /* - * Returns a changeset for this sync. Engine implementations can override this - * method to bypass the tracker for certain or all changed items. + * Returns a mapping of IDs -> changed timestamp. Engine implementations + * can override this method to bypass the tracker for certain or all + * changed items. */ getChangedIDs: function () { return this._tracker.changedIDs; @@ -950,16 +916,20 @@ SyncEngine.prototype = { // this._modified to the tracker. this.lastSyncLocal = Date.now(); if (this.lastSync) { - this._modified = this.pullNewChanges(); + this._modified = this.getChangedIDs(); } else { + // Mark all items to be uploaded, but treat them as changed from long ago this._log.debug("First sync, uploading all items"); - this._modified = this.pullAllChanges(); + this._modified = {}; + for (let id in this._store.getAllIDs()) { + this._modified[id] = 0; + } } // Clear the tracker now. If the sync fails we'll add the ones we failed // to upload back. 
this._tracker.clearChangedIDs(); - this._log.info(this._modified.count() + + this._log.info(Object.keys(this._modified).length + " outgoing items pre-reconciliation"); // Keep track of what to delete at the end of sync @@ -970,7 +940,7 @@ SyncEngine.prototype = { * A tiny abstraction to make it easier to test incoming record * application. */ - itemSource: function () { + _itemSource: function () { return new Collection(this.engineURL, this._recordObj, this.service); }, @@ -987,7 +957,7 @@ SyncEngine.prototype = { let isMobile = (Svc.Prefs.get("client.type") == "mobile"); if (!newitems) { - newitems = this.itemSource(); + newitems = this._itemSource(); } if (this._defaultSort) { @@ -1024,12 +994,10 @@ SyncEngine.prototype = { try { failed = failed.concat(this._store.applyIncomingBatch(applyBatch)); } catch (ex) { - if (Async.isShutdownException(ex)) { - throw ex; - } // Catch any error that escapes from applyIncomingBatch. At present // those will all be abort events. - this._log.warn("Got exception, aborting processIncoming", ex); + this._log.warn("Got exception " + Utils.exceptionStr(ex) + + ", aborting processIncoming."); aborting = ex; } this._tracker.ignoreAll = false; @@ -1074,10 +1042,7 @@ SyncEngine.prototype = { try { try { item.decrypt(key); - } catch (ex) { - if (!Utils.isHMACMismatch(ex)) { - throw ex; - } + } catch (ex if Utils.isHMACMismatch(ex)) { let strategy = self.handleHMACMismatch(item, true); if (strategy == SyncEngine.kRecoveryStrategy.retry) { // You only get one retry. @@ -1087,10 +1052,7 @@ SyncEngine.prototype = { key = self.service.collectionKeys.keyForCollection(self.name); item.decrypt(key); strategy = null; - } catch (ex) { - if (!Utils.isHMACMismatch(ex)) { - throw ex; - } + } catch (ex if Utils.isHMACMismatch(ex)) { strategy = self.handleHMACMismatch(item, false); } } @@ -1103,8 +1065,7 @@ SyncEngine.prototype = { self._log.debug("Ignoring second retry suggestion."); // Fall through to error case. 
case SyncEngine.kRecoveryStrategy.error: - self._log.warn("Error decrypting record", ex); - self._noteApplyFailure(); + self._log.warn("Error decrypting record: " + Utils.exceptionStr(ex)); failed.push(item.id); return; case SyncEngine.kRecoveryStrategy.ignore: @@ -1114,11 +1075,7 @@ SyncEngine.prototype = { } } } catch (ex) { - if (Async.isShutdownException(ex)) { - throw ex; - } - self._log.warn("Error decrypting record", ex); - self._noteApplyFailure(); + self._log.warn("Error decrypting record: " + Utils.exceptionStr(ex)); failed.push(item.id); return; } @@ -1126,20 +1083,15 @@ SyncEngine.prototype = { let shouldApply; try { shouldApply = self._reconcile(item); + } catch (ex if (ex.code == Engine.prototype.eEngineAbortApplyIncoming)) { + self._log.warn("Reconciliation failed: aborting incoming processing."); + failed.push(item.id); + aborting = ex.cause; } catch (ex) { - if (ex.code == Engine.prototype.eEngineAbortApplyIncoming) { - self._log.warn("Reconciliation failed: aborting incoming processing."); - self._noteApplyFailure(); - failed.push(item.id); - aborting = ex.cause; - } else if (!Async.isShutdownException(ex)) { - self._log.warn("Failed to reconcile incoming record " + item.id, ex); - self._noteApplyFailure(); - failed.push(item.id); - return; - } else { - throw ex; - } + self._log.warn("Failed to reconcile incoming record " + item.id); + self._log.warn("Encountered exception: " + Utils.exceptionStr(ex)); + failed.push(item.id); + return; } if (shouldApply) { @@ -1158,7 +1110,7 @@ SyncEngine.prototype = { // Only bother getting data from the server if there's new things if (this.lastModified == null || this.lastModified > this.lastSync) { - let resp = newitems.getBatched(); + let resp = newitems.get(); doApplyBatchAndPersistFailed.call(this); if (!resp.success) { resp.failureCode = ENGINE_DOWNLOAD_FAIL; @@ -1243,13 +1195,7 @@ SyncEngine.prototype = { // Apply remaining items. doApplyBatchAndPersistFailed.call(this); - count.newFailed = this.previousFailed.reduce((count, engine) => { - if (failedInPreviousSync.indexOf(engine) == -1) { - count++; - this._noteApplyNewFailure(); - } - return count; - }, 0); + count.newFailed = Utils.arraySub(this.previousFailed, failedInPreviousSync).length; count.succeeded = Math.max(0, count.applied - count.failed); this._log.info(["Records:", count.applied, "applied,", @@ -1260,14 +1206,6 @@ SyncEngine.prototype = { Observers.notify("weave:engine:sync:applied", count, this.name); }, - _noteApplyFailure: function () { - // here would be a good place to record telemetry... - }, - - _noteApplyNewFailure: function () { - // here would be a good place to record telemetry... - }, - /** * Find a GUID of an item that is a duplicate of the incoming item but happens * to have a different GUID @@ -1278,16 +1216,6 @@ SyncEngine.prototype = { // By default, assume there's no dupe items for the engine }, - // Called when the server has a record marked as deleted, but locally we've - // changed it more recently than the deletion. If we return false, the - // record will be deleted locally. If we return true, we'll reupload the - // record to the server -- any extra work that's needed as part of this - // process should be done at this point (such as mark the record's parent - // for reuploading in the case of bookmarks). 
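The decrypt path above retries a failed HMAC verification exactly once: the first mismatch asks the service whether fresh collection keys might help, and only a second mismatch falls through to the error handling shown here. A compressed sketch of that flow, with plain stand-ins (fetchKeys, handleMismatch, the assumed error shape in isHMACMismatch) for the real record and key machinery:

// Sketch: retry a record decrypt once after an HMAC mismatch.
function decryptWithRetry(item, fetchKeys, handleMismatch) {
  try {
    item.decrypt(fetchKeys());
  } catch (ex) {
    if (!isHMACMismatch(ex)) {
      throw ex; // not an HMAC problem; let it propagate
    }
    // First mismatch: maybe the collection keys changed under us.
    if (handleMismatch(item, /* mayRetry = */ true) == "retry") {
      item.decrypt(fetchKeys()); // one retry; a second failure propagates
    } else {
      throw ex;
    }
  }
}

function isHMACMismatch(ex) {
  return ex && ex.name == "HMACMismatch"; // assumed error shape
}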
-  _shouldReviveRemotelyDeletedRecord(remoteItem) {
-    return true;
-  },
-
   _deleteId: function (id) {
     this._tracker.removeChangedID(id);

@@ -1298,18 +1226,6 @@
     this._delete.ids.push(id);
   },

-  _switchItemToDupe(localDupeGUID, incomingItem) {
-    // The local, duplicate ID is always deleted on the server.
-    this._deleteId(localDupeGUID);
-
-    // We unconditionally change the item's ID in case the engine knows of
-    // an item but doesn't expose it through itemExists. If the API
-    // contract were stronger, this could be changed.
-    this._log.debug("Switching local ID to incoming: " + localDupeGUID + " -> " +
-                    incomingItem.id);
-    this._store.changeItemID(localDupeGUID, incomingItem.id);
-  },
-
   /**
    * Reconcile incoming record with local state.
    *
@@ -1329,12 +1245,12 @@
     // because some state may change during the course of this function and we
     // need to operate on the original values.
     let existsLocally = this._store.itemExists(item.id);
-    let locallyModified = this._modified.has(item.id);
+    let locallyModified = item.id in this._modified;

     // TODO Handle clock drift better. Tracked in bug 721181.
     let remoteAge = AsyncResource.serverTime - item.modified;
     let localAge = locallyModified ?
-      (Date.now() / 1000 - this._modified.getModifiedTimestamp(item.id)) : null;
+      (Date.now() / 1000 - this._modified[item.id]) : null;
     let remoteIsNewer = remoteAge < localAge;

     this._log.trace("Reconciling " + item.id + ". exists=" +
@@ -1363,18 +1279,15 @@
                       "exists and isn't modified.");
       return true;
     }
-    this._log.trace("Incoming record is deleted but we had local changes.");
-    if (remoteIsNewer) {
-      this._log.trace("Remote record is newer -- deleting local record.");
-      return true;
-    }
-    // If the local record is newer, we defer to individual engines for
-    // how to handle this. By default, we revive the record.
-    let willRevive = this._shouldReviveRemotelyDeletedRecord(item);
-    this._log.trace("Local record is newer -- reviving? " + willRevive);
-
-    return !willRevive;
+    // TODO As part of bug 720592, determine whether we should do more here.
+    // In the case where the local changes are newer, it is quite possible
+    // that the local client will restore data a remote client had tried to
+    // delete. There might be a good reason for that delete and it might be
+    // unexpected for this client to restore that data.
+    this._log.trace("Incoming record is deleted but we had local changes. " +
+                    "Applying the youngest record.");
+    return remoteIsNewer;
   }

   // At this point the incoming record is not for a deletion and must have
@@ -1386,32 +1299,40 @@
   // refresh the metadata collected above. See bug 710448 for the history
   // of this logic.
   if (!existsLocally) {
-    let localDupeGUID = this._findDupe(item);
-    if (localDupeGUID) {
-      this._log.trace("Local item " + localDupeGUID + " is a duplicate for " +
+    let dupeID = this._findDupe(item);
+    if (dupeID) {
+      this._log.trace("Local item " + dupeID + " is a duplicate for " +
                       "incoming item " + item.id);
+      // The local, duplicate ID is always deleted on the server.
+      this._deleteId(dupeID);

       // The current API contract does not mandate that the ID returned by
       // _findDupe() actually exists. Therefore, we have to perform this
       // check.
-      existsLocally = this._store.itemExists(localDupeGUID);
+      existsLocally = this._store.itemExists(dupeID);
+
+      // We unconditionally change the item's ID in case the engine knows of
+      // an item but doesn't expose it through itemExists.
If the API + // contract were stronger, this could be changed. + this._log.debug("Switching local ID to incoming: " + dupeID + " -> " + + item.id); + this._store.changeItemID(dupeID, item.id); // If the local item was modified, we carry its metadata forward so // appropriate reconciling can be performed. - if (this._modified.has(localDupeGUID)) { + if (dupeID in this._modified) { locallyModified = true; - localAge = this._tracker._now() - this._modified.getModifiedTimestamp(localDupeGUID); + localAge = Date.now() / 1000 - this._modified[dupeID]; remoteIsNewer = remoteAge < localAge; - this._modified.swap(localDupeGUID, item.id); + this._modified[item.id] = this._modified[dupeID]; + delete this._modified[dupeID]; } else { locallyModified = false; localAge = null; } - // Tell the engine to do whatever it needs to switch the items. - this._switchItemToDupe(localDupeGUID, item); - this._log.debug("Local item after duplication: age=" + localAge + "; modified=" + locallyModified + "; exists=" + existsLocally); @@ -1440,7 +1361,7 @@ SyncEngine.prototype = { if (remoteIsNewer) { this._log.trace("Applying incoming because local item was deleted " + "before the incoming item was changed."); - this._modified.delete(item.id); + delete this._modified[item.id]; return true; } @@ -1466,7 +1387,7 @@ SyncEngine.prototype = { this._log.trace("Ignoring incoming item because the local item is " + "identical."); - this._modified.delete(item.id); + delete this._modified[item.id]; return false; } @@ -1491,97 +1412,69 @@ SyncEngine.prototype = { _uploadOutgoing: function () { this._log.trace("Uploading local changes to server."); - let modifiedIDs = this._modified.ids(); + let modifiedIDs = Object.keys(this._modified); if (modifiedIDs.length) { this._log.trace("Preparing " + modifiedIDs.length + " outgoing records"); - let counts = { sent: modifiedIDs.length, failed: 0 }; - // collection we'll upload let up = new Collection(this.engineURL, null, this.service); + let count = 0; - let failed = []; - let successful = []; - let handleResponse = (resp, batchOngoing = false) => { - // Note: We don't want to update this.lastSync, or this._modified until - // the batch is complete, however we want to remember success/failure - // indicators for when that happens. + // Upload what we've got so far in the collection + let doUpload = Utils.bind2(this, function(desc) { + this._log.info("Uploading " + desc + " of " + modifiedIDs.length + + " records"); + let resp = up.post(); if (!resp.success) { this._log.debug("Uploading records failed: " + resp); - resp.failureCode = resp.status == 412 ? ENGINE_BATCH_INTERRUPTED : ENGINE_UPLOAD_FAIL; + resp.failureCode = ENGINE_UPLOAD_FAIL; throw resp; } // Update server timestamp from the upload. - failed = failed.concat(Object.keys(resp.obj.failed)); - successful = successful.concat(resp.obj.success); - - if (batchOngoing) { - // Nothing to do yet - return; - } - // Advance lastSync since we've finished the batch. let modified = resp.headers["x-weave-timestamp"]; - if (modified > this.lastSync) { + if (modified > this.lastSync) this.lastSync = modified; - } - if (failed.length && this._log.level <= Log.Level.Debug) { + + let failed_ids = Object.keys(resp.obj.failed); + if (failed_ids.length) this._log.debug("Records that will be uploaded again because " + "the server couldn't store them: " - + failed.join(", ")); - } - - counts.failed += failed.length; + + failed_ids.join(", ")); - for (let id of successful) { - this._modified.delete(id); + // Clear successfully uploaded objects. 
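The doUpload helper above posts whatever has accumulated in the outgoing collection; the loop that follows flushes it every MAX_UPLOAD_RECORDS records and then once more for any remainder. The pattern in isolation, with postBatch as a hypothetical stand-in for Collection.post():

// Sketch: accumulate records and POST them in fixed-size chunks.
function uploadInChunks(ids, createRecord, postBatch, batchSize) {
  let queue = [];
  let count = 0;
  for (let id of ids) {
    queue.push(createRecord(id));
    if ((++count % batchSize) == 0) {
      postBatch(queue.splice(0)); // partial upload
    }
  }
  if (queue.length) {
    postBatch(queue.splice(0)); // final upload of the remainder
  }
}

// e.g. uploadInChunks(modifiedIDs, createRecord, postBatch, 100);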
+ for each (let id in resp.obj.success) { + delete this._modified[id]; } - this._onRecordsWritten(successful, failed); - - // clear for next batch - failed.length = 0; - successful.length = 0; - }; - - let postQueue = up.newPostQueue(this._log, this.lastSync, handleResponse); + up.clearRecords(); + }); - for (let id of modifiedIDs) { - let out; - let ok = false; + for each (let id in modifiedIDs) { try { - out = this._createRecord(id); + let out = this._createRecord(id); if (this._log.level <= Log.Level.Trace) this._log.trace("Outgoing: " + out); out.encrypt(this.service.collectionKeys.keyForCollection(this.name)); - ok = true; - } catch (ex) { - if (Async.isShutdownException(ex)) { - throw ex; - } - this._log.warn("Error creating record", ex); + up.pushData(out); } - if (ok) { - let { enqueued, error } = postQueue.enqueue(out); - if (!enqueued) { - ++counts.failed; - if (!this.allowSkippedRecord) { - throw error; - } - } + catch(ex) { + this._log.warn("Error creating record: " + Utils.exceptionStr(ex)); } + + // Partial upload + if ((++count % MAX_UPLOAD_RECORDS) == 0) + doUpload((count - MAX_UPLOAD_RECORDS) + " - " + count + " out"); + this._store._sleep(0); } - postQueue.flush(true); - Observers.notify("weave:engine:sync:uploaded", counts, this.name); - } - }, - _onRecordsWritten(succeeded, failed) { - // Implement this method to take specific actions against successfully - // uploaded records and failed records. + // Final upload + if (count % MAX_UPLOAD_RECORDS > 0) + doUpload(count >= MAX_UPLOAD_RECORDS ? "last batch" : "all"); + } }, // Any cleanup necessary. @@ -1596,7 +1489,7 @@ SyncEngine.prototype = { coll.delete(); }); - for (let [key, val] of Object.entries(this._delete)) { + for (let [key, val] in Iterator(this._delete)) { // Remove the key for future uses delete this._delete[key]; @@ -1619,8 +1512,10 @@ SyncEngine.prototype = { } // Mark failed WBOs as changed again so they are reuploaded next time. - this.trackRemainingChanges(); - this._modified.clear(); + for (let [id, when] in Iterator(this._modified)) { + this._tracker.addChangedID(id, when); + } + this._modified = {}; }, _sync: function () { @@ -1656,11 +1551,9 @@ SyncEngine.prototype = { try { this._log.trace("Trying to decrypt a record from the server.."); test.get(); - } catch (ex) { - if (Async.isShutdownException(ex)) { - throw ex; - } - this._log.debug("Failed test decrypt", ex); + } + catch(ex) { + this._log.debug("Failed test decrypt: " + Utils.exceptionStr(ex)); } return canDecrypt; @@ -1706,108 +1599,5 @@ SyncEngine.prototype = { return (this.service.handleHMACEvent() && mayRetry) ? SyncEngine.kRecoveryStrategy.retry : SyncEngine.kRecoveryStrategy.error; - }, - - /** - * Returns a changeset containing all items in the store. The default - * implementation returns a changeset with timestamps from long ago, to - * ensure we always use the remote version if one exists. - * - * This function is only called for the first sync. Subsequent syncs call - * `pullNewChanges`. - * - * @return A `Changeset` object. - */ - pullAllChanges() { - let changeset = new Changeset(); - for (let id in this._store.getAllIDs()) { - changeset.set(id, 0); - } - return changeset; - }, - - /* - * Returns a changeset containing entries for all currently tracked items. - * The default implementation returns a changeset with timestamps indicating - * when the item was added to the tracker. - * - * @return A `Changeset` object. 
- */ - pullNewChanges() { - return new Changeset(this.getChangedIDs()); - }, - - /** - * Adds all remaining changeset entries back to the tracker, typically for - * items that failed to upload. This method is called at the end of each sync. - * - */ - trackRemainingChanges() { - for (let [id, change] of this._modified.entries()) { - this._tracker.addChangedID(id, change); - } - }, -}; - -/** - * A changeset is created for each sync in `Engine::get{Changed, All}IDs`, - * and stores opaque change data for tracked IDs. The default implementation - * only records timestamps, though engines can extend this to store additional - * data for each entry. - */ -class Changeset { - // Creates a changeset with an initial set of tracked entries. - constructor(changes = {}) { - this.changes = changes; - } - - // Returns the last modified time, in seconds, for an entry in the changeset. - // `id` is guaranteed to be in the set. - getModifiedTimestamp(id) { - return this.changes[id]; - } - - // Adds a change for a tracked ID to the changeset. - set(id, change) { - this.changes[id] = change; - } - - // Indicates whether an entry is in the changeset. - has(id) { - return id in this.changes; } - - // Deletes an entry from the changeset. Used to clean up entries for - // reconciled and successfully uploaded records. - delete(id) { - delete this.changes[id]; - } - - // Swaps two entries in the changeset. Used when reconciling duplicates that - // have local changes. - swap(oldID, newID) { - this.changes[newID] = this.changes[oldID]; - delete this.changes[oldID]; - } - - // Returns an array of all tracked IDs in this changeset. - ids() { - return Object.keys(this.changes); - } - - // Returns an array of `[id, change]` tuples. Used to repopulate the tracker - // with entries for failed uploads at the end of a sync. - entries() { - return Object.entries(this.changes); - } - - // Returns the number of entries in this changeset. - count() { - return this.ids().length; - } - - // Clears the changeset. - clear() { - this.changes = {}; - } -} +}; diff --git a/services/sync/modules/engines/addons.js b/services/sync/modules/engines/addons.js index 01dab58d1..ab3131c30 100644 --- a/services/sync/modules/engines/addons.js +++ b/services/sync/modules/engines/addons.js @@ -25,18 +25,15 @@ * * Synchronization is influenced by the following preferences: * + * - services.sync.addons.ignoreRepositoryChecking * - services.sync.addons.ignoreUserEnabledChanges * - services.sync.addons.trustedSourceHostnames * - * and also influenced by whether addons have repository caching enabled and - * whether they allow installation of addons from insecure options (both of - * which are themselves influenced by the "extensions." pref branch) - * * See the documentation in services-sync.js for the behavior of these prefs. 
 */

"use strict";

-var {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;

Cu.import("resource://services-sync/addonutils.js");
Cu.import("resource://services-sync/addonsreconciler.js");
@@ -44,7 +41,6 @@
Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/record.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://services-sync/constants.js");
-Cu.import("resource://services-sync/collection_validator.js");
Cu.import("resource://services-common/async.js");

Cu.import("resource://gre/modules/Preferences.jsm");
@@ -54,7 +50,7 @@ XPCOMUtils.defineLazyModuleGetter(this, "AddonManager",
XPCOMUtils.defineLazyModuleGetter(this, "AddonRepository",
                                  "resource://gre/modules/addons/AddonRepository.jsm");

-this.EXPORTED_SYMBOLS = ["AddonsEngine", "AddonValidator"];
+this.EXPORTED_SYMBOLS = ["AddonsEngine"];

// 7 days in milliseconds.
const PRUNE_ADDON_CHANGES_THRESHOLD = 60 * 60 * 24 * 7 * 1000;
@@ -154,7 +150,7 @@ AddonsEngine.prototype = {
   */
  getChangedIDs: function getChangedIDs() {
    let changes = {};
-   for (let [id, modified] of Object.entries(this._tracker.changedIDs)) {
+   for (let [id, modified] in Iterator(this._tracker.changedIDs)) {
      changes[id] = modified;
    }
@@ -164,7 +160,7 @@
    // we assume this function is only called from within a sync.
    let reconcilerChanges = this._reconciler.getChangesSinceDate(lastSyncDate);
    let addons = this._reconciler.addons;
-   for (let change of reconcilerChanges) {
+   for each (let change in reconcilerChanges) {
      let changeTime = change[0];
      let id = change[2];
@@ -177,7 +173,7 @@
        continue;
      }

-     if (!this.isAddonSyncable(addons[id])) {
+     if (!this._store.isAddonSyncable(addons[id])) {
        continue;
      }
@@ -235,10 +231,6 @@
    let cb = Async.makeSpinningCallback();
    this._reconciler.refreshGlobalState(cb);
    cb.wait();
-  },
-
-  isAddonSyncable(addon, ignoreRepoCheck) {
-    return this._store.isAddonSyncable(addon, ignoreRepoCheck);
  }
};
@@ -286,14 +278,6 @@
      }
    }

-   // Ignore incoming records for which an existing non-syncable addon
-   // exists.
-   let existingMeta = this.reconciler.addons[record.addonID];
-   if (existingMeta && !this.isAddonSyncable(existingMeta)) {
-     this._log.info("Ignoring incoming record for an existing but non-syncable addon", record.addonID);
-     return;
-   }
-
    Store.prototype.applyIncoming.call(this, record);
  },
@@ -307,23 +291,15 @@
      id: record.addonID,
      syncGUID: record.id,
      enabled: record.enabled,
-     requireSecureURI: this._extensionsPrefs.get("install.requireSecureOrigin", true),
+     requireSecureURI: !Svc.Prefs.get("addons.ignoreRepositoryChecking", false),
    }], cb);

    // This will throw if there was an error. This will get caught by the sync
    // engine and the record will try to be applied later.
    let results = cb.wait();

-   if (results.skipped.includes(record.addonID)) {
-     this._log.info("Add-on skipped: " + record.addonID);
-     // Just early-return for skipped addons - we don't want to arrange to
-     // try again next time because the condition that caused us to skip
-     // will remain true for this addon forever.
-     return;
-   }
-
    let addon;
-   for (let a of results.addons) {
+   for each (let a in results.addons) {
      if (a.id == record.addonID) {
        addon = a;
        break;
      }
@@ -467,8 +443,7 @@ AddonsStore.prototype = {
    let ids = {};

    let addons = this.reconciler.addons;
-   for (let id in addons) {
-     let addon = addons[id];
+   for each (let addon in addons) {
      if (this.isAddonSyncable(addon)) {
        ids[addon.guid] = true;
      }
@@ -499,7 +474,7 @@
      }

      this._log.info("Uninstalling add-on as part of wipe: " + addon.id);
-     Utils.catch.call(this, () => addon.uninstall())();
+     Utils.catch(addon.uninstall)();
    }
  },
@@ -538,22 +513,16 @@
   *
   * @param addon
   *        Addon instance
-  * @param ignoreRepoCheck
-  *        Should we skip checking the Addons repository (primarily useful
-  *        for testing and validation).
   * @return Boolean indicating whether it is appropriate for Sync
   */
- isAddonSyncable: function isAddonSyncable(addon, ignoreRepoCheck = false) {
+ isAddonSyncable: function isAddonSyncable(addon) {
    // Currently, we limit syncable add-ons to those that are:
    //   1) In a well-defined set of types
    //   2) Installed in the current profile
    //   3) Not installed by a foreign entity (i.e. installed by the app)
    //      since they act like global extensions.
    //   4) Is not a hotfix.
-   //   5) The addons XPIProvider doesn't veto it (i.e. not being installed in
-   //      the profile directory, or any other reasons it says the addon can't
-   //      be synced)
-   //   6) Are installed from AMO
+   //   5) Are installed from AMO

    // We could represent the test as a complex boolean expression. We go the
    // verbose route so the failure reason is logged.
@@ -573,12 +542,6 @@
      return false;
    }

-   // If the addon manager says it's not syncable, we skip it.
-   if (!addon.isSyncable) {
-     this._log.debug(addon.id + " not syncable: vetoed by the addon manager.");
-     return false;
-   }
-
    // This may be too aggressive. If an add-on is downloaded from AMO and
    // manually placed in the profile directory, foreignInstall will be set.
    // Arguably, that add-on should be syncable.
@@ -589,20 +552,15 @@
    }

    // Ignore hotfix extensions (bug 741670). The pref may not be defined.
-   // XXX - note that addon.isSyncable will be false for hotfix addons, so
-   // this check isn't strictly necessary - except for Sync tests which aren't
-   // set up to create a "real" hotfix addon. This can be removed once those
-   // tests are fixed (but keeping it doesn't hurt either)
    if (this._extensionsPrefs.get("hotfix.id", null) == addon.id) {
      this._log.debug(addon.id + " not syncable: is a hotfix.");
      return false;
    }

-   // If the AddonRepository's cache isn't enabled (which it typically isn't
-   // in tests), getCachedAddonByID always returns null - so skip the check
-   // in that case. We also provide a way to specifically opt-out of the check
-   // even if the cache is enabled, which is used by the validators.
-   if (ignoreRepoCheck || !AddonRepository.cacheEnabled) {
+   // We provide a back door to skip the repository checking of an add-on.
+   // This is utilized by the tests to make testing easier. Users could enable
+   // this, but it would sacrifice security.
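The verbose checks above amount to a single predicate over a handful of AddonManager fields. Collapsed into one function for illustration (field names follow this file; the allowed-type list mirrors the extension/theme set this engine targets, and the AMO-repository check is left out since the surrounding code makes it skippable):

// Sketch: the syncability test as one predicate.
function isSyncableAddon(addon, profileScope, hotfixID) {
  return !!addon &&
         ["extension", "theme"].indexOf(addon.type) != -1 && // 1) type
         addon.scope == profileScope &&                      // 2) in profile
         !addon.foreignInstall &&                            // 3) not foreign
         addon.id != hotfixID;                               // 4) not hotfix
}

// e.g. isSyncableAddon({type: "extension", scope: 1, foreignInstall: false,
//                       id: "x@example.com"}, 1, null) -> true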
+ if (Svc.Prefs.get("addons.ignoreRepositoryChecking", false)) { return true; } @@ -745,69 +703,3 @@ AddonsTracker.prototype = { this.reconciler.stopListening(); }, }; - -class AddonValidator extends CollectionValidator { - constructor(engine = null) { - super("addons", "id", [ - "addonID", - "enabled", - "applicationID", - "source" - ]); - this.engine = engine; - } - - getClientItems() { - return Promise.all([ - new Promise(resolve => - AddonManager.getAllAddons(resolve)), - new Promise(resolve => - AddonManager.getAddonsWithOperationsByTypes(["extension", "theme"], resolve)), - ]).then(([installed, addonsWithPendingOperation]) => { - // Addons pending install won't be in the first list, but addons pending - // uninstall/enable/disable will be in both lists. - let all = new Map(installed.map(addon => [addon.id, addon])); - for (let addon of addonsWithPendingOperation) { - all.set(addon.id, addon); - } - // Convert to an array since Map.prototype.values returns an iterable - return [...all.values()]; - }); - } - - normalizeClientItem(item) { - let enabled = !item.userDisabled; - if (item.pendingOperations & AddonManager.PENDING_ENABLE) { - enabled = true; - } else if (item.pendingOperations & AddonManager.PENDING_DISABLE) { - enabled = false; - } - return { - enabled, - id: item.syncGUID, - addonID: item.id, - applicationID: Services.appinfo.ID, - source: "amo", // check item.foreignInstall? - original: item - }; - } - - normalizeServerItem(item) { - let guid = this.engine._findDupe(item); - if (guid) { - item.id = guid; - } - return item; - } - - clientUnderstands(item) { - return item.applicationID === Services.appinfo.ID; - } - - syncedByClient(item) { - return !item.original.hidden && - !item.original.isSystem && - !(item.original.pendingOperations & AddonManager.PENDING_UNINSTALL) && - this.engine.isAddonSyncable(item.original, true); - } -} diff --git a/services/sync/modules/engines/bookmarks.js b/services/sync/modules/engines/bookmarks.js index 76a198a8b..1936afc3f 100644 --- a/services/sync/modules/engines/bookmarks.js +++ b/services/sync/modules/engines/bookmarks.js @@ -6,12 +6,11 @@ this.EXPORTED_SYMBOLS = ['BookmarksEngine', "PlacesItem", "Bookmark", "BookmarkFolder", "BookmarkQuery", "Livemark", "BookmarkSeparator"]; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cu = Components.utils; Cu.import("resource://gre/modules/PlacesUtils.jsm"); -Cu.import("resource://gre/modules/PlacesSyncUtils.jsm"); Cu.import("resource://gre/modules/XPCOMUtils.jsm"); Cu.import("resource://services-common/async.js"); Cu.import("resource://services-sync/constants.js"); @@ -20,57 +19,21 @@ Cu.import("resource://services-sync/record.js"); Cu.import("resource://services-sync/util.js"); Cu.import("resource://gre/modules/Task.jsm"); Cu.import("resource://gre/modules/PlacesBackups.jsm"); -XPCOMUtils.defineLazyModuleGetter(this, "BookmarkValidator", - "resource://services-sync/bookmark_validator.js"); -XPCOMUtils.defineLazyGetter(this, "PlacesBundle", () => { - let bundleService = Cc["@mozilla.org/intl/stringbundle;1"] - .getService(Ci.nsIStringBundleService); - return bundleService.createBundle("chrome://places/locale/places.properties"); -}); - -const ANNOS_TO_TRACK = [PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, - PlacesSyncUtils.bookmarks.SIDEBAR_ANNO, + +const ALLBOOKMARKS_ANNO = "AllBookmarks"; +const DESCRIPTION_ANNO = "bookmarkProperties/description"; +const SIDEBAR_ANNO = 
"bookmarkProperties/loadInSidebar"; +const MOBILEROOT_ANNO = "mobile/bookmarksRoot"; +const MOBILE_ANNO = "MobileBookmarks"; +const EXCLUDEBACKUP_ANNO = "places/excludeFromBackup"; +const SMART_BOOKMARKS_ANNO = "Places/SmartBookmark"; +const PARENT_ANNO = "sync/parent"; +const ORGANIZERQUERY_ANNO = "PlacesOrganizer/OrganizerQuery"; +const ANNOS_TO_TRACK = [DESCRIPTION_ANNO, SIDEBAR_ANNO, PlacesUtils.LMANNO_FEEDURI, PlacesUtils.LMANNO_SITEURI]; const SERVICE_NOT_SUPPORTED = "Service not supported on this platform"; const FOLDER_SORTINDEX = 1000000; -const { - SOURCE_SYNC, - SOURCE_IMPORT, - SOURCE_IMPORT_REPLACE, -} = Ci.nsINavBookmarksService; - -const SQLITE_MAX_VARIABLE_NUMBER = 999; - -const ORGANIZERQUERY_ANNO = "PlacesOrganizer/OrganizerQuery"; -const ALLBOOKMARKS_ANNO = "AllBookmarks"; -const MOBILE_ANNO = "MobileBookmarks"; - -// The tracker ignores changes made by bookmark import and restore, and -// changes made by Sync. We don't need to exclude `SOURCE_IMPORT`, but both -// import and restore fire `bookmarks-restore-*` observer notifications, and -// the tracker doesn't currently distinguish between the two. -const IGNORED_SOURCES = [SOURCE_SYNC, SOURCE_IMPORT, SOURCE_IMPORT_REPLACE]; - -// Returns the constructor for a bookmark record type. -function getTypeObject(type) { - switch (type) { - case "bookmark": - case "microsummary": - return Bookmark; - case "query": - return BookmarkQuery; - case "folder": - return BookmarkFolder; - case "livemark": - return Livemark; - case "separator": - return BookmarkSeparator; - case "item": - return PlacesItem; - } - return null; -} this.PlacesItem = function PlacesItem(collection, id, type) { CryptoWrapper.call(this, collection, id); @@ -89,32 +52,26 @@ PlacesItem.prototype = { }, getTypeObject: function PlacesItem_getTypeObject(type) { - let recordObj = getTypeObject(type); - if (!recordObj) { - throw new Error("Unknown places item object type: " + type); + switch (type) { + case "bookmark": + case "microsummary": + return Bookmark; + case "query": + return BookmarkQuery; + case "folder": + return BookmarkFolder; + case "livemark": + return Livemark; + case "separator": + return BookmarkSeparator; + case "item": + return PlacesItem; } - return recordObj; + throw "Unknown places item object type: " + type; }, __proto__: CryptoWrapper.prototype, _logName: "Sync.Record.PlacesItem", - - // Converts the record to a Sync bookmark object that can be passed to - // `PlacesSyncUtils.bookmarks.{insert, update}`. - toSyncBookmark() { - return { - kind: this.type, - syncId: this.id, - parentSyncId: this.parentid, - }; - }, - - // Populates the record from a Sync bookmark object returned from - // `PlacesSyncUtils.bookmarks.fetch`. 
- fromSyncBookmark(item) { - this.parentid = item.parentSyncId; - this.parentName = item.parentTitle; - }, }; Utils.deferGetSet(PlacesItem, @@ -127,27 +84,6 @@ this.Bookmark = function Bookmark(collection, id, type) { Bookmark.prototype = { __proto__: PlacesItem.prototype, _logName: "Sync.Record.Bookmark", - - toSyncBookmark() { - let info = PlacesItem.prototype.toSyncBookmark.call(this); - info.title = this.title; - info.url = this.bmkUri; - info.description = this.description; - info.loadInSidebar = this.loadInSidebar; - info.tags = this.tags; - info.keyword = this.keyword; - return info; - }, - - fromSyncBookmark(item) { - PlacesItem.prototype.fromSyncBookmark.call(this, item); - this.title = item.title; - this.bmkUri = item.url.href; - this.description = item.description; - this.loadInSidebar = item.loadInSidebar; - this.tags = item.tags; - this.keyword = item.keyword; - }, }; Utils.deferGetSet(Bookmark, @@ -161,19 +97,6 @@ this.BookmarkQuery = function BookmarkQuery(collection, id) { BookmarkQuery.prototype = { __proto__: Bookmark.prototype, _logName: "Sync.Record.BookmarkQuery", - - toSyncBookmark() { - let info = Bookmark.prototype.toSyncBookmark.call(this); - info.folder = this.folderName; - info.query = this.queryId; - return info; - }, - - fromSyncBookmark(item) { - Bookmark.prototype.fromSyncBookmark.call(this, item); - this.folderName = item.folder; - this.queryId = item.query; - }, }; Utils.deferGetSet(BookmarkQuery, @@ -186,20 +109,6 @@ this.BookmarkFolder = function BookmarkFolder(collection, id, type) { BookmarkFolder.prototype = { __proto__: PlacesItem.prototype, _logName: "Sync.Record.Folder", - - toSyncBookmark() { - let info = PlacesItem.prototype.toSyncBookmark.call(this); - info.description = this.description; - info.title = this.title; - return info; - }, - - fromSyncBookmark(item) { - PlacesItem.prototype.fromSyncBookmark.call(this, item); - this.title = item.title; - this.description = item.description; - this.children = item.childSyncIds; - }, }; Utils.deferGetSet(BookmarkFolder, "cleartext", ["description", "title", @@ -211,21 +120,6 @@ this.Livemark = function Livemark(collection, id) { Livemark.prototype = { __proto__: BookmarkFolder.prototype, _logName: "Sync.Record.Livemark", - - toSyncBookmark() { - let info = BookmarkFolder.prototype.toSyncBookmark.call(this); - info.feed = this.feedUri; - info.site = this.siteUri; - return info; - }, - - fromSyncBookmark(item) { - BookmarkFolder.prototype.fromSyncBookmark.call(this, item); - this.feedUri = item.feed.href; - if (item.site) { - this.siteUri = item.site.href; - } - }, }; Utils.deferGetSet(Livemark, "cleartext", ["siteUri", "feedUri"]); @@ -236,15 +130,81 @@ this.BookmarkSeparator = function BookmarkSeparator(collection, id) { BookmarkSeparator.prototype = { __proto__: PlacesItem.prototype, _logName: "Sync.Record.Separator", - - fromSyncBookmark(item) { - PlacesItem.prototype.fromSyncBookmark.call(this, item); - this.pos = item.index; - }, }; Utils.deferGetSet(BookmarkSeparator, "cleartext", "pos"); + +let kSpecialIds = { + + // Special IDs. Note that mobile can attempt to create a record on + // dereference; special accessors are provided to prevent recursion within + // observers. + guids: ["menu", "places", "tags", "toolbar", "unfiled", "mobile"], + + // Create the special mobile folder to store mobile bookmarks. 
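The kSpecialIds table being defined here resolves the fixed sync GUIDs ("menu", "toolbar", and friends) to whatever numeric item IDs this profile's Places database uses, and creates the mobile root lazily. A toy version of that name-to-ID indirection, with made-up numeric IDs standing in for the Places root getters:

// Sketch: resolve well-known root names to local numeric IDs.
let toyRoots = { menu: 2, places: 1, tags: 4, toolbar: 3, unfiled: 5 };

function specialIdFor(guid, createMobileRoot) {
  if (guid == "mobile") {
    // The mobile root may not exist yet; create it only when asked to.
    return toyRoots.mobile || (toyRoots.mobile = createMobileRoot());
  }
  return toyRoots[guid];
}

// specialIdFor("toolbar") -> 3
// specialIdFor("mobile", function () { return 42; }) -> 42 (made on first use)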
+ createMobileRoot: function createMobileRoot() { + let root = PlacesUtils.placesRootId; + let mRoot = PlacesUtils.bookmarks.createFolder(root, "mobile", -1); + PlacesUtils.annotations.setItemAnnotation( + mRoot, MOBILEROOT_ANNO, 1, 0, PlacesUtils.annotations.EXPIRE_NEVER); + PlacesUtils.annotations.setItemAnnotation( + mRoot, EXCLUDEBACKUP_ANNO, 1, 0, PlacesUtils.annotations.EXPIRE_NEVER); + return mRoot; + }, + + findMobileRoot: function findMobileRoot(create) { + // Use the (one) mobile root if it already exists. + let root = PlacesUtils.annotations.getItemsWithAnnotation(MOBILEROOT_ANNO, {}); + if (root.length != 0) + return root[0]; + + if (create) + return this.createMobileRoot(); + + return null; + }, + + // Accessors for IDs. + isSpecialGUID: function isSpecialGUID(g) { + return this.guids.indexOf(g) != -1; + }, + + specialIdForGUID: function specialIdForGUID(guid, create) { + if (guid == "mobile") { + return this.findMobileRoot(create); + } + return this[guid]; + }, + + // Don't bother creating mobile: if it doesn't exist, this ID can't be it! + specialGUIDForId: function specialGUIDForId(id) { + for each (let guid in this.guids) + if (this.specialIdForGUID(guid, false) == id) + return guid; + return null; + }, + + get menu() { + return PlacesUtils.bookmarksMenuFolderId; + }, + get places() { + return PlacesUtils.placesRootId; + }, + get tags() { + return PlacesUtils.tagsFolderId; + }, + get toolbar() { + return PlacesUtils.toolbarFolderId; + }, + get unfiled() { + return PlacesUtils.unfiledBookmarksFolderId; + }, + get mobile() { + return this.findMobileRoot(true); + }, +}; + this.BookmarksEngine = function BookmarksEngine(service) { SyncEngine.call(this, "Bookmarks", service); } @@ -257,103 +217,68 @@ BookmarksEngine.prototype = { _defaultSort: "index", syncPriority: 4, - allowSkippedRecord: false, - - // A diagnostic helper to get the string value for a bookmark's URL given - // its ID. Always returns a string - on error will return a string in the - // form of "<description of error>" as this is purely for, eg, logging. - // (This means hitting the DB directly and we don't bother using a cached - // statement - we should rarely hit this.) - _getStringUrlForId(id) { - let url; - try { - let stmt = this._store._getStmt(` - SELECT h.url - FROM moz_places h - JOIN moz_bookmarks b ON h.id = b.fk - WHERE b.id = :id`); - stmt.params.id = id; - let rows = Async.querySpinningly(stmt, ["url"]); - url = rows.length == 0 ? "<not found>" : rows[0].url; - } catch (ex) { - if (Async.isShutdownException(ex)) { - throw ex; - } - if (ex instanceof Ci.mozIStorageError) { - url = `<failed: Storage error: ${ex.message} (${ex.result})>`; - } else { - url = `<failed: ${ex.toString()}>`; - } - } - return url; - }, - _guidMapFailed: false, - _buildGUIDMap: function _buildGUIDMap() { - let store = this._store; - let guidMap = {}; - let tree = Async.promiseSpinningly(PlacesUtils.promiseBookmarksTree("", { - includeItemIds: true - })); - function* walkBookmarksTree(tree, parent=null) { - if (tree) { - // Skip root node - if (parent) { - yield [tree, parent]; + _sync: function _sync() { + let engine = this; + let batchEx = null; + + // Try running sync in batch mode + PlacesUtils.bookmarks.runInBatchMode({ + runBatched: function wrappedSync() { + try { + SyncEngine.prototype._sync.call(engine); } - if (tree.children) { - for (let child of tree.children) { - store._sleep(0); // avoid jank while looping. 
- yield* walkBookmarksTree(child, tree); - } + catch(ex) { + batchEx = ex; } } - } + }, null); - function* walkBookmarksRoots(tree, rootIDs) { - for (let id of rootIDs) { - let bookmarkRoot = tree.children.find(child => child.id === id); - if (bookmarkRoot === null) { - continue; - } - yield* walkBookmarksTree(bookmarkRoot, tree); - } + // Expose the exception if something inside the batch failed + if (batchEx != null) { + throw batchEx; } + }, - let rootsToWalk = getChangeRootIds(); - - for (let [node, parent] of walkBookmarksRoots(tree, rootsToWalk)) { - let {guid, id, type: placeType} = node; - guid = PlacesSyncUtils.bookmarks.guidToSyncId(guid); + _guidMapFailed: false, + _buildGUIDMap: function _buildGUIDMap() { + let guidMap = {}; + for (let guid in this._store.getAllIDs()) { + // Figure out with which key to store the mapping. let key; - switch (placeType) { - case PlacesUtils.TYPE_X_MOZ_PLACE: - // Bookmark - let query = null; - if (node.annos && node.uri.startsWith("place:")) { - query = node.annos.find(({name}) => - name === PlacesSyncUtils.bookmarks.SMART_BOOKMARKS_ANNO); - } - if (query && query.value) { - key = "q" + query.value; - } else { - key = "b" + node.uri + ":" + (node.title || ""); - } + let id = this._store.idForGUID(guid); + switch (PlacesUtils.bookmarks.getItemType(id)) { + case PlacesUtils.bookmarks.TYPE_BOOKMARK: + + // Smart bookmarks map to their annotation value. + let queryId; + try { + queryId = PlacesUtils.annotations.getItemAnnotation( + id, SMART_BOOKMARKS_ANNO); + } catch(ex) {} + + if (queryId) + key = "q" + queryId; + else + key = "b" + PlacesUtils.bookmarks.getBookmarkURI(id).spec + ":" + + PlacesUtils.bookmarks.getItemTitle(id); break; - case PlacesUtils.TYPE_X_MOZ_PLACE_CONTAINER: - // Folder - key = "f" + (node.title || ""); + case PlacesUtils.bookmarks.TYPE_FOLDER: + key = "f" + PlacesUtils.bookmarks.getItemTitle(id); break; - case PlacesUtils.TYPE_X_MOZ_PLACE_SEPARATOR: - // Separator - key = "s" + node.index; + case PlacesUtils.bookmarks.TYPE_SEPARATOR: + key = "s" + PlacesUtils.bookmarks.getItemIndex(id); break; default: - this._log.error("Unknown place type: '"+placeType+"'"); continue; } - let parentName = parent.title || ""; + // The mapping is on a per parent-folder-name basis. + let parent = PlacesUtils.bookmarks.getFolderIdForItem(id); + if (parent <= 0) + continue; + + let parentName = PlacesUtils.bookmarks.getItemTitle(parent); if (guidMap[parentName] == null) guidMap[parentName] = {}; @@ -381,17 +306,17 @@ BookmarksEngine.prototype = { // hack should get them to dupe correctly. if (item.queryId) { key = "q" + item.queryId; - altKey = "b" + item.bmkUri + ":" + (item.title || ""); + altKey = "b" + item.bmkUri + ":" + item.title; break; } // No queryID? Fall through to the regular bookmark case. case "bookmark": case "microsummary": - key = "b" + item.bmkUri + ":" + (item.title || ""); + key = "b" + item.bmkUri + ":" + item.title; break; case "folder": case "livemark": - key = "f" + (item.title || ""); + key = "f" + item.title; break; case "separator": key = "s" + item.pos; @@ -405,22 +330,21 @@ BookmarksEngine.prototype = { let guidMap = this._guidMap; // Give the GUID if we have the matching pair. 
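The map built above keys potential duplicates by content rather than GUID: "q" plus the smart-bookmark query ID, "b" plus URI and title for bookmarks, "f" plus title for folders and livemarks, and "s" plus position for separators, all scoped per parent-folder title. The key scheme in isolation, assuming the record fields used in this file:

// Sketch: build the content-based dupe key for an incoming record.
function dupeKeyFor(item) {
  switch (item.type) {
    case "query":
      if (item.queryId) {
        return "q" + item.queryId;
      }
      // No queryId: fall through and dupe like a plain bookmark.
    case "bookmark":
    case "microsummary":
      return "b" + item.bmkUri + ":" + item.title;
    case "folder":
    case "livemark":
      return "f" + item.title;
    case "separator":
      return "s" + item.pos;
    default:
      return null;
  }
}

// dupeKeyFor({type: "folder", title: "News"}) -> "fNews"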
- let parentName = item.parentName || ""; - this._log.trace("Finding mapping: " + parentName + ", " + key); - let parent = guidMap[parentName]; - + this._log.trace("Finding mapping: " + item.parentName + ", " + key); + let parent = guidMap[item.parentName]; + if (!parent) { this._log.trace("No parent => no dupe."); return undefined; } - + let dupe = parent[key]; - + if (dupe) { this._log.trace("Mapped dupe: " + dupe); return dupe; } - + if (altKey) { dupe = parent[altKey]; if (dupe) { @@ -428,7 +352,7 @@ BookmarksEngine.prototype = { return dupe; } } - + this._log.trace("No dupe found for key " + key + "/" + altKey + "."); return undefined; }, @@ -437,7 +361,7 @@ BookmarksEngine.prototype = { SyncEngine.prototype._syncStartup.call(this); let cb = Async.makeSpinningCallback(); - Task.spawn(function* () { + Task.spawn(function() { // For first-syncs, make a backup for the user to restore if (this.lastSync == 0) { this._log.debug("Bookmarks backup starting."); @@ -449,7 +373,8 @@ BookmarksEngine.prototype = { // Failure to create a backup is somewhat bad, but probably not bad // enough to prevent syncing of bookmarks - so just log the error and // continue. - this._log.warn("Error while backing up bookmarks, but continuing with sync", ex); + this._log.warn("Got exception \"" + Utils.exceptionStr(ex) + + "\" backing up bookmarks, but continuing with sync."); cb(); } ); @@ -464,10 +389,9 @@ BookmarksEngine.prototype = { try { guidMap = this._buildGUIDMap(); } catch (ex) { - if (Async.isShutdownException(ex)) { - throw ex; - } - this._log.warn("Error while building GUID map, skipping all other incoming items", ex); + this._log.warn("Got exception \"" + Utils.exceptionStr(ex) + + "\" building GUID map." + + " Skipping all other incoming items."); throw {code: Engine.prototype.eEngineAbortApplyIncoming, cause: ex}; } @@ -476,71 +400,17 @@ BookmarksEngine.prototype = { }); this._store._childrenToOrder = {}; - this._store.clearPendingDeletions(); - }, - - _deletePending() { - // Delete pending items -- See the comment above BookmarkStore's deletePending - let newlyModified = Async.promiseSpinningly(this._store.deletePending()); - let now = this._tracker._now(); - this._log.debug("Deleted pending items", newlyModified); - for (let modifiedSyncID of newlyModified) { - if (!this._modified.has(modifiedSyncID)) { - this._modified.set(modifiedSyncID, { timestamp: now, deleted: false }); - } - } - }, - - // We avoid reviving folders since reviving them properly would require - // reviving their children as well. Unfortunately, this is the wrong choice - // in the case of a bookmark restore where wipeServer failed -- if the - // server has the folder as deleted, we *would* want to reupload this folder. - // This is mitigated by the fact that we move any undeleted children to the - // grandparent when deleting the parent. - _shouldReviveRemotelyDeletedRecord(item) { - let kind = Async.promiseSpinningly( - PlacesSyncUtils.bookmarks.getKindForSyncId(item.id)); - if (kind === PlacesSyncUtils.bookmarks.KINDS.FOLDER) { - return false; - } - - // In addition to preventing the deletion of this record (handled by the caller), - // we need to mark the parent of this record for uploading next sync, in order - // to ensure its children array is accurate. - let modifiedTimestamp = this._modified.getModifiedTimestamp(item.id); - if (!modifiedTimestamp) { - // We only expect this to be called with items locally modified, so - // something strange is going on - play it safe and don't revive it. 
- this._log.error("_shouldReviveRemotelyDeletedRecord called on unmodified item: " + item.id); - return false; - } - - let localID = this._store.idForGUID(item.id); - let localParentID = PlacesUtils.bookmarks.getFolderIdForItem(localID); - let localParentSyncID = this._store.GUIDForId(localParentID); - - this._log.trace(`Reviving item "${item.id}" and marking parent ${localParentSyncID} as modified.`); - - if (!this._modified.has(localParentSyncID)) { - this._modified.set(localParentSyncID, { - timestamp: modifiedTimestamp, - deleted: false - }); - } - return true }, _processIncoming: function (newitems) { try { SyncEngine.prototype._processIncoming.call(this, newitems); } finally { - try { - this._deletePending(); - } finally { - // Reorder children. - this._store._orderChildren(); - delete this._store._childrenToOrder; - } + // Reorder children. + this._tracker.ignoreAll = true; + this._store._orderChildren(); + this._tracker.ignoreAll = false; + delete this._store._childrenToOrder; } }, @@ -575,154 +445,16 @@ BookmarksEngine.prototype = { } let mapped = this._mapDupe(item); this._log.debug(item.id + " mapped to " + mapped); - // We must return a string, not an object, and the entries in the GUIDMap - // are created via "new String()" making them an object. - return mapped ? mapped.toString() : mapped; - }, - - pullAllChanges() { - return new BookmarksChangeset(this._store.getAllIDs()); - }, - - pullNewChanges() { - let modifiedGUIDs = this._getModifiedGUIDs(); - if (!modifiedGUIDs.length) { - return new BookmarksChangeset(this._tracker.changedIDs); - } - - // We don't use `PlacesUtils.promiseDBConnection` here because - // `getChangedIDs` might be called while we're in a batch, meaning we - // won't see any changes until the batch finishes and the transaction - // commits. - let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase) - .DBConnection; - - // Filter out tags, organizer queries, and other descendants that we're - // not tracking. We chunk `modifiedGUIDs` because SQLite limits the number - // of bound parameters per query. - for (let startIndex = 0; - startIndex < modifiedGUIDs.length; - startIndex += SQLITE_MAX_VARIABLE_NUMBER) { - - let chunkLength = Math.min(SQLITE_MAX_VARIABLE_NUMBER, - modifiedGUIDs.length - startIndex); - - let query = ` - WITH RECURSIVE - modifiedGuids(guid) AS ( - VALUES ${new Array(chunkLength).fill("(?)").join(", ")} - ), - syncedItems(id) AS ( - VALUES ${getChangeRootIds().map(id => `(${id})`).join(", ")} - UNION ALL - SELECT b.id - FROM moz_bookmarks b - JOIN syncedItems s ON b.parent = s.id - ) - SELECT b.guid - FROM modifiedGuids m - JOIN moz_bookmarks b ON b.guid = m.guid - LEFT JOIN syncedItems s ON b.id = s.id - WHERE s.id IS NULL - `; - - let statement = db.createAsyncStatement(query); - try { - for (let i = 0; i < chunkLength; i++) { - statement.bindByIndex(i, modifiedGUIDs[startIndex + i]); - } - let results = Async.querySpinningly(statement, ["guid"]); - for (let { guid } of results) { - let syncID = PlacesSyncUtils.bookmarks.guidToSyncId(guid); - this._tracker.removeChangedID(syncID); - } - } finally { - statement.finalize(); - } - } - - return new BookmarksChangeset(this._tracker.changedIDs); - }, - - // Returns an array of Places GUIDs for all changed items. Ignores deletions, - // which won't exist in the DB and shouldn't be removed from the tracker. 
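The removed pullNewChanges above slices its GUID list because SQLite caps the number of bound variables in a single statement (SQLITE_MAX_VARIABLE_NUMBER, 999 in this file). The slicing pattern on its own, with processChunk standing in for binding and running one statement:

// Sketch: process a large ID list within SQLite's bound-parameter limit.
const SQLITE_MAX_VARIABLE_NUMBER = 999;

function forEachChunk(items, chunkSize, processChunk) {
  for (let start = 0; start < items.length; start += chunkSize) {
    processChunk(items.slice(start, start + chunkSize));
  }
}

// e.g. forEachChunk(modifiedGuids, SQLITE_MAX_VARIABLE_NUMBER,
//                   function (chunk) { runFilterQuery(chunk); });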
- _getModifiedGUIDs() { - let guids = []; - for (let syncID in this._tracker.changedIDs) { - if (this._tracker.changedIDs[syncID].deleted === true) { - // The `===` check also filters out old persisted timestamps, - // which won't have a `deleted` property. - continue; - } - let guid = PlacesSyncUtils.bookmarks.syncIdToGuid(syncID); - guids.push(guid); - } - return guids; - }, - - // Called when _findDupe returns a dupe item and the engine has decided to - // switch the existing item to the new incoming item. - _switchItemToDupe(localDupeGUID, incomingItem) { - // We unconditionally change the item's ID in case the engine knows of - // an item but doesn't expose it through itemExists. If the API - // contract were stronger, this could be changed. - this._log.debug("Switching local ID to incoming: " + localDupeGUID + " -> " + - incomingItem.id); - this._store.changeItemID(localDupeGUID, incomingItem.id); - - // And mark the parent as being modified. Given we de-dupe based on the - // parent *name* it's possible the item having its GUID changed has a - // different parent from the incoming record. - // So we need to find the GUID of the local parent. - let now = this._tracker._now(); - let localID = this._store.idForGUID(incomingItem.id); - let localParentID = PlacesUtils.bookmarks.getFolderIdForItem(localID); - let localParentGUID = this._store.GUIDForId(localParentID); - this._modified.set(localParentGUID, { modified: now, deleted: false }); - - // And we also add the parent as reflected in the incoming record as the - // de-dupe process might have used an existing item in a different folder. - // But only if the parent exists, otherwise we will upload a deleted item - // when it might actually be valid, just unknown to us. Note that this - // scenario will still leave us with inconsistent client and server states; - // the incoming record on the server references a parent that isn't the - // actual parent locally - see bug 1297955. - if (localParentGUID != incomingItem.parentid) { - let remoteParentID = this._store.idForGUID(incomingItem.parentid); - if (remoteParentID > 0) { - // The parent specified in the record does exist, so we are going to - // attempt a move when we come to applying the record. Mark the parent - // as being modified so we will later upload it with the new child - // reference. - this._modified.set(incomingItem.parentid, { modified: now, deleted: false }); - } else { - // We aren't going to do a move as we don't have the parent (yet?). - // When applying the record we will add our special PARENT_ANNO - // annotation, so if it arrives in the future (either this Sync or a - // later one) it will be reparented. - this._log.debug(`Incoming duplicate item ${incomingItem.id} specifies ` + - `non-existing parent ${incomingItem.parentid}`); - } - } - - // The local, duplicate ID is always deleted on the server - but for - // bookmarks it is a logical delete. - // Simply adding this (now non-existing) ID to the tracker is enough. 
- this._modified.set(localDupeGUID, { modified: now, deleted: true }); - }, - getValidator() { - return new BookmarkValidator(); + return mapped; } }; function BookmarksStore(name, engine) { Store.call(this, name, engine); - this._foldersToDelete = new Set(); - this._atomsToDelete = new Set(); + // Explicitly nullify our references to our cached services so we don't leak Svc.Obs.add("places-shutdown", function() { - for (let query in this._stmts) { - let stmt = this._stmts[query]; + for each (let [query, stmt] in Iterator(this._stmts)) { stmt.finalize(); } this._stmts = {}; @@ -732,12 +464,70 @@ BookmarksStore.prototype = { __proto__: Store.prototype, itemExists: function BStore_itemExists(id) { - return this.idForGUID(id) > 0; + return this.idForGUID(id, true) > 0; }, + + /* + * If the record is a tag query, rewrite it to refer to the local tag ID. + * + * Otherwise, just return. + */ + preprocessTagQuery: function preprocessTagQuery(record) { + if (record.type != "query" || + record.bmkUri == null || + !record.folderName) + return; + + // Yes, this works without chopping off the "place:" prefix. + let uri = record.bmkUri + let queriesRef = {}; + let queryCountRef = {}; + let optionsRef = {}; + PlacesUtils.history.queryStringToQueries(uri, queriesRef, queryCountRef, + optionsRef); + + // We only process tag URIs. + if (optionsRef.value.resultType != optionsRef.value.RESULTS_AS_TAG_CONTENTS) + return; + + // Tag something to ensure that the tag exists. + let tag = record.folderName; + let dummyURI = Utils.makeURI("about:weave#BStore_preprocess"); + PlacesUtils.tagging.tagURI(dummyURI, [tag]); + + // Look for the id of the tag, which might just have been added. + let tags = this._getNode(PlacesUtils.tagsFolderId); + if (!(tags instanceof Ci.nsINavHistoryQueryResultNode)) { + this._log.debug("tags isn't an nsINavHistoryQueryResultNode; aborting."); + return; + } + tags.containerOpen = true; + try { + for (let i = 0; i < tags.childCount; i++) { + let child = tags.getChild(i); + if (child.title == tag) { + // Found the tag, so fix up the query to use the right id. + this._log.debug("Tag query folder: " + tag + " = " + child.itemId); + + this._log.trace("Replacing folders in: " + uri); + for each (let q in queriesRef.value) + q.setFolders([child.itemId], 1); + + record.bmkUri = PlacesUtils.history.queriesToQueryString( + queriesRef.value, queryCountRef.value, optionsRef.value); + return; + } + } + } + finally { + tags.containerOpen = false; + } + }, + applyIncoming: function BStore_applyIncoming(record) { this._log.debug("Applying record " + record.id); - let isSpecial = PlacesSyncUtils.bookmarks.ROOTS.includes(record.id); + let isSpecial = record.id in kSpecialIds; if (record.deleted) { if (isSpecial) { @@ -765,217 +555,548 @@ BookmarksStore.prototype = { return; } + // Preprocess the record before doing the normal apply. 
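preprocessTagQuery above exists because a tag query's place: URI embeds the sender's numeric tag-folder ID, which is meaningless on this profile; the receiver re-points the query at its own folder for that tag name. A much-simplified string-level sketch of the rewrite (the real code round-trips through queryStringToQueries and queriesToQueryString instead of touching the URI text):

// Sketch: re-point a tag query's folder= parameter at the local tag folder.
function rewriteTagQuery(bmkUri, localTagFolderId) {
  return bmkUri.replace(/folder=\d+/, "folder=" + localTagFolderId);
}

// rewriteTagQuery("place:folder=123", 456) -> "place:folder=456"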
+ this.preprocessTagQuery(record); + // Figure out the local id of the parent GUID if available let parentGUID = record.parentid; if (!parentGUID) { throw "Record " + record.id + " has invalid parentid: " + parentGUID; } - this._log.debug("Remote parent is " + parentGUID); + this._log.debug("Local parent is " + parentGUID); + + let parentId = this.idForGUID(parentGUID); + if (parentId > 0) { + // Save the parent id for modifying the bookmark later + record._parent = parentId; + record._orphan = false; + this._log.debug("Record " + record.id + " is not an orphan."); + } else { + this._log.trace("Record " + record.id + + " is an orphan: could not find parent " + parentGUID); + record._orphan = true; + } // Do the normal processing of incoming records Store.prototype.applyIncoming.call(this, record); - if (record.type == "folder" && record.children) { - this._childrenToOrder[record.id] = record.children; + // Do some post-processing if we have an item + let itemId = this.idForGUID(record.id); + if (itemId > 0) { + // Move any children that are looking for this folder as a parent + if (record.type == "folder") { + this._reparentOrphans(itemId); + // Reorder children later + if (record.children) + this._childrenToOrder[record.id] = record.children; + } + + // Create an annotation to remember that it needs reparenting. + if (record._orphan) { + PlacesUtils.annotations.setItemAnnotation( + itemId, PARENT_ANNO, parentGUID, 0, + PlacesUtils.annotations.EXPIRE_NEVER); + } + } + }, + + /** + * Find all ids of items that have a given value for an annotation + */ + _findAnnoItems: function BStore__findAnnoItems(anno, val) { + return PlacesUtils.annotations.getItemsWithAnnotation(anno, {}) + .filter(function(id) { + return PlacesUtils.annotations.getItemAnnotation(id, anno) == val; + }); + }, + + /** + * For the provided parent item, attach its children to it + */ + _reparentOrphans: function _reparentOrphans(parentId) { + // Find orphans and reunite with this folder parent + let parentGUID = this.GUIDForId(parentId); + let orphans = this._findAnnoItems(PARENT_ANNO, parentGUID); + + this._log.debug("Reparenting orphans " + orphans + " to " + parentId); + orphans.forEach(function(orphan) { + // Move the orphan to the parent and drop the missing parent annotation + if (this._reparentItem(orphan, parentId)) { + PlacesUtils.annotations.removeItemAnnotation(orphan, PARENT_ANNO); + } + }, this); + }, + + _reparentItem: function _reparentItem(itemId, parentId) { + this._log.trace("Attempting to move item " + itemId + " to new parent " + + parentId); + try { + if (parentId > 0) { + PlacesUtils.bookmarks.moveItem(itemId, parentId, + PlacesUtils.bookmarks.DEFAULT_INDEX); + return true; + } + } catch(ex) { + this._log.debug("Failed to reparent item. " + Utils.exceptionStr(ex)); + } + return false; + }, + + // Turn a record's nsINavBookmarksService constant and other attributes into + // a granular type for comparison. 
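The PARENT_ANNO machinery above handles records that arrive before their folder: the child is parked (under unfiled, per create below) with an annotation naming the missing parent's GUID, and _reparentOrphans moves it home once that folder materializes. The bookkeeping reduced to a plain map, with moveItem standing in for the Places move call:

// Sketch: park orphans by missing-parent GUID, reunite them later.
let parentAnno = new Map(); // itemId -> GUID of the not-yet-seen parent

function noteOrphan(itemId, parentGUID) {
  parentAnno.set(itemId, parentGUID);
}

function reparentOrphans(parentGUID, parentId, moveItem) {
  for (let [itemId, guid] of parentAnno) {
    if (guid == parentGUID && moveItem(itemId, parentId)) {
      parentAnno.delete(itemId); // reunited; drop the marker
    }
  }
}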
+ _recordType: function _recordType(itemId) { + let bms = PlacesUtils.bookmarks; + let type = bms.getItemType(itemId); + + switch (type) { + case bms.TYPE_FOLDER: + if (PlacesUtils.annotations + .itemHasAnnotation(itemId, PlacesUtils.LMANNO_FEEDURI)) { + return "livemark"; + } + return "folder"; + + case bms.TYPE_BOOKMARK: + let bmkUri = bms.getBookmarkURI(itemId).spec; + if (bmkUri.indexOf("place:") == 0) { + return "query"; + } + return "bookmark"; + + case bms.TYPE_SEPARATOR: + return "separator"; + + default: + return null; } }, create: function BStore_create(record) { - let info = record.toSyncBookmark(); - // This can throw if we're inserting an invalid or incomplete bookmark. - // That's fine; the exception will be caught by `applyIncomingBatch` - // without aborting further processing. - let item = Async.promiseSpinningly(PlacesSyncUtils.bookmarks.insert(info)); - if (item) { - this._log.debug(`Created ${item.kind} ${item.syncId} under ${ - item.parentSyncId}`, item); + // Default to unfiled if we don't have the parent yet. + + // Valid parent IDs are all positive integers. Other values -- undefined, + // null, -1 -- all compare false for > 0, so this catches them all. We + // don't just use <= without the !, because undefined and null compare + // false for that, too! + if (!(record._parent > 0)) { + this._log.debug("Parent is " + record._parent + "; reparenting to unfiled."); + record._parent = kSpecialIds.unfiled; + } + + let newId; + switch (record.type) { + case "bookmark": + case "query": + case "microsummary": { + let uri = Utils.makeURI(record.bmkUri); + newId = PlacesUtils.bookmarks.insertBookmark( + record._parent, uri, PlacesUtils.bookmarks.DEFAULT_INDEX, record.title); + this._log.debug("created bookmark " + newId + " under " + record._parent + + " as " + record.title + " " + record.bmkUri); + + // Smart bookmark annotations are strings. + if (record.queryId) { + PlacesUtils.annotations.setItemAnnotation( + newId, SMART_BOOKMARKS_ANNO, record.queryId, 0, + PlacesUtils.annotations.EXPIRE_NEVER); + } + + if (Array.isArray(record.tags)) { + this._tagURI(uri, record.tags); + } + PlacesUtils.bookmarks.setKeywordForBookmark(newId, record.keyword); + if (record.description) { + PlacesUtils.annotations.setItemAnnotation( + newId, DESCRIPTION_ANNO, record.description, 0, + PlacesUtils.annotations.EXPIRE_NEVER); + } + + if (record.loadInSidebar) { + PlacesUtils.annotations.setItemAnnotation( + newId, SIDEBAR_ANNO, true, 0, + PlacesUtils.annotations.EXPIRE_NEVER); + } + + } break; + case "folder": + newId = PlacesUtils.bookmarks.createFolder( + record._parent, record.title, PlacesUtils.bookmarks.DEFAULT_INDEX); + this._log.debug("created folder " + newId + " under " + record._parent + + " as " + record.title); + + if (record.description) { + PlacesUtils.annotations.setItemAnnotation( + newId, DESCRIPTION_ANNO, record.description, 0, + PlacesUtils.annotations.EXPIRE_NEVER); + } + + // record.children will be dealt with in _orderChildren. + break; + case "livemark": + let siteURI = null; + if (!record.feedUri) { + this._log.debug("No feed URI: skipping livemark record " + record.id); + return; + } + if (PlacesUtils.annotations + .itemHasAnnotation(record._parent, PlacesUtils.LMANNO_FEEDURI)) { + this._log.debug("Invalid parent: skipping livemark record " + record.id); + return; + } + + if (record.siteUri != null) + siteURI = Utils.makeURI(record.siteUri); + + // Until this engine can handle asynchronous error reporting, we need to + // detect errors on creation synchronously. 
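That comment is the key to the livemark branch that follows: PlacesUtils.livemarks.addLivemark is promise-based, but this engine is synchronous, so it spins the event loop until the promise settles. A sketch of the pattern, assuming Async.makeSpinningCallback from services-common/async.js; note that on rejection no livemark object is in scope, so a failure callback should pass null rather than the aLivemark binding the hunk below reuses there.

    // Sketch only: block until a promise-based Places call settles.
    function addLivemarkSync(livemarkObj) {
      let cb = Async.makeSpinningCallback();
      PlacesUtils.livemarks.addLivemark(livemarkObj).then(
        aLivemark => cb(null, [Components.results.NS_OK, aLivemark]),
        ()        => cb(null, [Components.results.NS_ERROR_UNEXPECTED, null]));

      // cb.wait() spins the event loop until cb is invoked, then returns
      // the value that was passed to it (or throws the error argument).
      let [status, livemark] = cb.wait();
      if (!Components.isSuccessCode(status)) {
        throw status;
      }
      return livemark;
    }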
+ let spinningCb = Async.makeSpinningCallback(); + + let livemarkObj = {title: record.title, + parentId: record._parent, + index: PlacesUtils.bookmarks.DEFAULT_INDEX, + feedURI: Utils.makeURI(record.feedUri), + siteURI: siteURI, + guid: record.id}; + PlacesUtils.livemarks.addLivemark(livemarkObj).then( + aLivemark => { spinningCb(null, [Components.results.NS_OK, aLivemark]) }, + () => { spinningCb(null, [Components.results.NS_ERROR_UNEXPECTED, aLivemark]) } + ); + + let [status, livemark] = spinningCb.wait(); + if (!Components.isSuccessCode(status)) { + throw status; + } + + this._log.debug("Created livemark " + livemark.id + " under " + + livemark.parentId + " as " + livemark.title + + ", " + livemark.siteURI.spec + ", " + + livemark.feedURI.spec + ", GUID " + + livemark.guid); + break; + case "separator": + newId = PlacesUtils.bookmarks.insertSeparator( + record._parent, PlacesUtils.bookmarks.DEFAULT_INDEX); + this._log.debug("created separator " + newId + " under " + record._parent); + break; + case "item": + this._log.debug(" -> got a generic places item.. do nothing?"); + return; + default: + this._log.error("_create: Unknown item type: " + record.type); + return; + } + + if (newId) { + // Livemarks can set the GUID through the API, so there's no need to + // do that here. + this._log.trace("Setting GUID of new item " + newId + " to " + record.id); + this._setGUID(newId, record.id); + } + }, + + // Factored out of `remove` to avoid redundant DB queries when the Places ID + // is already known. + removeById: function removeById(itemId, guid) { + let type = PlacesUtils.bookmarks.getItemType(itemId); + + switch (type) { + case PlacesUtils.bookmarks.TYPE_BOOKMARK: + this._log.debug(" -> removing bookmark " + guid); + PlacesUtils.bookmarks.removeItem(itemId); + break; + case PlacesUtils.bookmarks.TYPE_FOLDER: + this._log.debug(" -> removing folder " + guid); + PlacesUtils.bookmarks.removeItem(itemId); + break; + case PlacesUtils.bookmarks.TYPE_SEPARATOR: + this._log.debug(" -> removing separator " + guid); + PlacesUtils.bookmarks.removeItem(itemId); + break; + default: + this._log.error("remove: Unknown item type: " + type); + break; } }, remove: function BStore_remove(record) { - if (PlacesSyncUtils.bookmarks.isRootSyncID(record.id)) { + if (kSpecialIds.isSpecialGUID(record.id)) { this._log.warn("Refusing to remove special folder " + record.id); return; } - let recordKind = Async.promiseSpinningly( - PlacesSyncUtils.bookmarks.getKindForSyncId(record.id)); - let isFolder = recordKind === PlacesSyncUtils.bookmarks.KINDS.FOLDER; - this._log.trace(`Buffering removal of item "${record.id}" of type "${recordKind}".`); - if (isFolder) { - this._foldersToDelete.add(record.id); - } else { - this._atomsToDelete.add(record.id); + + let itemId = this.idForGUID(record.id); + if (itemId <= 0) { + this._log.debug("Item " + record.id + " already removed"); + return; } + this.removeById(itemId, record.id); }, - update: function BStore_update(record) { - let info = record.toSyncBookmark(); - let item = Async.promiseSpinningly(PlacesSyncUtils.bookmarks.update(info)); - if (item) { - this._log.debug(`Updated ${item.kind} ${item.syncId} under ${ - item.parentSyncId}`, item); - } + _taggableTypes: ["bookmark", "microsummary", "query"], + isTaggable: function isTaggable(recordType) { + return this._taggableTypes.indexOf(recordType) != -1; }, - _orderChildren: function _orderChildren() { - let promises = Object.keys(this._childrenToOrder).map(syncID => { - let children = this._childrenToOrder[syncID]; - return 
PlacesSyncUtils.bookmarks.order(syncID, children).catch(ex => { - this._log.debug(`Could not order children for ${syncID}`, ex); - }); - }); - Async.promiseSpinningly(Promise.all(promises)); - }, - - // There's some complexity here around pending deletions. Our goals: - // - // - Don't delete any bookmarks a user has created but not explicitly deleted - // (This includes any bookmark that was not a child of the folder at the - // time the deletion was recorded, and also bookmarks restored from a backup). - // - Don't undelete any bookmark without ensuring the server structure - // includes it (see `BookmarkEngine.prototype._shouldReviveRemotelyDeletedRecord`) - // - // This leads the following approach: - // - // - Additions, moves, and updates are processed before deletions. - // - To do this, all deletion operations are buffered during a sync. Folders - // we plan on deleting have their sync id's stored in `this._foldersToDelete`, - // and non-folders we plan on deleting have their sync id's stored in - // `this._atomsToDelete`. - // - The exception to this is the moves that occur to fix the order of bookmark - // children, which are performed after we process deletions. - // - Non-folders are deleted before folder deletions, so that when we process - // folder deletions we know the correct state. - // - Remote deletions always win for folders, but do not result in recursive - // deletion of children. This is a hack because we're not able to distinguish - // between value changes and structural changes to folders, and we don't even - // have the old server record to compare to. See `BookmarkEngine`'s - // `_shouldReviveRemotelyDeletedRecord` method. - // - When a folder is deleted, its remaining children are moved in order to - // their closest living ancestor. If this is interrupted (unlikely, but - // possible given that we don't perform this operation in a transaction), - // we revive the folder. - // - Remote deletions can lose for non-folders, but only until we handle - // bookmark restores correctly (removing stale state from the server -- this - // is to say, if bug 1230011 is fixed, we should never revive bookmarks). - - deletePending: Task.async(function* deletePending() { - yield this._deletePendingAtoms(); - let guidsToUpdate = yield this._deletePendingFolders(); - this.clearPendingDeletions(); - return guidsToUpdate; - }), - - clearPendingDeletions() { - this._foldersToDelete.clear(); - this._atomsToDelete.clear(); - }, - - _deleteAtom: Task.async(function* _deleteAtom(syncID) { - try { - let info = yield PlacesSyncUtils.bookmarks.remove(syncID, { - preventRemovalOfNonEmptyFolders: true - }); - this._log.trace(`Removed item ${syncID} with type ${info.type}`); - } catch (ex) { - // Likely already removed. - this._log.trace(`Error removing ${syncID}`, ex); + update: function BStore_update(record) { + let itemId = this.idForGUID(record.id); + + if (itemId <= 0) { + this._log.debug("Skipping update for unknown item: " + record.id); + return; } - }), - - _deletePendingAtoms() { - return Promise.all( - [...this._atomsToDelete.values()] - .map(syncID => this._deleteAtom(syncID))); - }, - - // Returns an array of sync ids that need updates. - _deletePendingFolders: Task.async(function* _deletePendingFolders() { - // To avoid data loss, we don't want to just delete the folder outright, - // so we buffer folder deletions and process them at the end (now). 
- // - // At this point, any member in the folder that remains is either a folder - // pending deletion (which we'll get to in this function), or an item that - // should not be deleted. To avoid deleting these items, we first move them - // to the parent of the folder we're about to delete. - let needUpdate = new Set(); - for (let syncId of this._foldersToDelete) { - let childSyncIds = yield PlacesSyncUtils.bookmarks.fetchChildSyncIds(syncId); - if (!childSyncIds.length) { - // No children -- just delete the folder. - yield this._deleteAtom(syncId) - continue; - } - // We could avoid some redundant work here by finding the nearest - // grandparent who isn't present in `this._toDelete`... - let grandparentSyncId = this.GUIDForId( - PlacesUtils.bookmarks.getFolderIdForItem( - this.idForGUID(PlacesSyncUtils.bookmarks.syncIdToGuid(syncId)))); + // Two items are the same type if they have the same ItemType in Places, + // and also share some key characteristics (e.g., both being livemarks). + // We figure this out by examining the item to find the equivalent granular + // (string) type. + // If they're not the same type, we can't just update attributes. Delete + // then recreate the record instead. + let localItemType = this._recordType(itemId); + let remoteRecordType = record.type; + this._log.trace("Local type: " + localItemType + ". " + + "Remote type: " + remoteRecordType + "."); + + if (localItemType != remoteRecordType) { + this._log.debug("Local record and remote record differ in type. " + + "Deleting and recreating."); + this.removeById(itemId, record.id); + this.create(record); + return; + } - this._log.trace(`Moving ${childSyncIds.length} children of "${syncId}" to ` + - `grandparent "${grandparentSyncId}" before deletion.`); + this._log.trace("Updating " + record.id + " (" + itemId + ")"); - // Move children out of the parent and into the grandparent - yield Promise.all(childSyncIds.map(child => PlacesSyncUtils.bookmarks.update({ - syncId: child, - parentSyncId: grandparentSyncId - }))); + // Move the bookmark to a new parent or new position if necessary + if (record._parent > 0 && + PlacesUtils.bookmarks.getFolderIdForItem(itemId) != record._parent) { + this._reparentItem(itemId, record._parent); + } - // Delete the (now empty) parent - try { - yield PlacesSyncUtils.bookmarks.remove(syncId, { - preventRemovalOfNonEmptyFolders: true - }); - } catch (e) { - // We failed, probably because someone added something to this folder - // between when we got the children and now (or the database is corrupt, - // or something else happened...) This is unlikely, but possible. To - // avoid corruption in this case, we need to reupload the record to the - // server. - // - // (Ideally this whole operation would be done in a transaction, and this - // wouldn't be possible). 
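For contrast, the minus side of this hunk (the code being dropped) deleted folders in two phases: empty the doomed folder into a surviving ancestor, then remove the now-empty folder, reuploading anything it had to touch. A condensed sketch of that strategy against the promise-based PlacesSyncUtils.bookmarks API the removed lines use (deleteFolderPreservingChildren is an illustrative name; PlacesSyncUtils comes from the module imports of the removed code):

    // Sketch only: the buffered folder-deletion strategy being removed here.
    Components.utils.import("resource://gre/modules/Task.jsm");

    let deleteFolderPreservingChildren = Task.async(function* (syncId, parentSyncId) {
      // Relocate surviving children to the folder's own parent first...
      let childSyncIds = yield PlacesSyncUtils.bookmarks.fetchChildSyncIds(syncId);
      for (let child of childSyncIds) {
        yield PlacesSyncUtils.bookmarks.update({
          syncId: child,
          parentSyncId: parentSyncId,
        });
      }
      // ...then remove the folder, refusing if something snuck back in.
      yield PlacesSyncUtils.bookmarks.remove(syncId, {
        preventRemovalOfNonEmptyFolders: true,
      });
    });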
-          needUpdate.add(syncId);
+    for (let [key, val] in Iterator(record.cleartext)) {
+      switch (key) {
+      case "title":
+        PlacesUtils.bookmarks.setItemTitle(itemId, val);
+        break;
+      case "bmkUri":
+        PlacesUtils.bookmarks.changeBookmarkURI(itemId, Utils.makeURI(val));
+        break;
+      case "tags":
+        if (Array.isArray(val)) {
+          if (this.isTaggable(remoteRecordType)) {
+            this._tagID(itemId, val);
+          } else {
+            this._log.debug("Remote record type is invalid for tags: " + remoteRecordType);
+          }
+        }
+        break;
+      case "keyword":
+        PlacesUtils.bookmarks.setKeywordForBookmark(itemId, val);
+        break;
+      case "description":
+        if (val) {
+          PlacesUtils.annotations.setItemAnnotation(
+            itemId, DESCRIPTION_ANNO, val, 0,
+            PlacesUtils.annotations.EXPIRE_NEVER);
+        } else {
+          PlacesUtils.annotations.removeItemAnnotation(itemId, DESCRIPTION_ANNO);
+        }
+        break;
+      case "loadInSidebar":
+        if (val) {
+          PlacesUtils.annotations.setItemAnnotation(
+            itemId, SIDEBAR_ANNO, true, 0,
+            PlacesUtils.annotations.EXPIRE_NEVER);
+        } else {
+          PlacesUtils.annotations.removeItemAnnotation(itemId, SIDEBAR_ANNO);
+        }
+        break;
+      case "queryId":
+        PlacesUtils.annotations.setItemAnnotation(
+          itemId, SMART_BOOKMARKS_ANNO, val, 0,
+          PlacesUtils.annotations.EXPIRE_NEVER);
+        break;
+      }
+    }
+  },

-      // Add children (for parentid) and grandparent (for children list) to set
-      // of records needing an update, *unless* they're marked for deletion.
-      if (!this._foldersToDelete.has(grandparentSyncId)) {
-        needUpdate.add(grandparentSyncId);
-      }
-      for (let childSyncId of childSyncIds) {
-        if (!this._foldersToDelete.has(childSyncId)) {
-          needUpdate.add(childSyncId);
+  _orderChildren: function _orderChildren() {
+    for (let [guid, children] in Iterator(this._childrenToOrder)) {
+      // Reorder children according to the GUID list. Gracefully deal
+      // with missing items, e.g. locally deleted.
+      let delta = 0;
+      let parent = null;
+      for (let idx = 0; idx < children.length; idx++) {
+        let itemid = this.idForGUID(children[idx]);
+        if (itemid == -1) {
+          delta += 1;
+          this._log.trace("Could not locate record " + children[idx]);
+          continue;
+        }
+        try {
+          // This code path could be optimized by caching the parent earlier.
+          // Doing so should take into account any edge case due to reparenting
+          // or parent invalidations though.
+ if (!parent) { + parent = PlacesUtils.bookmarks.getFolderIdForItem(itemid); + } + PlacesUtils.bookmarks.moveItem(itemid, parent, idx - delta); + } catch (ex) { + this._log.debug("Could not move item " + children[idx] + ": " + ex); } } } - return [...needUpdate]; - }), + }, changeItemID: function BStore_changeItemID(oldID, newID) { this._log.debug("Changing GUID " + oldID + " to " + newID); - Async.promiseSpinningly(PlacesSyncUtils.bookmarks.changeGuid(oldID, newID)); + // Make sure there's an item to change GUIDs + let itemId = this.idForGUID(oldID); + if (itemId <= 0) + return; + + this._setGUID(itemId, newID); + }, + + _getNode: function BStore__getNode(folder) { + let query = PlacesUtils.history.getNewQuery(); + query.setFolders([folder], 1); + return PlacesUtils.history.executeQuery( + query, PlacesUtils.history.getNewQueryOptions()).root; + }, + + _getTags: function BStore__getTags(uri) { + try { + if (typeof(uri) == "string") + uri = Utils.makeURI(uri); + } catch(e) { + this._log.warn("Could not parse URI \"" + uri + "\": " + e); + } + return PlacesUtils.tagging.getTagsForURI(uri, {}); + }, + + _getDescription: function BStore__getDescription(id) { + try { + return PlacesUtils.annotations.getItemAnnotation(id, DESCRIPTION_ANNO); + } catch (e) { + return null; + } + }, + + _isLoadInSidebar: function BStore__isLoadInSidebar(id) { + return PlacesUtils.annotations.itemHasAnnotation(id, SIDEBAR_ANNO); + }, + + get _childGUIDsStm() { + return this._getStmt( + "SELECT id AS item_id, guid " + + "FROM moz_bookmarks " + + "WHERE parent = :parent " + + "ORDER BY position"); + }, + _childGUIDsCols: ["item_id", "guid"], + + _getChildGUIDsForId: function _getChildGUIDsForId(itemid) { + let stmt = this._childGUIDsStm; + stmt.params.parent = itemid; + let rows = Async.querySpinningly(stmt, this._childGUIDsCols); + return rows.map(function (row) { + if (row.guid) { + return row.guid; + } + // A GUID hasn't been assigned to this item yet, do this now. + return this.GUIDForId(row.item_id); + }, this); }, // Create a record starting from the weave id (places guid) createRecord: function createRecord(id, collection) { - let item = Async.promiseSpinningly(PlacesSyncUtils.bookmarks.fetch(id)); - if (!item) { // deleted item - let record = new PlacesItem(collection, id); + let placeId = this.idForGUID(id); + let record; + if (placeId <= 0) { // deleted item + record = new PlacesItem(collection, id); record.deleted = true; return record; } - let recordObj = getTypeObject(item.kind); - if (!recordObj) { - this._log.warn("Unknown item type, cannot serialize: " + item.kind); - recordObj = PlacesItem; + let parent = PlacesUtils.bookmarks.getFolderIdForItem(placeId); + switch (PlacesUtils.bookmarks.getItemType(placeId)) { + case PlacesUtils.bookmarks.TYPE_BOOKMARK: + let bmkUri = PlacesUtils.bookmarks.getBookmarkURI(placeId).spec; + if (bmkUri.indexOf("place:") == 0) { + record = new BookmarkQuery(collection, id); + + // Get the actual tag name instead of the local itemId + let folder = bmkUri.match(/[:&]folder=(\d+)/); + try { + // There might not be the tag yet when creating on a new client + if (folder != null) { + folder = folder[1]; + record.folderName = PlacesUtils.bookmarks.getItemTitle(folder); + this._log.trace("query id: " + folder + " = " + record.folderName); + } + } + catch(ex) {} + + // Persist the Smart Bookmark anno, if found. 
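Several of the helpers restored in this hunk share one pattern: _getStmt caches a mozIStorageStatement per SQL string, and Async.querySpinningly executes it synchronously, yielding row objects keyed by the requested column names. Shown below as a standalone sketch built around _childGUIDsStm from above (getChildGuids is an illustrative wrapper, not part of the store):

    // Sketch only: the cached-statement + querySpinningly pattern.
    function getChildGuids(store, folderId) {
      let stmt = store._getStmt(
        "SELECT id AS item_id, guid " +
        "FROM moz_bookmarks " +
        "WHERE parent = :parent " +
        "ORDER BY position");
      stmt.params.parent = folderId;

      // querySpinningly blocks until the query finishes and returns row
      // objects carrying only the named columns.
      let rows = Async.querySpinningly(stmt, ["item_id", "guid"]);
      return rows.map(row => row.guid || store.GUIDForId(row.item_id));
    }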
+ try { + let anno = PlacesUtils.annotations.getItemAnnotation(placeId, SMART_BOOKMARKS_ANNO); + if (anno != null) { + this._log.trace("query anno: " + SMART_BOOKMARKS_ANNO + + " = " + anno); + record.queryId = anno; + } + } + catch(ex) {} + } + else { + record = new Bookmark(collection, id); + } + record.title = PlacesUtils.bookmarks.getItemTitle(placeId); + + record.parentName = PlacesUtils.bookmarks.getItemTitle(parent); + record.bmkUri = bmkUri; + record.tags = this._getTags(record.bmkUri); + record.keyword = PlacesUtils.bookmarks.getKeywordForBookmark(placeId); + record.description = this._getDescription(placeId); + record.loadInSidebar = this._isLoadInSidebar(placeId); + break; + + case PlacesUtils.bookmarks.TYPE_FOLDER: + if (PlacesUtils.annotations + .itemHasAnnotation(placeId, PlacesUtils.LMANNO_FEEDURI)) { + record = new Livemark(collection, id); + let as = PlacesUtils.annotations; + record.feedUri = as.getItemAnnotation(placeId, PlacesUtils.LMANNO_FEEDURI); + try { + record.siteUri = as.getItemAnnotation(placeId, PlacesUtils.LMANNO_SITEURI); + } catch (ex) {} + } else { + record = new BookmarkFolder(collection, id); + } + + if (parent > 0) + record.parentName = PlacesUtils.bookmarks.getItemTitle(parent); + record.title = PlacesUtils.bookmarks.getItemTitle(placeId); + record.description = this._getDescription(placeId); + record.children = this._getChildGUIDsForId(placeId); + break; + + case PlacesUtils.bookmarks.TYPE_SEPARATOR: + record = new BookmarkSeparator(collection, id); + if (parent > 0) + record.parentName = PlacesUtils.bookmarks.getItemTitle(parent); + // Create a positioning identifier for the separator, used by _mapDupe + record.pos = PlacesUtils.bookmarks.getItemIndex(placeId); + break; + + default: + record = new PlacesItem(collection, id); + this._log.warn("Unknown item type, cannot serialize: " + + PlacesUtils.bookmarks.getItemType(placeId)); } - let record = new recordObj(collection, id); - record.fromSyncBookmark(item); + record.parentid = this.GUIDForId(parent); record.sortindex = this._calculateIndex(record); return record; @@ -997,22 +1118,84 @@ BookmarksStore.prototype = { return this._getStmt( "SELECT frecency " + "FROM moz_places " + - "WHERE url_hash = hash(:url) AND url = :url " + + "WHERE url = :url " + "LIMIT 1"); }, _frecencyCols: ["frecency"], + get _setGUIDStm() { + return this._getStmt( + "UPDATE moz_bookmarks " + + "SET guid = :guid " + + "WHERE id = :item_id"); + }, + + // Some helper functions to handle GUIDs + _setGUID: function _setGUID(id, guid) { + if (!guid) + guid = Utils.makeGUID(); + + let stmt = this._setGUIDStm; + stmt.params.guid = guid; + stmt.params.item_id = id; + Async.querySpinningly(stmt); + return guid; + }, + + get _guidForIdStm() { + return this._getStmt( + "SELECT guid " + + "FROM moz_bookmarks " + + "WHERE id = :item_id"); + }, + _guidForIdCols: ["guid"], + GUIDForId: function GUIDForId(id) { - let guid = Async.promiseSpinningly(PlacesUtils.promiseItemGuid(id)); - return PlacesSyncUtils.bookmarks.guidToSyncId(guid); + let special = kSpecialIds.specialGUIDForId(id); + if (special) + return special; + + let stmt = this._guidForIdStm; + stmt.params.item_id = id; + + // Use the existing GUID if it exists + let result = Async.querySpinningly(stmt, this._guidForIdCols)[0]; + if (result && result.guid) + return result.guid; + + // Give the uri a GUID if it doesn't have one + return this._setGUID(id); }, - idForGUID: function idForGUID(guid) { - // guid might be a String object rather than a string. 
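The GUID plumbing restored on the plus side here is two statements working as a pair: a SELECT that reads moz_bookmarks.guid for an item, and an UPDATE (_setGUIDStm) that backfills a freshly generated GUID when the row has none. Roughly, under the same assumptions as the sketches above (guidForItem is an illustrative name):

    // Sketch only: read an item's GUID, minting and persisting one if missing.
    function guidForItem(store, itemId) {
      let stmt = store._getStmt(
        "SELECT guid FROM moz_bookmarks WHERE id = :item_id");
      stmt.params.item_id = itemId;

      let rows = Async.querySpinningly(stmt, ["guid"]);
      if (rows.length && rows[0].guid) {
        return rows[0].guid;
      }
      // No GUID yet: _setGUID generates one via Utils.makeGUID() and writes
      // it back with the UPDATE statement.
      return store._setGUID(itemId);
    }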
- guid = PlacesSyncUtils.bookmarks.syncIdToGuid(guid.toString()); + get _idForGUIDStm() { + return this._getStmt( + "SELECT id AS item_id " + + "FROM moz_bookmarks " + + "WHERE guid = :guid"); + }, + _idForGUIDCols: ["item_id"], + + // noCreate is provided as an optional argument to prevent the creation of + // non-existent special records, such as "mobile". + idForGUID: function idForGUID(guid, noCreate) { + if (kSpecialIds.isSpecialGUID(guid)) + return kSpecialIds.specialIdForGUID(guid, !noCreate); - return Async.promiseSpinningly(PlacesUtils.promiseItemId(guid).catch( - ex => -1)); + let stmt = this._idForGUIDStm; + // guid might be a String object rather than a string. + stmt.params.guid = guid.toString(); + + let results = Async.querySpinningly(stmt, this._idForGUIDCols); + this._log.trace("Number of rows matching GUID " + guid + ": " + + results.length); + + // Here's the one we care about: the first. + let result = results[0]; + + if (!result) + return -1; + + return result.item_id; }, _calculateIndex: function _calculateIndex(record) { @@ -1037,48 +1220,107 @@ BookmarksStore.prototype = { return index; }, - getAllIDs: function BStore_getAllIDs() { - let items = {}; - - let query = ` - WITH RECURSIVE - changeRootContents(id) AS ( - VALUES ${getChangeRootIds().map(id => `(${id})`).join(", ")} - UNION ALL - SELECT b.id - FROM moz_bookmarks b - JOIN changeRootContents c ON b.parent = c.id - ) - SELECT guid - FROM changeRootContents - JOIN moz_bookmarks USING (id) - `; - - let statement = this._getStmt(query); - let results = Async.querySpinningly(statement, ["guid"]); - for (let { guid } of results) { - let syncID = PlacesSyncUtils.bookmarks.guidToSyncId(guid); - items[syncID] = { modified: 0, deleted: false }; + _getChildren: function BStore_getChildren(guid, items) { + let node = guid; // the recursion case + if (typeof(node) == "string") { // callers will give us the guid as the first arg + let nodeID = this.idForGUID(guid, true); + if (!nodeID) { + this._log.debug("No node for GUID " + guid + "; returning no children."); + return items; + } + node = this._getNode(nodeID); + } + + if (node.type == node.RESULT_TYPE_FOLDER) { + node.QueryInterface(Ci.nsINavHistoryQueryResultNode); + node.containerOpen = true; + try { + // Remember all the children GUIDs and recursively get more + for (let i = 0; i < node.childCount; i++) { + let child = node.getChild(i); + items[this.GUIDForId(child.itemId)] = true; + this._getChildren(child, items); + } + } + finally { + node.containerOpen = false; + } } return items; }, + /** + * Associates the URI of the item with the provided ID with the + * provided array of tags. + * If the provided ID does not identify an item with a URI, + * returns immediately. + */ + _tagID: function _tagID(itemID, tags) { + if (!itemID || !tags) { + return; + } + + try { + let u = PlacesUtils.bookmarks.getBookmarkURI(itemID); + this._tagURI(u, tags); + } catch (e) { + this._log.warn("Got exception fetching URI for " + itemID + ": not tagging. " + + Utils.exceptionStr(e)); + + // I guess it doesn't have a URI. Don't try to tag it. + return; + } + }, + + /** + * Associate the provided URI with the provided array of tags. + * If the provided URI is falsy, returns immediately. + */ + _tagURI: function _tagURI(bookmarkURI, tags) { + if (!bookmarkURI || !tags) { + return; + } + + // Filter out any null/undefined/empty tags. + tags = tags.filter(t => t); + + // Temporarily tag a dummy URI to preserve tag ids when untagging. 
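That last comment is the heart of _tagURI: if untagging the bookmark removed a tag's final reference, Places would delete the tag folder and later recreate it under a new itemId, breaking any tag queries that embed the old one. Parking the tags on a throwaway URI first keeps the folders alive across the swap. In isolation (retagPreservingIds is an illustrative name, same environment as above):

    // Sketch only: retag a URI while preserving existing tag folder ids.
    function retagPreservingIds(bookmarkURI, tags) {
      let dummyURI = Utils.makeURI("about:weave#retag");
      PlacesUtils.tagging.tagURI(dummyURI, tags);      // Hold a reference to each tag.
      PlacesUtils.tagging.untagURI(bookmarkURI, null); // Drop all old tags safely.
      PlacesUtils.tagging.tagURI(bookmarkURI, tags);   // Apply the new set.
      PlacesUtils.tagging.untagURI(dummyURI, null);    // Release the placeholder.
    }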
+ let dummyURI = Utils.makeURI("about:weave#BStore_tagURI"); + PlacesUtils.tagging.tagURI(dummyURI, tags); + PlacesUtils.tagging.untagURI(bookmarkURI, null); + PlacesUtils.tagging.tagURI(bookmarkURI, tags); + PlacesUtils.tagging.untagURI(dummyURI, null); + }, + + getAllIDs: function BStore_getAllIDs() { + let items = {"menu": true, + "toolbar": true}; + for each (let guid in kSpecialIds.guids) { + if (guid != "places" && guid != "tags") + this._getChildren(guid, items); + } + return items; + }, + wipe: function BStore_wipe() { - this.clearPendingDeletions(); - Async.promiseSpinningly(Task.spawn(function* () { + let cb = Async.makeSpinningCallback(); + Task.spawn(function() { // Save a backup before clearing out all bookmarks. yield PlacesBackups.create(null, true); - yield PlacesUtils.bookmarks.eraseEverything({ - source: SOURCE_SYNC, - }); - })); + for each (let guid in kSpecialIds.guids) + if (guid != "places") { + let id = kSpecialIds.specialIdForGUID(guid); + if (id) + PlacesUtils.bookmarks.removeFolderChildren(id); + } + cb(); + }); + cb.wait(); } }; function BookmarksTracker(name, engine) { - this._batchDepth = 0; - this._batchSawScoreIncrement = false; Tracker.call(this, name, engine); Svc.Obs.add("places-shutdown", this); @@ -1086,16 +1328,6 @@ function BookmarksTracker(name, engine) { BookmarksTracker.prototype = { __proto__: Tracker.prototype, - //`_ignore` checks the change source for each observer notification, so we - // don't want to let the engine ignore all changes during a sync. - get ignoreAll() { - return false; - }, - - // Define an empty setter so that the engine doesn't throw a `TypeError` - // setting a read-only property. - set ignoreAll(value) {}, - startTracking: function() { PlacesUtils.bookmarks.addObserver(this, true); Svc.Obs.add("bookmarks-restore-begin", this); @@ -1116,9 +1348,11 @@ BookmarksTracker.prototype = { switch (topic) { case "bookmarks-restore-begin": this._log.debug("Ignoring changes from importing bookmarks."); + this.ignoreAll = true; break; case "bookmarks-restore-success": this._log.debug("Tracking all items on successful import."); + this.ignoreAll = false; this._log.debug("Restore succeeded: wiping server and other clients."); this.engine.service.resetClient([this.name]); @@ -1127,6 +1361,7 @@ BookmarksTracker.prototype = { break; case "bookmarks-restore-failed": this._log.debug("Tracking all items on failed import."); + this.ignoreAll = false; break; } }, @@ -1137,68 +1372,73 @@ BookmarksTracker.prototype = { Ci.nsISupportsWeakReference ]), - addChangedID(id, change) { - if (!id) { - this._log.warn("Attempted to add undefined ID to tracker"); - return false; - } - if (this._ignored.includes(id)) { - return false; - } - let shouldSaveChange = false; - let currentChange = this.changedIDs[id]; - if (currentChange) { - if (typeof currentChange == "number") { - // Allow raw timestamps for backward-compatibility with persisted - // changed IDs. The new format uses tuples to track deleted items. - shouldSaveChange = currentChange < change.modified; - } else { - shouldSaveChange = currentChange.modified < change.modified || - currentChange.deleted != change.deleted; - } - } else { - shouldSaveChange = true; - } - if (shouldSaveChange) { - this._saveChangedID(id, change); - } - return true; - }, - /** * Add a bookmark GUID to be uploaded and bump up the sync score. * - * @param itemId - * The Places item ID of the bookmark to upload. - * @param guid - * The Places GUID of the bookmark to upload. 
- * @param isTombstone - * Whether we're uploading a tombstone for a removed bookmark. + * @param itemGuid + * GUID of the bookmark to upload. */ - _add: function BMT__add(itemId, guid, isTombstone = false) { - let syncID = PlacesSyncUtils.bookmarks.guidToSyncId(guid); - let info = { modified: Date.now() / 1000, deleted: isTombstone }; - if (this.addChangedID(syncID, info)) { + _add: function BMT__add(itemId, guid) { + guid = kSpecialIds.specialGUIDForId(itemId) || guid; + if (this.addChangedID(guid)) this._upScore(); - } }, - /* Every add/remove/change will trigger a sync for MULTI_DEVICE (except in - a batch operation, where we do it at the end of the batch) */ + /* Every add/remove/change will trigger a sync for MULTI_DEVICE. */ _upScore: function BMT__upScore() { - if (this._batchDepth == 0) { - this.score += SCORE_INCREMENT_XLARGE; - } else { - this._batchSawScoreIncrement = true; + this.score += SCORE_INCREMENT_XLARGE; + }, + + /** + * Determine if a change should be ignored. + * + * @param itemId + * Item under consideration to ignore + * @param folder (optional) + * Folder of the item being changed + */ + _ignore: function BMT__ignore(itemId, folder, guid) { + // Ignore unconditionally if the engine tells us to. + if (this.ignoreAll) + return true; + + // Get the folder id if we weren't given one. + if (folder == null) { + try { + folder = PlacesUtils.bookmarks.getFolderIdForItem(itemId); + } catch (ex) { + this._log.debug("getFolderIdForItem(" + itemId + + ") threw; calling _ensureMobileQuery."); + // I'm guessing that gFIFI can throw, and perhaps that's why + // _ensureMobileQuery is here at all. Try not to call it. + this._ensureMobileQuery(); + folder = PlacesUtils.bookmarks.getFolderIdForItem(itemId); + } + } + + // Ignore changes to tags (folders under the tags folder). + let tags = kSpecialIds.tags; + if (folder == tags) + return true; + + // Ignore tag items (the actual instance of a tag for a bookmark). + if (PlacesUtils.bookmarks.getFolderIdForItem(folder) == tags) + return true; + + // Make sure to remove items that have the exclude annotation. + if (PlacesUtils.annotations.itemHasAnnotation(itemId, EXCLUDEBACKUP_ANNO)) { + this.removeChangedID(guid); + return true; } + + return false; }, onItemAdded: function BMT_onItemAdded(itemId, folder, index, itemType, uri, title, dateAdded, - guid, parentGuid, source) { - if (IGNORED_SOURCES.includes(source)) { + guid, parentGuid) { + if (this._ignore(itemId, folder, guid)) return; - } this._log.trace("onItemAdded: " + itemId); this._add(itemId, guid); @@ -1206,51 +1446,13 @@ BookmarksTracker.prototype = { }, onItemRemoved: function (itemId, parentId, index, type, uri, - guid, parentGuid, source) { - if (IGNORED_SOURCES.includes(source)) { - return; - } - - // Ignore changes to tags (folders under the tags folder). - if (parentId == PlacesUtils.tagsFolderId) { - return; - } - - let grandParentId = -1; - try { - grandParentId = PlacesUtils.bookmarks.getFolderIdForItem(parentId); - } catch (ex) { - // `getFolderIdForItem` can throw if the item no longer exists, such as - // when we've removed a subtree using `removeFolderChildren`. - return; - } - - // Ignore tag items (the actual instance of a tag for a bookmark). - if (grandParentId == PlacesUtils.tagsFolderId) { + guid, parentGuid) { + if (this._ignore(itemId, parentId, guid)) { return; } - /** - * The above checks are incomplete: we can still write tombstones for - * items that we don't track, and upload extraneous roots. 
- * - * Consider the left pane root: it's a child of the Places root, and has - * children and grandchildren. `PlacesUIUtils` can create, delete, and - * recreate it as needed. We can't determine ancestors when the root or its - * children are deleted, because they've already been removed from the - * database when `onItemRemoved` is called. Likewise, we can't check their - * "exclude from backup" annos, because they've *also* been removed. - * - * So, we end up writing tombstones for the left pane queries and left - * pane root. For good measure, we'll also upload the Places root, because - * it's the parent of the left pane root. - * - * As a workaround, we can track the parent GUID and reconstruct the item's - * ancestry at sync time. This is complicated, and the previous behavior was - * already wrong, so we'll wait for bug 1258127 to fix this generally. - */ this._log.trace("onItemRemoved: " + itemId); - this._add(itemId, guid, /* isTombstone */ true); + this._add(itemId, guid); this._add(parentId, parentGuid); }, @@ -1265,40 +1467,32 @@ BookmarksTracker.prototype = { if (all.length == 0) return; + // Disable handling of notifications while changing the mobile query + this.ignoreAll = true; + let mobile = find(MOBILE_ANNO); - let queryURI = Utils.makeURI("place:folder=" + PlacesUtils.mobileFolderId); - let title = PlacesBundle.GetStringFromName("MobileBookmarksFolderTitle"); + let queryURI = Utils.makeURI("place:folder=" + kSpecialIds.mobile); + let title = Str.sync.get("mobile.label"); // Don't add OR remove the mobile bookmarks if there's nothing. - if (PlacesUtils.bookmarks.getIdForItemAt(PlacesUtils.mobileFolderId, 0) == -1) { + if (PlacesUtils.bookmarks.getIdForItemAt(kSpecialIds.mobile, 0) == -1) { if (mobile.length != 0) - PlacesUtils.bookmarks.removeItem(mobile[0], SOURCE_SYNC); + PlacesUtils.bookmarks.removeItem(mobile[0]); } // Add the mobile bookmarks query if it doesn't exist else if (mobile.length == 0) { - let query = PlacesUtils.bookmarks.insertBookmark(all[0], queryURI, -1, title, /* guid */ null, SOURCE_SYNC); + let query = PlacesUtils.bookmarks.insertBookmark(all[0], queryURI, -1, title); PlacesUtils.annotations.setItemAnnotation(query, ORGANIZERQUERY_ANNO, MOBILE_ANNO, 0, - PlacesUtils.annotations.EXPIRE_NEVER, SOURCE_SYNC); - PlacesUtils.annotations.setItemAnnotation(query, PlacesUtils.EXCLUDE_FROM_BACKUP_ANNO, 1, 0, - PlacesUtils.annotations.EXPIRE_NEVER, SOURCE_SYNC); + PlacesUtils.annotations.EXPIRE_NEVER); + PlacesUtils.annotations.setItemAnnotation(query, EXCLUDEBACKUP_ANNO, 1, 0, + PlacesUtils.annotations.EXPIRE_NEVER); } - // Make sure the existing query URL and title are correct - else { - if (!PlacesUtils.bookmarks.getBookmarkURI(mobile[0]).equals(queryURI)) { - PlacesUtils.bookmarks.changeBookmarkURI(mobile[0], queryURI, - SOURCE_SYNC); - } - let queryTitle = PlacesUtils.bookmarks.getItemTitle(mobile[0]); - if (queryTitle != title) { - PlacesUtils.bookmarks.setItemTitle(mobile[0], title, SOURCE_SYNC); - } - let rootTitle = - PlacesUtils.bookmarks.getItemTitle(PlacesUtils.mobileFolderId); - if (rootTitle != title) { - PlacesUtils.bookmarks.setItemTitle(PlacesUtils.mobileFolderId, title, - SOURCE_SYNC); - } + // Make sure the existing title is correct + else if (PlacesUtils.bookmarks.getItemTitle(mobile[0]) != title) { + PlacesUtils.bookmarks.setItemTitle(mobile[0], title); } + + this.ignoreAll = false; }, // This method is oddly structured, but the idea is to return as quickly as @@ -1306,11 +1500,10 @@ BookmarksTracker.prototype = { // *each change*. 
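The _ignore chain restored earlier in this hunk makes the tracker's policy explicit: respect ignoreAll during bulk operations, never track tag folders or the per-bookmark tag instances beneath them, and drop anything annotated as excluded from backup. Boiled down below, with kSpecialIds and EXCLUDEBACKUP_ANNO as defined elsewhere in this file (the real method also clears any pending change for excluded items):

    // Sketch only: the decision chain for skipping a bookmark notification.
    function shouldIgnoreChange(tracker, itemId, folderId) {
      if (tracker.ignoreAll) {
        return true;                        // Bulk operation in progress.
      }
      let tags = kSpecialIds.tags;
      if (folderId == tags) {
        return true;                        // A tag folder itself.
      }
      if (PlacesUtils.bookmarks.getFolderIdForItem(folderId) == tags) {
        return true;                        // A tag instance on some bookmark.
      }
      // Items excluded from backup (e.g. left-pane queries) are not synced.
      return PlacesUtils.annotations.itemHasAnnotation(itemId, EXCLUDEBACKUP_ANNO);
    }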
onItemChanged: function BMT_onItemChanged(itemId, property, isAnno, value, lastModified, itemType, parentId, - guid, parentGuid, oldValue, - source) { - if (IGNORED_SOURCES.includes(source)) { + guid, parentGuid) { + // Quicker checks first. + if (this.ignoreAll) return; - } if (isAnno && (ANNOS_TO_TRACK.indexOf(property) == -1)) // Ignore annotations except for the ones that we sync. @@ -1320,6 +1513,9 @@ BookmarksTracker.prototype = { if (property == "favicon") return; + if (this._ignore(itemId, parentId, guid)) + return; + this._log.trace("onItemChanged: " + itemId + (", " + property + (isAnno? " (anno)" : "")) + (value ? (" = \"" + value + "\"") : "")); @@ -1328,11 +1524,9 @@ BookmarksTracker.prototype = { onItemMoved: function BMT_onItemMoved(itemId, oldParent, oldIndex, newParent, newIndex, itemType, - guid, oldParentGuid, newParentGuid, - source) { - if (IGNORED_SOURCES.includes(source)) { + guid, oldParentGuid, newParentGuid) { + if (this._ignore(itemId, newParent, guid)) return; - } this._log.trace("onItemMoved: " + itemId); this._add(oldParent, oldParentGuid); @@ -1342,37 +1536,10 @@ BookmarksTracker.prototype = { } // Remove any position annotations now that the user moved the item - PlacesUtils.annotations.removeItemAnnotation(itemId, - PlacesSyncUtils.bookmarks.SYNC_PARENT_ANNO, SOURCE_SYNC); + PlacesUtils.annotations.removeItemAnnotation(itemId, PARENT_ANNO); }, - onBeginUpdateBatch: function () { - ++this._batchDepth; - }, - onEndUpdateBatch: function () { - if (--this._batchDepth === 0 && this._batchSawScoreIncrement) { - this.score += SCORE_INCREMENT_XLARGE; - this._batchSawScoreIncrement = false; - } - }, + onBeginUpdateBatch: function () {}, + onEndUpdateBatch: function () {}, onItemVisited: function () {} }; - -// Returns an array of root IDs to recursively query for synced bookmarks. -// Items in other roots, including tags and organizer queries, will be -// ignored. -function getChangeRootIds() { - return [ - PlacesUtils.bookmarksMenuFolderId, - PlacesUtils.toolbarFolderId, - PlacesUtils.unfiledBookmarksFolderId, - PlacesUtils.mobileFolderId, - ]; -} - -class BookmarksChangeset extends Changeset { - getModifiedTimestamp(id) { - let change = this.changes[id]; - return change ? change.modified : Number.NaN; - } -} diff --git a/services/sync/modules/engines/clients.js b/services/sync/modules/engines/clients.js index 3dd679570..f423242c9 100644 --- a/services/sync/modules/engines/clients.js +++ b/services/sync/modules/engines/clients.js @@ -2,57 +2,24 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ -/** - * How does the clients engine work? - * - * - We use 2 files - commands.json and commands-syncing.json. - * - * - At sync upload time, we attempt a rename of commands.json to - * commands-syncing.json, and ignore errors (helps for crash during sync!). - * - We load commands-syncing.json and stash the contents in - * _currentlySyncingCommands which lives for the duration of the upload process. - * - We use _currentlySyncingCommands to build the outgoing records - * - Immediately after successful upload, we delete commands-syncing.json from - * disk (and clear _currentlySyncingCommands). We reconcile our local records - * with what we just wrote in the server, and add failed IDs commands - * back in commands.json - * - Any time we need to "save" a command for future syncs, we load - * commands.json, update it, and write it back out. 
- */ - this.EXPORTED_SYMBOLS = [ "ClientEngine", "ClientsRec" ]; -var {classes: Cc, interfaces: Ci, utils: Cu} = Components; +const {classes: Cc, interfaces: Ci, utils: Cu} = Components; -Cu.import("resource://services-common/async.js"); Cu.import("resource://services-common/stringbundle.js"); Cu.import("resource://services-sync/constants.js"); Cu.import("resource://services-sync/engines.js"); Cu.import("resource://services-sync/record.js"); -Cu.import("resource://services-sync/resource.js"); Cu.import("resource://services-sync/util.js"); -Cu.import("resource://gre/modules/Services.jsm"); - -XPCOMUtils.defineLazyModuleGetter(this, "fxAccounts", - "resource://gre/modules/FxAccounts.jsm"); const CLIENTS_TTL = 1814400; // 21 days const CLIENTS_TTL_REFRESH = 604800; // 7 days -const STALE_CLIENT_REMOTE_AGE = 604800; // 7 days const SUPPORTED_PROTOCOL_VERSIONS = ["1.1", "1.5"]; -function hasDupeCommand(commands, action) { - if (!commands) { - return false; - } - return commands.some(other => other.command == action.command && - Utils.deepEquals(other.args, action.args)); -} - this.ClientsRec = function ClientsRec(collection, id) { CryptoWrapper.call(this, collection, id); } @@ -66,27 +33,23 @@ Utils.deferGetSet(ClientsRec, "cleartext", ["name", "type", "commands", "version", "protocols", - "formfactor", "os", "appPackage", "application", "device", - "fxaDeviceId"]); + "formfactor", "os", "appPackage", "application", "device"]); this.ClientEngine = function ClientEngine(service) { SyncEngine.call(this, "Clients", service); - // Reset the last sync timestamp on every startup so that we fetch all clients - this.resetLastSync(); + // Reset the client on every startup so that we fetch recent clients + this._resetClient(); } ClientEngine.prototype = { __proto__: SyncEngine.prototype, _storeObj: ClientStore, _recordObj: ClientsRec, _trackerObj: ClientsTracker, - allowSkippedRecord: false, // Always sync client data as it controls other sync behavior - get enabled() { - return true; - }, + get enabled() true, get lastRecordUpload() { return Svc.Prefs.get(this.name + ".lastRecordUpload", 0); @@ -95,31 +58,18 @@ ClientEngine.prototype = { Svc.Prefs.set(this.name + ".lastRecordUpload", Math.floor(value)); }, - get remoteClients() { - // return all non-stale clients for external consumption. - return Object.values(this._store._remoteClients).filter(v => !v.stale); - }, - - remoteClientExists(id) { - let client = this._store._remoteClients[id]; - return !!(client && !client.stale); - }, - // Aggregate some stats on the composition of clients on this account get stats() { let stats = { - hasMobile: this.localType == DEVICE_TYPE_MOBILE, + hasMobile: this.localType == "mobile", names: [this.localName], numClients: 1, }; - for (let id in this._store._remoteClients) { - let {name, type, stale} = this._store._remoteClients[id]; - if (!stale) { - stats.hasMobile = stats.hasMobile || type == DEVICE_TYPE_MOBILE; - stats.names.push(name); - stats.numClients++; - } + for each (let {name, type} in this._store._remoteClients) { + stats.hasMobile = stats.hasMobile || type == "mobile"; + stats.names.push(name); + stats.numClients++; } return stats; @@ -135,11 +85,7 @@ ClientEngine.prototype = { counts.set(this.localType, 1); - for (let id in this._store._remoteClients) { - let record = this._store._remoteClients[id]; - if (record.stale) { - continue; // pretend "stale" records don't exist. 
- } + for each (let record in this._store._remoteClients) { let type = record.type; if (!counts.has(type)) { counts.set(type, 0); @@ -156,9 +102,7 @@ ClientEngine.prototype = { let localID = Svc.Prefs.get("client.GUID", ""); return localID == "" ? this.localID = Utils.makeGUID() : localID; }, - set localID(value) { - Svc.Prefs.set("client.GUID", value); - }, + set localID(value) Svc.Prefs.set("client.GUID", value), get brandName() { let brand = new StringBundle("chrome://branding/locale/brand.properties"); @@ -166,97 +110,23 @@ ClientEngine.prototype = { }, get localName() { - let name = Utils.getDeviceName(); - // If `getDeviceName` returns the default name, set the pref. FxA registers - // the device before syncing, so we don't need to update the registration - // in this case. - Svc.Prefs.set("client.name", name); - return name; - }, - set localName(value) { - Svc.Prefs.set("client.name", value); - // Update the registration in the background. - fxAccounts.updateDeviceRegistration().catch(error => { - this._log.warn("failed to update fxa device registration", error); - }); - }, + let localName = Svc.Prefs.get("client.name", ""); + if (localName != "") + return localName; - get localType() { - return Utils.getDeviceType(); - }, - set localType(value) { - Svc.Prefs.set("client.type", value); + return this.localName = Utils.getDefaultDeviceName(); }, + set localName(value) Svc.Prefs.set("client.name", value), - getClientName(id) { - if (id == this.localID) { - return this.localName; - } - let client = this._store._remoteClients[id]; - return client ? client.name : ""; - }, - - getClientFxaDeviceId(id) { - if (this._store._remoteClients[id]) { - return this._store._remoteClients[id].fxaDeviceId; - } - return null; - }, + get localType() Svc.Prefs.get("client.type", "desktop"), + set localType(value) Svc.Prefs.set("client.type", value), isMobile: function isMobile(id) { if (this._store._remoteClients[id]) - return this._store._remoteClients[id].type == DEVICE_TYPE_MOBILE; + return this._store._remoteClients[id].type == "mobile"; return false; }, - _readCommands() { - let cb = Async.makeSpinningCallback(); - Utils.jsonLoad("commands", this, commands => cb(null, commands)); - return cb.wait() || {}; - }, - - /** - * Low level function, do not use directly (use _addClientCommand instead). - */ - _saveCommands(commands) { - let cb = Async.makeSpinningCallback(); - Utils.jsonSave("commands", this, commands, error => { - if (error) { - this._log.error("Failed to save JSON outgoing commands", error); - } - cb(); - }); - cb.wait(); - }, - - _prepareCommandsForUpload() { - let cb = Async.makeSpinningCallback(); - Utils.jsonMove("commands", "commands-syncing", this).catch(() => {}) // Ignore errors - .then(() => { - Utils.jsonLoad("commands-syncing", this, commands => cb(null, commands)); - }); - return cb.wait() || {}; - }, - - _deleteUploadedCommands() { - delete this._currentlySyncingCommands; - Async.promiseSpinningly( - Utils.jsonRemove("commands-syncing", this).catch(err => { - this._log.error("Failed to delete syncing-commands file", err); - }) - ); - }, - - _addClientCommand(clientId, command) { - const allCommands = this._readCommands(); - const clientCommands = allCommands[clientId] || []; - if (hasDupeCommand(clientCommands, command)) { - return; - } - allCommands[clientId] = clientCommands.concat(command); - this._saveCommands(allCommands); - }, - _syncStartup: function _syncStartup() { // Reupload new client record periodically. 
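The file-backed command queue stripped out above (commands.json plus its commands-syncing.json twin) hinged on one invariant: never queue the same command twice for a client. Its duplicate check, reproduced from the removed lines as a standalone sketch (addClientCommand here operates on a plain object rather than the engine's JSON file):

    // Sketch only: the duplicate-command guard used by the removed queue.
    function hasDupeCommand(commands, action) {
      if (!commands) {
        return false;
      }
      return commands.some(other => other.command == action.command &&
                                    Utils.deepEquals(other.args, action.args));
    }

    // Queue a command for a client unless an identical one is already pending.
    function addClientCommand(allCommands, clientId, command) {
      let clientCommands = allCommands[clientId] || [];
      if (hasDupeCommand(clientCommands, command)) {
        return allCommands;
      }
      allCommands[clientId] = clientCommands.concat(command);
      return allCommands;
    }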
if (Date.now() / 1000 - this.lastRecordUpload > CLIENTS_TTL_REFRESH) { @@ -266,157 +136,9 @@ ClientEngine.prototype = { SyncEngine.prototype._syncStartup.call(this); }, - _processIncoming() { - // Fetch all records from the server. - this.lastSync = 0; - this._incomingClients = {}; - try { - SyncEngine.prototype._processIncoming.call(this); - // Since clients are synced unconditionally, any records in the local store - // that don't exist on the server must be for disconnected clients. Remove - // them, so that we don't upload records with commands for clients that will - // never see them. We also do this to filter out stale clients from the - // tabs collection, since showing their list of tabs is confusing. - for (let id in this._store._remoteClients) { - if (!this._incomingClients[id]) { - this._log.info(`Removing local state for deleted client ${id}`); - this._removeRemoteClient(id); - } - } - // Bug 1264498: Mobile clients don't remove themselves from the clients - // collection when the user disconnects Sync, so we mark as stale clients - // with the same name that haven't synced in over a week. - // (Note we can't simply delete them, or we re-apply them next sync - see - // bug 1287687) - delete this._incomingClients[this.localID]; - let names = new Set([this.localName]); - for (let id in this._incomingClients) { - let record = this._store._remoteClients[id]; - if (!names.has(record.name)) { - names.add(record.name); - continue; - } - let remoteAge = AsyncResource.serverTime - this._incomingClients[id]; - if (remoteAge > STALE_CLIENT_REMOTE_AGE) { - this._log.info(`Hiding stale client ${id} with age ${remoteAge}`); - record.stale = true; - } - } - } finally { - this._incomingClients = null; - } - }, - - _uploadOutgoing() { - this._currentlySyncingCommands = this._prepareCommandsForUpload(); - const clientWithPendingCommands = Object.keys(this._currentlySyncingCommands); - for (let clientId of clientWithPendingCommands) { - if (this._store._remoteClients[clientId] || this.localID == clientId) { - this._modified.set(clientId, 0); - } - } - SyncEngine.prototype._uploadOutgoing.call(this); - }, - - _onRecordsWritten(succeeded, failed) { - // Reconcile the status of the local records with what we just wrote on the - // server - for (let id of succeeded) { - const commandChanges = this._currentlySyncingCommands[id]; - if (id == this.localID) { - if (this.localCommands) { - this.localCommands = this.localCommands.filter(command => !hasDupeCommand(commandChanges, command)); - } - } else { - const clientRecord = this._store._remoteClients[id]; - if (!commandChanges || !clientRecord) { - // should be impossible, else we wouldn't have been writing it. - this._log.warn("No command/No record changes for a client we uploaded"); - continue; - } - // fixup the client record, so our copy of _remoteClients matches what we uploaded. - clientRecord.commands = this._store.createRecord(id); - // we could do better and pass the reference to the record we just uploaded, - // but this will do for now - } - } - - // Re-add failed commands - for (let id of failed) { - const commandChanges = this._currentlySyncingCommands[id]; - if (!commandChanges) { - continue; - } - this._addClientCommand(id, commandChanges); - } - - this._deleteUploadedCommands(); - - // Notify other devices that their own client collection changed - const idsToNotify = succeeded.reduce((acc, id) => { - if (id == this.localID) { - return acc; - } - const fxaDeviceId = this.getClientFxaDeviceId(id); - return fxaDeviceId ? 
acc.concat(fxaDeviceId) : acc; - }, []); - if (idsToNotify.length > 0) { - this._notifyCollectionChanged(idsToNotify); - } - }, - - _notifyCollectionChanged(ids) { - const message = { - version: 1, - command: "sync:collection_changed", - data: { - collections: ["clients"] - } - }; - fxAccounts.notifyDevices(ids, message, NOTIFY_TAB_SENT_TTL_SECS); - }, - - _syncFinish() { - // Record histograms for our device types, and also write them to a pref - // so non-histogram telemetry (eg, UITelemetry) has easy access to them. - for (let [deviceType, count] of this.deviceTypes) { - let hid; - let prefName = this.name + ".devices."; - switch (deviceType) { - case "desktop": - hid = "WEAVE_DEVICE_COUNT_DESKTOP"; - prefName += "desktop"; - break; - case "mobile": - hid = "WEAVE_DEVICE_COUNT_MOBILE"; - prefName += "mobile"; - break; - default: - this._log.warn(`Unexpected deviceType "${deviceType}" recording device telemetry.`); - continue; - } - Services.telemetry.getHistogramById(hid).add(count); - Svc.Prefs.set(prefName, count); - } - SyncEngine.prototype._syncFinish.call(this); - }, - - _reconcile: function _reconcile(item) { - // Every incoming record is reconciled, so we use this to track the - // contents of the collection on the server. - this._incomingClients[item.id] = item.modified; - - if (!this._store.itemExists(item.id)) { - return true; - } - // Clients are synced unconditionally, so we'll always have new records. - // Unfortunately, this will cause the scheduler to use the immediate sync - // interval for the multi-device case, instead of the active interval. We - // work around this by updating the record during reconciliation, and - // returning false to indicate that the record doesn't need to be applied - // later. - this._store.update(item); - return false; + // Always process incoming items because they might have commands + _reconcile: function _reconcile() { + return true; }, // Treat reset the same as wiping for locally cached clients @@ -426,13 +148,7 @@ ClientEngine.prototype = { _wipeClient: function _wipeClient() { SyncEngine.prototype._resetClient.call(this); - delete this.localCommands; this._store.wipe(); - const logRemoveError = err => this._log.warn("Could not delete json file", err); - Async.promiseSpinningly( - Utils.jsonRemove("commands", this).catch(logRemoveError) - .then(Utils.jsonRemove("commands-syncing", this).catch(logRemoveError)) - ); }, removeClientData: function removeClientData() { @@ -471,6 +187,14 @@ ClientEngine.prototype = { }, /** + * Remove any commands for the local client and mark it for upload. + */ + clearCommands: function clearCommands() { + delete this.localCommands; + this._tracker.addChangedID(this.localID); + }, + + /** * Sends a command+args pair to a specific client. * * @param command Command string @@ -484,17 +208,30 @@ ClientEngine.prototype = { if (!client) { throw new Error("Unknown remote client ID: '" + clientId + "'."); } - if (client.stale) { - throw new Error("Stale remote client ID: '" + clientId + "'."); - } + + // notDupe compares two commands and returns if they are not equal. + let notDupe = function(other) { + return other.command != command || !Utils.deepEquals(other.args, args); + }; let action = { command: command, args: args, }; + if (!client.commands) { + client.commands = [action]; + } + // Add the new action if there are no duplicates. + else if (client.commands.every(notDupe)) { + client.commands.push(action); + } + // It must be a dupe. Skip. 
+ else { + return; + } + this._log.trace("Client " + clientId + " got a new action: " + [command, args]); - this._addClientCommand(clientId, action); this._tracker.addChangedID(clientId); }, @@ -505,17 +242,13 @@ ClientEngine.prototype = { */ processIncomingCommands: function processIncomingCommands() { return this._notify("clients:process-commands", "", function() { - if (!this.localCommands) { - return true; - } + let commands = this.localCommands; - const clearedCommands = this._readCommands()[this.localID]; - const commands = this.localCommands.filter(command => !hasDupeCommand(clearedCommands, command)); + // Immediately clear out the commands as we've got them locally. + this.clearCommands(); - let URIsToDisplay = []; // Process each command in order. - for (let rawCommand of commands) { - let {command, args} = rawCommand; + for each (let {command, args} in commands) { this._log.debug("Processing command: " + command + "(" + args + ")"); let engines = [args[0]]; @@ -536,20 +269,12 @@ ClientEngine.prototype = { this.service.logout(); return false; case "displayURI": - let [uri, clientId, title] = args; - URIsToDisplay.push({ uri, clientId, title }); + this._handleDisplayURI.apply(this, args); break; default: this._log.debug("Received an unknown command: " + command); break; } - // Add the command to the "cleared" commands list - this._addClientCommand(this.localID, rawCommand) - } - this._tracker.addChangedID(this.localID); - - if (URIsToDisplay.length) { - this._handleDisplayURIs(URIsToDisplay); } return true; @@ -588,10 +313,8 @@ ClientEngine.prototype = { if (clientId) { this._sendCommandToClient(command, args, clientId); } else { - for (let [id, record] of Object.entries(this._store._remoteClients)) { - if (!record.stale) { - this._sendCommandToClient(command, args, id); - } + for (let id in this._store._remoteClients) { + this._sendCommandToClient(command, args, id); } } }, @@ -622,11 +345,11 @@ ClientEngine.prototype = { }, /** - * Handle a bunch of received 'displayURI' commands. + * Handle a single received 'displayURI' command. * - * Interested parties should observe the "weave:engine:clients:display-uris" - * topic. The callback will receive an array as the subject parameter - * containing objects with the following keys: + * Interested parties should observe the "weave:engine:clients:display-uri" + * topic. The callback will receive an object as the subject parameter with + * the following keys: * * uri URI (string) that is requested for display. * clientId ID of client that sent the command. @@ -634,24 +357,21 @@ ClientEngine.prototype = { * * The 'data' parameter to the callback will not be defined. * - * @param uris - * An array containing URI objects to display - * @param uris[].uri + * @param uri * String URI that was received - * @param uris[].clientId + * @param clientId * ID of client that sent URI - * @param uris[].title + * @param title * String title of page that URI corresponds to. Older clients may not * send this. 
*/
- _handleDisplayURIs: function _handleDisplayURIs(uris) {
- Svc.Obs.notify("weave:engine:clients:display-uris", uris);
- },
+ _handleDisplayURI: function _handleDisplayURI(uri, clientId, title) {
+ this._log.info("Received a URI for display: " + uri + " (" + title +
+ ") from " + clientId);
- _removeRemoteClient(id) {
- delete this._store._remoteClients[id];
- this._tracker.removeChangedID(id);
- },
+ let subject = {uri: uri, client: clientId, title: title};
+ Svc.Obs.notify("weave:engine:clients:display-uri", subject);
+ }
};
function ClientStore(name, engine) {
@@ -660,48 +380,29 @@ ClientStore.prototype = {
__proto__: Store.prototype,
- _remoteClients: {},
-
create(record) {
- this.update(record);
+ this.update(record)
},
update: function update(record) {
- if (record.id == this.engine.localID) {
- // Only grab commands from the server; local name/type always wins
+ // Only grab commands from the server; local name/type always wins
+ if (record.id == this.engine.localID)
this.engine.localCommands = record.commands;
- } else {
+ else
this._remoteClients[record.id] = record.cleartext;
- }
},
createRecord: function createRecord(id, collection) {
let record = new ClientsRec(collection, id);
- const commandsChanges = this.engine._currentlySyncingCommands ?
- this.engine._currentlySyncingCommands[id] :
- [];
-
// Package the individual components into a record for the local client
if (id == this.engine.localID) {
- let cb = Async.makeSpinningCallback();
- fxAccounts.getDeviceId().then(id => cb(null, id), cb);
- try {
- record.fxaDeviceId = cb.wait();
- } catch(error) {
- this._log.warn("failed to get fxa device id", error);
- }
record.name = this.engine.localName;
record.type = this.engine.localType;
+ record.commands = this.engine.localCommands;
record.version = Services.appinfo.version;
record.protocols = SUPPORTED_PROTOCOL_VERSIONS;
- // Substract the commands we recorded that we've already executed
- if (commandsChanges && commandsChanges.length &&
- this.engine.localCommands && this.engine.localCommands.length) {
- record.commands = this.engine.localCommands.filter(command => !hasDupeCommand(commandsChanges, command));
- }
-
// Optional fields.
record.os = Services.appinfo.OS; // "Darwin"
record.appPackage = Services.appinfo.ID;
@@ -712,20 +413,6 @@ ClientStore.prototype = {
// record.formfactor = ""; // Bug 1100722
} else {
record.cleartext = this._remoteClients[id];
-
- // Add the commands we have to send
- if (commandsChanges && commandsChanges.length) {
- const recordCommands = record.cleartext.commands || [];
- const newCommands = commandsChanges.filter(command => !hasDupeCommand(recordCommands, command));
- record.cleartext.commands = recordCommands.concat(newCommands);
- }
-
- if (record.cleartext.stale) {
- // It's almost certainly a logic error for us to upload a record we
- // consider stale, so make log noise, but still remove the flag.
- this._log.error(`Preparing to upload record ${id} that we consider stale`);
- delete record.cleartext.stale;
- }
}
return record;
@@ -768,7 +455,7 @@ ClientsTracker.prototype = {
break;
case "weave:engine:stop-tracking":
if (this._enabled) {
- Svc.Prefs.ignore("client.name", this);
+ Svc.Prefs.ignore("clients.name", this);
this._enabled = false;
}
break;
diff --git a/services/sync/modules/engines/forms.js b/services/sync/modules/engines/forms.js
index 43f79d4f7..d26d57176 100644
--- a/services/sync/modules/engines/forms.js
+++ b/services/sync/modules/engines/forms.js
@@ -2,11 +2,11 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-this.EXPORTED_SYMBOLS = ['FormEngine', 'FormRec', 'FormValidator'];
+this.EXPORTED_SYMBOLS = ['FormEngine', 'FormRec'];
-var Cc = Components.classes;
-var Ci = Components.interfaces;
-var Cu = Components.utils;
+const Cc = Components.classes;
+const Ci = Components.interfaces;
+const Cu = Components.utils;
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://services-sync/engines.js");
@@ -14,10 +14,9 @@
Cu.import("resource://services-sync/record.js");
Cu.import("resource://services-common/async.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://services-sync/constants.js");
-Cu.import("resource://services-sync/collection_validator.js");
Cu.import("resource://gre/modules/Log.jsm");
-const FORMS_TTL = 3 * 365 * 24 * 60 * 60; // Three years in seconds.
+const FORMS_TTL = 5184000; // 60 days
this.FormRec = function FormRec(collection, id) {
CryptoWrapper.call(this, collection, id);
@@ -31,30 +30,26 @@ FormRec.prototype = {
Utils.deferGetSet(FormRec, "cleartext", ["name", "value"]);
-var FormWrapper = {
+let FormWrapper = {
_log: Log.repository.getLogger("Sync.Engine.Forms"),
_getEntryCols: ["fieldname", "value"],
_guidCols: ["guid"],
- _promiseSearch: function(terms, searchData) {
- return new Promise(resolve => {
- let results = [];
- let callbacks = {
- handleResult(result) {
- results.push(result);
- },
- handleCompletion(reason) {
- resolve(results);
- }
- };
- Svc.FormHistory.search(terms, searchData, callbacks);
- })
- },
-
// Do a "sync" search by spinning the event loop until it completes.
_searchSpinningly: function(terms, searchData) {
- return Async.promiseSpinningly(this._promiseSearch(terms, searchData));
+ let results = [];
+ let cb = Async.makeSpinningCallback();
+ let callbacks = {
+ handleResult: function(result) {
+ results.push(result);
+ },
+ handleCompletion: function(reason) {
+ cb(null, results);
+ }
+ };
+ Svc.FormHistory.search(terms, searchData, callbacks);
+ return cb.wait();
},
_updateSpinningly: function(changes) {
@@ -114,9 +109,7 @@ FormEngine.prototype = {
syncPriority: 6,
- get prefName() {
- return "history";
- },
+ get prefName() "history",
_findDupe: function _findDupe(item) {
return FormWrapper.getGUID(item.name, item.value);
@@ -232,9 +225,7 @@ FormTracker.prototype = {
observe: function (subject, topic, data) {
Tracker.prototype.observe.call(this, subject, topic, data);
- if (this.ignoreAll) {
- return;
- }
+
switch (topic) {
case "satchel-storage-changed":
if (data == "formhistory-add" || data == "formhistory-remove") {
@@ -250,56 +241,3 @@ FormTracker.prototype = {
this.score += SCORE_INCREMENT_MEDIUM;
},
};
-
-
-class FormsProblemData extends CollectionProblemData {
- getSummary() {
- // We don't support syncing deleted form data, so "clientMissing" isn't a problem
- return super.getSummary().filter(entry =>
- entry.name !== "clientMissing");
- }
-}
-
-class FormValidator extends CollectionValidator {
- constructor() {
- super("forms", "id", ["name", "value"]);
- }
-
- emptyProblemData() {
- return new FormsProblemData();
- }
-
- getClientItems() {
- return FormWrapper._promiseSearch(["guid", "fieldname", "value"], {});
- }
-
- normalizeClientItem(item) {
- return {
- id: item.guid,
- guid: item.guid,
- name: item.fieldname,
- fieldname: item.fieldname,
- value: item.value,
- original: item,
- };
- }
-
- normalizeServerItem(item) {
- let res = Object.assign({
- guid: item.id,
- fieldname: item.name,
- original: item,
- }, item);
- // Missing `name` or `value` causes the getGUID call to throw
- if (item.name !== undefined && item.value !== undefined) {
- let guid = FormWrapper.getGUID(item.name, item.value);
- if (guid) {
- res.guid = guid;
- res.id = guid;
- res.duped = true;
- }
- }
-
- return res;
- }
-}
\ No newline at end of file
diff --git a/services/sync/modules/engines/history.js b/services/sync/modules/engines/history.js
index 307d484c1..99ecb4506 100644
--- a/services/sync/modules/engines/history.js
+++ b/services/sync/modules/engines/history.js
@@ -4,10 +4,10 @@
this.EXPORTED_SYMBOLS = ['HistoryEngine', 'HistoryRec'];
-var Cc = Components.classes;
-var Ci = Components.interfaces;
-var Cu = Components.utils;
-var Cr = Components.results;
+const Cc = Components.classes;
+const Ci = Components.interfaces;
+const Cu = Components.utils;
+const Cr = Components.results;
const HISTORY_TTL = 5184000; // 60 days
@@ -44,25 +44,6 @@ HistoryEngine.prototype = {
applyIncomingBatchSize: HISTORY_STORE_BATCH_SIZE,
syncPriority: 7,
-
- _processIncoming: function (newitems) {
- // We want to notify history observers that a batch operation is underway
- // so they don't do lots of work for each incoming record.
- let observers = PlacesUtils.history.getObservers();
- function notifyHistoryObservers(notification) {
- for (let observer of observers) {
- try {
- observer[notification]();
- } catch (ex) { }
- }
- }
- notifyHistoryObservers("onBeginUpdateBatch");
- try {
- return SyncEngine.prototype._processIncoming.call(this, newitems);
- } finally {
- notifyHistoryObservers("onEndUpdateBatch");
- }
- },
};
function HistoryStore(name, engine) {
@@ -70,8 +51,7 @@
// Explicitly nullify our references to our cached services so we don't leak
Svc.Obs.add("places-shutdown", function() {
- for (let query in this._stmts) {
- let stmt = this._stmts;
+ for each ([query, stmt] in Iterator(this._stmts)) {
stmt.finalize();
}
this._stmts = {};
@@ -105,7 +85,7 @@ HistoryStore.prototype = {
return this._getStmt(
"UPDATE moz_places " +
"SET guid = :guid " +
- "WHERE url_hash = hash(:page_url) AND url = :page_url");
+ "WHERE url = :page_url");
},
// Some helper functions to handle GUIDs
@@ -127,7 +107,7 @@
return this._getStmt(
"SELECT guid " +
"FROM moz_places " +
- "WHERE url_hash = hash(:page_url) AND url = :page_url");
+ "WHERE url = :page_url");
},
_guidCols: ["guid"],
@@ -146,12 +126,12 @@
},
get _visitStm() {
- return this._getStmt(`/* do not warn (bug 599936) */
- SELECT visit_type type, visit_date date
- FROM moz_historyvisits
- JOIN moz_places h ON h.id = place_id
- WHERE url_hash = hash(:url) AND url = :url
- ORDER BY date DESC LIMIT 20`);
+ return this._getStmt(
+ "/* do not warn (bug 599936) */ " +
+ "SELECT visit_type type, visit_date date " +
+ "FROM moz_historyvisits " +
+ "WHERE place_id = (SELECT id FROM moz_places WHERE url = :url) " +
+ "ORDER BY date DESC LIMIT 10");
},
_visitCols: ["date", "type"],
@@ -223,10 +203,7 @@
} else {
shouldApply = this._recordToPlaceInfo(record);
}
- } catch (ex) {
- if (Async.isShutdownException(ex)) {
- throw ex;
- }
+ } catch(ex) {
failed.push(record.id);
shouldApply = false;
}
@@ -299,14 +276,14 @@
if (!visit.date || typeof visit.date != "number") {
this._log.warn("Encountered record with invalid visit date: " +
visit.date);
- continue;
+ throw "Visit has no date!";
}
- if (!visit.type ||
- !Object.values(PlacesUtils.history.TRANSITIONS).includes(visit.type)) {
- this._log.warn("Encountered record with invalid visit type: " +
- visit.type + "; ignoring.");
- continue;
+ if (!visit.type || !(visit.type >= PlacesUtils.history.TRANSITION_LINK &&
+ visit.type <= PlacesUtils.history.TRANSITION_FRAMED_LINK)) {
+ this._log.warn("Encountered record with invalid visit type: " +
+ visit.type);
+ throw "Invalid visit type!";
}
// Dates need to be integers.
@@ -317,7 +294,6 @@
// overwritten.
continue;
}
-
visit.visitDate = visit.date;
visit.transitionType = visit.type;
k += 1;
@@ -369,9 +345,7 @@
},
wipe: function HistStore_wipe() {
- let cb = Async.makeSyncCallback();
- PlacesUtils.history.clear().then(result => {cb(null, result)}, err => {cb(err)});
- return Async.waitForSyncCallback(cb);
+ PlacesUtils.history.removeAllPages();
}
};
diff --git a/services/sync/modules/engines/passwords.js b/services/sync/modules/engines/passwords.js
index 51db49a0a..994b59767 100644
--- a/services/sync/modules/engines/passwords.js
+++ b/services/sync/modules/engines/passwords.js
@@ -2,16 +2,14 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-this.EXPORTED_SYMBOLS = ['PasswordEngine', 'LoginRec', 'PasswordValidator'];
+this.EXPORTED_SYMBOLS = ['PasswordEngine', 'LoginRec'];
-var {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
Cu.import("resource://services-sync/record.js");
Cu.import("resource://services-sync/constants.js");
-Cu.import("resource://services-sync/collection_validator.js");
Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/util.js");
-Cu.import("resource://services-common/async.js");
this.LoginRec = function LoginRec(collection, id) {
CryptoWrapper.call(this, collection, id);
@@ -24,7 +22,6 @@ LoginRec.prototype = {
Utils.deferGetSet(LoginRec, "cleartext", [
"hostname", "formSubmitURL",
"httpRealm", "username", "password", "usernameField", "passwordField",
- "timeCreated", "timePasswordChanged",
]);
@@ -70,10 +67,7 @@ PasswordEngine.prototype = {
Svc.Prefs.set("deletePwdFxA", true);
Svc.Prefs.reset("deletePwd"); // The old prefname we previously used.
} catch (ex) {
- if (Async.isShutdownException(ex)) {
- throw ex;
- }
- this._log.debug("Password deletes failed", ex);
+ this._log.debug("Password deletes failed: " + Utils.exceptionStr(ex));
}
}
},
@@ -89,7 +83,7 @@
this._store._sleep(0); // Yield back to main thread after synchronous operation.
// Look for existing logins that match the hostname, but ignore the password.
- for (let local of logins) {
+ for each (let local in logins) {
if (login.matches(local, true) && local instanceof Ci.nsILoginMetaInfo) {
return local.guid;
}
@@ -104,13 +98,6 @@
function PasswordStore(name, engine) {
PasswordStore.prototype = {
__proto__: Store.prototype,
- _newPropertyBag: function () {
- return Cc["@mozilla.org/hash-property-bag;1"].createInstance(Ci.nsIWritablePropertyBag2);
- },
-
- /**
- * Return an instance of nsILoginInfo (and, implicitly, nsILoginMetaInfo).
- */
_nsLoginInfoFromRecord: function (record) {
function nullUndefined(x) { return (x == undefined) ? null : x;
@@ -131,21 +118,13 @@
record.password,
record.usernameField,
record.passwordField);
- info.QueryInterface(Ci.nsILoginMetaInfo);
info.guid = record.id;
- if (record.timeCreated) {
- info.timeCreated = record.timeCreated;
- }
- if (record.timePasswordChanged) {
- info.timePasswordChanged = record.timePasswordChanged;
- }
-
return info;
},
_getLoginFromGUID: function (id) {
- let prop = this._newPropertyBag();
+ let prop = Cc["@mozilla.org/hash-property-bag;1"].createInstance(Ci.nsIWritablePropertyBag2);
prop.setPropertyAsAUTF8String("guid", id);
let logins = Services.logins.searchLogins({}, prop);
@@ -190,7 +169,8 @@
return;
}
- let prop = this._newPropertyBag();
+ let prop = Cc["@mozilla.org/hash-property-bag;1"]
+ .createInstance(Ci.nsIWritablePropertyBag2);
prop.setPropertyAsAUTF8String("guid", newID);
Services.logins.modifyLogin(oldLogin, prop);
@@ -217,11 +197,6 @@
record.usernameField = login.usernameField;
record.passwordField = login.passwordField;
- // Optional fields.
- login.QueryInterface(Ci.nsILoginMetaInfo);
- record.timeCreated = login.timeCreated;
- record.timePasswordChanged = login.timePasswordChanged;
-
return record;
},
@@ -237,7 +212,8 @@
try {
Services.logins.addLogin(login);
} catch(ex) {
- this._log.debug(`Adding record ${record.id} resulted in exception`, ex);
+ this._log.debug("Adding record " + record.id +
+ " resulted in exception " + Utils.exceptionStr(ex));
}
},
@@ -269,7 +245,9 @@
try {
Services.logins.modifyLogin(loginItem, newinfo);
} catch(ex) {
- this._log.debug(`Modifying record ${record.id} resulted in exception; not modifying`, ex);
+ this._log.debug("Modifying record " + record.id +
+ " resulted in exception " + Utils.exceptionStr(ex) +
+ ". Not modifying.");
}
},
@@ -326,46 +304,3 @@ PasswordTracker.prototype = {
}
},
};
-
-class PasswordValidator extends CollectionValidator {
- constructor() {
- super("passwords", "id", [
- "hostname",
- "formSubmitURL",
- "httpRealm",
- "password",
- "passwordField",
- "username",
- "usernameField",
- ]);
- }
-
- getClientItems() {
- let logins = Services.logins.getAllLogins({});
- let syncHosts = Utils.getSyncCredentialsHosts()
- let result = logins.map(l => l.QueryInterface(Ci.nsILoginMetaInfo))
- .filter(l => !syncHosts.has(l.hostname));
- return Promise.resolve(result);
- }
-
- normalizeClientItem(item) {
- return {
- id: item.guid,
- guid: item.guid,
- hostname: item.hostname,
- formSubmitURL: item.formSubmitURL,
- httpRealm: item.httpRealm,
- password: item.password,
- passwordField: item.passwordField,
- username: item.username,
- usernameField: item.usernameField,
- original: item,
- }
- }
-
- normalizeServerItem(item) {
- return Object.assign({ guid: item.id }, item);
- }
-}
-
-
diff --git a/services/sync/modules/engines/prefs.js b/services/sync/modules/engines/prefs.js
index 9ceeb9ac6..82091d5b4 100644
--- a/services/sync/modules/engines/prefs.js
+++ b/services/sync/modules/engines/prefs.js
@@ -4,11 +4,11 @@
this.EXPORTED_SYMBOLS = ['PrefsEngine', 'PrefRec'];
-var Cc = Components.classes;
-var Ci = Components.interfaces;
-var Cu = Components.utils;
+const Cc = Components.classes;
+const Ci = Components.interfaces;
+const Cu = Components.utils;
-const PREF_SYNC_PREFS_PREFIX = "services.sync.prefs.sync.";
+const SYNC_PREFS_PREFIX = "services.sync.prefs.sync.";
Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/record.js");
@@ -42,7 +42,6 @@ PrefsEngine.prototype = {
version: 2,
syncPriority: 1,
- allowSkippedRecord: false,
getChangedIDs: function () {
// No need for a proper timestamp (no conflict resolution needed).
@@ -88,45 +87,37 @@ PrefStore.prototype = {
_getSyncPrefs: function () {
let syncPrefs = Cc["@mozilla.org/preferences-service;1"]
.getService(Ci.nsIPrefService)
- .getBranch(PREF_SYNC_PREFS_PREFIX)
+ .getBranch(SYNC_PREFS_PREFIX)
.getChildList("", {});
// Also sync preferences that determine which prefs get synced.
- let controlPrefs = syncPrefs.map(pref => PREF_SYNC_PREFS_PREFIX + pref);
+ let controlPrefs = syncPrefs.map(pref => SYNC_PREFS_PREFIX + pref);
return controlPrefs.concat(syncPrefs);
},
_isSynced: function (pref) {
- return pref.startsWith(PREF_SYNC_PREFS_PREFIX) ||
- this._prefs.get(PREF_SYNC_PREFS_PREFIX + pref, false);
+ return pref.startsWith(SYNC_PREFS_PREFIX) ||
+ this._prefs.get(SYNC_PREFS_PREFIX + pref, false);
},
_getAllPrefs: function () {
let values = {};
- for (let pref of this._getSyncPrefs()) {
+ for each (let pref in this._getSyncPrefs()) {
if (this._isSynced(pref)) {
- // Missing and default prefs get the null value.
- values[pref] = this._prefs.isSet(pref) ? this._prefs.get(pref, null) : null;
+ // Missing prefs get the null value.
+ values[pref] = this._prefs.get(pref, null);
}
}
return values;
},
- _updateLightWeightTheme (themeID) {
- let themeObject = null;
- if (themeID) {
- themeObject = LightweightThemeManager.getUsedTheme(themeID);
- }
- LightweightThemeManager.currentTheme = themeObject;
- },
-
_setAllPrefs: function (values) {
- let selectedThemeIDPref = "lightweightThemes.selectedThemeID";
- let selectedThemeIDBefore = this._prefs.get(selectedThemeIDPref, null);
- let selectedThemeIDAfter = selectedThemeIDBefore;
+ let enabledPref = "lightweightThemes.isThemeSelected";
+ let enabledBefore = this._prefs.get(enabledPref, false);
+ let prevTheme = LightweightThemeManager.currentTheme;
// Update 'services.sync.prefs.sync.foo.pref' before 'foo.pref', otherwise
// _isSynced returns false when 'foo.pref' doesn't exist (e.g., on a new device).
- let prefs = Object.keys(values).sort(a => -a.indexOf(PREF_SYNC_PREFS_PREFIX));
+ let prefs = Object.keys(values).sort(a => -a.indexOf(SYNC_PREFS_PREFIX));
for (let pref of prefs) {
if (!this._isSynced(pref)) {
continue;
@@ -134,30 +125,26 @@
let value = values[pref];
- switch (pref) {
- // Some special prefs we don't want to set directly.
- case selectedThemeIDPref:
- selectedThemeIDAfter = value;
- break;
-
- // default is to just set the pref
- default:
- if (value == null) {
- // Pref has gone missing. The best we can do is reset it.
- this._prefs.reset(pref);
- } else {
- try {
- this._prefs.set(pref, value);
- } catch(ex) {
- this._log.trace("Failed to set pref: " + pref + ": " + ex);
- }
- }
+ // Pref has gone missing. The best we can do is reset it.
+ if (value == null) {
+ this._prefs.reset(pref);
+ continue;
}
+
+ try {
+ this._prefs.set(pref, value);
+ } catch(ex) {
+ this._log.trace("Failed to set pref: " + pref + ": " + ex);
+ }
}
- // Notify the lightweight theme manager if the selected theme has changed.
- if (selectedThemeIDBefore != selectedThemeIDAfter) {
- this._updateLightWeightTheme(selectedThemeIDAfter);
+ // Notify the lightweight theme manager of all the new values
+ let enabledNow = this._prefs.get(enabledPref, false);
+ if (enabledBefore && !enabledNow) {
+ LightweightThemeManager.currentTheme = null;
+ } else if (enabledNow && LightweightThemeManager.usedThemes[0] != prevTheme) {
+ LightweightThemeManager.currentTheme = null;
+ LightweightThemeManager.currentTheme = LightweightThemeManager.usedThemes[0];
}
},
@@ -261,8 +248,8 @@ PrefTracker.prototype = {
case "nsPref:changed":
// Trigger a sync for MULTI-DEVICE for a change that determines
// which prefs are synced or a regular pref change.
- if (data.indexOf(PREF_SYNC_PREFS_PREFIX) == 0 ||
- this._prefs.get(PREF_SYNC_PREFS_PREFIX + data, false)) {
+ if (data.indexOf(SYNC_PREFS_PREFIX) == 0 ||
+ this._prefs.get(SYNC_PREFS_PREFIX + data, false)) {
this.score += SCORE_INCREMENT_XLARGE;
this.modified = true;
this._log.trace("Preference " + data + " changed");
diff --git a/services/sync/modules/engines/tabs.js b/services/sync/modules/engines/tabs.js
index 45ece4a23..1fce737d2 100644
--- a/services/sync/modules/engines/tabs.js
+++ b/services/sync/modules/engines/tabs.js
@@ -4,7 +4,7 @@
this.EXPORTED_SYMBOLS = ["TabEngine", "TabSetRecord"];
-var {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
const TABS_TTL = 604800; // 7 days.
const TAB_ENTRIES_LIMIT = 25; // How many URLs to include in tab history.
@@ -43,11 +43,6 @@ TabEngine.prototype = {
_storeObj: TabStore,
_trackerObj: TabTracker,
_recordObj: TabSetRecord,
- // A flag to indicate if we have synced in this session. This is to help
- // consumers of remote tabs that may want to differentiate between "I've an
- // empty tab list as I haven't yet synced" vs "I've an empty tab list
- // as there really are no tabs"
- hasSyncedThisSession: false,
syncPriority: 3,
@@ -72,7 +67,6 @@
SyncEngine.prototype._resetClient.call(this);
this._store.wipe();
this._tracker.modified = true;
- this.hasSyncedThisSession = false;
},
removeClientData: function () {
@@ -100,12 +94,7 @@
}
return SyncEngine.prototype._reconcile.call(this, item);
- },
-
- _syncFinish() {
- this.hasSyncedThisSession = true;
- return SyncEngine.prototype._syncFinish.call(this);
- },
+ }
};
@@ -145,7 +134,7 @@ TabStore.prototype = {
}
for (let tab of win.gBrowser.tabs) {
- let tabState = this.getTabState(tab);
+ tabState = this.getTabState(tab);
// Make sure there are history entries to look at.
if (!tabState || !tabState.entries.length) {
@@ -165,11 +154,6 @@
continue;
}
- if (current.url.length >= (MAX_UPLOAD_BYTES - 1000)) {
- this._log.trace("Skipping over-long URL.");
- continue;
- }
-
// The element at `index` is the current page. Previous URLs were
// previously visited URLs; subsequent URLs are in the 'forward' stack,
// which we can't represent in Sync, so we truncate here.
@@ -189,9 +173,7 @@
allTabs.push({
title: current.title || "",
urlHistory: urls,
- icon: tabState.image ||
- (tabState.attributes && tabState.attributes.image) ||
- "",
+ icon: tabState.attributes && tabState.attributes.image || "",
lastUsed: Math.floor((tabState.lastAccessed || 0) / 1000),
});
}
@@ -265,9 +247,27 @@
create: function (record) {
this._log.debug("Adding remote tabs from " + record.clientName);
- this._remoteClients[record.id] = Object.assign({}, record.cleartext, {
- lastModified: record.modified
- });
+ this._remoteClients[record.id] = record.cleartext;
+
+ // Lose some precision, but that's good enough (seconds).
+ let roundModify = Math.floor(record.modified / 1000);
+ let notifyState = Svc.Prefs.get("notifyTabState");
+
+ // If there's no existing pref, save this first modified time.
+ if (notifyState == null) {
+ Svc.Prefs.set("notifyTabState", roundModify);
+ return;
+ }
+
+ // Don't change notifyState if it's already 0 (don't notify).
+ if (notifyState == 0) {
+ return;
+ }
+
+ // We must have gotten a new tab that isn't the same as last time.
+ if (notifyState != roundModify) {
+ Svc.Prefs.set("notifyTabState", 0);
+ }
},
update: function (record) {
@@ -302,14 +302,10 @@ TabTracker.prototype = {
_registerListenersForWindow: function (window) {
this._log.trace("Registering tab listeners in window");
- for (let topic of this._topics) {
+ for each (let topic in this._topics) {
window.addEventListener(topic, this.onTab, false);
}
window.addEventListener("unload", this._unregisterListeners, false);
- // If it's got a tab browser we can listen for things like navigation.
- if (window.gBrowser) {
- window.gBrowser.addProgressListener(this);
- }
},
_unregisterListeners: function (event) {
@@ -319,12 +315,9 @@
_unregisterListenersForWindow: function (window) {
this._log.trace("Removing tab listeners in window");
window.removeEventListener("unload", this._unregisterListeners, false);
- for (let topic of this._topics) {
+ for each (let topic in this._topics) {
window.removeEventListener(topic, this.onTab, false);
}
- if (window.gBrowser) {
- window.gBrowser.removeProgressListener(this);
- }
},
startTracking: function () {
@@ -380,14 +373,4 @@
this.score += SCORE_INCREMENT_SMALL;
}
},
-
- // web progress listeners.
- onLocationChange: function (webProgress, request, location, flags) {
- // We only care about top-level location changes which are not in the same
- // document.
- if (webProgress.isTopLevel &&
- ((flags & Ci.nsIWebProgressListener.LOCATION_CHANGE_SAME_DOCUMENT) == 0)) {
- this.modified = true;
- }
- },
};
diff --git a/services/sync/modules/healthreport.jsm b/services/sync/modules/healthreport.jsm
new file mode 100644
index 000000000..47161c095
--- /dev/null
+++ b/services/sync/modules/healthreport.jsm
@@ -0,0 +1,262 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "SyncProvider",
+];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/Metrics.jsm", this);
+Cu.import("resource://gre/modules/Promise.jsm", this);
+Cu.import("resource://gre/modules/Services.jsm", this);
+Cu.import("resource://gre/modules/XPCOMUtils.jsm", this);
+
+const DAILY_LAST_NUMERIC_FIELD = {type: Metrics.Storage.FIELD_DAILY_LAST_NUMERIC};
+const DAILY_LAST_TEXT_FIELD = {type: Metrics.Storage.FIELD_DAILY_LAST_TEXT};
+const DAILY_COUNTER_FIELD = {type: Metrics.Storage.FIELD_DAILY_COUNTER};
+
+XPCOMUtils.defineLazyModuleGetter(this, "Weave",
+ "resource://services-sync/main.js");
+
+function SyncMeasurement1() {
+ Metrics.Measurement.call(this);
+}
+
+SyncMeasurement1.prototype = Object.freeze({
+ __proto__: Metrics.Measurement.prototype,
+
+ name: "sync",
+ version: 1,
+
+ fields: {
+ enabled: DAILY_LAST_NUMERIC_FIELD,
+ preferredProtocol: DAILY_LAST_TEXT_FIELD,
+ activeProtocol: DAILY_LAST_TEXT_FIELD,
+ syncStart: DAILY_COUNTER_FIELD,
+ syncSuccess: DAILY_COUNTER_FIELD,
+ syncError: DAILY_COUNTER_FIELD,
+ },
+});
+
+function SyncDevicesMeasurement1() {
+ Metrics.Measurement.call(this);
+}
+
+SyncDevicesMeasurement1.prototype = Object.freeze({
+ __proto__: Metrics.Measurement.prototype,
+
+ name: "devices",
+ version: 1,
+
+ fields: {},
+
+ shouldIncludeField: function (name) {
+ return true;
+ },
+
+ fieldType: function (name) {
+ return Metrics.Storage.FIELD_DAILY_COUNTER;
+ },
+});
+
+function SyncMigrationMeasurement1() {
+ Metrics.Measurement.call(this);
+}
+
+SyncMigrationMeasurement1.prototype = Object.freeze({
+ __proto__: Metrics.Measurement.prototype,
+
+ name: "migration",
+ version: 1,
+
+ fields: {
+ state: DAILY_LAST_TEXT_FIELD, // last "user" or "internal" state we saw for the day
+ accepted: DAILY_COUNTER_FIELD, // number of times user tried to start migration
+ declined: DAILY_COUNTER_FIELD, // number of times user closed nagging infobar
+ unlinked: DAILY_LAST_NUMERIC_FIELD, // did the user decline and unlink
+ },
+});
+
+this.SyncProvider = function () {
+ Metrics.Provider.call(this);
+};
+SyncProvider.prototype = Object.freeze({
+ __proto__: Metrics.Provider.prototype,
+
+ name: "org.mozilla.sync",
+
+ measurementTypes: [
+ SyncDevicesMeasurement1,
+ SyncMeasurement1,
+ SyncMigrationMeasurement1,
+ ],
+
+ _OBSERVERS: [
+ "weave:service:sync:start",
+ "weave:service:sync:finish",
+ "weave:service:sync:error",
+ "fxa-migration:state-changed",
+ "fxa-migration:internal-state-changed",
+ "fxa-migration:internal-telemetry",
+ ],
+
+ postInit: function () {
+ for (let o of this._OBSERVERS) {
+ Services.obs.addObserver(this, o, false);
+ }
+
+ return Promise.resolve();
+ },
+
+ onShutdown: function () {
+ for (let o of this._OBSERVERS) {
+ Services.obs.removeObserver(this, o);
+ }
+
+ return Promise.resolve();
+ },
+
+ observe: function (subject, topic, data) {
+ switch (topic) {
+ case "weave:service:sync:start":
+ case "weave:service:sync:finish":
+ case "weave:service:sync:error":
+ return this._observeSync(subject, topic, data);
+
+ case "fxa-migration:state-changed":
+ case "fxa-migration:internal-state-changed":
+ case "fxa-migration:internal-telemetry":
+ return this._observeMigration(subject, topic, data);
+ }
+ Cu.reportError("unexpected topic in sync healthreport provider: " + topic);
+ },
+
+ _observeSync: function (subject, topic, data) {
+ let field;
+ switch (topic) {
+ case "weave:service:sync:start":
+ field = "syncStart";
+ break;
+
+ case "weave:service:sync:finish":
+ field = "syncSuccess";
+ break;
+
+ case "weave:service:sync:error":
+ field = "syncError";
+ break;
+
+ default:
+ Cu.reportError("unexpected sync topic in sync healthreport provider: " + topic);
+ return;
+ }
+
+ let m = this.getMeasurement(SyncMeasurement1.prototype.name,
+ SyncMeasurement1.prototype.version);
+ return this.enqueueStorageOperation(function recordSyncEvent() {
+ return m.incrementDailyCounter(field);
+ });
+ },
+
+ _observeMigration: function(subject, topic, data) {
+ switch (topic) {
+ case "fxa-migration:state-changed":
+ case "fxa-migration:internal-state-changed": {
+ // We record both "user" and "internal" states in the same field. This
+ // works for us as user state is always null when there is an internal
+ // state.
+ if (!data) {
+ return; // we don't count the |null| state
+ }
+ let m = this.getMeasurement(SyncMigrationMeasurement1.prototype.name,
+ SyncMigrationMeasurement1.prototype.version);
+ return this.enqueueStorageOperation(function() {
+ return m.setDailyLastText("state", data);
+ });
+ }
+
+ case "fxa-migration:internal-telemetry": {
+ // |data| is our field name.
+ let m = this.getMeasurement(SyncMigrationMeasurement1.prototype.name,
+ SyncMigrationMeasurement1.prototype.version);
+ return this.enqueueStorageOperation(function() {
+ switch (data) {
+ case "accepted":
+ case "declined":
+ return m.incrementDailyCounter(data);
+ case "unlinked":
+ return m.setDailyLastNumeric(data, 1);
+ default:
+ Cu.reportError("Unexpected migration field in sync healthreport provider: " + data);
+ return Promise.resolve();
+ }
+ });
+ }
+
+ default:
+ Cu.reportError("unexpected migration topic in sync healthreport provider: " + topic);
+ return;
+ }
+ },
+
+ collectDailyData: function () {
+ return this.storage.enqueueTransaction(this._populateDailyData.bind(this));
+ },
+
+ _populateDailyData: function* () {
+ let m = this.getMeasurement(SyncMeasurement1.prototype.name,
+ SyncMeasurement1.prototype.version);
+
+ let svc = Cc["@mozilla.org/weave/service;1"]
+ .getService(Ci.nsISupports)
+ .wrappedJSObject;
+
+ let enabled = svc.enabled;
+ yield m.setDailyLastNumeric("enabled", enabled ? 1 : 0);
+
+ // preferredProtocol is constant and only changes as the client
+ // evolves.
+ yield m.setDailyLastText("preferredProtocol", "1.5");
+
+ let protocol = svc.fxAccountsEnabled ? "1.5" : "1.1";
+ yield m.setDailyLastText("activeProtocol", protocol);
+
+ if (!enabled) {
+ return;
+ }
+
+ // Before grabbing more information, be sure the Sync service
+ // is fully initialized. This has the potential to initialize
+ // Sync on the spot. This may be undesired if Sync appears to
+ // be enabled but it really isn't. That responsibility should
+ // be up to svc.enabled to not return false positives, however.
+ yield svc.whenLoaded();
+
+ if (Weave.Status.service != Weave.STATUS_OK) {
+ return;
+ }
+
+ // Device types are dynamic. So we need to dynamically create fields if
+ // they don't exist.
+ let dm = this.getMeasurement(SyncDevicesMeasurement1.prototype.name,
+ SyncDevicesMeasurement1.prototype.version);
+ let devices = Weave.Service.clientsEngine.deviceTypes;
+ for (let [field, count] of devices) {
+ let hasField = this.storage.hasFieldFromMeasurement(dm.id, field,
+ this.storage.FIELD_DAILY_LAST_NUMERIC);
+ let fieldID;
+ if (hasField) {
+ fieldID = this.storage.fieldIDFromMeasurement(dm.id, field);
+ } else {
+ fieldID = yield this.storage.registerField(dm.id, field,
+ this.storage.FIELD_DAILY_LAST_NUMERIC);
+ }
+
+ yield this.storage.setDailyLastNumericFromFieldID(fieldID, count);
+ }
+ },
+});
diff --git a/services/sync/modules/identity.js b/services/sync/modules/identity.js
index b4da8c0bb..2bee13b5b 100644
--- a/services/sync/modules/identity.js
+++ b/services/sync/modules/identity.js
@@ -6,14 +6,13 @@
this.EXPORTED_SYMBOLS = ["IdentityManager"];
-var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/Promise.jsm");
Cu.import("resource://services-sync/constants.js");
Cu.import("resource://gre/modules/Log.jsm");
Cu.import("resource://services-sync/util.js");
-Cu.import("resource://services-common/async.js");
// Lazy import to prevent unnecessary load on startup.
for (let symbol of ["BulkKeyBundle", "SyncKeyBundle"]) {
@@ -85,14 +84,18 @@ IdentityManager.prototype = {
_syncKeyBundle: null,
/**
- * Initialize the identity provider.
+ * Initialize the identity provider. Returns a promise that is resolved
+ * when initialization is complete and the provider can be queried for
+ * its state
*/
initialize: function() {
// Nothing to do for this identity provider.
+ return Promise.resolve();
},
finalize: function() {
// Nothing to do for this identity provider.
+ return Promise.resolve();
},
/**
@@ -111,6 +114,14 @@
return Promise.resolve();
},
+ /**
+ * Indicates if the identity manager is still initializing
+ */
+ get readyToAuthenticate() {
+ // We initialize in a fully sync manner, so we are always finished.
+ return true;
+ },
+
get account() {
return Svc.Prefs.get("account", this.username);
},
@@ -195,7 +206,7 @@
return null;
}
- for (let login of this._getLogins(PWDMGR_PASSWORD_REALM)) {
+ for each (let login in this._getLogins(PWDMGR_PASSWORD_REALM)) {
if (login.username.toLowerCase() == username) {
// It should already be UTF-8 encoded, but we don't take any chances.
this._basicPassword = Utils.encodeUTF8(login.password);
@@ -249,7 +260,7 @@
return null;
}
- for (let login of this._getLogins(PWDMGR_PASSPHRASE_REALM)) {
+ for each (let login in this._getLogins(PWDMGR_PASSPHRASE_REALM)) {
if (login.username.toLowerCase() == username) {
this._syncKey = login.password;
}
@@ -326,7 +337,7 @@
try {
this._syncKeyBundle = new SyncKeyBundle(this.username, this.syncKey);
} catch (ex) {
- this._log.warn("Failed to create sync bundle", ex);
+ this._log.warn(Utils.exceptionStr(ex));
return null;
}
}
@@ -400,7 +411,7 @@
this._setLogin(PWDMGR_PASSWORD_REALM, this.username,
this._basicPassword);
} else {
- for (let login of this._getLogins(PWDMGR_PASSWORD_REALM)) {
+ for each (let login in this._getLogins(PWDMGR_PASSWORD_REALM)) {
Services.logins.removeLogin(login);
}
}
@@ -412,7 +423,7 @@
if (this._syncKey) {
this._setLogin(PWDMGR_PASSPHRASE_REALM, this.username, this._syncKey);
} else {
- for (let login of this._getLogins(PWDMGR_PASSPHRASE_REALM)) {
+ for each (let login in this._getLogins(PWDMGR_PASSPHRASE_REALM)) {
Services.logins.removeLogin(login);
}
}
@@ -447,9 +458,6 @@
try {
service.recordManager.get(service.storageURL + "meta/fxa_credentials");
} catch (ex) {
- if (Async.isShutdownException(ex)) {
- throw ex;
- }
this._log.warn("Failed to pre-fetch the migration sentinel", ex);
}
},
@@ -469,7 +477,7 @@
*/
_setLogin: function _setLogin(realm, username, password) {
let exists = false;
- for (let login of this._getLogins(realm)) {
+ for each (let login in this._getLogins(realm)) {
if (login.username == username && login.password == password) {
exists = true;
} else {
@@ -505,7 +513,7 @@
deleteSyncCredentials: function deleteSyncCredentials() {
for (let host of this._getSyncCredentialsHosts()) {
let logins = Services.logins.findLogins({}, host, "", "");
- for (let login of logins) {
+ for each (let login in logins) {
Services.logins.removeLogin(login);
}
}
@@ -593,13 +601,4 @@
// Do nothing for Sync 1.1.
return {accepted: true};
},
-
- // Tell Sync what the login status should be if it saw a 401 fetching
- // info/collections as part of login verification (typically immediately
- // after login.)
- // In our case it means an authoritative "password is incorrect".
- loginStatusFromVerification404() {
- return LOGIN_FAILED_LOGIN_REJECTED;
- }
-
};
diff --git a/services/sync/modules/jpakeclient.js b/services/sync/modules/jpakeclient.js
index 625dc91b6..10f405371 100644
--- a/services/sync/modules/jpakeclient.js
+++ b/services/sync/modules/jpakeclient.js
@@ -4,7 +4,7 @@
this.EXPORTED_SYMBOLS = ["JPAKEClient", "SendCredentialsController"];
-var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;
+const {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;
Cu.import("resource://gre/modules/Log.jsm");
Cu.import("resource://services-common/rest.js");
@@ -281,7 +281,8 @@
let rng = Cc["@mozilla.org/security/random-generator;1"]
.createInstance(Ci.nsIRandomGenerator);
let bytes = rng.generateRandomBytes(JPAKE_LENGTH_CLIENTID / 2);
- this._clientID = bytes.map(byte => ("0" + byte.toString(16)).slice(-2)).join("");
+ this._clientID = [("0" + byte.toString(16)).slice(-2)
+ for each (byte in bytes)].join("");
},
_createSecret: function _createSecret() {
@@ -290,7 +291,8 @@
let rng = Cc["@mozilla.org/security/random-generator;1"]
.createInstance(Ci.nsIRandomGenerator);
let bytes = rng.generateRandomBytes(JPAKE_LENGTH_SECRET);
- return bytes.map(byte => key[Math.floor(byte * key.length / 256)]).join("");
+ return [key[Math.floor(byte * key.length / 256)]
+ for each (byte in bytes)].join("");
},
_newRequest: function _newRequest(uri) {
diff --git a/services/sync/modules/keys.js b/services/sync/modules/keys.js
index b93de7f31..bf909bdc2 100644
--- a/services/sync/modules/keys.js
+++ b/services/sync/modules/keys.js
@@ -9,7 +9,7 @@
this.EXPORTED_SYMBOLS = [
"SyncKeyBundle"
];
-var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
Cu.import("resource://services-sync/constants.js");
Cu.import("resource://gre/modules/Log.jsm");
diff --git a/services/sync/modules/main.js b/services/sync/modules/main.js
index e8e705e72..488a2594b 100644
--- a/services/sync/modules/main.js
+++ b/services/sync/modules/main.js
@@ -6,7 +6,7 @@
this.EXPORTED_SYMBOLS = ['Weave'];
this.Weave = {};
Components.utils.import("resource://services-sync/constants.js", Weave);
-var lazies = {
+let lazies = {
"jpakeclient.js": ["JPAKEClient", "SendCredentialsController"],
"notifications.js": ["Notifications", "Notification", "NotificationButton"],
"service.js": ["Service"],
@@ -15,14 +15,12 @@
};
function lazyImport(module, dest, props) {
- function getter(prop) {
- return function() {
- let ns = {};
- Components.utils.import(module, ns);
- delete dest[prop];
- return dest[prop] = ns[prop];
- };
- }
+ function getter(prop) function() {
+ let ns = {};
+ Components.utils.import(module, ns);
+ delete dest[prop];
+ return dest[prop] = ns[prop];
+ };
props.forEach(function (prop) { dest.__defineGetter__(prop, getter(prop)); });
}
diff --git a/services/sync/modules/notifications.js b/services/sync/modules/notifications.js
index 72187a4ce..1ee24f2cd 100644
--- a/services/sync/modules/notifications.js
+++ b/services/sync/modules/notifications.js
@@ -4,10 +4,10 @@
this.EXPORTED_SYMBOLS = ["Notifications", "Notification", "NotificationButton"];
-var Cc = Components.classes;
-var Ci = Components.interfaces;
-var Cr = Components.results;
-var Cu = Components.utils;
+const Cc = Components.classes;
+const Ci = Components.interfaces;
+const Cr = Components.results;
+const Cu = Components.utils;
Cu.import("resource://services-common/observers.js"); Cu.import("resource://gre/modules/Log.jsm"); diff --git a/services/sync/modules/policies.js b/services/sync/modules/policies.js index a3933426d..d799cb235 100644 --- a/services/sync/modules/policies.js +++ b/services/sync/modules/policies.js @@ -7,26 +7,16 @@ this.EXPORTED_SYMBOLS = [ "SyncScheduler", ]; -var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components; +const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components; Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://services-sync/constants.js"); Cu.import("resource://services-sync/engines.js"); Cu.import("resource://services-sync/util.js"); Cu.import("resource://services-common/logmanager.js"); -Cu.import("resource://services-common/async.js"); XPCOMUtils.defineLazyModuleGetter(this, "Status", "resource://services-sync/status.js"); -XPCOMUtils.defineLazyModuleGetter(this, "AddonManager", - "resource://gre/modules/AddonManager.jsm"); - -// Get the value for an interval that's stored in preferences. To save users -// from themselves (and us from them!) the minimum time they can specify -// is 60s. -function getThrottledIntervalPreference(prefName) { - return Math.max(Svc.Prefs.get(prefName), 60) * 1000; -} this.SyncScheduler = function SyncScheduler(service) { this.service = service; @@ -55,12 +45,12 @@ SyncScheduler.prototype = { let part = service.fxAccountsEnabled ? "fxa" : "sync11"; let prefSDInterval = "scheduler." + part + ".singleDeviceInterval"; - this.singleDeviceInterval = getThrottledIntervalPreference(prefSDInterval); + this.singleDeviceInterval = Svc.Prefs.get(prefSDInterval) * 1000; - this.idleInterval = getThrottledIntervalPreference("scheduler.idleInterval"); - this.activeInterval = getThrottledIntervalPreference("scheduler.activeInterval"); - this.immediateInterval = getThrottledIntervalPreference("scheduler.immediateInterval"); - this.eolInterval = getThrottledIntervalPreference("scheduler.eolInterval"); + this.idleInterval = Svc.Prefs.get("scheduler.idleInterval") * 1000; + this.activeInterval = Svc.Prefs.get("scheduler.activeInterval") * 1000; + this.immediateInterval = Svc.Prefs.get("scheduler.immediateInterval") * 1000; + this.eolInterval = Svc.Prefs.get("scheduler.eolInterval") * 1000; // A user is non-idle on startup by default. 
this.idle = false;
@@ -71,40 +61,20 @@
},
// nextSync is in milliseconds, but prefs can't hold that much
- get nextSync() {
- return Svc.Prefs.get("nextSync", 0) * 1000;
- },
- set nextSync(value) {
- Svc.Prefs.set("nextSync", Math.floor(value / 1000));
- },
+ get nextSync() Svc.Prefs.get("nextSync", 0) * 1000,
+ set nextSync(value) Svc.Prefs.set("nextSync", Math.floor(value / 1000)),
- get syncInterval() {
- return Svc.Prefs.get("syncInterval", this.singleDeviceInterval);
- },
- set syncInterval(value) {
- Svc.Prefs.set("syncInterval", value);
- },
+ get syncInterval() Svc.Prefs.get("syncInterval", this.singleDeviceInterval),
+ set syncInterval(value) Svc.Prefs.set("syncInterval", value),
- get syncThreshold() {
- return Svc.Prefs.get("syncThreshold", SINGLE_USER_THRESHOLD);
- },
- set syncThreshold(value) {
- Svc.Prefs.set("syncThreshold", value);
- },
+ get syncThreshold() Svc.Prefs.get("syncThreshold", SINGLE_USER_THRESHOLD),
+ set syncThreshold(value) Svc.Prefs.set("syncThreshold", value),
- get globalScore() {
- return Svc.Prefs.get("globalScore", 0);
- },
- set globalScore(value) {
- Svc.Prefs.set("globalScore", value);
- },
+ get globalScore() Svc.Prefs.get("globalScore", 0),
+ set globalScore(value) Svc.Prefs.set("globalScore", value),
- get numClients() {
- return Svc.Prefs.get("numClients", 0);
- },
- set numClients(value) {
- Svc.Prefs.set("numClients", value);
- },
+ get numClients() Svc.Prefs.get("numClients", 0),
+ set numClients(value) Svc.Prefs.set("numClients", value),
init: function init() {
this._log.level = Log.Level[Svc.Prefs.get("log.logger.service.main")];
@@ -249,10 +219,7 @@
this.setDefaults();
try {
Svc.Idle.removeIdleObserver(this, Svc.Prefs.get("scheduler.idleTime"));
- } catch (ex) {
- if (ex.result != Cr.NS_ERROR_FAILURE) {
- throw ex;
- }
+ } catch (ex if (ex.result == Cr.NS_ERROR_FAILURE)) {
// In all likelihood we didn't have an idle observer registered yet.
// It's all good.
}
@@ -285,11 +252,10 @@
case "wake_notification":
this._log.debug("Woke from sleep.");
Utils.nextTick(() => {
- // Trigger a sync if we have multiple clients. We give it 5 seconds
- // incase the network is still in the process of coming back up.
+ // Trigger a sync if we have multiple clients.
if (this.numClients > 1) {
- this._log.debug("More than 1 client. Will sync in 5s.");
- this.scheduleNextSync(5000);
+ this._log.debug("More than 1 client. Syncing.");
+ this.scheduleNextSync(0);
}
});
break;
@@ -531,6 +497,45 @@
this.syncTimer.clear();
},
+ /**
+ * Prevent new syncs from starting. This is used by the FxA migration code
+ * where we can't afford to have a sync start partway through the migration.
+ * To handle the edge-case of a sync starting and not stopping, we store
+ * this state in a pref, so on the next startup we remain blocked (and thus
+ * sync will never start) so the migration can complete.
+ *
+ * As a safety measure, we only block for some period of time, and after
+ * that it will automatically unblock. This ensures that if things go
+ * really pear-shaped and we never end up calling unblockSync() we haven't
+ * completely broken the world.
+ */
+ blockSync: function(until = null) {
+ if (!until) {
+ until = Date.now() + DEFAULT_BLOCK_PERIOD;
+ }
+ // until is specified in ms, but Prefs can't hold that much
+ Svc.Prefs.set("scheduler.blocked-until", Math.floor(until / 1000));
+ },
+
+ unblockSync: function() {
+ Svc.Prefs.reset("scheduler.blocked-until");
+ // the migration code should be ready to roll, so resume normal operations.
+ this.checkSyncStatus();
+ },
+
+ get isBlocked() {
+ let until = Svc.Prefs.get("scheduler.blocked-until");
+ if (until === undefined) {
+ return false;
+ }
+ if (until <= Math.floor(Date.now() / 1000)) {
+ // we were previously blocked but the time has expired.
+ Svc.Prefs.reset("scheduler.blocked-until");
+ return false;
+ }
+ // we remain blocked.
+ return true;
+ },
};
this.ErrorHandler = function ErrorHandler(service) {
@@ -570,10 +575,7 @@ ErrorHandler.prototype = {
root.level = Log.Level[Svc.Prefs.get("log.rootLogger")];
let logs = ["Sync", "FirefoxAccounts", "Hawk", "Common.TokenServerClient",
- "Sync.SyncMigration", "browserwindow.syncui",
- "Services.Common.RESTRequest", "Services.Common.RESTRequest",
- "BookmarkSyncUtils"
- ];
+ "Sync.SyncMigration"];
this._logManager = new LogManager(Svc.Prefs, logs, "sync");
},
@@ -590,25 +592,17 @@
this._log.debug(data + " failed to apply some records.");
}
break;
- case "weave:engine:sync:error": {
+ case "weave:engine:sync:error":
let exception = subject; // exception thrown by engine's sync() method
let engine_name = data; // engine name that threw the exception
this.checkServerError(exception);
Status.engines = [engine_name, exception.failureCode || ENGINE_UNKNOWN_FAIL];
- if (Async.isShutdownException(exception)) {
- this._log.debug(engine_name + " was interrupted due to the application shutting down");
- } else {
- this._log.debug(engine_name + " failed", exception);
- Services.telemetry.getKeyedHistogramById("WEAVE_ENGINE_SYNC_ERRORS")
- .add(engine_name);
- }
+ this._log.debug(engine_name + " failed: " + Utils.exceptionStr(exception));
break;
- }
case "weave:service:login:error":
- this._log.error("Sync encountered a login error");
- this.resetFileLog();
+ this.resetFileLog(this._logManager.REASON_ERROR);
if (this.shouldReportError()) {
this.notifyOnNextTick("weave:ui:login:error");
@@ -618,23 +612,12 @@
this.dontIgnoreErrors = false;
break;
- case "weave:service:sync:error": {
+ case "weave:service:sync:error":
if (Status.sync == CREDENTIALS_CHANGED) {
this.service.logout();
}
- let exception = subject;
- if (Async.isShutdownException(exception)) {
- // If we are shutting down we just log the fact, attempt to flush
- // the log file and get out of here!
- this._log.error("Sync was interrupted due to the application shutting down");
- this.resetFileLog();
- break;
- }
-
- // Not a shutdown related exception...
- this._log.error("Sync encountered an error", exception); - this.resetFileLog(); + this.resetFileLog(this._logManager.REASON_ERROR); if (this.shouldReportError()) { this.notifyOnNextTick("weave:ui:sync:error"); @@ -644,7 +627,6 @@ ErrorHandler.prototype = { this.dontIgnoreErrors = false; break; - } case "weave:service:sync:finish": this._log.trace("Status.service is " + Status.service); @@ -660,8 +642,8 @@ ErrorHandler.prototype = { } if (Status.service == SYNC_FAILED_PARTIAL) { - this._log.error("Some engines did not sync correctly."); - this.resetFileLog(); + this._log.debug("Some engines did not sync correctly."); + this.resetFileLog(this._logManager.REASON_ERROR); if (this.shouldReportError()) { this.dontIgnoreErrors = false; @@ -669,7 +651,7 @@ ErrorHandler.prototype = { break; } } else { - this.resetFileLog(); + this.resetFileLog(this._logManager.REASON_SUCCESS); } this.dontIgnoreErrors = false; this.notifyOnNextTick("weave:ui:sync:finish"); @@ -696,52 +678,22 @@ ErrorHandler.prototype = { Utils.nextTick(this.service.sync, this.service); }, - _dumpAddons: function _dumpAddons() { - // Just dump the items that sync may be concerned with. Specifically, - // active extensions that are not hidden. - let addonPromise = new Promise(resolve => { - try { - AddonManager.getAddonsByTypes(["extension"], resolve); - } catch (e) { - this._log.warn("Failed to dump addons", e) - resolve([]) - } - }); - - return addonPromise.then(addons => { - let relevantAddons = addons.filter(x => x.isActive && !x.hidden); - this._log.debug("Addons installed", relevantAddons.length); - for (let addon of relevantAddons) { - this._log.debug(" - ${name}, version ${version}, id ${id}", addon); - } - }); - }, - /** * Generate a log file for the sync that just completed * and refresh the input & output streams. + * + * @param reason + * A constant from the LogManager that indicates the reason for the + * reset. */ - resetFileLog: function resetFileLog() { - let onComplete = logType => { + resetFileLog: function resetFileLog(reason) { + let onComplete = () => { Svc.Obs.notify("weave:service:reset-file-log"); this._log.trace("Notified: " + Date.now()); - if (logType == this._logManager.ERROR_LOG_WRITTEN) { - Cu.reportError("Sync encountered an error - see about:sync-log for the log file."); - } }; - - // If we're writing an error log, dump extensions that may be causing problems. - let beforeResetLog; - if (this._logManager.sawError) { - beforeResetLog = this._dumpAddons(); - } else { - beforeResetLog = Promise.resolve(); - } // Note we do not return the promise here - the caller doesn't need to wait // for this to complete. - beforeResetLog - .then(() => this._logManager.resetFileLog()) - .then(onComplete, onComplete); + this._logManager.resetFileLog(reason).then(onComplete, onComplete); }, /** @@ -775,9 +727,6 @@ ErrorHandler.prototype = { } }, - // A function to indicate if Sync errors should be "reported" - which in this - // context really means "should be notify observers of an error" - but note - // that since bug 1180587, no one is going to surface an error to the user. 
shouldReportError: function shouldReportError() {
if (Status.login == MASTER_PASSWORD_LOCKED) {
this._log.trace("shouldReportError: false (master password locked).");
@@ -817,12 +766,8 @@
return false;
}
-
- let result = ([Status.login, Status.sync].indexOf(SERVER_MAINTENANCE) == -1 &&
- [Status.login, Status.sync].indexOf(LOGIN_FAILED_NETWORK_ERROR) == -1);
- this._log.trace("shouldReportError: ${result} due to login=${login}, sync=${sync}",
- {result, login: Status.login, sync: Status.sync});
- return result;
+ return ([Status.login, Status.sync].indexOf(SERVER_MAINTENANCE) == -1 &&
+ [Status.login, Status.sync].indexOf(LOGIN_FAILED_NETWORK_ERROR) == -1);
},
get currentAlertMode() {
@@ -925,7 +870,7 @@
case 401:
this.service.logout();
this._log.info("Got 401 response; resetting clusterURL.");
- this.service.clusterURL = null;
+ Svc.Prefs.reset("clusterURL");
let delay = 0;
if (Svc.Prefs.get("lastSyncReassigned")) {
diff --git a/services/sync/modules/record.js b/services/sync/modules/record.js
index f7a69d9ef..04ccd2dd2 100644
--- a/services/sync/modules/record.js
+++ b/services/sync/modules/record.js
@@ -10,10 +10,10 @@
this.EXPORTED_SYMBOLS = [
"Collection",
];
-var Cc = Components.classes;
-var Ci = Components.interfaces;
-var Cr = Components.results;
-var Cu = Components.utils;
+const Cc = Components.classes;
+const Ci = Components.interfaces;
+const Cr = Components.results;
+const Cu = Components.utils;
const CRYPTO_COLLECTION = "crypto";
const KEYS_WBO = "keys";
@@ -23,7 +23,6 @@
Cu.import("resource://services-sync/constants.js");
Cu.import("resource://services-sync/keys.js");
Cu.import("resource://services-sync/resource.js");
Cu.import("resource://services-sync/util.js");
-Cu.import("resource://services-common/async.js");
this.WBORecord = function WBORecord(collection, id) {
this.data = {};
@@ -86,7 +85,7 @@ WBORecord.prototype = {
toJSON: function toJSON() {
// Copy fields from data to be stringified, making sure payload is a string
let obj = {};
- for (let [key, val] of Object.entries(this.data))
+ for (let [key, val] in Iterator(this.data))
obj[key] = key == "payload" ? JSON.stringify(val) : val;
if (this.ttl)
obj.ttl = this.ttl;
@@ -196,9 +195,7 @@ CryptoWrapper.prototype = {
},
// The custom setter below masks the parent's getter, so explicitly call it :(
- get id() {
- return WBORecord.prototype.__lookupGetter__("id").call(this);
- },
+ get id() WBORecord.prototype.__lookupGetter__("id").call(this),
// Keep both plaintext and encrypted versions of the id to verify integrity
set id(val) {
@@ -238,11 +235,8 @@ RecordManager.prototype = {
record.deserialize(this.response);
return this.set(url, record);
- } catch (ex) {
- if (Async.isShutdownException(ex)) {
- throw ex;
- }
- this._log.debug("Failed to import record", ex);
+ } catch(ex) {
+ this._log.debug("Failed to import record: " + Utils.exceptionStr(ex));
return null;
}
},
@@ -281,10 +275,10 @@ RecordManager.prototype = {
* You can update this thing simply by giving it /info/collections. It'll
* use the last modified time to bring itself up to date.
*/
-this.CollectionKeyManager = function CollectionKeyManager(lastModified, default_, collections) {
- this.lastModified = lastModified || 0;
- this._default = default_ || null;
- this._collections = collections || {};
+this.CollectionKeyManager = function CollectionKeyManager() {
+ this.lastModified = 0;
+ this._collections = {};
+ this._default = null;
this._log = Log.repository.getLogger("Sync.CollectionKeyManager");
}
@@ -293,19 +287,6 @@
// Note that the last modified time needs to be preserved.
CollectionKeyManager.prototype = {
- /**
- * Generate a new CollectionKeyManager that has the same attributes
- * as this one.
- */
- clone() {
- const newCollections = {};
- for (let c in this._collections) {
- newCollections[c] = this._collections[c];
- }
-
- return new CollectionKeyManager(this.lastModified, this._default, newCollections);
- },
-
// Return information about old vs new keys:
// * same: true if two collections are equal
// * changed: an array of collection names that changed.
@@ -328,7 +309,7 @@
// Return a sorted, unique array.
changed.sort();
let last;
- changed = changed.filter(x => (x != last) && (last = x));
+ changed = [x for each (x in changed) if ((x != last) && (last = x))];
return {same: changed.length == 0,
changed: changed};
},
@@ -374,15 +355,15 @@
/**
* Create a WBO for the current keys.
*/
- asWBO: function(collection, id) {
- return this._makeWBO(this._collections, this._default);
- },
+ asWBO: function(collection, id)
+ this._makeWBO(this._collections, this._default),
/**
* Compute a new default key, and new keys for any specified collections.
*/
newKeys: function(collections) {
- let newDefaultKeyBundle = this.newDefaultKeyBundle();
+ let newDefaultKey = new BulkKeyBundle(DEFAULT_KEYBUNDLE_NAME);
+ newDefaultKey.generateRandom();
let newColls = {};
if (collections) {
@@ -392,7 +373,7 @@
newColls[c] = b;
});
}
- return [newDefaultKeyBundle, newColls];
+ return [newDefaultKey, newColls];
},
/**
@@ -406,57 +387,6 @@
return this._makeWBO(newColls, newDefaultKey);
},
- /**
- * Create a new default key.
- *
- * @returns {BulkKeyBundle}
- */
- newDefaultKeyBundle() {
- const key = new BulkKeyBundle(DEFAULT_KEYBUNDLE_NAME);
- key.generateRandom();
- return key;
- },
-
- /**
- * Create a new default key and store it as this._default, since without one you cannot use setContents.
- */
- generateDefaultKey() {
- this._default = this.newDefaultKeyBundle();
- },
-
- /**
- * Return true if keys are already present for each of the given
- * collections.
- */
- hasKeysFor(collections) {
- // We can't use filter() here because sometimes collections is an iterator.
- for (let collection of collections) {
- if (!this._collections[collection]) {
- return false;
- }
- }
- return true;
- },
-
- /**
- * Return a new CollectionKeyManager that has keys for each of the
- * given collections (creating new ones for collections where we
- * don't already have keys).
- */ - ensureKeysFor(collections) { - const newKeys = Object.assign({}, this._collections); - for (let c of collections) { - if (newKeys[c]) { - continue; // don't replace existing keys - } - - const b = new BulkKeyBundle(c); - b.generateRandom(); - newKeys[c] = b; - } - return new CollectionKeyManager(this.lastModified, this._default, newKeys); - }, - // Take the fetched info/collections WBO, checking the change // time of the crypto collection. updateNeeded: function(info_collections) { @@ -487,6 +417,9 @@ CollectionKeyManager.prototype = { // setContents: function setContents(payload, modified) { + if (!modified) + throw "No modified time provided to setContents."; + let self = this; this._log.info("Setting collection keys contents. Our last modified: " + @@ -516,7 +449,9 @@ CollectionKeyManager.prototype = { if (v) { let keyObj = new BulkKeyBundle(k); keyObj.keyPairB64 = v; - newCollections[k] = keyObj; + if (keyObj) { + newCollections[k] = keyObj; + } } } } @@ -527,11 +462,8 @@ CollectionKeyManager.prototype = { let sameColls = collComparison.same; if (sameDefault && sameColls) { - self._log.info("New keys are the same as our old keys!"); - if (modified) { - self._log.info("Bumped local modified time."); - self.lastModified = modified; - } + self._log.info("New keys are the same as our old keys! Bumped local modified time."); + self.lastModified = modified; return false; } @@ -543,10 +475,8 @@ CollectionKeyManager.prototype = { this._collections = newCollections; // Always trust the server. - if (modified) { - self._log.info("Bumping last modified to " + modified); - self.lastModified = modified; - } + self._log.info("Bumping last modified to " + modified); + self.lastModified = modified; return sameDefault ? collComparison.changed : true; }, @@ -594,12 +524,6 @@ this.Collection = function Collection(uri, recordObj, service) { this._older = 0; this._newer = 0; this._data = []; - // optional members used by batch upload operations. - this._batch = null; - this._commit = false; - // Used for batch download operations -- note that this is explicitly an - // opaque value and not (necessarily) a number. - this._offset = null; } Collection.prototype = { __proto__: Resource.prototype, @@ -623,12 +547,6 @@ Collection.prototype = { args.push("ids=" + this.ids); if (this.limit > 0 && this.limit != Infinity) args.push("limit=" + this.limit); - if (this._batch) - args.push("batch=" + encodeURIComponent(this._batch)); - if (this._commit) - args.push("commit=true"); - if (this._offset) - args.push("offset=" + encodeURIComponent(this._offset)); this.uri.query = (args.length > 0)? '?' + args.join('&') : ''; }, @@ -641,14 +559,14 @@ Collection.prototype = { }, // Apply the action to a certain set of ids - get ids() { return this._ids; }, + get ids() this._ids, set ids(value) { this._ids = value; this._rebuildURL(); }, // Limit how many records to get - get limit() { return this._limit; }, + get limit() this._limit, set limit(value) { this._limit = value; this._rebuildURL(); @@ -678,100 +596,12 @@ Collection.prototype = { this._rebuildURL(); }, - get offset() { return this._offset; }, - set offset(value) { - this._offset = value; - this._rebuildURL(); - }, - - // Set information about the batch for this request. 
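Stepping back to the CollectionKeyManager changes earlier in this record.js diff: both the removed helpers and the restored newKeys() build per-collection keys the same way. A minimal sketch of generating a fresh default key plus one key per collection, assuming BulkKeyBundle and DEFAULT_KEYBUNDLE_NAME from the modules imported at the top of record.js (collection names are illustrative):

// Sketch only -- mirrors the newKeys() logic kept by this import.
let defaultKey = new BulkKeyBundle(DEFAULT_KEYBUNDLE_NAME);
defaultKey.generateRandom();

let collections = {};
for (let name of ["bookmarks", "history"]) {
  let bundle = new BulkKeyBundle(name);
  bundle.generateRandom();   // fresh random key material per collection
  collections[name] = bundle;
}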
- get batch() { return this._batch; }, - set batch(value) { - this._batch = value; - this._rebuildURL(); - }, - - get commit() { return this._commit; }, - set commit(value) { - this._commit = value && true; - this._rebuildURL(); + pushData: function Coll_pushData(data) { + this._data.push(data); }, - // Similar to get(), but will page through the items `batchSize` at a time, - // deferring calling the record handler until we've gotten them all. - // - // Returns the last response processed, and doesn't run the record handler - // on any items if a non-success status is received while downloading the - // records (or if a network error occurs). - getBatched(batchSize = DEFAULT_DOWNLOAD_BATCH_SIZE) { - let totalLimit = Number(this.limit) || Infinity; - if (batchSize <= 0 || batchSize >= totalLimit) { - // Invalid batch sizes should arguably be an error, but they're easy to handle - return this.get(); - } - - if (!this.full) { - throw new Error("getBatched is unimplemented for guid-only GETs"); - } - - // _onComplete and _onProgress are reset after each `get` by AsyncResource. - // We overwrite _onRecord to something that stores the data in an array - // until the end. - let { _onComplete, _onProgress, _onRecord } = this; - let recordBuffer = []; - let resp; - try { - this._onRecord = r => recordBuffer.push(r); - let lastModifiedTime; - this.limit = batchSize; - - do { - this._onProgress = _onProgress; - this._onComplete = _onComplete; - if (batchSize + recordBuffer.length > totalLimit) { - this.limit = totalLimit - recordBuffer.length; - } - this._log.trace("Performing batched GET", { limit: this.limit, offset: this.offset }); - // Actually perform the request - resp = this.get(); - if (!resp.success) { - break; - } - - // Initialize last modified, or check that something broken isn't happening. - let lastModified = resp.headers["x-last-modified"]; - if (!lastModifiedTime) { - lastModifiedTime = lastModified; - this.setHeader("X-If-Unmodified-Since", lastModified); - } else if (lastModified != lastModifiedTime) { - // Should be impossible -- We'd get a 412 in this case. - throw new Error("X-Last-Modified changed in the middle of a download batch! " + - `${lastModified} => ${lastModifiedTime}`) - } - - // If this is missing, we're finished. - this.offset = resp.headers["x-weave-next-offset"]; - } while (this.offset && totalLimit > recordBuffer.length); - } finally { - // Ensure we undo any temporary state so that subsequent calls to get() - // or getBatched() work properly. We do this before calling the record - // handler so that we can more convincingly pretend to be a normal get() - // call. Note: we're resetting these to the values they had before this - // function was called. - this._onRecord = _onRecord; - this._limit = totalLimit; - this._offset = null; - delete this._headers["x-if-unmodified-since"]; - this._rebuildURL(); - } - if (resp.success && Async.checkAppReady()) { - // call the original _onRecord (e.g. 
the user supplied record handler) - // for each record we've stored - for (let record of recordBuffer) { - this._onRecord(record); - } - } - return resp; + clearRecords: function Coll_clearRecords() { + this._data = []; }, set recordHandler(onRecord) { @@ -781,8 +611,6 @@ Collection.prototype = { // Switch to newline separated records for incremental parsing coll.setHeader("Accept", "application/newlines"); - this._onRecord = onRecord; - this._onProgress = function() { let newline; while ((newline = this._data.indexOf("\n")) > 0) { @@ -793,247 +621,8 @@ Collection.prototype = { // Deserialize a record from json and give it to the callback let record = new coll._recordObj(); record.deserialize(json); - coll._onRecord(record); + onRecord(record); } }; }, - - // This object only supports posting via the postQueue object. - post() { - throw new Error("Don't directly post to a collection - use newPostQueue instead"); - }, - - newPostQueue(log, timestamp, postCallback) { - let poster = (data, headers, batch, commit) => { - this.batch = batch; - this.commit = commit; - for (let [header, value] of headers) { - this.setHeader(header, value); - } - return Resource.prototype.post.call(this, data); - } - let getConfig = (name, defaultVal) => { - if (this._service.serverConfiguration && this._service.serverConfiguration.hasOwnProperty(name)) { - return this._service.serverConfiguration[name]; - } - return defaultVal; - } - - let config = { - max_post_bytes: getConfig("max_post_bytes", MAX_UPLOAD_BYTES), - max_post_records: getConfig("max_post_records", MAX_UPLOAD_RECORDS), - - max_batch_bytes: getConfig("max_total_bytes", Infinity), - max_batch_records: getConfig("max_total_records", Infinity), - } - - // Handle config edge cases - if (config.max_post_records <= 0) { config.max_post_records = MAX_UPLOAD_RECORDS; } - if (config.max_batch_records <= 0) { config.max_batch_records = Infinity; } - if (config.max_post_bytes <= 0) { config.max_post_bytes = MAX_UPLOAD_BYTES; } - if (config.max_batch_bytes <= 0) { config.max_batch_bytes = Infinity; } - - // Max size of BSO payload is 256k. This assumes at most 4k of overhead, - // which sounds like plenty. If the server says it can't handle this, we - // might have valid records we can't sync, so we give up on syncing. - let requiredMax = 260 * 1024; - if (config.max_post_bytes < requiredMax) { - this._log.error("Server configuration max_post_bytes is too low", config); - throw new Error("Server configuration max_post_bytes is too low"); - } - - return new PostQueue(poster, timestamp, config, log, postCallback); - }, }; - -/* A helper to manage the posting of records while respecting the various - size limits. - - This supports the concept of a server-side "batch". The general idea is: - * We queue as many records as allowed in memory, then make a single POST. - * This first POST (optionally) gives us a batch ID, which we use for - all subsequent posts, until... - * At some point we hit a batch-maximum, and jump through a few hoops to - commit the current batch (ie, all previous POSTs) and start a new one. - * Eventually commit the final batch. - - In most cases we expect there to be exactly 1 batch consisting of possibly - multiple POSTs. -*/ -function PostQueue(poster, timestamp, config, log, postCallback) { - // The "post" function we should use when it comes time to do the post. - this.poster = poster; - this.log = log; - - // The config we use. 
We expect it to have fields "max_post_records", - // "max_batch_records", "max_post_bytes", and "max_batch_bytes" - this.config = config; - - // The callback we make with the response when we do get around to making the - // post (which could be during any of the enqueue() calls or the final flush()) - // This callback may be called multiple times and must not add new items to - // the queue. - // The second argument passed to this callback is a boolean value that is true - // if we're in the middle of a batch, and false if either the batch is - // complete, or it's a post to a server that does not understand batching. - this.postCallback = postCallback; - - // The string where we are capturing the stringified version of the records - // queued so far. It will always be invalid JSON as it is always missing the - // closing bracket. - this.queued = ""; - - // The number of records we've queued so far but are yet to POST. - this.numQueued = 0; - - // The number of records/bytes we've processed in previous POSTs for our - // current batch. Does *not* include records currently queued for the next POST. - this.numAlreadyBatched = 0; - this.bytesAlreadyBatched = 0; - - // The ID of our current batch. Can be undefined (meaning we are yet to make - // the first post of a patch, so don't know if we have a batch), null (meaning - // we've made the first post but the server response indicated no batching - // semantics), otherwise we have made the first post and it holds the batch ID - // returned from the server. - this.batchID = undefined; - - // Time used for X-If-Unmodified-Since -- should be the timestamp from the last GET. - this.lastModified = timestamp; -} - -PostQueue.prototype = { - enqueue(record) { - // We want to ensure the record has a .toJSON() method defined - even - // though JSON.stringify() would implicitly call it, the stringify might - // still work even if it isn't defined, which isn't what we want. - let jsonRepr = record.toJSON(); - if (!jsonRepr) { - throw new Error("You must only call this with objects that explicitly support JSON"); - } - let bytes = JSON.stringify(jsonRepr); - - // Do a flush if we can't add this record without exceeding our single-request - // limits, or without exceeding the total limit for a single batch. - let newLength = this.queued.length + bytes.length + 2; // extras for leading "[" / "," and trailing "]" - - let maxAllowedBytes = Math.min(256 * 1024, this.config.max_post_bytes); - - let postSizeExceeded = this.numQueued >= this.config.max_post_records || - newLength >= maxAllowedBytes; - - let batchSizeExceeded = (this.numQueued + this.numAlreadyBatched) >= this.config.max_batch_records || - (newLength + this.bytesAlreadyBatched) >= this.config.max_batch_bytes; - - let singleRecordTooBig = bytes.length + 2 > maxAllowedBytes; - - if (postSizeExceeded || batchSizeExceeded) { - this.log.trace(`PostQueue flushing due to postSizeExceeded=${postSizeExceeded}, batchSizeExceeded=${batchSizeExceeded}` + - `, max_batch_bytes: ${this.config.max_batch_bytes}, max_post_bytes: ${this.config.max_post_bytes}`); - - if (singleRecordTooBig) { - return { enqueued: false, error: new Error("Single record too large to submit to server") }; - } - - // We need to write the queue out before handling this one, but we only - // commit the batch (and thus start a new one) if the batch is full. - // Note that if a single record is too big for the batch or post, then - // the batch may be empty, and so we don't flush in that case. 
- if (this.numQueued) { - this.flush(batchSizeExceeded || singleRecordTooBig); - } - } - // Either a ',' or a '[' depending on whether this is the first record. - this.queued += this.numQueued ? "," : "["; - this.queued += bytes; - this.numQueued++; - return { enqueued: true }; - }, - - flush(finalBatchPost) { - if (!this.queued) { - // nothing queued - we can't be in a batch, and something has gone very - // bad if we think we are. - if (this.batchID) { - throw new Error(`Flush called when no queued records but we are in a batch ${this.batchID}`); - } - return; - } - // the batch query-param and headers we'll send. - let batch; - let headers = []; - if (this.batchID === undefined) { - // First commit in a (possible) batch. - batch = "true"; - } else if (this.batchID) { - // We have an existing batch. - batch = this.batchID; - } else { - // Not the first post and we know we have no batch semantics. - batch = null; - } - - headers.push(["x-if-unmodified-since", this.lastModified]); - - this.log.info(`Posting ${this.numQueued} records of ${this.queued.length+1} bytes with batch=${batch}`); - let queued = this.queued + "]"; - if (finalBatchPost) { - this.bytesAlreadyBatched = 0; - this.numAlreadyBatched = 0; - } else { - this.bytesAlreadyBatched += queued.length; - this.numAlreadyBatched += this.numQueued; - } - this.queued = ""; - this.numQueued = 0; - let response = this.poster(queued, headers, batch, !!(finalBatchPost && this.batchID !== null)); - - if (!response.success) { - this.log.trace("Server error response during a batch", response); - // not clear what we should do here - we expect the consumer of this to - // abort by throwing in the postCallback below. - return this.postCallback(response, !finalBatchPost); - } - - if (finalBatchPost) { - this.log.trace("Committed batch", this.batchID); - this.batchID = undefined; // we are now in "first post for the batch" state. - this.lastModified = response.headers["x-last-modified"]; - return this.postCallback(response, false); - } - - if (response.status != 202) { - if (this.batchID) { - throw new Error("Server responded non-202 success code while a batch was in progress"); - } - this.batchID = null; // no batch semantics are in place. - this.lastModified = response.headers["x-last-modified"]; - return this.postCallback(response, false); - } - - // this response is saying the server has batch semantics - we should - // always have a batch ID in the response. 
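The removed PostQueue code above implements the Sync 1.5 batch upload protocol. Roughly: the first POST carries ?batch=true and, on a batching server, gets back a 202 with a batch ID; follow-up POSTs carry ?batch=<id>; the final POST adds commit=true. A compressed sketch of that client-side flow, where poster() and the chunk variables are hypothetical stand-ins for the real request machinery:

// Sketch of the batch lifecycle; poster(data, batchParam, commit) is hypothetical.
let batchID;
let resp = poster(firstChunk, "true", false);   // open a (possible) batch
if (resp.status == 202) {
  batchID = resp.obj.batch;                     // server supports batching
  poster(nextChunk, batchID, false);            // append to the batch
  poster(lastChunk, batchID, true);             // commit=true finalizes it
}
// A non-202 success means the server has no batch semantics; each POST then
// stands alone, which is what batchID === null encodes in the code above.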
- let responseBatchID = response.obj.batch; - this.log.trace("Server responsed 202 with batch", responseBatchID); - if (!responseBatchID) { - this.log.error("Invalid server response: 202 without a batch ID", response); - throw new Error("Invalid server response: 202 without a batch ID"); - } - - if (this.batchID === undefined) { - this.batchID = responseBatchID; - if (!this.lastModified) { - this.lastModified = response.headers["x-last-modified"]; - if (!this.lastModified) { - throw new Error("Batch response without x-last-modified"); - } - } - } - - if (this.batchID != responseBatchID) { - throw new Error(`Invalid client/server batch state - client has ${this.batchID}, server has ${responseBatchID}`); - } - - this.postCallback(response, true); - }, -} diff --git a/services/sync/modules/resource.js b/services/sync/modules/resource.js index bf7066b9f..1c2a67b90 100644 --- a/services/sync/modules/resource.js +++ b/services/sync/modules/resource.js @@ -7,13 +7,12 @@ this.EXPORTED_SYMBOLS = [ "Resource" ]; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cr = Components.results; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cr = Components.results; +const Cu = Components.utils; Cu.import("resource://gre/modules/Preferences.jsm"); -Cu.import("resource://gre/modules/NetUtil.jsm"); Cu.import("resource://services-common/async.js"); Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://services-common/observers.js"); @@ -74,6 +73,20 @@ AsyncResource.prototype = { */ authenticator: null, + // The string to use as the base User-Agent in Sync requests. + // These strings will look something like + // + // Firefox/4.0 FxSync/1.8.0.20100101.mobile + // + // or + // + // Firefox Aurora/5.0a1 FxSync/1.9.0.20110409.desktop + // + _userAgent: + Services.appinfo.name + "/" + Services.appinfo.version + // Product. + " FxSync/" + WEAVE_VERSION + "." + // Sync. + Services.appinfo.appBuildID + ".", // Build. + // Wait 5 minutes before killing a request. ABORT_TIMEOUT: 300000, @@ -121,9 +134,7 @@ AsyncResource.prototype = { // // Get and set the data encapulated in the resource. _data: null, - get data() { - return this._data; - }, + get data() this._data, set data(value) { this._data = value; }, @@ -135,9 +146,16 @@ AsyncResource.prototype = { // to obtain a request channel. // _createRequest: function Res__createRequest(method) { - let channel = NetUtil.newChannel({uri: this.spec, loadUsingSystemPrincipal: true}) - .QueryInterface(Ci.nsIRequest) - .QueryInterface(Ci.nsIHttpChannel); + let channel = Services.io.newChannel2(this.spec, + null, + null, + null, // aLoadingNode + Services.scriptSecurityManager.getSystemPrincipal(), + null, // aTriggeringPrincipal + Ci.nsILoadInfo.SEC_NORMAL, + Ci.nsIContentPolicy.TYPE_OTHER) + .QueryInterface(Ci.nsIRequest) + .QueryInterface(Ci.nsIHttpChannel); channel.loadFlags |= DEFAULT_LOAD_FLAGS; @@ -147,7 +165,8 @@ AsyncResource.prototype = { // Compose a UA string fragment from the various available identifiers. 
if (Svc.Prefs.get("sendVersionInfo", true)) { - channel.setRequestHeader("user-agent", Utils.userAgent, false); + let ua = this._userAgent + Svc.Prefs.get("client.type", "desktop"); + channel.setRequestHeader("user-agent", ua, false); } let headers = this.headers; @@ -155,7 +174,7 @@ AsyncResource.prototype = { if (this.authenticator) { let result = this.authenticator(this, method); if (result && result.headers) { - for (let [k, v] of Object.entries(result.headers)) { + for (let [k, v] in Iterator(result.headers)) { headers[k.toLowerCase()] = v; } } @@ -163,7 +182,7 @@ AsyncResource.prototype = { this._log.debug("No authenticator found."); } - for (let [key, value] of Object.entries(headers)) { + for (let [key, value] in Iterator(headers)) { if (key == 'authorization') this._log.trace("HTTP Header " + key + ": ***** (suppressed)"); else @@ -209,10 +228,10 @@ AsyncResource.prototype = { this._log, this.ABORT_TIMEOUT); channel.requestMethod = action; try { - channel.asyncOpen2(listener); + channel.asyncOpen(listener, null); } catch (ex) { - // asyncOpen2 can throw in a bunch of cases -- e.g., a forbidden port. - this._log.warn("Caught an error in asyncOpen2", ex); + // asyncOpen can throw in a bunch of cases -- e.g., a forbidden port. + this._log.warn("Caught an error in asyncOpen: " + CommonUtils.exceptionStr(ex)); CommonUtils.nextTick(callback.bind(this, ex)); } }, @@ -259,7 +278,9 @@ AsyncResource.prototype = { } catch(ex) { // Got a response, but an exception occurred during processing. // This shouldn't occur. - this._log.warn("Caught unexpected exception in _oncomplete", ex); + this._log.warn("Caught unexpected exception " + CommonUtils.exceptionStr(ex) + + " in _onComplete."); + this._log.debug(CommonUtils.stackTrace(ex)); } // Process headers. They can be empty, or the call can otherwise fail, so @@ -297,18 +318,16 @@ AsyncResource.prototype = { contentLength + "."); } } catch (ex) { - this._log.debug("Caught exception visiting headers in _onComplete", ex); + this._log.debug("Caught exception " + CommonUtils.exceptionStr(ex) + + " visiting headers in _onComplete."); + this._log.debug(CommonUtils.stackTrace(ex)); } let ret = new String(data); - ret.url = channel.URI.spec; ret.status = status; ret.success = success; ret.headers = headers; - if (!success) { - this._log.warn(`${action} request to ${ret.url} failed with status ${status}`); - } // Make a lazy getter to convert the json response into an object. // Note that this can cause a parse error to be thrown far away from the // actual fetch, so be warned! @@ -316,7 +335,7 @@ AsyncResource.prototype = { try { return JSON.parse(ret); } catch (ex) { - this._log.warn("Got exception parsing response body", ex); + this._log.warn("Got exception parsing response body: \"" + CommonUtils.exceptionStr(ex)); // Stringify to avoid possibly printing non-printable characters. this._log.debug("Parse fail: Response body starts: \"" + JSON.stringify((ret + "").slice(0, 100)) + @@ -384,12 +403,7 @@ Resource.prototype = { try { this._doRequest(action, data, callback); return Async.waitForSyncCallback(cb); - } catch (ex) { - if (Async.isShutdownException(ex)) { - throw ex; - } - this._log.warn("${action} request to ${url} failed: ${ex}", - { action, url: this.uri.spec, ex }); + } catch(ex) { // Combine the channel stack with this request stack. Need to create // a new error object for that. 
let error = Error(ex.message); @@ -527,7 +541,7 @@ ChannelListener.prototype = { siStream = Cc["@mozilla.org/scriptableinputstream;1"].createInstance(Ci.nsIScriptableInputStream); siStream.init(stream); } catch (ex) { - this._log.warn("Exception creating nsIScriptableInputStream", ex); + this._log.warn("Exception creating nsIScriptableInputStream." + CommonUtils.exceptionStr(ex)); this._log.debug("Parameters: " + req.URI.spec + ", " + stream + ", " + off + ", " + count); // Cannot proceed, so rethrow and allow the channel to cancel itself. throw ex; @@ -543,11 +557,9 @@ ChannelListener.prototype = { try { this._onProgress(); } catch (ex) { - if (Async.isShutdownException(ex)) { - throw ex; - } this._log.warn("Got exception calling onProgress handler during fetch of " - + req.URI.spec, ex); + + req.URI.spec); + this._log.debug(CommonUtils.exceptionStr(ex)); this._log.trace("Rethrowing; expect a failure code from the HTTP channel."); throw ex; } @@ -562,7 +574,7 @@ ChannelListener.prototype = { try { CommonUtils.namedTimer(this.abortRequest, this._timeout, this, "abortTimer"); } catch (ex) { - this._log.warn("Got exception extending abort timer", ex); + this._log.warn("Got exception extending abort timer: " + CommonUtils.exceptionStr(ex)); } }, @@ -656,14 +668,14 @@ ChannelNotificationListener.prototype = { } } } catch (ex) { - this._log.error("Error copying headers", ex); + this._log.error("Error copying headers: " + CommonUtils.exceptionStr(ex)); } // We let all redirects proceed. try { callback.onRedirectVerifyCallback(Cr.NS_OK); } catch (ex) { - this._log.error("onRedirectVerifyCallback threw!", ex); + this._log.error("onRedirectVerifyCallback threw!" + CommonUtils.exceptionStr(ex)); } } }; diff --git a/services/sync/modules/rest.js b/services/sync/modules/rest.js index 94c096dba..34382eed5 100644 --- a/services/sync/modules/rest.js +++ b/services/sync/modules/rest.js @@ -2,7 +2,7 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ -var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components; +const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components; Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://services-common/rest.js"); @@ -28,6 +28,21 @@ SyncStorageRequest.prototype = { _logName: "Sync.StorageRequest", /** + * The string to use as the base User-Agent in Sync requests. + * These strings will look something like + * + * Firefox/4.0 FxSync/1.8.0.20100101.mobile + * + * or + * + * Firefox Aurora/5.0a1 FxSync/1.9.0.20110409.desktop + */ + userAgent: + Services.appinfo.name + "/" + Services.appinfo.version + // Product. + " FxSync/" + WEAVE_VERSION + "." + // Sync. + Services.appinfo.appBuildID + ".", // Build. + + /** * Wait 5 minutes before killing a request. */ timeout: STORAGE_REQUEST_TIMEOUT, @@ -35,7 +50,8 @@ SyncStorageRequest.prototype = { dispatch: function dispatch(method, data, onComplete, onProgress) { // Compose a UA string fragment from the various available identifiers. 
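Note that the restored userAgent string is duplicated between resource.js and rest.js in this patch. In both places the header value actually sent over the wire is that base string plus the services.sync "client.type" pref, e.g. (a sketch; the resulting version numbers are illustrative):

// Produces e.g. "Firefox/4.0 FxSync/1.8.0.20100101.desktop"
let ua = Services.appinfo.name + "/" + Services.appinfo.version +
         " FxSync/" + WEAVE_VERSION + "." +
         Services.appinfo.appBuildID + "." +
         Svc.Prefs.get("client.type", "desktop");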
if (Svc.Prefs.get("sendVersionInfo", true)) { - this.setHeader("user-agent", Utils.userAgent); + let ua = this.userAgent + Svc.Prefs.get("client.type", "desktop"); + this.setHeader("user-agent", ua); } if (this.authenticator) { diff --git a/services/sync/modules/service.js b/services/sync/modules/service.js index 32e047f53..637760b8f 100644 --- a/services/sync/modules/service.js +++ b/services/sync/modules/service.js @@ -4,10 +4,10 @@ this.EXPORTED_SYMBOLS = ["Service"]; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cr = Components.results; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cr = Components.results; +const Cu = Components.utils; // How long before refreshing the cluster const CLUSTER_BACKOFF = 5 * 60 * 1000; // 5 minutes @@ -21,6 +21,7 @@ const KEYS_WBO = "keys"; Cu.import("resource://gre/modules/Preferences.jsm"); Cu.import("resource://gre/modules/XPCOMUtils.jsm"); Cu.import("resource://gre/modules/Log.jsm"); +Cu.import("resource://services-common/utils.js"); Cu.import("resource://services-sync/constants.js"); Cu.import("resource://services-sync/engines.js"); Cu.import("resource://services-sync/engines/clients.js"); @@ -32,7 +33,6 @@ Cu.import("resource://services-sync/rest.js"); Cu.import("resource://services-sync/stages/enginesync.js"); Cu.import("resource://services-sync/stages/declined.js"); Cu.import("resource://services-sync/status.js"); -Cu.import("resource://services-sync/telemetry.js"); Cu.import("resource://services-sync/userapi.js"); Cu.import("resource://services-sync/util.js"); @@ -51,6 +51,15 @@ const STORAGE_INFO_TYPES = [INFO_COLLECTIONS, INFO_COLLECTION_COUNTS, INFO_QUOTA]; +// A structure mapping a (boolean) telemetry probe name to a preference name. +// The probe will record true if the pref is modified, false otherwise. +const TELEMETRY_CUSTOM_SERVER_PREFS = { + WEAVE_CUSTOM_LEGACY_SERVER_CONFIGURATION: "services.sync.serverURL", + WEAVE_CUSTOM_FXA_SERVER_CONFIGURATION: "identity.fxaccounts.auth.uri", + WEAVE_CUSTOM_TOKEN_SERVER_CONFIGURATION: "services.sync.tokenServerURI", +}; + + function Sync11Service() { this._notify = Utils.notify("weave:service:"); } @@ -64,13 +73,8 @@ Sync11Service.prototype = { storageURL: null, metaURL: null, cryptoKeyURL: null, - // The cluster URL comes via the ClusterManager object, which in the FxA - // world is ebbedded in the token returned from the token server. - _clusterURL: null, - get serverURL() { - return Svc.Prefs.get("serverURL"); - }, + get serverURL() Svc.Prefs.get("serverURL"), set serverURL(value) { if (!value.endsWith("/")) { value += "/"; @@ -80,20 +84,14 @@ Sync11Service.prototype = { if (value == this.serverURL) return; + // A new server most likely uses a different cluster, so clear that Svc.Prefs.set("serverURL", value); - - // A new server most likely uses a different cluster, so clear that. 
- this._clusterURL = null; + Svc.Prefs.reset("clusterURL"); }, - get clusterURL() { - return this._clusterURL || ""; - }, + get clusterURL() Svc.Prefs.get("clusterURL", ""), set clusterURL(value) { - if (value != null && typeof value != "string") { - throw new Error("cluster must be a string, got " + (typeof value)); - } - this._clusterURL = value; + Svc.Prefs.set("clusterURL", value); this._updateCachedURLs(); }, @@ -171,16 +169,8 @@ Sync11Service.prototype = { _updateCachedURLs: function _updateCachedURLs() { // Nothing to cache yet if we don't have the building blocks - if (!this.clusterURL || !this.identity.username) { - // Also reset all other URLs used by Sync to ensure we aren't accidentally - // using one cached earlier - if there's no cluster URL any cached ones - // are invalid. - this.infoURL = undefined; - this.storageURL = undefined; - this.metaURL = undefined; - this.cryptoKeysURL = undefined; + if (!this.clusterURL || !this.identity.username) return; - } this._log.debug("Caching URLs under storage user base: " + this.userBaseURL); @@ -315,6 +305,21 @@ Sync11Service.prototype = { return false; }, + // The global "enabled" state comes from prefs, and will be set to false + // whenever the UI that exposes what to sync finds all Sync engines disabled. + get enabled() { + return Svc.Prefs.get("enabled"); + }, + set enabled(val) { + // There's no real reason to impose this other than to catch someone doing + // something we don't expect with bad consequences - all setting of this + // pref are in the UI code and external to this module. + if (val) { + throw new Error("Only disabling via this setter is supported"); + } + Svc.Prefs.set("enabled", val); + }, + /** * Prepare to initialize the rest of Weave after waiting a little bit */ @@ -344,8 +349,6 @@ Sync11Service.prototype = { this._clusterManager = this.identity.createClusterManager(this); this.recordManager = new RecordManager(this); - this.enabled = true; - this._registerEngines(); let ua = Cc["@mozilla.org/network/protocol;1?name=http"]. @@ -359,7 +362,6 @@ Sync11Service.prototype = { } Svc.Obs.add("weave:service:setup-complete", this); - Svc.Obs.add("sync:collection_changed", this); // Pulled from FxAccountsCommon Svc.Prefs.observe("engine.", this); this.scheduler = new SyncScheduler(this); @@ -375,6 +377,12 @@ Sync11Service.prototype = { Svc.Obs.notify("weave:engine:start-tracking"); } + // Telemetry probes to indicate if the user is using custom servers. + for (let [probeName, prefName] of Iterator(TELEMETRY_CUSTOM_SERVER_PREFS)) { + let isCustomized = Services.prefs.prefHasUserValue(prefName); + Services.telemetry.getHistogramById(probeName).add(isCustomized); + } + // Send an event now that Weave service is ready. We don't do this // synchronously so that observers can import this module before // registering an observer. 
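One caveat about the telemetry loop restored above: Iterator() is a non-standard legacy SpiderMonkey feature (like the "for each" loops reintroduced elsewhere in this import), so it only runs on engines that still ship it. In standard JavaScript the same probe loop would read:

// Standard-syntax equivalent of the Iterator() loop above.
for (let [probeName, prefName] of Object.entries(TELEMETRY_CUSTOM_SERVER_PREFS)) {
  let isCustomized = Services.prefs.prefHasUserValue(prefName);
  Services.telemetry.getHistogramById(probeName).add(isCustomized);
}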
@@ -424,7 +432,7 @@ Sync11Service.prototype = { // Map each old pref to the current pref branch let oldPref = new Preferences(oldPrefBranch); - for (let pref of oldPrefNames) + for each (let pref in oldPrefNames) Svc.Prefs.set(pref, oldPref.get(pref)); // Remove all the old prefs and remember that we've migrated @@ -472,7 +480,8 @@ Sync11Service.prototype = { this.engineManager.register(ns[engineName]); } catch (ex) { - this._log.warn("Could not register engine " + name, ex); + this._log.warn("Could not register engine " + name + ": " + + CommonUtils.exceptionStr(ex)); } } @@ -486,13 +495,6 @@ Sync11Service.prototype = { observe: function observe(subject, topic, data) { switch (topic) { - // Ideally this observer should be in the SyncScheduler, but it would require - // some work to know about the sync specific engines. We should move this there once it does. - case "sync:collection_changed": - if (data.includes("clients")) { - this.sync([]); // [] = clients collection only - } - break; case "weave:service:setup-complete": let status = this._checkSetup(); if (status != STATUS_DISABLED && status != CLIENT_NOT_CONFIGURED) @@ -557,8 +559,7 @@ Sync11Service.prototype = { // Always check for errors; this is also where we look for X-Weave-Alert. this.errorHandler.checkServerError(info); if (!info.success) { - this._log.error("Aborting sync: failed to get collections.") - throw info; + throw "Aborting sync: failed to get collections."; } return info; }, @@ -675,13 +676,21 @@ Sync11Service.prototype = { } catch (ex) { // This means no keys are present, or there's a network error. - this._log.debug("Failed to fetch and verify keys", ex); + this._log.debug("Failed to fetch and verify keys: " + + Utils.exceptionStr(ex)); this.errorHandler.checkServerError(ex); return false; } }, verifyLogin: function verifyLogin(allow40XRecovery = true) { + // If the identity isn't ready it might not know the username... + if (!this.identity.readyToAuthenticate) { + this._log.info("Not ready to authenticate in verifyLogin."); + this.status.login = LOGIN_FAILED_NOT_READY; + return false; + } + if (!this.identity.username) { this._log.warn("No username in verifyLogin."); this.status.login = LOGIN_FAILED_NO_USERNAME; @@ -753,12 +762,8 @@ Sync11Service.prototype = { return this.verifyLogin(false); } - // We must have the right cluster, but the server doesn't expect us. - // The implications of this depend on the identity being used - for - // the legacy identity, it's an authoritatively "incorrect password", - // (ie, LOGIN_FAILED_LOGIN_REJECTED) but for FxA it probably means - // "transient error fetching auth token". - this.status.login = this.identity.loginStatusFromVerification404(); + // We must have the right cluster, but the server doesn't expect us + this.status.login = LOGIN_FAILED_LOGIN_REJECTED; return false; default: @@ -769,7 +774,7 @@ Sync11Service.prototype = { } } catch (ex) { // Must have failed on some network issue - this._log.debug("verifyLogin failed", ex); + this._log.debug("verifyLogin failed: " + Utils.exceptionStr(ex)); this.status.login = LOGIN_FAILED_NETWORK_ERROR; this.errorHandler.checkServerError(ex); return false; @@ -842,7 +847,8 @@ Sync11Service.prototype = { try { cb.wait(); } catch (ex) { - this._log.debug("Password change failed", ex); + this._log.debug("Password change failed: " + + CommonUtils.exceptionStr(ex)); return false; } @@ -884,11 +890,12 @@ Sync11Service.prototype = { // Deletion doesn't make sense if we aren't set up yet! 
if (this.clusterURL != "") { // Clear client-specific data from the server, including disabled engines. - for (let engine of [this.clientsEngine].concat(this.engineManager.getAll())) { + for each (let engine in [this.clientsEngine].concat(this.engineManager.getAll())) { try { engine.removeClientData(); } catch(ex) { - this._log.warn(`Deleting client data for ${engine.name} failed`, ex); + this._log.warn("Deleting client data for " + engine.name + " failed:" + + Utils.exceptionStr(ex)); } } this._log.debug("Finished deleting client data."); @@ -914,7 +921,6 @@ Sync11Service.prototype = { this._ignorePrefObserver = true; Svc.Prefs.resetBranch(""); this._ignorePrefObserver = false; - this.clusterURL = null; Svc.Prefs.set("lastversion", WEAVE_VERSION); @@ -931,22 +937,25 @@ Sync11Service.prototype = { return; } - try { - this.identity.finalize(); - // an observer so the FxA migration code can take some action before - // the new identity is created. - Svc.Obs.notify("weave:service:start-over:init-identity"); - this.identity.username = ""; - this.status.__authManager = null; - this.identity = Status._authManager; - this._clusterManager = this.identity.createClusterManager(this); - Svc.Obs.notify("weave:service:start-over:finish"); - } catch (err) { - this._log.error("startOver failed to re-initialize the identity manager: " + err); - // Still send the observer notification so the current state is - // reflected in the UI. - Svc.Obs.notify("weave:service:start-over:finish"); - } + this.identity.finalize().then( + () => { + // an observer so the FxA migration code can take some action before + // the new identity is created. + Svc.Obs.notify("weave:service:start-over:init-identity"); + this.identity.username = ""; + this.status.__authManager = null; + this.identity = Status._authManager; + this._clusterManager = this.identity.createClusterManager(this); + Svc.Obs.notify("weave:service:start-over:finish"); + } + ).then(null, + err => { + this._log.error("startOver failed to re-initialize the identity manager: " + err); + // Still send the observer notification so the current state is + // reflected in the UI. + Svc.Obs.notify("weave:service:start-over:finish"); + } + ); }, persistLogin: function persistLogin() { @@ -981,12 +990,8 @@ Sync11Service.prototype = { } // Ask the identity manager to explicitly login now. - this._log.info("Logging in the user."); let cb = Async.makeSpinningCallback(); - this.identity.ensureLoggedIn().then( - () => cb(null), - err => cb(err || "ensureLoggedIn failed") - ); + this.identity.ensureLoggedIn().then(cb, cb); // Just let any errors bubble up - they've more context than we do! cb.wait(); @@ -997,9 +1002,9 @@ Sync11Service.prototype = { && (username || password || passphrase)) { Svc.Obs.notify("weave:service:setup-complete"); } + this._log.info("Logging in the user."); this._updateCachedURLs(); - this._log.info("User logged in successfully - verifying login."); if (!this.verifyLogin()) { // verifyLogin sets the failure states here. throw "Login failed: " + this.status.login; @@ -1064,49 +1069,11 @@ Sync11Service.prototype = { } }, - // Note: returns false if we failed for a reason other than the server not yet - // supporting the api. - _fetchServerConfiguration() { - if (Svc.Prefs.get("APILevel") >= 2) { - // This is similar to _fetchInfo, but with different error handling. - // Only supported by later sync implementations. 
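Several hunks in service.js, like the login() change above, bridge promise-returning identity APIs into this synchronous code path with Async.makeSpinningCallback(). The pattern in isolation (somePromiseApi is a placeholder):

// Sketch: spin the event loop until a promise settles (services-common/async.js).
let cb = Async.makeSpinningCallback();
somePromiseApi().then(result => cb(null, result), err => cb(err));
let result = cb.wait();  // throws if cb was invoked with a truthy error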
- - let infoURL = this.userBaseURL + "info/configuration"; - this._log.debug("Fetching server configuration", infoURL); - let configResponse; - try { - configResponse = this.resource(infoURL).get(); - } catch (ex) { - // This is probably a network or similar error. - this._log.warn("Failed to fetch info/configuration", ex); - this.errorHandler.checkServerError(ex); - return false; - } - - if (configResponse.status == 404) { - // This server doesn't support the URL yet - that's OK. - this._log.debug("info/configuration returned 404 - using default upload semantics"); - } else if (configResponse.status != 200) { - this._log.warn(`info/configuration returned ${configResponse.status} - using default configuration`); - this.errorHandler.checkServerError(configResponse); - return false; - } else { - this.serverConfiguration = configResponse.obj; - } - this._log.trace("info/configuration for this server", this.serverConfiguration); - } - return true; - }, - // Stuff we need to do after login, before we can really do // anything (e.g. key setup). _remoteSetup: function _remoteSetup(infoResponse) { let reset = false; - if (!this._fetchServerConfiguration()) { - return false; - } - this._log.debug("Fetching global metadata record"); let meta = this.recordManager.get(this.metaURL); @@ -1133,7 +1100,7 @@ Sync11Service.prototype = { return false; } - if (this.recordManager.response.status == 404) { + if (!this.recordManager.response.success || !newMeta) { this._log.debug("No meta/global record on the server. Creating one."); newMeta = new WBORecord("meta", "global"); newMeta.payload.syncID = this.syncID; @@ -1143,16 +1110,10 @@ Sync11Service.prototype = { newMeta.isNew = true; this.recordManager.set(this.metaURL, newMeta); - let uploadRes = newMeta.upload(this.resource(this.metaURL)); - if (!uploadRes.success) { + if (!newMeta.upload(this.resource(this.metaURL)).success) { this._log.warn("Unable to upload new meta/global. Failing remote setup."); - this.errorHandler.checkServerError(uploadRes); return false; } - } else if (!newMeta) { - this._log.warn("Unable to get meta/global. Failing remote setup."); - this.errorHandler.checkServerError(this.recordManager.response); - return false; } else { // If newMeta, then it stands to reason that meta != null. newMeta.isNew = meta.isNew; @@ -1293,9 +1254,13 @@ Sync11Service.prototype = { return reason; }, - sync: function sync(engineNamesToSync) { - let dateStr = Utils.formatTimestamp(new Date()); - this._log.debug("User-Agent: " + Utils.userAgent); + sync: function sync() { + if (!this.enabled) { + this._log.debug("Not syncing as Sync is disabled."); + return; + } + let dateStr = new Date().toLocaleFormat(LOG_DATE_FORMAT); + this._log.debug("User-Agent: " + SyncStorageRequest.prototype.userAgent); this._log.info("Starting sync at " + dateStr); this._catch(function () { // Make sure we're logged in. @@ -1309,14 +1274,14 @@ Sync11Service.prototype = { else { this._log.trace("In sync: no need to login."); } - return this._lockedSync(engineNamesToSync); + return this._lockedSync.apply(this, arguments); })(); }, /** * Sync up engines with the server. 
*/ - _lockedSync: function _lockedSync(engineNamesToSync) { + _lockedSync: function _lockedSync() { return this._lock("service.js: sync", this._notify("sync", "", function onNotify() { @@ -1327,7 +1292,7 @@ Sync11Service.prototype = { let cb = Async.makeSpinningCallback(); synchronizer.onComplete = cb; - synchronizer.sync(engineNamesToSync); + synchronizer.sync(); // wait() throws if the first argument is truthy, which is exactly what // we want. let result = cb.wait(); @@ -1338,31 +1303,27 @@ Sync11Service.prototype = { // We successfully synchronized. // Check if the identity wants to pre-fetch a migration sentinel from // the server. - // Only supported by Sync server API level 2+ // If we have no clusterURL, we are probably doing a node reassignment // so don't attempt to get it in that case. - if (Svc.Prefs.get("APILevel") >= 2 && this.clusterURL) { - this.identity.prefetchMigrationSentinel(this); + //if (this.clusterURL) { + // this.identity.prefetchMigrationSentinel(this); + //} + + // Now let's update our declined engines. + let meta = this.recordManager.get(this.metaURL); + if (!meta) { + this._log.warn("No meta/global; can't update declined state."); + return; } - // Now let's update our declined engines (but only if we have a metaURL; - // if Sync failed due to no node we will not have one) - if (this.metaURL) { - let meta = this.recordManager.get(this.metaURL); - if (!meta) { - this._log.warn("No meta/global; can't update declined state."); - return; - } - - let declinedEngines = new DeclinedEngines(this); - let didChange = declinedEngines.updateDeclined(meta, this.engineManager); - if (!didChange) { - this._log.info("No change to declined engines. Not reuploading meta/global."); - return; - } - - this.uploadMetaGlobal(meta); + let declinedEngines = new DeclinedEngines(this); + let didChange = declinedEngines.updateDeclined(meta, this.engineManager); + if (!didChange) { + this._log.info("No change to declined engines. Not reuploading meta/global."); + return; } + + this.uploadMetaGlobal(meta); }))(); }, @@ -1536,7 +1497,7 @@ Sync11Service.prototype = { // Wipe everything we know about except meta because we just uploaded it let engines = [this.clientsEngine].concat(this.engineManager.getAll()); - let collections = engines.map(engine => engine.name); + let collections = [engine.name for each (engine in engines)]; // TODO: there's a bug here. We should be calling resetClient, no? // Generate, upload, and download new keys. Do this last so we don't wipe @@ -1555,7 +1516,6 @@ Sync11Service.prototype = { */ wipeServer: function wipeServer(collections) { let response; - let histogram = Services.telemetry.getHistogramById("WEAVE_WIPE_SERVER_SUCCEEDED"); if (!collections) { // Strip the trailing slash. let res = this.resource(this.storageURL.slice(0, -1)); @@ -1563,17 +1523,14 @@ Sync11Service.prototype = { try { response = res.delete(); } catch (ex) { - this._log.debug("Failed to wipe server", ex); - histogram.add(false); + this._log.debug("Failed to wipe server: " + CommonUtils.exceptionStr(ex)); throw ex; } if (response.status != 200 && response.status != 404) { this._log.debug("Aborting wipeServer. 
Server responded with " + response.status + " response for " + this.storageURL); - histogram.add(false); throw response; } - histogram.add(true); return response.headers["x-weave-timestamp"]; } @@ -1583,15 +1540,14 @@ Sync11Service.prototype = { try { response = this.resource(url).delete(); } catch (ex) { - this._log.debug("Failed to wipe '" + name + "' collection", ex); - histogram.add(false); + this._log.debug("Failed to wipe '" + name + "' collection: " + + Utils.exceptionStr(ex)); throw ex; } if (response.status != 200 && response.status != 404) { this._log.debug("Aborting wipeServer. Server responded with " + response.status + " response for " + url); - histogram.add(false); throw response; } @@ -1599,7 +1555,7 @@ Sync11Service.prototype = { timestamp = response.headers["x-weave-timestamp"]; } } - histogram.add(true); + return timestamp; }, @@ -1623,7 +1579,7 @@ Sync11Service.prototype = { } // Fully wipe each engine if it's able to decrypt data - for (let engine of engines) { + for each (let engine in engines) { if (engine.canDecrypt()) { engine.wipeClient(); } @@ -1701,7 +1657,7 @@ Sync11Service.prototype = { } // Have each engine drop any temporary meta data - for (let engine of engines) { + for each (let engine in engines) { engine.resetClient(); } })(); @@ -1731,7 +1687,8 @@ Sync11Service.prototype = { return this.getStorageRequest(url).get(function onComplete(error) { // Note: 'this' is the request. if (error) { - this._log.debug("Failed to retrieve '" + info_type + "'", error); + this._log.debug("Failed to retrieve '" + info_type + "': " + + Utils.exceptionStr(error)); return callback(error); } if (this.response.status != 200) { diff --git a/services/sync/modules/stages/cluster.js b/services/sync/modules/stages/cluster.js index 7665ce825..dd358bf98 100644 --- a/services/sync/modules/stages/cluster.js +++ b/services/sync/modules/stages/cluster.js @@ -4,7 +4,7 @@ this.EXPORTED_SYMBOLS = ["ClusterManager"]; -var {utils: Cu} = Components; +const {utils: Cu} = Components; Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://services-sync/constants.js"); @@ -80,9 +80,6 @@ ClusterManager.prototype = { return false; } - // Convert from the funky "String object with additional properties" that - // resource.js returns to a plain-old string. 
- cluster = cluster.toString(); // Don't update stuff if we already have the right cluster if (cluster == this.service.clusterURL) { return false; @@ -90,6 +87,7 @@ ClusterManager.prototype = { this._log.debug("Setting cluster to " + cluster); this.service.clusterURL = cluster; + Svc.Prefs.set("lastClusterUpdate", Date.now().toString()); return true; }, diff --git a/services/sync/modules/stages/declined.js b/services/sync/modules/stages/declined.js index ff8a14181..b0877e929 100644 --- a/services/sync/modules/stages/declined.js +++ b/services/sync/modules/stages/declined.js @@ -11,7 +11,7 @@ this.EXPORTED_SYMBOLS = ["DeclinedEngines"]; -var {utils: Cu} = Components; +const {utils: Cu} = Components; Cu.import("resource://services-sync/constants.js"); Cu.import("resource://gre/modules/Log.jsm"); @@ -29,8 +29,8 @@ this.DeclinedEngines = function (service) { } this.DeclinedEngines.prototype = { updateDeclined: function (meta, engineManager=this.service.engineManager) { - let enabled = new Set(engineManager.getEnabled().map(e => e.name)); - let known = new Set(engineManager.getAll().map(e => e.name)); + let enabled = new Set([e.name for each (e in engineManager.getEnabled())]); + let known = new Set([e.name for each (e in engineManager.getAll())]); let remoteDeclined = new Set(meta.payload.declined || []); let localDeclined = new Set(engineManager.getDeclined()); diff --git a/services/sync/modules/stages/enginesync.js b/services/sync/modules/stages/enginesync.js index a00a2f48b..ed91adddb 100644 --- a/services/sync/modules/stages/enginesync.js +++ b/services/sync/modules/stages/enginesync.js @@ -8,16 +8,13 @@ this.EXPORTED_SYMBOLS = ["EngineSynchronizer"]; -var {utils: Cu} = Components; +const {utils: Cu} = Components; Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://services-sync/constants.js"); Cu.import("resource://services-sync/engines.js"); Cu.import("resource://services-sync/policies.js"); Cu.import("resource://services-sync/util.js"); -Cu.import("resource://services-common/observers.js"); -Cu.import("resource://services-common/async.js"); -Cu.import("resource://gre/modules/Task.jsm"); /** * Perform synchronization of engines. @@ -34,7 +31,7 @@ this.EngineSynchronizer = function EngineSynchronizer(service) { } EngineSynchronizer.prototype = { - sync: function sync(engineNamesToSync) { + sync: function sync() { if (!this.onComplete) { throw new Error("onComplete handler not installed."); } @@ -99,9 +96,6 @@ EngineSynchronizer.prototype = { return; } - // We only honor the "hint" of what engines to Sync if this isn't - // a first sync. - let allowEnginesHint = false; // Wipe data in the desired direction if necessary switch (Svc.Prefs.get("firstSync")) { case "resetClient": @@ -113,9 +107,6 @@ EngineSynchronizer.prototype = { case "wipeRemote": this.service.wipeRemote(engineManager.enabledEngineNames); break; - default: - allowEnginesHint = true; - break; } if (this.service.clientsEngine.localCommands) { @@ -145,31 +136,20 @@ EngineSynchronizer.prototype = { try { this._updateEnabledEngines(); } catch (ex) { - this._log.debug("Updating enabled engines failed", ex); + this._log.debug("Updating enabled engines failed: " + + Utils.exceptionStr(ex)); this.service.errorHandler.checkServerError(ex); this.onComplete(ex); return; } - // If the engines to sync has been specified, we sync in the order specified. 
- let enginesToSync; - if (allowEnginesHint && engineNamesToSync) { - this._log.info("Syncing specified engines", engineNamesToSync); - enginesToSync = engineManager.get(engineNamesToSync).filter(e => e.enabled); - } else { - this._log.info("Syncing all enabled engines."); - enginesToSync = engineManager.getEnabled(); - } try { - // We don't bother validating engines that failed to sync. - let enginesToValidate = []; - for (let engine of enginesToSync) { + for (let engine of engineManager.getEnabled()) { // If there's any problems with syncing the engine, report the failure if (!(this._syncEngine(engine)) || this.service.status.enforceBackoff) { this._log.info("Aborting sync for failure in " + engine.name); break; } - enginesToValidate.push(engine); } // If _syncEngine fails for a 401, we might not have a cluster URL here. @@ -195,8 +175,6 @@ EngineSynchronizer.prototype = { } } - Async.promiseSpinningly(this._tryValidateEngines(enginesToValidate)); - // If there were no sync engine failures if (this.service.status.service != SYNC_FAILED_PARTIAL) { Svc.Prefs.set("lastSync", new Date().toString()); @@ -206,7 +184,7 @@ EngineSynchronizer.prototype = { Svc.Prefs.reset("firstSync"); let syncTime = ((Date.now() - startTime) / 1000).toFixed(2); - let dateStr = Utils.formatTimestamp(new Date()); + let dateStr = new Date().toLocaleFormat(LOG_DATE_FORMAT); this._log.info("Sync completed at " + dateStr + " after " + syncTime + " secs."); } @@ -214,106 +192,6 @@ EngineSynchronizer.prototype = { this.onComplete(null); }, - _tryValidateEngines: Task.async(function* (recentlySyncedEngines) { - if (!Services.telemetry.canRecordBase || !Svc.Prefs.get("validation.enabled", false)) { - this._log.info("Skipping validation: validation or telemetry reporting is disabled"); - return; - } - - let lastValidation = Svc.Prefs.get("validation.lastTime", 0); - let validationInterval = Svc.Prefs.get("validation.interval"); - let nowSeconds = Math.floor(Date.now() / 1000); - - if (nowSeconds - lastValidation < validationInterval) { - this._log.info("Skipping validation: too recent since last validation attempt"); - return; - } - // Update the time now, even if we may return false still. We don't want to - // check the rest of these more frequently than once a day. - Svc.Prefs.set("validation.lastTime", nowSeconds); - - // Validation only occurs a certain percentage of the time. - let validationProbability = Svc.Prefs.get("validation.percentageChance", 0) / 100.0; - if (validationProbability < Math.random()) { - this._log.info("Skipping validation: Probability threshold not met"); - return; - } - let maxRecords = Svc.Prefs.get("validation.maxRecords"); - if (!maxRecords) { - // Don't bother asking the server for the counts if we know validation - // won't happen anyway. - return; - } - - // maxRecords of -1 means "any number", so we can skip asking the server. - // Used for tests. - let info; - if (maxRecords < 0) { - info = {}; - for (let e of recentlySyncedEngines) { - info[e.name] = 1; // needs to be < maxRecords - } - maxRecords = 2; - } else { - - let collectionCountsURL = this.service.userBaseURL + "info/collection_counts"; - try { - let infoResp = this.service._fetchInfo(collectionCountsURL); - if (!infoResp.success) { - this._log.error("Can't run validation: request to info/collection_counts responded with " - + resp.status); - return; - } - info = infoResp.obj; // might throw because obj is a getter which parses json. 
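The removed _tryValidateEngines above gates validation on three prefs before doing any work. Condensed, the gate amounts to the following (a sketch using the pref names from the hunk):

let last = Svc.Prefs.get("validation.lastTime", 0);
let interval = Svc.Prefs.get("validation.interval");
let now = Math.floor(Date.now() / 1000);
let chance = Svc.Prefs.get("validation.percentageChance", 0) / 100.0;
// Run at most once per interval, and then only for a random sample of syncs.
let shouldValidate = (now - last >= interval) && (Math.random() <= chance);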
- } catch (e) { - // Not running validation is totally fine, so we just write an error log and return. - this._log.error("Can't run validation: Caught error when fetching counts", e); - return; - } - } - - if (!info) { - return; - } - - let engineLookup = new Map(recentlySyncedEngines.map(e => [e.name, e])); - let toRun = []; - for (let [engineName, recordCount] of Object.entries(info)) { - let engine = engineLookup.get(engineName); - if (recordCount > maxRecords || !engine) { - this._log.debug(`Skipping validation for ${engineName} because it's not an engine or ` + - `the number of records (${recordCount}) is greater than the maximum allowed (${maxRecords}).`); - continue; - } - let validator = engine.getValidator(); - if (!validator) { - continue; - } - // Put this in an array so that we know how many we're going to do, so we - // don't tell users we're going to run some validators when we aren't. - toRun.push({ engine, validator }); - } - - if (!toRun.length) { - return; - } - Services.console.logStringMessage( - "Sync is about to run a consistency check. This may be slow, and " + - "can be controlled using the pref \"services.sync.validation.enabled\".\n" + - "If you encounter any problems because of this, please file a bug."); - for (let { validator, engine } of toRun) { - try { - let result = yield validator.validate(engine); - Observers.notify("weave:engine:validate:finish", result, engine.name); - } catch (e) { - this._log.error(`Failed to run validation on ${engine.name}!`, e); - Observers.notify("weave:engine:validate:error", e, engine.name) - // Keep validating -- there's no reason to think that a failure for one - // validator would mean the others will fail. - } - } - }), - // Returns true if sync should proceed. // false / no return value means sync should be aborted. _syncEngine: function _syncEngine(engine) { @@ -346,15 +224,8 @@ EngineSynchronizer.prototype = { // If we're the only client, and no engines are marked as enabled, // thumb our noses at the server data: it can't be right. // Belt-and-suspenders approach to Bug 615926. - let hasEnabledEngines = false; - for (let e in meta.payload.engines) { - if (e != "clients") { - hasEnabledEngines = true; - break; - } - } - - if ((numClients <= 1) && !hasEnabledEngines) { + if ((numClients <= 1) && + ([e for (e in meta.payload.engines) if (e != "clients")].length == 0)) { this._log.info("One client and no enabled engines: not touching local engine status."); return; } @@ -418,7 +289,7 @@ EngineSynchronizer.prototype = { } // Any remaining engines were either enabled locally or disabled remotely. - for (let engineName of enabled) { + for each (let engineName in enabled) { let engine = engineManager.get(engineName); if (Svc.Prefs.get("engineStatusChanged." + engine.prefName, false)) { this._log.trace("The " + engineName + " engine was enabled locally."); diff --git a/services/sync/modules/status.js b/services/sync/modules/status.js index 100bc7965..19dff9712 100644 --- a/services/sync/modules/status.js +++ b/services/sync/modules/status.js @@ -4,10 +4,10 @@ this.EXPORTED_SYMBOLS = ["Status"]; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cr = Components.results; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cr = Components.results; +const Cu = Components.utils; Cu.import("resource://services-sync/constants.js"); Cu.import("resource://gre/modules/Log.jsm"); @@ -30,7 +30,10 @@ this.Status = { .wrappedJSObject; let idClass = service.fxAccountsEnabled ? 
BrowserIDManager : IdentityManager; this.__authManager = new idClass(); - this.__authManager.initialize(); + // .initialize returns a promise, so we need to spin until it resolves. + let cb = Async.makeSpinningCallback(); + this.__authManager.initialize().then(cb, cb); + cb.wait(); return this.__authManager; }, diff --git a/services/sync/modules/telemetry.js b/services/sync/modules/telemetry.js deleted file mode 100644 index c311387f7..000000000 --- a/services/sync/modules/telemetry.js +++ /dev/null @@ -1,578 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this file, - * You can obtain one at http://mozilla.org/MPL/2.0/. */ - -"use strict"; - -const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components; - -this.EXPORTED_SYMBOLS = ["SyncTelemetry"]; - -Cu.import("resource://services-sync/browserid_identity.js"); -Cu.import("resource://services-sync/main.js"); -Cu.import("resource://services-sync/status.js"); -Cu.import("resource://services-sync/util.js"); -Cu.import("resource://services-common/observers.js"); -Cu.import("resource://services-common/async.js"); -Cu.import("resource://gre/modules/Log.jsm"); -Cu.import("resource://gre/modules/TelemetryController.jsm"); -Cu.import("resource://gre/modules/FxAccounts.jsm"); -Cu.import("resource://gre/modules/XPCOMUtils.jsm"); -Cu.import("resource://gre/modules/osfile.jsm", this); - -let constants = {}; -Cu.import("resource://services-sync/constants.js", constants); - -var fxAccountsCommon = {}; -Cu.import("resource://gre/modules/FxAccountsCommon.js", fxAccountsCommon); - -XPCOMUtils.defineLazyServiceGetter(this, "Telemetry", - "@mozilla.org/base/telemetry;1", - "nsITelemetry"); - -const log = Log.repository.getLogger("Sync.Telemetry"); - -const TOPICS = [ - "profile-before-change", - "weave:service:sync:start", - "weave:service:sync:finish", - "weave:service:sync:error", - - "weave:engine:sync:start", - "weave:engine:sync:finish", - "weave:engine:sync:error", - "weave:engine:sync:applied", - "weave:engine:sync:uploaded", - "weave:engine:validate:finish", - "weave:engine:validate:error", -]; - -const PING_FORMAT_VERSION = 1; - -// The set of engines we record telemetry for - any other engines are ignored. -const ENGINES = new Set(["addons", "bookmarks", "clients", "forms", "history", - "passwords", "prefs", "tabs", "extension-storage"]); - -// A regex we can use to replace the profile dir in error messages. We use a -// regexp so we can simply replace all case-insensitive occurences. -// This escaping function is from: -// https://developer.mozilla.org/en/docs/Web/JavaScript/Guide/Regular_Expressions -const reProfileDir = new RegExp( - OS.Constants.Path.profileDir.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), - "gi"); - -function transformError(error, engineName) { - if (Async.isShutdownException(error)) { - return { name: "shutdownerror" }; - } - - if (typeof error === "string") { - if (error.startsWith("error.")) { - // This is hacky, but I can't imagine that it's not also accurate. - return { name: "othererror", error }; - } - // There's a chance the profiledir is in the error string which is PII we - // want to avoid including in the ping. 
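The reProfileDir pattern in the removed telemetry.js above relies on escaping the literal profile path before handing it to the RegExp constructor, so that path characters like "." and "\" match literally. The escaping idiom in isolation (the path is illustrative):

function escapeRegExp(s) {
  // Backslash-escape every character that is special inside a RegExp.
  return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
let re = new RegExp(escapeRegExp("/home/user/profile"), "gi");
"Error at /HOME/USER/PROFILE/weave.log".replace(re, "[profileDir]");
// => "Error at [profileDir]/weave.log" (case-insensitive, all occurrences)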
- error = error.replace(reProfileDir, "[profileDir]"); - return { name: "unexpectederror", error }; - } - - if (error.failureCode) { - return { name: "othererror", error: error.failureCode }; - } - - if (error instanceof AuthenticationError) { - return { name: "autherror", from: error.source }; - } - - if (error instanceof Ci.mozIStorageError) { - return { name: "sqlerror", code: error.result }; - } - - let httpCode = error.status || - (error.response && error.response.status) || - error.code; - - if (httpCode) { - return { name: "httperror", code: httpCode }; - } - - if (error.result) { - return { name: "nserror", code: error.result }; - } - - return { - name: "unexpectederror", - // as above, remove the profile dir value. - error: String(error).replace(reProfileDir, "[profileDir]") - } -} - -function tryGetMonotonicTimestamp() { - try { - return Telemetry.msSinceProcessStart(); - } catch (e) { - log.warn("Unable to get a monotonic timestamp!"); - return -1; - } -} - -function timeDeltaFrom(monotonicStartTime) { - let now = tryGetMonotonicTimestamp(); - if (monotonicStartTime !== -1 && now !== -1) { - return Math.round(now - monotonicStartTime); - } - return -1; -} - -class EngineRecord { - constructor(name) { - // startTime is in ms from process start, but is monotonic (unlike Date.now()) - // so we need to keep both it and when. - this.startTime = tryGetMonotonicTimestamp(); - this.name = name; - } - - toJSON() { - let result = Object.assign({}, this); - delete result.startTime; - return result; - } - - finished(error) { - let took = timeDeltaFrom(this.startTime); - if (took > 0) { - this.took = took; - } - if (error) { - this.failureReason = transformError(error, this.name); - } - } - - recordApplied(counts) { - if (this.incoming) { - log.error(`Incoming records applied multiple times for engine ${this.name}!`); - return; - } - if (this.name === "clients" && !counts.failed) { - // ignore successful application of client records - // since otherwise they show up every time and are meaningless. - return; - } - - let incomingData = {}; - let properties = ["applied", "failed", "newFailed", "reconciled"]; - // Only record non-zero properties and only record incoming at all if - // there's at least one property we care about. - for (let property of properties) { - if (counts[property]) { - incomingData[property] = counts[property]; - this.incoming = incomingData; - } - } - } - - recordValidation(validationResult) { - if (this.validation) { - log.error(`Multiple validations occurred for engine ${this.name}!`); - return; - } - let { problems, version, duration, recordCount } = validationResult; - let validation = { - version: version || 0, - checked: recordCount || 0, - }; - if (duration > 0) { - validation.took = Math.round(duration); - } - let summarized = problems.getSummary(true).filter(({count}) => count > 0); - if (summarized.length) { - validation.problems = summarized; - } - this.validation = validation; - } - - recordValidationError(e) { - if (this.validation) { - log.error(`Multiple validations occurred for engine ${this.name}!`); - return; - } - - this.validation = { - failureReason: transformError(e) - }; - } - - recordUploaded(counts) { - if (counts.sent || counts.failed) { - if (!this.outgoing) { - this.outgoing = []; - } - this.outgoing.push({ - sent: counts.sent || undefined, - failed: counts.failed || undefined, - }); - } - } -} - -class TelemetryRecord { - constructor(allowedEngines) { - this.allowedEngines = allowedEngines; - // Our failure reason. 
This property only exists in the generated ping if an - // error actually occurred. - this.failureReason = undefined; - this.uid = ""; - this.when = Date.now(); - this.startTime = tryGetMonotonicTimestamp(); - this.took = 0; // will be set later. - - // All engines that have finished (ie, does not include the "current" one) - // We omit this from the ping if it's empty. - this.engines = []; - // The engine that has started but not yet stopped. - this.currentEngine = null; - } - - toJSON() { - let result = { - when: this.when, - uid: this.uid, - took: this.took, - failureReason: this.failureReason, - status: this.status, - deviceID: this.deviceID, - devices: this.devices, - }; - let engines = []; - for (let engine of this.engines) { - engines.push(engine.toJSON()); - } - if (engines.length > 0) { - result.engines = engines; - } - return result; - } - - finished(error) { - this.took = timeDeltaFrom(this.startTime); - if (this.currentEngine != null) { - log.error("Finished called for the sync before the current engine finished"); - this.currentEngine.finished(null); - this.onEngineStop(this.currentEngine.name); - } - if (error) { - this.failureReason = transformError(error); - } - - // We don't bother including the "devices" field if we can't come up with a - // UID or device ID for *this* device -- If that's the case, any data we'd - // put there would be likely to be full of garbage anyway. - let includeDeviceInfo = false; - try { - this.uid = Weave.Service.identity.hashedUID(); - let deviceID = Weave.Service.identity.deviceID(); - if (deviceID) { - // Combine the raw device id with the metrics uid to create a stable - // unique identifier that can't be mapped back to the user's FxA - // identity without knowing the metrics HMAC key. - this.deviceID = Utils.sha256(deviceID + this.uid); - includeDeviceInfo = true; - } - } catch (e) { - this.uid = "0".repeat(32); - this.deviceID = undefined; - } - - if (includeDeviceInfo) { - let remoteDevices = Weave.Service.clientsEngine.remoteClients; - this.devices = remoteDevices.map(device => { - return { - os: device.os, - version: device.version, - id: Utils.sha256(device.id + this.uid) - }; - }); - } - - // Check for engine statuses. -- We do this now, and not in engine.finished - // to make sure any statuses that get set "late" are recorded - for (let engine of this.engines) { - let status = Status.engines[engine.name]; - if (status && status !== constants.ENGINE_SUCCEEDED) { - engine.status = status; - } - } - - let statusObject = {}; - - let serviceStatus = Status.service; - if (serviceStatus && serviceStatus !== constants.STATUS_OK) { - statusObject.service = serviceStatus; - this.status = statusObject; - } - let syncStatus = Status.sync; - if (syncStatus && syncStatus !== constants.SYNC_SUCCEEDED) { - statusObject.sync = syncStatus; - this.status = statusObject; - } - } - - onEngineStart(engineName) { - if (this._shouldIgnoreEngine(engineName, false)) { - return; - } - - if (this.currentEngine) { - log.error(`Being told that engine ${engineName} has started, but current engine ${ - this.currentEngine.name} hasn't stopped`); - // Just discard the current engine rather than making up data for it. - } - this.currentEngine = new EngineRecord(engineName); - } - - onEngineStop(engineName, error) { - // We only care if it's the current engine if we have a current engine. 
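// Together with onEngineStart above, this is a small two-state machine:
// start allocates this.currentEngine, stop finalizes it into this.engines
// and clears it; the branches below only cover mismatched notifications.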
- if (this._shouldIgnoreEngine(engineName, !!this.currentEngine)) { - return; - } - if (!this.currentEngine) { - // It's possible for us to get an error before the start message of an engine - // (somehow), in which case we still want to record that error. - if (!error) { - return; - } - log.error(`Error triggered on ${engineName} when no current engine exists: ${error}`); - this.currentEngine = new EngineRecord(engineName); - } - this.currentEngine.finished(error); - this.engines.push(this.currentEngine); - this.currentEngine = null; - } - - onEngineApplied(engineName, counts) { - if (this._shouldIgnoreEngine(engineName)) { - return; - } - this.currentEngine.recordApplied(counts); - } - - onEngineValidated(engineName, validationData) { - if (this._shouldIgnoreEngine(engineName, false)) { - return; - } - let engine = this.engines.find(e => e.name === engineName); - if (!engine && this.currentEngine && engineName === this.currentEngine.name) { - engine = this.currentEngine; - } - if (engine) { - engine.recordValidation(validationData); - } else { - log.warn(`Validation event triggered for engine ${engineName}, which hasn't been synced!`); - } - } - - onEngineValidateError(engineName, error) { - if (this._shouldIgnoreEngine(engineName, false)) { - return; - } - let engine = this.engines.find(e => e.name === engineName); - if (!engine && this.currentEngine && engineName === this.currentEngine.name) { - engine = this.currentEngine; - } - if (engine) { - engine.recordValidationError(error); - } else { - log.warn(`Validation failure event triggered for engine ${engineName}, which hasn't been synced!`); - } - } - - onEngineUploaded(engineName, counts) { - if (this._shouldIgnoreEngine(engineName)) { - return; - } - this.currentEngine.recordUploaded(counts); - } - - _shouldIgnoreEngine(engineName, shouldBeCurrent = true) { - if (!this.allowedEngines.has(engineName)) { - log.info(`Notification for engine ${engineName}, but we aren't recording telemetry for it`); - return true; - } - if (shouldBeCurrent) { - if (!this.currentEngine || engineName != this.currentEngine.name) { - log.error(`Notification for engine ${engineName} but it isn't current`); - return true; - } - } - return false; - } -} - -class SyncTelemetryImpl { - constructor(allowedEngines) { - log.level = Log.Level[Svc.Prefs.get("log.logger.telemetry", "Trace")]; - // This is accessible so we can enable custom engines during tests. - this.allowedEngines = allowedEngines; - this.current = null; - this.setupObservers(); - - this.payloads = []; - this.discarded = 0; - this.maxPayloadCount = Svc.Prefs.get("telemetry.maxPayloadCount"); - this.submissionInterval = Svc.Prefs.get("telemetry.submissionInterval") * 1000; - this.lastSubmissionTime = Telemetry.msSinceProcessStart(); - } - - getPingJSON(reason) { - return { - why: reason, - discarded: this.discarded || undefined, - version: PING_FORMAT_VERSION, - syncs: this.payloads.slice(), - }; - } - - finish(reason) { - // Note that we might be in the middle of a sync right now, and so we don't - // want to touch this.current. - let result = this.getPingJSON(reason); - this.payloads = []; - this.discarded = 0; - this.submit(result); - } - - setupObservers() { - for (let topic of TOPICS) { - Observers.add(topic, this, this); - } - } - - shutdown() { - this.finish("shutdown"); - for (let topic of TOPICS) { - Observers.remove(topic, this, this); - } - } - - submit(record) { - // We still call submit() with possibly illegal payloads so that tests can - // know that the ping was built. 
We don't end up submitting them, however. - if (record.syncs.length) { - log.trace(`submitting ${record.syncs.length} sync record(s) to telemetry`); - TelemetryController.submitExternalPing("sync", record); - } - } - - - onSyncStarted() { - if (this.current) { - log.warn("Observed weave:service:sync:start, but we're already recording a sync!"); - // Just discard the old record, consistent with our handling of engines, above. - this.current = null; - } - this.current = new TelemetryRecord(this.allowedEngines); - } - - _checkCurrent(topic) { - if (!this.current) { - log.warn(`Observed notification ${topic} but no current sync is being recorded.`); - return false; - } - return true; - } - - onSyncFinished(error) { - if (!this.current) { - log.warn("onSyncFinished but we aren't recording"); - return; - } - this.current.finished(error); - if (this.payloads.length < this.maxPayloadCount) { - this.payloads.push(this.current.toJSON()); - } else { - ++this.discarded; - } - this.current = null; - if ((Telemetry.msSinceProcessStart() - this.lastSubmissionTime) > this.submissionInterval) { - this.finish("schedule"); - this.lastSubmissionTime = Telemetry.msSinceProcessStart(); - } - } - - observe(subject, topic, data) { - log.trace(`observed ${topic} ${data}`); - - switch (topic) { - case "profile-before-change": - this.shutdown(); - break; - - /* sync itself state changes */ - case "weave:service:sync:start": - this.onSyncStarted(); - break; - - case "weave:service:sync:finish": - if (this._checkCurrent(topic)) { - this.onSyncFinished(null); - } - break; - - case "weave:service:sync:error": - // argument needs to be truthy (this should always be the case) - this.onSyncFinished(subject || "Unknown"); - break; - - /* engine sync state changes */ - case "weave:engine:sync:start": - if (this._checkCurrent(topic)) { - this.current.onEngineStart(data); - } - break; - case "weave:engine:sync:finish": - if (this._checkCurrent(topic)) { - this.current.onEngineStop(data, null); - } - break; - - case "weave:engine:sync:error": - if (this._checkCurrent(topic)) { - // argument needs to be truthy (this should always be the case) - this.current.onEngineStop(data, subject || "Unknown"); - } - break; - - /* engine counts */ - case "weave:engine:sync:applied": - if (this._checkCurrent(topic)) { - this.current.onEngineApplied(data, subject); - } - break; - - case "weave:engine:sync:uploaded": - if (this._checkCurrent(topic)) { - this.current.onEngineUploaded(data, subject); - } - break; - - case "weave:engine:validate:finish": - if (this._checkCurrent(topic)) { - this.current.onEngineValidated(data, subject); - } - break; - - case "weave:engine:validate:error": - if (this._checkCurrent(topic)) { - this.current.onEngineValidateError(data, subject || "Unknown"); - } - break; - - default: - log.warn(`unexpected observer topic ${topic}`); - break; - } - } -} - -this.SyncTelemetry = new SyncTelemetryImpl(ENGINES); diff --git a/services/sync/modules/userapi.js b/services/sync/modules/userapi.js index e906440bd..ec77d63e2 100644 --- a/services/sync/modules/userapi.js +++ b/services/sync/modules/userapi.js @@ -8,7 +8,7 @@ this.EXPORTED_SYMBOLS = [ "UserAPI10Client", ]; -var {utils: Cu} = Components; +const {utils: Cu} = Components; Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://services-common/rest.js"); diff --git a/services/sync/modules/util.js b/services/sync/modules/util.js index e9dbcb37d..67cc3f063 100644 --- a/services/sync/modules/util.js +++ b/services/sync/modules/util.js @@ -4,7 +4,7 @@ 
this.EXPORTED_SYMBOLS = ["XPCOMUtils", "Services", "Utils", "Async", "Svc", "Str"]; -var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components; +const {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components; Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://services-common/observers.js"); @@ -35,6 +35,8 @@ this.Utils = { // In the ideal world, references to these would be removed. nextTick: CommonUtils.nextTick, namedTimer: CommonUtils.namedTimer, + exceptionStr: CommonUtils.exceptionStr, + stackTrace: CommonUtils.stackTrace, makeURI: CommonUtils.makeURI, encodeUTF8: CommonUtils.encodeUTF8, decodeUTF8: CommonUtils.decodeUTF8, @@ -52,7 +54,6 @@ this.Utils = { digestBytes: CryptoUtils.digestBytes, sha1: CryptoUtils.sha1, sha1Base32: CryptoUtils.sha1Base32, - sha256: CryptoUtils.sha256, makeHMACKey: CryptoUtils.makeHMACKey, makeHMACHasher: CryptoUtils.makeHMACHasher, hkdfExpand: CryptoUtils.hkdfExpand, @@ -61,25 +62,6 @@ this.Utils = { getHTTPMACSHA1Header: CryptoUtils.getHTTPMACSHA1Header, /** - * The string to use as the base User-Agent in Sync requests. - * This string will look something like - * - * Firefox/49.0a1 (Windows NT 6.1; WOW64; rv:46.0) FxSync/1.51.0.20160516142357.desktop - */ - _userAgent: null, - get userAgent() { - if (!this._userAgent) { - let hph = Cc["@mozilla.org/network/protocol;1?name=http"].getService(Ci.nsIHttpProtocolHandler); - this._userAgent = - Services.appinfo.name + "/" + Services.appinfo.version + // Product. - " (" + hph.oscpu + ")" + // (oscpu) - " FxSync/" + WEAVE_VERSION + "." + // Sync. - Services.appinfo.appBuildID + "."; // Build. - } - return this._userAgent + Svc.Prefs.get("client.type", "desktop"); - }, - - /** * Wrap a function to catch all exceptions and log them * * @usage MyObj._catch = Utils.catch; @@ -95,7 +77,7 @@ this.Utils = { return func.call(thisArg); } catch(ex) { - thisArg._log.debug("Exception calling " + (func.name || "anonymous function"), ex); + thisArg._log.debug("Exception: " + Utils.exceptionStr(ex)); if (exceptionCallback) { return exceptionCallback.call(thisArg, ex); } @@ -271,14 +253,14 @@ this.Utils = { */ base32ToFriendly: function base32ToFriendly(input) { return input.toLowerCase() - .replace(/l/g, '8') - .replace(/o/g, '9'); + .replace("l", '8', "g") + .replace("o", '9', "g"); }, base32FromFriendly: function base32FromFriendly(input) { return input.toUpperCase() - .replace(/8/g, 'L') - .replace(/9/g, 'O'); + .replace("8", 'L', "g") + .replace("9", 'O', "g"); }, /** @@ -356,14 +338,12 @@ this.Utils = { try { json = yield CommonUtils.readJSON(path); + } catch (e if e instanceof OS.File.Error && e.becauseNoSuchFile) { + // Ignore non-existent files. } catch (e) { - if (e instanceof OS.File.Error && e.becauseNoSuchFile) { - // Ignore non-existent files, but explicitly return null. - json = null; - } else { - if (that._log) { - that._log.debug("Failed to load json", e); - } + if (that._log) { + that._log.debug("Failed to load json: " + + CommonUtils.exceptionStr(e)); } } @@ -411,52 +391,6 @@ this.Utils = { } }), - /** - * Move a json file in the profile directory. Will fail if a file exists at the - * destination. - * - * @returns a promise that resolves to undefined on success, or rejects on failure - * - * @param aFrom - * Current path to the JSON file saved on disk, relative to profileDir/weave - * .json will be appended to the file name. - * @param aTo - * New path to the JSON file saved on disk, relative to profileDir/weave - * .json will be appended to the file name. 
- * @param that - * Object to use for logging - */ - jsonMove(aFrom, aTo, that) { - let pathFrom = OS.Path.join(OS.Constants.Path.profileDir, "weave", - ...(aFrom + ".json").split("/")); - let pathTo = OS.Path.join(OS.Constants.Path.profileDir, "weave", - ...(aTo + ".json").split("/")); - if (that._log) { - that._log.trace("Moving " + pathFrom + " to " + pathTo); - } - return OS.File.move(pathFrom, pathTo, { noOverwrite: true }); - }, - - /** - * Removes a json file in the profile directory. - * - * @returns a promise that resolves to undefined on success, or rejects on failure - * - * @param filePath - * Current path to the JSON file saved on disk, relative to profileDir/weave - * .json will be appended to the file name. - * @param that - * Object to use for logging - */ - jsonRemove(filePath, that) { - let path = OS.Path.join(OS.Constants.Path.profileDir, "weave", - ...(filePath + ".json").split("/")); - if (that._log) { - that._log.trace("Deleting " + path); - } - return OS.File.remove(path, { ignoreAbsent: true }); - }, - getErrorString: function Utils_getErrorString(error, args) { try { return Str.errors.get(error, args || null); @@ -543,7 +477,7 @@ this.Utils = { // 20-char sync key. if (pp.length == 23 && - [5, 11, 17].every(i => pp[i] == '-')) { + [5, 11, 17].every(function(i) pp[i] == '-')) { return pp.slice(0, 5) + pp.slice(6, 11) + pp.slice(12, 17) + pp.slice(18, 23); @@ -551,7 +485,7 @@ this.Utils = { // "Modern" 26-char key. if (pp.length == 31 && - [1, 7, 13, 19, 25].every(i => pp[i] == '-')) { + [1, 7, 13, 19, 25].every(function(i) pp[i] == '-')) { return pp.slice(0, 1) + pp.slice(2, 7) + pp.slice(8, 13) + pp.slice(14, 19) @@ -681,12 +615,30 @@ this.Utils = { * Get the FxA identity hosts. */ getSyncCredentialsHostsFxA: function() { + // This is somewhat expensive and the result static, so we cache the result. + if (this._syncCredentialsHostsFxA) { + return this._syncCredentialsHostsFxA; + } let result = new Set(); // the FxA host result.add(FxAccountsCommon.FXA_PWDMGR_HOST); - // We used to include the FxA hosts (hence the Set() result) but we now - // don't give them special treatment (hence the Set() with exactly 1 item) - return result; + // + // The FxA hosts - these almost certainly all have the same hostname, but + // better safe than sorry... 
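// For illustration: nsIURI.prePath, used in the loop below, is everything
// before the path (scheme, any userinfo, host, port), so several pref URLs
// on one origin collapse into a single Set entry. A rough equivalent with
// the standard URL class, assuming a URL without userinfo:
let origin = new URL("https://accounts.firefox.com/signin?service=sync").origin;
// origin == "https://accounts.firefox.com", which is what prePath yields here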
+ for (let prefName of ["identity.fxaccounts.remote.force_auth.uri", + "identity.fxaccounts.remote.signup.uri", + "identity.fxaccounts.remote.signin.uri", + "identity.fxaccounts.settings.uri"]) { + let prefVal; + try { + prefVal = Services.prefs.getCharPref(prefName); + } catch (_) { + continue; + } + let uri = Services.io.newURI(prefVal, null, null); + result.add(uri.prePath); + } + return this._syncCredentialsHostsFxA = result; }, getDefaultDeviceName() { @@ -720,32 +672,6 @@ this.Utils = { Cc["@mozilla.org/network/protocol;1?name=http"].getService(Ci.nsIHttpProtocolHandler).oscpu; return Str.sync.get("client.name2", [user, appName, system]); - }, - - getDeviceName() { - const deviceName = Svc.Prefs.get("client.name", ""); - - if (deviceName === "") { - return this.getDefaultDeviceName(); - } - - return deviceName; - }, - - getDeviceType() { - return Svc.Prefs.get("client.type", DEVICE_TYPE_DESKTOP); - }, - - formatTimestamp(date) { - // Format timestamp as: "%Y-%m-%d %H:%M:%S" - let year = String(date.getFullYear()); - let month = String(date.getMonth() + 1).padStart(2, "0"); - let day = String(date.getDate()).padStart(2, "0"); - let hours = String(date.getHours()).padStart(2, "0"); - let minutes = String(date.getMinutes()).padStart(2, "0"); - let seconds = String(date.getSeconds()).padStart(2, "0"); - - return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}`; } }; @@ -764,7 +690,7 @@ Svc.Prefs = new Preferences(PREFS_BRANCH); Svc.DefaultPrefs = new Preferences({branch: PREFS_BRANCH, defaultBranch: true}); Svc.Obs = Observers; -var _sessionCID = Services.appinfo.ID == SEAMONKEY_ID ? +let _sessionCID = Services.appinfo.ID == SEAMONKEY_ID ? "@mozilla.org/suite/sessionstore;1" : "@mozilla.org/browser/sessionstore;1"; diff --git a/services/sync/moz.build b/services/sync/moz.build index 5e5de10b7..cedeb0529 100644 --- a/services/sync/moz.build +++ b/services/sync/moz.build @@ -1,31 +1,28 @@ -# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*- # vim: set filetype=python: # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('Mozilla Services', 'Firefox Sync: Backend') - DIRS += ['locales'] XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini'] EXTRA_COMPONENTS += [ - 'SyncComponents.manifest', + 'Weave.js', ] EXTRA_PP_COMPONENTS += [ - 'Weave.js', + 'SyncComponents.manifest', ] EXTRA_JS_MODULES['services-sync'] += [ 'modules/addonsreconciler.js', 'modules/addonutils.js', - 'modules/bookmark_validator.js', 'modules/browserid_identity.js', - 'modules/collection_validator.js', 'modules/engines.js', + 'modules/FxaMigrator.jsm', + 'modules/healthreport.jsm', 'modules/identity.js', 'modules/jpakeclient.js', 'modules/keys.js', @@ -35,22 +32,12 @@ EXTRA_JS_MODULES['services-sync'] += [ 'modules/record.js', 'modules/resource.js', 'modules/rest.js', + 'modules/service.js', 'modules/status.js', - 'modules/SyncedTabs.jsm', - 'modules/telemetry.js', 'modules/userapi.js', 'modules/util.js', ] -EXTRA_PP_JS_MODULES['services-sync'] += [ - 'modules/constants.js', - 'modules/service.js', -] - -# Definitions used by constants.js -DEFINES['weave_version'] = '1.54.1' -DEFINES['weave_id'] = '{340c2bbc-ce74-4362-90b5-7c26312808ef}' - EXTRA_JS_MODULES['services-sync'].engines += [ 'modules/engines/addons.js', 'modules/engines/bookmarks.js', @@ -78,3 +65,4 @@ TESTING_JS_MODULES.services.sync += [ JS_PREFERENCE_FILES += [ 'services-sync.js', ] + diff --git a/services/sync/services-sync.js b/services/sync/services-sync.js index 9473d9a9d..d5d153436 100644 --- a/services/sync/services-sync.js +++ b/services/sync/services-sync.js @@ -2,17 +2,16 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ -pref("services.sync.serverURL", "https://auth.services.mozilla.com/"); +pref("services.sync.serverURL", "https://pmsync.palemoon.org/sync/index.php/"); pref("services.sync.userURL", "user/"); pref("services.sync.miscURL", "misc/"); -pref("services.sync.termsURL", "https://services.mozilla.com/tos/"); -pref("services.sync.privacyURL", "https://services.mozilla.com/privacy-policy/"); -pref("services.sync.statusURL", "https://services.mozilla.com/status/"); -pref("services.sync.syncKeyHelpURL", "https://services.mozilla.com/help/synckey"); +pref("services.sync.termsURL", "http://www.palemoon.org/sync/terms.shtml"); +pref("services.sync.privacyURL", "http://www.palemoon.org/sync/privacy.shtml"); +pref("services.sync.statusURL", "https://pmsync.palemoon.org/status/"); +pref("services.sync.syncKeyHelpURL", "http://www.palemoon.org/sync/keyhelp.shtml"); pref("services.sync.lastversion", "firstrun"); pref("services.sync.sendVersionInfo", true); -pref("services.sync.APILevel", 2); pref("services.sync.scheduler.eolInterval", 604800); // 1 week pref("services.sync.scheduler.idleInterval", 3600); // 1 hour @@ -25,29 +24,37 @@ pref("services.sync.scheduler.sync11.singleDeviceInterval", 86400); // 1 day pref("services.sync.errorhandler.networkFailureReportTimeout", 1209600); // 2 weeks -pref("services.sync.engine.addons", true); +// A "master" pref for Sync being enabled. Will be set to false if the sync +// customization UI finds all our builtin engines disabled (and addons are +// free to force this to true if they have their own engine) +pref("services.sync.enabled", true); +// Our engines. 
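// Each engine below is gated by its own boolean pref. For illustration, a
// sketch of the check a consumer can make via the Svc.Prefs wrapper over
// the "services.sync." branch (set up in util.js above):
//
//   if (Svc.Prefs.get("engine.bookmarks", false)) {
//     // the bookmarks engine is allowed to sync
//   }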
+pref("services.sync.engine.addons", false); pref("services.sync.engine.bookmarks", true); pref("services.sync.engine.history", true); pref("services.sync.engine.passwords", true); pref("services.sync.engine.prefs", true); pref("services.sync.engine.tabs", true); -pref("services.sync.engine.tabs.filteredUrls", "^(about:.*|chrome://weave/.*|wyciwyg:.*|file:.*|blob:.*)$"); +pref("services.sync.engine.tabs.filteredUrls", "^(about:.*|chrome://weave/.*|wyciwyg:.*|file:.*)$"); -pref("services.sync.jpake.serverURL", "https://setup.services.mozilla.com/"); +pref("services.sync.jpake.serverURL", "https://keyserver.palemoon.org/"); pref("services.sync.jpake.pollInterval", 1000); pref("services.sync.jpake.firstMsgMaxTries", 300); // 5 minutes pref("services.sync.jpake.lastMsgMaxTries", 300); // 5 minutes pref("services.sync.jpake.maxTries", 10); +// Allow add-ons to be synced from non-trusted sources. +pref("services.sync.addons.ignoreRepositoryChecking", true); + // If true, add-on sync ignores changes to the user-enabled flag. This // allows people to have the same set of add-ons installed across all // profiles while maintaining different enabled states. pref("services.sync.addons.ignoreUserEnabledChanges", false); // Comma-delimited list of hostnames to trust for add-on install. -pref("services.sync.addons.trustedSourceHostnames", "addons.mozilla.org"); +pref("services.sync.addons.trustedSourceHostnames", "addons.palemoon.org,addons.mozilla.org"); -pref("services.sync.log.appender.console", "Fatal"); +pref("services.sync.log.appender.console", "Warn"); pref("services.sync.log.appender.dump", "Error"); pref("services.sync.log.appender.file.level", "Trace"); pref("services.sync.log.appender.file.logOnError", true); @@ -69,28 +76,12 @@ pref("services.sync.log.logger.engine.passwords", "Debug"); pref("services.sync.log.logger.engine.prefs", "Debug"); pref("services.sync.log.logger.engine.tabs", "Debug"); pref("services.sync.log.logger.engine.addons", "Debug"); -pref("services.sync.log.logger.engine.extension-storage", "Debug"); pref("services.sync.log.logger.engine.apps", "Debug"); pref("services.sync.log.logger.identity", "Debug"); pref("services.sync.log.logger.userapi", "Debug"); pref("services.sync.log.cryptoDebug", false); +pref("services.sync.tokenServerURI", "https://token.services.mozilla.com/1.0/sync/1.5"); + pref("services.sync.fxa.termsURL", "https://accounts.firefox.com/legal/terms"); pref("services.sync.fxa.privacyURL", "https://accounts.firefox.com/legal/privacy"); - -pref("services.sync.telemetry.submissionInterval", 43200); // 12 hours in seconds -pref("services.sync.telemetry.maxPayloadCount", 500); - -// Note that services.sync.validation.enabled is located in application/[application name]/app/profile/[application name].js - -// We consider validation this frequently. After considering validation, even -// if we don't end up validating, we won't try again unless this much time has passed. -pref("services.sync.validation.interval", 86400); // 24 hours in seconds - -// We only run validation `services.sync.validation.percentageChance` percent of -// the time, even if it's been the right amount of time since the last validation, -// and you meet the maxRecord checks. -pref("services.sync.validation.percentageChance", 10); - -// We won't validate an engine if it has more than this many records on the server. 
-pref("services.sync.validation.maxRecords", 100); diff --git a/services/sync/tests/tps/addons/api/restartless-xpi@tests.mozilla.org.xml b/services/sync/tests/tps/addons/api/restartless-xpi@tests.mozilla.org.xml index 6eb153ad1..9a5f6d52b 100644 --- a/services/sync/tests/tps/addons/api/restartless-xpi@tests.mozilla.org.xml +++ b/services/sync/tests/tps/addons/api/restartless-xpi@tests.mozilla.org.xml @@ -12,7 +12,7 @@ <application_id>1</application_id> <min_version>3.6</min_version> <max_version>*</max_version> - <appID>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</appID> + <appID>{8de7fcbb-c55c-4fbe-bfc5-fc555c87dbc4}</appID> </application></compatible_applications> <all_compatible_os><os>ALL</os></all_compatible_os> diff --git a/services/sync/tests/tps/addons/api/unsigned-xpi@tests.mozilla.org.xml b/services/sync/tests/tps/addons/api/unsigned-xpi@tests.mozilla.org.xml index 14a056013..d7a577b31 100644 --- a/services/sync/tests/tps/addons/api/unsigned-xpi@tests.mozilla.org.xml +++ b/services/sync/tests/tps/addons/api/unsigned-xpi@tests.mozilla.org.xml @@ -12,7 +12,7 @@ <application_id>1</application_id> <min_version>3.6</min_version> <max_version>*</max_version> - <appID>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</appID> + <appID>{8de7fcbb-c55c-4fbe-bfc5-fc555c87dbc4}</appID> </application></compatible_applications> <all_compatible_os><os>ALL</os></all_compatible_os> diff --git a/services/sync/tests/tps/all_tests.json b/services/sync/tests/tps/all_tests.json index ca7031e59..fdcbe1821 100644 --- a/services/sync/tests/tps/all_tests.json +++ b/services/sync/tests/tps/all_tests.json @@ -1,5 +1,4 @@ { "tests": [ - "test_bookmark_conflict.js", "test_sync.js", "test_prefs.js", "test_tabs.js", @@ -17,6 +16,7 @@ "test_bug575423.js", "test_bug546807.js", "test_history_collision.js", + "test_privbrw_formdata.js", "test_privbrw_passwords.js", "test_privbrw_tabs.js", "test_bookmarks_in_same_named_folder.js", diff --git a/services/sync/tests/tps/test_addon_nonrestartless_xpi.js b/services/sync/tests/tps/test_addon_nonrestartless_xpi.js index b6c85b351..d9a15fc50 100644 --- a/services/sync/tests/tps/test_addon_nonrestartless_xpi.js +++ b/services/sync/tests/tps/test_addon_nonrestartless_xpi.js @@ -6,7 +6,7 @@ // syncs between profiles. EnableEngines(["addons"]); -var phases = { +let phases = { "phase01": "profile1", "phase02": "profile1", "phase03": "profile2", @@ -33,8 +33,7 @@ Phase("phase01", [ [Sync] ]); Phase("phase02", [ - [Addons.verify, [id], STATE_ENABLED], - [Sync] + [Addons.verify, [id], STATE_ENABLED] ]); Phase("phase03", [ [Addons.verifyNot, [id]], @@ -42,7 +41,6 @@ Phase("phase03", [ ]); Phase("phase04", [ [Addons.verify, [id], STATE_ENABLED], - [Sync] ]); // Now we disable the add-on @@ -53,15 +51,13 @@ Phase("phase05", [ ]); Phase("phase06", [ [Addons.verify, [id], STATE_DISABLED], - [Sync] ]); Phase("phase07", [ [Addons.verify, [id], STATE_ENABLED], [Sync] ]); Phase("phase08", [ - [Addons.verify, [id], STATE_DISABLED], - [Sync] + [Addons.verify, [id], STATE_DISABLED] ]); // Now we re-enable it again. 
@@ -72,15 +68,13 @@ Phase("phase09", [ ]); Phase("phase10", [ [Addons.verify, [id], STATE_ENABLED], - [Sync] ]); Phase("phase11", [ [Addons.verify, [id], STATE_DISABLED], [Sync] ]); Phase("phase12", [ - [Addons.verify, [id], STATE_ENABLED], - [Sync] + [Addons.verify, [id], STATE_ENABLED] ]); // And we uninstall it @@ -92,14 +86,12 @@ Phase("phase13", [ [Sync] ]); Phase("phase14", [ - [Addons.verifyNot, [id]], - [Sync] + [Addons.verifyNot, [id]] ]); Phase("phase15", [ [Addons.verify, [id], STATE_ENABLED], [Sync] ]); Phase("phase16", [ - [Addons.verifyNot, [id]], - [Sync] + [Addons.verifyNot, [id]] ]); diff --git a/services/sync/tests/tps/test_addon_reconciling.js b/services/sync/tests/tps/test_addon_reconciling.js index a4244ab03..14dda8ade 100644 --- a/services/sync/tests/tps/test_addon_reconciling.js +++ b/services/sync/tests/tps/test_addon_reconciling.js @@ -6,7 +6,7 @@ // the proper action is taken. EnableEngines(["addons"]); -var phases = { +let phases = { "phase01": "profile1", "phase02": "profile2", "phase03": "profile1", @@ -34,9 +34,6 @@ Phase("phase02", [ Phase("phase03", [ [Sync], // Get GUID updates, potentially. [Addons.setEnabled, [id], STATE_DISABLED], - // We've changed the state, but don't want this profile to sync until phase5, - // so if we ran a validation now we'd be expecting to find errors. - [Addons.skipValidation] ]); Phase("phase04", [ [EnsureTracking], diff --git a/services/sync/tests/tps/test_addon_restartless_xpi.js b/services/sync/tests/tps/test_addon_restartless_xpi.js index b242c95f0..7d45406bd 100644 --- a/services/sync/tests/tps/test_addon_restartless_xpi.js +++ b/services/sync/tests/tps/test_addon_restartless_xpi.js @@ -5,7 +5,7 @@ // other profiles. EnableEngines(["addons"]); -var phases = { +let phases = { "phase01": "profile1", "phase02": "profile2", "phase03": "profile1", diff --git a/services/sync/tests/tps/test_addon_sanity.js b/services/sync/tests/tps/test_addon_sanity.js index 240918094..0d738eb67 100644 --- a/services/sync/tests/tps/test_addon_sanity.js +++ b/services/sync/tests/tps/test_addon_sanity.js @@ -9,7 +9,7 @@ EnableEngines(["addons"]); -var phases = { "phase1": "profile1", +let phases = { "phase1": "profile1", "phase2": "profile1" }; const id = "unsigned-xpi@tests.mozilla.org"; @@ -25,6 +25,5 @@ Phase("phase1", [ Phase("phase2", [ // Add-on should be present after restart - [Addons.verify, [id], STATE_ENABLED], - [Sync] // Sync to ensure everything is initialized enough for the addon validator to run + [Addons.verify, [id], STATE_ENABLED] ]); diff --git a/services/sync/tests/tps/test_addon_wipe.js b/services/sync/tests/tps/test_addon_wipe.js index 60131abc0..2aafbd6bf 100644 --- a/services/sync/tests/tps/test_addon_wipe.js +++ b/services/sync/tests/tps/test_addon_wipe.js @@ -8,7 +8,7 @@ EnableEngines(["addons"]); -var phases = { +let phases = { "phase01": "profile1", "phase02": "profile1", "phase03": "profile1" @@ -30,6 +30,5 @@ Phase("phase02", [ ]); Phase("phase03", [ [Addons.verify, [id1], STATE_ENABLED], - [Addons.verify, [id2], STATE_ENABLED], - [Sync] // Sync to ensure that the addon validator can run without error + [Addons.verify, [id2], STATE_ENABLED] ]); diff --git a/services/sync/tests/tps/test_bookmark_conflict.js b/services/sync/tests/tps/test_bookmark_conflict.js deleted file mode 100644 index cfe9d782e..000000000 --- a/services/sync/tests/tps/test_bookmark_conflict.js +++ /dev/null @@ -1,143 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. 
- http://creativecommons.org/publicdomain/zero/1.0/ */ - -/* - * The list of phases mapped to their corresponding profiles. The object - * here must be in strict JSON format, as it will get parsed by the Python - * testrunner (no single quotes, extra comma's, etc). - */ -EnableEngines(["bookmarks"]); - -var phases = { "phase1": "profile1", - "phase2": "profile2", - "phase3": "profile1", - "phase4": "profile2" }; - - -// the initial list of bookmarks to add to the browser -var bookmarksInitial = { - "menu": [ - { folder: "foldera" }, - { folder: "folderb" }, - { folder: "folderc" }, - { folder: "folderd" }, - ], - - "menu/foldera": [{ uri: "http://www.cnn.com", title: "CNN" }], - "menu/folderb": [{ uri: "http://www.apple.com", title: "Apple", tags: [] }], - "menu/folderc": [{ uri: "http://www.yahoo.com", title: "Yahoo" }], - - "menu/folderd": [] -}; - -// a list of bookmarks to delete during a 'delete' action on P2 -var bookmarksToDelete = { - "menu": [ - { folder: "foldera" }, - { folder: "folderb" }, - ], - "menu/folderc": [{ uri: "http://www.yahoo.com", title: "Yahoo" }], -}; - - -// the modifications to make on P1, after P2 has synced, but before P1 has gotten -// P2's changes -var bookmarkMods = { - "menu": [ - { folder: "foldera" }, - { folder: "folderb" }, - { folder: "folderc" }, - { folder: "folderd" }, - ], - - // we move this child out of its folder (p1), after deleting the folder (p2) - // and expect the child to come back to p2 after sync. - "menu/foldera": [{ - uri: "http://www.cnn.com", - title: "CNN", - changes: { location: "menu/folderd" } - }], - - // we rename this child (p1) after deleting the folder (p2), and expect the child - // to be moved into great grandparent (menu) - "menu/folderb": [{ - uri: "http://www.apple.com", - title: "Apple", - tags: [], - changes: { title: "Mac" } - }], - - - // we move this child (p1) after deleting the child (p2) and expect it to survive - "menu/folderc": [{ - uri: "http://www.yahoo.com", - title: "Yahoo", - changes: { location: "menu/folderd" } - }], - - "menu/folderd": [] -}; - -// a list of bookmarks to delete during a 'delete' action -var bookmarksToDelete = { - "menu": [ - { folder: "foldera" }, - { folder: "folderb" }, - ], - "menu/folderc": [ - { uri: "http://www.yahoo.com", title: "Yahoo" }, - ], -}; - - - -// expected bookmark state after conflict resolution -var bookmarksExpected = { - "menu": [ - { folder: "folderc" }, - { folder: "folderd" }, - { uri: "http://www.apple.com", title: "Mac", }, - ], - - "menu/folderc": [], - - "menu/folderd": [ - { uri: "http://www.cnn.com", title: "CNN" }, - { uri: "http://www.yahoo.com", title: "Yahoo" } - ] -}; - -// Add bookmarks to profile1 and sync. -Phase("phase1", [ - [Bookmarks.add, bookmarksInitial], - [Bookmarks.verify, bookmarksInitial], - [Sync], - [Bookmarks.verify, bookmarksInitial], -]); - -// Sync to profile2 and verify that the bookmarks are present. Delete -// bookmarks/folders, verify that it's not present, and sync -Phase("phase2", [ - [Sync], - [Bookmarks.verify, bookmarksInitial], - [Bookmarks.delete, bookmarksToDelete], - [Bookmarks.verifyNot, bookmarksToDelete], - [Sync] -]); - -// Using profile1, modify the bookmarks, and sync *after* the modification, -// and then sync again to propagate the reconciliation changes. 
-Phase("phase3", [ - [Bookmarks.verify, bookmarksInitial], - [Bookmarks.modify, bookmarkMods], - [Sync], - [Bookmarks.verify, bookmarksExpected], - [Bookmarks.verifyNot, bookmarksToDelete], -]); - -// Back in profile2, do a sync and verify that we're in the expected state -Phase("phase4", [ - [Sync], - [Bookmarks.verify, bookmarksExpected], - [Bookmarks.verifyNot, bookmarksToDelete], -]); diff --git a/services/sync/tests/tps/test_bug530717.js b/services/sync/tests/tps/test_bug530717.js index 4a11b0a27..1252b382f 100644 --- a/services/sync/tests/tps/test_bug530717.js +++ b/services/sync/tests/tps/test_bug530717.js @@ -23,7 +23,7 @@ var prefs1 = [ { name: "browser.urlbar.maxRichResults", value: 20 }, - { name: "privacy.clearOnShutdown.siteSettings", + { name: "security.OCSP.require", value: true } ]; @@ -35,7 +35,7 @@ var prefs2 = [ { name: "browser.urlbar.maxRichResults", value: 18 }, - { name: "privacy.clearOnShutdown.siteSettings", + { name: "security.OCSP.require", value: false } ]; diff --git a/services/sync/tests/tps/test_bug563989.js b/services/sync/tests/tps/test_bug563989.js index faf63de65..ec890a1a2 100644 --- a/services/sync/tests/tps/test_bug563989.js +++ b/services/sync/tests/tps/test_bug563989.js @@ -88,8 +88,7 @@ Phase('phase2', [ [Sync], [Bookmarks.verify, bookmarks_initial], [Bookmarks.delete, bookmarks_to_delete], - [Bookmarks.verifyNot, bookmarks_to_delete], - [Bookmarks.skipValidation] + [Bookmarks.verifyNot, bookmarks_to_delete] ]); // Using profile1, sync again with wipe-server set to true. Verify our diff --git a/services/sync/tests/tps/test_client_wipe.js b/services/sync/tests/tps/test_client_wipe.js index ba9815db5..049b385fe 100644 --- a/services/sync/tests/tps/test_client_wipe.js +++ b/services/sync/tests/tps/test_client_wipe.js @@ -108,7 +108,7 @@ var prefs1 = [ { name: "browser.urlbar.maxRichResults", value: 20 }, - { name: "privacy.clearOnShutdown.siteSettings", + { name: "security.OCSP.require", value: true } ]; @@ -120,7 +120,7 @@ var prefs2 = [ { name: "browser.urlbar.maxRichResults", value: 18 }, - { name: "privacy.clearOnShutdown.siteSettings", + { name: "security.OCSP.require", value: false } ]; diff --git a/services/sync/tests/tps/test_formdata.js b/services/sync/tests/tps/test_formdata.js index decb58dd8..2c93f6592 100644 --- a/services/sync/tests/tps/test_formdata.js +++ b/services/sync/tests/tps/test_formdata.js @@ -31,11 +31,6 @@ var formdata1 = [ } ]; -// This is currently pointless - it *looks* like it is trying to check that -// one of the entries in formdata1 has been removed, but (a) the delete code -// isn't active (see comments below), and (b) the way the verification works -// means it would never do the right thing - it only checks all the entries -// here exist, but not that they are the only entries in the DB. var formdata2 = [ { fieldname: "testing", value: "success", @@ -52,11 +47,6 @@ var formdata_delete = [ } ]; -var formdata_new = [ - { fieldname: "new-field", - value: "new-value" - } -] /* * Test phases */ @@ -82,15 +72,12 @@ Phase('phase3', [ [Formdata.delete, formdata_delete], //[Formdata.verifyNot, formdata_delete], [Formdata.verify, formdata2], - // add new data after the first Sync, ensuring the tracker works. 
- [Formdata.add, formdata_new], [Sync], ]); Phase('phase4', [ [Sync], [Formdata.verify, formdata2], - [Formdata.verify, formdata_new], //[Formdata.verifyNot, formdata_delete] ]); diff --git a/services/sync/tests/tps/test_prefs.js b/services/sync/tests/tps/test_prefs.js index 3afff130d..48ffe80e5 100644 --- a/services/sync/tests/tps/test_prefs.js +++ b/services/sync/tests/tps/test_prefs.js @@ -19,7 +19,7 @@ var prefs1 = [ { name: "browser.urlbar.maxRichResults", value: 20 }, - { name: "privacy.clearOnShutdown.siteSettings", + { name: "security.OCSP.require", value: true } ]; @@ -31,7 +31,7 @@ var prefs2 = [ { name: "browser.urlbar.maxRichResults", value: 18 }, - { name: "privacy.clearOnShutdown.siteSettings", + { name: "security.OCSP.require", value: false } ]; diff --git a/services/sync/tests/tps/test_privbrw_formdata.js b/services/sync/tests/tps/test_privbrw_formdata.js new file mode 100644 index 000000000..e1661611e --- /dev/null +++ b/services/sync/tests/tps/test_privbrw_formdata.js @@ -0,0 +1,73 @@ +/* Any copyright is dedicated to the Public Domain. + http://creativecommons.org/publicdomain/zero/1.0/ */ + +/* + * The list of phases mapped to their corresponding profiles. The object + * here must be in strict JSON format, as it will get parsed by the Python + * testrunner (no single quotes, extra comma's, etc). + */ +EnableEngines(["forms"]); + +var phases = { "phase1": "profile1", + "phase2": "profile2", + "phase3": "profile1", + "phase4": "profile2" }; + +/* + * Form data + */ + +// the form data to add to the browser +var formdata1 = [ + { fieldname: "name", + value: "xyz", + date: -1 + }, + { fieldname: "email", + value: "abc@gmail.com", + date: -2 + }, + { fieldname: "username", + value: "joe" + } +]; + +// the form data to add in private browsing mode +var formdata2 = [ + { fieldname: "password", + value: "secret", + date: -1 + }, + { fieldname: "city", + value: "mtview" + } +]; + +/* + * Test phases + */ + +Phase('phase1', [ + [Formdata.add, formdata1], + [Formdata.verify, formdata1], + [Sync] +]); + +Phase('phase2', [ + [Sync], + [Formdata.verify, formdata1] +]); + +Phase('phase3', [ + [Sync], + [Windows.add, { private: true }], + [Formdata.add, formdata2], + [Formdata.verify, formdata2], + [Sync], +]); + +Phase('phase4', [ + [Sync], + [Formdata.verify, formdata1], + [Formdata.verifyNot, formdata2] +]); diff --git a/services/sync/tests/unit/fake_login_manager.js b/services/sync/tests/unit/fake_login_manager.js index 6f3148c45..32adcbcb5 100644 --- a/services/sync/tests/unit/fake_login_manager.js +++ b/services/sync/tests/unit/fake_login_manager.js @@ -4,7 +4,7 @@ Cu.import("resource://services-sync/util.js"); // Fake Sample Data // ---------------------------------------- -var fakeSampleLogins = [ +let fakeSampleLogins = [ // Fake nsILoginInfo object. {hostname: "www.boogle.com", formSubmitURL: "http://www.boogle.com/search", diff --git a/services/sync/tests/unit/head_appinfo.js b/services/sync/tests/unit/head_appinfo.js index d2a680df5..eea47905f 100644 --- a/services/sync/tests/unit/head_appinfo.js +++ b/services/sync/tests/unit/head_appinfo.js @@ -1,54 +1,65 @@ /* Any copyright is dedicated to the Public Domain. 
http://creativecommons.org/publicdomain/zero/1.0/ */ -var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components; -Cu.import("resource://gre/modules/Services.jsm"); -Cu.import("resource://gre/modules/XPCOMUtils.jsm"); +const {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components; -var gSyncProfile; +let gSyncProfile; gSyncProfile = do_get_profile(); // Init FormHistoryStartup and pretend we opened a profile. -var fhs = Cc["@mozilla.org/satchel/form-history-startup;1"] +let fhs = Cc["@mozilla.org/satchel/form-history-startup;1"] .getService(Ci.nsIObserver); fhs.observe(null, "profile-after-change", null); -// An app is going to have some prefs set which xpcshell tests don't. -Services.prefs.setCharPref("identity.sync.tokenserver.uri", "http://token-server"); -// Set the validation prefs to attempt validation every time to avoid non-determinism. -Services.prefs.setIntPref("services.sync.validation.interval", 0); -Services.prefs.setIntPref("services.sync.validation.percentageChance", 100); -Services.prefs.setIntPref("services.sync.validation.maxRecords", -1); -Services.prefs.setBoolPref("services.sync.validation.enabled", true); +Cu.import("resource://gre/modules/XPCOMUtils.jsm"); // Make sure to provide the right OS so crypto loads the right binaries -function getOS() { - switch (mozinfo.os) { - case "win": - return "WINNT"; - case "mac": - return "Darwin"; - default: - return "Linux"; - } -} +let OS = "XPCShell"; +if ("@mozilla.org/windows-registry-key;1" in Cc) + OS = "WINNT"; +else if ("nsILocalFileMac" in Ci) + OS = "Darwin"; +else + OS = "Linux"; -Cu.import("resource://testing-common/AppInfo.jsm", this); -updateAppInfo({ +let XULAppInfo = { + vendor: "Mozilla", name: "XPCShell", ID: "xpcshell@tests.mozilla.org", version: "1", + appBuildID: "20100621", platformVersion: "", - OS: getOS(), -}); + platformBuildID: "20100621", + inSafeMode: false, + logConsoleErrors: true, + OS: OS, + XPCOMABI: "noarch-spidermonkey", + QueryInterface: XPCOMUtils.generateQI([Ci.nsIXULAppInfo, Ci.nsIXULRuntime]), + invalidateCachesOnRestart: function invalidateCachesOnRestart() { } +}; + +let XULAppInfoFactory = { + createInstance: function (outer, iid) { + if (outer != null) + throw Cr.NS_ERROR_NO_AGGREGATION; + return XULAppInfo.QueryInterface(iid); + } +}; + +let registrar = Components.manager.QueryInterface(Ci.nsIComponentRegistrar); +registrar.registerFactory(Components.ID("{fbfae60b-64a4-44ef-a911-08ceb70b9f31}"), + "XULAppInfo", "@mozilla.org/xre/app-info;1", + XULAppInfoFactory); + // Register resource aliases. Normally done in SyncComponents.manifest. function addResourceAlias() { + Cu.import("resource://gre/modules/Services.jsm"); const resProt = Services.io.getProtocolHandler("resource") .QueryInterface(Ci.nsIResProtocolHandler); - for (let s of ["common", "sync", "crypto"]) { + for each (let s in ["common", "sync", "crypto"]) { let uri = Services.io.newURI("resource://gre/modules/services-" + s + "/", null, null); resProt.setSubstitution("services-" + s, uri); diff --git a/services/sync/tests/unit/head_errorhandler_common.js b/services/sync/tests/unit/head_errorhandler_common.js deleted file mode 100644 index f4af60d9d..000000000 --- a/services/sync/tests/unit/head_errorhandler_common.js +++ /dev/null @@ -1,112 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. 
- http://creativecommons.org/publicdomain/zero/1.0/ */ - -Cu.import("resource://services-sync/engines.js"); - -// Common code for test_errorhandler_{1,2}.js -- pulled out to make it less -// monolithic and take less time to execute. -const EHTestsCommon = { - - service_unavailable(request, response) { - let body = "Service Unavailable"; - response.setStatusLine(request.httpVersion, 503, "Service Unavailable"); - response.setHeader("Retry-After", "42"); - response.bodyOutputStream.write(body, body.length); - }, - - sync_httpd_setup() { - let global = new ServerWBO("global", { - syncID: Service.syncID, - storageVersion: STORAGE_VERSION, - engines: {clients: {version: Service.clientsEngine.version, - syncID: Service.clientsEngine.syncID}, - catapult: {version: Service.engineManager.get("catapult").version, - syncID: Service.engineManager.get("catapult").syncID}} - }); - let clientsColl = new ServerCollection({}, true); - - // Tracking info/collections. - let collectionsHelper = track_collections_helper(); - let upd = collectionsHelper.with_updated_collection; - - let handler_401 = httpd_handler(401, "Unauthorized"); - return httpd_setup({ - // Normal server behaviour. - "/1.1/johndoe/storage/meta/global": upd("meta", global.handler()), - "/1.1/johndoe/info/collections": collectionsHelper.handler, - "/1.1/johndoe/storage/crypto/keys": - upd("crypto", (new ServerWBO("keys")).handler()), - "/1.1/johndoe/storage/clients": upd("clients", clientsColl.handler()), - - // Credentials are wrong or node reallocated. - "/1.1/janedoe/storage/meta/global": handler_401, - "/1.1/janedoe/info/collections": handler_401, - - // Maintenance or overloaded (503 + Retry-After) at info/collections. - "/maintenance/1.1/broken.info/info/collections": EHTestsCommon.service_unavailable, - - // Maintenance or overloaded (503 + Retry-After) at meta/global. - "/maintenance/1.1/broken.meta/storage/meta/global": EHTestsCommon.service_unavailable, - "/maintenance/1.1/broken.meta/info/collections": collectionsHelper.handler, - - // Maintenance or overloaded (503 + Retry-After) at crypto/keys. - "/maintenance/1.1/broken.keys/storage/meta/global": upd("meta", global.handler()), - "/maintenance/1.1/broken.keys/info/collections": collectionsHelper.handler, - "/maintenance/1.1/broken.keys/storage/crypto/keys": EHTestsCommon.service_unavailable, - - // Maintenance or overloaded (503 + Retry-After) at wiping collection. - "/maintenance/1.1/broken.wipe/info/collections": collectionsHelper.handler, - "/maintenance/1.1/broken.wipe/storage/meta/global": upd("meta", global.handler()), - "/maintenance/1.1/broken.wipe/storage/crypto/keys": - upd("crypto", (new ServerWBO("keys")).handler()), - "/maintenance/1.1/broken.wipe/storage": EHTestsCommon.service_unavailable, - "/maintenance/1.1/broken.wipe/storage/clients": upd("clients", clientsColl.handler()), - "/maintenance/1.1/broken.wipe/storage/catapult": EHTestsCommon.service_unavailable - }); - }, - - CatapultEngine: (function() { - function CatapultEngine() { - SyncEngine.call(this, "Catapult", Service); - } - CatapultEngine.prototype = { - __proto__: SyncEngine.prototype, - exception: null, // tests fill this in - _sync: function _sync() { - if (this.exception) { - throw this.exception; - } - } - }; - - return CatapultEngine; - }()), - - - generateCredentialsChangedFailure() { - // Make sync fail due to changed credentials. We simply re-encrypt - // the keys with a different Sync Key, without changing the local one. 
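// In outline: build a SyncKeyBundle from a different 26-character sync key,
// re-encrypt the server's crypto/keys WBO with it, and upload it. The
// unchanged local bundle can no longer decrypt the server keys, which Sync
// surfaces as a credentials change.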
- let newSyncKeyBundle = new SyncKeyBundle("johndoe", "23456234562345623456234562"); - let keys = Service.collectionKeys.asWBO(); - keys.encrypt(newSyncKeyBundle); - keys.upload(Service.resource(Service.cryptoKeysURL)); - }, - - setUp(server) { - return configureIdentity({ username: "johndoe" }).then( - () => { - Service.serverURL = server.baseURI + "/"; - Service.clusterURL = server.baseURI + "/"; - } - ).then( - () => EHTestsCommon.generateAndUploadKeys() - ); - }, - - generateAndUploadKeys() { - generateNewKeys(Service.collectionKeys); - let serverKeys = Service.collectionKeys.asWBO("crypto", "keys"); - serverKeys.encrypt(Service.identity.syncKeyBundle); - return serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success; - } -}; diff --git a/services/sync/tests/unit/head_helpers.js b/services/sync/tests/unit/head_helpers.js index 3c59e1de5..04534dc8e 100644 --- a/services/sync/tests/unit/head_helpers.js +++ b/services/sync/tests/unit/head_helpers.js @@ -4,39 +4,8 @@ Cu.import("resource://services-common/async.js"); Cu.import("resource://testing-common/services/common/utils.js"); Cu.import("resource://testing-common/PlacesTestUtils.jsm"); -Cu.import("resource://services-sync/util.js"); -Cu.import("resource://gre/modules/XPCOMUtils.jsm"); -XPCOMUtils.defineLazyGetter(this, 'SyncPingSchema', function() { - let ns = {}; - Cu.import("resource://gre/modules/FileUtils.jsm", ns); - let stream = Cc["@mozilla.org/network/file-input-stream;1"] - .createInstance(Ci.nsIFileInputStream); - let jsonReader = Cc["@mozilla.org/dom/json;1"] - .createInstance(Components.interfaces.nsIJSON); - let schema; - try { - let schemaFile = do_get_file("sync_ping_schema.json"); - stream.init(schemaFile, ns.FileUtils.MODE_RDONLY, ns.FileUtils.PERMS_FILE, 0); - schema = jsonReader.decodeFromStream(stream, stream.available()); - } finally { - stream.close(); - } - - // Allow tests to make whatever engines they want, this shouldn't cause - // validation failure. - schema.definitions.engine.properties.name = { type: "string" }; - return schema; -}); - -XPCOMUtils.defineLazyGetter(this, 'SyncPingValidator', function() { - let ns = {}; - Cu.import("resource://testing-common/ajv-4.1.1.js", ns); - let ajv = new ns.Ajv({ async: "co*" }); - return ajv.compile(SyncPingSchema); -}); - -var provider = { +let provider = { getFile: function(prop, persistent) { persistent.value = true; switch (prop) { @@ -51,7 +20,7 @@ var provider = { Services.dirsvc.QueryInterface(Ci.nsIDirectoryService).registerProvider(provider); // This is needed for loadAddonTestFunctions(). -var gGlobalScope = this; +let gGlobalScope = this; function ExtensionsTestPath(path) { if (path[0] != "/") { @@ -76,24 +45,6 @@ function loadAddonTestFunctions() { createAppInfo("xpcshell@tests.mozilla.org", "XPCShell", "1", "1.9.2"); } -function webExtensionsTestPath(path) { - if (path[0] != "/") { - throw Error("Path must begin with '/': " + path); - } - - return "../../../../toolkit/components/extensions/test/xpcshell" + path; -} - -/** - * Loads the WebExtension test functions by importing its test file. 
- */ -function loadWebExtensionTestFunctions() { - const path = webExtensionsTestPath("/head_sync.js"); - let file = do_get_file(path); - let uri = Services.io.newFileURI(file); - Services.scriptloader.loadSubScript(uri.spec, gGlobalScope); -} - function getAddonInstall(name) { let f = do_get_file(ExtensionsTestPath("/addons/" + name + ".xpi")); let cb = Async.makeSyncCallback(); @@ -255,192 +206,3 @@ function do_check_array_eq(a1, a2) { do_check_eq(a1[i], a2[i]); } } - -// Helper function to get the sync telemetry and add the typically used test -// engine names to its list of allowed engines. -function get_sync_test_telemetry() { - let ns = {}; - Cu.import("resource://services-sync/telemetry.js", ns); - let testEngines = ["rotary", "steam", "sterling", "catapult"]; - for (let engineName of testEngines) { - ns.SyncTelemetry.allowedEngines.add(engineName); - } - ns.SyncTelemetry.submissionInterval = -1; - return ns.SyncTelemetry; -} - -function assert_valid_ping(record) { - // This is called as the test harness tears down due to shutdown. This - // will typically have no recorded syncs, and the validator complains about - // it. So ignore such records (but only ignore when *both* shutdown and - // no Syncs - either of them not being true might be an actual problem) - if (record && (record.why != "shutdown" || record.syncs.length != 0)) { - if (!SyncPingValidator(record)) { - deepEqual([], SyncPingValidator.errors, "Sync telemetry ping validation failed"); - } - equal(record.version, 1); - record.syncs.forEach(p => { - lessOrEqual(p.when, Date.now()); - if (p.devices) { - ok(!p.devices.some(device => device.id == p.deviceID)); - equal(new Set(p.devices.map(device => device.id)).size, - p.devices.length, "Duplicate device ids in ping devices list"); - } - }); - } -} - -// Asserts that `ping` is a ping that doesn't contain any failure information -function assert_success_ping(ping) { - ok(!!ping); - assert_valid_ping(ping); - ping.syncs.forEach(record => { - ok(!record.failureReason); - equal(undefined, record.status); - greater(record.engines.length, 0); - for (let e of record.engines) { - ok(!e.failureReason); - equal(undefined, e.status); - if (e.validation) { - equal(undefined, e.validation.problems); - equal(undefined, e.validation.failureReason); - } - if (e.outgoing) { - for (let o of e.outgoing) { - equal(undefined, o.failed); - notEqual(undefined, o.sent); - } - } - if (e.incoming) { - equal(undefined, e.incoming.failed); - equal(undefined, e.incoming.newFailed); - notEqual(undefined, e.incoming.applied || e.incoming.reconciled); - } - } - }); -} - -// Hooks into telemetry to validate all pings after calling. 
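// The helpers below share one monkeypatch pattern: swap telem.submit for an
// assertion, restoring the original first so a nested submit is not caught.
// A condensed sketch, where check() stands in for whatever assertion or
// promise resolution the individual helper performs:
let telem = get_sync_test_telemetry();
let oldSubmit = telem.submit;
telem.submit = function(record) {
  telem.submit = oldSubmit;  // restore before asserting
  check(record);             // hypothetical assertion callback
};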
-function validate_all_future_pings() { - let telem = get_sync_test_telemetry(); - telem.submit = assert_valid_ping; -} - -function wait_for_ping(callback, allowErrorPings, getFullPing = false) { - return new Promise(resolve => { - let telem = get_sync_test_telemetry(); - let oldSubmit = telem.submit; - telem.submit = function(record) { - telem.submit = oldSubmit; - if (allowErrorPings) { - assert_valid_ping(record); - } else { - assert_success_ping(record); - } - if (getFullPing) { - resolve(record); - } else { - equal(record.syncs.length, 1); - resolve(record.syncs[0]); - } - }; - callback(); - }); -} - -// Short helper for wait_for_ping -function sync_and_validate_telem(allowErrorPings, getFullPing = false) { - return wait_for_ping(() => Service.sync(), allowErrorPings, getFullPing); -} - -// Used for the (many) cases where we do a 'partial' sync, where only a single -// engine is actually synced, but we still want to ensure we're generating a -// valid ping. Returns a promise that resolves to the ping, or rejects with the -// thrown error after calling an optional callback. -function sync_engine_and_validate_telem(engine, allowErrorPings, onError) { - return new Promise((resolve, reject) => { - let telem = get_sync_test_telemetry(); - let caughtError = null; - // Clear out status, so failures from previous syncs won't show up in the - // telemetry ping. - let ns = {}; - Cu.import("resource://services-sync/status.js", ns); - ns.Status._engines = {}; - ns.Status.partial = false; - // Ideally we'd clear these out like we do with engines, (probably via - // Status.resetSync()), but this causes *numerous* tests to fail, so we just - // assume that if no failureReason or engine failures are set, and the - // status properties are the same as they were initially, that it's just - // a leftover. - // This is only an issue since we're triggering the sync of just one engine, - // without doing any other parts of the sync. - let initialServiceStatus = ns.Status._service; - let initialSyncStatus = ns.Status._sync; - - let oldSubmit = telem.submit; - telem.submit = function(ping) { - telem.submit = oldSubmit; - ping.syncs.forEach(record => { - if (record && record.status) { - // did we see anything to lead us to believe that something bad actually happened - let realProblem = record.failureReason || record.engines.some(e => { - if (e.failureReason || e.status) { - return true; - } - if (e.outgoing && e.outgoing.some(o => o.failed > 0)) { - return true; - } - return e.incoming && e.incoming.failed; - }); - if (!realProblem) { - // no, so if the status is the same as it was initially, just assume - // that its leftover and that we can ignore it. 
- if (record.status.sync && record.status.sync == initialSyncStatus) { - delete record.status.sync; - } - if (record.status.service && record.status.service == initialServiceStatus) { - delete record.status.service; - } - if (!record.status.sync && !record.status.service) { - delete record.status; - } - } - } - }); - if (allowErrorPings) { - assert_valid_ping(ping); - } else { - assert_success_ping(ping); - } - equal(ping.syncs.length, 1); - if (caughtError) { - if (onError) { - onError(ping.syncs[0]); - } - reject(caughtError); - } else { - resolve(ping.syncs[0]); - } - } - Svc.Obs.notify("weave:service:sync:start"); - try { - engine.sync(); - } catch (e) { - caughtError = e; - } - if (caughtError) { - Svc.Obs.notify("weave:service:sync:error", caughtError); - } else { - Svc.Obs.notify("weave:service:sync:finish"); - } - }); -} - -// Avoid an issue where `client.name2` containing unicode characters causes -// a number of tests to fail, due to them assuming that we do not need to utf-8 -// encode or decode data sent through the mocked server (see bug 1268912). -Utils.getDefaultDeviceName = function() { - return "Test device name"; -}; - - diff --git a/services/sync/tests/unit/head_http_server.js b/services/sync/tests/unit/head_http_server.js index 26f62310c..c917c4988 100644 --- a/services/sync/tests/unit/head_http_server.js +++ b/services/sync/tests/unit/head_http_server.js @@ -1,4 +1,4 @@ -var Cm = Components.manager; +const Cm = Components.manager; // Shared logging for all HTTP server functions. Cu.import("resource://gre/modules/Log.jsm"); @@ -178,13 +178,9 @@ ServerCollection.prototype = { * @return an array of IDs. */ keys: function keys(filter) { - let ids = []; - for (let [id, wbo] of Object.entries(this._wbos)) { - if (wbo.payload && (!filter || filter(id, wbo))) { - ids.push(id); - } - } - return ids; + return [id for ([id, wbo] in Iterator(this._wbos)) + if (wbo.payload && + (!filter || filter(id, wbo)))]; }, /** @@ -198,13 +194,8 @@ ServerCollection.prototype = { * @return an array of ServerWBOs. */ wbos: function wbos(filter) { - let os = []; - for (let [id, wbo] of Object.entries(this._wbos)) { - if (wbo.payload) { - os.push(wbo); - } - } - + let os = [wbo for ([id, wbo] in Iterator(this._wbos)) + if (wbo.payload)]; if (filter) { return os.filter(filter); } @@ -276,7 +267,7 @@ ServerCollection.prototype = { count: function(options) { options = options || {}; let c = 0; - for (let [id, wbo] of Object.entries(this._wbos)) { + for (let [id, wbo] in Iterator(this._wbos)) { if (wbo.modified && this._inResultSet(wbo, options)) { c++; } @@ -287,23 +278,12 @@ ServerCollection.prototype = { get: function(options) { let result; if (options.full) { - let data = []; - for (let [id, wbo] of Object.entries(this._wbos)) { - // Drop deleted. - if (wbo.modified && this._inResultSet(wbo, options)) { - data.push(wbo.get()); - } - } - let start = options.offset || 0; + let data = [wbo.get() for ([id, wbo] in Iterator(this._wbos)) + // Drop deleted. + if (wbo.modified && + this._inResultSet(wbo, options))]; if (options.limit) { - let numItemsPastOffset = data.length - start; - data = data.slice(start, start + options.limit); - // use options as a backchannel to set x-weave-next-offset - if (numItemsPastOffset > options.limit) { - options.nextOffset = start + options.limit; - } - } else if (start) { - data = data.slice(start); + data = data.slice(0, options.limit); } // Our implementation of application/newlines. 
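// ("application/newlines" is the Sync storage output format for multi-record
// GET responses: each record is serialized as a JSON object on its own line,
// and the body ends with a trailing newline. A two-record body, with payloads
// abbreviated, would look like:
//
//   {"id":"aaaaaaaaaaaa","modified":1300000000.00,"payload":"..."}
//   {"id":"bbbbbbbbbbbb","modified":1300000000.01,"payload":"..."}
//
// which is exactly what the join("\n") + "\n" below produces.)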
result = data.join("\n") + "\n"; @@ -311,18 +291,10 @@ ServerCollection.prototype = { // Use options as a backchannel to report count. options.recordCount = data.length; } else { - let data = []; - for (let [id, wbo] of Object.entries(this._wbos)) { - if (this._inResultSet(wbo, options)) { - data.push(id); - } - } - let start = options.offset || 0; + let data = [id for ([id, wbo] in Iterator(this._wbos)) + if (this._inResultSet(wbo, options))]; if (options.limit) { - data = data.slice(start, start + options.limit); - options.nextOffset = start + options.limit; - } else if (start) { - data = data.slice(start); + data = data.slice(0, options.limit); } result = JSON.stringify(data); options.recordCount = data.length; @@ -337,8 +309,7 @@ ServerCollection.prototype = { // This will count records where we have an existing ServerWBO // registered with us as successful and all other records as failed. - for (let key in input) { - let record = input[key]; + for each (let record in input) { let wbo = this.wbo(record.id); if (!wbo && this.acceptNew) { this._log.debug("Creating WBO " + JSON.stringify(record.id) + @@ -361,7 +332,7 @@ ServerCollection.prototype = { delete: function(options) { let deleted = []; - for (let [id, wbo] of Object.entries(this._wbos)) { + for (let [id, wbo] in Iterator(this._wbos)) { if (this._inResultSet(wbo, options)) { this._log.debug("Deleting " + JSON.stringify(wbo)); deleted.push(wbo.id); @@ -383,7 +354,7 @@ ServerCollection.prototype = { // Parse queryString let options = {}; - for (let chunk of request.queryString.split("&")) { + for each (let chunk in request.queryString.split("&")) { if (!chunk) { continue; } @@ -403,36 +374,29 @@ ServerCollection.prototype = { if (options.limit) { options.limit = parseInt(options.limit, 10); } - if (options.offset) { - options.offset = parseInt(options.offset, 10); - } switch(request.method) { case "GET": - body = self.get(options, request); - // see http://moz-services-docs.readthedocs.io/en/latest/storage/apis-1.5.html - // for description of these headers. - let { recordCount: records, nextOffset } = options; - - self._log.info("Records: " + records + ", nextOffset: " + nextOffset); + body = self.get(options); + // "If supported by the db, this header will return the number of + // records total in the request body of any multiple-record GET + // request." + let records = options.recordCount; + self._log.info("Records: " + records); if (records != null) { response.setHeader("X-Weave-Records", "" + records); } - if (nextOffset) { - response.setHeader("X-Weave-Next-Offset", "" + nextOffset); - } - response.setHeader("X-Last-Modified", "" + this.timestamp); break; case "POST": - let res = self.post(readBytesFromInputStream(request.bodyInputStream), request); + let res = self.post(readBytesFromInputStream(request.bodyInputStream)); body = JSON.stringify(res); response.newModified = res.modified; break; case "DELETE": self._log.debug("Invoking ServerCollection.DELETE."); - let deleted = self.delete(options, request); + let deleted = self.delete(options); let ts = new_timestamp(); body = JSON.stringify(ts); response.newModified = ts; @@ -541,7 +505,7 @@ function track_collections_helper() { * find out what it needs without monkeypatching. Use this object as your * prototype, and override as appropriate. 
*/ -var SyncServerCallback = { +let SyncServerCallback = { onCollectionDeleted: function onCollectionDeleted(user, collection) {}, onItemDeleted: function onItemDeleted(user, collection, wboID) {}, @@ -581,13 +545,13 @@ SyncServer.prototype = { * Start the SyncServer's underlying HTTP server. * * @param port - * The numeric port on which to start. -1 implies the default, a - * randomly chosen port. + * The numeric port on which to start. A falsy value implies the + * default, a randomly chosen port. * @param cb * A callback function (of no arguments) which is invoked after * startup. */ - start: function start(port = -1, cb) { + start: function start(port, cb) { if (this.started) { this._log.warn("Warning: server already started on " + this.port); return; @@ -605,7 +569,7 @@ SyncServer.prototype = { } catch (ex) { _("=========================================="); _("Got exception starting Sync HTTP server."); - _("Error: " + Log.exceptionStr(ex)); + _("Error: " + Utils.exceptionStr(ex)); _("Is there a process already listening on port " + port + "?"); _("=========================================="); do_throw(ex); @@ -703,10 +667,10 @@ SyncServer.prototype = { throw new Error("Unknown user."); } let userCollections = this.users[username].collections; - for (let [id, contents] of Object.entries(collections)) { + for (let [id, contents] in Iterator(collections)) { let coll = userCollections[id] || this._insertCollection(userCollections, id); - for (let [wboID, payload] of Object.entries(contents)) { + for (let [wboID, payload] in Iterator(contents)) { coll.insert(wboID, payload); } } @@ -740,8 +704,7 @@ SyncServer.prototype = { throw new Error("Unknown user."); } let userCollections = this.users[username].collections; - for (let name in userCollections) { - let coll = userCollections[name]; + for each (let [name, coll] in Iterator(userCollections)) { this._log.trace("Bulk deleting " + name + " for " + username + "..."); coll.delete({}); } @@ -805,10 +768,7 @@ SyncServer.prototype = { */ respond: function respond(req, resp, code, status, body, headers) { resp.setStatusLine(req.httpVersion, code, status); - if (!headers) - headers = this.defaultHeaders; - for (let header in headers) { - let value = headers[header]; + for each (let [header, value] in Iterator(headers || this.defaultHeaders)) { resp.setHeader(header, value); } resp.setHeader("X-Weave-Timestamp", "" + this.timestamp(), false); @@ -1035,7 +995,7 @@ SyncServer.prototype = { */ function serverForUsers(users, contents, callback) { let server = new SyncServer(callback); - for (let [user, pass] of Object.entries(users)) { + for (let [user, pass] in Iterator(users)) { server.registerUser(user, pass); server.createContents(user, contents); } diff --git a/services/sync/tests/unit/prefs_test_prefs_store.js b/services/sync/tests/unit/prefs_test_prefs_store.js deleted file mode 100644 index 109757a35..000000000 --- a/services/sync/tests/unit/prefs_test_prefs_store.js +++ /dev/null @@ -1,25 +0,0 @@ -// This is a "preferences" file used by test_prefs_store.js - -// The prefs that control what should be synced. -// Most of these are "default" prefs, so the value itself will not sync. -pref("services.sync.prefs.sync.testing.int", true); -pref("services.sync.prefs.sync.testing.string", true); -pref("services.sync.prefs.sync.testing.bool", true); -pref("services.sync.prefs.sync.testing.dont.change", true); -// this one is a user pref, so it *will* sync. 
-user_pref("services.sync.prefs.sync.testing.turned.off", false); -pref("services.sync.prefs.sync.testing.nonexistent", true); -pref("services.sync.prefs.sync.testing.default", true); - -// The preference values - these are all user_prefs, otherwise their value -// will not be synced. -user_pref("testing.int", 123); -user_pref("testing.string", "ohai"); -user_pref("testing.bool", true); -user_pref("testing.dont.change", "Please don't change me."); -user_pref("testing.turned.off", "I won't get synced."); -user_pref("testing.not.turned.on", "I won't get synced either!"); - -// A pref that exists but still has the default value - will be synced with -// null as the value. -pref("testing.default", "I'm the default value"); diff --git a/services/sync/tests/unit/sync_ping_schema.json b/services/sync/tests/unit/sync_ping_schema.json deleted file mode 100644 index 56114fb93..000000000 --- a/services/sync/tests/unit/sync_ping_schema.json +++ /dev/null @@ -1,198 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "description": "schema for Sync pings, documentation avaliable in toolkit/components/telemetry/docs/sync-ping.rst", - "type": "object", - "additionalProperties": false, - "required": ["version", "syncs", "why"], - "properties": { - "version": { "type": "integer", "minimum": 0 }, - "discarded": { "type": "integer", "minimum": 1 }, - "why": { "enum": ["shutdown", "schedule"] }, - "syncs": { - "type": "array", - "minItems": 1, - "items": { "$ref": "#/definitions/payload" } - } - }, - "definitions": { - "payload": { - "type": "object", - "additionalProperties": false, - "required": ["when", "uid", "took"], - "properties": { - "didLogin": { "type": "boolean" }, - "when": { "type": "integer" }, - "uid": { - "type": "string", - "pattern": "^[0-9a-f]{32}$" - }, - "devices": { - "type": "array", - "items": { "$ref": "#/definitions/device" } - }, - "deviceID": { - "type": "string", - "pattern": "^[0-9a-f]{64}$" - }, - "status": { - "type": "object", - "anyOf": [ - { "required": ["sync"] }, - { "required": ["service"] } - ], - "additionalProperties": false, - "properties": { - "sync": { "type": "string" }, - "service": { "type": "string" } - } - }, - "why": { "enum": ["startup", "schedule", "score", "user", "tabs"] }, - "took": { "type": "integer", "minimum": -1 }, - "failureReason": { "$ref": "#/definitions/error" }, - "engines": { - "type": "array", - "minItems": 1, - "items": { "$ref": "#/definitions/engine" } - } - } - }, - "device": { - "required": ["os", "id", "version"], - "additionalProperties": false, - "type": "object", - "properties": { - "id": { "type": "string", "pattern": "^[0-9a-f]{64}$" }, - "os": { "type": "string" }, - "version": { "type": "string" } - } - }, - "engine": { - "required": ["name"], - "additionalProperties": false, - "properties": { - "failureReason": { "$ref": "#/definitions/error" }, - "name": { "enum": ["addons", "bookmarks", "clients", "forms", "history", "passwords", "prefs", "tabs"] }, - "took": { "type": "integer", "minimum": 1 }, - "status": { "type": "string" }, - "incoming": { - "type": "object", - "additionalProperties": false, - "anyOf": [ - {"required": ["applied"]}, - {"required": ["failed"]}, - {"required": ["newFailed"]}, - {"required": ["reconciled"]} - ], - "properties": { - "applied": { "type": "integer", "minimum": 1 }, - "failed": { "type": "integer", "minimum": 1 }, - "newFailed": { "type": "integer", "minimum": 1 }, - "reconciled": { "type": "integer", "minimum": 1 } - } - }, - "outgoing": { - "type": "array", - "minItems": 1, - 
"items": { "$ref": "#/definitions/outgoingBatch" } - }, - "validation": { - "type": "object", - "additionalProperties": false, - "anyOf": [ - { "required": ["checked"] }, - { "required": ["failureReason"] } - ], - "properties": { - "checked": { "type": "integer", "minimum": 0 }, - "failureReason": { "$ref": "#/definitions/error" }, - "took": { "type": "integer" }, - "version": { "type": "integer" }, - "problems": { - "type": "array", - "minItems": 1, - "$ref": "#/definitions/validationProblem" - } - } - } - } - }, - "outgoingBatch": { - "type": "object", - "additionalProperties": false, - "anyOf": [ - {"required": ["sent"]}, - {"required": ["failed"]} - ], - "properties": { - "sent": { "type": "integer", "minimum": 1 }, - "failed": { "type": "integer", "minimum": 1 } - } - }, - "error": { - "oneOf": [ - { "$ref": "#/definitions/httpError" }, - { "$ref": "#/definitions/nsError" }, - { "$ref": "#/definitions/shutdownError" }, - { "$ref": "#/definitions/authError" }, - { "$ref": "#/definitions/otherError" }, - { "$ref": "#/definitions/unexpectedError" }, - { "$ref": "#/definitions/sqlError" } - ] - }, - "httpError": { - "required": ["name", "code"], - "properties": { - "name": { "enum": ["httperror"] }, - "code": { "type": "integer" } - } - }, - "nsError": { - "required": ["name", "code"], - "properties": { - "name": { "enum": ["nserror"] }, - "code": { "type": "integer" } - } - }, - "shutdownError": { - "required": ["name"], - "properties": { - "name": { "enum": ["shutdownerror"] } - } - }, - "authError": { - "required": ["name"], - "properties": { - "name": { "enum": ["autherror"] }, - "from": { "enum": ["tokenserver", "fxaccounts", "hawkclient"] } - } - }, - "otherError": { - "required": ["name"], - "properties": { - "name": { "enum": ["othererror"] }, - "error": { "type": "string" } - } - }, - "unexpectedError": { - "required": ["name"], - "properties": { - "name": { "enum": ["unexpectederror"] }, - "error": { "type": "string" } - } - }, - "sqlError": { - "required": ["name"], - "properties": { - "name": { "enum": ["sqlerror"] }, - "code": { "type": "integer" } - } - }, - "validationProblem": { - "required": ["name", "count"], - "properties": { - "name": { "type": "string" }, - "count": { "type": "integer" } - } - } - } -}
\ No newline at end of file diff --git a/services/sync/tests/unit/systemaddon-search.xml b/services/sync/tests/unit/systemaddon-search.xml deleted file mode 100644 index d34e3937c..000000000 --- a/services/sync/tests/unit/systemaddon-search.xml +++ /dev/null @@ -1,27 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<searchresults total_results="1"> - <addon id="5618"> - <name>System Add-on Test</name> - <type id="1">Extension</type> - <guid>system1@tests.mozilla.org</guid> - <slug>addon11</slug> - <version>1.0</version> - - <compatible_applications><application> - <name>Firefox</name> - <application_id>1</application_id> - <min_version>3.6</min_version> - <max_version>*</max_version> - <appID>xpcshell@tests.mozilla.org</appID> - </application></compatible_applications> - <all_compatible_os><os>ALL</os></all_compatible_os> - - <install os="ALL" size="999">http://127.0.0.1:8888/system.xpi</install> - <created epoch="1252903662"> - 2009-09-14T04:47:42Z - </created> - <last_updated epoch="1315255329"> - 2011-09-05T20:42:09Z - </last_updated> - </addon> -</searchresults> diff --git a/services/sync/tests/unit/test_addon_utils.js b/services/sync/tests/unit/test_addon_utils.js index bbbd81d0d..49824cd4c 100644 --- a/services/sync/tests/unit/test_addon_utils.js +++ b/services/sync/tests/unit/test_addon_utils.js @@ -3,7 +3,6 @@ "use strict"; -Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://gre/modules/Preferences.jsm"); Cu.import("resource://services-sync/addonutils.js"); Cu.import("resource://services-sync/util.js"); @@ -11,7 +10,7 @@ Cu.import("resource://services-sync/util.js"); const HTTP_PORT = 8888; const SERVER_ADDRESS = "http://127.0.0.1:8888"; -var prefs = new Preferences(); +let prefs = new Preferences(); prefs.set("extensions.getAddons.get.url", SERVER_ADDRESS + "/search/guid:%IDS%"); @@ -36,7 +35,7 @@ function createAndStartHTTPServer(port=HTTP_PORT) { return server; } catch (ex) { _("Got exception starting HTTP server on port " + port); - _("Error: " + Log.exceptionStr(ex)); + _("Error: " + Utils.exceptionStr(ex)); do_throw(ex); } } @@ -61,9 +60,6 @@ add_test(function test_handle_empty_source_uri() { do_check_true("installedIDs" in result); do_check_eq(0, result.installedIDs.length); - do_check_true("skipped" in result); - do_check_true(result.skipped.includes(ID)); - server.stop(run_next_test); }); @@ -83,18 +79,44 @@ add_test(function test_ignore_untrusted_source_uris() { let sourceURI = ioService.newURI(s, null, null); let addon = {sourceURI: sourceURI, name: "bad", id: "bad"}; - let canInstall = AddonUtils.canInstallAddon(addon); - do_check_false(canInstall, "Correctly rejected a bad URL"); + try { + let cb = Async.makeSpinningCallback(); + AddonUtils.getInstallFromSearchResult(addon, cb, true); + cb.wait(); + } catch (ex) { + do_check_neq(null, ex); + do_check_eq(0, ex.message.indexOf("Insecure source URI")); + continue; + } + + // We should never get here if an exception is thrown. + do_check_true(false); } + let count = 0; for (let s of good) { let sourceURI = ioService.newURI(s, null, null); let addon = {sourceURI: sourceURI, name: "good", id: "good"}; - let canInstall = AddonUtils.canInstallAddon(addon); - do_check_true(canInstall, "Correctly accepted a good URL"); + // Despite what you might think, we don't get an error in the callback. + // The install won't work because the underlying Addon instance wasn't + // proper. But, that just results in an AddonInstall that is missing + // certain values. 
We really just care that the callback is being invoked + anyway. + let callback = function onInstall(error, install) { + do_check_null(error); + do_check_neq(null, install); + do_check_eq(sourceURI.spec, install.sourceURI.spec); + + count += 1; + + if (count >= good.length) { + run_next_test(); + } + }; + + AddonUtils.getInstallFromSearchResult(addon, callback, true); } - run_next_test(); }); add_test(function test_source_uri_rewrite() { @@ -103,6 +125,8 @@ // This tests for conformance with bug 708134 so server-side metrics aren't // skewed. + Svc.Prefs.set("addons.ignoreRepositoryChecking", true); + // We resort to monkeypatching because of the API design. let oldFunction = AddonUtils.__proto__.installAddonFromSearchResult; @@ -127,15 +151,12 @@ let server = createAndStartHTTPServer(); let installCallback = Async.makeSpinningCallback(); - let installOptions = { - id: "rewrite@tests.mozilla.org", - requireSecureURI: false, - } - AddonUtils.installAddons([installOptions], installCallback); + AddonUtils.installAddons([{id: "rewrite@tests.mozilla.org"}], installCallback); installCallback.wait(); do_check_true(installCalled); AddonUtils.__proto__.installAddonFromSearchResult = oldFunction; + Svc.Prefs.reset("addons.ignoreRepositoryChecking"); server.stop(run_next_test); }); diff --git a/services/sync/tests/unit/test_addons_engine.js b/services/sync/tests/unit/test_addons_engine.js index 64e4e32e8..ca2e4bd96 100644 --- a/services/sync/tests/unit/test_addons_engine.js +++ b/services/sync/tests/unit/test_addons_engine.js @@ -13,20 +13,19 @@ Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); Cu.import("resource://testing-common/services/sync/utils.js"); -var prefs = new Preferences(); +let prefs = new Preferences(); prefs.set("extensions.getAddons.get.url", "http://localhost:8888/search/guid:%IDS%"); -prefs.set("extensions.install.requireSecureOrigin", false); loadAddonTestFunctions(); startupManager(); -var engineManager = Service.engineManager; +let engineManager = Service.engineManager; engineManager.register(AddonsEngine); -var engine = engineManager.get("addons"); -var reconciler = engine._reconciler; -var tracker = engine._tracker; +let engine = engineManager.get("addons"); +let reconciler = engine._reconciler; +let tracker = engine._tracker; function advance_test() { reconciler._addons = {}; @@ -36,6 +35,8 @@ reconciler.saveState(null, cb); cb.wait(); + Svc.Prefs.reset("addons.ignoreRepositoryChecking"); + run_next_test(); } @@ -103,6 +104,7 @@ add_test(function test_get_changed_ids() { tracker.clearChangedIDs(); _("Ensure reconciler changes are populated."); + Svc.Prefs.set("addons.ignoreRepositoryChecking", true); let addon = installAddon("test_bootstrap1_1"); tracker.clearChangedIDs(); // Just in case. changes = engine.getChangedIDs(); @@ -149,6 +151,9 @@ add_test(function test_disabled_install_semantics() { // This is essentially a test for bug 712542, which snuck into the original // add-on sync drop. It ensures that when an add-on is installed, the // disabled state and incoming syncGUID are preserved, even on the next sync.
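// In record terms (an illustrative sketch only - the helper and assertion
// names mirror the ones used in nearby tests, so treat them as assumptions):
//
//   let guid = Utils.makeGUID();
//   // enabled == false, deleted == false:
//   let record = createRecordForThisApp(guid, ADDON_ID, false, false);
//   store.applyIncomingBatch([record]);
//   let addon = getAddonFromAddonManagerByID(ADDON_ID);
//   // Expected: addon.userDisabled is true, addon.syncGUID == guid, and
//   // both survive a subsequent sync unchanged.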
+ + Svc.Prefs.set("addons.ignoreRepositoryChecking", true); + const USER = "foo"; const PASSWORD = "password"; const PASSPHRASE = "abcdeabcdeabcdeabcdeabcdea"; diff --git a/services/sync/tests/unit/test_addons_reconciler.js b/services/sync/tests/unit/test_addons_reconciler.js index d93bdfc03..8cfa37d78 100644 --- a/services/sync/tests/unit/test_addons_reconciler.js +++ b/services/sync/tests/unit/test_addons_reconciler.js @@ -71,7 +71,7 @@ add_test(function test_install_detection() { const KEYS = ["id", "guid", "enabled", "installed", "modified", "type", "scope", "foreignInstall"]; - for (let key of KEYS) { + for each (let key in KEYS) { do_check_true(key in record); do_check_neq(null, record[key]); } diff --git a/services/sync/tests/unit/test_addons_store.js b/services/sync/tests/unit/test_addons_store.js index b52cfab31..b21f6afe1 100644 --- a/services/sync/tests/unit/test_addons_store.js +++ b/services/sync/tests/unit/test_addons_store.js @@ -3,47 +3,25 @@ "use strict"; -Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://gre/modules/Preferences.jsm"); Cu.import("resource://services-sync/addonutils.js"); Cu.import("resource://services-sync/engines/addons.js"); Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); -Cu.import("resource://testing-common/services/sync/utils.js"); -Cu.import("resource://gre/modules/FileUtils.jsm"); const HTTP_PORT = 8888; -var prefs = new Preferences(); +let prefs = new Preferences(); prefs.set("extensions.getAddons.get.url", "http://localhost:8888/search/guid:%IDS%"); -prefs.set("extensions.install.requireSecureOrigin", false); - -const SYSTEM_ADDON_ID = "system1@tests.mozilla.org"; -let systemAddonFile; - -// The system add-on must be installed before AddonManager is started. -function loadSystemAddon() { - let addonFilename = SYSTEM_ADDON_ID + ".xpi"; - const distroDir = FileUtils.getDir("ProfD", ["sysfeatures", "app0"], true); - do_get_file(ExtensionsTestPath("/data/system_addons/system1_1.xpi")).copyTo(distroDir, addonFilename); - systemAddonFile = FileUtils.File(distroDir.path); - systemAddonFile.append(addonFilename); - systemAddonFile.lastModifiedTime = Date.now(); - // As we're not running in an application, we need to set up the features directory - // used by system add-ons. - registerDirectory("XREAppFeat", distroDir); -} - loadAddonTestFunctions(); -loadSystemAddon(); startupManager(); Service.engineManager.register(AddonsEngine); -var engine = Service.engineManager.get("addons"); -var tracker = engine._tracker; -var store = engine._store; -var reconciler = engine._reconciler; +let engine = Service.engineManager.get("addons"); +let tracker = engine._tracker; +let store = engine._store; +let reconciler = engine._reconciler; /** * Create an AddonsRec for this application with the fields specified.
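// A plausible shape for that helper, inferred from call sites such as
// createRecordForThisApp(guid, id, enabled, deleted) in these tests; the real
// definition lives outside this hunk, and the applicationID scoping shown
// here is an assumption:
function createRecordForThisAppSketch(guid, id, enabled, deleted) {
  return {
    id: guid,                            // The Sync record (WBO) ID.
    addonID: id,                         // The add-on the record describes.
    enabled: enabled,                    // false means "arrive userDisabled".
    deleted: !!deleted,                  // true marks a tombstone.
    applicationID: Services.appinfo.ID,  // Add-on records are per-application.
  };
}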
@@ -77,16 +55,12 @@ function createAndStartHTTPServer(port) { server.registerFile("/search/guid:missing-xpi%40tests.mozilla.org", do_get_file("missing-xpi-search.xml")); - server.registerFile("/search/guid:system1%40tests.mozilla.org", - do_get_file("systemaddon-search.xml")); - server.registerFile("/system.xpi", systemAddonFile); - server.start(port); return server; } catch (ex) { _("Got exception starting HTTP server on port " + port); - _("Error: " + Log.exceptionStr(ex)); + _("Error: " + Utils.exceptionStr(ex)); do_throw(ex); } } @@ -94,7 +68,6 @@ function createAndStartHTTPServer(port) { function run_test() { initTestLogging("Trace"); Log.repository.getLogger("Sync.Engine.Addons").level = Log.Level.Trace; - Log.repository.getLogger("Sync.Tracker.Addons").level = Log.Level.Trace; Log.repository.getLogger("Sync.AddonsRepository").level = Log.Level.Trace; @@ -219,6 +192,7 @@ add_test(function test_apply_uninstall() { add_test(function test_addon_syncability() { _("Ensure isAddonSyncable functions properly."); + Svc.Prefs.set("addons.ignoreRepositoryChecking", true); Svc.Prefs.set("addons.trustedSourceHostnames", "addons.mozilla.org,other.example.com"); @@ -228,8 +202,8 @@ add_test(function test_addon_syncability() { do_check_true(store.isAddonSyncable(addon)); let dummy = {}; - const KEYS = ["id", "syncGUID", "type", "scope", "foreignInstall", "isSyncable"]; - for (let k of KEYS) { + const KEYS = ["id", "syncGUID", "type", "scope", "foreignInstall"]; + for each (let k in KEYS) { dummy[k] = addon[k]; } @@ -243,10 +217,6 @@ add_test(function test_addon_syncability() { do_check_false(store.isAddonSyncable(dummy)); dummy.scope = addon.scope; - dummy.isSyncable = false; - do_check_false(store.isAddonSyncable(dummy)); - dummy.isSyncable = addon.isSyncable; - dummy.foreignInstall = true; do_check_false(store.isAddonSyncable(dummy)); dummy.foreignInstall = false; @@ -272,16 +242,16 @@ add_test(function test_addon_syncability() { "https://untrusted.example.com/foo", // non-trusted hostname` ]; - for (let uri of trusted) { + for each (let uri in trusted) { do_check_true(store.isSourceURITrusted(createURI(uri))); } - for (let uri of untrusted) { + for each (let uri in untrusted) { do_check_false(store.isSourceURITrusted(createURI(uri))); } Svc.Prefs.set("addons.trustedSourceHostnames", ""); - for (let uri of trusted) { + for each (let uri in trusted) { do_check_false(store.isSourceURITrusted(createURI(uri))); } @@ -296,6 +266,8 @@ add_test(function test_addon_syncability() { add_test(function test_ignore_hotfixes() { _("Ensure that hotfix extensions are ignored."); + Svc.Prefs.set("addons.ignoreRepositoryChecking", true); + // A hotfix extension is one that has the id the same as the // extensions.hotfix.id pref. 
let prefs = new Preferences("extensions."); @@ -304,8 +276,8 @@ add_test(function test_ignore_hotfixes() { do_check_true(store.isAddonSyncable(addon)); let dummy = {}; - const KEYS = ["id", "syncGUID", "type", "scope", "foreignInstall", "isSyncable"]; - for (let k of KEYS) { + const KEYS = ["id", "syncGUID", "type", "scope", "foreignInstall"]; + for each (let k in KEYS) { dummy[k] = addon[k]; } @@ -327,6 +299,7 @@ add_test(function test_ignore_hotfixes() { uninstallAddon(addon); + Svc.Prefs.reset("addons.ignoreRepositoryChecking"); prefs.reset("hotfix.id"); run_next_test(); @@ -336,6 +309,8 @@ add_test(function test_ignore_hotfixes() { add_test(function test_get_all_ids() { _("Ensures that getAllIDs() returns an appropriate set."); + Svc.Prefs.set("addons.ignoreRepositoryChecking", true); + _("Installing two addons."); let addon1 = installAddon("test_install1"); let addon2 = installAddon("test_bootstrap1_1"); @@ -354,6 +329,7 @@ add_test(function test_get_all_ids() { addon1.install.cancel(); uninstallAddon(addon2); + Svc.Prefs.reset("addons.ignoreRepositoryChecking"); run_next_test(); }); @@ -379,6 +355,9 @@ add_test(function test_change_item_id() { add_test(function test_create() { _("Ensure creating/installing an add-on from a record works."); + // Set this so that getInstallFromSearchResult doesn't end up + // failing the install due to an insecure source URI scheme. + Svc.Prefs.set("addons.ignoreRepositoryChecking", true); let server = createAndStartHTTPServer(HTTP_PORT); let addon = installAddon("test_bootstrap1_1"); @@ -398,6 +377,7 @@ add_test(function test_create() { uninstallAddon(newAddon); + Svc.Prefs.reset("addons.ignoreRepositoryChecking"); server.stop(run_next_test); }); @@ -432,18 +412,8 @@ add_test(function test_create_bad_install() { let record = createRecordForThisApp(guid, id, true, false); let failed = store.applyIncomingBatch([record]); - // This addon had no source URI so was skipped - but it's not treated as - // failure. - // XXX - this test isn't testing what we thought it was. Previously the addon - // was not being installed due to requireSecureURL checking *before* we'd - // attempted to get the XPI. - // With requireSecureURL disabled we do see a download failure, but the addon - // *does* get added to |failed|. - // FTR: onDownloadFailed() is called with ERROR_NETWORK_FAILURE, so it's going - // to be tricky to distinguish a 404 from other transient network errors - // where we do want the addon to end up in |failed|. - // This is being tracked in bug 1284778. - //do_check_eq(0, failed.length); + do_check_eq(1, failed.length); + do_check_eq(guid, failed[0]); let addon = getAddonFromAddonManagerByID(id); do_check_eq(null, addon); @@ -451,56 +421,19 @@ add_test(function test_create_bad_install() { server.stop(run_next_test); }); -add_test(function test_ignore_system() { - _("Ensure we ignore system addons"); - // Our system addon should not appear in getAllIDs - engine._refreshReconcilerState(); - let num = 0; - for (let guid in store.getAllIDs()) { - num += 1; - let addon = reconciler.getAddonStateFromSyncGUID(guid); - do_check_neq(addon.id, SYSTEM_ADDON_ID); - } - do_check_true(num > 1, "should have seen at least one.") - run_next_test(); -}); - -add_test(function test_incoming_system() { - _("Ensure we handle incoming records that refer to a system addon"); - // eg, loop initially had a normal addon but it was then "promoted" to be a - // system addon but wanted to keep the same ID. The server record exists due - // to this. 
- - // before we start, ensure the system addon isn't disabled. - do_check_false(getAddonFromAddonManagerByID(SYSTEM_ADDON_ID).userDisabled); - - // Now simulate an incoming record with the same ID as the system addon, - // but flagged as disabled - it should not be applied. - let server = createAndStartHTTPServer(HTTP_PORT); - // We make the incoming record flag the system addon as disabled - it should - // be ignored. - let guid = Utils.makeGUID(); - let record = createRecordForThisApp(guid, SYSTEM_ADDON_ID, false, false); - - let failed = store.applyIncomingBatch([record]); - do_check_eq(0, failed.length); - - // The system addon should still not be userDisabled. - do_check_false(getAddonFromAddonManagerByID(SYSTEM_ADDON_ID).userDisabled); - - server.stop(run_next_test); -}); - add_test(function test_wipe() { _("Ensures that wiping causes add-ons to be uninstalled."); let addon1 = installAddon("test_bootstrap1_1"); + Svc.Prefs.set("addons.ignoreRepositoryChecking", true); store.wipe(); let addon = getAddonFromAddonManagerByID(addon1.id); do_check_eq(null, addon); + Svc.Prefs.reset("addons.ignoreRepositoryChecking"); + run_next_test(); }); @@ -515,6 +448,7 @@ add_test(function test_wipe_and_install() { let record = createRecordForThisApp(installed.syncGUID, installed.id, true, false); + Svc.Prefs.set("addons.ignoreRepositoryChecking", true); store.wipe(); let deleted = getAddonFromAddonManagerByID(installed.id); @@ -528,6 +462,7 @@ add_test(function test_wipe_and_install() { let fetched = getAddonFromAddonManagerByID(record.addonID); do_check_true(!!fetched); + Svc.Prefs.reset("addons.ignoreRepositoryChecking"); server.stop(run_next_test); }); diff --git a/services/sync/tests/unit/test_addons_tracker.js b/services/sync/tests/unit/test_addons_tracker.js index 01bf37ab9..690a57d03 100644 --- a/services/sync/tests/unit/test_addons_tracker.js +++ b/services/sync/tests/unit/test_addons_tracker.js @@ -11,13 +11,14 @@ Cu.import("resource://services-sync/util.js"); loadAddonTestFunctions(); startupManager(); +Svc.Prefs.set("addons.ignoreRepositoryChecking", true); Svc.Prefs.set("engine.addons", true); Service.engineManager.register(AddonsEngine); -var engine = Service.engineManager.get("addons"); -var reconciler = engine._reconciler; -var store = engine._store; -var tracker = engine._tracker; +let engine = Service.engineManager.get("addons"); +let reconciler = engine._reconciler; +let store = engine._store; +let tracker = engine._tracker; // Don't write out by default. tracker.persistChangedIDs = false; diff --git a/services/sync/tests/unit/test_block_sync.js b/services/sync/tests/unit/test_block_sync.js new file mode 100644 index 000000000..f83b7b740 --- /dev/null +++ b/services/sync/tests/unit/test_block_sync.js @@ -0,0 +1,37 @@ +/* Any copyright is dedicated to the Public Domain. + http://creativecommons.org/publicdomain/zero/1.0/ */ +Cu.import("resource://services-sync/main.js"); +Cu.import("resource://services-sync/util.js"); + +// Simple test for block/unblock. 
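// What the assertions below rely on: blockSync() persists a ms-epoch
// timestamp in the "scheduler.blocked-until" pref (choosing a default when no
// explicit `until` is passed), unblockSync() clears that pref, and isBlocked
// compares it against the clock. A sketch of such a getter, inferred from
// this test's expectations rather than from the scheduler's actual source:
//
//   get isBlocked() {
//     let until = Svc.Prefs.get("scheduler.blocked-until");
//     if (until === undefined) {
//       return false;           // Never blocked, or already unblocked.
//     }
//     if (until <= Date.now()) {
//       // The block expired; drop the pref so it doesn't linger.
//       Svc.Prefs.reset("scheduler.blocked-until");
//       return false;
//     }
//     return true;
//   }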
+add_task(function *() { + Assert.ok(!Weave.Service.scheduler.isBlocked, "sync is not blocked.") + Assert.ok(!Svc.Prefs.has("scheduler.blocked-until"), "have no blocked pref"); + Weave.Service.scheduler.blockSync(); + + Assert.ok(Weave.Service.scheduler.isBlocked, "sync is blocked.") + Assert.ok(Svc.Prefs.has("scheduler.blocked-until"), "have the blocked pref"); + + Weave.Service.scheduler.unblockSync(); + Assert.ok(!Weave.Service.scheduler.isBlocked, "sync is not blocked.") + Assert.ok(!Svc.Prefs.has("scheduler.blocked-until"), "have no blocked pref"); + + // now check the "until" functionality. + let until = Date.now() + 1000; + Weave.Service.scheduler.blockSync(until); + Assert.ok(Weave.Service.scheduler.isBlocked, "sync is blocked.") + Assert.ok(Svc.Prefs.has("scheduler.blocked-until"), "have the blocked pref"); + + // wait for 'until' to pass. + yield new Promise((resolve, reject) => { + CommonUtils.namedTimer(resolve, 1000, {}, "timer"); + }); + + // should have automagically unblocked and removed the pref. + Assert.ok(!Weave.Service.scheduler.isBlocked, "sync is not blocked.") + Assert.ok(!Svc.Prefs.has("scheduler.blocked-until"), "have no blocked pref"); +}); + +function run_test() { + run_next_test(); +} diff --git a/services/sync/tests/unit/test_bookmark_duping.js b/services/sync/tests/unit/test_bookmark_duping.js deleted file mode 100644 index 1e6c6ed2e..000000000 --- a/services/sync/tests/unit/test_bookmark_duping.js +++ /dev/null @@ -1,644 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. - http://creativecommons.org/publicdomain/zero/1.0/ */ - -Cu.import("resource://gre/modules/PlacesUtils.jsm"); -Cu.import("resource://services-common/async.js"); -Cu.import("resource://gre/modules/Log.jsm"); -Cu.import("resource://services-sync/engines.js"); -Cu.import("resource://services-sync/engines/bookmarks.js"); -Cu.import("resource://services-sync/service.js"); -Cu.import("resource://services-sync/util.js"); -Cu.import("resource://testing-common/services/sync/utils.js"); -Cu.import("resource://services-sync/bookmark_validator.js"); - - -initTestLogging("Trace"); - -const bms = PlacesUtils.bookmarks; - -Service.engineManager.register(BookmarksEngine); - -const engine = new BookmarksEngine(Service); -const store = engine._store; -store._log.level = Log.Level.Trace; -engine._log.level = Log.Level.Trace; - -function promiseOneObserver(topic) { - return new Promise((resolve, reject) => { - let observer = function(subject, topic, data) { - Services.obs.removeObserver(observer, topic); - resolve({ subject: subject, data: data }); - } - Services.obs.addObserver(observer, topic, false); - }); -} - -function setup() { - let server = serverForUsers({"foo": "password"}, { - meta: {global: {engines: {bookmarks: {version: engine.version, - syncID: engine.syncID}}}}, - bookmarks: {}, - }); - - generateNewKeys(Service.collectionKeys); - - new SyncTestingInfrastructure(server.server); - - let collection = server.user("foo").collection("bookmarks"); - - Svc.Obs.notify("weave:engine:start-tracking"); // We skip usual startup... 
- - return { server, collection }; -} - -function* cleanup(server) { - Svc.Obs.notify("weave:engine:stop-tracking"); - Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", true); - let promiseStartOver = promiseOneObserver("weave:service:start-over:finish"); - Service.startOver(); - yield promiseStartOver; - yield new Promise(resolve => server.stop(resolve)); - yield bms.eraseEverything(); -} - -function getFolderChildrenIDs(folderId) { - let index = 0; - let result = []; - while (true) { - let childId = bms.getIdForItemAt(folderId, index); - if (childId == -1) { - break; - } - result.push(childId); - index++; - } - return result; -} - -function createFolder(parentId, title) { - let id = bms.createFolder(parentId, title, 0); - let guid = store.GUIDForId(id); - return { id, guid }; -} - -function createBookmark(parentId, url, title, index = bms.DEFAULT_INDEX) { - let uri = Utils.makeURI(url); - let id = bms.insertBookmark(parentId, uri, index, title) - let guid = store.GUIDForId(id); - return { id, guid }; -} - -function getServerRecord(collection, id) { - let wbo = collection.get({ full: true, ids: [id] }); - // Whew - lots of json strings inside strings. - return JSON.parse(JSON.parse(JSON.parse(wbo).payload).ciphertext); -} - -function* promiseNoLocalItem(guid) { - // Check there's no item with the specified guid. - let got = yield bms.fetch({ guid }); - ok(!got, `No record remains with GUID ${guid}`); - // and while we are here ensure the places cache doesn't still have it. - yield Assert.rejects(PlacesUtils.promiseItemId(guid)); -} - -function* validate(collection, expectedFailures = []) { - let validator = new BookmarkValidator(); - let records = collection.payloads(); - - let problems = validator.inspectServerRecords(records).problemData; - // all non-zero problems. - let summary = problems.getSummary().filter(prob => prob.count != 0); - - // split into 2 arrays - expected and unexpected. - let isInExpectedFailures = elt => { - for (let i = 0; i < expectedFailures.length; i++) { - if (elt.name == expectedFailures[i].name && elt.count == expectedFailures[i].count) { - return true; - } - } - return false; - } - let expected = []; - let unexpected = []; - for (let elt of summary) { - (isInExpectedFailures(elt) ? expected : unexpected).push(elt); - } - if (unexpected.length || expected.length != expectedFailures.length) { - do_print("Validation failed:"); - do_print(JSON.stringify(summary)); - // print the entire validator output as it has IDs etc. - do_print(JSON.stringify(problems, undefined, 2)); - // All server records and the entire bookmark tree. - do_print("Server records:\n" + JSON.stringify(collection.payloads(), undefined, 2)); - let tree = yield PlacesUtils.promiseBookmarksTree("", { includeItemIds: true }); - do_print("Local bookmark tree:\n" + JSON.stringify(tree, undefined, 2)); - ok(false); - } -} - -add_task(function* test_dupe_bookmark() { - _("Ensure that a bookmark we consider a dupe is handled correctly."); - - let { server, collection } = this.setup(); - - try { - // The parent folder and one bookmark in it. - let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1"); - let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!"); - - engine.sync(); - - // We've added the bookmark, its parent (folder1) plus "menu", "toolbar", "unfiled", and "mobile". 
- equal(collection.count(), 6); - equal(getFolderChildrenIDs(folder1_id).length, 1); - - // Now create a new incoming record that looks alot like a dupe. - let newGUID = Utils.makeGUID(); - let to_apply = { - id: newGUID, - bmkUri: "http://getfirefox.com/", - type: "bookmark", - title: "Get Firefox!", - parentName: "Folder 1", - parentid: folder1_guid, - }; - - collection.insert(newGUID, encryptPayload(to_apply), Date.now() / 1000 + 10); - _("Syncing so new dupe record is processed"); - engine.lastSync = engine.lastSync - 0.01; - engine.sync(); - - // We should have logically deleted the dupe record. - equal(collection.count(), 7); - ok(getServerRecord(collection, bmk1_guid).deleted); - // and physically removed from the local store. - yield promiseNoLocalItem(bmk1_guid); - // Parent should still only have 1 item. - equal(getFolderChildrenIDs(folder1_id).length, 1); - // The parent record on the server should now reference the new GUID and not the old. - let serverRecord = getServerRecord(collection, folder1_guid); - ok(!serverRecord.children.includes(bmk1_guid)); - ok(serverRecord.children.includes(newGUID)); - - // and a final sanity check - use the validator - yield validate(collection); - } finally { - yield cleanup(server); - } -}); - -add_task(function* test_dupe_reparented_bookmark() { - _("Ensure that a bookmark we consider a dupe from a different parent is handled correctly"); - - let { server, collection } = this.setup(); - - try { - // The parent folder and one bookmark in it. - let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1"); - let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!"); - // Another parent folder *with the same name* - let {id: folder2_id, guid: folder2_guid } = createFolder(bms.toolbarFolder, "Folder 1"); - - do_print(`folder1_guid=${folder1_guid}, folder2_guid=${folder2_guid}, bmk1_guid=${bmk1_guid}`); - - engine.sync(); - - // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile". - equal(collection.count(), 7); - equal(getFolderChildrenIDs(folder1_id).length, 1); - equal(getFolderChildrenIDs(folder2_id).length, 0); - - // Now create a new incoming record that looks alot like a dupe of the - // item in folder1_guid, but with a record that points to folder2_guid. - let newGUID = Utils.makeGUID(); - let to_apply = { - id: newGUID, - bmkUri: "http://getfirefox.com/", - type: "bookmark", - title: "Get Firefox!", - parentName: "Folder 1", - parentid: folder2_guid, - }; - - collection.insert(newGUID, encryptPayload(to_apply), Date.now() / 1000 + 10); - - _("Syncing so new dupe record is processed"); - engine.lastSync = engine.lastSync - 0.01; - engine.sync(); - - // We should have logically deleted the dupe record. - equal(collection.count(), 8); - ok(getServerRecord(collection, bmk1_guid).deleted); - // and physically removed from the local store. - yield promiseNoLocalItem(bmk1_guid); - // The original folder no longer has the item - equal(getFolderChildrenIDs(folder1_id).length, 0); - // But the second dupe folder does. - equal(getFolderChildrenIDs(folder2_id).length, 1); - - // The record for folder1 on the server should reference neither old or new GUIDs. - let serverRecord1 = getServerRecord(collection, folder1_guid); - ok(!serverRecord1.children.includes(bmk1_guid)); - ok(!serverRecord1.children.includes(newGUID)); - - // The record for folder2 on the server should only reference the new new GUID. 
- let serverRecord2 = getServerRecord(collection, folder2_guid); - ok(!serverRecord2.children.includes(bmk1_guid)); - ok(serverRecord2.children.includes(newGUID)); - - // and a final sanity check - use the validator - yield validate(collection); - } finally { - yield cleanup(server); - } -}); - -add_task(function* test_dupe_reparented_locally_changed_bookmark() { - _("Ensure that a bookmark with local changes we consider a dupe from a different parent is handled correctly"); - - let { server, collection } = this.setup(); - - try { - // The parent folder and one bookmark in it. - let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1"); - let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!"); - // Another parent folder *with the same name* - let {id: folder2_id, guid: folder2_guid } = createFolder(bms.toolbarFolder, "Folder 1"); - - do_print(`folder1_guid=${folder1_guid}, folder2_guid=${folder2_guid}, bmk1_guid=${bmk1_guid}`); - - engine.sync(); - - // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile". - equal(collection.count(), 7); - equal(getFolderChildrenIDs(folder1_id).length, 1); - equal(getFolderChildrenIDs(folder2_id).length, 0); - - // Now create a new incoming record that looks alot like a dupe of the - // item in folder1_guid, but with a record that points to folder2_guid. - let newGUID = Utils.makeGUID(); - let to_apply = { - id: newGUID, - bmkUri: "http://getfirefox.com/", - type: "bookmark", - title: "Get Firefox!", - parentName: "Folder 1", - parentid: folder2_guid, - }; - - collection.insert(newGUID, encryptPayload(to_apply), Date.now() / 1000 + 10); - - // Make a change to the bookmark that's a dupe, and set the modification - // time further in the future than the incoming record. This will cause - // us to issue the infamous "DATA LOSS" warning in the logs but cause us - // to *not* apply the incoming record. - engine._tracker.addChangedID(bmk1_guid, Date.now() / 1000 + 60); - - _("Syncing so new dupe record is processed"); - engine.lastSync = engine.lastSync - 0.01; - engine.sync(); - - // We should have logically deleted the dupe record. - equal(collection.count(), 8); - ok(getServerRecord(collection, bmk1_guid).deleted); - // and physically removed from the local store. - yield promiseNoLocalItem(bmk1_guid); - // The original folder still longer has the item - equal(getFolderChildrenIDs(folder1_id).length, 1); - // The second folder does not. - equal(getFolderChildrenIDs(folder2_id).length, 0); - - // The record for folder1 on the server should reference only the GUID. - let serverRecord1 = getServerRecord(collection, folder1_guid); - ok(!serverRecord1.children.includes(bmk1_guid)); - ok(serverRecord1.children.includes(newGUID)); - - // The record for folder2 on the server should reference nothing. - let serverRecord2 = getServerRecord(collection, folder2_guid); - ok(!serverRecord2.children.includes(bmk1_guid)); - ok(!serverRecord2.children.includes(newGUID)); - - // and a final sanity check - use the validator - yield validate(collection); - } finally { - yield cleanup(server); - } -}); - -add_task(function* test_dupe_reparented_to_earlier_appearing_parent_bookmark() { - _("Ensure that a bookmark we consider a dupe from a different parent that " + - "appears in the same sync before the dupe item"); - - let { server, collection } = this.setup(); - - try { - // The parent folder and one bookmark in it. 
- let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1"); - let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!"); - // One more folder we'll use later. - let {id: folder2_id, guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder"); - - do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`); - - engine.sync(); - - // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile". - equal(collection.count(), 7); - equal(getFolderChildrenIDs(folder1_id).length, 1); - - let newGUID = Utils.makeGUID(); - let newParentGUID = Utils.makeGUID(); - - // Have the new parent appear before the dupe item. - collection.insert(newParentGUID, encryptPayload({ - id: newParentGUID, - type: "folder", - title: "Folder 1", - parentName: "A second folder", - parentid: folder2_guid, - children: [newGUID], - tags: [], - }), Date.now() / 1000 + 10); - - // And also the update to "folder 2" that references the new parent. - collection.insert(folder2_guid, encryptPayload({ - id: folder2_guid, - type: "folder", - title: "A second folder", - parentName: "Bookmarks Toolbar", - parentid: "toolbar", - children: [newParentGUID], - tags: [], - }), Date.now() / 1000 + 10); - - // Now create a new incoming record that looks alot like a dupe of the - // item in folder1_guid, with a record that points to a parent with the - // same name which appeared earlier in this sync. - collection.insert(newGUID, encryptPayload({ - id: newGUID, - bmkUri: "http://getfirefox.com/", - type: "bookmark", - title: "Get Firefox!", - parentName: "Folder 1", - parentid: newParentGUID, - tags: [], - }), Date.now() / 1000 + 10); - - - _("Syncing so new records are processed."); - engine.lastSync = engine.lastSync - 0.01; - engine.sync(); - - // Everything should be parented correctly. - equal(getFolderChildrenIDs(folder1_id).length, 0); - let newParentID = store.idForGUID(newParentGUID); - let newID = store.idForGUID(newGUID); - deepEqual(getFolderChildrenIDs(newParentID), [newID]); - - // Make sure the validator thinks everything is hunky-dory. - yield validate(collection); - } finally { - yield cleanup(server); - } -}); - -add_task(function* test_dupe_reparented_to_later_appearing_parent_bookmark() { - _("Ensure that a bookmark we consider a dupe from a different parent that " + - "doesn't exist locally as we process the child, but does appear in the same sync"); - - let { server, collection } = this.setup(); - - try { - // The parent folder and one bookmark in it. - let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1"); - let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!"); - // One more folder we'll use later. - let {id: folder2_id, guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder"); - - do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`); - - engine.sync(); - - // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile". - equal(collection.count(), 7); - equal(getFolderChildrenIDs(folder1_id).length, 1); - - // Now create a new incoming record that looks alot like a dupe of the - // item in folder1_guid, but with a record that points to a parent with the - // same name, but a non-existing local ID. 
- let newGUID = Utils.makeGUID(); - let newParentGUID = Utils.makeGUID(); - - collection.insert(newGUID, encryptPayload({ - id: newGUID, - bmkUri: "http://getfirefox.com/", - type: "bookmark", - title: "Get Firefox!", - parentName: "Folder 1", - parentid: newParentGUID, - tags: [], - }), Date.now() / 1000 + 10); - - // Now have the parent appear after (so when the record above is processed - // this is still unknown.) - collection.insert(newParentGUID, encryptPayload({ - id: newParentGUID, - type: "folder", - title: "Folder 1", - parentName: "A second folder", - parentid: folder2_guid, - children: [newGUID], - tags: [], - }), Date.now() / 1000 + 10); - // And also the update to "folder 2" that references the new parent. - collection.insert(folder2_guid, encryptPayload({ - id: folder2_guid, - type: "folder", - title: "A second folder", - parentName: "Bookmarks Toolbar", - parentid: "toolbar", - children: [newParentGUID], - tags: [], - }), Date.now() / 1000 + 10); - - _("Syncing so out-of-order records are processed."); - engine.lastSync = engine.lastSync - 0.01; - engine.sync(); - - // The intended parent did end up existing, so it should be parented - // correctly after de-duplication. - equal(getFolderChildrenIDs(folder1_id).length, 0); - let newParentID = store.idForGUID(newParentGUID); - let newID = store.idForGUID(newGUID); - deepEqual(getFolderChildrenIDs(newParentID), [newID]); - - // Make sure the validator thinks everything is hunky-dory. - yield validate(collection); - } finally { - yield cleanup(server); - } -}); - -add_task(function* test_dupe_reparented_to_future_arriving_parent_bookmark() { - _("Ensure that a bookmark we consider a dupe from a different parent that " + - "doesn't exist locally and doesn't appear in this Sync is handled correctly"); - - let { server, collection } = this.setup(); - - try { - // The parent folder and one bookmark in it. - let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1"); - let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!"); - // One more folder we'll use later. - let {id: folder2_id, guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder"); - - do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`); - - engine.sync(); - - // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile". - equal(collection.count(), 7); - equal(getFolderChildrenIDs(folder1_id).length, 1); - - // Now create a new incoming record that looks alot like a dupe of the - // item in folder1_guid, but with a record that points to a parent with the - // same name, but a non-existing local ID. - let newGUID = Utils.makeGUID(); - let newParentGUID = Utils.makeGUID(); - - collection.insert(newGUID, encryptPayload({ - id: newGUID, - bmkUri: "http://getfirefox.com/", - type: "bookmark", - title: "Get Firefox!", - parentName: "Folder 1", - parentid: newParentGUID, - tags: [], - }), Date.now() / 1000 + 10); - - _("Syncing so new dupe record is processed"); - engine.lastSync = engine.lastSync - 0.01; - engine.sync(); - - // We should have logically deleted the dupe record. - equal(collection.count(), 8); - ok(getServerRecord(collection, bmk1_guid).deleted); - // and physically removed from the local store. 
- yield promiseNoLocalItem(bmk1_guid);
- // The intended parent doesn't exist, so it remains in the original folder.
- equal(getFolderChildrenIDs(folder1_id).length, 1);
-
- // The record for folder1 on the server should reference the new GUID.
- let serverRecord1 = getServerRecord(collection, folder1_guid);
- ok(!serverRecord1.children.includes(bmk1_guid));
- ok(serverRecord1.children.includes(newGUID));
-
- // As the incoming parent is missing, the item should have been annotated
- // with that missing parent.
- equal(PlacesUtils.annotations.getItemAnnotation(store.idForGUID(newGUID), "sync/parent"),
- newParentGUID);
-
- // Check the validator. Sadly, this is known to cause a mismatch between
- // the server and client views of the tree.
- let expected = [
- // We haven't fixed the incoming record that referenced the missing parent.
- { name: "orphans", count: 1 },
- ];
- yield validate(collection, expected);
-
- // Now have the parent magically appear in a later sync - but
- // it appears as being in a different parent from our existing "Folder 1",
- // so the folder itself isn't duped.
- collection.insert(newParentGUID, encryptPayload({
- id: newParentGUID,
- type: "folder",
- title: "Folder 1",
- parentName: "A second folder",
- parentid: folder2_guid,
- children: [newGUID],
- tags: [],
- }), Date.now() / 1000 + 10);
- // We also queue an update to "folder 2" that references the new parent.
- collection.insert(folder2_guid, encryptPayload({
- id: folder2_guid,
- type: "folder",
- title: "A second folder",
- parentName: "Bookmarks Toolbar",
- parentid: "toolbar",
- children: [newParentGUID],
- tags: [],
- }), Date.now() / 1000 + 10);
-
- _("Syncing so missing parent appears");
- engine.lastSync = engine.lastSync - 0.01;
- engine.sync();
-
- // The intended parent now does exist, so it should have been reparented.
- equal(getFolderChildrenIDs(folder1_id).length, 0);
- let newParentID = store.idForGUID(newParentGUID);
- let newID = store.idForGUID(newGUID);
- deepEqual(getFolderChildrenIDs(newParentID), [newID]);
-
- // Validation now has different errors :(
- expected = [
- // The validator reports multipleParents because:
- // * The incoming record newParentGUID still (and correctly) references
- // newGUID as a child.
- // * Our original Folder1 was updated to include newGUID when it
- // originally de-duped and couldn't find the parent.
- // * When the parent *did* eventually arrive we used the parent annotation
- // to correctly reparent - but that reparenting process does not change
- // the server record.
- // Hence, newGUID is a child of both those server records :(
- { name: "multipleParents", count: 1 },
- ];
- yield validate(collection, expected);
-
- } finally {
- yield cleanup(server);
- }
-});
-
-add_task(function* test_dupe_empty_folder() {
- _("Ensure that an empty folder we consider a dupe is handled correctly.");
- // Empty folders aren't particularly interesting in practice (as that seems
- // an edge-case) but duping folders with items is broken - bug 1293163.
- let { server, collection } = this.setup();
-
- try {
- // The folder we will end up duping away.
- let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
-
- engine.sync();
-
- // We've added 1 folder, "menu", "toolbar", "unfiled", and "mobile".
- equal(collection.count(), 5);
-
- // Now create a new incoming record that looks a lot like a dupe of "Folder 1".
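// For reference: a minimal sketch of the "sync/parent" bookkeeping the
// reparenting assertions above rely on, using nsIAnnotationService calls
// available to these tests. The function names are illustrative only:
const PARENT_ANNO = "sync/parent";
function sketchNoteMissingParent(itemId, parentGUID) {
  // When an incoming item names a parent we don't have yet, the item is
  // parked (e.g. in unfiled) and annotated with the parent it really wants.
  PlacesUtils.annotations.setItemAnnotation(
    itemId, PARENT_ANNO, parentGUID, 0,
    PlacesUtils.annotations.EXPIRE_NEVER);
}
function sketchFindOrphansFor(parentGUID) {
  // When that parent finally arrives, its orphans can be looked up and moved.
  // Note this local move doesn't rewrite the server-side records, which is
  // why the validator reports multipleParents above.
  let annos = PlacesUtils.annotations;
  return annos.getItemsWithAnnotation(PARENT_ANNO, {}).filter(
    id => annos.getItemAnnotation(id, PARENT_ANNO) == parentGUID);
}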
- let newFolderGUID = Utils.makeGUID(); - collection.insert(newFolderGUID, encryptPayload({ - id: newFolderGUID, - type: "folder", - title: "Folder 1", - parentName: "Bookmarks Toolbar", - parentid: "toolbar", - children: [], - }), Date.now() / 1000 + 10); - - _("Syncing so new dupe records are processed"); - engine.lastSync = engine.lastSync - 0.01; - engine.sync(); - - yield validate(collection); - - // Collection now has one additional record - the logically deleted dupe. - equal(collection.count(), 6); - // original folder should be logically deleted. - ok(getServerRecord(collection, folder1_guid).deleted); - yield promiseNoLocalItem(folder1_guid); - } finally { - yield cleanup(server); - } -}); -// XXX - TODO - folders with children. Bug 1293163 diff --git a/services/sync/tests/unit/test_bookmark_engine.js b/services/sync/tests/unit/test_bookmark_engine.js index 9de6c5c0d..bd4c740cb 100644 --- a/services/sync/tests/unit/test_bookmark_engine.js +++ b/services/sync/tests/unit/test_bookmark_engine.js @@ -2,10 +2,9 @@ http://creativecommons.org/publicdomain/zero/1.0/ */ Cu.import("resource://gre/modules/PlacesUtils.jsm"); -Cu.import("resource://gre/modules/PlacesSyncUtils.jsm"); Cu.import("resource://gre/modules/BookmarkJSONUtils.jsm"); +Cu.import("resource://services-common/async.js"); Cu.import("resource://gre/modules/Log.jsm"); -Cu.import("resource://services-sync/constants.js"); Cu.import("resource://services-sync/engines.js"); Cu.import("resource://services-sync/engines/bookmarks.js"); Cu.import("resource://services-sync/service.js"); @@ -13,168 +12,9 @@ Cu.import("resource://services-sync/util.js"); Cu.import("resource://testing-common/services/sync/utils.js"); Cu.import("resource://gre/modules/Promise.jsm"); -initTestLogging("Trace"); - Service.engineManager.register(BookmarksEngine); -function* assertChildGuids(folderGuid, expectedChildGuids, message) { - let tree = yield PlacesUtils.promiseBookmarksTree(folderGuid); - let childGuids = tree.children.map(child => child.guid); - deepEqual(childGuids, expectedChildGuids, message); -} - -add_task(function* test_change_during_sync() { - _("Ensure that we track changes made during a sync."); - - let engine = new BookmarksEngine(Service); - let store = engine._store; - let tracker = engine._tracker; - let server = serverForFoo(engine); - new SyncTestingInfrastructure(server.server); - - let collection = server.user("foo").collection("bookmarks"); - - let bz_id = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarksMenuFolderId, Utils.makeURI("https://bugzilla.mozilla.org/"), - PlacesUtils.bookmarks.DEFAULT_INDEX, "Bugzilla"); - let bz_guid = yield PlacesUtils.promiseItemGuid(bz_id); - _(`Bugzilla GUID: ${bz_guid}`); - - Svc.Obs.notify("weave:engine:start-tracking"); - - try { - let folder1_id = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0); - let folder1_guid = store.GUIDForId(folder1_id); - _(`Folder GUID: ${folder1_guid}`); - - let bmk1_id = PlacesUtils.bookmarks.insertBookmark( - folder1_id, Utils.makeURI("http://getthunderbird.com/"), - PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!"); - let bmk1_guid = store.GUIDForId(bmk1_id); - _(`Thunderbird GUID: ${bmk1_guid}`); - - // Sync is synchronous, so, to simulate a bookmark change made during a - // sync, we create a server record that adds a bookmark as a side effect. - let bmk2_guid = "get-firefox1"; // New child of Folder 1, created remotely. - let bmk3_id = -1; // New child of Folder 1, created locally during sync. 
- let folder2_guid = "folder2-1111"; // New folder, created remotely. - let tagQuery_guid = "tag-query111"; // New tag query child of Folder 2, created remotely. - let bmk4_guid = "example-org1"; // New tagged child of Folder 2, created remotely. - { - // An existing record changed on the server that should not trigger - // another sync when applied. - let bzBmk = new Bookmark("bookmarks", bz_guid); - bzBmk.bmkUri = "https://bugzilla.mozilla.org/"; - bzBmk.description = "New description"; - bzBmk.title = "Bugzilla"; - bzBmk.tags = ["new", "tags"]; - bzBmk.parentName = "Bookmarks Toolbar"; - bzBmk.parentid = "toolbar"; - collection.insert(bz_guid, encryptPayload(bzBmk.cleartext)); - - let remoteFolder = new BookmarkFolder("bookmarks", folder2_guid); - remoteFolder.title = "Folder 2"; - remoteFolder.children = [bmk4_guid, tagQuery_guid]; - remoteFolder.parentName = "Bookmarks Menu"; - remoteFolder.parentid = "menu"; - collection.insert(folder2_guid, encryptPayload(remoteFolder.cleartext)); - - let localFxBmk = new Bookmark("bookmarks", bmk2_guid); - localFxBmk.bmkUri = "http://getfirefox.com/"; - localFxBmk.description = "Firefox is awesome."; - localFxBmk.title = "Get Firefox!"; - localFxBmk.tags = ["firefox", "awesome", "browser"]; - localFxBmk.keyword = "awesome"; - localFxBmk.loadInSidebar = false; - localFxBmk.parentName = "Folder 1"; - localFxBmk.parentid = folder1_guid; - let remoteFxBmk = collection.insert(bmk2_guid, encryptPayload(localFxBmk.cleartext)); - remoteFxBmk.get = function get() { - _("Inserting bookmark into local store"); - bmk3_id = PlacesUtils.bookmarks.insertBookmark( - folder1_id, Utils.makeURI("https://mozilla.org/"), - PlacesUtils.bookmarks.DEFAULT_INDEX, "Mozilla"); - - return ServerWBO.prototype.get.apply(this, arguments); - }; - - // A tag query referencing a nonexistent tag folder, which we should - // create locally when applying the record. - let localTagQuery = new BookmarkQuery("bookmarks", tagQuery_guid); - localTagQuery.bmkUri = "place:type=7&folder=999"; - localTagQuery.title = "Taggy tags"; - localTagQuery.folderName = "taggy"; - localTagQuery.parentName = "Folder 2"; - localTagQuery.parentid = folder2_guid; - collection.insert(tagQuery_guid, encryptPayload(localTagQuery.cleartext)); - - // A bookmark that should appear in the results for the tag query. 
- let localTaggedBmk = new Bookmark("bookmarks", bmk4_guid); - localTaggedBmk.bmkUri = "https://example.org"; - localTaggedBmk.title = "Tagged bookmark"; - localTaggedBmk.tags = ["taggy"]; - localTaggedBmk.parentName = "Folder 2"; - localTaggedBmk.parentid = folder2_guid; - collection.insert(bmk4_guid, encryptPayload(localTaggedBmk.cleartext)); - } - - yield* assertChildGuids(folder1_guid, [bmk1_guid], "Folder should have 1 child before first sync"); - - _("Perform first sync"); - { - let changes = engine.pullNewChanges(); - deepEqual(changes.ids().sort(), [folder1_guid, bmk1_guid, "toolbar"].sort(), - "Should track bookmark and folder created before first sync"); - yield sync_engine_and_validate_telem(engine, false); - } - - let bmk2_id = store.idForGUID(bmk2_guid); - let bmk3_guid = store.GUIDForId(bmk3_id); - _(`Mozilla GUID: ${bmk3_guid}`); - { - equal(store.GUIDForId(bmk2_id), bmk2_guid, - "Remote bookmark should be applied during first sync"); - ok(bmk3_id > -1, - "Bookmark created during first sync should exist locally"); - ok(!collection.wbo(bmk3_guid), - "Bookmark created during first sync shouldn't be uploaded yet"); - - yield* assertChildGuids(folder1_guid, [bmk1_guid, bmk3_guid, bmk2_guid], - "Folder 1 should have 3 children after first sync"); - yield* assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid], - "Folder 2 should have 2 children after first sync"); - let taggedURIs = PlacesUtils.tagging.getURIsForTag("taggy"); - equal(taggedURIs.length, 1, "Should have 1 tagged URI"); - equal(taggedURIs[0].spec, "https://example.org/", - "Synced tagged bookmark should appear in tagged URI list"); - } - - _("Perform second sync"); - { - let changes = engine.pullNewChanges(); - deepEqual(changes.ids().sort(), [bmk3_guid, folder1_guid].sort(), - "Should track bookmark added during last sync and its parent"); - yield sync_engine_and_validate_telem(engine, false); - - ok(collection.wbo(bmk3_guid), - "Bookmark created during first sync should be uploaded during second sync"); - - yield* assertChildGuids(folder1_guid, [bmk1_guid, bmk3_guid, bmk2_guid], - "Folder 1 should have same children after second sync"); - yield* assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid], - "Folder 2 should have same children after second sync"); - } - } finally { - store.wipe(); - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - yield new Promise(resolve => server.stop(resolve)); - Svc.Obs.notify("weave:engine:stop-tracking"); - } -}); - -add_task(function* bad_record_allIDs() { +add_test(function bad_record_allIDs() { let server = new SyncServer(); server.start(); let syncTesting = new SyncTestingInfrastructure(server.server); @@ -192,6 +32,9 @@ add_task(function* bad_record_allIDs() { _("Record is " + badRecordID); _("Type: " + PlacesUtils.bookmarks.getItemType(badRecordID)); + _("Fetching children."); + store._getChildren("toolbar", {}); + _("Fetching all IDs."); let all = store.getAllIDs(); @@ -201,7 +44,49 @@ add_task(function* bad_record_allIDs() { _("Clean up."); PlacesUtils.bookmarks.removeItem(badRecordID); - yield new Promise(r => server.stop(r)); + server.stop(run_next_test); +}); + +add_test(function test_ID_caching() { + let server = new SyncServer(); + server.start(); + let syncTesting = new SyncTestingInfrastructure(server.server); + + _("Ensure that Places IDs are not cached."); + let engine = new BookmarksEngine(Service); + let store = engine._store; + _("All IDs: " + JSON.stringify(store.getAllIDs())); + + let mobileID = store.idForGUID("mobile"); + _("Change 
the GUID for that item, and drop the mobile anno."); + store._setGUID(mobileID, "abcdefghijkl"); + PlacesUtils.annotations.removeItemAnnotation(mobileID, "mobile/bookmarksRoot"); + + let err; + let newMobileID; + + // With noCreate, we don't find an entry. + try { + newMobileID = store.idForGUID("mobile", true); + _("New mobile ID: " + newMobileID); + } catch (ex) { + err = ex; + _("Error: " + Utils.exceptionStr(err)); + } + + do_check_true(!err); + + // With !noCreate, lookup works, and it's different. + newMobileID = store.idForGUID("mobile", false); + _("New mobile ID: " + newMobileID); + do_check_true(!!newMobileID); + do_check_neq(newMobileID, mobileID); + + // And it's repeatable, even with creation enabled. + do_check_eq(newMobileID, store.idForGUID("mobile", false)); + + do_check_eq(store.GUIDForId(mobileID), "abcdefghijkl"); + server.stop(run_next_test); }); function serverForFoo(engine) { @@ -212,7 +97,7 @@ function serverForFoo(engine) { }); } -add_task(function* test_processIncoming_error_orderChildren() { +add_test(function test_processIncoming_error_orderChildren() { _("Ensure that _orderChildren() is called even when _processIncoming() throws an error."); let engine = new BookmarksEngine(Service); @@ -259,11 +144,11 @@ add_task(function* test_processIncoming_error_orderChildren() { let error; try { - yield sync_engine_and_validate_telem(engine, true) + engine.sync(); } catch(ex) { error = ex; } - ok(!!error); + do_check_true(!!error); // Verify that the bookmark order has been applied. let new_children = store.createRecord(folder1_guid).children; @@ -278,11 +163,11 @@ add_task(function* test_processIncoming_error_orderChildren() { store.wipe(); Svc.Prefs.resetBranch(""); Service.recordManager.clearCache(); - yield new Promise(resolve => server.stop(resolve)); + server.stop(run_next_test); } }); -add_task(function* test_restorePromptsReupload() { +add_task(function test_restorePromptsReupload() { _("Ensure that restoring from a backup will reupload all records."); let engine = new BookmarksEngine(Service); let store = engine._store; @@ -319,7 +204,8 @@ add_task(function* test_restorePromptsReupload() { backupFile.append("t_b_e_" + Date.now() + ".json"); _("Backing up to file " + backupFile.path); - yield BookmarkJSONUtils.exportToFile(backupFile.path); + backupFile.create(Ci.nsILocalFile.NORMAL_FILE_TYPE, 0600); + yield BookmarkJSONUtils.exportToFile(backupFile); _("Create a different record and sync."); let bmk2_id = PlacesUtils.bookmarks.insertBookmark( @@ -331,17 +217,17 @@ add_task(function* test_restorePromptsReupload() { let error; try { - yield sync_engine_and_validate_telem(engine, false); + engine.sync(); } catch(ex) { error = ex; - _("Got error: " + Log.exceptionStr(ex)); + _("Got error: " + Utils.exceptionStr(ex)); } do_check_true(!error); _("Verify that there's only one bookmark on the server, and it's Thunderbird."); // Of course, there's also the Bookmarks Toolbar and Bookmarks Menu... let wbos = collection.keys(function (id) { - return ["menu", "toolbar", "mobile", "unfiled", folder1_guid].indexOf(id) == -1; + return ["menu", "toolbar", "mobile", folder1_guid].indexOf(id) == -1; }); do_check_eq(wbos.length, 1); do_check_eq(wbos[0], bmk2_guid); @@ -371,14 +257,14 @@ add_task(function* test_restorePromptsReupload() { do_check_true(found); _("Have the correct number of IDs locally, too."); - do_check_eq(count, ["menu", "toolbar", "mobile", "unfiled", folder1_id, bmk1_id].length); + do_check_eq(count, ["menu", "toolbar", folder1_id, bmk1_id].length); _("Sync again. 
This'll wipe bookmarks from the server."); try { - yield sync_engine_and_validate_telem(engine, false); + engine.sync(); } catch(ex) { error = ex; - _("Got error: " + Log.exceptionStr(ex)); + _("Got error: " + Utils.exceptionStr(ex)); } do_check_true(!error); @@ -391,9 +277,7 @@ add_task(function* test_restorePromptsReupload() { let folderWBOs = payloads.filter(function (wbo) { return ((wbo.type == "folder") && (wbo.id != "menu") && - (wbo.id != "toolbar") && - (wbo.id != "unfiled") && - (wbo.id != "mobile")); + (wbo.id != "toolbar")); }); do_check_eq(bookmarkWBOs.length, 1); @@ -420,12 +304,10 @@ function FakeRecord(constructor, r) { for (let x in r) { this[x] = r[x]; } - // Borrow the constructor's conversion functions. - this.toSyncBookmark = constructor.prototype.toSyncBookmark; } // Bug 632287. -add_task(function* test_mismatched_types() { +add_test(function test_mismatched_types() { _("Ensure that handling a record that changes type causes deletion " + "then re-adding."); @@ -437,7 +319,6 @@ add_task(function* test_mismatched_types() { "description":null, "parentid": "toolbar" }; - oldRecord.cleartext = oldRecord; let newRecord = { "id": "l1nZZXfB8nC7", @@ -453,7 +334,6 @@ add_task(function* test_mismatched_types() { "oT74WwV8_j4P", "IztsItWVSo3-"], "parentid": "toolbar" }; - newRecord.cleartext = newRecord; let engine = new BookmarksEngine(Service); let store = engine._store; @@ -466,8 +346,8 @@ add_task(function* test_mismatched_types() { let bms = PlacesUtils.bookmarks; let oldR = new FakeRecord(BookmarkFolder, oldRecord); let newR = new FakeRecord(Livemark, newRecord); - oldR.parentid = PlacesUtils.bookmarks.toolbarGuid; - newR.parentid = PlacesUtils.bookmarks.toolbarGuid; + oldR._parent = PlacesUtils.bookmarks.toolbarFolder; + newR._parent = PlacesUtils.bookmarks.toolbarFolder; store.applyIncoming(oldR); _("Applied old. It's a folder."); @@ -490,11 +370,11 @@ add_task(function* test_mismatched_types() { store.wipe(); Svc.Prefs.resetBranch(""); Service.recordManager.clearCache(); - yield new Promise(r => server.stop(r)); + server.stop(run_next_test); } }); -add_task(function* test_bookmark_guidMap_fail() { +add_test(function test_bookmark_guidMap_fail() { _("Ensure that failures building the GUID map cause early death."); let engine = new BookmarksEngine(Service); @@ -514,9 +394,7 @@ add_task(function* test_bookmark_guidMap_fail() { engine.lastSync = 1; // So we don't back up. // Make building the GUID map fail. - - let pbt = PlacesUtils.promiseBookmarksTree; - PlacesUtils.promiseBookmarksTree = function() { return Promise.reject("Nooo"); }; + store.getAllIDs = function () { throw "Nooo"; }; // Ensure that we throw when accessing _guidMap. 
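// (The guid map is built lazily, so the failure injected above doesn't
// surface until something touches _guidMap; _syncStartup below does.)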
engine._syncStartup(); @@ -542,11 +420,26 @@ add_task(function* test_bookmark_guidMap_fail() { } do_check_eq(err, "Nooo"); - PlacesUtils.promiseBookmarksTree = pbt; - yield new Promise(r => server.stop(r)); + server.stop(run_next_test); +}); + +add_test(function test_bookmark_is_taggable() { + let engine = new BookmarksEngine(Service); + let store = engine._store; + + do_check_true(store.isTaggable("bookmark")); + do_check_true(store.isTaggable("microsummary")); + do_check_true(store.isTaggable("query")); + do_check_false(store.isTaggable("folder")); + do_check_false(store.isTaggable("livemark")); + do_check_false(store.isTaggable(null)); + do_check_false(store.isTaggable(undefined)); + do_check_false(store.isTaggable("")); + + run_next_test(); }); -add_task(function* test_bookmark_tag_but_no_uri() { +add_test(function test_bookmark_tag_but_no_uri() { _("Ensure that a bookmark record with tags, but no URI, doesn't throw an exception."); let engine = new BookmarksEngine(Service); @@ -555,43 +448,30 @@ add_task(function* test_bookmark_tag_but_no_uri() { // We're simply checking that no exception is thrown, so // no actual checks in this test. - yield PlacesSyncUtils.bookmarks.insert({ - kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK, - syncId: Utils.makeGUID(), - parentSyncId: "toolbar", - url: "http://example.com", - tags: ["foo"], - }); - yield PlacesSyncUtils.bookmarks.insert({ - kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK, - syncId: Utils.makeGUID(), - parentSyncId: "toolbar", - url: "http://example.org", - tags: null, - }); - yield PlacesSyncUtils.bookmarks.insert({ - kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK, - syncId: Utils.makeGUID(), - url: "about:fake", - parentSyncId: "toolbar", - tags: null, - }); + store._tagURI(null, ["foo"]); + store._tagURI(null, null); + store._tagURI(Utils.makeURI("about:fake"), null); - let record = new FakeRecord(BookmarkFolder, { - parentid: "toolbar", + let record = { + _parent: PlacesUtils.bookmarks.toolbarFolder, id: Utils.makeGUID(), description: "", tags: ["foo"], title: "Taggy tag", type: "folder" - }); + }; + + // Because update() walks the cleartext. 
+ record.cleartext = record; store.create(record); record.tags = ["bar"]; store.update(record); + + run_next_test(); }); -add_task(function* test_misreconciled_root() { +add_test(function test_misreconciled_root() { _("Ensure that we don't reconcile an arbitrary record with a root."); let engine = new BookmarksEngine(Service); @@ -636,9 +516,6 @@ add_task(function* test_misreconciled_root() { _("Applying record."); engine._processIncoming({ - getBatched() { - return this.get(); - }, get: function () { this.recordHandler(encrypted); return {success: true} @@ -655,7 +532,7 @@ add_task(function* test_misreconciled_root() { do_check_eq(parentGUIDBefore, parentGUIDAfter); do_check_eq(parentIDBefore, parentIDAfter); - yield new Promise(r => server.stop(r)); + server.stop(run_next_test); }); function run_test() { diff --git a/services/sync/tests/unit/test_bookmark_invalid.js b/services/sync/tests/unit/test_bookmark_invalid.js deleted file mode 100644 index af476a7f9..000000000 --- a/services/sync/tests/unit/test_bookmark_invalid.js +++ /dev/null @@ -1,63 +0,0 @@ -Cu.import("resource://gre/modules/PlacesUtils.jsm"); -Cu.import("resource://gre/modules/Log.jsm"); -Cu.import("resource://gre/modules/Task.jsm"); -Cu.import("resource://services-sync/engines.js"); -Cu.import("resource://services-sync/engines/bookmarks.js"); -Cu.import("resource://services-sync/service.js"); -Cu.import("resource://services-sync/util.js"); - -Service.engineManager.register(BookmarksEngine); - -var engine = Service.engineManager.get("bookmarks"); -var store = engine._store; -var tracker = engine._tracker; - -add_task(function* test_ignore_invalid_uri() { - _("Ensure that we don't die with invalid bookmarks."); - - // First create a valid bookmark. - let bmid = PlacesUtils.bookmarks.insertBookmark(PlacesUtils.unfiledBookmarksFolderId, - Services.io.newURI("http://example.com/", null, null), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "the title"); - - // Now update moz_places with an invalid url. - yield PlacesUtils.withConnectionWrapper("test_ignore_invalid_uri", Task.async(function* (db) { - yield db.execute( - `UPDATE moz_places SET url = :url, url_hash = hash(:url) - WHERE id = (SELECT b.fk FROM moz_bookmarks b - WHERE b.id = :id LIMIT 1)`, - { id: bmid, url: "<invalid url>" }); - })); - - // Ensure that this doesn't throw even though the DB is now in a bad state (a - // bookmark has an illegal url). - engine._buildGUIDMap(); -}); - -add_task(function* test_ignore_missing_uri() { - _("Ensure that we don't die with a bookmark referencing an invalid bookmark id."); - - // First create a valid bookmark. - let bmid = PlacesUtils.bookmarks.insertBookmark(PlacesUtils.unfiledBookmarksFolderId, - Services.io.newURI("http://example.com/", null, null), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "the title"); - - // Now update moz_bookmarks to reference a non-existing places ID - yield PlacesUtils.withConnectionWrapper("test_ignore_missing_uri", Task.async(function* (db) { - yield db.execute( - `UPDATE moz_bookmarks SET fk = 999999 - WHERE id = :id` - , { id: bmid }); - })); - - // Ensure that this doesn't throw even though the DB is now in a bad state (a - // bookmark has an illegal url). 
- engine._buildGUIDMap(); -}); - -function run_test() { - initTestLogging('Trace'); - run_next_test(); -} diff --git a/services/sync/tests/unit/test_bookmark_legacy_microsummaries_support.js b/services/sync/tests/unit/test_bookmark_legacy_microsummaries_support.js index 207372ed6..a7e3a4647 100644 --- a/services/sync/tests/unit/test_bookmark_legacy_microsummaries_support.js +++ b/services/sync/tests/unit/test_bookmark_legacy_microsummaries_support.js @@ -85,12 +85,12 @@ function run_test() { do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(id), null); do_check_throws( - () => PlacesUtils.annotations.getItemAnnotation(id, GENERATORURI_ANNO), + function () PlacesUtils.annotations.getItemAnnotation(id, GENERATORURI_ANNO), Cr.NS_ERROR_NOT_AVAILABLE ); do_check_throws( - () => PlacesUtils.annotations.getItemAnnotation(id, STATICTITLE_ANNO), + function () PlacesUtils.annotations.getItemAnnotation(id, STATICTITLE_ANNO), Cr.NS_ERROR_NOT_AVAILABLE ); diff --git a/services/sync/tests/unit/test_bookmark_livemarks.js b/services/sync/tests/unit/test_bookmark_livemarks.js index 8adde76d8..d7cda091b 100644 --- a/services/sync/tests/unit/test_bookmark_livemarks.js +++ b/services/sync/tests/unit/test_bookmark_livemarks.js @@ -12,11 +12,11 @@ Cu.import("resource://testing-common/services/common/utils.js"); const DESCRIPTION_ANNO = "bookmarkProperties/description"; -var engine = Service.engineManager.get("bookmarks"); -var store = engine._store; +let engine = Service.engineManager.get("bookmarks"); +let store = engine._store; // Record borrowed from Bug 631361. -var record631361 = { +let record631361 = { id: "M5bwUKK8hPyF", index: 150, modified: 1296768176.49, @@ -103,11 +103,20 @@ add_test(function test_livemark_descriptions() { add_test(function test_livemark_invalid() { _("Livemarks considered invalid by nsLivemarkService are skipped."); + _("Parent is 0, which is invalid. Will be set to unfiled."); + let noParentRec = makeLivemark(record631361.payload, true); + noParentRec._parent = 0; + store.create(noParentRec); + let recID = store.idForGUID(noParentRec.id, true); + do_check_true(recID > 0); + do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(recID), PlacesUtils.bookmarks.unfiledBookmarksFolder); + _("Parent is unknown. Will be set to unfiled."); let lateParentRec = makeLivemark(record631361.payload, true); let parentGUID = Utils.makeGUID(); lateParentRec.parentid = parentGUID; - do_check_eq(-1, store.idForGUID(parentGUID)); + lateParentRec._parent = store.idForGUID(parentGUID); // Usually done by applyIncoming. + do_check_eq(-1, lateParentRec._parent); store.create(lateParentRec); recID = store.idForGUID(lateParentRec.id, true); @@ -124,7 +133,7 @@ add_test(function test_livemark_invalid() { _("Parent is a Livemark. Will be skipped."); let lmParentRec = makeLivemark(record631361.payload, true); - lmParentRec.parentid = store.GUIDForId(recID); + lmParentRec._parent = recID; store.create(lmParentRec); // No exception, but no creation occurs. 
do_check_eq(-1, store.idForGUID(lmParentRec.id, true)); diff --git a/services/sync/tests/unit/test_bookmark_order.js b/services/sync/tests/unit/test_bookmark_order.js index 7625a813f..56806dba0 100644 --- a/services/sync/tests/unit/test_bookmark_order.js +++ b/services/sync/tests/unit/test_bookmark_order.js @@ -2,61 +2,53 @@ http://creativecommons.org/publicdomain/zero/1.0/ */ _("Making sure after processing incoming bookmarks, they show up in the right order"); -Cu.import("resource://gre/modules/PlacesUtils.jsm"); -Cu.import("resource://gre/modules/Task.jsm"); +Cu.import("resource://gre/modules/PlacesUtils.jsm", this); Cu.import("resource://services-sync/engines/bookmarks.js"); Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); -var check = Task.async(function* (expected, message) { - let root = yield PlacesUtils.promiseBookmarksTree(); +function getBookmarks(folderId) { + let bookmarks = []; + + let pos = 0; + while (true) { + let itemId = PlacesUtils.bookmarks.getIdForItemAt(folderId, pos); + _("Got itemId", itemId, "under", folderId, "at", pos); + if (itemId == -1) + break; + + switch (PlacesUtils.bookmarks.getItemType(itemId)) { + case PlacesUtils.bookmarks.TYPE_BOOKMARK: + bookmarks.push(PlacesUtils.bookmarks.getItemTitle(itemId)); + break; + case PlacesUtils.bookmarks.TYPE_FOLDER: + bookmarks.push(getBookmarks(itemId)); + break; + default: + _("Unsupported item type.."); + } + + pos++; + } + + return bookmarks; +} - let bookmarks = (function mapTree(children) { - return children.map(child => { - let result = { - guid: child.guid, - index: child.index, - }; - if (child.children) { - result.children = mapTree(child.children); - } - if (child.annos) { - let orphanAnno = child.annos.find( - anno => anno.name == "sync/parent"); - if (orphanAnno) { - result.requestedParent = orphanAnno.value; - } - } - return result; - }); - }(root.children)); +function check(expected) { + let bookmarks = getBookmarks(PlacesUtils.bookmarks.unfiledBookmarksFolder); _("Checking if the bookmark structure is", JSON.stringify(expected)); _("Got bookmarks:", JSON.stringify(bookmarks)); - deepEqual(bookmarks, expected); -}); + do_check_true(Utils.deepEquals(bookmarks, expected)); +} -add_task(function* test_bookmark_order() { +function run_test() { let store = new BookmarksEngine(Service)._store; initTestLogging("Trace"); _("Starting with a clean slate of no bookmarks"); store.wipe(); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - // Index 2 is the tags root. (Root indices depend on the order of the - // `CreateRoot` calls in `Database::CreateBookmarkRoots`). 
- guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "clean slate"); + check([]); function bookmark(name, parent) { let bookmark = new Bookmark("http://weave.server/my-bookmark"); @@ -83,447 +75,64 @@ add_task(function* test_bookmark_order() { store._orderChildren(); delete store._childrenToOrder; } - let id10 = "10_aaaaaaaaa"; + _("basic add first bookmark"); - apply(bookmark(id10, "")); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id10, - index: 0, - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "basic add first bookmark"); - let id20 = "20_aaaaaaaaa"; + apply(bookmark("10", "")); + check(["10"]); + _("basic append behind 10"); - apply(bookmark(id20, "")); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id10, - index: 0, - }, { - guid: id20, - index: 1, - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "basic append behind 10"); + apply(bookmark("20", "")); + check(["10", "20"]); - let id31 = "31_aaaaaaaaa"; - let id30 = "f30_aaaaaaaa"; _("basic create in folder"); - apply(bookmark(id31, id30)); - let f30 = folder(id30, "", [id31]); + apply(bookmark("31", "f30")); + let f30 = folder("f30", "", ["31"]); apply(f30); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id10, - index: 0, - }, { - guid: id20, - index: 1, - }, { - guid: id30, - index: 2, - children: [{ - guid: id31, - index: 0, - }], - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "basic create in folder"); + check(["10", "20", ["31"]]); - let id41 = "41_aaaaaaaaa"; - let id40 = "f40_aaaaaaaa"; _("insert missing parent -> append to unfiled"); - apply(bookmark(id41, id40)); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id10, - index: 0, - }, { - guid: id20, - index: 1, - }, { - guid: id30, - index: 2, - children: [{ - guid: id31, - index: 0, - }], - }, { - guid: id41, - index: 3, - requestedParent: id40, - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "insert missing parent -> append to unfiled"); - - let id42 = "42_aaaaaaaaa"; + apply(bookmark("41", "f40")); + check(["10", "20", ["31"], "41"]); _("insert another missing parent -> append"); - apply(bookmark(id42, id40)); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id10, - index: 0, - }, { - guid: id20, - index: 1, - }, { - guid: id30, - index: 2, - children: [{ - guid: id31, - index: 0, - }], - }, { - guid: id41, - index: 3, - requestedParent: id40, - }, { - guid: id42, - index: 4, - requestedParent: id40, - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "insert another missing parent -> append"); + apply(bookmark("42", "f40")); + check(["10", "20", ["31"], "41", 
"42"]); _("insert folder -> move children and followers"); - let f40 = folder(id40, "", [id41, id42]); + let f40 = folder("f40", "", ["41", "42"]); apply(f40); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id10, - index: 0, - }, { - guid: id20, - index: 1, - }, { - guid: id30, - index: 2, - children: [{ - guid: id31, - index: 0, - }], - }, { - guid: id40, - index: 3, - children: [{ - guid: id41, - index: 0, - }, { - guid: id42, - index: 1, - }] - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "insert folder -> move children and followers"); + check(["10", "20", ["31"], ["41", "42"]]); _("Moving 41 behind 42 -> update f40"); - f40.children = [id42, id41]; + f40.children = ["42", "41"]; apply(f40); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id10, - index: 0, - }, { - guid: id20, - index: 1, - }, { - guid: id30, - index: 2, - children: [{ - guid: id31, - index: 0, - }], - }, { - guid: id40, - index: 3, - children: [{ - guid: id42, - index: 0, - }, { - guid: id41, - index: 1, - }] - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "Moving 41 behind 42 -> update f40"); + check(["10", "20", ["31"], ["42", "41"]]); _("Moving 10 back to front -> update 10, 20"); - f40.children = [id41, id42]; + f40.children = ["41", "42"]; apply(f40); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id10, - index: 0, - }, { - guid: id20, - index: 1, - }, { - guid: id30, - index: 2, - children: [{ - guid: id31, - index: 0, - }], - }, { - guid: id40, - index: 3, - children: [{ - guid: id41, - index: 0, - }, { - guid: id42, - index: 1, - }] - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "Moving 10 back to front -> update 10, 20"); + check(["10", "20", ["31"], ["41", "42"]]); _("Moving 20 behind 42 in f40 -> update 50"); - apply(bookmark(id20, id40)); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id10, - index: 0, - }, { - guid: id30, - index: 1, - children: [{ - guid: id31, - index: 0, - }], - }, { - guid: id40, - index: 2, - children: [{ - guid: id41, - index: 0, - }, { - guid: id42, - index: 1, - }, { - guid: id20, - index: 2, - }] - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "Moving 20 behind 42 in f40 -> update 50"); + apply(bookmark("20", "f40")); + check(["10", ["31"], ["41", "42", "20"]]); _("Moving 10 in front of 31 in f30 -> update 10, f30"); - apply(bookmark(id10, id30)); - f30.children = [id10, id31]; + apply(bookmark("10", "f30")); + f30.children = ["10", "31"]; apply(f30); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id30, - index: 0, - children: [{ - guid: id10, - index: 0, - }, { - guid: id31, - index: 1, - }], - }, { - guid: id40, - index: 1, - children: [{ - guid: 
id41, - index: 0, - }, { - guid: id42, - index: 1, - }, { - guid: id20, - index: 2, - }] - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "Moving 10 in front of 31 in f30 -> update 10, f30"); + check([["10", "31"], ["41", "42", "20"]]); _("Moving 20 from f40 to f30 -> update 20, f30"); - apply(bookmark(id20, id30)); - f30.children = [id10, id20, id31]; + apply(bookmark("20", "f30")); + f30.children = ["10", "20", "31"]; apply(f30); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id30, - index: 0, - children: [{ - guid: id10, - index: 0, - }, { - guid: id20, - index: 1, - }, { - guid: id31, - index: 2, - }], - }, { - guid: id40, - index: 1, - children: [{ - guid: id41, - index: 0, - }, { - guid: id42, - index: 1, - }] - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "Moving 20 from f40 to f30 -> update 20, f30"); + check([["10", "20", "31"], ["41", "42"]]); _("Move 20 back to front -> update 20, f30"); - apply(bookmark(id20, "")); - f30.children = [id10, id31]; + apply(bookmark("20", "")); + f30.children = ["10", "31"]; apply(f30); - yield check([{ - guid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }, { - guid: PlacesUtils.bookmarks.toolbarGuid, - index: 1, - }, { - guid: PlacesUtils.bookmarks.unfiledGuid, - index: 3, - children: [{ - guid: id30, - index: 0, - children: [{ - guid: id10, - index: 0, - }, { - guid: id31, - index: 1, - }], - }, { - guid: id40, - index: 1, - children: [{ - guid: id41, - index: 0, - }, { - guid: id42, - index: 1, - }], - }, { - guid: id20, - index: 2, - }], - }, { - guid: PlacesUtils.bookmarks.mobileGuid, - index: 4, - }], "Move 20 back to front -> update 20, f30"); + check([["10", "31"], ["41", "42"], "20"]); -}); +} diff --git a/services/sync/tests/unit/test_bookmark_places_query_rewriting.js b/services/sync/tests/unit/test_bookmark_places_query_rewriting.js index 0ddf81583..8b764d675 100644 --- a/services/sync/tests/unit/test_bookmark_places_query_rewriting.js +++ b/services/sync/tests/unit/test_bookmark_places_query_rewriting.js @@ -7,54 +7,45 @@ Cu.import("resource://services-sync/engines/bookmarks.js"); Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); -var engine = new BookmarksEngine(Service); -var store = engine._store; - -function makeTagRecord(id, uri) { - let tagRecord = new BookmarkQuery("bookmarks", id); - tagRecord.queryId = "MagicTags"; - tagRecord.parentName = "Bookmarks Toolbar"; - tagRecord.bmkUri = uri; - tagRecord.title = "tagtag"; - tagRecord.folderName = "bar"; - tagRecord.parentid = PlacesUtils.bookmarks.toolbarGuid; - return tagRecord; -} +let engine = new BookmarksEngine(Service); +let store = engine._store; function run_test() { initTestLogging("Trace"); Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace; Log.repository.getLogger("Sync.Store.Bookmarks").level = Log.Level.Trace; + let tagRecord = new BookmarkQuery("bookmarks", "abcdefabcdef"); let uri = "place:folder=499&type=7&queryType=1"; - let tagRecord = makeTagRecord("abcdefabcdef", uri); + tagRecord.queryId = "MagicTags"; + tagRecord.parentName = "Bookmarks Toolbar"; + tagRecord.bmkUri = uri; + tagRecord.title = "tagtag"; + tagRecord.folderName = "bar"; _("Type: " + tagRecord.type); _("Folder name: " + tagRecord.folderName); - store.applyIncoming(tagRecord); + store.preprocessTagQuery(tagRecord); 
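// What that rewrite does, in miniature: only tag queries (type=7) get their
// server-relative folder id swapped for the local tag folder's id. A
// simplified, illustrative sketch, assuming the local tag folder id is
// already known:
function sketchRewriteTagQuery(bmkUri, localTagID) {
  if (!/type=7/.test(bmkUri)) {
    return bmkUri; // wrong query type: leave the URI alone
  }
  return bmkUri.replace(/folder=\d+/, "folder=" + localTagID);
}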
+ + _("Verify that the URI has been rewritten."); + do_check_neq(tagRecord.bmkUri, uri); - let tags = PlacesUtils.getFolderContents(PlacesUtils.tagsFolderId).root; + let tags = store._getNode(PlacesUtils.tagsFolderId); + tags.containerOpen = true; let tagID; - try { - for (let i = 0; i < tags.childCount; ++i) { - let child = tags.getChild(i); - if (child.title == "bar") { - tagID = child.itemId; - } - } - } finally { - tags.containerOpen = false; + for (let i = 0; i < tags.childCount; ++i) { + let child = tags.getChild(i); + if (child.title == "bar") + tagID = child.itemId; } + tags.containerOpen = false; _("Tag ID: " + tagID); - let insertedRecord = store.createRecord("abcdefabcdef", "bookmarks"); - do_check_eq(insertedRecord.bmkUri, uri.replace("499", tagID)); + do_check_eq(tagRecord.bmkUri, uri.replace("499", tagID)); _("... but not if the type is wrong."); let wrongTypeURI = "place:folder=499&type=2&queryType=1"; - let wrongTypeRecord = makeTagRecord("fedcbafedcba", wrongTypeURI); - store.applyIncoming(wrongTypeRecord); - - insertedRecord = store.createRecord("fedcbafedcba", "bookmarks"); - do_check_eq(insertedRecord.bmkUri, wrongTypeURI); + tagRecord.bmkUri = wrongTypeURI; + store.preprocessTagQuery(tagRecord); + do_check_eq(tagRecord.bmkUri, wrongTypeURI); } diff --git a/services/sync/tests/unit/test_bookmark_smart_bookmarks.js b/services/sync/tests/unit/test_bookmark_smart_bookmarks.js index 942cf2761..4e9b2834d 100644 --- a/services/sync/tests/unit/test_bookmark_smart_bookmarks.js +++ b/services/sync/tests/unit/test_bookmark_smart_bookmarks.js @@ -16,8 +16,8 @@ var IOService = Cc["@mozilla.org/network/io-service;1"] Service.engineManager.register(BookmarksEngine); -var engine = Service.engineManager.get("bookmarks"); -var store = engine._store; +let engine = Service.engineManager.get("bookmarks"); +let store = engine._store; // Clean up after other tests. Only necessary in XULRunner. store.wipe(); @@ -57,7 +57,7 @@ function serverForFoo(engine) { // Verify that Places smart bookmarks have their annotation uploaded and // handled locally. -add_task(function *test_annotation_uploaded() { +add_test(function test_annotation_uploaded() { let server = serverForFoo(engine); new SyncTestingInfrastructure(server.server); @@ -110,9 +110,9 @@ add_task(function *test_annotation_uploaded() { let collection = server.user("foo").collection("bookmarks"); try { - yield sync_engine_and_validate_telem(engine, false); + engine.sync(); let wbos = collection.keys(function (id) { - return ["menu", "toolbar", "mobile", "unfiled"].indexOf(id) == -1; + return ["menu", "toolbar", "mobile"].indexOf(id) == -1; }); do_check_eq(wbos.length, 1); @@ -141,7 +141,7 @@ add_task(function *test_annotation_uploaded() { do_check_eq(smartBookmarkCount(), startCount); _("Sync. 
Verify that the downloaded record carries the annotation."); - yield sync_engine_and_validate_telem(engine, false); + engine.sync(); _("Verify that the Places DB now has an annotated bookmark."); _("Our count has increased again."); diff --git a/services/sync/tests/unit/test_bookmark_store.js b/services/sync/tests/unit/test_bookmark_store.js index 902206ba6..53ea433e6 100644 --- a/services/sync/tests/unit/test_bookmark_store.js +++ b/services/sync/tests/unit/test_bookmark_store.js @@ -11,17 +11,17 @@ const PARENT_ANNO = "sync/parent"; Service.engineManager.register(BookmarksEngine); -var engine = Service.engineManager.get("bookmarks"); -var store = engine._store; -var tracker = engine._tracker; +let engine = Service.engineManager.get("bookmarks"); +let store = engine._store; +let tracker = engine._tracker; // Don't write some persistence files asynchronously. tracker.persistChangedIDs = false; -var fxuri = Utils.makeURI("http://getfirefox.com/"); -var tburi = Utils.makeURI("http://getthunderbird.com/"); +let fxuri = Utils.makeURI("http://getfirefox.com/"); +let tburi = Utils.makeURI("http://getthunderbird.com/"); -add_task(function* test_ignore_specials() { +add_test(function test_ignore_specials() { _("Ensure that we can't delete bookmark roots."); // Belt... @@ -30,7 +30,6 @@ add_task(function* test_ignore_specials() { do_check_neq(null, store.idForGUID("toolbar")); store.applyIncoming(record); - yield store.deletePending(); // Ensure that the toolbar exists. do_check_neq(null, store.idForGUID("toolbar")); @@ -40,11 +39,11 @@ add_task(function* test_ignore_specials() { // Braces... store.remove(record); - yield store.deletePending(); do_check_neq(null, store.idForGUID("toolbar")); engine._buildGUIDMap(); store.wipe(); + run_next_test(); }); add_test(function test_bookmark_create() { @@ -81,8 +80,8 @@ add_test(function test_bookmark_create() { _("Have the store create a new record object. Verify that it has the same data."); let newrecord = store.createRecord(fxrecord.id); do_check_true(newrecord instanceof Bookmark); - for (let property of ["type", "bmkUri", "description", "title", - "keyword", "parentName", "parentid"]) { + for each (let property in ["type", "bmkUri", "description", "title", + "keyword", "parentName", "parentid"]) { do_check_eq(newrecord[property], fxrecord[property]); } do_check_true(Utils.deepEquals(newrecord.tags.sort(), @@ -167,7 +166,7 @@ add_test(function test_bookmark_createRecord() { _("Verify that the record is created accordingly."); let record = store.createRecord(bmk1_guid); - do_check_eq(record.title, ""); + do_check_eq(record.title, null); do_check_eq(record.description, null); do_check_eq(record.keyword, null); @@ -198,7 +197,7 @@ add_test(function test_folder_create() { _("Have the store create a new record object. 
Verify that it has the same data."); let newrecord = store.createRecord(folder.id); do_check_true(newrecord instanceof BookmarkFolder); - for (let property of ["title", "parentName", "parentid"]) + for each (let property in ["title", "parentName", "parentid"]) do_check_eq(newrecord[property], folder[property]); _("Folders have high sort index to ensure they're synced first."); @@ -244,7 +243,7 @@ add_test(function test_folder_createRecord() { } }); -add_task(function* test_deleted() { +add_test(function test_deleted() { try { _("Create a bookmark that will be deleted."); let bmk1_id = PlacesUtils.bookmarks.insertBookmark( @@ -256,7 +255,7 @@ add_task(function* test_deleted() { let record = new PlacesItem("bookmarks", bmk1_guid); record.deleted = true; store.applyIncoming(record); - yield store.deletePending(); + _("Ensure it has been deleted."); let error; try { @@ -272,6 +271,7 @@ add_task(function* test_deleted() { } finally { _("Clean up."); store.wipe(); + run_next_test(); } }); @@ -428,106 +428,6 @@ add_test(function test_empty_query_doesnt_die() { run_next_test(); }); -function assertDeleted(id) { - let error; - try { - PlacesUtils.bookmarks.getItemType(id); - } catch (e) { - error = e; - } - equal(error.result, Cr.NS_ERROR_ILLEGAL_VALUE) -} - -add_task(function* test_delete_buffering() { - store.wipe(); - try { - _("Create a folder with two bookmarks."); - let folder = new BookmarkFolder("bookmarks", "testfolder-1"); - folder.parentName = "Bookmarks Toolbar"; - folder.parentid = "toolbar"; - folder.title = "Test Folder"; - store.applyIncoming(folder); - - - let fxRecord = new Bookmark("bookmarks", "get-firefox1"); - fxRecord.bmkUri = fxuri.spec; - fxRecord.title = "Get Firefox!"; - fxRecord.parentName = "Test Folder"; - fxRecord.parentid = "testfolder-1"; - - let tbRecord = new Bookmark("bookmarks", "get-tndrbrd1"); - tbRecord.bmkUri = tburi.spec; - tbRecord.title = "Get Thunderbird!"; - tbRecord.parentName = "Test Folder"; - tbRecord.parentid = "testfolder-1"; - - store.applyIncoming(fxRecord); - store.applyIncoming(tbRecord); - - let folderId = store.idForGUID(folder.id); - let fxRecordId = store.idForGUID(fxRecord.id); - let tbRecordId = store.idForGUID(tbRecord.id); - - _("Check everything was created correctly."); - - equal(PlacesUtils.bookmarks.getItemType(fxRecordId), - PlacesUtils.bookmarks.TYPE_BOOKMARK); - equal(PlacesUtils.bookmarks.getItemType(tbRecordId), - PlacesUtils.bookmarks.TYPE_BOOKMARK); - equal(PlacesUtils.bookmarks.getItemType(folderId), - PlacesUtils.bookmarks.TYPE_FOLDER); - - equal(PlacesUtils.bookmarks.getFolderIdForItem(fxRecordId), folderId); - equal(PlacesUtils.bookmarks.getFolderIdForItem(tbRecordId), folderId); - equal(PlacesUtils.bookmarks.getFolderIdForItem(folderId), - PlacesUtils.bookmarks.toolbarFolder); - - _("Delete the folder and one bookmark."); - - let deleteFolder = new PlacesItem("bookmarks", "testfolder-1"); - deleteFolder.deleted = true; - - let deleteFxRecord = new PlacesItem("bookmarks", "get-firefox1"); - deleteFxRecord.deleted = true; - - store.applyIncoming(deleteFolder); - store.applyIncoming(deleteFxRecord); - - _("Check that we haven't deleted them yet, but that the deletions are queued"); - // these will throw if we've deleted them - equal(PlacesUtils.bookmarks.getItemType(fxRecordId), - PlacesUtils.bookmarks.TYPE_BOOKMARK); - - equal(PlacesUtils.bookmarks.getItemType(folderId), - PlacesUtils.bookmarks.TYPE_FOLDER); - - equal(PlacesUtils.bookmarks.getFolderIdForItem(fxRecordId), folderId); - - 
ok(store._foldersToDelete.has(folder.id)); - ok(store._atomsToDelete.has(fxRecord.id)); - ok(!store._atomsToDelete.has(tbRecord.id)); - - _("Process pending deletions and ensure that the right things are deleted."); - let updatedGuids = yield store.deletePending(); - - deepEqual(updatedGuids.sort(), ["get-tndrbrd1", "toolbar"]); - - assertDeleted(fxRecordId); - assertDeleted(folderId); - - ok(!store._foldersToDelete.has(folder.id)); - ok(!store._atomsToDelete.has(fxRecord.id)); - - equal(PlacesUtils.bookmarks.getFolderIdForItem(tbRecordId), - PlacesUtils.bookmarks.toolbarFolder); - - } finally { - _("Clean up."); - store.wipe(); - } -}); - - function run_test() { initTestLogging('Trace'); run_next_test(); diff --git a/services/sync/tests/unit/test_bookmark_tracker.js b/services/sync/tests/unit/test_bookmark_tracker.js index 9b9242579..6060fbae4 100644 --- a/services/sync/tests/unit/test_bookmark_tracker.js +++ b/services/sync/tests/unit/test_bookmark_tracker.js @@ -2,80 +2,24 @@ http://creativecommons.org/publicdomain/zero/1.0/ */ Cu.import("resource://gre/modules/PlacesUtils.jsm"); -Cu.import("resource://gre/modules/PlacesSyncUtils.jsm"); -Cu.import("resource://gre/modules/Task.jsm"); Cu.import("resource://services-sync/constants.js"); Cu.import("resource://services-sync/engines/bookmarks.js"); Cu.import("resource://services-sync/engines.js"); Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); -Cu.import("resource:///modules/PlacesUIUtils.jsm"); Service.engineManager.register(BookmarksEngine); -var engine = Service.engineManager.get("bookmarks"); -var store = engine._store; -var tracker = engine._tracker; +let engine = Service.engineManager.get("bookmarks"); +let store = engine._store; +let tracker = engine._tracker; store.wipe(); tracker.persistChangedIDs = false; -const DAY_IN_MS = 24 * 60 * 60 * 1000; - -// Test helpers. -function* verifyTrackerEmpty() { - let changes = engine.pullNewChanges(); - equal(changes.count(), 0); - equal(tracker.score, 0); -} - -function* resetTracker() { - tracker.clearChangedIDs(); - tracker.resetScore(); -} - -function* cleanup() { - store.wipe(); - yield resetTracker(); - yield stopTracking(); -} - -// startTracking is a signal that the test wants to notice things that happen -// after this is called (ie, things already tracked should be discarded.) -function* startTracking() { - Svc.Obs.notify("weave:engine:start-tracking"); -} - -function* stopTracking() { - Svc.Obs.notify("weave:engine:stop-tracking"); -} - -function* verifyTrackedItems(tracked) { - let changes = engine.pullNewChanges(); - let trackedIDs = new Set(changes.ids()); - for (let guid of tracked) { - ok(changes.has(guid), `${guid} should be tracked`); - ok(changes.getModifiedTimestamp(guid) > 0, - `${guid} should have a modified time`); - trackedIDs.delete(guid); - } - equal(trackedIDs.size, 0, `Unhandled tracked IDs: ${ - JSON.stringify(Array.from(trackedIDs))}`); -} - -function* verifyTrackedCount(expected) { - let changes = engine.pullNewChanges(); - equal(changes.count(), expected); -} - -// Copied from PlacesSyncUtils.jsm. 
-function findAnnoItems(anno, val) { - let annos = PlacesUtils.annotations; - return annos.getItemsWithAnnotation(anno, {}).filter(id => - annos.getItemAnnotation(id, anno) == val); -} - -add_task(function* test_tracking() { - _("Test starting and stopping the tracker"); +function test_tracking() { + _("Verify we've got an empty tracker to work with."); + let tracker = engine._tracker; + do_check_empty(tracker.changedIDs); let folder = PlacesUtils.bookmarks.createFolder( PlacesUtils.bookmarks.bookmarksMenuFolder, @@ -89,630 +33,60 @@ add_task(function* test_tracking() { try { _("Create bookmark. Won't show because we haven't started tracking yet"); createBmk(); - yield verifyTrackedCount(0); + do_check_empty(tracker.changedIDs); do_check_eq(tracker.score, 0); _("Tell the tracker to start tracking changes."); - yield startTracking(); + Svc.Obs.notify("weave:engine:start-tracking"); createBmk(); // We expect two changed items because the containing folder // changed as well (new child). - yield verifyTrackedCount(2); + do_check_attribute_count(tracker.changedIDs, 2); do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); _("Notifying twice won't do any harm."); - yield startTracking(); + Svc.Obs.notify("weave:engine:start-tracking"); createBmk(); - yield verifyTrackedCount(3); + do_check_attribute_count(tracker.changedIDs, 3); do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 4); _("Let's stop tracking again."); - yield resetTracker(); - yield stopTracking(); + tracker.clearChangedIDs(); + tracker.resetScore(); + Svc.Obs.notify("weave:engine:stop-tracking"); createBmk(); - yield verifyTrackedCount(0); + do_check_empty(tracker.changedIDs); do_check_eq(tracker.score, 0); _("Notifying twice won't do any harm."); - yield stopTracking(); + Svc.Obs.notify("weave:engine:stop-tracking"); createBmk(); - yield verifyTrackedCount(0); + do_check_empty(tracker.changedIDs); do_check_eq(tracker.score, 0); } finally { _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_batch_tracking() { - _("Test tracker does the correct thing during and after a places 'batch'"); - - yield startTracking(); - - PlacesUtils.bookmarks.runInBatchMode({ - runBatched: function() { - let folder = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.bookmarksMenuFolder, - "Test Folder", PlacesUtils.bookmarks.DEFAULT_INDEX); - // We should be tracking the new folder and its parent (and need to jump - // through blocking hoops...) - Async.promiseSpinningly(Task.spawn(verifyTrackedCount(2))); - // But not have bumped the score. - do_check_eq(tracker.score, 0); - } - }, null); - - // Out of batch mode - tracker should be the same, but score should be up. - yield verifyTrackedCount(2); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - yield cleanup(); -}); - -add_task(function* test_nested_batch_tracking() { - _("Test tracker does the correct thing if a places 'batch' is nested"); - - yield startTracking(); - - PlacesUtils.bookmarks.runInBatchMode({ - runBatched: function() { - - PlacesUtils.bookmarks.runInBatchMode({ - runBatched: function() { - let folder = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.bookmarksMenuFolder, - "Test Folder", PlacesUtils.bookmarks.DEFAULT_INDEX); - // We should be tracking the new folder and its parent (and need to jump - // through blocking hoops...) - Async.promiseSpinningly(Task.spawn(verifyTrackedCount(2))); - // But not have bumped the score. 
- do_check_eq(tracker.score, 0); - } - }, null); - _("inner batch complete."); - // should still not have a score as the outer batch is pending. - do_check_eq(tracker.score, 0); - } - }, null); - - // Out of both batches - tracker should be the same, but score should be up. - yield verifyTrackedCount(2); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - yield cleanup(); -}); - -add_task(function* test_tracker_sql_batching() { - _("Test tracker does the correct thing when it is forced to batch SQL queries"); - - const SQLITE_MAX_VARIABLE_NUMBER = 999; - let numItems = SQLITE_MAX_VARIABLE_NUMBER * 2 + 10; - let createdIDs = []; - - yield startTracking(); - - PlacesUtils.bookmarks.runInBatchMode({ - runBatched: function() { - for (let i = 0; i < numItems; i++) { - let syncBmkID = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.unfiledBookmarksFolder, - Utils.makeURI("https://example.org/" + i), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Sync Bookmark " + i); - createdIDs.push(syncBmkID); - } - } - }, null); - - do_check_eq(createdIDs.length, numItems); - yield verifyTrackedCount(numItems + 1); // the folder is also tracked. - yield cleanup(); -}); - -add_task(function* test_onItemAdded() { - _("Items inserted via the synchronous bookmarks API should be tracked"); - - try { - yield startTracking(); - - _("Insert a folder using the sync API"); - let syncFolderID = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.bookmarksMenuFolder, "Sync Folder", - PlacesUtils.bookmarks.DEFAULT_INDEX); - let syncFolderGUID = engine._store.GUIDForId(syncFolderID); - yield verifyTrackedItems(["menu", syncFolderGUID]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - - yield resetTracker(); - yield startTracking(); - - _("Insert a bookmark using the sync API"); - let syncBmkID = PlacesUtils.bookmarks.insertBookmark(syncFolderID, - Utils.makeURI("https://example.org/sync"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Sync Bookmark"); - let syncBmkGUID = engine._store.GUIDForId(syncBmkID); - yield verifyTrackedItems([syncFolderGUID, syncBmkGUID]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - - yield resetTracker(); - yield startTracking(); - - _("Insert a separator using the sync API"); - let syncSepID = PlacesUtils.bookmarks.insertSeparator( - PlacesUtils.bookmarks.bookmarksMenuFolder, - PlacesUtils.bookmarks.getItemIndex(syncFolderID)); - let syncSepGUID = engine._store.GUIDForId(syncSepID); - yield verifyTrackedItems(["menu", syncSepGUID]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_async_onItemAdded() { - _("Items inserted via the asynchronous bookmarks API should be tracked"); - - try { - yield startTracking(); - - _("Insert a folder using the async API"); - let asyncFolder = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_FOLDER, - parentGuid: PlacesUtils.bookmarks.menuGuid, - title: "Async Folder", - }); - yield verifyTrackedItems(["menu", asyncFolder.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - - yield resetTracker(); - yield startTracking(); - - _("Insert a bookmark using the async API"); - let asyncBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: asyncFolder.guid, - url: "https://example.org/async", - title: "Async Bookmark", - }); - yield verifyTrackedItems([asyncFolder.guid, asyncBmk.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - - 
yield resetTracker(); - yield startTracking(); - - _("Insert a separator using the async API"); - let asyncSep = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_SEPARATOR, - parentGuid: PlacesUtils.bookmarks.menuGuid, - index: asyncFolder.index, - }); - yield verifyTrackedItems(["menu", asyncSep.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_async_onItemChanged() { - _("Items updated using the asynchronous bookmarks API should be tracked"); - - try { - yield stopTracking(); - - _("Insert a bookmark"); - let fxBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "http://getfirefox.com", - title: "Get Firefox!", - }); - _(`Firefox GUID: ${fxBmk.guid}`); - - yield startTracking(); - - _("Update the bookmark using the async API"); - yield PlacesUtils.bookmarks.update({ - guid: fxBmk.guid, - title: "Download Firefox", - url: "https://www.mozilla.org/firefox", - // PlacesUtils.bookmarks.update rejects last modified dates older than - // the added date. - lastModified: new Date(Date.now() + DAY_IN_MS), - }); - - yield verifyTrackedItems([fxBmk.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemChanged_itemDates() { - _("Changes to item dates should be tracked"); - - try { - yield stopTracking(); - - _("Insert a bookmark"); - let fx_id = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.bookmarksMenuFolder, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Firefox!"); - let fx_guid = engine._store.GUIDForId(fx_id); - _(`Firefox GUID: ${fx_guid}`); - - yield startTracking(); - - _("Reset the bookmark's added date"); - // Convert to microseconds for PRTime. 
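// (PRTime, the timestamp format Places stores, counts microseconds since
// the Unix epoch, while Date.now() returns milliseconds; hence the * 1000
// below. As a rough sketch, with hypothetical helper names:
//   const msToPRTime = ms => ms * 1000;
//   const prTimeToMs = us => Math.floor(us / 1000);
// )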
- let dateAdded = (Date.now() - DAY_IN_MS) * 1000; - PlacesUtils.bookmarks.setItemDateAdded(fx_id, dateAdded); - yield verifyTrackedItems([fx_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - yield resetTracker(); - - _("Set the bookmark's last modified date"); - let dateModified = Date.now() * 1000; - PlacesUtils.bookmarks.setItemLastModified(fx_id, dateModified); - yield verifyTrackedItems([fx_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemChanged_changeBookmarkURI() { - _("Changes to bookmark URIs should be tracked"); - - try { - yield stopTracking(); - - _("Insert a bookmark"); - let fx_id = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.bookmarksMenuFolder, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Firefox!"); - let fx_guid = engine._store.GUIDForId(fx_id); - _(`Firefox GUID: ${fx_guid}`); - - _("Set a tracked annotation to make sure we only notify once"); - PlacesUtils.annotations.setItemAnnotation( - fx_id, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0, - PlacesUtils.annotations.EXPIRE_NEVER); - - yield startTracking(); - - _("Change the bookmark's URI"); - PlacesUtils.bookmarks.changeBookmarkURI(fx_id, - Utils.makeURI("https://www.mozilla.org/firefox")); - yield verifyTrackedItems([fx_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemTagged() { - _("Items tagged using the synchronous API should be tracked"); - - try { - yield stopTracking(); - - _("Create a folder"); - let folder = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent", - PlacesUtils.bookmarks.DEFAULT_INDEX); - let folderGUID = engine._store.GUIDForId(folder); - _("Folder ID: " + folder); - _("Folder GUID: " + folderGUID); - - _("Track changes to tags"); - let uri = Utils.makeURI("http://getfirefox.com"); - let b = PlacesUtils.bookmarks.insertBookmark( - folder, uri, - PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!"); - let bGUID = engine._store.GUIDForId(b); - _("New item is " + b); - _("GUID: " + bGUID); - - yield startTracking(); - - _("Tag the item"); - PlacesUtils.tagging.tagURI(uri, ["foo"]); - - // bookmark should be tracked, folder should not be. - yield verifyTrackedItems([bGUID]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 5); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemUntagged() { - _("Items untagged using the synchronous API should be tracked"); - - try { - yield stopTracking(); - - _("Insert tagged bookmarks"); - let uri = Utils.makeURI("http://getfirefox.com"); - let fx1ID = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.bookmarksMenuFolder, uri, - PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!"); - let fx1GUID = engine._store.GUIDForId(fx1ID); - // Different parent and title; same URL. 
- let fx2ID = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.toolbarFolder, uri, - PlacesUtils.bookmarks.DEFAULT_INDEX, "Download Firefox"); - let fx2GUID = engine._store.GUIDForId(fx2ID); - PlacesUtils.tagging.tagURI(uri, ["foo"]); - - yield startTracking(); - - _("Remove the tag"); - PlacesUtils.tagging.untagURI(uri, ["foo"]); - - yield verifyTrackedItems([fx1GUID, fx2GUID]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_async_onItemUntagged() { - _("Items untagged using the asynchronous API should be tracked"); - - try { - yield stopTracking(); - - _("Insert tagged bookmarks"); - let fxBmk1 = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "http://getfirefox.com", - title: "Get Firefox!", - }); - let fxBmk2 = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.toolbarGuid, - url: "http://getfirefox.com", - title: "Download Firefox", - }); - let tag = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_FOLDER, - parentGuid: PlacesUtils.bookmarks.tagsGuid, - title: "some tag", - }); - let fxTag = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: tag.guid, - url: "http://getfirefox.com", - }); - - yield startTracking(); - - _("Remove the tag using the async bookmarks API"); - yield PlacesUtils.bookmarks.remove(fxTag.guid); - - yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_async_onItemTagged() { - _("Items tagged using the asynchronous API should be tracked"); - - try { - yield stopTracking(); - - _("Insert untagged bookmarks"); - let folder1 = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_FOLDER, - parentGuid: PlacesUtils.bookmarks.menuGuid, - title: "Folder 1", - }); - let fxBmk1 = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: folder1.guid, - url: "http://getfirefox.com", - title: "Get Firefox!", - }); - let folder2 = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_FOLDER, - parentGuid: PlacesUtils.bookmarks.menuGuid, - title: "Folder 2", - }); - // Different parent and title; same URL. - let fxBmk2 = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: folder2.guid, - url: "http://getfirefox.com", - title: "Download Firefox", - }); - - yield startTracking(); - - // This will change once tags are moved into a separate table (bug 424160). - // We specifically test this case because Bookmarks.jsm updates tagged - // bookmarks and notifies observers. 
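// (Until tags move to their own table, a tag is itself a folder under the
// tags root, and tagging a URL simply means inserting a bookmark for that
// URL into the tag folder, which is exactly what the next two inserts do.)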
- _("Insert a tag using the async bookmarks API"); - let tag = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_FOLDER, - parentGuid: PlacesUtils.bookmarks.tagsGuid, - title: "some tag", - }); - - _("Tag an item using the async bookmarks API"); - yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: tag.guid, - url: "http://getfirefox.com", - }); - - yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 6); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemKeywordChanged() { - _("Keyword changes via the synchronous API should be tracked"); - - try { - yield stopTracking(); - let folder = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent", - PlacesUtils.bookmarks.DEFAULT_INDEX); - let folderGUID = engine._store.GUIDForId(folder); - _("Track changes to keywords"); - let uri = Utils.makeURI("http://getfirefox.com"); - let b = PlacesUtils.bookmarks.insertBookmark( - folder, uri, - PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!"); - let bGUID = engine._store.GUIDForId(b); - _("New item is " + b); - _("GUID: " + bGUID); - - yield startTracking(); - - _("Give the item a keyword"); - PlacesUtils.bookmarks.setKeywordForBookmark(b, "the_keyword"); - - // bookmark should be tracked, folder should not be. - yield verifyTrackedItems([bGUID]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - - } finally { - _("Clean up."); - yield cleanup(); + store.wipe(); + tracker.clearChangedIDs(); + tracker.resetScore(); + Svc.Obs.notify("weave:engine:stop-tracking"); } -}); - -add_task(function* test_async_onItemKeywordChanged() { - _("Keyword changes via the asynchronous API should be tracked"); - - try { - yield stopTracking(); - - _("Insert two bookmarks with the same URL"); - let fxBmk1 = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "http://getfirefox.com", - title: "Get Firefox!", - }); - let fxBmk2 = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.toolbarGuid, - url: "http://getfirefox.com", - title: "Download Firefox", - }); - - yield startTracking(); - - _("Add a keyword for both items"); - yield PlacesUtils.keywords.insert({ - keyword: "the_keyword", - url: "http://getfirefox.com", - postData: "postData", - }); - - yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_async_onItemKeywordDeleted() { - _("Keyword deletions via the asynchronous API should be tracked"); - - try { - yield stopTracking(); - - _("Insert two bookmarks with the same URL and keywords"); - let fxBmk1 = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "http://getfirefox.com", - title: "Get Firefox!", - }); - let fxBmk2 = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.toolbarGuid, - url: "http://getfirefox.com", - title: "Download Firefox", - }); - yield PlacesUtils.keywords.insert({ - keyword: "the_keyword", - url: "http://getfirefox.com", - }); - - yield startTracking(); - - _("Remove the keyword"); - yield PlacesUtils.keywords.remove("the_keyword"); - - yield 
verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemPostDataChanged() { - _("Post data changes should be tracked"); - - try { - yield stopTracking(); - - _("Insert a bookmark"); - let fx_id = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.bookmarksMenuFolder, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Firefox!"); - let fx_guid = engine._store.GUIDForId(fx_id); - _(`Firefox GUID: ${fx_guid}`); - - yield startTracking(); +} - // PlacesUtils.setPostDataForBookmark is deprecated, but still used by - // PlacesTransactions.NewBookmark. - _("Post data for the bookmark should be ignored"); - yield PlacesUtils.setPostDataForBookmark(fx_id, "postData"); - yield verifyTrackerEmpty(); - } finally { - _("Clean up."); - yield cleanup(); - } -}); +function test_onItemChanged() { + // Anno that's in ANNOS_TO_TRACK. + const DESCRIPTION_ANNO = "bookmarkProperties/description"; -add_task(function* test_onItemAnnoChanged() { - _("Item annotations should be tracked"); + _("Verify we've got an empty tracker to work with."); + let tracker = engine._tracker; + do_check_empty(tracker.changedIDs); + do_check_eq(tracker.score, 0); try { - yield stopTracking(); + Svc.Obs.notify("weave:engine:stop-tracking"); let folder = PlacesUtils.bookmarks.createFolder( PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent", PlacesUtils.bookmarks.DEFAULT_INDEX); - let folderGUID = engine._store.GUIDForId(folder); _("Track changes to annos."); let b = PlacesUtils.bookmarks.insertBookmark( folder, Utils.makeURI("http://getfirefox.com"), @@ -721,225 +95,27 @@ add_task(function* test_onItemAnnoChanged() { _("New item is " + b); _("GUID: " + bGUID); - yield startTracking(); + Svc.Obs.notify("weave:engine:start-tracking"); PlacesUtils.annotations.setItemAnnotation( - b, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0, + b, DESCRIPTION_ANNO, "A test description", 0, PlacesUtils.annotations.EXPIRE_NEVER); - // bookmark should be tracked, folder should not. 
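// (The tracker only reacts to annotations whitelisted in its ANNOS_TO_TRACK
// list; the description anno set above is one of them, so the annotated
// bookmark is dirtied while its untouched parent folder is not.)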
- yield verifyTrackedItems([bGUID]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - yield resetTracker(); - - PlacesUtils.annotations.removeItemAnnotation(b, - PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO); - yield verifyTrackedItems([bGUID]); + do_check_true(tracker.changedIDs[bGUID] > 0); do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemAdded_filtered_root() { - _("Items outside the change roots should not be tracked"); - - try { - yield startTracking(); - - _("Create a new root"); - let rootID = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.placesRoot, - "New root", - PlacesUtils.bookmarks.DEFAULT_INDEX); - let rootGUID = engine._store.GUIDForId(rootID); - _(`New root GUID: ${rootGUID}`); - - _("Insert a bookmark underneath the new root"); - let untrackedBmkID = PlacesUtils.bookmarks.insertBookmark( - rootID, - Utils.makeURI("http://getthunderbird.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Thunderbird!"); - let untrackedBmkGUID = engine._store.GUIDForId(untrackedBmkID); - _(`New untracked bookmark GUID: ${untrackedBmkGUID}`); - - _("Insert a bookmark underneath the Places root"); - let rootBmkID = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.placesRoot, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!"); - let rootBmkGUID = engine._store.GUIDForId(rootBmkID); - _(`New Places root bookmark GUID: ${rootBmkGUID}`); - - _("New root and bookmark should be ignored"); - yield verifyTrackedItems([]); - // ...But we'll still increment the score and filter out the changes at - // sync time. - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 6); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemDeleted_filtered_root() { - _("Deleted items outside the change roots should be tracked"); - - try { - yield stopTracking(); - - _("Insert a bookmark underneath the Places root"); - let rootBmkID = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.placesRoot, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!"); - let rootBmkGUID = engine._store.GUIDForId(rootBmkID); - _(`New Places root bookmark GUID: ${rootBmkGUID}`); - - yield startTracking(); - - PlacesUtils.bookmarks.removeItem(rootBmkID); - - // We shouldn't upload tombstones for items in filtered roots, but the - // `onItemRemoved` observer doesn't have enough context to determine - // the root, so we'll end up uploading it. - yield verifyTrackedItems([rootBmkGUID]); - // We'll increment the counter twice (once for the removed item, and once - // for the Places root), then filter out the root. 
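// (The tracker keeps two separate signals: `score`, a cheap counter that
// only decides *when* a sync is worthwhile, and `changedIDs`, the per-item
// map that decides *what* gets uploaded, so overcounting the score for
// filtered roots is harmless.)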
- do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - } finally { - _("Clean up."); - yield cleanup(); - } -}); -add_task(function* test_onPageAnnoChanged() { - _("Page annotations should not be tracked"); - - try { - yield stopTracking(); - - _("Insert a bookmark without an annotation"); - let pageURI = Utils.makeURI("http://getfirefox.com"); - PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.bookmarksMenuFolder, - pageURI, - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Firefox!"); - - yield startTracking(); - - _("Add a page annotation"); - PlacesUtils.annotations.setPageAnnotation(pageURI, "URIProperties/characterSet", - "UTF-8", 0, PlacesUtils.annotations.EXPIRE_NEVER); - yield verifyTrackerEmpty(); - yield resetTracker(); - - _("Remove the page annotation"); - PlacesUtils.annotations.removePageAnnotation(pageURI, - "URIProperties/characterSet"); - yield verifyTrackerEmpty(); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onFaviconChanged() { - _("Favicon changes should not be tracked"); - - try { - yield stopTracking(); - - let pageURI = Utils.makeURI("http://getfirefox.com"); - let iconURI = Utils.makeURI("http://getfirefox.com/icon"); - PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.bookmarksMenuFolder, - pageURI, - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Firefox!"); - - yield PlacesTestUtils.addVisits(pageURI); - - yield startTracking(); - - _("Favicon annotations should be ignored"); - let iconURL = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAA" + - "AAAA6fptVAAAACklEQVQI12NgAAAAAgAB4iG8MwAAAABJRU5ErkJggg=="; - - PlacesUtils.favicons.replaceFaviconDataFromDataURL(iconURI, iconURL, 0, - Services.scriptSecurityManager.getSystemPrincipal()); - - yield new Promise(resolve => { - PlacesUtils.favicons.setAndFetchFaviconForPage(pageURI, iconURI, true, - PlacesUtils.favicons.FAVICON_LOAD_NON_PRIVATE, (iconURI, dataLen, data, mimeType) => { - resolve(); - }, - Services.scriptSecurityManager.getSystemPrincipal()); - }); - yield verifyTrackerEmpty(); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onLivemarkAdded() { - _("New livemarks should be tracked"); - - try { - yield startTracking(); - - _("Insert a livemark"); - let livemark = yield PlacesUtils.livemarks.addLivemark({ - parentGuid: PlacesUtils.bookmarks.menuGuid, - // Use a local address just in case, to avoid potential aborts for - // non-local connections. - feedURI: Utils.makeURI("http://localhost:0"), - }); - // Prevent the livemark refresh timer from requesting the URI. - livemark.terminate(); - - yield verifyTrackedItems(["menu", livemark.guid]); - // Three changes: one for the parent, one for creating the livemark - // folder, and one for setting the "livemark/feedURI" anno on the folder. 
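// (A livemark is stored in Places as an ordinary folder carrying a
// "livemark/feedURI" annotation, so creating one fires separate
// notifications for the folder insert and for the anno being set.)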
- do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onLivemarkDeleted() { - _("Deleted livemarks should be tracked"); - - try { - yield stopTracking(); - - _("Insert a livemark"); - let livemark = yield PlacesUtils.livemarks.addLivemark({ - parentGuid: PlacesUtils.bookmarks.menuGuid, - feedURI: Utils.makeURI("http://localhost:0"), - }); - livemark.terminate(); - - yield startTracking(); - - _("Remove a livemark"); - yield PlacesUtils.livemarks.removeLivemark({ - guid: livemark.guid, - }); - - yield verifyTrackedItems(["menu", livemark.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); } finally { _("Clean up."); - yield cleanup(); + store.wipe(); + tracker.clearChangedIDs(); + tracker.resetScore(); + Svc.Obs.notify("weave:engine:stop-tracking"); } -}); +} -add_task(function* test_onItemMoved() { - _("Items moved via the synchronous API should be tracked"); +function test_onItemMoved() { + _("Verify we've got an empty tracker to work with."); + let tracker = engine._tracker; + do_check_empty(tracker.changedIDs); + do_check_eq(tracker.score, 0); try { let fx_id = PlacesUtils.bookmarks.insertBookmark( @@ -948,590 +124,55 @@ add_task(function* test_onItemMoved() { PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!"); let fx_guid = engine._store.GUIDForId(fx_id); - _("Firefox GUID: " + fx_guid); let tb_id = PlacesUtils.bookmarks.insertBookmark( PlacesUtils.bookmarks.bookmarksMenuFolder, Utils.makeURI("http://getthunderbird.com"), PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!"); let tb_guid = engine._store.GUIDForId(tb_id); - _("Thunderbird GUID: " + tb_guid); - yield startTracking(); + Svc.Obs.notify("weave:engine:start-tracking"); // Moving within the folder will just track the folder. PlacesUtils.bookmarks.moveItem( tb_id, PlacesUtils.bookmarks.bookmarksMenuFolder, 0); - yield verifyTrackedItems(['menu']); + do_check_true(tracker.changedIDs['menu'] > 0); + do_check_eq(tracker.changedIDs['toolbar'], undefined); + do_check_eq(tracker.changedIDs[fx_guid], undefined); + do_check_eq(tracker.changedIDs[tb_guid], undefined); do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - yield resetTracker(); + tracker.clearChangedIDs(); + tracker.resetScore(); // Moving a bookmark to a different folder will track the old // folder, the new folder and the bookmark. 
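// (Three records change on a reparent: the bookmark itself gets a new
// parentid, and the child orderings of both the old and the new folder
// change, matching the XLARGE * 3 score asserted below.)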
- PlacesUtils.bookmarks.moveItem(fx_id, PlacesUtils.bookmarks.toolbarFolder, + PlacesUtils.bookmarks.moveItem(tb_id, PlacesUtils.bookmarks.toolbarFolder, PlacesUtils.bookmarks.DEFAULT_INDEX); - yield verifyTrackedItems(['menu', 'toolbar', fx_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3); - - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_async_onItemMoved_update() { - _("Items moved via the asynchronous API should be tracked"); - - try { - yield stopTracking(); - - let fxBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "http://getfirefox.com", - title: "Get Firefox!", - }); - let tbBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "http://getthunderbird.com", - title: "Get Thunderbird!", - }); - - yield startTracking(); - - _("Repositioning a bookmark should track the folder"); - yield PlacesUtils.bookmarks.update({ - guid: tbBmk.guid, - parentGuid: PlacesUtils.bookmarks.menuGuid, - index: 0, - }); - yield verifyTrackedItems(['menu']); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - yield resetTracker(); - - _("Reparenting a bookmark should track both folders and the bookmark"); - yield PlacesUtils.bookmarks.update({ - guid: tbBmk.guid, - parentGuid: PlacesUtils.bookmarks.toolbarGuid, - index: PlacesUtils.bookmarks.DEFAULT_INDEX, - }); - yield verifyTrackedItems(['menu', 'toolbar', tbBmk.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_async_onItemMoved_reorder() { - _("Items reordered via the asynchronous API should be tracked"); - - try { - yield stopTracking(); - - _("Insert out-of-order bookmarks"); - let fxBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "http://getfirefox.com", - title: "Get Firefox!", - }); - _(`Firefox GUID: ${fxBmk.guid}`); - - let tbBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "http://getthunderbird.com", - title: "Get Thunderbird!", - }); - _(`Thunderbird GUID: ${tbBmk.guid}`); - - let mozBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "https://mozilla.org", - title: "Mozilla", - }); - _(`Mozilla GUID: ${mozBmk.guid}`); - - yield startTracking(); - - _("Reorder bookmarks"); - yield PlacesUtils.bookmarks.reorder(PlacesUtils.bookmarks.menuGuid, - [mozBmk.guid, fxBmk.guid, tbBmk.guid]); - - // As with setItemIndex, we should only track the folder if we reorder - // its children, but we should bump the score for every changed item. 
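// (Child order lives on the parent folder's record as an ordered children
// array, so a reorder dirties only the folder, while the score is still
// bumped once for each moved child because every move fires its own
// notification.)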
- yield verifyTrackedItems(["menu"]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemMoved_setItemIndex() { - _("Items with updated indices should be tracked"); - - try { - yield stopTracking(); - - let folder_id = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.bookmarksMenuFolder, - "Test folder", - PlacesUtils.bookmarks.DEFAULT_INDEX); - let folder_guid = engine._store.GUIDForId(folder_id); - _(`Folder GUID: ${folder_guid}`); - - let tb_id = PlacesUtils.bookmarks.insertBookmark( - folder_id, - Utils.makeURI("http://getthunderbird.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Thunderbird"); - let tb_guid = engine._store.GUIDForId(tb_id); - _(`Thunderbird GUID: ${tb_guid}`); - - let fx_id = PlacesUtils.bookmarks.insertBookmark( - folder_id, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Firefox"); - let fx_guid = engine._store.GUIDForId(fx_id); - _(`Firefox GUID: ${fx_guid}`); - - let moz_id = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.bookmarksMenuFolder, - Utils.makeURI("https://mozilla.org"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Mozilla" - ); - let moz_guid = engine._store.GUIDForId(moz_id); - _(`Mozilla GUID: ${moz_guid}`); - - yield startTracking(); - - // PlacesSortFolderByNameTransaction exercises - // PlacesUtils.bookmarks.setItemIndex. - let txn = new PlacesSortFolderByNameTransaction(folder_id); - - // We're reordering items within the same folder, so only the folder - // should be tracked. - _("Execute the sort folder transaction"); - txn.doTransaction(); - yield verifyTrackedItems([folder_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - yield resetTracker(); - - _("Undo the sort folder transaction"); - txn.undoTransaction(); - yield verifyTrackedItems([folder_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemDeleted_removeFolderTransaction() { - _("Folders removed in a transaction should be tracked"); - - try { - yield stopTracking(); - - _("Create a folder with two children"); - let folder_id = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.bookmarksMenuFolder, - "Test folder", - PlacesUtils.bookmarks.DEFAULT_INDEX); - let folder_guid = engine._store.GUIDForId(folder_id); - _(`Folder GUID: ${folder_guid}`); - let fx_id = PlacesUtils.bookmarks.insertBookmark( - folder_id, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Firefox!"); - let fx_guid = engine._store.GUIDForId(fx_id); - _(`Firefox GUID: ${fx_guid}`); - let tb_id = PlacesUtils.bookmarks.insertBookmark( - folder_id, - Utils.makeURI("http://getthunderbird.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Thunderbird!"); - let tb_guid = engine._store.GUIDForId(tb_id); - _(`Thunderbird GUID: ${tb_guid}`); - - yield startTracking(); - - let txn = PlacesUtils.bookmarks.getRemoveFolderTransaction(folder_id); - // We haven't executed the transaction yet. - yield verifyTrackerEmpty(); - - _("Execute the remove folder transaction"); - txn.doTransaction(); - yield verifyTrackedItems(["menu", folder_guid, fx_guid, tb_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 6); - yield resetTracker(); - - _("Undo the remove folder transaction"); - txn.undoTransaction(); - - // At this point, the restored folder has the same ID, but a different GUID. 
- let new_folder_guid = yield PlacesUtils.promiseItemGuid(folder_id); - - yield verifyTrackedItems(["menu", new_folder_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - yield resetTracker(); - - _("Redo the transaction"); - txn.redoTransaction(); - yield verifyTrackedItems(["menu", new_folder_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_treeMoved() { - _("Moving an entire tree of bookmarks should track the parents"); - - try { - // Create a couple of parent folders. - let folder1_id = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.bookmarksMenuFolder, - "First test folder", - PlacesUtils.bookmarks.DEFAULT_INDEX); - let folder1_guid = engine._store.GUIDForId(folder1_id); - - // A second folder in the first. - let folder2_id = PlacesUtils.bookmarks.createFolder( - folder1_id, - "Second test folder", - PlacesUtils.bookmarks.DEFAULT_INDEX); - let folder2_guid = engine._store.GUIDForId(folder2_id); - - // Create a couple of bookmarks in the second folder. - let fx_id = PlacesUtils.bookmarks.insertBookmark( - folder2_id, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Firefox!"); - let fx_guid = engine._store.GUIDForId(fx_id); - let tb_id = PlacesUtils.bookmarks.insertBookmark( - folder2_id, - Utils.makeURI("http://getthunderbird.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Thunderbird!"); - let tb_guid = engine._store.GUIDForId(tb_id); - - yield startTracking(); - - // Move folder 2 to be a sibling of folder1. - PlacesUtils.bookmarks.moveItem( - folder2_id, PlacesUtils.bookmarks.bookmarksMenuFolder, 0); - // the menu and both folders should be tracked, the children should not be. - yield verifyTrackedItems(['menu', folder1_guid, folder2_guid]); + do_check_true(tracker.changedIDs['menu'] > 0); + do_check_true(tracker.changedIDs['toolbar'] > 0); + do_check_eq(tracker.changedIDs[fx_guid], undefined); + do_check_true(tracker.changedIDs[tb_guid] > 0); do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemDeleted() { - _("Bookmarks deleted via the synchronous API should be tracked"); - - try { - let fx_id = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.bookmarksMenuFolder, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Firefox!"); - let fx_guid = engine._store.GUIDForId(fx_id); - let tb_id = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.bookmarksMenuFolder, - Utils.makeURI("http://getthunderbird.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Thunderbird!"); - let tb_guid = engine._store.GUIDForId(tb_id); - - yield startTracking(); - - // Delete the last item - the item and parent should be tracked. 
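// (A deletion dirties two records: the removed item, which Sync uploads as
// a tombstone, and its parent folder, whose children array shrinks.)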
- PlacesUtils.bookmarks.removeItem(tb_id); - - yield verifyTrackedItems(['menu', tb_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_async_onItemDeleted() { - _("Bookmarks deleted via the asynchronous API should be tracked"); - - try { - yield stopTracking(); - - let fxBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "http://getfirefox.com", - title: "Get Firefox!", - }); - let tbBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "http://getthunderbird.com", - title: "Get Thunderbird!", - }); - - yield startTracking(); - - _("Delete the first item"); - yield PlacesUtils.bookmarks.remove(fxBmk.guid); - yield verifyTrackedItems(["menu", fxBmk.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2); } finally { _("Clean up."); - yield cleanup(); + store.wipe(); + tracker.clearChangedIDs(); + tracker.resetScore(); + Svc.Obs.notify("weave:engine:stop-tracking"); } -}); - -add_task(function* test_async_onItemDeleted_eraseEverything() { - _("Erasing everything should track all deleted items"); - - try { - yield stopTracking(); - let fxBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.mobileGuid, - url: "http://getfirefox.com", - title: "Get Firefox!", - }); - _(`Firefox GUID: ${fxBmk.guid}`); - let tbBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.mobileGuid, - url: "http://getthunderbird.com", - title: "Get Thunderbird!", - }); - _(`Thunderbird GUID: ${tbBmk.guid}`); - let mozBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "https://mozilla.org", - title: "Mozilla", - }); - _(`Mozilla GUID: ${mozBmk.guid}`); - let mdnBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: PlacesUtils.bookmarks.menuGuid, - url: "https://developer.mozilla.org", - title: "MDN", - }); - _(`MDN GUID: ${mdnBmk.guid}`); - let bugsFolder = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_FOLDER, - parentGuid: PlacesUtils.bookmarks.toolbarGuid, - title: "Bugs", - }); - _(`Bugs folder GUID: ${bugsFolder.guid}`); - let bzBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: bugsFolder.guid, - url: "https://bugzilla.mozilla.org", - title: "Bugzilla", - }); - _(`Bugzilla GUID: ${bzBmk.guid}`); - let bugsChildFolder = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_FOLDER, - parentGuid: bugsFolder.guid, - title: "Bugs child", - }); - _(`Bugs child GUID: ${bugsChildFolder.guid}`); - let bugsGrandChildBmk = yield PlacesUtils.bookmarks.insert({ - type: PlacesUtils.bookmarks.TYPE_BOOKMARK, - parentGuid: bugsChildFolder.guid, - url: "https://example.com", - title: "Bugs grandchild", - }); - _(`Bugs grandchild GUID: ${bugsGrandChildBmk.guid}`); - - yield startTracking(); - - yield PlacesUtils.bookmarks.eraseEverything(); - - // `eraseEverything` removes all items from the database before notifying - // observers. Because of this, grandchild lookup in the tracker's - // `onItemRemoved` observer will fail. 
That means we won't track - // (bzBmk.guid, bugsGrandChildBmk.guid, bugsChildFolder.guid), even - // though we should. - yield verifyTrackedItems(["menu", mozBmk.guid, mdnBmk.guid, "toolbar", - bugsFolder.guid, "mobile", fxBmk.guid, - tbBmk.guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 10); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemDeleted_removeFolderChildren() { - _("Removing a folder's children should track the folder and its children"); - - try { - let fx_id = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.mobileFolderId, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Firefox!"); - let fx_guid = engine._store.GUIDForId(fx_id); - _(`Firefox GUID: ${fx_guid}`); - - let tb_id = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.mobileFolderId, - Utils.makeURI("http://getthunderbird.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Thunderbird!"); - let tb_guid = engine._store.GUIDForId(tb_id); - _(`Thunderbird GUID: ${tb_guid}`); - - let moz_id = PlacesUtils.bookmarks.insertBookmark( - PlacesUtils.bookmarks.bookmarksMenuFolder, - Utils.makeURI("https://mozilla.org"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Mozilla" - ); - let moz_guid = engine._store.GUIDForId(moz_id); - _(`Mozilla GUID: ${moz_guid}`); - - yield startTracking(); - - _(`Mobile root ID: ${PlacesUtils.mobileFolderId}`); - PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.mobileFolderId); - - yield verifyTrackedItems(["mobile", fx_guid, tb_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 4); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_onItemDeleted_tree() { - _("Deleting a tree of bookmarks should track all items"); - - try { - // Create a couple of parent folders. - let folder1_id = PlacesUtils.bookmarks.createFolder( - PlacesUtils.bookmarks.bookmarksMenuFolder, - "First test folder", - PlacesUtils.bookmarks.DEFAULT_INDEX); - let folder1_guid = engine._store.GUIDForId(folder1_id); - - // A second folder in the first. - let folder2_id = PlacesUtils.bookmarks.createFolder( - folder1_id, - "Second test folder", - PlacesUtils.bookmarks.DEFAULT_INDEX); - let folder2_guid = engine._store.GUIDForId(folder2_id); - - // Create a couple of bookmarks in the second folder. - let fx_id = PlacesUtils.bookmarks.insertBookmark( - folder2_id, - Utils.makeURI("http://getfirefox.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Firefox!"); - let fx_guid = engine._store.GUIDForId(fx_id); - let tb_id = PlacesUtils.bookmarks.insertBookmark( - folder2_id, - Utils.makeURI("http://getthunderbird.com"), - PlacesUtils.bookmarks.DEFAULT_INDEX, - "Get Thunderbird!"); - let tb_guid = engine._store.GUIDForId(tb_id); - - yield startTracking(); - - // Delete folder2 - everything we created should be tracked. - PlacesUtils.bookmarks.removeItem(folder2_id); - - yield verifyTrackedItems([fx_guid, tb_guid, folder1_guid, folder2_guid]); - do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 6); - } finally { - _("Clean up."); - yield cleanup(); - } -}); - -add_task(function* test_mobile_query() { - _("Ensure we correctly create the mobile query"); - - try { - // Creates the organizer queries as a side effect. 
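// (PlacesUIUtils.leftPaneFolderId is a lazy getter that builds the
// organizer's left-pane folders and queries on first access, so merely
// reading it is enough. findAnnoItems is a helper defined elsewhere in this
// test; a plausible sketch of its shape, assuming the nsIAnnotationService
// API of this era:
//   function findAnnoItems(anno, val) {
//     return PlacesUtils.annotations.getItemsWithAnnotation(anno, {})
//       .filter(id => PlacesUtils.annotations.getItemAnnotation(id, anno) == val);
//   }
// )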
- let leftPaneId = PlacesUIUtils.leftPaneFolderId; - _(`Left pane root ID: ${leftPaneId}`); - - let allBookmarksIds = findAnnoItems("PlacesOrganizer/OrganizerQuery", "AllBookmarks"); - equal(allBookmarksIds.length, 1, "Should create folder with all bookmarks queries"); - let allBookmarkGuid = yield PlacesUtils.promiseItemGuid(allBookmarksIds[0]); - - _("Try creating query after organizer is ready"); - tracker._ensureMobileQuery(); - let queryIds = findAnnoItems("PlacesOrganizer/OrganizerQuery", "MobileBookmarks"); - equal(queryIds.length, 0, "Should not create query without any mobile bookmarks"); - - _("Insert mobile bookmark, then create query"); - yield PlacesUtils.bookmarks.insert({ - parentGuid: PlacesUtils.bookmarks.mobileGuid, - url: "https://mozilla.org", - }); - tracker._ensureMobileQuery(); - queryIds = findAnnoItems("PlacesOrganizer/OrganizerQuery", "MobileBookmarks", {}); - equal(queryIds.length, 1, "Should create query once mobile bookmarks exist"); - - let queryId = queryIds[0]; - let queryGuid = yield PlacesUtils.promiseItemGuid(queryId); +} - let queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid); - equal(queryInfo.url, `place:folder=${PlacesUtils.mobileFolderId}`, "Query should point to mobile root"); - equal(queryInfo.title, "Mobile Bookmarks", "Query title should be localized"); - equal(queryInfo.parentGuid, allBookmarkGuid, "Should append mobile query to all bookmarks queries"); +function run_test() { + initTestLogging("Trace"); - _("Rename root and query, then recreate"); - yield PlacesUtils.bookmarks.update({ - guid: PlacesUtils.bookmarks.mobileGuid, - title: "renamed root", - }); - yield PlacesUtils.bookmarks.update({ - guid: queryGuid, - title: "renamed query", - }); - tracker._ensureMobileQuery(); - let rootInfo = yield PlacesUtils.bookmarks.fetch(PlacesUtils.bookmarks.mobileGuid); - equal(rootInfo.title, "Mobile Bookmarks", "Should fix root title"); - queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid); - equal(queryInfo.title, "Mobile Bookmarks", "Should fix query title"); + Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace; + Log.repository.getLogger("Sync.Store.Bookmarks").level = Log.Level.Trace; + Log.repository.getLogger("Sync.Tracker.Bookmarks").level = Log.Level.Trace; - _("Point query to different folder"); - yield PlacesUtils.bookmarks.update({ - guid: queryGuid, - url: "place:folder=BOOKMARKS_MENU", - }); - tracker._ensureMobileQuery(); - queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid); - equal(queryInfo.url.href, `place:folder=${PlacesUtils.mobileFolderId}`, - "Should fix query URL to point to mobile root"); + test_tracking(); + test_onItemChanged(); + test_onItemMoved(); +} - _("We shouldn't track the query or the left pane root"); - yield verifyTrackedCount(0); - do_check_eq(tracker.score, 0); - } finally { - _("Clean up."); - yield cleanup(); - } -}); diff --git a/services/sync/tests/unit/test_bookmark_validator.js b/services/sync/tests/unit/test_bookmark_validator.js deleted file mode 100644 index cc0b3b08f..000000000 --- a/services/sync/tests/unit/test_bookmark_validator.js +++ /dev/null @@ -1,347 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. 
- http://creativecommons.org/publicdomain/zero/1.0/ */ - -Components.utils.import("resource://services-sync/bookmark_validator.js"); -Components.utils.import("resource://services-sync/util.js"); - -function inspectServerRecords(data) { - return new BookmarkValidator().inspectServerRecords(data); -} - -add_test(function test_isr_rootOnServer() { - let c = inspectServerRecords([{ - id: 'places', - type: 'folder', - children: [], - }]); - ok(c.problemData.rootOnServer); - run_next_test(); -}); - -add_test(function test_isr_empty() { - let c = inspectServerRecords([]); - ok(!c.problemData.rootOnServer); - notEqual(c.root, null); - run_next_test(); -}); - -add_test(function test_isr_cycles() { - let c = inspectServerRecords([ - {id: 'C', type: 'folder', children: ['A', 'B'], parentid: 'places'}, - {id: 'A', type: 'folder', children: ['B'], parentid: 'B'}, - {id: 'B', type: 'folder', children: ['A'], parentid: 'A'}, - ]).problemData; - - equal(c.cycles.length, 1); - ok(c.cycles[0].indexOf('A') >= 0); - ok(c.cycles[0].indexOf('B') >= 0); - run_next_test(); -}); - -add_test(function test_isr_orphansMultiParents() { - let c = inspectServerRecords([ - { id: 'A', type: 'bookmark', parentid: 'D' }, - { id: 'B', type: 'folder', parentid: 'places', children: ['A']}, - { id: 'C', type: 'folder', parentid: 'places', children: ['A']}, - - ]).problemData; - deepEqual(c.orphans, [{ id: "A", parent: "D" }]); - equal(c.multipleParents.length, 1) - ok(c.multipleParents[0].parents.indexOf('B') >= 0); - ok(c.multipleParents[0].parents.indexOf('C') >= 0); - run_next_test(); -}); - -add_test(function test_isr_orphansMultiParents2() { - let c = inspectServerRecords([ - { id: 'A', type: 'bookmark', parentid: 'D' }, - { id: 'B', type: 'folder', parentid: 'places', children: ['A']}, - ]).problemData; - equal(c.orphans.length, 1); - equal(c.orphans[0].id, 'A'); - equal(c.multipleParents.length, 0); - run_next_test(); -}); - -add_test(function test_isr_deletedParents() { - let c = inspectServerRecords([ - { id: 'A', type: 'bookmark', parentid: 'B' }, - { id: 'B', type: 'folder', parentid: 'places', children: ['A']}, - { id: 'B', type: 'item', deleted: true}, - ]).problemData; - deepEqual(c.deletedParents, ['A']) - run_next_test(); -}); - -add_test(function test_isr_badChildren() { - let c = inspectServerRecords([ - { id: 'A', type: 'bookmark', parentid: 'places', children: ['B', 'C'] }, - { id: 'C', type: 'bookmark', parentid: 'A' } - ]).problemData; - deepEqual(c.childrenOnNonFolder, ['A']) - deepEqual(c.missingChildren, [{parent: 'A', child: 'B'}]); - deepEqual(c.parentNotFolder, ['C']); - run_next_test(); -}); - - -add_test(function test_isr_parentChildMismatches() { - let c = inspectServerRecords([ - { id: 'A', type: 'folder', parentid: 'places', children: [] }, - { id: 'B', type: 'bookmark', parentid: 'A' } - ]).problemData; - deepEqual(c.parentChildMismatches, [{parent: 'A', child: 'B'}]); - run_next_test(); -}); - -add_test(function test_isr_duplicatesAndMissingIDs() { - let c = inspectServerRecords([ - {id: 'A', type: 'folder', parentid: 'places', children: []}, - {id: 'A', type: 'folder', parentid: 'places', children: []}, - {type: 'folder', parentid: 'places', children: []} - ]).problemData; - equal(c.missingIDs, 1); - deepEqual(c.duplicates, ['A']); - run_next_test(); -}); - -add_test(function test_isr_duplicateChildren() { - let c = inspectServerRecords([ - {id: 'A', type: 'folder', parentid: 'places', children: ['B', 'B']}, - {id: 'B', type: 'bookmark', parentid: 'A'}, - ]).problemData; - 
deepEqual(c.duplicateChildren, ['A']); - run_next_test(); -}); - -// Each compareServerWithClient test mutates these, so we can't just keep them -// global -function getDummyServerAndClient() { - let server = [ - { - id: 'menu', - parentid: 'places', - type: 'folder', - parentName: '', - title: 'foo', - children: ['bbbbbbbbbbbb', 'cccccccccccc'] - }, - { - id: 'bbbbbbbbbbbb', - type: 'bookmark', - parentid: 'menu', - parentName: 'foo', - title: 'bar', - bmkUri: 'http://baz.com' - }, - { - id: 'cccccccccccc', - parentid: 'menu', - parentName: 'foo', - title: '', - type: 'query', - bmkUri: 'place:type=6&sort=14&maxResults=10' - } - ]; - - let client = { - "guid": "root________", - "title": "", - "id": 1, - "type": "text/x-moz-place-container", - "children": [ - { - "guid": "menu________", - "title": "foo", - "id": 1000, - "type": "text/x-moz-place-container", - "children": [ - { - "guid": "bbbbbbbbbbbb", - "title": "bar", - "id": 1001, - "type": "text/x-moz-place", - "uri": "http://baz.com" - }, - { - "guid": "cccccccccccc", - "title": "", - "id": 1002, - "annos": [{ - "name": "Places/SmartBookmark", - "flags": 0, - "expires": 4, - "value": "RecentTags" - }], - "type": "text/x-moz-place", - "uri": "place:type=6&sort=14&maxResults=10" - } - ] - } - ] - }; - return {server, client}; -} - - -add_test(function test_cswc_valid() { - let {server, client} = getDummyServerAndClient(); - - let c = new BookmarkValidator().compareServerWithClient(server, client).problemData; - equal(c.clientMissing.length, 0); - equal(c.serverMissing.length, 0); - equal(c.differences.length, 0); - run_next_test(); -}); - -add_test(function test_cswc_serverMissing() { - let {server, client} = getDummyServerAndClient(); - // remove c - server.pop(); - server[0].children.pop(); - - let c = new BookmarkValidator().compareServerWithClient(server, client).problemData; - deepEqual(c.serverMissing, ['cccccccccccc']); - equal(c.clientMissing.length, 0); - deepEqual(c.structuralDifferences, [{id: 'menu', differences: ['childGUIDs']}]); - run_next_test(); -}); - -add_test(function test_cswc_clientMissing() { - let {server, client} = getDummyServerAndClient(); - client.children[0].children.pop(); - - let c = new BookmarkValidator().compareServerWithClient(server, client).problemData; - deepEqual(c.clientMissing, ['cccccccccccc']); - equal(c.serverMissing.length, 0); - deepEqual(c.structuralDifferences, [{id: 'menu', differences: ['childGUIDs']}]); - run_next_test(); -}); - -add_test(function test_cswc_differences() { - { - let {server, client} = getDummyServerAndClient(); - client.children[0].children[0].title = 'asdf'; - let c = new BookmarkValidator().compareServerWithClient(server, client).problemData; - equal(c.clientMissing.length, 0); - equal(c.serverMissing.length, 0); - deepEqual(c.differences, [{id: 'bbbbbbbbbbbb', differences: ['title']}]); - } - - { - let {server, client} = getDummyServerAndClient(); - server[2].type = 'bookmark'; - let c = new BookmarkValidator().compareServerWithClient(server, client).problemData; - equal(c.clientMissing.length, 0); - equal(c.serverMissing.length, 0); - deepEqual(c.differences, [{id: 'cccccccccccc', differences: ['type']}]); - } - run_next_test(); -}); - -add_test(function test_cswc_serverUnexpected() { - let {server, client} = getDummyServerAndClient(); - client.children.push({ - "guid": "dddddddddddd", - "title": "", - "id": 2000, - "annos": [{ - "name": "places/excludeFromBackup", - "flags": 0, - "expires": 4, - "value": 1 - }, { - "name": "PlacesOrganizer/OrganizerFolder", - 
"flags": 0, - "expires": 4, - "value": 7 - }], - "type": "text/x-moz-place-container", - "children": [{ - "guid": "eeeeeeeeeeee", - "title": "History", - "annos": [{ - "name": "places/excludeFromBackup", - "flags": 0, - "expires": 4, - "value": 1 - }, { - "name": "PlacesOrganizer/OrganizerQuery", - "flags": 0, - "expires": 4, - "value": "History" - }], - "type": "text/x-moz-place", - "uri": "place:type=3&sort=4" - }] - }); - server.push({ - id: 'dddddddddddd', - parentid: 'places', - parentName: '', - title: '', - type: 'folder', - children: ['eeeeeeeeeeee'] - }, { - id: 'eeeeeeeeeeee', - parentid: 'dddddddddddd', - parentName: '', - title: 'History', - type: 'query', - bmkUri: 'place:type=3&sort=4' - }); - - let c = new BookmarkValidator().compareServerWithClient(server, client).problemData; - equal(c.clientMissing.length, 0); - equal(c.serverMissing.length, 0); - equal(c.serverUnexpected.length, 2); - deepEqual(c.serverUnexpected, ["dddddddddddd", "eeeeeeeeeeee"]); - run_next_test(); -}); - -function validationPing(server, client, duration) { - return wait_for_ping(function() { - // fake this entirely - Svc.Obs.notify("weave:service:sync:start"); - Svc.Obs.notify("weave:engine:sync:start", null, "bookmarks"); - Svc.Obs.notify("weave:engine:sync:finish", null, "bookmarks"); - let validator = new BookmarkValidator(); - let data = { - // We fake duration and version just so that we can verify they're passed through. - duration, - version: validator.version, - recordCount: server.length, - problems: validator.compareServerWithClient(server, client).problemData, - }; - Svc.Obs.notify("weave:engine:validate:finish", data, "bookmarks"); - Svc.Obs.notify("weave:service:sync:finish"); - }, true); // Allow "failing" pings, since having validation info indicates failure. 
-} - -add_task(function *test_telemetry_integration() { - let {server, client} = getDummyServerAndClient(); - // remove "c" - server.pop(); - server[0].children.pop(); - const duration = 50; - let ping = yield validationPing(server, client, duration); - ok(ping.engines); - let bme = ping.engines.find(e => e.name === "bookmarks"); - ok(bme); - ok(bme.validation); - ok(bme.validation.problems) - equal(bme.validation.checked, server.length); - equal(bme.validation.took, duration); - bme.validation.problems.sort((a, b) => String.localeCompare(a.name, b.name)); - equal(bme.validation.version, new BookmarkValidator().version); - deepEqual(bme.validation.problems, [ - { name: "badClientRoots", count: 3 }, - { name: "sdiff:childGUIDs", count: 1 }, - { name: "serverMissing", count: 1 }, - { name: "structuralDifferences", count: 1 }, - ]); -}); - -function run_test() { - run_next_test(); -} diff --git a/services/sync/tests/unit/test_browserid_identity.js b/services/sync/tests/unit/test_browserid_identity.js index 531c01bf6..f3cde9f8f 100644 --- a/services/sync/tests/unit/test_browserid_identity.js +++ b/services/sync/tests/unit/test_browserid_identity.js @@ -16,14 +16,13 @@ Cu.import("resource://gre/modules/FxAccountsCommon.js"); Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/status.js"); Cu.import("resource://services-sync/constants.js"); -Cu.import("resource://services-common/tokenserverclient.js"); const SECOND_MS = 1000; const MINUTE_MS = SECOND_MS * 60; const HOUR_MS = MINUTE_MS * 60; -var identityConfig = makeIdentityConfig(); -var browseridManager = new BrowserIDManager(); +let identityConfig = makeIdentityConfig(); +let browseridManager = new BrowserIDManager(); configureFxAccountIdentity(browseridManager, identityConfig); /** @@ -32,14 +31,11 @@ configureFxAccountIdentity(browseridManager, identityConfig); * headers. We will use this to test clock skew compensation in these headers * below. */ -var MockFxAccountsClient = function() { +let MockFxAccountsClient = function() { FxAccountsClient.apply(this); }; MockFxAccountsClient.prototype = { - __proto__: FxAccountsClient.prototype, - accountStatus() { - return Promise.resolve(true); - } + __proto__: FxAccountsClient.prototype }; function MockFxAccounts() { @@ -77,7 +73,7 @@ add_test(function test_initial_state() { } ); -add_task(function* test_initialializeWithCurrentIdentity() { +add_task(function test_initialializeWithCurrentIdentity() { _("Verify start after initializeWithCurrentIdentity"); browseridManager.initializeWithCurrentIdentity(); yield browseridManager.whenReadyToAuthenticate.promise; @@ -87,57 +83,7 @@ add_task(function* test_initialializeWithCurrentIdentity() { } ); -add_task(function* test_initialializeWithAuthErrorAndDeletedAccount() { - _("Verify sync unpair after initializeWithCurrentIdentity with auth error + account deleted"); - - var identityConfig = makeIdentityConfig(); - var browseridManager = new BrowserIDManager(); - - // Use the real `_getAssertion` method that calls - // `mockFxAClient.signCertificate`. 
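// (makeFxAccountsInternalMock presumably shadows _getAssertion with a stub;
// deleting that own property below un-shadows it, so the real prototype
// implementation runs and ends up calling signCertificate on the mocked
// client.)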
- let fxaInternal = makeFxAccountsInternalMock(identityConfig); - delete fxaInternal._getAssertion; - - configureFxAccountIdentity(browseridManager, identityConfig, fxaInternal); - browseridManager._fxaService.internal.initialize(); - - let signCertificateCalled = false; - let accountStatusCalled = false; - - let MockFxAccountsClient = function() { - FxAccountsClient.apply(this); - }; - MockFxAccountsClient.prototype = { - __proto__: FxAccountsClient.prototype, - signCertificate() { - signCertificateCalled = true; - return Promise.reject({ - code: 401, - errno: ERRNO_INVALID_AUTH_TOKEN, - }); - }, - accountStatus() { - accountStatusCalled = true; - return Promise.resolve(false); - } - }; - - let mockFxAClient = new MockFxAccountsClient(); - browseridManager._fxaService.internal._fxAccountsClient = mockFxAClient; - - yield browseridManager.initializeWithCurrentIdentity(); - yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise, - "should reject due to an auth error"); - - do_check_true(signCertificateCalled); - do_check_true(accountStatusCalled); - do_check_false(browseridManager.account); - do_check_false(browseridManager._token); - do_check_false(browseridManager.hasValidToken()); - do_check_false(browseridManager.account); -}); - -add_task(function* test_initialializeWithNoKeys() { +add_task(function test_initialializeWithNoKeys() { _("Verify start after initializeWithCurrentIdentity without kA, kB or keyFetchToken"); let identityConfig = makeIdentityConfig(); delete identityConfig.fxaccount.user.kA; @@ -306,7 +252,7 @@ add_test(function test_RESTResourceAuthenticatorSkew() { run_next_test(); }); -add_task(function* test_ensureLoggedIn() { +add_task(function test_ensureLoggedIn() { configureFxAccountIdentity(browseridManager); yield browseridManager.initializeWithCurrentIdentity(); yield browseridManager.whenReadyToAuthenticate.promise; @@ -318,8 +264,8 @@ add_task(function* test_ensureLoggedIn() { // arrange for no logged in user. let fxa = browseridManager._fxaService - let signedInUser = fxa.internal.currentAccountState.storageManager.accountData; - fxa.internal.currentAccountState.storageManager.accountData = null; + let signedInUser = fxa.internal.currentAccountState.signedInUser; + fxa.internal.currentAccountState.signedInUser = null; browseridManager.initializeWithCurrentIdentity(); Assert.ok(!browseridManager._shouldHaveSyncKeyBundle, "_shouldHaveSyncKeyBundle should be false so we know we are testing what we think we are."); @@ -327,8 +273,7 @@ add_task(function* test_ensureLoggedIn() { yield Assert.rejects(browseridManager.ensureLoggedIn(), "expecting rejection due to no user"); Assert.ok(browseridManager._shouldHaveSyncKeyBundle, "_shouldHaveSyncKeyBundle should always be true after ensureLogin completes."); - // Restore the logged in user to what it was. 
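// (Two account-state layouts appear in this hunk: newer FxAccounts keeps
// account data behind currentAccountState.storageManager.accountData,
// while the Tycho-era code stores it directly as
// currentAccountState.signedInUser, which is what these tests are being
// rewritten against.)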
- fxa.internal.currentAccountState.storageManager.accountData = signedInUser; + fxa.internal.currentAccountState.signedInUser = signedInUser; Status.login = LOGIN_FAILED_LOGIN_REJECTED; yield Assert.rejects(browseridManager.ensureLoggedIn(), "LOGIN_FAILED_LOGIN_REJECTED should have caused immediate rejection"); @@ -404,7 +349,7 @@ add_test(function test_computeXClientStateHeader() { run_next_test(); }); -add_task(function* test_getTokenErrors() { +add_task(function test_getTokenErrors() { _("BrowserIDManager correctly handles various failures to get a token."); _("Arrange for a 401 - Sync should reflect an auth error."); @@ -437,75 +382,7 @@ add_task(function* test_getTokenErrors() { Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login state is LOGIN_FAILED_NETWORK_ERROR"); }); -add_task(function* test_refreshCertificateOn401() { - _("BrowserIDManager refreshes the FXA certificate after a 401."); - var identityConfig = makeIdentityConfig(); - var browseridManager = new BrowserIDManager(); - // Use the real `_getAssertion` method that calls - // `mockFxAClient.signCertificate`. - let fxaInternal = makeFxAccountsInternalMock(identityConfig); - delete fxaInternal._getAssertion; - configureFxAccountIdentity(browseridManager, identityConfig, fxaInternal); - browseridManager._fxaService.internal.initialize(); - - let getCertCount = 0; - - let MockFxAccountsClient = function() { - FxAccountsClient.apply(this); - }; - MockFxAccountsClient.prototype = { - __proto__: FxAccountsClient.prototype, - signCertificate() { - ++getCertCount; - } - }; - - let mockFxAClient = new MockFxAccountsClient(); - browseridManager._fxaService.internal._fxAccountsClient = mockFxAClient; - - let didReturn401 = false; - let didReturn200 = false; - let mockTSC = mockTokenServer(() => { - if (getCertCount <= 1) { - didReturn401 = true; - return { - status: 401, - headers: {"content-type": "application/json"}, - body: JSON.stringify({}), - }; - } else { - didReturn200 = true; - return { - status: 200, - headers: {"content-type": "application/json"}, - body: JSON.stringify({ - id: "id", - key: "key", - api_endpoint: "http://example.com/", - uid: "uid", - duration: 300, - }) - }; - } - }); - - browseridManager._tokenServerClient = mockTSC; - - yield browseridManager.initializeWithCurrentIdentity(); - yield browseridManager.whenReadyToAuthenticate.promise; - - do_check_eq(getCertCount, 2); - do_check_true(didReturn401); - do_check_true(didReturn200); - do_check_true(browseridManager.account); - do_check_true(browseridManager._token); - do_check_true(browseridManager.hasValidToken()); - do_check_true(browseridManager.account); -}); - - - -add_task(function* test_getTokenErrorWithRetry() { +add_task(function test_getTokenErrorWithRetry() { _("tokenserver sends an observer notification on various backoff headers."); // Set Sync's backoffInterval to zero - after we simulated the backoff header @@ -547,7 +424,7 @@ add_task(function* test_getTokenErrorWithRetry() { Assert.ok(Status.backoffInterval >= 200000); }); -add_task(function* test_getKeysErrorWithBackoff() { +add_task(function test_getKeysErrorWithBackoff() { _("Auth server (via hawk) sends an observer notification on backoff headers."); // Set Sync's backoffInterval to zero - after we simulated the backoff header @@ -581,7 +458,7 @@ add_task(function* test_getKeysErrorWithBackoff() { Assert.ok(Status.backoffInterval >= 100000); }); -add_task(function* test_getKeysErrorWithRetry() { +add_task(function test_getKeysErrorWithRetry() { _("Auth server (via hawk) sends an 
observer notification on retry headers."); // Set Sync's backoffInterval to zero - after we simulated the backoff header @@ -615,7 +492,7 @@ add_task(function* test_getKeysErrorWithRetry() { Assert.ok(Status.backoffInterval >= 100000); }); -add_task(function* test_getHAWKErrors() { +add_task(function test_getHAWKErrors() { _("BrowserIDManager correctly handles various HAWK failures."); _("Arrange for a 401 - Sync should reflect an auth error."); @@ -648,7 +525,7 @@ add_task(function* test_getHAWKErrors() { Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login state is LOGIN_FAILED_NETWORK_ERROR"); }); -add_task(function* test_getGetKeysFailing401() { +add_task(function test_getGetKeysFailing401() { _("BrowserIDManager correctly handles 401 responses fetching keys."); _("Arrange for a 401 - Sync should reflect an auth error."); @@ -669,7 +546,7 @@ add_task(function* test_getGetKeysFailing401() { Assert.equal(Status.login, LOGIN_FAILED_LOGIN_REJECTED, "login was rejected"); }); -add_task(function* test_getGetKeysFailing503() { +add_task(function test_getGetKeysFailing503() { _("BrowserIDManager correctly handles 5XX responses fetching keys."); _("Arrange for a 503 - Sync should reflect a network error."); @@ -690,7 +567,7 @@ add_task(function* test_getGetKeysFailing503() { Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "state reflects network error"); }); -add_task(function* test_getKeysMissing() { +add_task(function test_getKeysMissing() { _("BrowserIDManager correctly handles getKeys succeeding but not returning keys."); let browseridManager = new BrowserIDManager(); @@ -708,17 +585,7 @@ add_task(function* test_getKeysMissing() { fetchAndUnwrapKeys: function () { return Promise.resolve({}); }, - fxAccountsClient: new MockFxAccountsClient(), - newAccountState(credentials) { - // We only expect this to be called with null indicating the (mock) - // storage should be read. - if (credentials) { - throw new Error("Not expecting to have credentials passed"); - } - let storageManager = new MockFxaStorageManager(); - storageManager.initialize(identityConfig.fxaccount.user); - return new AccountState(storageManager); - }, + fxAccountsClient: new MockFxAccountsClient() }); // Add a mock to the currentAccountState object. @@ -730,6 +597,9 @@ add_task(function* test_getKeysMissing() { return Promise.resolve(this.cert.cert); }; + // Ensure the new FxAccounts mock has a signed-in user. + fxa.internal.currentAccountState.signedInUser = browseridManager._fxaService.internal.currentAccountState.signedInUser; + browseridManager._fxaService = fxa; yield browseridManager.initializeWithCurrentIdentity(); @@ -744,41 +614,6 @@ add_task(function* test_getKeysMissing() { Assert.ok(ex.message.indexOf("missing kA or kB") >= 0); }); -add_task(function* test_signedInUserMissing() { - _("BrowserIDManager detects getSignedInUser returning incomplete account data"); - - let browseridManager = new BrowserIDManager(); - let config = makeIdentityConfig(); - // Delete stored keys and the key fetch token. - delete identityConfig.fxaccount.user.kA; - delete identityConfig.fxaccount.user.kB; - delete identityConfig.fxaccount.user.keyFetchToken; - - configureFxAccountIdentity(browseridManager, identityConfig); - - let fxa = new FxAccounts({ - fetchAndUnwrapKeys: function () { - return Promise.resolve({}); - }, - fxAccountsClient: new MockFxAccountsClient(), - newAccountState(credentials) { - // We only expect this to be called with null indicating the (mock) - // storage should be read. 
- if (credentials) { - throw new Error("Not expecting to have credentials passed"); - } - let storageManager = new MockFxaStorageManager(); - storageManager.initialize(identityConfig.fxaccount.user); - return new AccountState(storageManager); - }, - }); - - browseridManager._fxaService = fxa; - - let status = yield browseridManager.unlockAndVerifyAuthState(); - Assert.equal(status, LOGIN_FAILED_LOGIN_REJECTED); -}); - // End of tests // Utility functions follow @@ -803,17 +638,7 @@ function* initializeIdentityWithHAWKResponseFactory(config, cbGetResponse) { callback.call(this); }, get: function(callback) { - // Skip /status requests (browserid_identity checks if the account still - // exists after an auth error) - if (this._uri.startsWith("http://mockedserver:9999/account/status")) { - this.response = { - status: 200, - headers: {"content-type": "application/json"}, - body: JSON.stringify({exists: true}), - }; - } else { - this.response = cbGetResponse("get", null, this._uri, this._credentials, this._extra); - } + this.response = cbGetResponse("get", null, this._uri, this._credentials, this._extra); callback.call(this); } } @@ -833,18 +658,11 @@ function* initializeIdentityWithHAWKResponseFactory(config, cbGetResponse) { fxaClient.hawk = new MockedHawkClient(); let internal = { fxAccountsClient: fxaClient, - newAccountState(credentials) { - // We only expect this to be called with null indicating the (mock) - // storage should be read. - if (credentials) { - throw new Error("Not expecting to have credentials passed"); - } - let storageManager = new MockFxaStorageManager(); - storageManager.initialize(config.fxaccount.user); - return new AccountState(storageManager); - }, } let fxa = new FxAccounts(internal); + fxa.internal.currentAccountState.signedInUser = { + accountData: config.fxaccount.user, + }; browseridManager._fxaService = fxa; browseridManager._signedInUser = null; @@ -862,29 +680,3 @@ function getTimestampDelta(hawkAuthHeader, now=Date.now()) { return Math.abs(getTimestamp(hawkAuthHeader) - now); } -function mockTokenServer(func) { - let requestLog = Log.repository.getLogger("testing.mock-rest"); - if (!requestLog.appenders.length) { // might as well see what it says :) - requestLog.addAppender(new Log.DumpAppender()); - requestLog.level = Log.Level.Trace; - } - function MockRESTRequest(url) {}; - MockRESTRequest.prototype = { - _log: requestLog, - setHeader: function() {}, - get: function(callback) { - this.response = func(); - callback.call(this); - } - } - // The mocked TokenServer client which will get the response. - function MockTSC() { } - MockTSC.prototype = new TokenServerClient(); - MockTSC.prototype.constructor = MockTSC; - MockTSC.prototype.newRESTRequest = function(url) { - return new MockRESTRequest(url); - } - // Arrange for the same observerPrefix as browserid_identity uses. 
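An aside on the mockTokenServer helper being removed in this hunk: it wrapped every token-server request in a synchronous stub. A minimal sketch of that response-stubbing pattern, with illustrative names (makeStubRequest and cannedResponse are not part of the tree):

// Minimal sketch of the synchronous request stub the removed helper used:
// the canned response is recorded on the request object and the callback
// fires immediately, so the test never touches the network.
function makeStubRequest(cannedResponse) {
  return {
    setHeader: function() {},
    get: function(callback) {
      this.response = cannedResponse; // e.g. {status: 200, headers: {}, body: "{}"}
      callback.call(this);            // complete "immediately"
    }
  };
}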
- MockTSC.prototype.observerPrefix = "weave:service"; - return new MockTSC(); -} diff --git a/services/sync/tests/unit/test_clients_engine.js b/services/sync/tests/unit/test_clients_engine.js index d2123f80a..919913f82 100644 --- a/services/sync/tests/unit/test_clients_engine.js +++ b/services/sync/tests/unit/test_clients_engine.js @@ -12,7 +12,7 @@ Cu.import("resource://testing-common/services/sync/utils.js"); const MORE_THAN_CLIENTS_TTL_REFRESH = 691200; // 8 days const LESS_THAN_CLIENTS_TTL_REFRESH = 86400; // 1 day -var engine = Service.clientsEngine; +let engine = Service.clientsEngine; /** * Unpack the record with this ID, and verify that it has the same version that @@ -31,10 +31,10 @@ function check_record_version(user, id) { let cleartext = rec.decrypt(Service.collectionKeys.keyForCollection("clients")); _("Payload is " + JSON.stringify(cleartext)); - equal(Services.appinfo.version, cleartext.version); - equal(2, cleartext.protocols.length); - equal("1.1", cleartext.protocols[0]); - equal("1.5", cleartext.protocols[1]); + do_check_eq(Services.appinfo.version, cleartext.version); + do_check_eq(2, cleartext.protocols.length); + do_check_eq("1.1", cleartext.protocols[0]); + do_check_eq("1.5", cleartext.protocols[1]); } add_test(function test_bad_hmac() { @@ -64,7 +64,7 @@ add_test(function test_bad_hmac() { let coll = user.collection("clients"); // Treat a non-existent collection as empty. - equal(expectedCount, coll ? coll.count() : 0, stack); + do_check_eq(expectedCount, coll ? coll.count() : 0, stack); } function check_client_deleted(id) { @@ -77,7 +77,7 @@ add_test(function test_bad_hmac() { generateNewKeys(Service.collectionKeys); let serverKeys = Service.collectionKeys.asWBO("crypto", "keys"); serverKeys.encrypt(Service.identity.syncKeyBundle); - ok(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success); + do_check_true(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success); } try { @@ -89,11 +89,11 @@ add_test(function test_bad_hmac() { generateNewKeys(Service.collectionKeys); _("First sync, client record is uploaded"); - equal(engine.lastRecordUpload, 0); + do_check_eq(engine.lastRecordUpload, 0); check_clients_count(0); engine._sync(); check_clients_count(1); - ok(engine.lastRecordUpload > 0); + do_check_true(engine.lastRecordUpload > 0); // Our uploaded record has a version. 
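Most of the test_clients_engine.js churn that follows is a mechanical swap from the newer Assert-style helpers back to the older xpcshell forms. The correspondence, summarized here for reference rather than taken from the diff itself:

// equal(a, b)      -> do_check_eq(a, b)
// notEqual(a, b)   -> do_check_neq(a, b)
// ok(x) / ok(!x)   -> do_check_true(x) / do_check_false(x)
// deepEqual(a, b)  -> do_check_array_eq(a, b) for arrays; otherwise the
//                     weaker do_check_eq(a, b), as several hunks below show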
check_record_version(user, engine.localID); @@ -109,7 +109,7 @@ add_test(function test_bad_hmac() { generateNewKeys(Service.collectionKeys); let serverKeys = Service.collectionKeys.asWBO("crypto", "keys"); serverKeys.encrypt(Service.identity.syncKeyBundle); - ok(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success); + do_check_true(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success); _("Sync."); engine._sync(); @@ -130,8 +130,8 @@ add_test(function test_bad_hmac() { engine._sync(); _("Old record was not deleted, new one uploaded."); - equal(deletedCollections.length, 0); - equal(deletedItems.length, 0); + do_check_eq(deletedCollections.length, 0); + do_check_eq(deletedItems.length, 0); check_clients_count(2); _("Now try the scenario where our keys are wrong *and* there's a bad record."); @@ -162,14 +162,14 @@ add_test(function test_bad_hmac() { generateNewKeys(Service.collectionKeys); let oldKey = Service.collectionKeys.keyForCollection(); - equal(deletedCollections.length, 0); - equal(deletedItems.length, 0); + do_check_eq(deletedCollections.length, 0); + do_check_eq(deletedItems.length, 0); engine._sync(); - equal(deletedItems.length, 1); + do_check_eq(deletedItems.length, 1); check_client_deleted(oldLocalID); check_clients_count(1); let newKey = Service.collectionKeys.keyForCollection(); - ok(!oldKey.equals(newKey)); + do_check_false(oldKey.equals(newKey)); } finally { Svc.Prefs.resetBranch(""); @@ -181,91 +181,18 @@ add_test(function test_bad_hmac() { add_test(function test_properties() { _("Test lastRecordUpload property"); try { - equal(Svc.Prefs.get("clients.lastRecordUpload"), undefined); - equal(engine.lastRecordUpload, 0); + do_check_eq(Svc.Prefs.get("clients.lastRecordUpload"), undefined); + do_check_eq(engine.lastRecordUpload, 0); let now = Date.now(); engine.lastRecordUpload = now / 1000; - equal(engine.lastRecordUpload, Math.floor(now / 1000)); + do_check_eq(engine.lastRecordUpload, Math.floor(now / 1000)); } finally { Svc.Prefs.resetBranch(""); run_next_test(); } }); -add_test(function test_full_sync() { - _("Ensure that Clients engine fetches all records for each sync."); - - let now = Date.now() / 1000; - let contents = { - meta: {global: {engines: {clients: {version: engine.version, - syncID: engine.syncID}}}}, - clients: {}, - crypto: {} - }; - let server = serverForUsers({"foo": "password"}, contents); - let user = server.user("foo"); - - new SyncTestingInfrastructure(server.server); - generateNewKeys(Service.collectionKeys); - - let activeID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(activeID, encryptPayload({ - id: activeID, - name: "Active client", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - let deletedID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(deletedID, encryptPayload({ - id: deletedID, - name: "Client to delete", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - try { - let store = engine._store; - - _("First sync. 
2 records downloaded; our record uploaded."); - strictEqual(engine.lastRecordUpload, 0); - engine._sync(); - ok(engine.lastRecordUpload > 0); - deepEqual(user.collection("clients").keys().sort(), - [activeID, deletedID, engine.localID].sort(), - "Our record should be uploaded on first sync"); - deepEqual(Object.keys(store.getAllIDs()).sort(), - [activeID, deletedID, engine.localID].sort(), - "Other clients should be downloaded on first sync"); - - _("Delete a record, then sync again"); - let collection = server.getCollection("foo", "clients"); - collection.remove(deletedID); - // Simulate a timestamp update in info/collections. - engine.lastModified = now; - engine._sync(); - - _("Record should be updated"); - deepEqual(Object.keys(store.getAllIDs()).sort(), - [activeID, engine.localID].sort(), - "Deleted client should be removed on next sync"); - } finally { - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - - try { - server.deleteCollections("foo"); - } finally { - server.stop(run_next_test); - } - } -}); - add_test(function test_sync() { _("Ensure that Clients engine uploads a new client record once a week."); @@ -288,30 +215,30 @@ add_test(function test_sync() { try { _("First sync. Client record is uploaded."); - equal(clientWBO(), undefined); - equal(engine.lastRecordUpload, 0); + do_check_eq(clientWBO(), undefined); + do_check_eq(engine.lastRecordUpload, 0); engine._sync(); - ok(!!clientWBO().payload); - ok(engine.lastRecordUpload > 0); + do_check_true(!!clientWBO().payload); + do_check_true(engine.lastRecordUpload > 0); _("Let's time travel more than a week back, new record should've been uploaded."); engine.lastRecordUpload -= MORE_THAN_CLIENTS_TTL_REFRESH; let lastweek = engine.lastRecordUpload; clientWBO().payload = undefined; engine._sync(); - ok(!!clientWBO().payload); - ok(engine.lastRecordUpload > lastweek); + do_check_true(!!clientWBO().payload); + do_check_true(engine.lastRecordUpload > lastweek); _("Remove client record."); engine.removeClientData(); - equal(clientWBO().payload, undefined); + do_check_eq(clientWBO().payload, undefined); _("Time travel one day back, no record uploaded."); engine.lastRecordUpload -= LESS_THAN_CLIENTS_TTL_REFRESH; let yesterday = engine.lastRecordUpload; engine._sync(); - equal(clientWBO().payload, undefined); - equal(engine.lastRecordUpload, yesterday); + do_check_eq(clientWBO().payload, undefined); + do_check_eq(engine.lastRecordUpload, yesterday); } finally { Svc.Prefs.resetBranch(""); @@ -336,16 +263,16 @@ add_test(function test_client_name_change() { let initialScore = tracker.score; - equal(Object.keys(tracker.changedIDs).length, 0); + do_check_eq(Object.keys(tracker.changedIDs).length, 0); Svc.Prefs.set("client.name", "new name"); _("new name: " + engine.localName); - notEqual(initialName, engine.localName); - equal(Object.keys(tracker.changedIDs).length, 1); - ok(engine.localID in tracker.changedIDs); - ok(tracker.score > initialScore); - ok(tracker.score >= SCORE_INCREMENT_XLARGE); + do_check_neq(initialName, engine.localName); + do_check_eq(Object.keys(tracker.changedIDs).length, 1); + do_check_true(engine.localID in tracker.changedIDs); + do_check_true(tracker.score > initialScore); + do_check_true(tracker.score >= SCORE_INCREMENT_XLARGE); Svc.Obs.notify("weave:engine:stop-tracking"); @@ -369,16 +296,15 @@ add_test(function test_send_command() { engine._sendCommandToClient(action, args, remoteId); let newRecord = store._remoteClients[remoteId]; - let clientCommands = engine._readCommands()[remoteId]; - 
notEqual(newRecord, undefined); - equal(clientCommands.length, 1); + do_check_neq(newRecord, undefined); + do_check_eq(newRecord.commands.length, 1); - let command = clientCommands[0]; - equal(command.command, action); - equal(command.args.length, 2); - deepEqual(command.args, args); + let command = newRecord.commands[0]; + do_check_eq(command.command, action); + do_check_eq(command.args.length, 2); + do_check_eq(command.args, args); - notEqual(tracker.changedIDs[remoteId], undefined); + do_check_neq(tracker.changedIDs[remoteId], undefined); run_next_test(); }); @@ -402,7 +328,7 @@ add_test(function test_command_validation() { ["__UNKNOWN__", [], false] ]; - for (let [action, args, expectedResult] of testCommands) { + for each (let [action, args, expectedResult] in testCommands) { let remoteId = Utils.makeGUID(); let rec = new ClientsRec("clients", remoteId); @@ -412,26 +338,24 @@ add_test(function test_command_validation() { engine.sendCommand(action, args, remoteId); let newRecord = store._remoteClients[remoteId]; - notEqual(newRecord, undefined); - - let clientCommands = engine._readCommands()[remoteId]; + do_check_neq(newRecord, undefined); if (expectedResult) { _("Ensuring command is sent: " + action); - equal(clientCommands.length, 1); + do_check_eq(newRecord.commands.length, 1); - let command = clientCommands[0]; - equal(command.command, action); - deepEqual(command.args, args); + let command = newRecord.commands[0]; + do_check_eq(command.command, action); + do_check_eq(command.args, args); - notEqual(engine._tracker, undefined); - notEqual(engine._tracker.changedIDs[remoteId], undefined); + do_check_neq(engine._tracker, undefined); + do_check_neq(engine._tracker.changedIDs[remoteId], undefined); } else { _("Ensuring command is scrubbed: " + action); - equal(clientCommands, undefined); + do_check_eq(newRecord.commands, undefined); if (store._tracker) { - equal(engine._tracker[remoteId], undefined); + do_check_eq(engine._tracker[remoteId], undefined); } } @@ -455,11 +379,10 @@ add_test(function test_command_duplication() { engine.sendCommand(action, args, remoteId); let newRecord = store._remoteClients[remoteId]; - let clientCommands = engine._readCommands()[remoteId]; - equal(clientCommands.length, 1); + do_check_eq(newRecord.commands.length, 1); _("Check variant args length"); - engine._saveCommands({}); + newRecord.commands = []; action = "resetEngine"; engine.sendCommand(action, [{ x: "foo" }], remoteId); @@ -468,8 +391,7 @@ add_test(function test_command_duplication() { _("Make sure we spot a real dupe argument."); engine.sendCommand(action, [{ x: "bar" }], remoteId); - clientCommands = engine._readCommands()[remoteId]; - equal(clientCommands.length, 2); + do_check_eq(newRecord.commands.length, 2); run_next_test(); }); @@ -486,7 +408,7 @@ add_test(function test_command_invalid_client() { error = ex; } - equal(error.message.indexOf("Unknown remote client ID: "), 0); + do_check_eq(error.message.indexOf("Unknown remote client ID: "), 0); run_next_test(); }); @@ -500,174 +422,13 @@ add_test(function test_process_incoming_commands() { var handler = function() { Svc.Obs.remove(ev, handler); - - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - engine._resetClient(); - run_next_test(); }; Svc.Obs.add(ev, handler); // logout command causes processIncomingCommands to return explicit false. 
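The test_process_incoming_commands hunk above is built around the suite's one-shot observer pattern. A hedged sketch of that pattern (the topic string here is illustrative, not read from the diff):

// One-shot observer: detach before advancing the test runner so the
// handler cannot fire twice if the topic is notified again.
var topic = "weave:service:logout:finish"; // illustrative topic
var handler = function() {
  Svc.Obs.remove(topic, handler);
  run_next_test();
};
Svc.Obs.add(topic, handler);
// ...then trigger the code under test, e.g. engine.processIncomingCommands();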
- ok(!engine.processIncomingCommands()); -}); - -add_test(function test_filter_duplicate_names() { - _("Ensure that we exclude clients with identical names that haven't synced in a week."); - - let now = Date.now() / 1000; - let contents = { - meta: {global: {engines: {clients: {version: engine.version, - syncID: engine.syncID}}}}, - clients: {}, - crypto: {} - }; - let server = serverForUsers({"foo": "password"}, contents); - let user = server.user("foo"); - - new SyncTestingInfrastructure(server.server); - generateNewKeys(Service.collectionKeys); - - // Synced recently. - let recentID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(recentID, encryptPayload({ - id: recentID, - name: "My Phone", - type: "mobile", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - // Dupe of our client, synced more than 1 week ago. - let dupeID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(dupeID, encryptPayload({ - id: dupeID, - name: engine.localName, - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 604810)); - - // Synced more than 1 week ago, but not a dupe. - let oldID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(oldID, encryptPayload({ - id: oldID, - name: "My old desktop", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 604820)); - - try { - let store = engine._store; - - _("First sync"); - strictEqual(engine.lastRecordUpload, 0); - engine._sync(); - ok(engine.lastRecordUpload > 0); - deepEqual(user.collection("clients").keys().sort(), - [recentID, dupeID, oldID, engine.localID].sort(), - "Our record should be uploaded on first sync"); - - deepEqual(Object.keys(store.getAllIDs()).sort(), - [recentID, dupeID, oldID, engine.localID].sort(), - "Duplicate ID should remain in getAllIDs"); - ok(engine._store.itemExists(dupeID), "Dupe ID should be considered as existing for Sync methods."); - ok(!engine.remoteClientExists(dupeID), "Dupe ID should not be considered as existing for external methods."); - - // dupe desktop should not appear in .deviceTypes. - equal(engine.deviceTypes.get("desktop"), 2); - equal(engine.deviceTypes.get("mobile"), 1); - - // dupe desktop should not appear in stats - deepEqual(engine.stats, { - hasMobile: 1, - names: [engine.localName, "My Phone", "My old desktop"], - numClients: 3, - }); - - ok(engine.remoteClientExists(oldID), "non-dupe ID should exist."); - ok(!engine.remoteClientExists(dupeID), "dupe ID should not exist"); - equal(engine.remoteClients.length, 2, "dupe should not be in remoteClients"); - - // Check that a subsequent Sync doesn't report anything as being processed. - let counts; - Svc.Obs.add("weave:engine:sync:applied", function observe(subject, data) { - Svc.Obs.remove("weave:engine:sync:applied", observe); - counts = subject; - }); - - engine._sync(); - equal(counts.applied, 0); // We didn't report applying any records. 
- equal(counts.reconciled, 4); // We reported reconcilliation for all records - equal(counts.succeeded, 0); - equal(counts.failed, 0); - equal(counts.newFailed, 0); - - _("Broadcast logout to all clients"); - engine.sendCommand("logout", []); - engine._sync(); - - let collection = server.getCollection("foo", "clients"); - let recentPayload = JSON.parse(JSON.parse(collection.payload(recentID)).ciphertext); - deepEqual(recentPayload.commands, [{ command: "logout", args: [] }], - "Should send commands to the recent client"); - - let oldPayload = JSON.parse(JSON.parse(collection.payload(oldID)).ciphertext); - deepEqual(oldPayload.commands, [{ command: "logout", args: [] }], - "Should send commands to the week-old client"); - - let dupePayload = JSON.parse(JSON.parse(collection.payload(dupeID)).ciphertext); - deepEqual(dupePayload.commands, [], - "Should not send commands to the dupe client"); - - _("Update the dupe client's modified time"); - server.insertWBO("foo", "clients", new ServerWBO(dupeID, encryptPayload({ - id: dupeID, - name: engine.localName, - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - _("Second sync."); - engine._sync(); - - deepEqual(Object.keys(store.getAllIDs()).sort(), - [recentID, oldID, dupeID, engine.localID].sort(), - "Stale client synced, so it should no longer be marked as a dupe"); - - ok(engine.remoteClientExists(dupeID), "Dupe ID should appear as it synced."); - - // Recently synced dupe desktop should appear in .deviceTypes. - equal(engine.deviceTypes.get("desktop"), 3); - - // Recently synced dupe desktop should now appear in stats - deepEqual(engine.stats, { - hasMobile: 1, - names: [engine.localName, "My Phone", engine.localName, "My old desktop"], - numClients: 4, - }); - - ok(engine.remoteClientExists(dupeID), "recently synced dupe ID should now exist"); - equal(engine.remoteClients.length, 3, "recently synced dupe should now be in remoteClients"); - - } finally { - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - - try { - server.deleteCollections("foo"); - } finally { - server.stop(run_next_test); - } - } + do_check_false(engine.processIncomingCommands()); }); add_test(function test_command_sync() { @@ -693,58 +454,40 @@ add_test(function test_command_sync() { } _("Create remote client record"); - server.insertWBO("foo", "clients", new ServerWBO(remoteId, encryptPayload({ - id: remoteId, - name: "Remote client", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), Date.now() / 1000)); + let rec = new ClientsRec("clients", remoteId); + engine._store.create(rec); + let remoteRecord = engine._store.createRecord(remoteId, "clients"); + engine.sendCommand("wipeAll", []); + + let clientRecord = engine._store._remoteClients[remoteId]; + do_check_neq(clientRecord, undefined); + do_check_eq(clientRecord.commands.length, 1); try { _("Syncing."); engine._sync(); - - _("Checking remote record was downloaded."); - let clientRecord = engine._store._remoteClients[remoteId]; - notEqual(clientRecord, undefined); - equal(clientRecord.commands.length, 0); - - _("Send a command to the remote client."); - engine.sendCommand("wipeAll", []); - let clientCommands = engine._readCommands()[remoteId]; - equal(clientCommands.length, 1); - engine._sync(); - _("Checking record was uploaded."); - notEqual(clientWBO(engine.localID).payload, undefined); - ok(engine.lastRecordUpload > 0); + do_check_neq(clientWBO(engine.localID).payload, undefined); + do_check_true(engine.lastRecordUpload > 0); - 
notEqual(clientWBO(remoteId).payload, undefined); + do_check_neq(clientWBO(remoteId).payload, undefined); Svc.Prefs.set("client.GUID", remoteId); engine._resetClient(); - equal(engine.localID, remoteId); + do_check_eq(engine.localID, remoteId); _("Performing sync on reset client."); engine._sync(); - notEqual(engine.localCommands, undefined); - equal(engine.localCommands.length, 1); + do_check_neq(engine.localCommands, undefined); + do_check_eq(engine.localCommands.length, 1); let command = engine.localCommands[0]; - equal(command.command, "wipeAll"); - equal(command.args.length, 0); + do_check_eq(command.command, "wipeAll"); + do_check_eq(command.args.length, 0); } finally { Svc.Prefs.resetBranch(""); Service.recordManager.clearCache(); - - try { - let collection = server.getCollection("foo", "clients"); - collection.remove(remoteId); - } finally { - server.stop(run_next_test); - } + server.stop(run_next_test); } }); @@ -769,19 +512,18 @@ add_test(function test_send_uri_to_client_for_display() { let newRecord = store._remoteClients[remoteId]; - notEqual(newRecord, undefined); - let clientCommands = engine._readCommands()[remoteId]; - equal(clientCommands.length, 1); + do_check_neq(newRecord, undefined); + do_check_eq(newRecord.commands.length, 1); - let command = clientCommands[0]; - equal(command.command, "displayURI"); - equal(command.args.length, 3); - equal(command.args[0], uri); - equal(command.args[1], engine.localID); - equal(command.args[2], title); + let command = newRecord.commands[0]; + do_check_eq(command.command, "displayURI"); + do_check_eq(command.args.length, 3); + do_check_eq(command.args[0], uri); + do_check_eq(command.args[1], engine.localID); + do_check_eq(command.args[2], title); - ok(tracker.score > initialScore); - ok(tracker.score - initialScore >= SCORE_INCREMENT_XLARGE); + do_check_true(tracker.score > initialScore); + do_check_true(tracker.score - initialScore >= SCORE_INCREMENT_XLARGE); _("Ensure unknown client IDs result in exception."); let unknownId = Utils.makeGUID(); @@ -793,11 +535,7 @@ add_test(function test_send_uri_to_client_for_display() { error = ex; } - equal(error.message.indexOf("Unknown remote client ID: "), 0); - - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - engine._resetClient(); + do_check_eq(error.message.indexOf("Unknown remote client ID: "), 0); run_next_test(); }); @@ -821,26 +559,22 @@ add_test(function test_receive_display_uri() { // Received 'displayURI' command should result in the topic defined below // being called.
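The test_receive_display_uri hunk that follows reverts the notification from the batched weave:engine:clients:display-uris (subject: an array of {uri, clientId, title}) to the older singular weave:engine:clients:display-uri (subject: a single {uri, client, title} object). A hedged sketch of a consumer of the older shape; openRemoteTab is illustrative and not taken from this tree:

// Consume the singular display-uri notification restored by this diff:
// the subject carries the fields directly rather than in an array.
Svc.Obs.add("weave:engine:clients:display-uri", function(subject, data) {
  // subject.uri, subject.client (sender GUID), subject.title
  openRemoteTab(subject.uri, subject.title); // illustrative helper
});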
- let ev = "weave:engine:clients:display-uris"; + let ev = "weave:engine:clients:display-uri"; let handler = function(subject, data) { Svc.Obs.remove(ev, handler); - equal(subject[0].uri, uri); - equal(subject[0].clientId, remoteId); - equal(subject[0].title, title); - equal(data, null); + do_check_eq(subject.uri, uri); + do_check_eq(subject.client, remoteId); + do_check_eq(subject.title, title); + do_check_eq(data, null); run_next_test(); }; Svc.Obs.add(ev, handler); - ok(engine.processIncomingCommands()); - - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - engine._resetClient(); + do_check_true(engine.processIncomingCommands()); }); add_test(function test_optional_client_fields() { @@ -848,590 +582,27 @@ add_test(function test_optional_client_fields() { const SUPPORTED_PROTOCOL_VERSIONS = ["1.1", "1.5"]; let local = engine._store.createRecord(engine.localID, "clients"); - equal(local.name, engine.localName); - equal(local.type, engine.localType); - equal(local.version, Services.appinfo.version); - deepEqual(local.protocols, SUPPORTED_PROTOCOL_VERSIONS); + do_check_eq(local.name, engine.localName); + do_check_eq(local.type, engine.localType); + do_check_eq(local.version, Services.appinfo.version); + do_check_array_eq(local.protocols, SUPPORTED_PROTOCOL_VERSIONS); // Optional fields. // Make sure they're what they ought to be... - equal(local.os, Services.appinfo.OS); - equal(local.appPackage, Services.appinfo.ID); + do_check_eq(local.os, Services.appinfo.OS); + do_check_eq(local.appPackage, Services.appinfo.ID); // ... and also that they're non-empty. - ok(!!local.os); - ok(!!local.appPackage); - ok(!!local.application); + do_check_true(!!local.os); + do_check_true(!!local.appPackage); + do_check_true(!!local.application); // We don't currently populate device or formfactor. // See Bug 1100722, Bug 1100723. - engine._resetClient(); run_next_test(); }); -add_test(function test_merge_commands() { - _("Verifies local commands for remote clients are merged with the server's"); - - let now = Date.now() / 1000; - let contents = { - meta: {global: {engines: {clients: {version: engine.version, - syncID: engine.syncID}}}}, - clients: {}, - crypto: {} - }; - let server = serverForUsers({"foo": "password"}, contents); - let user = server.user("foo"); - - new SyncTestingInfrastructure(server.server); - generateNewKeys(Service.collectionKeys); - - let desktopID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({ - id: desktopID, - name: "Desktop client", - type: "desktop", - commands: [{ - command: "displayURI", - args: ["https://example.com", engine.localID, "Yak Herders Anonymous"], - }], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - let mobileID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(mobileID, encryptPayload({ - id: mobileID, - name: "Mobile client", - type: "mobile", - commands: [{ - command: "logout", - args: [], - }], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - try { - let store = engine._store; - - _("First sync. 
2 records downloaded."); - strictEqual(engine.lastRecordUpload, 0); - engine._sync(); - - _("Broadcast logout to all clients"); - engine.sendCommand("logout", []); - engine._sync(); - - let collection = server.getCollection("foo", "clients"); - let desktopPayload = JSON.parse(JSON.parse(collection.payload(desktopID)).ciphertext); - deepEqual(desktopPayload.commands, [{ - command: "displayURI", - args: ["https://example.com", engine.localID, "Yak Herders Anonymous"], - }, { - command: "logout", - args: [], - }], "Should send the logout command to the desktop client"); - - let mobilePayload = JSON.parse(JSON.parse(collection.payload(mobileID)).ciphertext); - deepEqual(mobilePayload.commands, [{ command: "logout", args: [] }], - "Should not send a duplicate logout to the mobile client"); - } finally { - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - engine._resetClient(); - - try { - server.deleteCollections("foo"); - } finally { - server.stop(run_next_test); - } - } -}); - -add_test(function test_duplicate_remote_commands() { - _("Verifies local commands for remote clients are sent only once (bug 1289287)"); - - let now = Date.now() / 1000; - let contents = { - meta: {global: {engines: {clients: {version: engine.version, - syncID: engine.syncID}}}}, - clients: {}, - crypto: {} - }; - let server = serverForUsers({"foo": "password"}, contents); - let user = server.user("foo"); - - new SyncTestingInfrastructure(server.server); - generateNewKeys(Service.collectionKeys); - - let desktopID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({ - id: desktopID, - name: "Desktop client", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - try { - let store = engine._store; - - _("First sync. 
1 record downloaded."); - strictEqual(engine.lastRecordUpload, 0); - engine._sync(); - - _("Send tab to client"); - engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"]); - engine._sync(); - - _("Simulate the desktop client consuming the command and syncing to the server"); - server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({ - id: desktopID, - name: "Desktop client", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - _("Send another tab to the desktop client"); - engine.sendCommand("displayURI", ["https://foobar.com", engine.localID, "Foo bar!"], desktopID); - engine._sync(); - - let collection = server.getCollection("foo", "clients"); - let desktopPayload = JSON.parse(JSON.parse(collection.payload(desktopID)).ciphertext); - deepEqual(desktopPayload.commands, [{ - command: "displayURI", - args: ["https://foobar.com", engine.localID, "Foo bar!"], - }], "Should only send the second command to the desktop client"); - } finally { - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - engine._resetClient(); - - try { - server.deleteCollections("foo"); - } finally { - server.stop(run_next_test); - } - } -}); - -add_test(function test_upload_after_reboot() { - _("Multiple downloads, reboot, then upload (bug 1289287)"); - - let now = Date.now() / 1000; - let contents = { - meta: {global: {engines: {clients: {version: engine.version, - syncID: engine.syncID}}}}, - clients: {}, - crypto: {} - }; - let server = serverForUsers({"foo": "password"}, contents); - let user = server.user("foo"); - - new SyncTestingInfrastructure(server.server); - generateNewKeys(Service.collectionKeys); - - let deviceBID = Utils.makeGUID(); - let deviceCID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(deviceBID, encryptPayload({ - id: deviceBID, - name: "Device B", - type: "desktop", - commands: [{ - command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"] - }], - version: "48", - protocols: ["1.5"], - }), now - 10)); - server.insertWBO("foo", "clients", new ServerWBO(deviceCID, encryptPayload({ - id: deviceCID, - name: "Device C", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - try { - let store = engine._store; - - _("First sync. 
2 records downloaded."); - strictEqual(engine.lastRecordUpload, 0); - engine._sync(); - - _("Send tab to client"); - engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"], deviceBID); - - const oldUploadOutgoing = SyncEngine.prototype._uploadOutgoing; - SyncEngine.prototype._uploadOutgoing = () => engine._onRecordsWritten.call(engine, [], [deviceBID]); - engine._sync(); - - let collection = server.getCollection("foo", "clients"); - let deviceBPayload = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext); - deepEqual(deviceBPayload.commands, [{ - command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"] - }], "Should be the same because the upload failed"); - - _("Simulate the client B consuming the command and syncing to the server"); - server.insertWBO("foo", "clients", new ServerWBO(deviceBID, encryptPayload({ - id: deviceBID, - name: "Device B", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - // Simulate reboot - SyncEngine.prototype._uploadOutgoing = oldUploadOutgoing; - engine = Service.clientsEngine = new ClientEngine(Service); - - engine._sync(); - - deviceBPayload = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext); - deepEqual(deviceBPayload.commands, [{ - command: "displayURI", - args: ["https://example.com", engine.localID, "Yak Herders Anonymous"], - }], "Should only had written our outgoing command"); - } finally { - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - engine._resetClient(); - - try { - server.deleteCollections("foo"); - } finally { - server.stop(run_next_test); - } - } -}); - -add_test(function test_keep_cleared_commands_after_reboot() { - _("Download commands, fail upload, reboot, then apply new commands (bug 1289287)"); - - let now = Date.now() / 1000; - let contents = { - meta: {global: {engines: {clients: {version: engine.version, - syncID: engine.syncID}}}}, - clients: {}, - crypto: {} - }; - let server = serverForUsers({"foo": "password"}, contents); - let user = server.user("foo"); - - new SyncTestingInfrastructure(server.server); - generateNewKeys(Service.collectionKeys); - - let deviceBID = Utils.makeGUID(); - let deviceCID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(engine.localID, encryptPayload({ - id: engine.localID, - name: "Device A", - type: "desktop", - commands: [{ - command: "displayURI", args: ["https://deviceblink.com", deviceBID, "Device B link"] - }, - { - command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"] - }], - version: "48", - protocols: ["1.5"], - }), now - 10)); - server.insertWBO("foo", "clients", new ServerWBO(deviceBID, encryptPayload({ - id: deviceBID, - name: "Device B", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - server.insertWBO("foo", "clients", new ServerWBO(deviceCID, encryptPayload({ - id: deviceCID, - name: "Device C", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - try { - let store = engine._store; - - _("First sync. 
Download remote and our record."); - strictEqual(engine.lastRecordUpload, 0); - - let collection = server.getCollection("foo", "clients"); - const oldUploadOutgoing = SyncEngine.prototype._uploadOutgoing; - SyncEngine.prototype._uploadOutgoing = () => engine._onRecordsWritten.call(engine, [], [deviceBID]); - let commandsProcessed = 0; - engine._handleDisplayURIs = (uris) => { commandsProcessed = uris.length }; - - engine._sync(); - engine.processIncomingCommands(); // Not called by the engine.sync(), gotta call it ourselves - equal(commandsProcessed, 2, "We processed 2 commands"); - - let localRemoteRecord = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext); - deepEqual(localRemoteRecord.commands, [{ - command: "displayURI", args: ["https://deviceblink.com", deviceBID, "Device B link"] - }, - { - command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"] - }], "Should be the same because the upload failed"); - - // Another client sends another link - server.insertWBO("foo", "clients", new ServerWBO(engine.localID, encryptPayload({ - id: engine.localID, - name: "Device A", - type: "desktop", - commands: [{ - command: "displayURI", args: ["https://deviceblink.com", deviceBID, "Device B link"] - }, - { - command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"] - }, - { - command: "displayURI", args: ["https://deviceclink2.com", deviceCID, "Device C link 2"] - }], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - // Simulate reboot - SyncEngine.prototype._uploadOutgoing = oldUploadOutgoing; - engine = Service.clientsEngine = new ClientEngine(Service); - - commandsProcessed = 0; - engine._handleDisplayURIs = (uris) => { commandsProcessed = uris.length }; - engine._sync(); - engine.processIncomingCommands(); - equal(commandsProcessed, 1, "We processed one command (the other were cleared)"); - - localRemoteRecord = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext); - deepEqual(localRemoteRecord.commands, [], "Should be empty"); - } finally { - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - - // Reset service (remove mocks) - engine = Service.clientsEngine = new ClientEngine(Service); - engine._resetClient(); - - try { - server.deleteCollections("foo"); - } finally { - server.stop(run_next_test); - } - } -}); - -add_test(function test_deleted_commands() { - _("Verifies commands for a deleted client are discarded"); - - let now = Date.now() / 1000; - let contents = { - meta: {global: {engines: {clients: {version: engine.version, - syncID: engine.syncID}}}}, - clients: {}, - crypto: {} - }; - let server = serverForUsers({"foo": "password"}, contents); - let user = server.user("foo"); - - new SyncTestingInfrastructure(server.server); - generateNewKeys(Service.collectionKeys); - - let activeID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(activeID, encryptPayload({ - id: activeID, - name: "Active client", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - let deletedID = Utils.makeGUID(); - server.insertWBO("foo", "clients", new ServerWBO(deletedID, encryptPayload({ - id: deletedID, - name: "Client to delete", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"], - }), now - 10)); - - try { - let store = engine._store; - - _("First sync. 
2 records downloaded."); - engine._sync(); - - _("Delete a record on the server."); - let collection = server.getCollection("foo", "clients"); - collection.remove(deletedID); - - _("Broadcast a command to all clients"); - engine.sendCommand("logout", []); - engine._sync(); - - deepEqual(collection.keys().sort(), [activeID, engine.localID].sort(), - "Should not reupload deleted clients"); - - let activePayload = JSON.parse(JSON.parse(collection.payload(activeID)).ciphertext); - deepEqual(activePayload.commands, [{ command: "logout", args: [] }], - "Should send the command to the active client"); - } finally { - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - engine._resetClient(); - - try { - server.deleteCollections("foo"); - } finally { - server.stop(run_next_test); - } - } -}); - -add_test(function test_send_uri_ack() { - _("Ensure a sent URI is deleted when the client syncs"); - - let now = Date.now() / 1000; - let contents = { - meta: {global: {engines: {clients: {version: engine.version, - syncID: engine.syncID}}}}, - clients: {}, - crypto: {} - }; - let server = serverForUsers({"foo": "password"}, contents); - let user = server.user("foo"); - - new SyncTestingInfrastructure(server.server); - generateNewKeys(Service.collectionKeys); - - try { - let fakeSenderID = Utils.makeGUID(); - - _("Initial sync for empty clients collection"); - engine._sync(); - let collection = server.getCollection("foo", "clients"); - let ourPayload = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext); - ok(ourPayload, "Should upload our client record"); - - _("Send a URL to the device on the server"); - ourPayload.commands = [{ - command: "displayURI", - args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"], - }]; - server.insertWBO("foo", "clients", new ServerWBO(engine.localID, encryptPayload(ourPayload), now)); - - _("Sync again"); - engine._sync(); - deepEqual(engine.localCommands, [{ - command: "displayURI", - args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"], - }], "Should receive incoming URI"); - ok(engine.processIncomingCommands(), "Should process incoming commands"); - const clearedCommands = engine._readCommands()[engine.localID]; - deepEqual(clearedCommands, [{ - command: "displayURI", - args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"], - }], "Should mark the commands as cleared after processing"); - - _("Check that the command was removed on the server"); - engine._sync(); - ourPayload = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext); - ok(ourPayload, "Should upload the synced client record"); - deepEqual(ourPayload.commands, [], "Should not reupload cleared commands"); - } finally { - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - engine._resetClient(); - - try { - server.deleteCollections("foo"); - } finally { - server.stop(run_next_test); - } - } -}); - -add_test(function test_command_sync() { - _("Notify other clients when writing their record."); - - engine._store.wipe(); - generateNewKeys(Service.collectionKeys); - - let contents = { - meta: {global: {engines: {clients: {version: engine.version, - syncID: engine.syncID}}}}, - clients: {}, - crypto: {} - }; - let server = serverForUsers({"foo": "password"}, contents); - new SyncTestingInfrastructure(server.server); - - let user = server.user("foo"); - let collection = server.getCollection("foo", "clients"); - let remoteId = Utils.makeGUID(); - let remoteId2 = Utils.makeGUID(); - - function clientWBO(id) { - 
return user.collection("clients").wbo(id); - } - - _("Create remote client record 1"); - server.insertWBO("foo", "clients", new ServerWBO(remoteId, encryptPayload({ - id: remoteId, - name: "Remote client", - type: "desktop", - commands: [], - version: "48", - protocols: ["1.5"] - }), Date.now() / 1000)); - - _("Create remote client record 2"); - server.insertWBO("foo", "clients", new ServerWBO(remoteId2, encryptPayload({ - id: remoteId2, - name: "Remote client 2", - type: "mobile", - commands: [], - version: "48", - protocols: ["1.5"] - }), Date.now() / 1000)); - - try { - equal(collection.count(), 2, "2 remote records written"); - engine._sync(); - equal(collection.count(), 3, "3 remote records written (+1 for the synced local record)"); - - let notifiedIds; - engine.sendCommand("wipeAll", []); - engine._tracker.addChangedID(engine.localID); - engine.getClientFxaDeviceId = (id) => "fxa-" + id; - engine._notifyCollectionChanged = (ids) => (notifiedIds = ids); - _("Syncing."); - engine._sync(); - deepEqual(notifiedIds, ["fxa-fake-guid-00","fxa-fake-guid-01"]); - ok(!notifiedIds.includes(engine.getClientFxaDeviceId(engine.localID)), - "We never notify the local device"); - - } finally { - Svc.Prefs.resetBranch(""); - Service.recordManager.clearCache(); - - try { - server.deleteCollections("foo"); - } finally { - server.stop(run_next_test); - } - } -}); - function run_test() { initTestLogging("Trace"); Log.repository.getLogger("Sync.Engine.Clients").level = Log.Level.Trace; diff --git a/services/sync/tests/unit/test_collection_getBatched.js b/services/sync/tests/unit/test_collection_getBatched.js deleted file mode 100644 index c6523d497..000000000 --- a/services/sync/tests/unit/test_collection_getBatched.js +++ /dev/null @@ -1,195 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. - http://creativecommons.org/publicdomain/zero/1.0/ */ - -Cu.import("resource://services-sync/record.js"); -Cu.import("resource://services-sync/service.js"); - -function run_test() { - initTestLogging("Trace"); - Log.repository.getLogger("Sync.Collection").level = Log.Level.Trace; - run_next_test(); -} - -function recordRange(lim, offset, total) { - let res = []; - for (let i = offset; i < Math.min(lim + offset, total); ++i) { - res.push(JSON.stringify({ id: String(i), payload: "test:" + i })); - } - return res.join("\n") + "\n"; -} - -function get_test_collection_info({ totalRecords, batchSize, lastModified, - throwAfter = Infinity, - interruptedAfter = Infinity }) { - let coll = new Collection("http://example.com/test/", WBORecord, Service); - coll.full = true; - let requests = []; - let responses = []; - let sawRecord = false; - coll.get = function() { - ok(!sawRecord); // make sure we call record handler after all requests. - let limit = +this.limit; - let offset = 0; - if (this.offset) { - equal(this.offset.slice(0, 6), "foobar"); - offset = +this.offset.slice(6); - } - requests.push({ - limit, - offset, - spec: this.spec, - headers: Object.assign({}, this.headers) - }); - if (--throwAfter === 0) { - throw "Some Network Error"; - } - let body = recordRange(limit, offset, totalRecords); - this._onProgress.call({ _data: body }); - let response = { - body, - success: true, - status: 200, - headers: {} - }; - if (--interruptedAfter === 0) { - response.success = false; - response.status = 412; - response.body = ""; - } else if (offset + limit < totalRecords) { - // Ensure we're treating this as an opaque string, since the docs say - // it might not be numeric. 
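The test_collection_getBatched.js file deleted here exercised offset-token pagination: x-weave-next-offset is an opaque string that the client echoes back verbatim on the next request, never parsing it as a number. A minimal sketch of that loop under those assumptions (fetchPage is an illustrative stand-in for the transport):

// Opaque-offset pagination: keep requesting until the server stops
// sending x-weave-next-offset, passing the token back unmodified.
function fetchAll(fetchPage) {
  var records = [];
  var offset = null;
  do {
    var resp = fetchPage(offset);      // transport supplied by the caller
    records = records.concat(resp.records);
    offset = resp.headers["x-weave-next-offset"] || null;
  } while (offset !== null);
  return records;
}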
- response.headers["x-weave-next-offset"] = "foobar" + (offset + batchSize); - } - response.headers["x-last-modified"] = lastModified; - responses.push(response); - return response; - }; - - let records = []; - coll.recordHandler = function(record) { - sawRecord = true; - // ensure records are coming in in the right order - equal(record.id, String(records.length)); - equal(record.payload, "test:" + records.length); - records.push(record); - }; - return { records, responses, requests, coll }; -} - -add_test(function test_success() { - const totalRecords = 11; - const batchSize = 2; - const lastModified = "111111"; - let { records, responses, requests, coll } = get_test_collection_info({ - totalRecords, - batchSize, - lastModified, - }); - let response = coll.getBatched(batchSize); - - equal(requests.length, Math.ceil(totalRecords / batchSize)); - - // records are mostly checked in recordHandler, we just care about the length - equal(records.length, totalRecords); - - // ensure we're returning the last response - equal(responses[responses.length - 1], response); - - // check first separately since its a bit of a special case - ok(!requests[0].headers["x-if-unmodified-since"]); - ok(!requests[0].offset); - equal(requests[0].limit, batchSize); - let expectedOffset = 2; - for (let i = 1; i < requests.length; ++i) { - let req = requests[i]; - equal(req.headers["x-if-unmodified-since"], lastModified); - equal(req.limit, batchSize); - if (i !== requests.length - 1) { - equal(req.offset, expectedOffset); - } - - expectedOffset += batchSize; - } - - // ensure we cleaned up anything that would break further - // use of this collection. - ok(!coll._headers["x-if-unmodified-since"]); - ok(!coll.offset); - ok(!coll.limit || (coll.limit == Infinity)); - - run_next_test(); -}); - -add_test(function test_total_limit() { - _("getBatched respects the (initial) value of the limit property"); - const totalRecords = 100; - const recordLimit = 11; - const batchSize = 2; - const lastModified = "111111"; - let { records, responses, requests, coll } = get_test_collection_info({ - totalRecords, - batchSize, - lastModified, - }); - coll.limit = recordLimit; - let response = coll.getBatched(batchSize); - - equal(requests.length, Math.ceil(recordLimit / batchSize)); - equal(records.length, recordLimit); - - for (let i = 0; i < requests.length; ++i) { - let req = requests[i]; - if (i !== requests.length - 1) { - equal(req.limit, batchSize); - } else { - equal(req.limit, recordLimit % batchSize); - } - } - - equal(coll._limit, recordLimit); - - run_next_test(); -}); - -add_test(function test_412() { - _("We shouldn't record records if we get a 412 in the middle of a batch"); - const totalRecords = 11; - const batchSize = 2; - const lastModified = "111111"; - let { records, responses, requests, coll } = get_test_collection_info({ - totalRecords, - batchSize, - lastModified, - interruptedAfter: 3 - }); - let response = coll.getBatched(batchSize); - - equal(requests.length, 3); - equal(records.length, 0); // record handler shouldn't be called for anything - - // ensure we're returning the last response - equal(responses[responses.length - 1], response); - - ok(!response.success); - equal(response.status, 412); - run_next_test(); -}); - -add_test(function test_get_throws() { - _("We shouldn't record records if get() throws for some reason"); - const totalRecords = 11; - const batchSize = 2; - const lastModified = "111111"; - let { records, responses, requests, coll } = get_test_collection_info({ - totalRecords, - batchSize, 
- lastModified, - throwAfter: 3 - }); - - throws(() => coll.getBatched(batchSize), "Some Network Error"); - - equal(requests.length, 3); - equal(records.length, 0); - run_next_test(); -}); diff --git a/services/sync/tests/unit/test_collections_recovery.js b/services/sync/tests/unit/test_collections_recovery.js index 0e7f54676..377a05383 100644 --- a/services/sync/tests/unit/test_collections_recovery.js +++ b/services/sync/tests/unit/test_collections_recovery.js @@ -6,7 +6,7 @@ Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); Cu.import("resource://testing-common/services/sync/utils.js"); -add_identity_test(this, function* test_missing_crypto_collection() { +add_identity_test(this, function test_missing_crypto_collection() { let johnHelper = track_collections_helper(); let johnU = johnHelper.with_updated_collection; let johnColls = johnHelper.collections; @@ -33,10 +33,7 @@ add_identity_test(this, function* test_missing_crypto_collection() { }; let collections = ["clients", "bookmarks", "forms", "history", "passwords", "prefs", "tabs"]; - // Disable addon sync because AddonManager won't be initialized here. - Service.engineManager.unregister("addons"); - - for (let coll of collections) { + for each (let coll in collections) { handlers["/1.1/johndoe/storage/" + coll] = johnU(coll, new ServerCollection({}, true).handler()); } @@ -53,7 +50,7 @@ add_identity_test(this, function* test_missing_crypto_collection() { }; _("Startup, no meta/global: freshStart called once."); - yield sync_and_validate_telem(); + Service.sync(); do_check_eq(fresh, 1); fresh = 0; @@ -63,12 +60,12 @@ add_identity_test(this, function* test_missing_crypto_collection() { _("Simulate a bad info/collections."); delete johnColls.crypto; - yield sync_and_validate_telem(); + Service.sync(); do_check_eq(fresh, 1); fresh = 0; _("Regular sync: no need to freshStart."); - yield sync_and_validate_telem(); + Service.sync(); do_check_eq(fresh, 0); } finally { diff --git a/services/sync/tests/unit/test_corrupt_keys.js b/services/sync/tests/unit/test_corrupt_keys.js index 009461c2a..2db080a8f 100644 --- a/services/sync/tests/unit/test_corrupt_keys.js +++ b/services/sync/tests/unit/test_corrupt_keys.js @@ -14,7 +14,7 @@ Cu.import("resource://services-sync/util.js"); Cu.import("resource://testing-common/services/sync/utils.js"); Cu.import("resource://gre/modules/Promise.jsm"); -add_task(function* test_locally_changed_keys() { +add_task(function test_locally_changed_keys() { let passphrase = "abcdeabcdeabcdeabcdeabcdea"; let hmacErrorCount = 0; @@ -51,7 +51,7 @@ add_task(function* test_locally_changed_keys() { }]}]}; delete Svc.Session; Svc.Session = { - getBrowserState: () => JSON.stringify(myTabs) + getBrowserState: function () JSON.stringify(myTabs) }; setBasicCredentials("johndoe", "password", passphrase); @@ -59,7 +59,6 @@ add_task(function* test_locally_changed_keys() { Service.clusterURL = server.baseURI; Service.engineManager.register(HistoryEngine); - Service.engineManager.unregister("addons"); function corrupt_local_keys() { Service.collectionKeys._default.keyPair = [Svc.Crypto.generateRandomKey(), @@ -87,7 +86,7 @@ add_task(function* test_locally_changed_keys() { do_check_true(Service.isLoggedIn); // Sync should upload records. - yield sync_and_validate_telem(); + Service.sync(); // Tabs exist. 
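Two legacy SpiderMonkey forms reappear in the hunks above: for each (let x in arr), which iterates values rather than keys, and the body-less expression closure function () expr, which returns expr. Their modern equivalents, noted only for orientation (the Tycho-era tree still targets an engine that accepts the old forms):

// Modern equivalents of the legacy forms used above:
for (let coll of collections) { /* ... */ }       // for each (let coll in collections)
Svc.Session = {
  getBrowserState: () => JSON.stringify(myTabs)   // function () JSON.stringify(myTabs)
};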
_("Tabs modified: " + johndoe.modified("tabs")); @@ -140,9 +139,7 @@ add_task(function* test_locally_changed_keys() { _("HMAC error count: " + hmacErrorCount); // Now syncing should succeed, after one HMAC error. - let ping = yield wait_for_ping(() => Service.sync(), true); - equal(ping.engines.find(e => e.name == "history").incoming.applied, 5); - + Service.sync(); do_check_eq(hmacErrorCount, 1); _("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair); @@ -186,9 +183,7 @@ add_task(function* test_locally_changed_keys() { Service.lastHMACEvent = 0; _("Syncing..."); - ping = yield sync_and_validate_telem(true); - - do_check_eq(ping.engines.find(e => e.name == "history").incoming.failed, 5); + Service.sync(); _("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair); _("Server keys have been updated, and we skipped over 5 more HMAC errors without adjusting history."); do_check_true(johndoe.modified("crypto") > old_key_time); @@ -209,7 +204,6 @@ add_task(function* test_locally_changed_keys() { function run_test() { let logger = Log.repository.rootLogger; Log.repository.rootLogger.addAppender(new Log.DumpAppender()); - validate_all_future_pings(); ensureLegacyIdentityManager(); diff --git a/services/sync/tests/unit/test_engine.js b/services/sync/tests/unit/test_engine.js index be637efc8..000cd5b4a 100644 --- a/services/sync/tests/unit/test_engine.js +++ b/services/sync/tests/unit/test_engine.js @@ -25,8 +25,8 @@ SteamTracker.prototype = { __proto__: Tracker.prototype }; -function SteamEngine(name, service) { - Engine.call(this, name, service); +function SteamEngine(service) { + Engine.call(this, "Steam", service); this.wasReset = false; this.wasSynced = false; } @@ -44,7 +44,7 @@ SteamEngine.prototype = { } }; -var engineObserver = { +let engineObserver = { topics: [], observe: function(subject, topic, data) { @@ -69,7 +69,7 @@ function run_test() { add_test(function test_members() { _("Engine object members"); - let engine = new SteamEngine("Steam", Service); + let engine = new SteamEngine(Service); do_check_eq(engine.Name, "Steam"); do_check_eq(engine.prefName, "steam"); do_check_true(engine._store instanceof SteamStore); @@ -79,7 +79,7 @@ add_test(function test_members() { add_test(function test_score() { _("Engine.score corresponds to tracker.score and is readonly"); - let engine = new SteamEngine("Steam", Service); + let engine = new SteamEngine(Service); do_check_eq(engine.score, 0); engine._tracker.score += 5; do_check_eq(engine.score, 5); @@ -97,7 +97,7 @@ add_test(function test_score() { add_test(function test_resetClient() { _("Engine.resetClient calls _resetClient"); - let engine = new SteamEngine("Steam", Service); + let engine = new SteamEngine(Service); do_check_false(engine.wasReset); engine.resetClient(); @@ -112,7 +112,7 @@ add_test(function test_resetClient() { add_test(function test_invalidChangedIDs() { _("Test that invalid changed IDs on disk don't end up live."); - let engine = new SteamEngine("Steam", Service); + let engine = new SteamEngine(Service); let tracker = engine._tracker; tracker.changedIDs = 5; tracker.saveChangedIDs(function onSaved() { @@ -127,7 +127,7 @@ add_test(function test_invalidChangedIDs() { add_test(function test_wipeClient() { _("Engine.wipeClient calls resetClient, wipes store, clears changed IDs"); - let engine = new SteamEngine("Steam", Service); + let engine = new SteamEngine(Service); do_check_false(engine.wasReset); do_check_false(engine._store.wasWiped); 
do_check_true(engine._tracker.addChangedID("a-changed-id")); @@ -150,7 +150,7 @@ add_test(function test_wipeClient() { add_test(function test_enabled() { _("Engine.enabled corresponds to preference"); - let engine = new SteamEngine("Steam", Service); + let engine = new SteamEngine(Service); try { do_check_false(engine.enabled); Svc.Prefs.set("engine.steam", true); @@ -165,18 +165,16 @@ add_test(function test_enabled() { }); add_test(function test_sync() { - let engine = new SteamEngine("Steam", Service); + let engine = new SteamEngine(Service); try { _("Engine.sync doesn't call _sync if it's not enabled"); do_check_false(engine.enabled); do_check_false(engine.wasSynced); engine.sync(); - do_check_false(engine.wasSynced); _("Engine.sync calls _sync if it's enabled"); engine.enabled = true; - engine.sync(); do_check_true(engine.wasSynced); do_check_eq(engineObserver.topics[0], "weave:engine:sync:start"); @@ -191,7 +189,7 @@ add_test(function test_sync() { add_test(function test_disabled_no_track() { _("When an engine is disabled, its tracker is not tracking."); - let engine = new SteamEngine("Steam", Service); + let engine = new SteamEngine(Service); let tracker = engine._tracker; do_check_eq(engine, tracker.engine); diff --git a/services/sync/tests/unit/test_errorhandler.js b/services/sync/tests/unit/test_errorhandler.js new file mode 100644 index 000000000..c087acc9f --- /dev/null +++ b/services/sync/tests/unit/test_errorhandler.js @@ -0,0 +1,1893 @@ +/* Any copyright is dedicated to the Public Domain. + http://creativecommons.org/publicdomain/zero/1.0/ */ + +Cu.import("resource://services-sync/engines/clients.js"); +Cu.import("resource://services-sync/constants.js"); +Cu.import("resource://services-sync/engines.js"); +Cu.import("resource://services-sync/keys.js"); +Cu.import("resource://services-sync/policies.js"); +Cu.import("resource://services-sync/service.js"); +Cu.import("resource://services-sync/status.js"); +Cu.import("resource://services-sync/util.js"); +Cu.import("resource://testing-common/services/sync/utils.js"); +Cu.import("resource://gre/modules/FileUtils.jsm"); + +const FAKE_SERVER_URL = "http://dummy:9000/"; +const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true); + +const PROLONGED_ERROR_DURATION = + (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') * 2) * 1000; + +const NON_PROLONGED_ERROR_DURATION = + (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') / 2) * 1000; + +Service.engineManager.clear(); + +function setLastSync(lastSyncValue) { + Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString()); +} + +function CatapultEngine() { + SyncEngine.call(this, "Catapult", Service); +} +CatapultEngine.prototype = { + __proto__: SyncEngine.prototype, + exception: null, // tests fill this in + _sync: function _sync() { + if (this.exception) { + throw this.exception; + } + } +}; + +let engineManager = Service.engineManager; +engineManager.register(CatapultEngine); + +// This relies on Service/ErrorHandler being a singleton. Fixing this will take +// a lot of work. +let errorHandler = Service.errorHandler; + +function run_test() { + initTestLogging("Trace"); + + Log.repository.getLogger("Sync.Service").level = Log.Level.Trace; + Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace; + Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace; + + ensureLegacyIdentityManager(); + + run_next_test(); +} + +function generateCredentialsChangedFailure() { + // Make sync fail due to changed credentials. 
We simply re-encrypt + // the keys with a different Sync Key, without changing the local one. + let newSyncKeyBundle = new SyncKeyBundle("johndoe", "23456234562345623456234562"); + let keys = Service.collectionKeys.asWBO(); + keys.encrypt(newSyncKeyBundle); + keys.upload(Service.resource(Service.cryptoKeysURL)); +} + +function service_unavailable(request, response) { + let body = "Service Unavailable"; + response.setStatusLine(request.httpVersion, 503, "Service Unavailable"); + response.setHeader("Retry-After", "42"); + response.bodyOutputStream.write(body, body.length); +} + +function sync_httpd_setup() { + let global = new ServerWBO("global", { + syncID: Service.syncID, + storageVersion: STORAGE_VERSION, + engines: {clients: {version: Service.clientsEngine.version, + syncID: Service.clientsEngine.syncID}, + catapult: {version: engineManager.get("catapult").version, + syncID: engineManager.get("catapult").syncID}} + }); + let clientsColl = new ServerCollection({}, true); + + // Tracking info/collections. + let collectionsHelper = track_collections_helper(); + let upd = collectionsHelper.with_updated_collection; + + let handler_401 = httpd_handler(401, "Unauthorized"); + return httpd_setup({ + // Normal server behaviour. + "/1.1/johndoe/storage/meta/global": upd("meta", global.handler()), + "/1.1/johndoe/info/collections": collectionsHelper.handler, + "/1.1/johndoe/storage/crypto/keys": + upd("crypto", (new ServerWBO("keys")).handler()), + "/1.1/johndoe/storage/clients": upd("clients", clientsColl.handler()), + + // Credentials are wrong or node reallocated. + "/1.1/janedoe/storage/meta/global": handler_401, + "/1.1/janedoe/info/collections": handler_401, + + // Maintenance or overloaded (503 + Retry-After) at info/collections. + "/maintenance/1.1/broken.info/info/collections": service_unavailable, + + // Maintenance or overloaded (503 + Retry-After) at meta/global. + "/maintenance/1.1/broken.meta/storage/meta/global": service_unavailable, + "/maintenance/1.1/broken.meta/info/collections": collectionsHelper.handler, + + // Maintenance or overloaded (503 + Retry-After) at crypto/keys. + "/maintenance/1.1/broken.keys/storage/meta/global": upd("meta", global.handler()), + "/maintenance/1.1/broken.keys/info/collections": collectionsHelper.handler, + "/maintenance/1.1/broken.keys/storage/crypto/keys": service_unavailable, + + // Maintenance or overloaded (503 + Retry-After) at wiping collection. 
+ "/maintenance/1.1/broken.wipe/info/collections": collectionsHelper.handler, + "/maintenance/1.1/broken.wipe/storage/meta/global": upd("meta", global.handler()), + "/maintenance/1.1/broken.wipe/storage/crypto/keys": + upd("crypto", (new ServerWBO("keys")).handler()), + "/maintenance/1.1/broken.wipe/storage": service_unavailable, + "/maintenance/1.1/broken.wipe/storage/clients": upd("clients", clientsColl.handler()), + "/maintenance/1.1/broken.wipe/storage/catapult": service_unavailable + }); +} + +function setUp(server) { + return configureIdentity({username: "johndoe"}).then( + () => { + Service.serverURL = server.baseURI + "/"; + Service.clusterURL = server.baseURI + "/"; + } + ).then( + () => generateAndUploadKeys() + ); +} + +function generateAndUploadKeys() { + generateNewKeys(Service.collectionKeys); + let serverKeys = Service.collectionKeys.asWBO("crypto", "keys"); + serverKeys.encrypt(Service.identity.syncKeyBundle); + return serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success; +} + +function clean() { + Service.startOver(); + Status.resetSync(); + Status.resetBackoff(); + errorHandler.didReportProlongedError = false; +} + +add_identity_test(this, function test_401_logout() { + let server = sync_httpd_setup(); + yield setUp(server); + + // By calling sync, we ensure we're logged in. + Service.sync(); + do_check_eq(Status.sync, SYNC_SUCCEEDED); + do_check_true(Service.isLoggedIn); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:service:sync:error", onSyncError); + function onSyncError() { + _("Got weave:service:sync:error in first sync."); + Svc.Obs.remove("weave:service:sync:error", onSyncError); + + // Wait for the automatic next sync. + function onLoginError() { + _("Got weave:service:login:error in second sync."); + Svc.Obs.remove("weave:service:login:error", onLoginError); + + do_check_eq(Status.login, LOGIN_FAILED_LOGIN_REJECTED); + do_check_false(Service.isLoggedIn); + + // Clean up. + Utils.nextTick(function () { + Service.startOver(); + server.stop(deferred.resolve); + }); + } + Svc.Obs.add("weave:service:login:error", onLoginError); + } + + // Make sync fail due to login rejected. + yield configureIdentity({username: "janedoe"}); + Service._updateCachedURLs(); + + _("Starting first sync."); + Service.sync(); + _("First sync done."); + yield deferred.promise; +}); + +add_identity_test(this, function test_credentials_changed_logout() { + let server = sync_httpd_setup(); + yield setUp(server); + + // By calling sync, we ensure we're logged in. + Service.sync(); + do_check_eq(Status.sync, SYNC_SUCCEEDED); + do_check_true(Service.isLoggedIn); + + generateCredentialsChangedFailure(); + Service.sync(); + + do_check_eq(Status.sync, CREDENTIALS_CHANGED); + do_check_false(Service.isLoggedIn); + + // Clean up. + Service.startOver(); + let deferred = Promise.defer(); + server.stop(deferred.resolve); + yield deferred.promise; +}); + +add_identity_test(this, function test_no_lastSync_pref() { + // Test reported error. + Status.resetSync(); + errorHandler.dontIgnoreErrors = true; + Status.sync = CREDENTIALS_CHANGED; + do_check_true(errorHandler.shouldReportError()); + + // Test unreported error. 
+ Status.resetSync(); + errorHandler.dontIgnoreErrors = true; + Status.login = LOGIN_FAILED_NETWORK_ERROR; + do_check_true(errorHandler.shouldReportError()); + +}); + +add_identity_test(this, function test_shouldReportError() { + Status.login = MASTER_PASSWORD_LOCKED; + do_check_false(errorHandler.shouldReportError()); + + // Give ourselves a clusterURL so that the temporary 401 no-error situation + // doesn't come into play. + Service.serverURL = FAKE_SERVER_URL; + Service.clusterURL = FAKE_SERVER_URL; + + // Test dontIgnoreErrors, non-network, non-prolonged, login error reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.login = LOGIN_FAILED_NO_PASSWORD; + do_check_true(errorHandler.shouldReportError()); + + // Test dontIgnoreErrors, non-network, non-prolonged, sync error reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.sync = CREDENTIALS_CHANGED; + do_check_true(errorHandler.shouldReportError()); + + // Test dontIgnoreErrors, non-network, prolonged, login error reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.login = LOGIN_FAILED_NO_PASSWORD; + do_check_true(errorHandler.shouldReportError()); + + // Test dontIgnoreErrors, non-network, prolonged, sync error reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.sync = CREDENTIALS_CHANGED; + do_check_true(errorHandler.shouldReportError()); + + // Test dontIgnoreErrors, network, non-prolonged, login error reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.login = LOGIN_FAILED_NETWORK_ERROR; + do_check_true(errorHandler.shouldReportError()); + + // Test dontIgnoreErrors, network, non-prolonged, sync error reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.sync = LOGIN_FAILED_NETWORK_ERROR; + do_check_true(errorHandler.shouldReportError()); + + // Test dontIgnoreErrors, network, prolonged, login error reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.login = LOGIN_FAILED_NETWORK_ERROR; + do_check_true(errorHandler.shouldReportError()); + + // Test dontIgnoreErrors, network, prolonged, sync error reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.sync = LOGIN_FAILED_NETWORK_ERROR; + do_check_true(errorHandler.shouldReportError()); + + // Test non-network, prolonged, login error reported + do_check_false(errorHandler.didReportProlongedError); + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.login = LOGIN_FAILED_NO_PASSWORD; + do_check_true(errorHandler.shouldReportError()); + do_check_true(errorHandler.didReportProlongedError); + + // Second time with prolonged error and without resetting + // didReportProlongedError, sync error should not be reported. 
+ Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.login = LOGIN_FAILED_NO_PASSWORD; + do_check_false(errorHandler.shouldReportError()); + do_check_true(errorHandler.didReportProlongedError); + + // Test non-network, prolonged, sync error reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + errorHandler.didReportProlongedError = false; + Status.sync = CREDENTIALS_CHANGED; + do_check_true(errorHandler.shouldReportError()); + do_check_true(errorHandler.didReportProlongedError); + errorHandler.didReportProlongedError = false; + + // Test network, prolonged, login error reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.login = LOGIN_FAILED_NETWORK_ERROR; + do_check_true(errorHandler.shouldReportError()); + do_check_true(errorHandler.didReportProlongedError); + errorHandler.didReportProlongedError = false; + + // Test network, prolonged, sync error reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.sync = LOGIN_FAILED_NETWORK_ERROR; + do_check_true(errorHandler.shouldReportError()); + do_check_true(errorHandler.didReportProlongedError); + errorHandler.didReportProlongedError = false; + + // Test non-network, non-prolonged, login error reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.login = LOGIN_FAILED_NO_PASSWORD; + do_check_true(errorHandler.shouldReportError()); + do_check_false(errorHandler.didReportProlongedError); + + // Test non-network, non-prolonged, sync error reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.sync = CREDENTIALS_CHANGED; + do_check_true(errorHandler.shouldReportError()); + do_check_false(errorHandler.didReportProlongedError); + + // Test network, non-prolonged, login error reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.login = LOGIN_FAILED_NETWORK_ERROR; + do_check_false(errorHandler.shouldReportError()); + do_check_false(errorHandler.didReportProlongedError); + + // Test network, non-prolonged, sync error reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.sync = LOGIN_FAILED_NETWORK_ERROR; + do_check_false(errorHandler.shouldReportError()); + do_check_false(errorHandler.didReportProlongedError); + + // Test server maintenance, sync errors are not reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.sync = SERVER_MAINTENANCE; + do_check_false(errorHandler.shouldReportError()); + do_check_false(errorHandler.didReportProlongedError); + + // Test server maintenance, login errors are not reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.login = SERVER_MAINTENANCE; + do_check_false(errorHandler.shouldReportError()); + do_check_false(errorHandler.didReportProlongedError); + + // Test prolonged, server maintenance, sync errors are reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.sync = SERVER_MAINTENANCE; + do_check_true(errorHandler.shouldReportError()); + 
do_check_true(errorHandler.didReportProlongedError); + errorHandler.didReportProlongedError = false; + + // Test prolonged, server maintenance, login errors are reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = false; + Status.login = SERVER_MAINTENANCE; + do_check_true(errorHandler.shouldReportError()); + do_check_true(errorHandler.didReportProlongedError); + errorHandler.didReportProlongedError = false; + + // Test dontIgnoreErrors, server maintenance, sync errors are reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.sync = SERVER_MAINTENANCE; + do_check_true(errorHandler.shouldReportError()); + // dontIgnoreErrors means we don't set didReportProlongedError + do_check_false(errorHandler.didReportProlongedError); + + // Test dontIgnoreErrors, server maintenance, login errors are reported + Status.resetSync(); + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.login = SERVER_MAINTENANCE; + do_check_true(errorHandler.shouldReportError()); + do_check_false(errorHandler.didReportProlongedError); + + // Test dontIgnoreErrors, prolonged, server maintenance, + // sync errors are reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.sync = SERVER_MAINTENANCE; + do_check_true(errorHandler.shouldReportError()); + do_check_false(errorHandler.didReportProlongedError); + + // Test dontIgnoreErrors, prolonged, server maintenance, + // login errors are reported + Status.resetSync(); + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.dontIgnoreErrors = true; + Status.login = SERVER_MAINTENANCE; + do_check_true(errorHandler.shouldReportError()); + do_check_false(errorHandler.didReportProlongedError); +}); + +add_identity_test(this, function test_shouldReportError_master_password() { + _("Test error ignored due to locked master password"); + let server = sync_httpd_setup(); + yield setUp(server); + + // Monkey patch Service.verifyLogin to imitate + // master password being locked. + Service._verifyLogin = Service.verifyLogin; + Service.verifyLogin = function () { + Status.login = MASTER_PASSWORD_LOCKED; + return false; + }; + + setLastSync(NON_PROLONGED_ERROR_DURATION); + Service.sync(); + do_check_false(errorHandler.shouldReportError()); + + // Clean up. + Service.verifyLogin = Service._verifyLogin; + clean(); + let deferred = Promise.defer(); + server.stop(deferred.resolve); + yield deferred.promise; +}); + +// Test that even if we don't have a cluster URL, a login failure due to +// authentication errors is always reported. +add_identity_test(this, function test_shouldReportLoginFailureWithNoCluster() { + // Ensure no clusterURL - any error not specific to login should not be reported. + Service.serverURL = ""; + Service.clusterURL = ""; + + // Test explicit "login rejected" state. + Status.resetSync(); + // If we have a LOGIN_REJECTED state, we always report the error. + Status.login = LOGIN_FAILED_LOGIN_REJECTED; + do_check_true(errorHandler.shouldReportError()); + // But any other status with a missing clusterURL is treated as a mid-sync + // 401 (ie, should be treated as a node reassignment) + Status.login = LOGIN_SUCCEEDED; + do_check_false(errorHandler.shouldReportError()); +}); + +// XXX - how to arrange for 'Service.identity.basicPassword = null;' in +// an fxaccounts environment? 
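Nearly every test below uses the same wait-for-notification idiom: register a one-shot observer on a weave:ui:* topic, park on a deferred promise, and resolve from inside the observer once the assertions pass. Condensed into a sketch (promiseObserved is a hypothetical helper, not something this file defines):

function promiseObserved(topic) {
  let deferred = Promise.defer();
  Svc.Obs.add(topic, function onTopic() {
    Svc.Obs.remove(topic, onTopic);   // one-shot: detach before resolving
    deferred.resolve();
  });
  return deferred.promise;
}

// Typical use:
//   let promise = promiseObserved("weave:ui:login:error");
//   errorHandler.syncAndReportErrors();
//   yield promise;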
+add_task(function test_login_syncAndReportErrors_non_network_error() { + // Test non-network errors are reported + // when calling syncAndReportErrors + let server = sync_httpd_setup(); + yield setUp(server); + Service.identity.basicPassword = null; + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onSyncError() { + Svc.Obs.remove("weave:ui:login:error", onSyncError); + do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD); + + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); + yield deferred.promise; +}); + +add_identity_test(this, function test_sync_syncAndReportErrors_non_network_error() { + // Test non-network errors are reported + // when calling syncAndReportErrors + let server = sync_httpd_setup(); + yield setUp(server); + + // By calling sync, we ensure we're logged in. + Service.sync(); + do_check_eq(Status.sync, SYNC_SUCCEEDED); + do_check_true(Service.isLoggedIn); + + generateCredentialsChangedFailure(); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:sync:error", function onSyncError() { + Svc.Obs.remove("weave:ui:sync:error", onSyncError); + do_check_eq(Status.sync, CREDENTIALS_CHANGED); + + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); + yield deferred.promise; +}); + +// XXX - how to arrange for 'Service.identity.basicPassword = null;' in +// an fxaccounts environment? +add_task(function test_login_syncAndReportErrors_prolonged_non_network_error() { + // Test prolonged, non-network errors are + // reported when calling syncAndReportErrors. + let server = sync_httpd_setup(); + yield setUp(server); + Service.identity.basicPassword = null; + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onSyncError() { + Svc.Obs.remove("weave:ui:login:error", onSyncError); + do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD); + + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); + yield deferred.promise; +}); + +add_identity_test(this, function test_sync_syncAndReportErrors_prolonged_non_network_error() { + // Test prolonged, non-network errors are + // reported when calling syncAndReportErrors. + let server = sync_httpd_setup(); + yield setUp(server); + + // By calling sync, we ensure we're logged in. + Service.sync(); + do_check_eq(Status.sync, SYNC_SUCCEEDED); + do_check_true(Service.isLoggedIn); + + generateCredentialsChangedFailure(); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:sync:error", function onSyncError() { + Svc.Obs.remove("weave:ui:sync:error", onSyncError); + do_check_eq(Status.sync, CREDENTIALS_CHANGED); + + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); + yield deferred.promise; +}); + +add_identity_test(this, function test_login_syncAndReportErrors_network_error() { + // Test network errors are reported when calling syncAndReportErrors. 
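The file simulates network failure in two ways: an unresolvable host for login-phase errors, and the global offline flag for sync-phase errors. A sketch contrasting the two (a summary of the setup below, not patch code):

// Login-phase failure: point Sync at a host that should never resolve.
Service.serverURL = FAKE_SERVER_URL;    // "http://dummy:9000/"
Service.clusterURL = FAKE_SERVER_URL;

// Sync-phase failure: flip the offline switch, and always restore it so
// later tests see a working network again.
Services.io.offline = true;
try {
  Service.sync();                       // surfaces LOGIN_FAILED_NETWORK_ERROR
} finally {
  Services.io.offline = false;
}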
+ yield configureIdentity({username: "broken.wipe"}); + Service.serverURL = FAKE_SERVER_URL; + Service.clusterURL = FAKE_SERVER_URL; + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onSyncError() { + Svc.Obs.remove("weave:ui:login:error", onSyncError); + do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR); + + clean(); + deferred.resolve(); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); + yield deferred.promise; +}); + + +add_test(function test_sync_syncAndReportErrors_network_error() { + // Test network errors are reported when calling syncAndReportErrors. + Services.io.offline = true; + + Svc.Obs.add("weave:ui:sync:error", function onSyncError() { + Svc.Obs.remove("weave:ui:sync:error", onSyncError); + do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR); + + Services.io.offline = false; + clean(); + run_next_test(); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); +}); + +add_identity_test(this, function test_login_syncAndReportErrors_prolonged_network_error() { + // Test prolonged, network errors are reported + // when calling syncAndReportErrors. + yield configureIdentity({username: "johndoe"}); + + Service.serverURL = FAKE_SERVER_URL; + Service.clusterURL = FAKE_SERVER_URL; + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onSyncError() { + Svc.Obs.remove("weave:ui:login:error", onSyncError); + do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR); + + clean(); + deferred.resolve(); + }); + + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); + yield deferred.promise; +}); + +add_test(function test_sync_syncAndReportErrors_prolonged_network_error() { + // Test prolonged, network errors are reported + // when calling syncAndReportErrors. + Services.io.offline = true; + + Svc.Obs.add("weave:ui:sync:error", function onSyncError() { + Svc.Obs.remove("weave:ui:sync:error", onSyncError); + do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR); + + Services.io.offline = false; + clean(); + run_next_test(); + }); + + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); +}); + +add_task(function test_login_prolonged_non_network_error() { + // Test prolonged, non-network errors are reported + let server = sync_httpd_setup(); + yield setUp(server); + Service.identity.basicPassword = null; + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onSyncError() { + Svc.Obs.remove("weave:ui:login:error", onSyncError); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_true(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_task(function test_sync_prolonged_non_network_error() { + // Test prolonged, non-network errors are reported + let server = sync_httpd_setup(); + yield setUp(server); + + // By calling sync, we ensure we're logged in. 
+ Service.sync(); + do_check_eq(Status.sync, SYNC_SUCCEEDED); + do_check_true(Service.isLoggedIn); + + generateCredentialsChangedFailure(); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:sync:error", function onSyncError() { + Svc.Obs.remove("weave:ui:sync:error", onSyncError); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_true(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_login_prolonged_network_error() { + // Test prolonged, network errors are reported + yield configureIdentity({username: "johndoe"}); + Service.serverURL = FAKE_SERVER_URL; + Service.clusterURL = FAKE_SERVER_URL; + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onSyncError() { + Svc.Obs.remove("weave:ui:login:error", onSyncError); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_true(errorHandler.didReportProlongedError); + + clean(); + deferred.resolve(); + }); + + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_test(function test_sync_prolonged_network_error() { + // Test prolonged, network errors are reported + Services.io.offline = true; + + Svc.Obs.add("weave:ui:sync:error", function onSyncError() { + Svc.Obs.remove("weave:ui:sync:error", onSyncError); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_true(errorHandler.didReportProlongedError); + + Services.io.offline = false; + clean(); + run_next_test(); + }); + + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); +}); + +add_task(function test_login_non_network_error() { + // Test non-network errors are reported + let server = sync_httpd_setup(); + yield setUp(server); + Service.identity.basicPassword = null; + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onSyncError() { + Svc.Obs.remove("weave:ui:login:error", onSyncError); + do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD); + do_check_false(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_task(function test_sync_non_network_error() { + // Test non-network errors are reported + let server = sync_httpd_setup(); + yield setUp(server); + + // By calling sync, we ensure we're logged in. + Service.sync(); + do_check_eq(Status.sync, SYNC_SUCCEEDED); + do_check_true(Service.isLoggedIn); + + generateCredentialsChangedFailure(); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:sync:error", function onSyncError() { + Svc.Obs.remove("weave:ui:sync:error", onSyncError); + do_check_eq(Status.sync, CREDENTIALS_CHANGED); + do_check_false(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_login_network_error() { + yield configureIdentity({username: "johndoe"}); + Service.serverURL = FAKE_SERVER_URL; + Service.clusterURL = FAKE_SERVER_URL; + + let deferred = Promise.defer(); + // Test network errors are not reported. 
+ Svc.Obs.add("weave:ui:clear-error", function onClearError() { + Svc.Obs.remove("weave:ui:clear-error", onClearError); + + do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR); + do_check_false(errorHandler.didReportProlongedError); + + Services.io.offline = false; + clean(); + deferred.resolve() + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_test(function test_sync_network_error() { + // Test network errors are not reported. + Services.io.offline = true; + + Svc.Obs.add("weave:ui:sync:finish", function onUIUpdate() { + Svc.Obs.remove("weave:ui:sync:finish", onUIUpdate); + do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR); + do_check_false(errorHandler.didReportProlongedError); + + Services.io.offline = false; + clean(); + run_next_test(); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + Service.sync(); +}); + +add_identity_test(this, function test_sync_server_maintenance_error() { + // Test server maintenance errors are not reported. + let server = sync_httpd_setup(); + yield setUp(server); + + const BACKOFF = 42; + let engine = engineManager.get("catapult"); + engine.enabled = true; + engine.exception = {status: 503, + headers: {"retry-after": BACKOFF}}; + + function onSyncError() { + do_throw("Shouldn't get here!"); + } + Svc.Obs.add("weave:ui:sync:error", onSyncError); + + do_check_eq(Status.service, STATUS_OK); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:sync:finish", function onSyncFinish() { + Svc.Obs.remove("weave:ui:sync:finish", onSyncFinish); + + do_check_eq(Status.service, SYNC_FAILED_PARTIAL); + do_check_eq(Status.sync, SERVER_MAINTENANCE); + do_check_false(errorHandler.didReportProlongedError); + + Svc.Obs.remove("weave:ui:sync:error", onSyncError); + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_info_collections_login_server_maintenance_error() { + // Test info/collections server maintenance errors are not reported. + let server = sync_httpd_setup(); + yield setUp(server); + + Service.username = "broken.info"; + yield configureIdentity({username: "broken.info"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + function onUIUpdate() { + do_throw("Shouldn't experience UI update!"); + } + Svc.Obs.add("weave:ui:login:error", onUIUpdate); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() { + Svc.Obs.remove("weave:ui:clear-error", onLoginFinish); + + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, LOGIN_FAILED); + do_check_eq(Status.login, SERVER_MAINTENANCE); + do_check_false(errorHandler.didReportProlongedError); + + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_meta_global_login_server_maintenance_error() { + // Test meta/global server maintenance errors are not reported. 
+ let server = sync_httpd_setup(); + yield setUp(server); + + yield configureIdentity({username: "broken.meta"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + function onUIUpdate() { + do_throw("Shouldn't get here!"); + } + Svc.Obs.add("weave:ui:login:error", onUIUpdate); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() { + Svc.Obs.remove("weave:ui:clear-error", onLoginFinish); + + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, LOGIN_FAILED); + do_check_eq(Status.login, SERVER_MAINTENANCE); + do_check_false(errorHandler.didReportProlongedError); + + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_crypto_keys_login_server_maintenance_error() { + // Test crypto/keys server maintenance errors are not reported. + let server = sync_httpd_setup(); + yield setUp(server); + + yield configureIdentity({username: "broken.keys"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + + // Force re-download of keys + Service.collectionKeys.clear(); + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + function onUIUpdate() { + do_throw("Shouldn't get here!"); + } + Svc.Obs.add("weave:ui:login:error", onUIUpdate); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() { + Svc.Obs.remove("weave:ui:clear-error", onLoginFinish); + + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, LOGIN_FAILED); + do_check_eq(Status.login, SERVER_MAINTENANCE); + do_check_false(errorHandler.didReportProlongedError); + + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + clean(); + server.stop(deferred.resolve); + }); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_task(function test_sync_prolonged_server_maintenance_error() { + // Test prolonged server maintenance errors are reported. 
+ let server = sync_httpd_setup(); + yield setUp(server); + + const BACKOFF = 42; + let engine = engineManager.get("catapult"); + engine.enabled = true; + engine.exception = {status: 503, + headers: {"retry-after": BACKOFF}}; + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:sync:error", onUIUpdate); + do_check_eq(Status.service, SYNC_FAILED); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_true(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_eq(Status.service, STATUS_OK); + + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_info_collections_login_prolonged_server_maintenance_error(){ + // Test info/collections prolonged server maintenance errors are reported. + let server = sync_httpd_setup(); + yield setUp(server); + + yield configureIdentity({username: "broken.info"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, SYNC_FAILED); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_true(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_meta_global_login_prolonged_server_maintenance_error(){ + // Test meta/global prolonged server maintenance errors are reported. + let server = sync_httpd_setup(); + yield setUp(server); + + yield configureIdentity({username: "broken.meta"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, SYNC_FAILED); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_true(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_download_crypto_keys_login_prolonged_server_maintenance_error(){ + // Test crypto/keys prolonged server maintenance errors are reported. 
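The crypto/keys coverage comes in two variants that differ only in which request hits the 503. Sketched here as commentary, not patch code:

// Download variant: clear the cached keys so login re-fetches them.
Service.collectionKeys.clear();   // next login GETs crypto/keys -> 503

// Upload variant: skip setUp()/generateAndUploadKeys() entirely, so the
// account has no server keys and login PUTs fresh ones -> 503 on the PUT.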
+ let server = sync_httpd_setup(); + yield setUp(server); + + yield configureIdentity({username: "broken.keys"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + // Force re-download of keys + Service.collectionKeys.clear(); + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, SYNC_FAILED); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_true(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_upload_crypto_keys_login_prolonged_server_maintenance_error(){ + // Test crypto/keys prolonged server maintenance errors are reported. + let server = sync_httpd_setup(); + + // Start off with an empty account, do not upload a key. + yield configureIdentity({username: "broken.keys"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, SYNC_FAILED); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_true(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_wipeServer_login_prolonged_server_maintenance_error(){ + // Test that we report prolonged server maintenance errors that occur whilst + // wiping the server. + let server = sync_httpd_setup(); + + // Start off with an empty account, do not upload a key. 
+ yield configureIdentity({username: "broken.wipe"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, SYNC_FAILED); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_true(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_identity_test(this, function test_wipeRemote_prolonged_server_maintenance_error(){ + // Test that we report prolonged server maintenance errors that occur whilst + // wiping all remote devices. + let server = sync_httpd_setup(); + + server.registerPathHandler("/1.1/broken.wipe/storage/catapult", service_unavailable); + yield configureIdentity({username: "broken.wipe"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + generateAndUploadKeys(); + + let engine = engineManager.get("catapult"); + engine.exception = null; + engine.enabled = true; + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:sync:error", onUIUpdate); + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, SYNC_FAILED); + do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); + do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote"); + do_check_true(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + Svc.Prefs.set("firstSync", "wipeRemote"); + setLastSync(PROLONGED_ERROR_DURATION); + Service.sync(); + yield deferred.promise; +}); + +add_task(function test_sync_syncAndReportErrors_server_maintenance_error() { + // Test server maintenance errors are reported + // when calling syncAndReportErrors. 
+ let server = sync_httpd_setup(); + yield setUp(server); + + const BACKOFF = 42; + let engine = engineManager.get("catapult"); + engine.enabled = true; + engine.exception = {status: 503, + headers: {"retry-after": BACKOFF}}; + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:sync:error", onUIUpdate); + do_check_eq(Status.service, SYNC_FAILED_PARTIAL); + do_check_eq(Status.sync, SERVER_MAINTENANCE); + do_check_false(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_eq(Status.service, STATUS_OK); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); + yield deferred.promise; +}); + +add_identity_test(this, function test_info_collections_login_syncAndReportErrors_server_maintenance_error() { + // Test info/collections server maintenance errors are reported + // when calling syncAndReportErrors. + let server = sync_httpd_setup(); + yield setUp(server); + + yield configureIdentity({username: "broken.info"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, LOGIN_FAILED); + do_check_eq(Status.login, SERVER_MAINTENANCE); + do_check_false(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); + yield deferred.promise; +}); + +add_identity_test(this, function test_meta_global_login_syncAndReportErrors_server_maintenance_error() { + // Test meta/global server maintenance errors are reported + // when calling syncAndReportErrors. + let server = sync_httpd_setup(); + yield setUp(server); + + yield configureIdentity({username: "broken.meta"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, LOGIN_FAILED); + do_check_eq(Status.login, SERVER_MAINTENANCE); + do_check_false(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + setLastSync(NON_PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); + yield deferred.promise; +}); + +add_identity_test(this, function test_download_crypto_keys_login_syncAndReportErrors_server_maintenance_error() { + // Test crypto/keys server maintenance errors are reported + // when calling syncAndReportErrors. 
+  let server = sync_httpd_setup();
+  yield setUp(server);
+
+  yield configureIdentity({username: "broken.keys"});
+  Service.serverURL = server.baseURI + "/maintenance/";
+  Service.clusterURL = server.baseURI + "/maintenance/";
+  // Force re-download of keys
+  Service.collectionKeys.clear();
+
+  let backoffInterval;
+  Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+    Svc.Obs.remove("weave:service:backoff:interval", observe);
+    backoffInterval = subject;
+  });
+
+  let deferred = Promise.defer();
+  Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+    Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+    do_check_true(Status.enforceBackoff);
+    do_check_eq(backoffInterval, 42);
+    do_check_eq(Status.service, LOGIN_FAILED);
+    do_check_eq(Status.login, SERVER_MAINTENANCE);
+    do_check_false(errorHandler.didReportProlongedError);
+
+    clean();
+    server.stop(deferred.resolve);
+  });
+
+  do_check_false(Status.enforceBackoff);
+  do_check_eq(Status.service, STATUS_OK);
+
+  setLastSync(NON_PROLONGED_ERROR_DURATION);
+  errorHandler.syncAndReportErrors();
+  yield deferred.promise;
+});
+
+add_identity_test(this, function test_upload_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
+  // Test crypto/keys server maintenance errors are reported
+  // when calling syncAndReportErrors.
+  let server = sync_httpd_setup();
+
+  // Start off with an empty account, do not upload a key.
+  yield configureIdentity({username: "broken.keys"});
+  Service.serverURL = server.baseURI + "/maintenance/";
+  Service.clusterURL = server.baseURI + "/maintenance/";
+
+  let backoffInterval;
+  Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+    Svc.Obs.remove("weave:service:backoff:interval", observe);
+    backoffInterval = subject;
+  });
+
+  let deferred = Promise.defer();
+  Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+    Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+    do_check_true(Status.enforceBackoff);
+    do_check_eq(backoffInterval, 42);
+    do_check_eq(Status.service, LOGIN_FAILED);
+    do_check_eq(Status.login, SERVER_MAINTENANCE);
+    do_check_false(errorHandler.didReportProlongedError);
+
+    clean();
+    server.stop(deferred.resolve);
+  });
+
+  do_check_false(Status.enforceBackoff);
+  do_check_eq(Status.service, STATUS_OK);
+
+  setLastSync(NON_PROLONGED_ERROR_DURATION);
+  errorHandler.syncAndReportErrors();
+  yield deferred.promise;
+});
+
+add_identity_test(this, function test_wipeServer_login_syncAndReportErrors_server_maintenance_error() {
+  // Test wipeServer server maintenance errors are reported
+  // when calling syncAndReportErrors.
+  let server = sync_httpd_setup();
+
+  // Start off with an empty account, do not upload a key.
+  yield configureIdentity({username: "broken.wipe"});
+  Service.serverURL = server.baseURI + "/maintenance/";
+  Service.clusterURL = server.baseURI + "/maintenance/";
+
+  let backoffInterval;
+  Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+    Svc.Obs.remove("weave:service:backoff:interval", observe);
+    backoffInterval = subject;
+  });
+
+  let deferred = Promise.defer();
+  Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+    Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+    do_check_true(Status.enforceBackoff);
+    do_check_eq(backoffInterval, 42);
+    do_check_eq(Status.service, LOGIN_FAILED);
+    do_check_eq(Status.login, SERVER_MAINTENANCE);
+    do_check_false(errorHandler.didReportProlongedError);
+
+    clean();
+    server.stop(deferred.resolve);
+  });
+
+  do_check_false(Status.enforceBackoff);
+  do_check_eq(Status.service, STATUS_OK);
+
+  setLastSync(NON_PROLONGED_ERROR_DURATION);
+  errorHandler.syncAndReportErrors();
+  yield deferred.promise;
+});
+
+add_identity_test(this, function test_wipeRemote_syncAndReportErrors_server_maintenance_error(){
+  // Test that we report server maintenance errors that occur whilst
+  // wiping all remote devices when calling syncAndReportErrors.
+  let server = sync_httpd_setup();
+
+  yield configureIdentity({username: "broken.wipe"});
+  Service.serverURL = server.baseURI + "/maintenance/";
+  Service.clusterURL = server.baseURI + "/maintenance/";
+  generateAndUploadKeys();
+
+  let engine = engineManager.get("catapult");
+  engine.exception = null;
+  engine.enabled = true;
+
+  let backoffInterval;
+  Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+    Svc.Obs.remove("weave:service:backoff:interval", observe);
+    backoffInterval = subject;
+  });
+
+  let deferred = Promise.defer();
+  Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
+    Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
+    do_check_true(Status.enforceBackoff);
+    do_check_eq(backoffInterval, 42);
+    do_check_eq(Status.service, SYNC_FAILED);
+    do_check_eq(Status.sync, SERVER_MAINTENANCE);
+    do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
+    do_check_false(errorHandler.didReportProlongedError);
+
+    clean();
+    server.stop(deferred.resolve);
+  });
+
+  do_check_false(Status.enforceBackoff);
+  do_check_eq(Status.service, STATUS_OK);
+
+  Svc.Prefs.set("firstSync", "wipeRemote");
+  setLastSync(NON_PROLONGED_ERROR_DURATION);
+  errorHandler.syncAndReportErrors();
+  yield deferred.promise;
+});
+
+add_task(function test_sync_syncAndReportErrors_prolonged_server_maintenance_error() {
+  // Test prolonged server maintenance errors are
+  // reported when calling syncAndReportErrors.
+  let server = sync_httpd_setup();
+  yield setUp(server);
+
+  const BACKOFF = 42;
+  let engine = engineManager.get("catapult");
+  engine.enabled = true;
+  engine.exception = {status: 503,
+                      headers: {"retry-after": BACKOFF}};
+
+  let deferred = Promise.defer();
+  Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
+    Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
+    do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+    do_check_eq(Status.sync, SERVER_MAINTENANCE);
+    // syncAndReportErrors means dontIgnoreErrors, which means
+    // didReportProlongedError not touched.
+    do_check_false(errorHandler.didReportProlongedError);
+
+    clean();
+    server.stop(deferred.resolve);
+  });
+
+  do_check_eq(Status.service, STATUS_OK);
+
+  setLastSync(PROLONGED_ERROR_DURATION);
+  errorHandler.syncAndReportErrors();
+  yield deferred.promise;
+});
+
+add_identity_test(this, function test_info_collections_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+  // Test prolonged info/collections server maintenance errors are reported
+  // when calling syncAndReportErrors.
+  let server = sync_httpd_setup();
+  yield setUp(server);
+
+  yield configureIdentity({username: "broken.info"});
+  Service.serverURL = server.baseURI + "/maintenance/";
+  Service.clusterURL = server.baseURI + "/maintenance/";
+
+  let backoffInterval;
+  Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+    Svc.Obs.remove("weave:service:backoff:interval", observe);
+    backoffInterval = subject;
+  });
+
+  let deferred = Promise.defer();
+  Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+    Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+    do_check_true(Status.enforceBackoff);
+    do_check_eq(backoffInterval, 42);
+    do_check_eq(Status.service, LOGIN_FAILED);
+    do_check_eq(Status.login, SERVER_MAINTENANCE);
+    // syncAndReportErrors means dontIgnoreErrors, which means
+    // didReportProlongedError not touched.
+    do_check_false(errorHandler.didReportProlongedError);
+
+    clean();
+    server.stop(deferred.resolve);
+  });
+
+  do_check_false(Status.enforceBackoff);
+  do_check_eq(Status.service, STATUS_OK);
+
+  setLastSync(PROLONGED_ERROR_DURATION);
+  errorHandler.syncAndReportErrors();
+  yield deferred.promise;
+});
+
+add_identity_test(this, function test_meta_global_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+  // Test prolonged meta/global server maintenance errors are reported
+  // when calling syncAndReportErrors.
+  let server = sync_httpd_setup();
+  yield setUp(server);
+
+  yield configureIdentity({username: "broken.meta"});
+  Service.serverURL = server.baseURI + "/maintenance/";
+  Service.clusterURL = server.baseURI + "/maintenance/";
+
+  let backoffInterval;
+  Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+    Svc.Obs.remove("weave:service:backoff:interval", observe);
+    backoffInterval = subject;
+  });
+
+  let deferred = Promise.defer();
+  Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+    Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+    do_check_true(Status.enforceBackoff);
+    do_check_eq(backoffInterval, 42);
+    do_check_eq(Status.service, LOGIN_FAILED);
+    do_check_eq(Status.login, SERVER_MAINTENANCE);
+    // syncAndReportErrors means dontIgnoreErrors, which means
+    // didReportProlongedError not touched.
+    do_check_false(errorHandler.didReportProlongedError);
+
+    clean();
+    server.stop(deferred.resolve);
+  });
+
+  do_check_false(Status.enforceBackoff);
+  do_check_eq(Status.service, STATUS_OK);
+
+  setLastSync(PROLONGED_ERROR_DURATION);
+  errorHandler.syncAndReportErrors();
+  yield deferred.promise;
+});
+
+add_identity_test(this, function test_download_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+  // Test prolonged crypto/keys server maintenance errors are reported
+  // when calling syncAndReportErrors.
+  let server = sync_httpd_setup();
+  yield setUp(server);
+
+  yield configureIdentity({username: "broken.keys"});
+  Service.serverURL = server.baseURI + "/maintenance/";
+  Service.clusterURL = server.baseURI + "/maintenance/";
+  // Force re-download of keys
+  Service.collectionKeys.clear();
+
+  let backoffInterval;
+  Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+    Svc.Obs.remove("weave:service:backoff:interval", observe);
+    backoffInterval = subject;
+  });
+
+  let deferred = Promise.defer();
+  Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+    Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+    do_check_true(Status.enforceBackoff);
+    do_check_eq(backoffInterval, 42);
+    do_check_eq(Status.service, LOGIN_FAILED);
+    do_check_eq(Status.login, SERVER_MAINTENANCE);
+    // syncAndReportErrors means dontIgnoreErrors, which means
+    // didReportProlongedError not touched.
+    do_check_false(errorHandler.didReportProlongedError);
+
+    clean();
+    server.stop(deferred.resolve);
+  });
+
+  do_check_false(Status.enforceBackoff);
+  do_check_eq(Status.service, STATUS_OK);
+
+  setLastSync(PROLONGED_ERROR_DURATION);
+  errorHandler.syncAndReportErrors();
+  yield deferred.promise;
+});
+
+add_identity_test(this, function test_upload_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+  // Test prolonged crypto/keys server maintenance errors are reported
+  // when calling syncAndReportErrors.
+  let server = sync_httpd_setup();
+
+  // Start off with an empty account, do not upload a key.
+  yield configureIdentity({username: "broken.keys"});
+  Service.serverURL = server.baseURI + "/maintenance/";
+  Service.clusterURL = server.baseURI + "/maintenance/";
+
+  let backoffInterval;
+  Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+    Svc.Obs.remove("weave:service:backoff:interval", observe);
+    backoffInterval = subject;
+  });
+
+  let deferred = Promise.defer();
+  Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+    Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+    do_check_true(Status.enforceBackoff);
+    do_check_eq(backoffInterval, 42);
+    do_check_eq(Status.service, LOGIN_FAILED);
+    do_check_eq(Status.login, SERVER_MAINTENANCE);
+    // syncAndReportErrors means dontIgnoreErrors, which means
+    // didReportProlongedError not touched.
+    do_check_false(errorHandler.didReportProlongedError);
+
+    clean();
+    server.stop(deferred.resolve);
+  });
+
+  do_check_false(Status.enforceBackoff);
+  do_check_eq(Status.service, STATUS_OK);
+
+  setLastSync(PROLONGED_ERROR_DURATION);
+  errorHandler.syncAndReportErrors();
+  yield deferred.promise;
+});
+
+add_identity_test(this, function test_wipeServer_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+  // Test prolonged wipeServer server maintenance errors are reported
+  // when calling syncAndReportErrors.
+  let server = sync_httpd_setup();
+
+  // Start off with an empty account, do not upload a key.
+ yield configureIdentity({username: "broken.wipe"}); + Service.serverURL = server.baseURI + "/maintenance/"; + Service.clusterURL = server.baseURI + "/maintenance/"; + + let backoffInterval; + Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { + Svc.Obs.remove("weave:service:backoff:interval", observe); + backoffInterval = subject; + }); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { + Svc.Obs.remove("weave:ui:login:error", onUIUpdate); + do_check_true(Status.enforceBackoff); + do_check_eq(backoffInterval, 42); + do_check_eq(Status.service, LOGIN_FAILED); + do_check_eq(Status.login, SERVER_MAINTENANCE); + // syncAndReportErrors means dontIgnoreErrors, which means + // didReportProlongedError not touched. + do_check_false(errorHandler.didReportProlongedError); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_false(Status.enforceBackoff); + do_check_eq(Status.service, STATUS_OK); + + setLastSync(PROLONGED_ERROR_DURATION); + errorHandler.syncAndReportErrors(); + yield deferred.promise; +}); + +add_task(function test_sync_engine_generic_fail() { + let server = sync_httpd_setup(); + + let engine = engineManager.get("catapult"); + engine.enabled = true; + engine.sync = function sync() { + Svc.Obs.notify("weave:engine:sync:error", "", "catapult"); + }; + + let log = Log.repository.getLogger("Sync.ErrorHandler"); + Svc.Prefs.set("log.appender.file.logOnError", true); + + do_check_eq(Status.engines["catapult"], undefined); + + let deferred = Promise.defer(); + // Don't wait for reset-file-log until the sync is underway. + // This avoids us catching a delayed notification from an earlier test. + Svc.Obs.add("weave:engine:sync:finish", function onEngineFinish() { + Svc.Obs.remove("weave:engine:sync:finish", onEngineFinish); + + log.info("Adding reset-file-log observer."); + Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() { + Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog); + + // Put these checks here, not after sync(), so that we aren't racing the + // log handler... which resets everything just a few lines below! + _("Status.engines: " + JSON.stringify(Status.engines)); + do_check_eq(Status.engines["catapult"], ENGINE_UNKNOWN_FAIL); + do_check_eq(Status.service, SYNC_FAILED_PARTIAL); + + // Test Error log was written on SYNC_FAILED_PARTIAL. + let entries = logsdir.directoryEntries; + do_check_true(entries.hasMoreElements()); + let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile); + do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName); + + clean(); + server.stop(deferred.resolve); + }); + }); + + do_check_true(yield setUp(server)); + Service.sync(); + yield deferred.promise; +}); + +add_test(function test_logs_on_sync_error_despite_shouldReportError() { + _("Ensure that an error is still logged when weave:service:sync:error " + + "is notified, despite shouldReportError returning false."); + + let log = Log.repository.getLogger("Sync.ErrorHandler"); + Svc.Prefs.set("log.appender.file.logOnError", true); + log.info("TESTING"); + + // Ensure that we report no error. + Status.login = MASTER_PASSWORD_LOCKED; + do_check_false(errorHandler.shouldReportError()); + + Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() { + Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog); + + // Test that error log was written. 
+ let entries = logsdir.directoryEntries; + do_check_true(entries.hasMoreElements()); + let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile); + do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName); + + clean(); + run_next_test(); + }); + Svc.Obs.notify("weave:service:sync:error", {}); +}); + +add_test(function test_logs_on_login_error_despite_shouldReportError() { + _("Ensure that an error is still logged when weave:service:login:error " + + "is notified, despite shouldReportError returning false."); + + let log = Log.repository.getLogger("Sync.ErrorHandler"); + Svc.Prefs.set("log.appender.file.logOnError", true); + log.info("TESTING"); + + // Ensure that we report no error. + Status.login = MASTER_PASSWORD_LOCKED; + do_check_false(errorHandler.shouldReportError()); + + Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() { + Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog); + + // Test that error log was written. + let entries = logsdir.directoryEntries; + do_check_true(entries.hasMoreElements()); + let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile); + do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName); + + clean(); + run_next_test(); + }); + Svc.Obs.notify("weave:service:login:error", {}); +}); + +// This test should be the last one since it monkeypatches the engine object +// and we should only have one engine object throughout the file (bug 629664). +add_task(function test_engine_applyFailed() { + let server = sync_httpd_setup(); + + let engine = engineManager.get("catapult"); + engine.enabled = true; + delete engine.exception; + engine.sync = function sync() { + Svc.Obs.notify("weave:engine:sync:applied", {newFailed:1}, "catapult"); + }; + + let log = Log.repository.getLogger("Sync.ErrorHandler"); + Svc.Prefs.set("log.appender.file.logOnError", true); + + let deferred = Promise.defer(); + Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() { + Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog); + + do_check_eq(Status.engines["catapult"], ENGINE_APPLY_FAIL); + do_check_eq(Status.service, SYNC_FAILED_PARTIAL); + + // Test Error log was written on SYNC_FAILED_PARTIAL. + let entries = logsdir.directoryEntries; + do_check_true(entries.hasMoreElements()); + let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile); + do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName); + + clean(); + server.stop(deferred.resolve); + }); + + do_check_eq(Status.engines["catapult"], undefined); + do_check_true(yield setUp(server)); + Service.sync(); + yield deferred.promise; +}); diff --git a/services/sync/tests/unit/test_errorhandler_1.js b/services/sync/tests/unit/test_errorhandler_1.js deleted file mode 100644 index ea2070b48..000000000 --- a/services/sync/tests/unit/test_errorhandler_1.js +++ /dev/null @@ -1,913 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. 
- http://creativecommons.org/publicdomain/zero/1.0/ */ - -Cu.import("resource://services-sync/engines/clients.js"); -Cu.import("resource://services-sync/constants.js"); -Cu.import("resource://services-sync/engines.js"); -Cu.import("resource://services-sync/keys.js"); -Cu.import("resource://services-sync/policies.js"); -Cu.import("resource://services-sync/service.js"); -Cu.import("resource://services-sync/status.js"); -Cu.import("resource://services-sync/util.js"); -Cu.import("resource://testing-common/services/sync/utils.js"); -Cu.import("resource://gre/modules/FileUtils.jsm"); - -var fakeServer = new SyncServer(); -fakeServer.start(); - -do_register_cleanup(function() { - return new Promise(resolve => { - fakeServer.stop(resolve); - }); -}); - -var fakeServerUrl = "http://localhost:" + fakeServer.port; - -const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true); - -const PROLONGED_ERROR_DURATION = - (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') * 2) * 1000; - -const NON_PROLONGED_ERROR_DURATION = - (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') / 2) * 1000; - -Service.engineManager.clear(); - -function setLastSync(lastSyncValue) { - Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString()); -} - -var engineManager = Service.engineManager; -engineManager.register(EHTestsCommon.CatapultEngine); - -// This relies on Service/ErrorHandler being a singleton. Fixing this will take -// a lot of work. -var errorHandler = Service.errorHandler; - -function run_test() { - initTestLogging("Trace"); - - Log.repository.getLogger("Sync.Service").level = Log.Level.Trace; - Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace; - Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace; - - ensureLegacyIdentityManager(); - - run_next_test(); -} - - -function clean() { - Service.startOver(); - Status.resetSync(); - Status.resetBackoff(); - errorHandler.didReportProlongedError = false; -} - -add_identity_test(this, function* test_401_logout() { - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - // By calling sync, we ensure we're logged in. - yield sync_and_validate_telem(); - do_check_eq(Status.sync, SYNC_SUCCEEDED); - do_check_true(Service.isLoggedIn); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:service:sync:error", onSyncError); - function onSyncError() { - _("Got weave:service:sync:error in first sync."); - Svc.Obs.remove("weave:service:sync:error", onSyncError); - - // Wait for the automatic next sync. - function onLoginError() { - _("Got weave:service:login:error in second sync."); - Svc.Obs.remove("weave:service:login:error", onLoginError); - - let expected = isConfiguredWithLegacyIdentity() ? - LOGIN_FAILED_LOGIN_REJECTED : LOGIN_FAILED_NETWORK_ERROR; - - do_check_eq(Status.login, expected); - do_check_false(Service.isLoggedIn); - - // Clean up. - Utils.nextTick(function () { - Service.startOver(); - server.stop(deferred.resolve); - }); - } - Svc.Obs.add("weave:service:login:error", onLoginError); - } - - // Make sync fail due to login rejected. 
- yield configureIdentity({username: "janedoe"}); - Service._updateCachedURLs(); - - _("Starting first sync."); - let ping = yield sync_and_validate_telem(true); - deepEqual(ping.failureReason, { name: "httperror", code: 401 }); - _("First sync done."); - yield deferred.promise; -}); - -add_identity_test(this, function* test_credentials_changed_logout() { - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - // By calling sync, we ensure we're logged in. - yield sync_and_validate_telem(); - do_check_eq(Status.sync, SYNC_SUCCEEDED); - do_check_true(Service.isLoggedIn); - - EHTestsCommon.generateCredentialsChangedFailure(); - - let ping = yield sync_and_validate_telem(true); - equal(ping.status.sync, CREDENTIALS_CHANGED); - deepEqual(ping.failureReason, { - name: "unexpectederror", - error: "Error: Aborting sync, remote setup failed" - }); - - do_check_eq(Status.sync, CREDENTIALS_CHANGED); - do_check_false(Service.isLoggedIn); - - // Clean up. - Service.startOver(); - let deferred = Promise.defer(); - server.stop(deferred.resolve); - yield deferred.promise; -}); - -add_identity_test(this, function test_no_lastSync_pref() { - // Test reported error. - Status.resetSync(); - errorHandler.dontIgnoreErrors = true; - Status.sync = CREDENTIALS_CHANGED; - do_check_true(errorHandler.shouldReportError()); - - // Test unreported error. - Status.resetSync(); - errorHandler.dontIgnoreErrors = true; - Status.login = LOGIN_FAILED_NETWORK_ERROR; - do_check_true(errorHandler.shouldReportError()); - -}); - -add_identity_test(this, function test_shouldReportError() { - Status.login = MASTER_PASSWORD_LOCKED; - do_check_false(errorHandler.shouldReportError()); - - // Give ourselves a clusterURL so that the temporary 401 no-error situation - // doesn't come into play. 
- Service.serverURL = fakeServerUrl; - Service.clusterURL = fakeServerUrl; - - // Test dontIgnoreErrors, non-network, non-prolonged, login error reported - Status.resetSync(); - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.dontIgnoreErrors = true; - Status.login = LOGIN_FAILED_NO_PASSWORD; - do_check_true(errorHandler.shouldReportError()); - - // Test dontIgnoreErrors, non-network, non-prolonged, sync error reported - Status.resetSync(); - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.dontIgnoreErrors = true; - Status.sync = CREDENTIALS_CHANGED; - do_check_true(errorHandler.shouldReportError()); - - // Test dontIgnoreErrors, non-network, prolonged, login error reported - Status.resetSync(); - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.dontIgnoreErrors = true; - Status.login = LOGIN_FAILED_NO_PASSWORD; - do_check_true(errorHandler.shouldReportError()); - - // Test dontIgnoreErrors, non-network, prolonged, sync error reported - Status.resetSync(); - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.dontIgnoreErrors = true; - Status.sync = CREDENTIALS_CHANGED; - do_check_true(errorHandler.shouldReportError()); - - // Test dontIgnoreErrors, network, non-prolonged, login error reported - Status.resetSync(); - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.dontIgnoreErrors = true; - Status.login = LOGIN_FAILED_NETWORK_ERROR; - do_check_true(errorHandler.shouldReportError()); - - // Test dontIgnoreErrors, network, non-prolonged, sync error reported - Status.resetSync(); - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.dontIgnoreErrors = true; - Status.sync = LOGIN_FAILED_NETWORK_ERROR; - do_check_true(errorHandler.shouldReportError()); - - // Test dontIgnoreErrors, network, prolonged, login error reported - Status.resetSync(); - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.dontIgnoreErrors = true; - Status.login = LOGIN_FAILED_NETWORK_ERROR; - do_check_true(errorHandler.shouldReportError()); - - // Test dontIgnoreErrors, network, prolonged, sync error reported - Status.resetSync(); - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.dontIgnoreErrors = true; - Status.sync = LOGIN_FAILED_NETWORK_ERROR; - do_check_true(errorHandler.shouldReportError()); - - // Test non-network, prolonged, login error reported - do_check_false(errorHandler.didReportProlongedError); - Status.resetSync(); - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.dontIgnoreErrors = false; - Status.login = LOGIN_FAILED_NO_PASSWORD; - do_check_true(errorHandler.shouldReportError()); - do_check_true(errorHandler.didReportProlongedError); - - // Second time with prolonged error and without resetting - // didReportProlongedError, sync error should not be reported. 
- Status.resetSync();
- setLastSync(PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.login = LOGIN_FAILED_NO_PASSWORD;
- do_check_false(errorHandler.shouldReportError());
- do_check_true(errorHandler.didReportProlongedError);
-
- // Test non-network, prolonged, sync error reported
- Status.resetSync();
- setLastSync(PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- errorHandler.didReportProlongedError = false;
- Status.sync = CREDENTIALS_CHANGED;
- do_check_true(errorHandler.shouldReportError());
- do_check_true(errorHandler.didReportProlongedError);
- errorHandler.didReportProlongedError = false;
-
- // Test network, prolonged, login error reported
- Status.resetSync();
- setLastSync(PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.login = LOGIN_FAILED_NETWORK_ERROR;
- do_check_true(errorHandler.shouldReportError());
- do_check_true(errorHandler.didReportProlongedError);
- errorHandler.didReportProlongedError = false;
-
- // Test network, prolonged, sync error reported
- Status.resetSync();
- setLastSync(PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.sync = LOGIN_FAILED_NETWORK_ERROR;
- do_check_true(errorHandler.shouldReportError());
- do_check_true(errorHandler.didReportProlongedError);
- errorHandler.didReportProlongedError = false;
-
- // Test non-network, non-prolonged, login error reported
- Status.resetSync();
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.login = LOGIN_FAILED_NO_PASSWORD;
- do_check_true(errorHandler.shouldReportError());
- do_check_false(errorHandler.didReportProlongedError);
-
- // Test non-network, non-prolonged, sync error reported
- Status.resetSync();
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.sync = CREDENTIALS_CHANGED;
- do_check_true(errorHandler.shouldReportError());
- do_check_false(errorHandler.didReportProlongedError);
-
- // Test network, non-prolonged, login error reported
- Status.resetSync();
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.login = LOGIN_FAILED_NETWORK_ERROR;
- do_check_false(errorHandler.shouldReportError());
- do_check_false(errorHandler.didReportProlongedError);
-
- // Test network, non-prolonged, sync error reported
- Status.resetSync();
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.sync = LOGIN_FAILED_NETWORK_ERROR;
- do_check_false(errorHandler.shouldReportError());
- do_check_false(errorHandler.didReportProlongedError);
-
- // Test server maintenance, sync errors are not reported
- Status.resetSync();
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.sync = SERVER_MAINTENANCE;
- do_check_false(errorHandler.shouldReportError());
- do_check_false(errorHandler.didReportProlongedError);
-
- // Test server maintenance, login errors are not reported
- Status.resetSync();
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.login = SERVER_MAINTENANCE;
- do_check_false(errorHandler.shouldReportError());
- do_check_false(errorHandler.didReportProlongedError);
-
- // Test prolonged, server maintenance, sync errors are reported
- Status.resetSync();
- setLastSync(PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.sync = SERVER_MAINTENANCE;
- do_check_true(errorHandler.shouldReportError());
- do_check_true(errorHandler.didReportProlongedError);
- errorHandler.didReportProlongedError = false;
-
- // Test prolonged, server maintenance, login errors are reported
- Status.resetSync();
- setLastSync(PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = false;
- Status.login = SERVER_MAINTENANCE;
- do_check_true(errorHandler.shouldReportError());
- do_check_true(errorHandler.didReportProlongedError);
- errorHandler.didReportProlongedError = false;
-
- // Test dontIgnoreErrors, server maintenance, sync errors are reported
- Status.resetSync();
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = true;
- Status.sync = SERVER_MAINTENANCE;
- do_check_true(errorHandler.shouldReportError());
- // dontIgnoreErrors means we don't set didReportProlongedError
- do_check_false(errorHandler.didReportProlongedError);
-
- // Test dontIgnoreErrors, server maintenance, login errors are reported
- Status.resetSync();
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = true;
- Status.login = SERVER_MAINTENANCE;
- do_check_true(errorHandler.shouldReportError());
- do_check_false(errorHandler.didReportProlongedError);
-
- // Test dontIgnoreErrors, prolonged, server maintenance,
- // sync errors are reported
- Status.resetSync();
- setLastSync(PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = true;
- Status.sync = SERVER_MAINTENANCE;
- do_check_true(errorHandler.shouldReportError());
- do_check_false(errorHandler.didReportProlongedError);
-
- // Test dontIgnoreErrors, prolonged, server maintenance,
- // login errors are reported
- Status.resetSync();
- setLastSync(PROLONGED_ERROR_DURATION);
- errorHandler.dontIgnoreErrors = true;
- Status.login = SERVER_MAINTENANCE;
- do_check_true(errorHandler.shouldReportError());
- do_check_false(errorHandler.didReportProlongedError);
-});
-
-add_identity_test(this, function* test_shouldReportError_master_password() {
- _("Test error ignored due to locked master password");
- let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
-
- // Monkey patch Service.verifyLogin to imitate
- // master password being locked.
- Service._verifyLogin = Service.verifyLogin;
- Service.verifyLogin = function () {
- Status.login = MASTER_PASSWORD_LOCKED;
- return false;
- };
-
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- Service.sync();
- do_check_false(errorHandler.shouldReportError());
-
- // Clean up.
- Service.verifyLogin = Service._verifyLogin;
- clean();
- let deferred = Promise.defer();
- server.stop(deferred.resolve);
- yield deferred.promise;
-});
-
-// Test that even if we don't have a cluster URL, a login failure due to
-// authentication errors is always reported.
-add_identity_test(this, function test_shouldReportLoginFailureWithNoCluster() {
- // Ensure no clusterURL - any error not specific to login should not be reported.
- Service.serverURL = "";
- Service.clusterURL = "";
-
- // Test explicit "login rejected" state.
- Status.resetSync();
- // If we have a LOGIN_REJECTED state, we always report the error.
- Status.login = LOGIN_FAILED_LOGIN_REJECTED;
- do_check_true(errorHandler.shouldReportError());
- // But any other status with a missing clusterURL is treated as a mid-sync
- // 401 (ie, should be treated as a node reassignment)
- Status.login = LOGIN_SUCCEEDED;
- do_check_false(errorHandler.shouldReportError());
-});
-
-// XXX - how to arrange for 'Service.identity.basicPassword = null;' in
-// an fxaccounts environment?
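The XXX above is why the next two login tests use plain add_task rather than add_identity_test: they null out Service.identity.basicPassword, which only exists under the legacy identity manager that run_test() installs via ensureLegacyIdentityManager(). A minimal sketch of the setup those tests assume, using only the helpers already imported in this file (the xpcshell harness and head files are assumed):

    // Inside an add_task generator, under the legacy identity manager:
    ensureLegacyIdentityManager();                  // credentials are plain prefs/properties
    yield configureIdentity({username: "johndoe"}); // installs a username and dummy credentials
    Service.identity.basicPassword = null;          // simulate "no password saved"
    // Login verification should now fail and drive Status.login to
    // LOGIN_FAILED_NO_PASSWORD, which is exactly what the tests below assert.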
-add_task(function* test_login_syncAndReportErrors_non_network_error() { - // Test non-network errors are reported - // when calling syncAndReportErrors - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - Service.identity.basicPassword = null; - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onSyncError() { - Svc.Obs.remove("weave:ui:login:error", onSyncError); - do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD); - - clean(); - server.stop(deferred.resolve); - }); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_sync_syncAndReportErrors_non_network_error() { - // Test non-network errors are reported - // when calling syncAndReportErrors - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - // By calling sync, we ensure we're logged in. - Service.sync(); - do_check_eq(Status.sync, SYNC_SUCCEEDED); - do_check_true(Service.isLoggedIn); - - EHTestsCommon.generateCredentialsChangedFailure(); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:sync:error", function onSyncError() { - Svc.Obs.remove("weave:ui:sync:error", onSyncError); - do_check_eq(Status.sync, CREDENTIALS_CHANGED); - // If we clean this tick, telemetry won't get the right error - server.stop(() => { - clean(); - deferred.resolve(); - }); - }); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - let ping = yield wait_for_ping(() => errorHandler.syncAndReportErrors(), true); - equal(ping.status.sync, CREDENTIALS_CHANGED); - deepEqual(ping.failureReason, { - name: "unexpectederror", - error: "Error: Aborting sync, remote setup failed" - }); - yield deferred.promise; -}); - -// XXX - how to arrange for 'Service.identity.basicPassword = null;' in -// an fxaccounts environment? -add_task(function* test_login_syncAndReportErrors_prolonged_non_network_error() { - // Test prolonged, non-network errors are - // reported when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - Service.identity.basicPassword = null; - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onSyncError() { - Svc.Obs.remove("weave:ui:login:error", onSyncError); - do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD); - - clean(); - server.stop(deferred.resolve); - }); - - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_sync_syncAndReportErrors_prolonged_non_network_error() { - // Test prolonged, non-network errors are - // reported when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - // By calling sync, we ensure we're logged in. 
- Service.sync(); - do_check_eq(Status.sync, SYNC_SUCCEEDED); - do_check_true(Service.isLoggedIn); - - EHTestsCommon.generateCredentialsChangedFailure(); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:sync:error", function onSyncError() { - Svc.Obs.remove("weave:ui:sync:error", onSyncError); - do_check_eq(Status.sync, CREDENTIALS_CHANGED); - // If we clean this tick, telemetry won't get the right error - server.stop(() => { - clean(); - deferred.resolve(); - }); - }); - - setLastSync(PROLONGED_ERROR_DURATION); - let ping = yield wait_for_ping(() => errorHandler.syncAndReportErrors(), true); - equal(ping.status.sync, CREDENTIALS_CHANGED); - deepEqual(ping.failureReason, { - name: "unexpectederror", - error: "Error: Aborting sync, remote setup failed" - }); - yield deferred.promise; -}); - -add_identity_test(this, function* test_login_syncAndReportErrors_network_error() { - // Test network errors are reported when calling syncAndReportErrors. - yield configureIdentity({username: "broken.wipe"}); - Service.serverURL = fakeServerUrl; - Service.clusterURL = fakeServerUrl; - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onSyncError() { - Svc.Obs.remove("weave:ui:login:error", onSyncError); - do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR); - - clean(); - deferred.resolve(); - }); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - - -add_test(function test_sync_syncAndReportErrors_network_error() { - // Test network errors are reported when calling syncAndReportErrors. - Services.io.offline = true; - - Svc.Obs.add("weave:ui:sync:error", function onSyncError() { - Svc.Obs.remove("weave:ui:sync:error", onSyncError); - do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR); - - Services.io.offline = false; - clean(); - run_next_test(); - }); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); -}); - -add_identity_test(this, function* test_login_syncAndReportErrors_prolonged_network_error() { - // Test prolonged, network errors are reported - // when calling syncAndReportErrors. - yield configureIdentity({username: "johndoe"}); - - Service.serverURL = fakeServerUrl; - Service.clusterURL = fakeServerUrl; - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onSyncError() { - Svc.Obs.remove("weave:ui:login:error", onSyncError); - do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR); - - clean(); - deferred.resolve(); - }); - - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_test(function test_sync_syncAndReportErrors_prolonged_network_error() { - // Test prolonged, network errors are reported - // when calling syncAndReportErrors. 
- Services.io.offline = true;
-
- Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:sync:error", onSyncError);
- do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
-
- Services.io.offline = false;
- clean();
- run_next_test();
- });
-
- setLastSync(PROLONGED_ERROR_DURATION);
- errorHandler.syncAndReportErrors();
-});
-
-add_task(function* test_login_prolonged_non_network_error() {
- // Test prolonged, non-network errors are reported
- let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
- Service.identity.basicPassword = null;
-
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:login:error", onSyncError);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
-
- setLastSync(PROLONGED_ERROR_DURATION);
- Service.sync();
- yield deferred.promise;
-});
-
-add_task(function* test_sync_prolonged_non_network_error() {
- // Test prolonged, non-network errors are reported
- let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
-
- // By calling sync, we ensure we're logged in.
- Service.sync();
- do_check_eq(Status.sync, SYNC_SUCCEEDED);
- do_check_true(Service.isLoggedIn);
-
- EHTestsCommon.generateCredentialsChangedFailure();
-
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:sync:error", onSyncError);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
- server.stop(() => {
- clean();
- deferred.resolve();
- });
- });
-
- setLastSync(PROLONGED_ERROR_DURATION);
-
- let ping = yield sync_and_validate_telem(true);
- equal(ping.status.sync, PROLONGED_SYNC_FAILURE);
- deepEqual(ping.failureReason, {
- name: "unexpectederror",
- error: "Error: Aborting sync, remote setup failed"
- });
- yield deferred.promise;
-});
-
-add_identity_test(this, function* test_login_prolonged_network_error() {
- // Test prolonged, network errors are reported
- yield configureIdentity({username: "johndoe"});
- Service.serverURL = fakeServerUrl;
- Service.clusterURL = fakeServerUrl;
-
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:login:error", onSyncError);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- clean();
- deferred.resolve();
- });
-
- setLastSync(PROLONGED_ERROR_DURATION);
- Service.sync();
- yield deferred.promise;
-});
-
-add_test(function test_sync_prolonged_network_error() {
- // Test prolonged, network errors are reported
- Services.io.offline = true;
-
- Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:sync:error", onSyncError);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- Services.io.offline = false;
- clean();
- run_next_test();
- });
-
- setLastSync(PROLONGED_ERROR_DURATION);
- Service.sync();
-});
-
-add_task(function* test_login_non_network_error() {
- // Test non-network errors are reported
- let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
- Service.identity.basicPassword = null;
-
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:login:error", onSyncError);
- do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
-
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- Service.sync();
- yield deferred.promise;
-});
-
-add_task(function* test_sync_non_network_error() {
- // Test non-network errors are reported
- let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
-
- // By calling sync, we ensure we're logged in.
- Service.sync();
- do_check_eq(Status.sync, SYNC_SUCCEEDED);
- do_check_true(Service.isLoggedIn);
-
- EHTestsCommon.generateCredentialsChangedFailure();
-
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:sync:error", onSyncError);
- do_check_eq(Status.sync, CREDENTIALS_CHANGED);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
-
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- Service.sync();
- yield deferred.promise;
-});
-
-add_identity_test(this, function* test_login_network_error() {
- yield configureIdentity({username: "johndoe"});
- Service.serverURL = fakeServerUrl;
- Service.clusterURL = fakeServerUrl;
-
- let deferred = Promise.defer();
- // Test network errors are not reported.
- Svc.Obs.add("weave:ui:clear-error", function onClearError() {
- Svc.Obs.remove("weave:ui:clear-error", onClearError);
-
- do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
- do_check_false(errorHandler.didReportProlongedError);
-
- Services.io.offline = false;
- clean();
- deferred.resolve();
- });
-
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- Service.sync();
- yield deferred.promise;
-});
-
-add_test(function test_sync_network_error() {
- // Test network errors are not reported.
- Services.io.offline = true;
-
- Svc.Obs.add("weave:ui:sync:finish", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:sync:finish", onUIUpdate);
- do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
- do_check_false(errorHandler.didReportProlongedError);
-
- Services.io.offline = false;
- clean();
- run_next_test();
- });
-
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- Service.sync();
-});
-
-add_identity_test(this, function* test_sync_server_maintenance_error() {
- // Test server maintenance errors are not reported.
- let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - const BACKOFF = 42; - let engine = engineManager.get("catapult"); - engine.enabled = true; - engine.exception = {status: 503, - headers: {"retry-after": BACKOFF}}; - - function onSyncError() { - do_throw("Shouldn't get here!"); - } - Svc.Obs.add("weave:ui:sync:error", onSyncError); - - do_check_eq(Status.service, STATUS_OK); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:sync:finish", function onSyncFinish() { - Svc.Obs.remove("weave:ui:sync:finish", onSyncFinish); - - do_check_eq(Status.service, SYNC_FAILED_PARTIAL); - do_check_eq(Status.sync, SERVER_MAINTENANCE); - do_check_false(errorHandler.didReportProlongedError); - - Svc.Obs.remove("weave:ui:sync:error", onSyncError); - server.stop(() => { - clean(); - deferred.resolve(); - }) - }); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - let ping = yield sync_and_validate_telem(true); - equal(ping.status.sync, SERVER_MAINTENANCE); - deepEqual(ping.engines.find(e => e.failureReason).failureReason, { name: "httperror", code: 503 }) - - yield deferred.promise; -}); - -add_identity_test(this, function* test_info_collections_login_server_maintenance_error() { - // Test info/collections server maintenance errors are not reported. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - Service.username = "broken.info"; - yield configureIdentity({username: "broken.info"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - function onUIUpdate() { - do_throw("Shouldn't experience UI update!"); - } - Svc.Obs.add("weave:ui:login:error", onUIUpdate); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() { - Svc.Obs.remove("weave:ui:clear-error", onLoginFinish); - - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - do_check_false(errorHandler.didReportProlongedError); - - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - clean(); - server.stop(deferred.resolve); - }); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - Service.sync(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_meta_global_login_server_maintenance_error() { - // Test meta/global server maintenance errors are not reported. 
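Every one of these /maintenance/ tests follows the same recipe: point serverURL and clusterURL at a path whose handler answers 503 with Retry-After: 42, then check that the service flips Status.enforceBackoff on and broadcasts the interval through the weave:service:backoff:interval observer notification. A sketch of the kind of httpd.js handler this relies on; the exact body of EHTestsCommon.service_unavailable is assumed here rather than quoted from the patch:

    // Always answer 503 and advertise a 42-second backoff.
    function service_unavailable(request, response) {
      let body = "Service Unavailable";
      response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
      response.setHeader("Retry-After", "42", false); // surfaces as backoffInterval == 42
      response.bodyOutputStream.write(body, body.length);
    }

The meta/global test whose body resumes below exercises exactly this path during login.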
- let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.meta"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - function onUIUpdate() { - do_throw("Shouldn't get here!"); - } - Svc.Obs.add("weave:ui:login:error", onUIUpdate); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() { - Svc.Obs.remove("weave:ui:clear-error", onLoginFinish); - - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - do_check_false(errorHandler.didReportProlongedError); - - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - clean(); - server.stop(deferred.resolve); - }); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - Service.sync(); - yield deferred.promise; -}); diff --git a/services/sync/tests/unit/test_errorhandler_2.js b/services/sync/tests/unit/test_errorhandler_2.js deleted file mode 100644 index 41f8ee727..000000000 --- a/services/sync/tests/unit/test_errorhandler_2.js +++ /dev/null @@ -1,1012 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. - http://creativecommons.org/publicdomain/zero/1.0/ */ - -Cu.import("resource://services-sync/engines/clients.js"); -Cu.import("resource://services-sync/constants.js"); -Cu.import("resource://services-sync/engines.js"); -Cu.import("resource://services-sync/keys.js"); -Cu.import("resource://services-sync/policies.js"); -Cu.import("resource://services-sync/service.js"); -Cu.import("resource://services-sync/status.js"); -Cu.import("resource://services-sync/util.js"); -Cu.import("resource://testing-common/services/sync/utils.js"); -Cu.import("resource://gre/modules/FileUtils.jsm"); - -var fakeServer = new SyncServer(); -fakeServer.start(); - -do_register_cleanup(function() { - return new Promise(resolve => { - fakeServer.stop(resolve); - }); -}); - -var fakeServerUrl = "http://localhost:" + fakeServer.port; - -const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true); - -const PROLONGED_ERROR_DURATION = - (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') * 2) * 1000; - -const NON_PROLONGED_ERROR_DURATION = - (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') / 2) * 1000; - -Service.engineManager.clear(); - -function setLastSync(lastSyncValue) { - Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString()); -} - -var engineManager = Service.engineManager; -engineManager.register(EHTestsCommon.CatapultEngine); - -// This relies on Service/ErrorHandler being a singleton. Fixing this will take -// a lot of work. 
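The arithmetic in this duplicated header (shared by both deleted files) is worth spelling out: the two durations derive from errorhandler.networkFailureReportTimeout, a pref measured in seconds, and setLastSync() backdates the lastSync pref by the given number of milliseconds. A worked example with an assumed pref value of 604800 seconds (one week; the real value comes from the profile defaults):

    // Assumed for illustration: networkFailureReportTimeout == 604800 seconds.
    const PROLONGED_ERROR_DURATION     = (604800 * 2) * 1000; // 1,209,600,000 ms = two weeks
    const NON_PROLONGED_ERROR_DURATION = (604800 / 2) * 1000; //   302,400,000 ms = half a week
    // setLastSync(PROLONGED_ERROR_DURATION) then writes a lastSync date two weeks
    // in the past, far enough back for the error handler to call the next failure
    // "prolonged"; the NON_PROLONGED variant stays inside the reporting window.

The singleton caveat above is also why every test funnels through the shared errorHandler grabbed just below, and why clean() has to reset didReportProlongedError between tests.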
-var errorHandler = Service.errorHandler; - -function run_test() { - initTestLogging("Trace"); - - Log.repository.getLogger("Sync.Service").level = Log.Level.Trace; - Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace; - Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace; - - ensureLegacyIdentityManager(); - - run_next_test(); -} - - -function clean() { - Service.startOver(); - Status.resetSync(); - Status.resetBackoff(); - errorHandler.didReportProlongedError = false; -} - -add_identity_test(this, function* test_crypto_keys_login_server_maintenance_error() { - Status.resetSync(); - // Test crypto/keys server maintenance errors are not reported. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.keys"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - // Force re-download of keys - Service.collectionKeys.clear(); - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - function onUIUpdate() { - do_throw("Shouldn't get here!"); - } - Svc.Obs.add("weave:ui:login:error", onUIUpdate); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() { - Svc.Obs.remove("weave:ui:clear-error", onLoginFinish); - - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - do_check_false(errorHandler.didReportProlongedError); - - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - clean(); - server.stop(deferred.resolve); - }); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - Service.sync(); - yield deferred.promise; -}); - -add_task(function* test_sync_prolonged_server_maintenance_error() { - // Test prolonged server maintenance errors are reported. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - const BACKOFF = 42; - let engine = engineManager.get("catapult"); - engine.enabled = true; - engine.exception = {status: 503, - headers: {"retry-after": BACKOFF}}; - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:sync:error", onUIUpdate); - do_check_eq(Status.service, SYNC_FAILED); - do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); - do_check_true(errorHandler.didReportProlongedError); - - server.stop(() => { - clean(); - deferred.resolve(); - }); - }); - - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - let ping = yield sync_and_validate_telem(true); - deepEqual(ping.status.sync, PROLONGED_SYNC_FAILURE); - deepEqual(ping.engines.find(e => e.failureReason).failureReason, - { name: "httperror", code: 503 }); - yield deferred.promise; -}); - -add_identity_test(this, function* test_info_collections_login_prolonged_server_maintenance_error(){ - // Test info/collections prolonged server maintenance errors are reported. 
- let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.info"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, SYNC_FAILED); - do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); - do_check_true(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - Service.sync(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_meta_global_login_prolonged_server_maintenance_error(){ - // Test meta/global prolonged server maintenance errors are reported. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.meta"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, SYNC_FAILED); - do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); - do_check_true(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - Service.sync(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_download_crypto_keys_login_prolonged_server_maintenance_error(){ - // Test crypto/keys prolonged server maintenance errors are reported. 
- let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.keys"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - // Force re-download of keys - Service.collectionKeys.clear(); - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, SYNC_FAILED); - do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); - do_check_true(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - Service.sync(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_upload_crypto_keys_login_prolonged_server_maintenance_error(){ - // Test crypto/keys prolonged server maintenance errors are reported. - let server = EHTestsCommon.sync_httpd_setup(); - - // Start off with an empty account, do not upload a key. - yield configureIdentity({username: "broken.keys"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, SYNC_FAILED); - do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); - do_check_true(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - Service.sync(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_wipeServer_login_prolonged_server_maintenance_error(){ - // Test that we report prolonged server maintenance errors that occur whilst - // wiping the server. - let server = EHTestsCommon.sync_httpd_setup(); - - // Start off with an empty account, do not upload a key. 
- yield configureIdentity({username: "broken.wipe"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, SYNC_FAILED); - do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); - do_check_true(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - Service.sync(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_wipeRemote_prolonged_server_maintenance_error(){ - // Test that we report prolonged server maintenance errors that occur whilst - // wiping all remote devices. - let server = EHTestsCommon.sync_httpd_setup(); - - server.registerPathHandler("/1.1/broken.wipe/storage/catapult", EHTestsCommon.service_unavailable); - yield configureIdentity({username: "broken.wipe"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - EHTestsCommon.generateAndUploadKeys(); - - let engine = engineManager.get("catapult"); - engine.exception = null; - engine.enabled = true; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:sync:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, SYNC_FAILED); - do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE); - do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote"); - do_check_true(errorHandler.didReportProlongedError); - server.stop(() => { - clean(); - deferred.resolve(); - }); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - Svc.Prefs.set("firstSync", "wipeRemote"); - setLastSync(PROLONGED_ERROR_DURATION); - let ping = yield sync_and_validate_telem(true); - deepEqual(ping.failureReason, { name: "httperror", code: 503 }); - yield deferred.promise; -}); - -add_task(function* test_sync_syncAndReportErrors_server_maintenance_error() { - // Test server maintenance errors are reported - // when calling syncAndReportErrors. 
- let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - const BACKOFF = 42; - let engine = engineManager.get("catapult"); - engine.enabled = true; - engine.exception = {status: 503, - headers: {"retry-after": BACKOFF}}; - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:sync:error", onUIUpdate); - do_check_eq(Status.service, SYNC_FAILED_PARTIAL); - do_check_eq(Status.sync, SERVER_MAINTENANCE); - do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_eq(Status.service, STATUS_OK); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_info_collections_login_syncAndReportErrors_server_maintenance_error() { - // Test info/collections server maintenance errors are reported - // when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.info"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_meta_global_login_syncAndReportErrors_server_maintenance_error() { - // Test meta/global server maintenance errors are reported - // when calling syncAndReportErrors. 
- let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.meta"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_download_crypto_keys_login_syncAndReportErrors_server_maintenance_error() { - // Test crypto/keys server maintenance errors are reported - // when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.keys"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - // Force re-download of keys - Service.collectionKeys.clear(); - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_upload_crypto_keys_login_syncAndReportErrors_server_maintenance_error() { - // Test crypto/keys server maintenance errors are reported - // when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - - // Start off with an empty account, do not upload a key. 
- yield configureIdentity({username: "broken.keys"});
- Service.serverURL = server.baseURI + "/maintenance/";
- Service.clusterURL = server.baseURI + "/maintenance/";
-
- let backoffInterval;
- Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- });
-
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
-
- do_check_false(Status.enforceBackoff);
- do_check_eq(Status.service, STATUS_OK);
-
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- errorHandler.syncAndReportErrors();
- yield deferred.promise;
-});
-
-add_identity_test(this, function* test_wipeServer_login_syncAndReportErrors_server_maintenance_error() {
- // Test wipeServer server maintenance errors are reported
- // when calling syncAndReportErrors.
- let server = EHTestsCommon.sync_httpd_setup();
-
- // Start off with an empty account, do not upload a key.
- yield configureIdentity({username: "broken.wipe"});
- Service.serverURL = server.baseURI + "/maintenance/";
- Service.clusterURL = server.baseURI + "/maintenance/";
-
- let backoffInterval;
- Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- });
-
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
-
- do_check_false(Status.enforceBackoff);
- do_check_eq(Status.service, STATUS_OK);
-
- setLastSync(NON_PROLONGED_ERROR_DURATION);
- errorHandler.syncAndReportErrors();
- yield deferred.promise;
-});
-
-add_identity_test(this, function* test_wipeRemote_syncAndReportErrors_server_maintenance_error(){
- // Test that we report server maintenance errors that occur whilst
- // wiping all remote devices, when calling syncAndReportErrors.
- let server = EHTestsCommon.sync_httpd_setup(); - - yield configureIdentity({username: "broken.wipe"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - EHTestsCommon.generateAndUploadKeys(); - - let engine = engineManager.get("catapult"); - engine.exception = null; - engine.enabled = true; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:sync:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, SYNC_FAILED); - do_check_eq(Status.sync, SERVER_MAINTENANCE); - do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote"); - do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - Svc.Prefs.set("firstSync", "wipeRemote"); - setLastSync(NON_PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_task(function* test_sync_syncAndReportErrors_prolonged_server_maintenance_error() { - // Test prolonged server maintenance errors are - // reported when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - const BACKOFF = 42; - let engine = engineManager.get("catapult"); - engine.enabled = true; - engine.exception = {status: 503, - headers: {"retry-after": BACKOFF}}; - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:sync:error", onUIUpdate); - do_check_eq(Status.service, SYNC_FAILED_PARTIAL); - do_check_eq(Status.sync, SERVER_MAINTENANCE); - // syncAndReportErrors means dontIgnoreErrors, which means - // didReportProlongedError not touched. - do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_info_collections_login_syncAndReportErrors_prolonged_server_maintenance_error() { - // Test info/collections server maintenance errors are reported - // when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.info"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - // syncAndReportErrors means dontIgnoreErrors, which means - // didReportProlongedError not touched. 
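The catapult engine used above is the suite's fault-injection engine: whatever object a test assigns to engine.exception is thrown from its sync, which is how the 503-with-retry-after case is produced. Roughly, inferred from its usage here rather than quoted from the shared helpers:

Cu.import("resource://services-sync/engines.js");

function CatapultSketch(service) {
  SyncEngine.call(this, "Catapult", service);
}
CatapultSketch.prototype = {
  __proto__: SyncEngine.prototype,
  exception: null,          // tests set this, or null it to disarm
  _sync: function _sync() {
    if (this.exception) {
      throw this.exception; // surfaces as an engine sync failure
    }
  }
};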
- do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_meta_global_login_syncAndReportErrors_prolonged_server_maintenance_error() { - // Test meta/global server maintenance errors are reported - // when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.meta"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - // syncAndReportErrors means dontIgnoreErrors, which means - // didReportProlongedError not touched. - do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_download_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() { - // Test crypto/keys server maintenance errors are reported - // when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - yield EHTestsCommon.setUp(server); - - yield configureIdentity({username: "broken.keys"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - // Force re-download of keys - Service.collectionKeys.clear(); - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - // syncAndReportErrors means dontIgnoreErrors, which means - // didReportProlongedError not touched. - do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_upload_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() { - // Test crypto/keys server maintenance errors are reported - // when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - - // Start off with an empty account, do not upload a key. 
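Whether a failure counts as prolonged comes down to how stale lastSync is when the error fires. The helper every test here calls simply backdates the pref (it appears verbatim in the filelog test further down), and the prolonged constant sits past the errorhandler.networkFailureReportTimeout threshold:

function setLastSync(lastSyncValue) {
  Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
}

// As defined below: twice the report timeout, converted to milliseconds,
// is safely beyond the prolonged-error cutoff.
const PROLONGED_ERROR_DURATION =
  (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') * 2) * 1000;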
- yield configureIdentity({username: "broken.keys"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - // syncAndReportErrors means dontIgnoreErrors, which means - // didReportProlongedError not touched. - do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_identity_test(this, function* test_wipeServer_login_syncAndReportErrors_prolonged_server_maintenance_error() { - // Test crypto/keys server maintenance errors are reported - // when calling syncAndReportErrors. - let server = EHTestsCommon.sync_httpd_setup(); - - // Start off with an empty account, do not upload a key. - yield configureIdentity({username: "broken.wipe"}); - Service.serverURL = server.baseURI + "/maintenance/"; - Service.clusterURL = server.baseURI + "/maintenance/"; - - let backoffInterval; - Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) { - Svc.Obs.remove("weave:service:backoff:interval", observe); - backoffInterval = subject; - }); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:ui:login:error", function onUIUpdate() { - Svc.Obs.remove("weave:ui:login:error", onUIUpdate); - do_check_true(Status.enforceBackoff); - do_check_eq(backoffInterval, 42); - do_check_eq(Status.service, LOGIN_FAILED); - do_check_eq(Status.login, SERVER_MAINTENANCE); - // syncAndReportErrors means dontIgnoreErrors, which means - // didReportProlongedError not touched. - do_check_false(errorHandler.didReportProlongedError); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_false(Status.enforceBackoff); - do_check_eq(Status.service, STATUS_OK); - - setLastSync(PROLONGED_ERROR_DURATION); - errorHandler.syncAndReportErrors(); - yield deferred.promise; -}); - -add_task(function* test_sync_engine_generic_fail() { - let server = EHTestsCommon.sync_httpd_setup(); - -let engine = engineManager.get("catapult"); - engine.enabled = true; - engine.sync = function sync() { - Svc.Obs.notify("weave:engine:sync:error", ENGINE_UNKNOWN_FAIL, "catapult"); - }; - - let log = Log.repository.getLogger("Sync.ErrorHandler"); - Svc.Prefs.set("log.appender.file.logOnError", true); - - do_check_eq(Status.engines["catapult"], undefined); - - let deferred = Promise.defer(); - // Don't wait for reset-file-log until the sync is underway. - // This avoids us catching a delayed notification from an earlier test. 
- Svc.Obs.add("weave:engine:sync:finish", function onEngineFinish() { - Svc.Obs.remove("weave:engine:sync:finish", onEngineFinish); - - log.info("Adding reset-file-log observer."); - Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() { - Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog); - - // Put these checks here, not after sync(), so that we aren't racing the - // log handler... which resets everything just a few lines below! - _("Status.engines: " + JSON.stringify(Status.engines)); - do_check_eq(Status.engines["catapult"], ENGINE_UNKNOWN_FAIL); - do_check_eq(Status.service, SYNC_FAILED_PARTIAL); - - // Test Error log was written on SYNC_FAILED_PARTIAL. - let entries = logsdir.directoryEntries; - do_check_true(entries.hasMoreElements()); - let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile); - do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName); - - clean(); - - let syncErrors = sumHistogram("WEAVE_ENGINE_SYNC_ERRORS", { key: "catapult" }); - do_check_true(syncErrors, 1); - - server.stop(() => { - clean(); - deferred.resolve(); - }); - }); - }); - - do_check_true(yield EHTestsCommon.setUp(server)); - let ping = yield sync_and_validate_telem(true); - deepEqual(ping.status.service, SYNC_FAILED_PARTIAL); - deepEqual(ping.engines.find(e => e.status).status, ENGINE_UNKNOWN_FAIL); - - yield deferred.promise; -}); - -add_test(function test_logs_on_sync_error_despite_shouldReportError() { - _("Ensure that an error is still logged when weave:service:sync:error " + - "is notified, despite shouldReportError returning false."); - - let log = Log.repository.getLogger("Sync.ErrorHandler"); - Svc.Prefs.set("log.appender.file.logOnError", true); - log.info("TESTING"); - - // Ensure that we report no error. - Status.login = MASTER_PASSWORD_LOCKED; - do_check_false(errorHandler.shouldReportError()); - - Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() { - Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog); - - // Test that error log was written. - let entries = logsdir.directoryEntries; - do_check_true(entries.hasMoreElements()); - let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile); - do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName); - - clean(); - run_next_test(); - }); - Svc.Obs.notify("weave:service:sync:error", {}); -}); - -add_test(function test_logs_on_login_error_despite_shouldReportError() { - _("Ensure that an error is still logged when weave:service:login:error " + - "is notified, despite shouldReportError returning false."); - - let log = Log.repository.getLogger("Sync.ErrorHandler"); - Svc.Prefs.set("log.appender.file.logOnError", true); - log.info("TESTING"); - - // Ensure that we report no error. - Status.login = MASTER_PASSWORD_LOCKED; - do_check_false(errorHandler.shouldReportError()); - - Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() { - Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog); - - // Test that error log was written. - let entries = logsdir.directoryEntries; - do_check_true(entries.hasMoreElements()); - let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile); - do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName); - - clean(); - run_next_test(); - }); - Svc.Obs.notify("weave:service:login:error", {}); -}); - -// This test should be the last one since it monkeypatches the engine object -// and we should only have one engine object throughout the file (bug 629664). 
-add_task(function* test_engine_applyFailed() { - let server = EHTestsCommon.sync_httpd_setup(); - - let engine = engineManager.get("catapult"); - engine.enabled = true; - delete engine.exception; - engine.sync = function sync() { - Svc.Obs.notify("weave:engine:sync:applied", {newFailed:1}, "catapult"); - }; - - let log = Log.repository.getLogger("Sync.ErrorHandler"); - Svc.Prefs.set("log.appender.file.logOnError", true); - - let deferred = Promise.defer(); - Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() { - Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog); - - do_check_eq(Status.engines["catapult"], ENGINE_APPLY_FAIL); - do_check_eq(Status.service, SYNC_FAILED_PARTIAL); - - // Test Error log was written on SYNC_FAILED_PARTIAL. - let entries = logsdir.directoryEntries; - do_check_true(entries.hasMoreElements()); - let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile); - do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName); - - clean(); - server.stop(deferred.resolve); - }); - - do_check_eq(Status.engines["catapult"], undefined); - do_check_true(yield EHTestsCommon.setUp(server)); - Service.sync(); - yield deferred.promise; -}); diff --git a/services/sync/tests/unit/test_errorhandler_eol.js b/services/sync/tests/unit/test_errorhandler_eol.js index c8d2ff4be..381bc7268 100644 --- a/services/sync/tests/unit/test_errorhandler_eol.js +++ b/services/sync/tests/unit/test_errorhandler_eol.js @@ -43,7 +43,7 @@ function sync_httpd_setup(infoHandler) { return httpd_setup(handlers); } -function* setUp(server) { +function setUp(server) { yield configureIdentity({username: "johndoe"}); Service.serverURL = server.baseURI + "/"; Service.clusterURL = server.baseURI + "/"; @@ -66,7 +66,7 @@ function do_check_hard_eol(eh, start) { do_check_true(Status.eol); } -add_identity_test(this, function* test_200_hard() { +add_identity_test(this, function test_200_hard() { let eh = Service.errorHandler; let start = Date.now(); let server = sync_httpd_setup(handler200("hard-eol")); @@ -88,7 +88,7 @@ add_identity_test(this, function* test_200_hard() { yield deferred.promise; }); -add_identity_test(this, function* test_513_hard() { +add_identity_test(this, function test_513_hard() { let eh = Service.errorHandler; let start = Date.now(); let server = sync_httpd_setup(handler513); @@ -114,7 +114,7 @@ add_identity_test(this, function* test_513_hard() { yield deferred.promise; }); -add_identity_test(this, function* test_200_soft() { +add_identity_test(this, function test_200_soft() { let eh = Service.errorHandler; let start = Date.now(); let server = sync_httpd_setup(handler200("soft-eol")); diff --git a/services/sync/tests/unit/test_errorhandler_filelog.js b/services/sync/tests/unit/test_errorhandler_filelog.js index 993a478fd..0ce82b170 100644 --- a/services/sync/tests/unit/test_errorhandler_filelog.js +++ b/services/sync/tests/unit/test_errorhandler_filelog.js @@ -21,7 +21,7 @@ const DELAY_BUFFER = 500; // Buffer for timers on different OS platforms. 
const PROLONGED_ERROR_DURATION = (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') * 2) * 1000; -var errorHandler = Service.errorHandler; +let errorHandler = Service.errorHandler; function setLastSync(lastSyncValue) { Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString()); @@ -35,8 +35,6 @@ function run_test() { Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace; Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace; - validate_all_future_pings(); - run_next_test(); } @@ -47,22 +45,20 @@ add_test(function test_noOutput() { // Clear log output from startup. Svc.Prefs.set("log.appender.file.logOnSuccess", false); Svc.Obs.notify("weave:service:sync:finish"); - Svc.Obs.add("weave:service:reset-file-log", function onResetFileLogOuter() { - Svc.Obs.remove("weave:service:reset-file-log", onResetFileLogOuter); - // Clear again without having issued any output. - Svc.Prefs.set("log.appender.file.logOnSuccess", true); - Svc.Obs.add("weave:service:reset-file-log", function onResetFileLogInner() { - Svc.Obs.remove("weave:service:reset-file-log", onResetFileLogInner); + // Clear again without having issued any output. + Svc.Prefs.set("log.appender.file.logOnSuccess", true); - errorHandler._logManager._fileAppender.level = Log.Level.Trace; - Svc.Prefs.resetBranch(""); - run_next_test(); - }); + Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() { + Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog); - // Fake a successful sync. - Svc.Obs.notify("weave:service:sync:finish"); + errorHandler._logManager._fileAppender.level = Log.Level.Trace; + Svc.Prefs.resetBranch(""); + run_next_test(); }); + + // Fake a successful sync. + Svc.Obs.notify("weave:service:sync:finish"); }); add_test(function test_logOnSuccess_false() { @@ -85,14 +81,16 @@ add_test(function test_logOnSuccess_false() { }); function readFile(file, callback) { - NetUtil.asyncFetch({ - uri: NetUtil.newURI(file), - loadUsingSystemPrincipal: true - }, function (inputStream, statusCode, request) { + NetUtil.asyncFetch2(file, function (inputStream, statusCode, request) { let data = NetUtil.readInputStreamToString(inputStream, inputStream.available()); callback(statusCode, data); - }); + }, + null, // aLoadingNode + Services.scriptSecurityManager.getSystemPrincipal(), + null, // aTriggeringPrincipal + Ci.nsILoadInfo.SEC_NORMAL, + Ci.nsIContentPolicy.TYPE_OTHER); } add_test(function test_logOnSuccess_true() { @@ -269,51 +267,6 @@ add_test(function test_login_error_logOnError_true() { Svc.Obs.notify("weave:service:login:error"); }); - -add_test(function test_errorLog_dumpAddons() { - Svc.Prefs.set("log.appender.file.logOnError", true); - - let log = Log.repository.getLogger("Sync.Test.FileLog"); - - // We need to wait until the log cleanup started by this test is complete - // or the next test will fail as it is ongoing. 
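Both the old and new readFile above keep the same callback contract - a status code plus the file contents as a string - which is all the callers below depend on. A promise wrapper over it, as a sketch:

function promiseReadFile(file) {
  return new Promise((resolve, reject) => {
    readFile(file, (status, data) => {
      // Same success test the callers below apply by hand.
      if (Components.isSuccessCode(status)) {
        resolve(data);
      } else {
        reject(status);
      }
    });
  });
}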
- Svc.Obs.add("services-tests:common:log-manager:cleanup-logs", function onCleanupLogs() { - Svc.Obs.remove("services-tests:common:log-manager:cleanup-logs", onCleanupLogs); - run_next_test(); - }); - - Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() { - Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog); - - let entries = logsdir.directoryEntries; - do_check_true(entries.hasMoreElements()); - let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile); - do_check_eq(logfile.leafName.slice(-4), ".txt"); - do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName); - do_check_false(entries.hasMoreElements()); - - // Ensure we logged some addon list (which is probably empty) - readFile(logfile, function (error, data) { - do_check_true(Components.isSuccessCode(error)); - do_check_neq(data.indexOf("Addons installed"), -1); - - // Clean up. - try { - logfile.remove(false); - } catch(ex) { - dump("Couldn't delete file: " + ex + "\n"); - // Stupid Windows box. - } - - Svc.Prefs.resetBranch(""); - }); - }); - - // Fake an unsuccessful sync due to prolonged failure. - setLastSync(PROLONGED_ERROR_DURATION); - Svc.Obs.notify("weave:service:sync:error"); -}); - // Check that error log files are deleted above an age threshold. add_test(function test_logErrorCleanup_age() { _("Beginning test_logErrorCleanup_age."); diff --git a/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js index 953f59fcb..18cea2cce 100644 --- a/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js +++ b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js @@ -13,7 +13,7 @@ Cu.import("resource://testing-common/services/sync/utils.js"); initTestLogging("Trace"); -var engineManager = Service.engineManager; +let engineManager = Service.engineManager; engineManager.clear(); function promiseStopServer(server) { @@ -59,7 +59,7 @@ function sync_httpd_setup() { return httpd_setup(handlers); } -function* setUp(server) { +function setUp(server) { yield configureIdentity({username: "johndoe"}); Service.serverURL = server.baseURI + "/"; Service.clusterURL = server.baseURI + "/"; @@ -75,7 +75,7 @@ function generateAndUploadKeys(server) { } -add_identity_test(this, function* test_backoff500() { +add_identity_test(this, function test_backoff500() { _("Test: HTTP 500 sets backoff status."); let server = sync_httpd_setup(); yield setUp(server); @@ -102,7 +102,7 @@ add_identity_test(this, function* test_backoff500() { yield promiseStopServer(server); }); -add_identity_test(this, function* test_backoff503() { +add_identity_test(this, function test_backoff503() { _("Test: HTTP 503 with Retry-After header leads to backoff notification and sets backoff status."); let server = sync_httpd_setup(); yield setUp(server); @@ -138,7 +138,7 @@ add_identity_test(this, function* test_backoff503() { yield promiseStopServer(server); }); -add_identity_test(this, function* test_overQuota() { +add_identity_test(this, function test_overQuota() { _("Test: HTTP 400 with body error code 14 means over quota."); let server = sync_httpd_setup(); yield setUp(server); @@ -167,7 +167,7 @@ add_identity_test(this, function* test_overQuota() { yield promiseStopServer(server); }); -add_identity_test(this, function* test_service_networkError() { +add_identity_test(this, function test_service_networkError() { _("Test: Connection refused error from Service.sync() leads to the right status code."); let server = 
sync_httpd_setup(); yield setUp(server); @@ -193,14 +193,13 @@ add_identity_test(this, function* test_service_networkError() { yield deferred.promise; }); -add_identity_test(this, function* test_service_offline() { +add_identity_test(this, function test_service_offline() { _("Test: Wanting to sync in offline mode leads to the right status code but does not increment the ignorable error count."); let server = sync_httpd_setup(); yield setUp(server); let deferred = Promise.defer(); server.stop(() => { Services.io.offline = true; - Services.prefs.setBoolPref("network.dns.offline-localhost", false); try { do_check_eq(Status.sync, SYNC_SUCCEEDED); @@ -215,13 +214,12 @@ add_identity_test(this, function* test_service_offline() { Service.startOver(); } Services.io.offline = false; - Services.prefs.clearUserPref("network.dns.offline-localhost"); deferred.resolve(); }); yield deferred.promise; }); -add_identity_test(this, function* test_engine_networkError() { +add_identity_test(this, function test_engine_networkError() { _("Test: Network related exceptions from engine.sync() lead to the right status code."); let server = sync_httpd_setup(); yield setUp(server); @@ -248,7 +246,7 @@ add_identity_test(this, function* test_engine_networkError() { yield promiseStopServer(server); }); -add_identity_test(this, function* test_resource_timeout() { +add_identity_test(this, function test_resource_timeout() { let server = sync_httpd_setup(); yield setUp(server); @@ -276,7 +274,6 @@ add_identity_test(this, function* test_resource_timeout() { }); function run_test() { - validate_all_future_pings(); engineManager.register(CatapultEngine); run_next_test(); } diff --git a/services/sync/tests/unit/test_extension_storage_crypto.js b/services/sync/tests/unit/test_extension_storage_crypto.js deleted file mode 100644 index f93e4970d..000000000 --- a/services/sync/tests/unit/test_extension_storage_crypto.js +++ /dev/null @@ -1,93 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. - * http://creativecommons.org/publicdomain/zero/1.0/ */ - -"use strict"; - -Cu.import("resource://services-crypto/utils.js"); -Cu.import("resource://services-sync/engines/extension-storage.js"); -Cu.import("resource://services-sync/util.js"); - -/** - * Like Assert.throws, but for generators. - * - * @param {string | Object | function} constraint - * What to use to check the exception. - * @param {function} f - * The function to call. - */ -function* throwsGen(constraint, f) { - let threw = false; - let exception; - try { - yield* f(); - } - catch (e) { - threw = true; - exception = e; - } - - ok(threw, "did not throw an exception"); - - const debuggingMessage = `got ${exception}, expected ${constraint}`; - let message = exception; - if (typeof exception === "object") { - message = exception.message; - } - - if (typeof constraint === "function") { - ok(constraint(message), debuggingMessage); - } else { - ok(constraint === message, debuggingMessage); - } - -} - -/** - * An EncryptionRemoteTransformer that uses a fixed key bundle, - * suitable for testing. 
- */ -class StaticKeyEncryptionRemoteTransformer extends EncryptionRemoteTransformer { - constructor(keyBundle) { - super(); - this.keyBundle = keyBundle; - } - - getKeys() { - return Promise.resolve(this.keyBundle); - } -} -const BORING_KB = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"; -const STRETCHED_KEY = CryptoUtils.hkdf(BORING_KB, undefined, `testing storage.sync encryption`, 2*32); -const KEY_BUNDLE = { - sha256HMACHasher: Utils.makeHMACHasher(Ci.nsICryptoHMAC.SHA256, Utils.makeHMACKey(STRETCHED_KEY.slice(0, 32))), - encryptionKeyB64: btoa(STRETCHED_KEY.slice(32, 64)), -}; -const transformer = new StaticKeyEncryptionRemoteTransformer(KEY_BUNDLE); - -add_task(function* test_encryption_transformer_roundtrip() { - const POSSIBLE_DATAS = [ - "string", - 2, // number - [1, 2, 3], // array - {key: "value"}, // object - ]; - - for (let data of POSSIBLE_DATAS) { - const record = {data: data, id: "key-some_2D_key", key: "some-key"}; - - deepEqual(record, yield transformer.decode(yield transformer.encode(record))); - } -}); - -add_task(function* test_refuses_to_decrypt_tampered() { - const encryptedRecord = yield transformer.encode({data: [1, 2, 3], id: "key-some_2D_key", key: "some-key"}); - const tamperedHMAC = Object.assign({}, encryptedRecord, {hmac: "0000000000000000000000000000000000000000000000000000000000000001"}); - yield* throwsGen(Utils.isHMACMismatch, function*() { - yield transformer.decode(tamperedHMAC); - }); - - const tamperedIV = Object.assign({}, encryptedRecord, {IV: "aaaaaaaaaaaaaaaaaaaaaa=="}); - yield* throwsGen(Utils.isHMACMismatch, function*() { - yield transformer.decode(tamperedIV); - }); -}); diff --git a/services/sync/tests/unit/test_extension_storage_engine.js b/services/sync/tests/unit/test_extension_storage_engine.js deleted file mode 100644 index 1b2792703..000000000 --- a/services/sync/tests/unit/test_extension_storage_engine.js +++ /dev/null @@ -1,62 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. - * http://creativecommons.org/publicdomain/zero/1.0/ */ - -"use strict"; - -Cu.import("resource://services-sync/engines.js"); -Cu.import("resource://services-sync/engines/extension-storage.js"); -Cu.import("resource://services-sync/service.js"); -Cu.import("resource://services-sync/util.js"); -Cu.import("resource://testing-common/services/sync/utils.js"); -Cu.import("resource://gre/modules/ExtensionStorageSync.jsm"); - -Service.engineManager.register(ExtensionStorageEngine); -const engine = Service.engineManager.get("extension-storage"); -do_get_profile(); // so we can use FxAccounts -loadWebExtensionTestFunctions(); - -function mock(options) { - let calls = []; - let ret = function() { - calls.push(arguments); - return options.returns; - } - Object.setPrototypeOf(ret, { - __proto__: Function.prototype, - get calls() { - return calls; - } - }); - return ret; -} - -add_task(function* test_calling_sync_calls__sync() { - let oldSync = ExtensionStorageEngine.prototype._sync; - let syncMock = ExtensionStorageEngine.prototype._sync = mock({returns: true}); - try { - // I wanted to call the main sync entry point for the entire - // package, but that fails because it tries to sync ClientEngine - // first, which fails. 
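The mock() helper above records every invocation while forcing a fixed return value; the calls list is exposed through a getter on the function's prototype. A quick usage sketch with hypothetical names:

let fetchMock = mock({returns: 42});
fetchMock("first", 1);
fetchMock("second", 2);
// fetchMock.calls.length == 2, fetchMock.calls[1][0] == "second",
// and both invocations returned 42.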
- yield engine.sync(); - } finally { - ExtensionStorageEngine.prototype._sync = oldSync; - } - equal(syncMock.calls.length, 1); -}); - -add_task(function* test_calling_sync_calls_ext_storage_sync() { - const extension = {id: "my-extension"}; - let oldSync = ExtensionStorageSync.syncAll; - let syncMock = ExtensionStorageSync.syncAll = mock({returns: Promise.resolve()}); - try { - yield* withSyncContext(function* (context) { - // Set something so that everyone knows that we're using storage.sync - yield ExtensionStorageSync.set(extension, {"a": "b"}, context); - - yield engine._sync(); - }); - } finally { - ExtensionStorageSync.syncAll = oldSync; - } - do_check_true(syncMock.calls.length >= 1); -}); diff --git a/services/sync/tests/unit/test_extension_storage_tracker.js b/services/sync/tests/unit/test_extension_storage_tracker.js deleted file mode 100644 index fac51a897..000000000 --- a/services/sync/tests/unit/test_extension_storage_tracker.js +++ /dev/null @@ -1,38 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. - * http://creativecommons.org/publicdomain/zero/1.0/ */ - -"use strict"; - -Cu.import("resource://services-sync/constants.js"); -Cu.import("resource://services-sync/engines.js"); -Cu.import("resource://services-sync/engines/extension-storage.js"); -Cu.import("resource://services-sync/service.js"); -Cu.import("resource://services-sync/util.js"); -Cu.import("resource://gre/modules/ExtensionStorageSync.jsm"); - -Service.engineManager.register(ExtensionStorageEngine); -const engine = Service.engineManager.get("extension-storage"); -do_get_profile(); // so we can use FxAccounts -loadWebExtensionTestFunctions(); - -add_task(function* test_changing_extension_storage_changes_score() { - const tracker = engine._tracker; - const extension = {id: "my-extension-id"}; - Svc.Obs.notify("weave:engine:start-tracking"); - yield* withSyncContext(function*(context) { - yield ExtensionStorageSync.set(extension, {"a": "b"}, context); - }); - do_check_eq(tracker.score, SCORE_INCREMENT_MEDIUM); - - tracker.resetScore(); - yield* withSyncContext(function*(context) { - yield ExtensionStorageSync.remove(extension, "a", context); - }); - do_check_eq(tracker.score, SCORE_INCREMENT_MEDIUM); - - Svc.Obs.notify("weave:engine:stop-tracking"); -}); - -function run_test() { - run_next_test(); -} diff --git a/services/sync/tests/unit/test_forms_tracker.js b/services/sync/tests/unit/test_forms_tracker.js index f14e208b3..5f7aaa648 100644 --- a/services/sync/tests/unit/test_forms_tracker.js +++ b/services/sync/tests/unit/test_forms_tracker.js @@ -40,18 +40,6 @@ function run_test() { addEntry("address", "Memory Lane"); do_check_attribute_count(tracker.changedIDs, 3); - - _("Check that ignoreAll is respected"); - tracker.clearChangedIDs(); - tracker.score = 0; - tracker.ignoreAll = true; - addEntry("username", "johndoe123"); - addEntry("favoritecolor", "green"); - removeEntry("name", "John Doe"); - tracker.ignoreAll = false; - do_check_empty(tracker.changedIDs); - equal(tracker.score, 0); - _("Let's stop tracking again."); tracker.clearChangedIDs(); Svc.Obs.notify("weave:engine:stop-tracking"); @@ -63,8 +51,6 @@ function run_test() { removeEntry("email", "john@doe.com"); do_check_empty(tracker.changedIDs); - - } finally { _("Clean up."); engine._store.wipe(); diff --git a/services/sync/tests/unit/test_fxa_migration.js b/services/sync/tests/unit/test_fxa_migration.js new file mode 100644 index 000000000..7c65d5996 --- /dev/null +++ b/services/sync/tests/unit/test_fxa_migration.js @@ -0,0 +1,279 @@ +// Test 
the FxAMigration module +Cu.import("resource://services-sync/FxaMigrator.jsm"); +Cu.import("resource://gre/modules/Promise.jsm"); +Cu.import("resource://gre/modules/FxAccounts.jsm"); +Cu.import("resource://gre/modules/FxAccountsCommon.js"); +Cu.import("resource://services-sync/browserid_identity.js"); + +// Set our username pref early so sync initializes with the legacy provider. +Services.prefs.setCharPref("services.sync.username", "foo"); +// And ensure all debug messages end up being printed. +Services.prefs.setCharPref("services.sync.log.appender.dump", "Debug"); + +// Now import sync +Cu.import("resource://services-sync/service.js"); +Cu.import("resource://services-sync/record.js"); +Cu.import("resource://services-sync/util.js"); + +// And reset the username. +Services.prefs.clearUserPref("services.sync.username"); + +Cu.import("resource://testing-common/services/sync/utils.js"); +Cu.import("resource://testing-common/services/common/logging.js"); +Cu.import("resource://testing-common/services/sync/rotaryengine.js"); + +const FXA_USERNAME = "someone@somewhere"; + +// Utilities +function promiseOneObserver(topic) { + return new Promise((resolve, reject) => { + let observer = function(subject, topic, data) { + Services.obs.removeObserver(observer, topic); + resolve({ subject: subject, data: data }); + } + Services.obs.addObserver(observer, topic, false); + }); +} + +function promiseStopServer(server) { + return new Promise((resolve, reject) => { + server.stop(resolve); + }); +} + + +// Helpers +function configureLegacySync() { + let engine = new RotaryEngine(Service); + engine.enabled = true; + Svc.Prefs.set("registerEngines", engine.name); + Svc.Prefs.set("log.logger.engine.rotary", "Trace"); + + let contents = { + meta: {global: {engines: {rotary: {version: engine.version, + syncID: engine.syncID}}}}, + crypto: {}, + rotary: {} + }; + + const USER = "foo"; + const PASSPHRASE = "abcdeabcdeabcdeabcdeabcdea"; + + setBasicCredentials(USER, "password", PASSPHRASE); + + let onRequest = function(request, response) { + // ideally we'd only do this while a legacy user is configured, but WTH. + response.setHeader("x-weave-alert", JSON.stringify({code: "soft-eol"})); + } + let server = new SyncServer({onRequest: onRequest}); + server.registerUser(USER, "password"); + server.createContents(USER, contents); + server.start(); + + Service.serverURL = server.baseURI; + Service.clusterURL = server.baseURI; + Service.identity.username = USER; + Service._updateCachedURLs(); + + Service.engineManager._engines[engine.name] = engine; + + return [engine, server]; +} + +function configureFxa() { + Services.prefs.setCharPref("identity.fxaccounts.auth.uri", "http://localhost"); +} + +add_task(function *testMigration() { + configureFxa(); + + // when we do a .startOver we want the new provider. + let oldValue = Services.prefs.getBoolPref("services.sync-testing.startOverKeepIdentity"); + Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", false); + + // disable the addons engine - this engine choice is arbitrary, but we + // want to check it remains disabled after migration. + Services.prefs.setBoolPref("services.sync.engine.addons", false); + + do_register_cleanup(() => { + Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", oldValue) + Services.prefs.setBoolPref("services.sync.engine.addons", true); + }); + + // No sync user - that should report no user-action necessary. 
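Note how configureLegacySync above signals end-of-life: not through a status code but through an x-weave-alert header on every response. The eol tests earlier in this diff exercise the same channel with hard-eol and HTTP 513; restated as a minimal handler sketch:

let onRequest = function(request, response) {
  // "soft-eol" warns and starts migration; "hard-eol" means the server
  // is going away outright.
  response.setHeader("x-weave-alert", JSON.stringify({code: "soft-eol"}), false);
};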
+ Assert.deepEqual((yield fxaMigrator._queueCurrentUserState()), null, + "no user state when complete"); + + // Arrange for a legacy sync user and manually bump the migrator + let [engine, server] = configureLegacySync(); + + // Check our disabling of the "addons" engine worked, and for good measure, + // that the "passwords" engine is enabled. + Assert.ok(!Service.engineManager.get("addons").enabled, "addons is disabled"); + Assert.ok(Service.engineManager.get("passwords").enabled, "passwords is enabled"); + + // monkey-patch the migration sentinel code so we know it was called. + let haveStartedSentinel = false; + let origSetFxAMigrationSentinel = Service.setFxAMigrationSentinel; + let promiseSentinelWritten = new Promise((resolve, reject) => { + Service.setFxAMigrationSentinel = function(arg) { + haveStartedSentinel = true; + return origSetFxAMigrationSentinel.call(Service, arg).then(result => { + Service.setFxAMigrationSentinel = origSetFxAMigrationSentinel; + resolve(result); + return result; + }); + } + }); + + // We are now configured for legacy sync, but we aren't in an EOL state yet, + // so should still be not waiting for a user. + Assert.deepEqual((yield fxaMigrator._queueCurrentUserState()), null, + "no user state before server EOL"); + + // Start a sync - this will cause an EOL notification which the migrator's + // observer will notice. + let promise = promiseOneObserver("fxa-migration:state-changed"); + _("Starting sync"); + Service.sync(); + _("Finished sync"); + + // We should have seen the observer, so be waiting for an FxA user. + Assert.equal((yield promise).data, fxaMigrator.STATE_USER_FXA, "now waiting for FxA.") + + // Re-calling our user-state promise should also reflect the same state. + Assert.equal((yield fxaMigrator._queueCurrentUserState()), + fxaMigrator.STATE_USER_FXA, + "still waiting for FxA."); + + // arrange for an unverified FxA user. + let config = makeIdentityConfig({username: FXA_USERNAME}); + let fxa = new FxAccounts({}); + config.fxaccount.user.email = config.username; + delete config.fxaccount.user.verified; + // *sob* - shouldn't need this boilerplate + fxa.internal.currentAccountState.getCertificate = function(data, keyPair, mustBeValidUntil) { + this.cert = { + validUntil: fxa.internal.now() + CERT_LIFETIME, + cert: "certificate", + }; + return Promise.resolve(this.cert.cert); + }; + + // As soon as we set the FxA user the observers should fire and magically + // transition. + promise = promiseOneObserver("fxa-migration:state-changed"); + fxAccounts.setSignedInUser(config.fxaccount.user); + + let observerInfo = yield promise; + Assert.equal(observerInfo.data, + fxaMigrator.STATE_USER_FXA_VERIFIED, + "now waiting for verification"); + Assert.ok(observerInfo.subject instanceof Ci.nsISupportsString, + "email was passed to observer"); + Assert.equal(observerInfo.subject.data, + FXA_USERNAME, + "email passed to observer is correct"); + + // should have seen the user set, so state should automatically update. + Assert.equal((yield fxaMigrator._queueCurrentUserState()), + fxaMigrator.STATE_USER_FXA_VERIFIED, + "now waiting for verification"); + + // Before we verify the user, fire off a sync that calls us back during + // the sync and before it completes - this way we can ensure we do the right + // thing in terms of blocking sync and waiting for it to complete. + + let wasWaiting = false; + // This is a PITA as sync is pseudo-blocking. + engine._syncFinish = function () { + // We aren't in a generator here, so use a helper to block on promises. 
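The observer assertions above rely on the email riding along as an nsISupportsString subject, with the migration state in the data argument. For reference, the notifying side of that pattern looks roughly like this (a sketch; the real notification comes from FxaMigrator):

let email = Cc["@mozilla.org/supports-string;1"]
              .createInstance(Ci.nsISupportsString);
email.data = "someone@somewhere";
Services.obs.notifyObservers(email, "fxa-migration:state-changed",
                             fxaMigrator.STATE_USER_FXA_VERIFIED);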
+ function getState() { + let cb = Async.makeSpinningCallback(); + fxaMigrator._queueCurrentUserState().then(state => cb(null, state)); + return cb.wait(); + } + // should still be waiting for verification. + Assert.equal(getState(), fxaMigrator.STATE_USER_FXA_VERIFIED, + "still waiting for verification"); + + // arrange for the user to be verified. The fxAccount's mock story is + // broken, so go behind its back. + config.fxaccount.user.verified = true; + fxAccounts.setSignedInUser(config.fxaccount.user); + Services.obs.notifyObservers(null, ONVERIFIED_NOTIFICATION, null); + + // spinningly wait for the migrator to catch up - sync is running so + // we should be in a 'null' user-state as there is no user-action + // necessary. + let cb = Async.makeSpinningCallback(); + promiseOneObserver("fxa-migration:state-changed").then(({ data: state }) => cb(null, state)); + Assert.equal(cb.wait(), null, "no user action necessary while sync completes."); + + // We must not have started writing the sentinel yet. + Assert.ok(!haveStartedSentinel, "haven't written a sentinel yet"); + + // sync should be blocked from continuing + Assert.ok(Service.scheduler.isBlocked, "sync is blocked.") + + wasWaiting = true; + throw ex; + }; + + _("Starting sync"); + Service.sync(); + _("Finished sync"); + + // mock sync so we can ensure the final sync is scheduled with the FxA user. + // (letting a "normal" sync complete is a PITA without mocking huge amounts + // of FxA infra) + let promiseFinalSync = new Promise((resolve, reject) => { + let oldSync = Service.sync; + Service.sync = function() { + Service.sync = oldSync; + resolve(); + } + }); + + Assert.ok(wasWaiting, "everything was good while sync was running.") + + // The migration is now going to run to completion. + // sync should still be "blocked" + Assert.ok(Service.scheduler.isBlocked, "sync is blocked."); + + // We should see the migration sentinel written and it should return true. + Assert.ok((yield promiseSentinelWritten), "wrote the sentinel"); + + // And we should see a new sync start + yield promiseFinalSync; + + // and we should be configured for FxA + let WeaveService = Cc["@mozilla.org/weave/service;1"] + .getService(Components.interfaces.nsISupports) + .wrappedJSObject; + Assert.ok(WeaveService.fxAccountsEnabled, "FxA is enabled"); + Assert.ok(Service.identity instanceof BrowserIDManager, + "sync is configured with the browserid_identity provider."); + Assert.equal(Service.identity.username, config.username, "correct user configured") + Assert.ok(!Service.scheduler.isBlocked, "sync is not blocked.") + // and the user state should remain null. + Assert.deepEqual((yield fxaMigrator._queueCurrentUserState()), + null, + "still no user action necessary"); + // and our engines should be in the same enabled/disabled state as before. + Assert.ok(!Service.engineManager.get("addons").enabled, "addons is still disabled"); + Assert.ok(Service.engineManager.get("passwords").enabled, "passwords is still enabled"); + + // aaaand, we are done - clean up. 
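Both ad-hoc monkey-patches in this test - the sentinel hook and the final-sync trap - share one shape: wrap a method, resolve a promise on the first call, restore the original. Generalized into a hypothetical helper (the sentinel hook delegates to the original as shown here; the sync trap above deliberately does not):

function hookOnce(obj, name) {
  let orig = obj[name];
  return new Promise(resolve => {
    obj[name] = function() {
      obj[name] = orig;                  // restore before delegating
      resolve(Array.prototype.slice.call(arguments));
      return orig.apply(this, arguments);
    };
  });
}
// let promiseFinalSync = hookOnce(Service, "sync");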
+ yield promiseStopServer(server); +}); + + +function run_test() { + initTestLogging(); + do_register_cleanup(() => { + fxaMigrator.finalize(); + Svc.Prefs.resetBranch(""); + }); + run_next_test(); +} diff --git a/services/sync/tests/unit/test_fxa_migration_sentinel.js b/services/sync/tests/unit/test_fxa_migration_sentinel.js new file mode 100644 index 000000000..bed2dd756 --- /dev/null +++ b/services/sync/tests/unit/test_fxa_migration_sentinel.js @@ -0,0 +1,150 @@ +/* Any copyright is dedicated to the Public Domain. + http://creativecommons.org/publicdomain/zero/1.0/ */ + +// Test the reading and writing of the sync migration sentinel. +Cu.import("resource://gre/modules/Promise.jsm"); +Cu.import("resource://gre/modules/FxAccounts.jsm"); +Cu.import("resource://gre/modules/FxAccountsCommon.js"); + +Cu.import("resource://testing-common/services/sync/utils.js"); +Cu.import("resource://testing-common/services/common/logging.js"); + +Cu.import("resource://services-sync/record.js"); + +// Set our username pref early so sync initializes with the legacy provider. +Services.prefs.setCharPref("services.sync.username", "foo"); + +// Now import sync +Cu.import("resource://services-sync/service.js"); + +const USER = "foo"; +const PASSPHRASE = "abcdeabcdeabcdeabcdeabcdea"; + +function promiseStopServer(server) { + return new Promise((resolve, reject) => { + server.stop(resolve); + }); +} + +let numServerRequests = 0; + +// Helpers +function configureLegacySync() { + let contents = { + meta: {global: {}}, + crypto: {}, + }; + + setBasicCredentials(USER, "password", PASSPHRASE); + + numServerRequests = 0; + let server = new SyncServer({ + onRequest: () => { + ++numServerRequests + } + }); + server.registerUser(USER, "password"); + server.createContents(USER, contents); + server.start(); + + Service.serverURL = server.baseURI; + Service.clusterURL = server.baseURI; + Service.identity.username = USER; + Service._updateCachedURLs(); + + return server; +} + +// Test a simple round-trip of the get/set functions. +add_task(function *() { + // Arrange for a legacy sync user. + let server = configureLegacySync(); + + Assert.equal((yield Service.getFxAMigrationSentinel()), null, "no sentinel to start"); + + let sentinel = {foo: "bar"}; + yield Service.setFxAMigrationSentinel(sentinel); + + Assert.deepEqual((yield Service.getFxAMigrationSentinel()), sentinel, "got the sentinel back"); + + yield promiseStopServer(server); +}); + +// Test the records are cached by the record manager. +add_task(function *() { + // Arrange for a legacy sync user. + let server = configureLegacySync(); + Service.login(); + + // Reset the request count here as the login would have made some. + numServerRequests = 0; + + Assert.equal((yield Service.getFxAMigrationSentinel()), null, "no sentinel to start"); + Assert.equal(numServerRequests, 1, "first fetch should hit the server"); + + let sentinel = {foo: "bar"}; + yield Service.setFxAMigrationSentinel(sentinel); + Assert.equal(numServerRequests, 2, "setting sentinel should hit the server"); + + Assert.deepEqual((yield Service.getFxAMigrationSentinel()), sentinel, "got the sentinel back"); + Assert.equal(numServerRequests, 2, "second fetch should not should hit the server"); + + // Clobber the caches and ensure we still get the correct value back when we + // do hit the server. 
+ Service.recordManager.clearCache(); + Assert.deepEqual((yield Service.getFxAMigrationSentinel()), sentinel, "got the sentinel back"); + Assert.equal(numServerRequests, 3, "should have re-hit the server with empty caches"); + + yield promiseStopServer(server); +}); + +// Test the records are cached by a sync. +add_task(function* () { + let server = configureLegacySync(); + + // A first sync clobbers meta/global due to it being empty, so we first + // do a sync which forces a good set of data on the server. + Service.sync(); + + // Now create a sentinel exists on the server. It's encrypted, so we need to + // put an encrypted version. + let cryptoWrapper = new CryptoWrapper("meta", "fxa_credentials"); + let sentinel = {foo: "bar"}; + cryptoWrapper.cleartext = { + id: "fxa_credentials", + sentinel: sentinel, + deleted: false, + } + cryptoWrapper.encrypt(Service.identity.syncKeyBundle); + let payload = { + ciphertext: cryptoWrapper.ciphertext, + IV: cryptoWrapper.IV, + hmac: cryptoWrapper.hmac, + }; + + server.createContents(USER, { + meta: {fxa_credentials: payload}, + crypto: {}, + }); + + // Another sync - this will cause the encrypted record to be fetched. + Service.sync(); + // Reset the request count here as the sync will have made many! + numServerRequests = 0; + + // Asking for the sentinel should use the copy cached in the record manager. + Assert.deepEqual((yield Service.getFxAMigrationSentinel()), sentinel, "got it"); + Assert.equal(numServerRequests, 0, "should not have hit the server"); + + // And asking for it again should work (we have to work around the fact the + // ciphertext is clobbered on first decrypt...) + Assert.deepEqual((yield Service.getFxAMigrationSentinel()), sentinel, "got it again"); + Assert.equal(numServerRequests, 0, "should not have hit the server"); + + yield promiseStopServer(server); +}); + +function run_test() { + initTestLogging(); + run_next_test(); +} diff --git a/services/sync/tests/unit/test_fxa_node_reassignment.js b/services/sync/tests/unit/test_fxa_node_reassignment.js index 3e4cefd53..2f61afd6f 100644 --- a/services/sync/tests/unit/test_fxa_node_reassignment.js +++ b/services/sync/tests/unit/test_fxa_node_reassignment.js @@ -1,368 +1,321 @@ -/* Any copyright is dedicated to the Public Domain. - http://creativecommons.org/publicdomain/zero/1.0/ */ - -_("Test that node reassignment happens correctly using the FxA identity mgr."); -// The node-reassignment logic is quite different for FxA than for the legacy -// provider. In particular, there's no special request necessary for -// reassignment - it comes from the token server - so we need to ensure the -// Fxa cluster manager grabs a new token. 
- -Cu.import("resource://gre/modules/Log.jsm"); -Cu.import("resource://services-common/rest.js"); -Cu.import("resource://services-sync/constants.js"); -Cu.import("resource://services-sync/service.js"); -Cu.import("resource://services-sync/status.js"); -Cu.import("resource://services-sync/util.js"); -Cu.import("resource://testing-common/services/sync/rotaryengine.js"); -Cu.import("resource://services-sync/browserid_identity.js"); -Cu.import("resource://testing-common/services/sync/utils.js"); - -Service.engineManager.clear(); - -function run_test() { - Log.repository.getLogger("Sync.AsyncResource").level = Log.Level.Trace; - Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace; - Log.repository.getLogger("Sync.Resource").level = Log.Level.Trace; - Log.repository.getLogger("Sync.RESTRequest").level = Log.Level.Trace; - Log.repository.getLogger("Sync.Service").level = Log.Level.Trace; - Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace; - initTestLogging(); - - Service.engineManager.register(RotaryEngine); - - // Setup the FxA identity manager and cluster manager. - Status.__authManager = Service.identity = new BrowserIDManager(); - Service._clusterManager = Service.identity.createClusterManager(Service); - - // None of the failures in this file should result in a UI error. - function onUIError() { - do_throw("Errors should not be presented in the UI."); - } - Svc.Obs.add("weave:ui:login:error", onUIError); - Svc.Obs.add("weave:ui:sync:error", onUIError); - - run_next_test(); -} - - -// API-compatible with SyncServer handler. Bind `handler` to something to use -// as a ServerCollection handler. -function handleReassign(handler, req, resp) { - resp.setStatusLine(req.httpVersion, 401, "Node reassignment"); - resp.setHeader("Content-Type", "application/json"); - let reassignBody = JSON.stringify({error: "401inator in place"}); - resp.bodyOutputStream.write(reassignBody, reassignBody.length); -} - -var numTokenRequests = 0; - -function prepareServer(cbAfterTokenFetch) { - let config = makeIdentityConfig({username: "johndoe"}); - // A server callback to ensure we don't accidentally hit the wrong endpoint - // after a node reassignment. - let callback = { - __proto__: SyncServerCallback, - onRequest(req, resp) { - let full = `${req.scheme}://${req.host}:${req.port}${req.path}`; - do_check_true(full.startsWith(config.fxaccount.token.endpoint), - `request made to ${full}`); - } - } - let server = new SyncServer(callback); - server.registerUser("johndoe"); - server.start(); - - // Set the token endpoint for the initial token request that's done implicitly - // via configureIdentity. - config.fxaccount.token.endpoint = server.baseURI + "1.1/johndoe/"; - // And future token fetches will do magic around numReassigns. - let numReassigns = 0; - return configureIdentity(config).then(() => { - Service.identity._tokenServerClient = { - getTokenFromBrowserIDAssertion: function(uri, assertion, cb) { - // Build a new URL with trailing zeros for the SYNC_VERSION part - this - // will still be seen as equivalent by the test server, but different - // by sync itself. 
- numReassigns += 1; - let trailingZeros = new Array(numReassigns + 1).join('0'); - let token = config.fxaccount.token; - token.endpoint = server.baseURI + "1.1" + trailingZeros + "/johndoe"; - token.uid = config.username; - numTokenRequests += 1; - cb(null, token); - if (cbAfterTokenFetch) { - cbAfterTokenFetch(); - } - }, - }; - return server; - }); -} - -function getReassigned() { - try { - return Services.prefs.getBoolPref("services.sync.lastSyncReassigned"); - } catch (ex) { - if (ex.result == Cr.NS_ERROR_UNEXPECTED) { - return false; - } - do_throw("Got exception retrieving lastSyncReassigned: " + - Log.exceptionStr(ex)); - } -} - -/** - * Make a test request to `url`, then watch the result of two syncs - * to ensure that a node request was made. - * Runs `between` between the two. This can be used to undo deliberate failure - * setup, detach observers, etc. - */ -function* syncAndExpectNodeReassignment(server, firstNotification, between, - secondNotification, url) { - _("Starting syncAndExpectNodeReassignment\n"); - let deferred = Promise.defer(); - function onwards() { - let numTokenRequestsBefore; - function onFirstSync() { - _("First sync completed."); - Svc.Obs.remove(firstNotification, onFirstSync); - Svc.Obs.add(secondNotification, onSecondSync); - - do_check_eq(Service.clusterURL, ""); - - // Track whether we fetched a new token. - numTokenRequestsBefore = numTokenRequests; - - // Allow for tests to clean up error conditions. - between(); - } - function onSecondSync() { - _("Second sync completed."); - Svc.Obs.remove(secondNotification, onSecondSync); - Service.scheduler.clearSyncTriggers(); - - // Make absolutely sure that any event listeners are done with their work - // before we proceed. - waitForZeroTimer(function () { - _("Second sync nextTick."); - do_check_eq(numTokenRequests, numTokenRequestsBefore + 1, "fetched a new token"); - Service.startOver(); - server.stop(deferred.resolve); - }); - } - - Svc.Obs.add(firstNotification, onFirstSync); - Service.sync(); - } - - // Make sure that we really do get a 401 (but we can only do that if we are - // already logged in, as the login process is what sets up the URLs) - if (Service.isLoggedIn) { - _("Making request to " + url + " which should 401"); - let request = new RESTRequest(url); - request.get(function () { - do_check_eq(request.response.status, 401); - Utils.nextTick(onwards); - }); - } else { - _("Skipping preliminary validation check for a 401 as we aren't logged in"); - Utils.nextTick(onwards); - } - yield deferred.promise; -} - -// Check that when we sync we don't request a new token by default - our -// test setup has configured the client with a valid token, and that token -// should be used to form the cluster URL. -add_task(function* test_single_token_fetch() { - _("Test a normal sync only fetches 1 token"); - - let numTokenFetches = 0; - - function afterTokenFetch() { - numTokenFetches++; - } - - // Set the cluster URL to an "old" version - this is to ensure we don't - // use that old cached version for the first sync but prefer the value - // we got from the token (and as above, we are also checking we don't grab - // a new token). If the test actually attempts to connect to this URL - // it will crash. 
- Service.clusterURL = "http://example.com/"; - - let server = yield prepareServer(afterTokenFetch); - - do_check_false(Service.isLoggedIn, "not already logged in"); - Service.sync(); - do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded"); - do_check_eq(numTokenFetches, 0, "didn't fetch a new token"); - // A bit hacky, but given we know how prepareServer works we can deduce - // that clusterURL we expect. - let expectedClusterURL = server.baseURI + "1.1/johndoe/"; - do_check_eq(Service.clusterURL, expectedClusterURL); - yield new Promise(resolve => server.stop(resolve)); -}); - -add_task(function* test_momentary_401_engine() { - _("Test a failure for engine URLs that's resolved by reassignment."); - let server = yield prepareServer(); - let john = server.user("johndoe"); - - _("Enabling the Rotary engine."); - let engine = Service.engineManager.get("rotary"); - engine.enabled = true; - - // We need the server to be correctly set up prior to experimenting. Do this - // through a sync. - let global = {syncID: Service.syncID, - storageVersion: STORAGE_VERSION, - rotary: {version: engine.version, - syncID: engine.syncID}} - john.createCollection("meta").insert("global", global); - - _("First sync to prepare server contents."); - Service.sync(); - - _("Setting up Rotary collection to 401."); - let rotary = john.createCollection("rotary"); - let oldHandler = rotary.collectionHandler; - rotary.collectionHandler = handleReassign.bind(this, undefined); - - // We want to verify that the clusterURL pref has been cleared after a 401 - // inside a sync. Flag the Rotary engine to need syncing. - john.collection("rotary").timestamp += 1000; - - function between() { - _("Undoing test changes."); - rotary.collectionHandler = oldHandler; - - function onLoginStart() { - // lastSyncReassigned shouldn't be cleared until a sync has succeeded. - _("Ensuring that lastSyncReassigned is still set at next sync start."); - Svc.Obs.remove("weave:service:login:start", onLoginStart); - do_check_true(getReassigned()); - } - - _("Adding observer that lastSyncReassigned is still set on login."); - Svc.Obs.add("weave:service:login:start", onLoginStart); - } - - yield syncAndExpectNodeReassignment(server, - "weave:service:sync:finish", - between, - "weave:service:sync:finish", - Service.storageURL + "rotary"); -}); - -// This test ends up being a failing info fetch *after we're already logged in*. -add_task(function* test_momentary_401_info_collections_loggedin() { - _("Test a failure for info/collections after login that's resolved by reassignment."); - let server = yield prepareServer(); - - _("First sync to prepare server contents."); - Service.sync(); - - _("Arrange for info/collections to return a 401."); - let oldHandler = server.toplevelHandlers.info; - server.toplevelHandlers.info = handleReassign; - - function undo() { - _("Undoing test changes."); - server.toplevelHandlers.info = oldHandler; - } - - do_check_true(Service.isLoggedIn, "already logged in"); - - yield syncAndExpectNodeReassignment(server, - "weave:service:sync:error", - undo, - "weave:service:sync:finish", - Service.infoURL); -}); - -// This test ends up being a failing info fetch *before we're logged in*. -// In this case we expect to recover during the login phase - so the first -// sync succeeds. 
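Each momentary-401 case around here uses the same failure injection: swap a toplevel handler for handleReassign so the server starts answering 401, then hand syncAndExpectNodeReassignment an undo callback that restores it between the two syncs. The shape, distilled from the tests themselves:

let oldHandler = server.toplevelHandlers.info;
server.toplevelHandlers.info = handleReassign;   // every /info request now 401s
function undo() {
  server.toplevelHandlers.info = oldHandler;     // restored between the two syncs
}
// yield syncAndExpectNodeReassignment(server,
//                                     "weave:service:sync:error", undo,
//                                     "weave:service:sync:finish",
//                                     Service.infoURL);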
-add_task(function* test_momentary_401_info_collections_loggedout() {
- _("Test a failure for info/collections before login that's resolved by reassignment.");
-
- let oldHandler;
- let sawTokenFetch = false;
-
- function afterTokenFetch() {
- // After a single token fetch, we undo our evil handleReassign hack, so
- // the next /info request returns the collection instead of a 401
- server.toplevelHandlers.info = oldHandler;
- sawTokenFetch = true;
- }
-
- let server = yield prepareServer(afterTokenFetch);
-
- // Return a 401 for the next /info request - it will be reset immediately
- // after a new token is fetched.
- oldHandler = server.toplevelHandlers.info
- server.toplevelHandlers.info = handleReassign;
-
- do_check_false(Service.isLoggedIn, "not already logged in");
-
- Service.sync();
- do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded");
- // sync was successful - check we grabbed a new token.
- do_check_true(sawTokenFetch, "a new token was fetched by this test.")
- // and we are done.
- Service.startOver();
- let deferred = Promise.defer();
- server.stop(deferred.resolve);
- yield deferred.promise;
-});
-
-// This test ends up being a failing meta/global fetch *after we're already logged in*.
-add_task(function* test_momentary_401_storage_loggedin() {
- _("Test a failure for any storage URL after login that's resolved by" +
- "reassignment.");
- let server = yield prepareServer();
-
- _("First sync to prepare server contents.");
- Service.sync();
-
- _("Arrange for meta/global to return a 401.");
- let oldHandler = server.toplevelHandlers.storage;
- server.toplevelHandlers.storage = handleReassign;
-
- function undo() {
- _("Undoing test changes.");
- server.toplevelHandlers.storage = oldHandler;
- }
-
- do_check_true(Service.isLoggedIn, "already logged in");
-
- yield syncAndExpectNodeReassignment(server,
- "weave:service:sync:error",
- undo,
- "weave:service:sync:finish",
- Service.storageURL + "meta/global");
-});
-
-// This test ends up being a failing meta/global fetch *before we've logged in*.
-add_task(function* test_momentary_401_storage_loggedout() {
- _("Test a failure for any storage URL before login, not just engine parts. " +
- "Resolved by reassignment.");
- let server = yield prepareServer();
-
- // Return a 401 for all storage requests.
- let oldHandler = server.toplevelHandlers.storage;
- server.toplevelHandlers.storage = handleReassign;
-
- function undo() {
- _("Undoing test changes.");
- server.toplevelHandlers.storage = oldHandler;
- }
-
- do_check_false(Service.isLoggedIn, "already logged in");
-
- yield syncAndExpectNodeReassignment(server,
- "weave:service:login:error",
- undo,
- "weave:service:sync:finish",
- Service.storageURL + "meta/global");
-});
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+_("Test that node reassignment happens correctly using the FxA identity mgr.");
+// The node-reassignment logic is quite different for FxA than for the legacy
+// provider. In particular, there's no special request necessary for
+// reassignment - it comes from the token server - so we need to ensure the
+// FxA cluster manager grabs a new token.
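+//
+// A rough sketch of that flow, for orientation only (resetToken is an
+// assumed helper name for illustration, not the real cluster manager API):
+//
+//   // on a 401 from the storage node:
+//   clusterManager.resetToken();           // drop the cached token
+//   tokenServerClient.getTokenFromBrowserIDAssertion(uri, assertion,
+//     (err, token) => { Service.clusterURL = token.endpoint; });
+//
+// which is why the tests below count token fetches rather than looking for a
+// dedicated node-reassignment request.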
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/rotaryengine.js");
+Cu.import("resource://services-sync/browserid_identity.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Service.engineManager.clear();
+
+function run_test() {
+ Log.repository.getLogger("Sync.AsyncResource").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Resource").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.RESTRequest").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
+ initTestLogging();
+
+ Service.engineManager.register(RotaryEngine);
+
+ // Setup the FxA identity manager and cluster manager.
+ Status.__authManager = Service.identity = new BrowserIDManager();
+ Service._clusterManager = Service.identity.createClusterManager(Service);
+
+ // None of the failures in this file should result in a UI error.
+ function onUIError() {
+ do_throw("Errors should not be presented in the UI.");
+ }
+ Svc.Obs.add("weave:ui:login:error", onUIError);
+ Svc.Obs.add("weave:ui:sync:error", onUIError);
+
+ run_next_test();
+}
+
+
+// API-compatible with SyncServer handler. Bind `handler` to something to use
+// as a ServerCollection handler.
+function handleReassign(handler, req, resp) {
+ resp.setStatusLine(req.httpVersion, 401, "Node reassignment");
+ resp.setHeader("Content-Type", "application/json");
+ let reassignBody = JSON.stringify({error: "401inator in place"});
+ resp.bodyOutputStream.write(reassignBody, reassignBody.length);
+}
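+// For reference, the raw response the handler above produces looks like:
+//
+//   HTTP/1.1 401 Node reassignment
+//   Content-Type: application/json
+//
+//   {"error": "401inator in place"}
+//
+// The tests below rely on such a 401 from the storage node making the next
+// sync fetch a fresh token (and, with it, a fresh cluster URL).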
+
+let numTokenRequests = 0;
+
+function prepareServer(cbAfterTokenFetch) {
+ let config = makeIdentityConfig({username: "johndoe"});
+ let server = new SyncServer();
+ server.registerUser("johndoe");
+ server.start();
+
+ // Set the token endpoint for the initial token request that's done implicitly
+ // via configureIdentity.
+ config.fxaccount.token.endpoint = server.baseURI + "1.1/johndoe";
+ // And future token fetches mutate the endpoint, driven by numReassigns.
+ let numReassigns = 0;
+ return configureIdentity(config).then(() => {
+ Service.identity._tokenServerClient = {
+ getTokenFromBrowserIDAssertion: function(uri, assertion, cb) {
+ // Build a new URL with trailing zeros for the SYNC_VERSION part - this
+ // will still be seen as equivalent by the test server, but different
+ // by sync itself.
+ numReassigns += 1;
+ let trailingZeros = new Array(numReassigns + 1).join('0');
+ let token = config.fxaccount.token;
+ token.endpoint = server.baseURI + "1.1" + trailingZeros + "/johndoe";
+ token.uid = config.username;
+ numTokenRequests += 1;
+ cb(null, token);
+ if (cbAfterTokenFetch) {
+ cbAfterTokenFetch();
+ }
+ },
+ };
+ Service.clusterURL = config.fxaccount.token.endpoint;
+ return server;
+ });
+}
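+// Usage sketch for prepareServer(): the initial implicit token fetch points
+// the client at baseURI + "1.1/johndoe"; every refetch through the stub above
+// appends another zero to the version segment:
+//
+//   1st refetch -> baseURI + "1.10/johndoe"
+//   2nd refetch -> baseURI + "1.100/johndoe"
+//
+// The test server treats these as the same node, but Sync sees a changed
+// cluster URL - which is exactly what a real node reassignment looks like.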
+
+function getReassigned() {
+ try {
+ return Services.prefs.getBoolPref("services.sync.lastSyncReassigned");
+ } catch (ex if (ex.result == Cr.NS_ERROR_UNEXPECTED)) {
+ return false;
+ } catch (ex) {
+ do_throw("Got exception retrieving lastSyncReassigned: " +
+ Utils.exceptionStr(ex));
+ }
+}
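+// Note: `catch (ex if ...)` above is SpiderMonkey-only conditional catch
+// syntax. A standard-JavaScript equivalent, for reference:
+//
+//   try {
+//     return Services.prefs.getBoolPref("services.sync.lastSyncReassigned");
+//   } catch (ex) {
+//     if (ex.result == Cr.NS_ERROR_UNEXPECTED) {
+//       return false; // the pref simply doesn't exist yet
+//     }
+//     do_throw("Got exception retrieving lastSyncReassigned: " +
+//              Utils.exceptionStr(ex));
+//   }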
+
+/**
+ * Make a test request to `url`, then watch the result of two syncs
+ * to ensure that a node request was made.
+ * Runs `between` between the two. This can be used to undo deliberate failure
+ * setup, detach observers, etc.
+ */
+function syncAndExpectNodeReassignment(server, firstNotification, between,
+ secondNotification, url) {
+ _("Starting syncAndExpectNodeReassignment\n");
+ let deferred = Promise.defer();
+ function onwards() {
+ let numTokenRequestsBefore;
+ function onFirstSync() {
+ _("First sync completed.");
+ Svc.Obs.remove(firstNotification, onFirstSync);
+ Svc.Obs.add(secondNotification, onSecondSync);
+
+ do_check_eq(Service.clusterURL, "");
+
+ // Track whether we fetched a new token.
+ numTokenRequestsBefore = numTokenRequests;
+
+ // Allow for tests to clean up error conditions.
+ between();
+ }
+ function onSecondSync() {
+ _("Second sync completed.");
+ Svc.Obs.remove(secondNotification, onSecondSync);
+ Service.scheduler.clearSyncTriggers();
+
+ // Make absolutely sure that any event listeners are done with their work
+ // before we proceed.
+ waitForZeroTimer(function () {
+ _("Second sync nextTick.");
+ do_check_eq(numTokenRequests, numTokenRequestsBefore + 1, "fetched a new token");
+ Service.startOver();
+ server.stop(deferred.resolve);
+ });
+ }
+
+ Svc.Obs.add(firstNotification, onFirstSync);
+ Service.sync();
+ }
+
+ // Make sure the URL really does return a 401 before the syncs start.
+ _("Making request to " + url + " which should 401");
+ let request = new RESTRequest(url);
+ request.get(function () {
+ do_check_eq(request.response.status, 401);
+ Utils.nextTick(onwards);
+ });
+ yield deferred.promise;
+}
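+// Typical call shape, taken from the tests below: sync twice, undo the
+// induced failure in `between`, and expect exactly one extra token fetch:
+//
+//   yield syncAndExpectNodeReassignment(server,
+//                                       "weave:service:sync:error",  // first sync fails
+//                                       undo,                        // clean up the 401
+//                                       "weave:service:sync:finish", // second sync succeeds
+//                                       Service.infoURL);            // URL that must 401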
+
+add_task(function test_momentary_401_engine() {
+ _("Test a failure for engine URLs that's resolved by reassignment.");
+ let server = yield prepareServer();
+ let john = server.user("johndoe");
+
+ _("Enabling the Rotary engine.");
+ let engine = Service.engineManager.get("rotary");
+ engine.enabled = true;
+
+ // We need the server to be correctly set up prior to experimenting. Do this
+ // through a sync.
+ let global = {syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ rotary: {version: engine.version,
+ syncID: engine.syncID}};
+ john.createCollection("meta").insert("global", global);
+
+ _("First sync to prepare server contents.");
+ Service.sync();
+
+ _("Setting up Rotary collection to 401.");
+ let rotary = john.createCollection("rotary");
+ let oldHandler = rotary.collectionHandler;
+ rotary.collectionHandler = handleReassign.bind(this, undefined);
+
+ // We want to verify that the clusterURL pref has been cleared after a 401
+ // inside a sync. Flag the Rotary engine to need syncing.
+ john.collection("rotary").timestamp += 1000;
+
+ function between() {
+ _("Undoing test changes.");
+ rotary.collectionHandler = oldHandler;
+
+ function onLoginStart() {
+ // lastSyncReassigned shouldn't be cleared until a sync has succeeded.
+ _("Ensuring that lastSyncReassigned is still set at next sync start.");
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ do_check_true(getReassigned());
+ }
+
+ _("Adding observer that lastSyncReassigned is still set on login.");
+ Svc.Obs.add("weave:service:login:start", onLoginStart);
+ }
+
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:sync:finish",
+ between,
+ "weave:service:sync:finish",
+ Service.storageURL + "rotary");
+});
+
+// This test ends up being a failing info fetch *after we're already logged in*.
+add_task(function test_momentary_401_info_collections_loggedin() {
+ _("Test a failure for info/collections after login that's resolved by reassignment.");
+ let server = yield prepareServer();
+
+ _("First sync to prepare server contents.");
+ Service.sync();
+
+ _("Arrange for info/collections to return a 401.");
+ let oldHandler = server.toplevelHandlers.info;
+ server.toplevelHandlers.info = handleReassign;
+
+ function undo() {
+ _("Undoing test changes.");
+ server.toplevelHandlers.info = oldHandler;
+ }
+
+ do_check_true(Service.isLoggedIn, "already logged in");
+
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:sync:error",
+ undo,
+ "weave:service:sync:finish",
+ Service.infoURL);
+});
+
+// This test ends up being a failing info fetch *before we're logged in*.
+// In this case we expect to recover during the login phase - so the first
+// sync succeeds.
+add_task(function test_momentary_401_info_collections_loggedout() {
+ _("Test a failure for info/collections before login that's resolved by reassignment.");
+
+ let oldHandler;
+ let sawTokenFetch = false;
+
+ function afterTokenFetch() {
+ // After a single token fetch, we undo our evil handleReassign hack, so
+ // the next /info request returns the collection instead of a 401
+ server.toplevelHandlers.info = oldHandler;
+ sawTokenFetch = true;
+ }
+
+ let server = yield prepareServer(afterTokenFetch);
+
+ // Return a 401 for the next /info request - it will be reset immediately
+ // after a new token is fetched.
+ oldHandler = server.toplevelHandlers.info;
+ server.toplevelHandlers.info = handleReassign;
+
+ do_check_false(Service.isLoggedIn, "not already logged in");
+
+ Service.sync();
+ do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded");
+ // sync was successful - check we grabbed a new token.
+ do_check_true(sawTokenFetch, "a new token was fetched by this test.");
+ // and we are done.
+ Service.startOver();
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ yield deferred.promise;
+});
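+// The manual defer/stop dance above is a common idiom; a minimal wrapper,
+// sketched for reference (no such helper is defined in this file):
+//
+//   function promiseStopServer(server) {
+//     let deferred = Promise.defer();
+//     server.stop(deferred.resolve);
+//     return deferred.promise;
+//   }
+//
+// after which the tail of the test reads `yield promiseStopServer(server);`.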
+
+// This test ends up being a failing meta/global fetch *after we're already logged in*.
+add_task(function test_momentary_401_storage_loggedin() {
+ _("Test a failure for any storage URL after login that's resolved by" +
+ "reassignment.");
+ let server = yield prepareServer();
+
+ _("First sync to prepare server contents.");
+ Service.sync();
+
+ _("Arrange for meta/global to return a 401.");
+ let oldHandler = server.toplevelHandlers.storage;
+ server.toplevelHandlers.storage = handleReassign;
+
+ function undo() {
+ _("Undoing test changes.");
+ server.toplevelHandlers.storage = oldHandler;
+ }
+
+ do_check_true(Service.isLoggedIn, "already logged in");
+
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:sync:error",
+ undo,
+ "weave:service:sync:finish",
+ Service.storageURL + "meta/global");
+});
+
+// This test ends up being a failing meta/global fetch *before we've logged in*.
+add_task(function test_momentary_401_storage_loggedout() {
+ _("Test a failure for any storage URL before login, not just engine parts. " +
+ "Resolved by reassignment.");
+ let server = yield prepareServer();
+
+ // Return a 401 for all storage requests.
+ let oldHandler = server.toplevelHandlers.storage;
+ server.toplevelHandlers.storage = handleReassign;
+
+ function undo() {
+ _("Undoing test changes.");
+ server.toplevelHandlers.storage = oldHandler;
+ }
+
+ do_check_false(Service.isLoggedIn, "already logged in");
+
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:login:error",
+ undo,
+ "weave:service:sync:finish",
+ Service.storageURL + "meta/global");
+});
+
diff --git a/services/sync/tests/unit/test_fxa_service_cluster.js b/services/sync/tests/unit/test_fxa_service_cluster.js
index b4f83a7fe..f6f97184a 100644
--- a/services/sync/tests/unit/test_fxa_service_cluster.js
+++ b/services/sync/tests/unit/test_fxa_service_cluster.js
@@ -1,68 +1,68 @@
-/* Any copyright is dedicated to the Public Domain.
- http://creativecommons.org/publicdomain/zero/1.0/ */
-
-Cu.import("resource://services-sync/service.js");
-Cu.import("resource://services-sync/util.js");
-Cu.import("resource://testing-common/services/sync/fxa_utils.js");
-Cu.import("resource://testing-common/services/sync/utils.js");
-
-add_task(function* test_findCluster() {
- _("Test FxA _findCluster()");
-
- _("_findCluster() throws on 500 errors.");
- initializeIdentityWithTokenServerResponse({
- status: 500,
- headers: [],
- body: "",
- });
-
- yield Service.identity.initializeWithCurrentIdentity();
- yield Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
- "should reject due to 500");
-
- Assert.throws(function() {
- Service._clusterManager._findCluster();
- });
-
- _("_findCluster() returns null on authentication errors.");
- initializeIdentityWithTokenServerResponse({
- status: 401,
- headers: {"content-type": "application/json"},
- body: "{}",
- });
-
- yield Service.identity.initializeWithCurrentIdentity();
- yield Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
- "should reject due to 401");
-
- cluster = Service._clusterManager._findCluster();
- Assert.strictEqual(cluster, null);
-
- _("_findCluster() works with correct tokenserver response.");
- let endpoint = "http://example.com/something";
- initializeIdentityWithTokenServerResponse({
- status: 200,
- headers: {"content-type": "application/json"},
- body:
- JSON.stringify({
- api_endpoint: endpoint,
- duration: 300,
- id: "id",
- key: "key",
- uid: "uid",
- })
- });
-
- yield Service.identity.initializeWithCurrentIdentity();
- yield Service.identity.whenReadyToAuthenticate.promise;
- cluster = Service._clusterManager._findCluster();
- // The cluster manager ensures a trailing "/"
- Assert.strictEqual(cluster, endpoint + "/");
-
- Svc.Prefs.resetBranch("");
-});
-
-function run_test() {
- initTestLogging();
- run_next_test();
-}
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/fxa_utils.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+add_task(function test_findCluster() {
+ _("Test FxA _findCluster()");
+
+ _("_findCluster() throws on 500 errors.");
+ initializeIdentityWithTokenServerResponse({
+ status: 500,
+ headers: [],
+ body: "",
+ });
+
+ yield Service.identity.initializeWithCurrentIdentity();
+ yield Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
+ "should reject due to 500");
+
+ Assert.throws(function() {
+ Service._clusterManager._findCluster();
+ });
+
+ _("_findCluster() returns null on authentication errors.");
+ initializeIdentityWithTokenServerResponse({
+ status: 401,
+ headers: {"content-type": "application/json"},
+ body: "{}",
+ });
+
+ yield Service.identity.initializeWithCurrentIdentity();
+ yield Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
+ "should reject due to 401");
+
+ let cluster = Service._clusterManager._findCluster();
+ Assert.strictEqual(cluster, null);
+
+ _("_findCluster() works with correct tokenserver response.");
+ let endpoint = "http://example.com/something";
+ initializeIdentityWithTokenServerResponse({
+ status: 200,
+ headers: {"content-type": "application/json"},
+ body:
+ JSON.stringify({
+ api_endpoint: endpoint,
+ duration: 300,
+ id: "id",
+ key: "key",
+ uid: "uid",
+ })
+ });
+
+ yield Service.identity.initializeWithCurrentIdentity();
+ yield Service.identity.whenReadyToAuthenticate.promise;
+ cluster = Service._clusterManager._findCluster();
+ // The cluster manager ensures a trailing "/"
+ Assert.strictEqual(cluster, endpoint + "/");
+
+ Svc.Prefs.resetBranch("");
+});
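+// Summary of the contract exercised above: _findCluster() throws on a token
+// server 500, returns null on authentication (401) errors, and on success
+// returns api_endpoint normalized to a trailing slash, e.g.
+//
+//   "http://example.com/something" -> "http://example.com/something/"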
+
+function run_test() {
+ initTestLogging();
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_fxa_startOver.js b/services/sync/tests/unit/test_fxa_startOver.js
index 629379648..e27d86ea0 100644
--- a/services/sync/tests/unit/test_fxa_startOver.js
+++ b/services/sync/tests/unit/test_fxa_startOver.js
@@ -1,63 +1,63 @@
-/* Any copyright is dedicated to the Public Domain.
- * http://creativecommons.org/publicdomain/zero/1.0/ */
-
-Cu.import("resource://testing-common/services/sync/utils.js");
-Cu.import("resource://services-sync/identity.js");
-Cu.import("resource://services-sync/browserid_identity.js");
-Cu.import("resource://services-sync/service.js");
-
-function run_test() {
- initTestLogging("Trace");
- run_next_test();
-}
-
-add_task(function* test_startover() {
- let oldValue = Services.prefs.getBoolPref("services.sync-testing.startOverKeepIdentity", true);
- Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", false);
-
- ensureLegacyIdentityManager();
- yield configureIdentity({username: "johndoe"});
-
- // The boolean flag on the xpcom service should reflect a legacy provider.
- let xps = Cc["@mozilla.org/weave/service;1"]
- .getService(Components.interfaces.nsISupports)
- .wrappedJSObject;
- do_check_false(xps.fxAccountsEnabled);
-
- // we expect the "legacy" provider (but can't instanceof that, as BrowserIDManager
- // extends it)
- do_check_false(Service.identity instanceof BrowserIDManager);
-
- Service.serverURL = "https://localhost/";
- Service.clusterURL = Service.serverURL;
-
- Service.login();
- // We should have a cluster URL
- do_check_true(Service.clusterURL.length > 0);
-
- // remember some stuff so we can reset it after.
- let oldIdentity = Service.identity;
- let oldClusterManager = Service._clusterManager;
- let deferred = Promise.defer();
- Services.obs.addObserver(function observeStartOverFinished() {
- Services.obs.removeObserver(observeStartOverFinished, "weave:service:start-over:finish");
- deferred.resolve();
- }, "weave:service:start-over:finish", false);
-
- Service.startOver();
- yield deferred.promise; // wait for the observer to fire.
-
- // the xpcom service should indicate FxA is enabled.
- do_check_true(xps.fxAccountsEnabled);
- // should have swapped identities.
- do_check_true(Service.identity instanceof BrowserIDManager);
- // should have clobbered the cluster URL
- do_check_eq(Service.clusterURL, "");
-
- // we should have thrown away the old identity provider and cluster manager.
- do_check_neq(oldIdentity, Service.identity);
- do_check_neq(oldClusterManager, Service._clusterManager);
-
- // reset the world.
- Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", oldValue);
-});
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/browserid_identity.js");
+Cu.import("resource://services-sync/service.js");
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
+
+add_task(function* test_startover() {
+ let oldValue = Services.prefs.getBoolPref("services.sync-testing.startOverKeepIdentity", true);
+ Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", false);
+
+ ensureLegacyIdentityManager();
+ yield configureIdentity({username: "johndoe"});
+
+ // The boolean flag on the xpcom service should reflect a legacy provider.
+ let xps = Cc["@mozilla.org/weave/service;1"]
+ .getService(Components.interfaces.nsISupports)
+ .wrappedJSObject;
+ do_check_false(xps.fxAccountsEnabled);
+
+ // we expect the "legacy" provider (but can't instanceof that, as BrowserIDManager
+ // extends it)
+ do_check_false(Service.identity instanceof BrowserIDManager);
+
+ Service.serverURL = "https://localhost/";
+ Service.clusterURL = Service.serverURL;
+
+ Service.login();
+ // We should have a cluster URL
+ do_check_true(Service.clusterURL.length > 0);
+
+ // remember some stuff so we can reset it after.
+ let oldIdentity = Service.identity;
+ let oldClusterManager = Service._clusterManager;
+ let deferred = Promise.defer();
+ Services.obs.addObserver(function observeStartOverFinished() {
+ Services.obs.removeObserver(observeStartOverFinished, "weave:service:start-over:finish");
+ deferred.resolve();
+ }, "weave:service:start-over:finish", false);
+
+ Service.startOver();
+ yield deferred.promise; // wait for the observer to fire.
+
+ // the xpcom service should indicate FxA is enabled.
+ do_check_true(xps.fxAccountsEnabled);
+ // should have swapped identities.
+ do_check_true(Service.identity instanceof BrowserIDManager);
+ // should have clobbered the cluster URL
+ do_check_eq(Service.clusterURL, "");
+
+ // we should have thrown away the old identity provider and cluster manager.
+ do_check_neq(oldIdentity, Service.identity);
+ do_check_neq(oldClusterManager, Service._clusterManager);
+
+ // reset the world.
+ Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", oldValue);
+});
diff --git a/services/sync/tests/unit/test_healthreport.js b/services/sync/tests/unit/test_healthreport.js
new file mode 100644
index 000000000..486320b6a
--- /dev/null
+++ b/services/sync/tests/unit/test_healthreport.js
@@ -0,0 +1,194 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/Metrics.jsm", this);
+Cu.import("resource://gre/modules/Preferences.jsm", this);
+Cu.import("resource://gre/modules/Promise.jsm", this);
+Cu.import("resource://services-sync/main.js", this);
+Cu.import("resource://services-sync/healthreport.jsm", this);
+Cu.import("resource://testing-common/services/common/logging.js", this);
+Cu.import("resource://testing-common/services/healthreport/utils.jsm", this);
+
+function run_test() {
+ initTestLogging();
+
+ run_next_test();
+}
+
+add_task(function test_constructor() {
+ let provider = new SyncProvider();
+});
+
+// Provider can initialize and de-initialize properly.
+add_task(function* test_init() {
+ let storage = yield Metrics.Storage("init");
+ let provider = new SyncProvider();
+ yield provider.init(storage);
+ yield provider.shutdown();
+ yield storage.close();
+});
+
+add_task(function* test_collect() {
+ let storage = yield Metrics.Storage("collect");
+ let provider = new SyncProvider();
+ yield provider.init(storage);
+
+ // Initially nothing should be configured.
+ let now = new Date();
+ yield provider.collectDailyData();
+
+ let m = provider.getMeasurement("sync", 1);
+ let values = yield m.getValues();
+ Assert.equal(values.days.size, 1);
+ Assert.ok(values.days.hasDay(now));
+ let day = values.days.getDay(now);
+ Assert.ok(day.has("enabled"));
+ Assert.ok(day.has("activeProtocol"));
+ Assert.ok(day.has("preferredProtocol"));
+ Assert.equal(day.get("enabled"), 0);
+ Assert.equal(day.get("preferredProtocol"), "1.5");
+ Assert.equal(day.get("activeProtocol"), "1.5",
+ "Protocol without setup should be FX Accounts version.");
+
+ // Now check for old Sync setup.
+ let branch = new Preferences("services.sync.");
+ branch.set("username", "foo");
+ branch.reset("fxaccounts.enabled");
+ yield provider.collectDailyData();
+ values = yield m.getValues();
+ Assert.equal(values.days.getDay(now).get("activeProtocol"), "1.1",
+ "Protocol with old Sync setup is correct.");
+
+ Assert.equal(Weave.Status.__authManager, undefined, "Detect code changes");
+
+ // Let's enable Sync so we can get more useful data.
+ // We need to do this because the FHR probe only records more info if Sync
+ // is configured properly.
+ Weave.Service.identity.account = "johndoe";
+ Weave.Service.identity.basicPassword = "ilovejane";
+ Weave.Service.identity.syncKey = Weave.Utils.generatePassphrase();
+ Weave.Service.clusterURL = "http://localhost/";
+ Assert.equal(Weave.Status.checkSetup(), Weave.STATUS_OK);
+
+ yield provider.collectDailyData();
+ values = yield m.getValues();
+ day = values.days.getDay(now);
+ Assert.equal(day.get("enabled"), 1);
+
+ // An empty account should have 1 device: us.
+ let dm = provider.getMeasurement("devices", 1);
+ values = yield dm.getValues();
+ Assert.ok(values.days.hasDay(now));
+ day = values.days.getDay(now);
+ Assert.equal(day.size, 1);
+ let engine = Weave.Service.clientsEngine;
+ Assert.ok(engine);
+ Assert.ok(day.has(engine.localType));
+ Assert.equal(day.get(engine.localType), 1);
+
+ // Add some devices and ensure they show up.
+ engine._store._remoteClients["id1"] = {type: "mobile"}; + engine._store._remoteClients["id2"] = {type: "tablet"}; + engine._store._remoteClients["id3"] = {type: "mobile"}; + + yield provider.collectDailyData(); + values = yield dm.getValues(); + day = values.days.getDay(now); + + let expected = { + "foobar": 0, + "tablet": 1, + "mobile": 2, + "desktop": 0, + }; + + for (let type in expected) { + let count = expected[type]; + + if (engine.localType == type) { + count++; + } + + if (!count) { + Assert.ok(!day.has(type)); + } else { + Assert.ok(day.has(type)); + Assert.equal(day.get(type), count); + } + } + + engine._store._remoteClients = {}; + + yield provider.shutdown(); + yield storage.close(); +}); + +add_task(function* test_sync_events() { + let storage = yield Metrics.Storage("sync_events"); + let provider = new SyncProvider(); + yield provider.init(storage); + + let m = provider.getMeasurement("sync", 1); + + for (let i = 0; i < 5; i++) { + Services.obs.notifyObservers(null, "weave:service:sync:start", null); + } + + for (let i = 0; i < 3; i++) { + Services.obs.notifyObservers(null, "weave:service:sync:finish", null); + } + + for (let i = 0; i < 2; i++) { + Services.obs.notifyObservers(null, "weave:service:sync:error", null); + } + + // Wait for storage to complete. + yield m.storage.enqueueOperation(() => { + return Promise.resolve(); + }); + + let values = yield m.getValues(); + let now = new Date(); + Assert.ok(values.days.hasDay(now)); + let day = values.days.getDay(now); + + Assert.ok(day.has("syncStart")); + Assert.ok(day.has("syncSuccess")); + Assert.ok(day.has("syncError")); + Assert.equal(day.get("syncStart"), 5); + Assert.equal(day.get("syncSuccess"), 3); + Assert.equal(day.get("syncError"), 2); + + yield provider.shutdown(); + yield storage.close(); +}); + +add_task(function* test_healthreporter_json() { + let reporter = yield getHealthReporter("healthreporter_json"); + yield reporter.init(); + try { + yield reporter._providerManager.registerProvider(new SyncProvider()); + yield reporter.collectMeasurements(); + let payload = yield reporter.getJSONPayload(true); + let now = new Date(); + let today = reporter._formatDate(now); + + Assert.ok(today in payload.data.days); + let day = payload.data.days[today]; + + Assert.ok("org.mozilla.sync.sync" in day); + Assert.ok("org.mozilla.sync.devices" in day); + + let devices = day["org.mozilla.sync.devices"]; + let engine = Weave.Service.clientsEngine; + Assert.ok(engine); + let type = engine.localType; + Assert.ok(type); + Assert.ok(type in devices); + Assert.equal(devices[type], 1); + } finally { + reporter._shutdown(); + } +}); diff --git a/services/sync/tests/unit/test_healthreport_migration.js b/services/sync/tests/unit/test_healthreport_migration.js new file mode 100644 index 000000000..23f756748 --- /dev/null +++ b/services/sync/tests/unit/test_healthreport_migration.js @@ -0,0 +1,155 @@ +/* Any copyright is dedicated to the Public Domain. 
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/Metrics.jsm", this);
+Cu.import("resource://gre/modules/Preferences.jsm", this);
+Cu.import("resource://gre/modules/Promise.jsm", this);
+Cu.import("resource://services-sync/healthreport.jsm", this);
+Cu.import("resource://services-sync/FxaMigrator.jsm", this);
+Cu.import("resource://testing-common/services/common/logging.js", this);
+Cu.import("resource://testing-common/services/healthreport/utils.jsm", this);
+
+
+function run_test() {
+ initTestLogging();
+
+ run_next_test();
+}
+
+add_task(function* test_no_data() {
+ let storage = yield Metrics.Storage("collect");
+ let provider = new SyncProvider();
+ yield provider.init(storage);
+
+ try {
+ // Initially nothing should be configured.
+ let now = new Date();
+ yield provider.collectDailyData();
+
+ let m = provider.getMeasurement("migration", 1);
+ let values = yield m.getValues();
+ Assert.equal(values.days.size, 0);
+ Assert.ok(!values.days.hasDay(now));
+ } finally {
+ yield provider.shutdown();
+ yield storage.close();
+ }
+});
+
+function* checkCorrectStateRecorded(provider, now, state) {
+ let m = provider.getMeasurement("migration", 1);
+
+ // Wait for storage to complete.
+ yield m.storage.enqueueOperation(() => {
+ return Promise.resolve();
+ });
+
+ let values = yield m.getValues();
+ Assert.equal(values.days.size, 1);
+ Assert.ok(values.days.hasDay(now));
+ let day = values.days.getDay(now);
+
+ Assert.ok(day.has("state"));
+ Assert.equal(day.get("state"), state);
+}
+
+add_task(function* test_state() {
+ let storage = yield Metrics.Storage("collect");
+ let provider = new SyncProvider();
+ yield provider.init(storage);
+
+ try {
+ // Initially nothing should be configured.
+ let now = new Date();
+
+ // We record both a "user" and "internal" state in the same field.
+ // So simulate a "user" state first.
+ Services.obs.notifyObservers(null, "fxa-migration:state-changed",
+ fxaMigrator.STATE_USER_FXA_VERIFIED);
+ yield checkCorrectStateRecorded(provider, now, fxaMigrator.STATE_USER_FXA_VERIFIED);
+
+ // And an internal state.
+ Services.obs.notifyObservers(null, "fxa-migration:internal-state-changed",
+ fxaMigrator.STATE_INTERNAL_WAITING_SYNC_COMPLETE);
+ yield checkCorrectStateRecorded(provider, now, fxaMigrator.STATE_INTERNAL_WAITING_SYNC_COMPLETE);
+ } finally {
+ yield provider.shutdown();
+ yield storage.close();
+ }
+});
+
+add_task(function* test_flags() {
+ let storage = yield Metrics.Storage("collect");
+ let provider = new SyncProvider();
+ yield provider.init(storage);
+
+ try {
+ // Initially nothing should be configured.
+ let now = new Date();
+
+ let m = provider.getMeasurement("migration", 1);
+
+ let record = function*(what) {
+ Services.obs.notifyObservers(null, "fxa-migration:internal-telemetry", what);
+ // Wait for storage to complete.
+ yield m.storage.enqueueOperation(Promise.resolve);
+ let values = yield m.getValues();
+ Assert.equal(values.days.size, 1);
+ return values.days.getDay(now);
+ }
+
+ let values = yield m.getValues();
+ Assert.equal(values.days.size, 1);
+ let day = values.days.getDay(now);
+ Assert.ok(!day.has(fxaMigrator.TELEMETRY_ACCEPTED));
+ Assert.ok(!day.has(fxaMigrator.TELEMETRY_DECLINED));
+ Assert.ok(!day.has(fxaMigrator.TELEMETRY_UNLINKED));
+
+ // let's send an unknown value to ensure our error mitigation works.
+ day = yield record("unknown"); + Assert.ok(!day.has(fxaMigrator.TELEMETRY_ACCEPTED)); + Assert.ok(!day.has(fxaMigrator.TELEMETRY_DECLINED)); + Assert.ok(!day.has(fxaMigrator.TELEMETRY_UNLINKED)); + + // record an fxaMigrator.TELEMETRY_ACCEPTED state. + day = yield record(fxaMigrator.TELEMETRY_ACCEPTED); + Assert.ok(day.has(fxaMigrator.TELEMETRY_ACCEPTED)); + Assert.ok(!day.has(fxaMigrator.TELEMETRY_DECLINED)); + Assert.ok(!day.has(fxaMigrator.TELEMETRY_UNLINKED)); + Assert.equal(day.get(fxaMigrator.TELEMETRY_ACCEPTED), 1); + + // and again - it should get 2. + day = yield record(fxaMigrator.TELEMETRY_ACCEPTED); + Assert.equal(day.get(fxaMigrator.TELEMETRY_ACCEPTED), 2); + + // record fxaMigrator.TELEMETRY_DECLINED - also a counter. + day = yield record(fxaMigrator.TELEMETRY_DECLINED); + Assert.ok(day.has(fxaMigrator.TELEMETRY_ACCEPTED)); + Assert.ok(day.has(fxaMigrator.TELEMETRY_DECLINED)); + Assert.ok(!day.has(fxaMigrator.TELEMETRY_UNLINKED)); + Assert.equal(day.get(fxaMigrator.TELEMETRY_ACCEPTED), 2); + Assert.equal(day.get(fxaMigrator.TELEMETRY_DECLINED), 1); + + day = yield record(fxaMigrator.TELEMETRY_DECLINED); + Assert.ok(day.has(fxaMigrator.TELEMETRY_ACCEPTED)); + Assert.ok(day.has(fxaMigrator.TELEMETRY_DECLINED)); + Assert.ok(!day.has(fxaMigrator.TELEMETRY_UNLINKED)); + Assert.equal(day.get(fxaMigrator.TELEMETRY_ACCEPTED), 2); + Assert.equal(day.get(fxaMigrator.TELEMETRY_DECLINED), 2); + + // and fxaMigrator.TELEMETRY_UNLINKED - this is conceptually a "daily bool". + // (ie, it's DAILY_LAST_NUMERIC_FIELD and only ever has |1| written to it) + day = yield record(fxaMigrator.TELEMETRY_UNLINKED); + Assert.ok(day.has(fxaMigrator.TELEMETRY_ACCEPTED)); + Assert.ok(day.has(fxaMigrator.TELEMETRY_DECLINED)); + Assert.ok(day.has(fxaMigrator.TELEMETRY_UNLINKED)); + Assert.equal(day.get(fxaMigrator.TELEMETRY_UNLINKED), 1); + // and doing it again still leaves us with |1| + day = yield record(fxaMigrator.TELEMETRY_UNLINKED); + Assert.equal(day.get(fxaMigrator.TELEMETRY_UNLINKED), 1); + } finally { + yield provider.shutdown(); + yield storage.close(); + } +}); diff --git a/services/sync/tests/unit/test_history_store.js b/services/sync/tests/unit/test_history_store.js index 207b621e0..2381f103d 100644 --- a/services/sync/tests/unit/test_history_store.js +++ b/services/sync/tests/unit/test_history_store.js @@ -68,12 +68,12 @@ function ensureThrows(func) { }; } -var store = new HistoryEngine(Service)._store; +let store = new HistoryEngine(Service)._store; function applyEnsureNoFailures(records) { do_check_eq(store.applyIncomingBatch(records).length, 0); } -var fxuri, fxguid, tburi, tbguid; +let fxuri, fxguid, tburi, tbguid; function run_test() { initTestLogging("Trace"); @@ -189,8 +189,8 @@ add_test(function test_invalid_records() { .DBConnection; let stmt = connection.createAsyncStatement( "INSERT INTO moz_places " - + "(url, url_hash, title, rev_host, visit_count, last_visit_date) " - + "VALUES ('invalid-uri', hash('invalid-uri'), 'Invalid URI', '.', 1, " + TIMESTAMP3 + ")" + + "(url, title, rev_host, visit_count, last_visit_date) " + + "VALUES ('invalid-uri', 'Invalid URI', '.', 1, " + TIMESTAMP3 + ")" ); Async.querySpinningly(stmt); stmt.finalize(); @@ -198,7 +198,7 @@ add_test(function test_invalid_records() { stmt = connection.createAsyncStatement( "INSERT INTO moz_historyvisits " + "(place_id, visit_date, visit_type, session) " - + "VALUES ((SELECT id FROM moz_places WHERE url_hash = hash('invalid-uri') AND url = 'invalid-uri'), " + + "VALUES ((SELECT id FROM moz_places WHERE url = 
'invalid-uri'), " + TIMESTAMP3 + ", " + Ci.nsINavHistoryService.TRANSITION_TYPED + ", 1)" ); Async.querySpinningly(stmt); @@ -226,7 +226,7 @@ add_test(function test_invalid_records() { type: Ci.nsINavHistoryService.TRANSITION_EMBED}]} ]); - _("Make sure we handle records with invalid visit codes or visit dates, gracefully ignoring those visits."); + _("Make sure we report records with invalid visits, gracefully handle non-integer dates."); let no_date_visit_guid = Utils.makeGUID(); let no_type_visit_guid = Utils.makeGUID(); let invalid_type_visit_guid = Utils.makeGUID(); @@ -235,11 +235,11 @@ add_test(function test_invalid_records() { {id: no_date_visit_guid, histUri: "http://no.date.visit/", title: "Visit has no date", - visits: [{type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}, + visits: [{date: TIMESTAMP3}]}, {id: no_type_visit_guid, histUri: "http://no.type.visit/", title: "Visit has no type", - visits: [{date: TIMESTAMP3}]}, + visits: [{type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}, {id: invalid_type_visit_guid, histUri: "http://invalid.type.visit/", title: "Visit has invalid type", @@ -251,7 +251,14 @@ add_test(function test_invalid_records() { visits: [{date: 1234.567, type: Ci.nsINavHistoryService.TRANSITION_EMBED}]} ]); - do_check_eq(failed.length, 0); + do_check_eq(failed.length, 3); + failed.sort(); + let expected = [no_date_visit_guid, + no_type_visit_guid, + invalid_type_visit_guid].sort(); + for (let i = 0; i < expected.length; i++) { + do_check_eq(failed[i], expected[i]); + } _("Make sure we handle records with javascript: URLs gracefully."); applyEnsureNoFailures([ diff --git a/services/sync/tests/unit/test_history_tracker.js b/services/sync/tests/unit/test_history_tracker.js index 5ed022fb0..ca1090b79 100644 --- a/services/sync/tests/unit/test_history_tracker.js +++ b/services/sync/tests/unit/test_history_tracker.js @@ -22,13 +22,13 @@ function onScoreUpdated(callback) { Service.engineManager.clear(); Service.engineManager.register(HistoryEngine); -var engine = Service.engineManager.get("history"); -var tracker = engine._tracker; +let engine = Service.engineManager.get("history"); +let tracker = engine._tracker; // Don't write out by default. tracker.persistChangedIDs = false; -var _counter = 0; +let _counter = 0; function addVisit() { let uriString = "http://getfirefox.com/" + _counter++; let uri = Utils.makeURI(uriString); diff --git a/services/sync/tests/unit/test_hmac_error.js b/services/sync/tests/unit/test_hmac_error.js index 272c0de47..e41ff3797 100644 --- a/services/sync/tests/unit/test_hmac_error.js +++ b/services/sync/tests/unit/test_hmac_error.js @@ -8,7 +8,7 @@ Cu.import("resource://testing-common/services/sync/rotaryengine.js"); Cu.import("resource://testing-common/services/sync/utils.js"); // Track HMAC error counts. 
-var hmacErrorCount = 0;
+let hmacErrorCount = 0;
 (function () {
 let hHE = Service.handleHMACEvent;
 Service.handleHMACEvent = function () {
@@ -49,7 +49,7 @@ function shared_setup() {
 return [engine, rotaryColl, clientsColl, keysWBO, global];
 }
 
-add_task(function *hmac_error_during_404() {
+add_test(function hmac_error_during_404() {
 _("Attempt to replicate the HMAC error setup.");
 let [engine, rotaryColl, clientsColl, keysWBO, global] = shared_setup();
@@ -83,14 +83,13 @@
 try {
 _("Syncing.");
- yield sync_and_validate_telem();
-
+ Service.sync();
 _("Partially resetting client, as if after a restart, and forcing redownload.");
 Service.collectionKeys.clear();
 engine.lastSync = 0; // So that we redownload records.
 key404Counter = 1;
 _("---------------------------");
- yield sync_and_validate_telem();
+ Service.sync();
 _("---------------------------");
 
 // Two rotary items, one client record... no errors.
@@ -98,7 +97,7 @@
 } finally {
 Svc.Prefs.resetBranch("");
 Service.recordManager.clearCache();
- yield new Promise(resolve => server.stop(resolve));
+ server.stop(run_next_test);
 }
 });
diff --git a/services/sync/tests/unit/test_identity_manager.js b/services/sync/tests/unit/test_identity_manager.js
index 1ac198ade..97dace95f 100644
--- a/services/sync/tests/unit/test_identity_manager.js
+++ b/services/sync/tests/unit/test_identity_manager.js
@@ -5,7 +5,7 @@
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/identity.js");
 Cu.import("resource://services-sync/util.js");
 
-var identity = new IdentityManager();
+let identity = new IdentityManager();
 
 function run_test() {
 initTestLogging("Trace");
diff --git a/services/sync/tests/unit/test_interval_triggers.js b/services/sync/tests/unit/test_interval_triggers.js
index eca5ec289..0f355e636 100644
--- a/services/sync/tests/unit/test_interval_triggers.js
+++ b/services/sync/tests/unit/test_interval_triggers.js
@@ -10,13 +10,8 @@
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 Svc.DefaultPrefs.set("registerEngines", "");
 Cu.import("resource://services-sync/service.js");
 
-var scheduler = Service.scheduler;
-var clientsEngine = Service.clientsEngine;
-
-// Don't remove stale clients when syncing. This is a test-only workaround
-// that lets us add clients directly to the store, without losing them on
-// the next sync.
-clientsEngine._removeRemoteClient = id => {};
+let scheduler = Service.scheduler;
+let clientsEngine = Service.clientsEngine;
 
 function promiseStopServer(server) {
 let deferred = Promise.defer();
@@ -46,7 +41,7 @@ function sync_httpd_setup() {
 });
 }
 
-function* setUp(server) {
+function setUp(server) {
 yield configureIdentity({username: "johndoe"});
 Service.serverURL = server.baseURI + "/";
 Service.clusterURL = server.baseURI + "/";
@@ -65,7 +60,7 @@ function run_test() {
 run_next_test();
 }
 
-add_identity_test(this, function* test_successful_sync_adjustSyncInterval() {
+add_identity_test(this, function test_successful_sync_adjustSyncInterval() {
 _("Test successful sync calling adjustSyncInterval");
 let syncSuccesses = 0;
 function onSyncFinish() {
@@ -164,7 +159,7 @@ add_identity_test(this, function* test_successful_sync_adjustSyncInterval() {
 yield promiseStopServer(server);
 });
 
-add_identity_test(this, function* test_unsuccessful_sync_adjustSyncInterval() {
+add_identity_test(this, function test_unsuccessful_sync_adjustSyncInterval() {
 _("Test unsuccessful sync calling adjustSyncInterval");
 
 let syncFailures = 0;
@@ -269,7 +264,7 @@ add_identity_test(this, function* test_unsuccessful_sync_adjustSyncInterval() {
 yield promiseStopServer(server);
 });
 
-add_identity_test(this, function* test_back_triggers_sync() {
+add_identity_test(this, function test_back_triggers_sync() {
 let server = sync_httpd_setup();
 yield setUp(server);
@@ -301,7 +296,7 @@ add_identity_test(this, function* test_back_triggers_sync() {
 yield deferred.promise;
 });
 
-add_identity_test(this, function* test_adjust_interval_on_sync_error() {
+add_identity_test(this, function test_adjust_interval_on_sync_error() {
 let server = sync_httpd_setup();
 yield setUp(server);
@@ -332,7 +327,7 @@ add_identity_test(this, function* test_adjust_interval_on_sync_error() {
 yield promiseStopServer(server);
 });
 
-add_identity_test(this, function* test_bug671378_scenario() {
+add_identity_test(this, function test_bug671378_scenario() {
 // Test scenario similar to bug 671378. This bug appeared when a score
 // update occurred that wasn't large enough to trigger a sync so
 // scheduleNextSync() was called without a time interval parameter,
diff --git a/services/sync/tests/unit/test_jpakeclient.js b/services/sync/tests/unit/test_jpakeclient.js
index 783edb460..ff13c5716 100644
--- a/services/sync/tests/unit/test_jpakeclient.js
+++ b/services/sync/tests/unit/test_jpakeclient.js
@@ -38,8 +38,8 @@ function new_channel() {
 return cid;
 }
 
-var server;
-var channels = {}; // Map channel -> ServerChannel object
+let server;
+let channels = {}; // Map channel -> ServerChannel object
 function server_new_channel(request, response) {
 check_headers(request);
 let cid = new_channel();
@@ -48,7 +48,7 @@ function server_new_channel(request, response) {
 response.bodyOutputStream.write(body, body.length);
 }
 
-var error_report;
+let error_report;
 function server_report(request, response) {
 check_headers(request);
@@ -68,7 +68,7 @@ function server_report(request, response) {
 }
 
 // Hook for test code.
-var hooks = {};
+let hooks = {};
 function initHooks() {
 hooks.onGET = function onGET(request) {};
 }
@@ -146,7 +146,7 @@ ServerChannel.prototype = {
 /**
  * Controller that throws for everything.
 */
-var BaseController = {
+let BaseController = {
 displayPIN: function displayPIN() {
 do_throw("displayPIN() shouldn't have been called!");
 },
@@ -369,7 +369,7 @@ add_test(function test_wrongPIN() {
 displayPIN: function displayPIN(pin) {
 this.cid = pin.slice(JPAKE_LENGTH_SECRET);
 let secret = pin.slice(0, JPAKE_LENGTH_SECRET);
- secret = Array.prototype.slice.call(secret).reverse().join("");
+ secret = [char for each (char in secret)].reverse().join("");
 let new_pin = secret + this.cid;
 _("Received PIN " + pin + ", but I'm entering " + new_pin);
diff --git a/services/sync/tests/unit/test_keys.js b/services/sync/tests/unit/test_keys.js
index a828b619c..6a2fdd027 100644
--- a/services/sync/tests/unit/test_keys.js
+++ b/services/sync/tests/unit/test_keys.js
@@ -7,7 +7,7 @@
 Cu.import("resource://services-sync/keys.js");
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/util.js");
 
-var collectionKeys = new CollectionKeyManager();
+let collectionKeys = new CollectionKeyManager();
 
 function sha256HMAC(message, key) {
 let h = Utils.makeHMACHasher(Ci.nsICryptoHMAC.SHA256, key);
diff --git a/services/sync/tests/unit/test_load_modules.js b/services/sync/tests/unit/test_load_modules.js
index 0b222520c..4f561bae6 100644
--- a/services/sync/tests/unit/test_load_modules.js
+++ b/services/sync/tests/unit/test_load_modules.js
@@ -9,7 +9,6 @@ const modules = [
 "engines/addons.js",
 "engines/bookmarks.js",
 "engines/clients.js",
- "engines/extension-storage.js",
 "engines/forms.js",
 "engines/history.js",
 "engines/passwords.js",
@@ -20,6 +19,7 @@
 "jpakeclient.js",
 "keys.js",
 "main.js",
+ "notifications.js",
 "policies.js",
 "record.js",
 "resource.js",
diff --git a/services/sync/tests/unit/test_node_reassignment.js b/services/sync/tests/unit/test_node_reassignment.js
index 66d21b6f1..7fe5ed7ed 100644
--- a/services/sync/tests/unit/test_node_reassignment.js
+++ b/services/sync/tests/unit/test_node_reassignment.js
@@ -23,7 +23,7 @@ function run_test() {
 Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
 Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
 initTestLogging();
- validate_all_future_pings();
+ ensureLegacyIdentityManager();
 
 Service.engineManager.register(RotaryEngine);
 
@@ -92,12 +92,11 @@ function prepareServer() {
 function getReassigned() {
 try {
 return Services.prefs.getBoolPref("services.sync.lastSyncReassigned");
+ } catch (ex if (ex.result == Cr.NS_ERROR_UNEXPECTED)) {
+ return false;
 } catch (ex) {
- if (ex.result == Cr.NS_ERROR_UNEXPECTED) {
- return false;
- }
 do_throw("Got exception retrieving lastSyncReassigned: " +
- Log.exceptionStr(ex));
+ Utils.exceptionStr(ex));
 }
 }
 
@@ -107,7 +106,7 @@
 * Runs `between` between the two. This can be used to undo deliberate failure
 * setup, detach observers, etc.
 */
-function* syncAndExpectNodeReassignment(server, firstNotification, between,
+function syncAndExpectNodeReassignment(server, firstNotification, between,
 secondNotification, url) {
 let deferred = Promise.defer();
 function onwards() {
@@ -161,7 +160,7 @@
 yield deferred.promise;
 }
 
-add_task(function* test_momentary_401_engine() {
+add_task(function test_momentary_401_engine() {
 _("Test a failure for engine URLs that's resolved by reassignment.");
 let server = yield prepareServer();
 let john = server.user("johndoe");
@@ -213,7 +212,7 @@
 });
 
 // This test ends up being a failing fetch *after we're already logged in*.
-add_task(function* test_momentary_401_info_collections() {
+add_task(function test_momentary_401_info_collections() {
 _("Test a failure for info/collections that's resolved by reassignment.");
 let server = yield prepareServer();
@@ -236,7 +235,7 @@
 Service.infoURL);
 });
 
-add_task(function* test_momentary_401_storage_loggedin() {
+add_task(function test_momentary_401_storage_loggedin() {
 _("Test a failure for any storage URL, not just engine parts. " +
 "Resolved by reassignment.");
 let server = yield prepareServer();
@@ -261,7 +260,7 @@
 Service.storageURL + "meta/global");
 });
 
-add_task(function* test_momentary_401_storage_loggedout() {
+add_task(function test_momentary_401_storage_loggedout() {
 _("Test a failure for any storage URL, not just engine parts. " +
 "Resolved by reassignment.");
 let server = yield prepareServer();
@@ -283,7 +282,7 @@
 Service.storageURL + "meta/global");
 });
 
-add_task(function* test_loop_avoidance_storage() {
+add_task(function test_loop_avoidance_storage() {
 _("Test that a repeated failure doesn't result in a sync loop " +
 "if node reassignment cannot resolve the failure.");
@@ -383,7 +382,7 @@
 yield deferred.promise;
 });
 
-add_task(function* test_loop_avoidance_engine() {
+add_task(function test_loop_avoidance_engine() {
 _("Test that a repeated 401 in an engine doesn't result in a sync loop " +
 "if node reassignment cannot resolve the failure.");
 let server = yield prepareServer();
diff --git a/services/sync/tests/unit/test_notifications.js b/services/sync/tests/unit/test_notifications.js
new file mode 100644
index 000000000..9d6da1d2d
--- /dev/null
+++ b/services/sync/tests/unit/test_notifications.js
@@ -0,0 +1,32 @@
+Cu.import("resource://services-sync/notifications.js");
+
+function run_test() {
+ var logStats = initTestLogging("Info");
+
+ var blah = 0;
+
+ function callback(i) {
+ blah = i;
+ }
+
+ let button = new NotificationButton("label", "accessKey", callback);
+
+ button.callback(5);
+
+ do_check_eq(blah, 5);
+ do_check_eq(logStats.errorsLogged, 0);
+
+ function badCallback() {
+ throw new Error("oops");
+ }
+
+ button = new NotificationButton("label", "accessKey", badCallback);
+
+ try {
+ button.callback();
+ } catch (e) {
+ do_check_eq(e.message, "oops");
+ }
+
+ do_check_eq(logStats.errorsLogged, 1);
+}
diff --git a/services/sync/tests/unit/test_password_store.js b/services/sync/tests/unit/test_password_store.js
index d232d5e63..c56901d79 100644
--- a/services/sync/tests/unit/test_password_store.js
+++ b/services/sync/tests/unit/test_password_store.js
@@ -5,137 +5,6 @@
Cu.import("resource://services-sync/engines/passwords.js"); Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); - -function checkRecord(name, record, expectedCount, timeCreated, - expectedTimeCreated, timePasswordChanged, - expectedTimePasswordChanged, recordIsUpdated) { - let engine = Service.engineManager.get("passwords"); - let store = engine._store; - - let count = {}; - let logins = Services.logins.findLogins(count, record.hostname, - record.formSubmitURL, null); - - _("Record" + name + ":" + JSON.stringify(logins)); - _("Count" + name + ":" + count.value); - - do_check_eq(count.value, expectedCount); - - if (expectedCount > 0) { - do_check_true(!!store.getAllIDs()[record.id]); - let stored_record = logins[0].QueryInterface(Ci.nsILoginMetaInfo); - - if (timeCreated !== undefined) { - do_check_eq(stored_record.timeCreated, expectedTimeCreated); - } - - if (timePasswordChanged !== undefined) { - if (recordIsUpdated) { - do_check_true(stored_record.timePasswordChanged >= expectedTimePasswordChanged); - } else { - do_check_eq(stored_record.timePasswordChanged, expectedTimePasswordChanged); - } - return stored_record.timePasswordChanged; - } - } else { - do_check_true(!store.getAllIDs()[record.id]); - } -} - - -function changePassword(name, hostname, password, expectedCount, timeCreated, - expectedTimeCreated, timePasswordChanged, - expectedTimePasswordChanged, insert, recordIsUpdated) { - - const BOGUS_GUID = "zzzzzz" + hostname; - - let record = {id: BOGUS_GUID, - hostname: hostname, - formSubmitURL: hostname, - username: "john", - password: password, - usernameField: "username", - passwordField: "password"}; - - if (timeCreated !== undefined) { - record.timeCreated = timeCreated; - } - - if (timePasswordChanged !== undefined) { - record.timePasswordChanged = timePasswordChanged; - } - - - let engine = Service.engineManager.get("passwords"); - let store = engine._store; - - if (insert) { - do_check_eq(store.applyIncomingBatch([record]).length, 0); - } - - return checkRecord(name, record, expectedCount, timeCreated, - expectedTimeCreated, timePasswordChanged, - expectedTimePasswordChanged, recordIsUpdated); - -} - - -function test_apply_records_with_times(hostname, timeCreated, timePasswordChanged) { - // The following record is going to be inserted in the store and it needs - // to be found there. Then its timestamps are going to be compared to - // the expected values. - changePassword(" ", hostname, "password", 1, timeCreated, timeCreated, - timePasswordChanged, timePasswordChanged, true); -} - - -function test_apply_multiple_records_with_times() { - // The following records are going to be inserted in the store and they need - // to be found there. Then their timestamps are going to be compared to - // the expected values. - changePassword("A", "http://foo.a.com", "password", 1, undefined, undefined, - undefined, undefined, true); - changePassword("B", "http://foo.b.com", "password", 1, 1000, 1000, undefined, - undefined, true); - changePassword("C", "http://foo.c.com", "password", 1, undefined, undefined, - 1000, 1000, true); - changePassword("D", "http://foo.d.com", "password", 1, 1000, 1000, 1000, - 1000, true); - - // The following records are not going to be inserted in the store and they - // are not going to be found there. 
- changePassword("NotInStoreA", "http://foo.aaaa.com", "password", 0, - undefined, undefined, undefined, undefined, false); - changePassword("NotInStoreB", "http://foo.bbbb.com", "password", 0, 1000, - 1000, undefined, undefined, false); - changePassword("NotInStoreC", "http://foo.cccc.com", "password", 0, - undefined, undefined, 1000, 1000, false); - changePassword("NotInStoreD", "http://foo.dddd.com", "password", 0, 1000, - 1000, 1000, 1000, false); -} - - -function test_apply_same_record_with_different_times() { - // The following record is going to be inserted multiple times in the store - // and it needs to be found there. Then its timestamps are going to be - // compared to the expected values. - var timePasswordChanged = 100; - timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 100, - 100, 100, timePasswordChanged, true); - timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 100, - 100, 800, timePasswordChanged, true, - true); - timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 500, - 100, 800, timePasswordChanged, true, - true); - timePasswordChanged = changePassword("A", "http://a.tn", "password2", 1, 500, - 100, 1536213005222, timePasswordChanged, - true, true); - timePasswordChanged = changePassword("A", "http://a.tn", "password2", 1, 500, - 100, 800, timePasswordChanged, true, true); -} - - function run_test() { initTestLogging("Trace"); Log.repository.getLogger("Sync.Engine.Passwords").level = Log.Level.Trace; @@ -161,9 +30,12 @@ function run_test() { let engine = Service.engineManager.get("passwords"); let store = engine._store; + function applyEnsureNoFailures(records) { + do_check_eq(store.applyIncomingBatch(records).length, 0); + } try { - do_check_eq(store.applyIncomingBatch([recordA, recordB]).length, 0); + applyEnsureNoFailures([recordA, recordB]); // Only the good record makes it to Services.logins. let badCount = {}; @@ -183,17 +55,7 @@ function run_test() { do_check_true(!!store.getAllIDs()[BOGUS_GUID_B]); do_check_true(!store.getAllIDs()[BOGUS_GUID_A]); - - test_apply_records_with_times("http://afoo.baz.com", undefined, undefined); - test_apply_records_with_times("http://bfoo.baz.com", 1000, undefined); - test_apply_records_with_times("http://cfoo.baz.com", undefined, 2000); - test_apply_records_with_times("http://dfoo.baz.com", 1000, 2000); - - test_apply_multiple_records_with_times(); - - test_apply_same_record_with_different_times(); - } finally { store.wipe(); } -}
\ No newline at end of file +} diff --git a/services/sync/tests/unit/test_password_tracker.js b/services/sync/tests/unit/test_password_tracker.js index 09ca141a6..ddfc524ab 100644 --- a/services/sync/tests/unit/test_password_tracker.js +++ b/services/sync/tests/unit/test_password_tracker.js @@ -8,9 +8,9 @@ Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); Service.engineManager.register(PasswordEngine); -var engine = Service.engineManager.get("passwords"); -var store = engine._store; -var tracker = engine._tracker; +let engine = Service.engineManager.get("passwords"); +let store = engine._store; +let tracker = engine._tracker; // Don't do asynchronous writes. tracker.persistChangedIDs = false; diff --git a/services/sync/tests/unit/test_password_validator.js b/services/sync/tests/unit/test_password_validator.js deleted file mode 100644 index a4a148fbe..000000000 --- a/services/sync/tests/unit/test_password_validator.js +++ /dev/null @@ -1,158 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. - http://creativecommons.org/publicdomain/zero/1.0/ */ - -Components.utils.import("resource://services-sync/engines/passwords.js"); - -function getDummyServerAndClient() { - return { - server: [ - { - id: "11111", - guid: "11111", - hostname: "https://www.11111.com", - formSubmitURL: "https://www.11111.com/login", - password: "qwerty123", - passwordField: "pass", - username: "foobar", - usernameField: "user", - httpRealm: null, - }, - { - id: "22222", - guid: "22222", - hostname: "https://www.22222.org", - formSubmitURL: "https://www.22222.org/login", - password: "hunter2", - passwordField: "passwd", - username: "baz12345", - usernameField: "user", - httpRealm: null, - }, - { - id: "33333", - guid: "33333", - hostname: "https://www.33333.com", - formSubmitURL: "https://www.33333.com/login", - password: "p4ssw0rd", - passwordField: "passwad", - username: "quux", - usernameField: "user", - httpRealm: null, - }, - ], - client: [ - { - id: "11111", - guid: "11111", - hostname: "https://www.11111.com", - formSubmitURL: "https://www.11111.com/login", - password: "qwerty123", - passwordField: "pass", - username: "foobar", - usernameField: "user", - httpRealm: null, - }, - { - id: "22222", - guid: "22222", - hostname: "https://www.22222.org", - formSubmitURL: "https://www.22222.org/login", - password: "hunter2", - passwordField: "passwd", - username: "baz12345", - usernameField: "user", - httpRealm: null, - - }, - { - id: "33333", - guid: "33333", - hostname: "https://www.33333.com", - formSubmitURL: "https://www.33333.com/login", - password: "p4ssw0rd", - passwordField: "passwad", - username: "quux", - usernameField: "user", - httpRealm: null, - } - ] - }; -} - - -add_test(function test_valid() { - let { server, client } = getDummyServerAndClient(); - let validator = new PasswordValidator(); - let { problemData, clientRecords, records, deletedRecords } = - validator.compareClientWithServer(client, server); - equal(clientRecords.length, 3); - equal(records.length, 3) - equal(deletedRecords.length, 0); - deepEqual(problemData, validator.emptyProblemData()); - - run_next_test(); -}); - -add_test(function test_missing() { - let validator = new PasswordValidator(); - { - let { server, client } = getDummyServerAndClient(); - - client.pop(); - - let { problemData, clientRecords, records, deletedRecords } = - validator.compareClientWithServer(client, server); - - equal(clientRecords.length, 2); - equal(records.length, 3) - equal(deletedRecords.length, 0); - - 
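
For reference, the core of the validator test being deleted here: compareClientWithServer() partitions both sides into clientRecords/records/deletedRecords and accumulates discrepancies on a problem-data object, so a record missing from one side is asserted like this (ids from getDummyServerAndClient() above):

  let validator = new PasswordValidator();
  let { problemData, clientRecords, records, deletedRecords } =
      validator.compareClientWithServer(client, server);
  // Each discrepancy bucket starts empty and collects offending ids:
  let expected = validator.emptyProblemData();
  expected.clientMissing.push("33333"); // client.pop() removed this record
  deepEqual(problemData, expected);
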
let expected = validator.emptyProblemData(); - expected.clientMissing.push("33333"); - deepEqual(problemData, expected); - } - { - let { server, client } = getDummyServerAndClient(); - - server.pop(); - - let { problemData, clientRecords, records, deletedRecords } = - validator.compareClientWithServer(client, server); - - equal(clientRecords.length, 3); - equal(records.length, 2) - equal(deletedRecords.length, 0); - - let expected = validator.emptyProblemData(); - expected.serverMissing.push("33333"); - deepEqual(problemData, expected); - } - - run_next_test(); -}); - - -add_test(function test_deleted() { - let { server, client } = getDummyServerAndClient(); - let deletionRecord = { id: "444444", guid: "444444", deleted: true }; - - server.push(deletionRecord); - let validator = new PasswordValidator(); - - let { problemData, clientRecords, records, deletedRecords } = - validator.compareClientWithServer(client, server); - - equal(clientRecords.length, 3); - equal(records.length, 4); - deepEqual(deletedRecords, [deletionRecord]); - - let expected = validator.emptyProblemData(); - deepEqual(problemData, expected); - - run_next_test(); -}); - - -function run_test() { - run_next_test(); -} diff --git a/services/sync/tests/unit/test_postqueue.js b/services/sync/tests/unit/test_postqueue.js deleted file mode 100644 index e60008a96..000000000 --- a/services/sync/tests/unit/test_postqueue.js +++ /dev/null @@ -1,455 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. - * http://creativecommons.org/publicdomain/zero/1.0/ */ - -let { PostQueue } = Cu.import("resource://services-sync/record.js", {}); - -initTestLogging("Trace"); - -function makeRecord(nbytes) { - // make a string 2-bytes less - the added quotes will make it correct. - return { - toJSON: () => "x".repeat(nbytes-2), - } -} - -function makePostQueue(config, lastModTime, responseGenerator) { - let stats = { - posts: [], - } - let poster = (data, headers, batch, commit) => { - let thisPost = { nbytes: data.length, batch, commit }; - if (headers.length) { - thisPost.headers = headers; - } - stats.posts.push(thisPost); - return responseGenerator.next().value; - } - - let done = () => {} - let pq = new PostQueue(poster, lastModTime, config, getTestLogger(), done); - return { pq, stats }; -} - -add_test(function test_simple() { - let config = { - max_post_bytes: 1000, - max_post_records: 100, - max_batch_bytes: Infinity, - max_batch_records: Infinity, - } - - const time = 11111111; - - function* responseGenerator() { - yield { success: true, status: 200, headers: { 'x-weave-timestamp': time + 100, 'x-last-modified': time + 100 } }; - } - - let { pq, stats } = makePostQueue(config, time, responseGenerator()); - pq.enqueue(makeRecord(10)); - pq.flush(true); - - deepEqual(stats.posts, [{ - nbytes: 12, // expect our 10 byte record plus "[]" to wrap it. - commit: true, // we don't know if we have batch semantics, so committed. 
- headers: [["x-if-unmodified-since", time]], - batch: "true"}]); - - run_next_test(); -}); - -// Test we do the right thing when we need to make multiple posts when there -// are no batch semantics -add_test(function test_max_post_bytes_no_batch() { - let config = { - max_post_bytes: 50, - max_post_records: 4, - max_batch_bytes: Infinity, - max_batch_records: Infinity, - } - - const time = 11111111; - function* responseGenerator() { - yield { success: true, status: 200, headers: { 'x-weave-timestamp': time + 100, 'x-last-modified': time + 100 } }; - yield { success: true, status: 200, headers: { 'x-weave-timestamp': time + 200, 'x-last-modified': time + 200 } }; - } - - let { pq, stats } = makePostQueue(config, time, responseGenerator()); - pq.enqueue(makeRecord(20)); // total size now 22 bytes - "[" + record + "]" - pq.enqueue(makeRecord(20)); // total size now 43 bytes - "[" + record + "," + record + "]" - pq.enqueue(makeRecord(20)); // this will exceed our byte limit, so will be in the 2nd POST. - pq.flush(true); - - deepEqual(stats.posts, [ - { - nbytes: 43, // 43 for the first post - commit: false, - headers: [["x-if-unmodified-since", time]], - batch: "true", - },{ - nbytes: 22, - commit: false, // we know we aren't in a batch, so never commit. - headers: [["x-if-unmodified-since", time + 100]], - batch: null, - } - ]); - equal(pq.lastModified, time + 200); - - run_next_test(); -}); - -// Similar to the above, but we've hit max_records instead of max_bytes. -add_test(function test_max_post_records_no_batch() { - let config = { - max_post_bytes: 100, - max_post_records: 2, - max_batch_bytes: Infinity, - max_batch_records: Infinity, - } - - const time = 11111111; - - function* responseGenerator() { - yield { success: true, status: 200, headers: { 'x-weave-timestamp': time + 100, 'x-last-modified': time + 100 } }; - yield { success: true, status: 200, headers: { 'x-weave-timestamp': time + 200, 'x-last-modified': time + 200 } }; - } - - let { pq, stats } = makePostQueue(config, time, responseGenerator()); - pq.enqueue(makeRecord(20)); // total size now 22 bytes - "[" + record + "]" - pq.enqueue(makeRecord(20)); // total size now 43 bytes - "[" + record + "," + record + "]" - pq.enqueue(makeRecord(20)); // this will exceed our records limit, so will be in the 2nd POST. - pq.flush(true); - - deepEqual(stats.posts, [ - { - nbytes: 43, // 43 for the first post - commit: false, - batch: "true", - headers: [["x-if-unmodified-since", time]], - },{ - nbytes: 22, - commit: false, // we know we aren't in a batch, so never commit. - batch: null, - headers: [["x-if-unmodified-since", time + 100]], - } - ]); - equal(pq.lastModified, time + 200); - - run_next_test(); -}); - -// Batch tests. - -// Test making a single post when batch semantics are in place. -add_test(function test_single_batch() { - let config = { - max_post_bytes: 1000, - max_post_records: 100, - max_batch_bytes: 2000, - max_batch_records: 200, - } - const time = 11111111; - function* responseGenerator() { - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time, 'x-weave-timestamp': time + 100 }, - }; - } - - let { pq, stats } = makePostQueue(config, time, responseGenerator()); - ok(pq.enqueue(makeRecord(10)).enqueued); - pq.flush(true); - - deepEqual(stats.posts, [ - { - nbytes: 12, // expect our 10 byte record plus "[]" to wrap it. - commit: true, // we don't know if we have batch semantics, so committed. 
- batch: "true", - headers: [["x-if-unmodified-since", time]], - } - ]); - - run_next_test(); -}); - -// Test we do the right thing when we need to make multiple posts when there -// are batch semantics in place. -add_test(function test_max_post_bytes_batch() { - let config = { - max_post_bytes: 50, - max_post_records: 4, - max_batch_bytes: 5000, - max_batch_records: 100, - } - - const time = 11111111; - function* responseGenerator() { - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time, 'x-weave-timestamp': time + 100 }, - }; - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time + 200, 'x-weave-timestamp': time + 200 }, - }; - } - - let { pq, stats } = makePostQueue(config, time, responseGenerator()); - ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 22 bytes - "[" + record + "]" - ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]" - ok(pq.enqueue(makeRecord(20)).enqueued); // this will exceed our byte limit, so will be in the 2nd POST. - pq.flush(true); - - deepEqual(stats.posts, [ - { - nbytes: 43, // 43 for the first post - commit: false, - batch: "true", - headers: [['x-if-unmodified-since', time]], - },{ - nbytes: 22, - commit: true, - batch: 1234, - headers: [['x-if-unmodified-since', time]], - } - ]); - - equal(pq.lastModified, time + 200); - - run_next_test(); -}); - -// Test we do the right thing when the batch bytes limit is exceeded. -add_test(function test_max_post_bytes_batch() { - let config = { - max_post_bytes: 50, - max_post_records: 20, - max_batch_bytes: 70, - max_batch_records: 100, - } - - const time0 = 11111111; - const time1 = 22222222; - function* responseGenerator() { - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time0, 'x-weave-timestamp': time0 + 100 }, - }; - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time1, 'x-weave-timestamp': time1 }, - }; - yield { success: true, status: 202, obj: { batch: 5678 }, - headers: { 'x-last-modified': time1, 'x-weave-timestamp': time1 + 100 }, - }; - yield { success: true, status: 202, obj: { batch: 5678 }, - headers: { 'x-last-modified': time1 + 200, 'x-weave-timestamp': time1 + 200 }, - }; - } - - let { pq, stats } = makePostQueue(config, time0, responseGenerator()); - ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 22 bytes - "[" + record + "]" - ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]" - // this will exceed our POST byte limit, so will be in the 2nd POST - but still in the first batch. - ok(pq.enqueue(makeRecord(20)).enqueued); // 22 bytes for 2nd post, 55 bytes in the batch. - // this will exceed our batch byte limit, so will be in a new batch. - ok(pq.enqueue(makeRecord(20)).enqueued); // 22 bytes in 3rd post/2nd batch - ok(pq.enqueue(makeRecord(20)).enqueued); // 43 bytes in 3rd post/2nd batch - // This will exceed POST byte limit, so will be in the 4th post, part of the 2nd batch. - ok(pq.enqueue(makeRecord(20)).enqueued); // 22 bytes for 4th post/2nd batch - pq.flush(true); - - deepEqual(stats.posts, [ - { - nbytes: 43, // 43 for the first post - commit: false, - batch: "true", - headers: [['x-if-unmodified-since', time0]], - },{ - // second post of 22 bytes in the first batch, committing it. 
- nbytes: 22, - commit: true, - batch: 1234, - headers: [['x-if-unmodified-since', time0]], - }, { - // 3rd post of 43 bytes in a new batch, not yet committing it. - nbytes: 43, - commit: false, - batch: "true", - headers: [['x-if-unmodified-since', time1]], - },{ - // 4th post of 22 bytes in second batch, committing it. - nbytes: 22, - commit: true, - batch: 5678, - headers: [['x-if-unmodified-since', time1]], - }, - ]); - - equal(pq.lastModified, time1 + 200); - - run_next_test(); -}); - -// Test we split up the posts when we exceed the record limit when batch semantics -// are in place. -add_test(function test_max_post_bytes_batch() { - let config = { - max_post_bytes: 1000, - max_post_records: 2, - max_batch_bytes: 5000, - max_batch_records: 100, - } - - const time = 11111111; - function* responseGenerator() { - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time, 'x-weave-timestamp': time + 100 }, - }; - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time + 200, 'x-weave-timestamp': time + 200 }, - }; - } - - let { pq, stats } = makePostQueue(config, time, responseGenerator()); - ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 22 bytes - "[" + record + "]" - ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]" - ok(pq.enqueue(makeRecord(20)).enqueued); // will exceed record limit, so will be in 2nd post. - pq.flush(true); - - deepEqual(stats.posts, [ - { - nbytes: 43, // 43 for the first post - commit: false, - batch: "true", - headers: [['x-if-unmodified-since', time]], - },{ - nbytes: 22, - commit: true, - batch: 1234, - headers: [['x-if-unmodified-since', time]], - } - ]); - - equal(pq.lastModified, time + 200); - - run_next_test(); -}); - -// Test that a single huge record fails to enqueue -add_test(function test_huge_record() { - let config = { - max_post_bytes: 50, - max_post_records: 100, - max_batch_bytes: 5000, - max_batch_records: 100, - } - - const time = 11111111; - function* responseGenerator() { - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time, 'x-weave-timestamp': time + 100 }, - }; - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time + 200, 'x-weave-timestamp': time + 200 }, - }; - } - - let { pq, stats } = makePostQueue(config, time, responseGenerator()); - ok(pq.enqueue(makeRecord(20)).enqueued); - - let { enqueued, error } = pq.enqueue(makeRecord(1000)); - ok(!enqueued); - notEqual(error, undefined); - - // make sure that we keep working, skipping the bad record entirely - // (handling the error the queue reported is left up to caller) - ok(pq.enqueue(makeRecord(20)).enqueued); - ok(pq.enqueue(makeRecord(20)).enqueued); - - pq.flush(true); - - deepEqual(stats.posts, [ - { - nbytes: 43, // 43 for the first post - commit: false, - batch: "true", - headers: [['x-if-unmodified-since', time]], - },{ - nbytes: 22, - commit: true, - batch: 1234, - headers: [['x-if-unmodified-since', time]], - } - ]); - - equal(pq.lastModified, time + 200); - - run_next_test(); -}); - -// Test we do the right thing when the batch record limit is exceeded. 
-add_test(function test_max_records_batch() { - let config = { - max_post_bytes: 1000, - max_post_records: 3, - max_batch_bytes: 10000, - max_batch_records: 5, - } - - const time0 = 11111111; - const time1 = 22222222; - function* responseGenerator() { - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time0, 'x-weave-timestamp': time0 + 100 }, - }; - yield { success: true, status: 202, obj: { batch: 1234 }, - headers: { 'x-last-modified': time1, 'x-weave-timestamp': time1 }, - }; - yield { success: true, status: 202, obj: { batch: 5678 }, - headers: { 'x-last-modified': time1, 'x-weave-timestamp': time1 + 100 }, - }; - yield { success: true, status: 202, obj: { batch: 5678 }, - headers: { 'x-last-modified': time1 + 200, 'x-weave-timestamp': time1 + 200 }, - }; - } - - let { pq, stats } = makePostQueue(config, time0, responseGenerator()); - - ok(pq.enqueue(makeRecord(20)).enqueued); - ok(pq.enqueue(makeRecord(20)).enqueued); - ok(pq.enqueue(makeRecord(20)).enqueued); - - ok(pq.enqueue(makeRecord(20)).enqueued); - ok(pq.enqueue(makeRecord(20)).enqueued); - - ok(pq.enqueue(makeRecord(20)).enqueued); - ok(pq.enqueue(makeRecord(20)).enqueued); - ok(pq.enqueue(makeRecord(20)).enqueued); - - ok(pq.enqueue(makeRecord(20)).enqueued); - - pq.flush(true); - - deepEqual(stats.posts, [ - { // 3 records - nbytes: 64, - commit: false, - batch: "true", - headers: [['x-if-unmodified-since', time0]], - },{ // 2 records -- end batch1 - nbytes: 43, - commit: true, - batch: 1234, - headers: [['x-if-unmodified-since', time0]], - }, { // 3 records - nbytes: 64, - commit: false, - batch: "true", - headers: [['x-if-unmodified-since', time1]], - },{ // 1 record -- end batch2 - nbytes: 22, - commit: true, - batch: 5678, - headers: [['x-if-unmodified-since', time1]], - }, - ]); - - equal(pq.lastModified, time1 + 200); - - run_next_test(); -});
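
All of the nbytes values asserted in the deleted PostQueue tests follow from one rule: makeRecord(n) serializes to exactly n bytes (n-2 x's plus the two added quotes), and a POST body is the JSON array around the queued records. A small worked check of that arithmetic (the helper name is ours, not part of the test):

  // k records add "[" + "]" plus (k - 1) separating commas:
  function expectedPostBytes(recordSizes) {
    let payload = recordSizes.reduce((a, b) => a + b, 0);
    return payload + 2 + (recordSizes.length - 1);
  }
  expectedPostBytes([10]);          // 12 -- the single-record post
  expectedPostBytes([20, 20]);      // 43 -- "[r,r]", two records per post
  expectedPostBytes([20, 20, 20]);  // 64 -- three records per post
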
\ No newline at end of file diff --git a/services/sync/tests/unit/test_prefs_store.js b/services/sync/tests/unit/test_prefs_store.js index 9c321bceb..51b220d53 100644 --- a/services/sync/tests/unit/test_prefs_store.js +++ b/services/sync/tests/unit/test_prefs_store.js @@ -23,22 +23,25 @@ function makePersona(id) { } function run_test() { - _("Test fixtures."); - // read our custom prefs file before doing anything. - Services.prefs.readUserPrefs(do_get_file("prefs_test_prefs_store.js")); - // Now we've read from this file, any writes the pref service makes will be - // back to this prefs_test_prefs_store.js directly in the obj dir. This - // upsets things in confusing ways :) We avoid this by explicitly telling the - // pref service to use a file in our profile dir. - let prefFile = do_get_profile(); - prefFile.append("prefs.js"); - Services.prefs.savePrefFile(prefFile); - Services.prefs.readUserPrefs(prefFile); - let store = Service.engineManager.get("prefs")._store; let prefs = new Preferences(); try { + _("Test fixtures."); + Svc.Prefs.set("prefs.sync.testing.int", true); + Svc.Prefs.set("prefs.sync.testing.string", true); + Svc.Prefs.set("prefs.sync.testing.bool", true); + Svc.Prefs.set("prefs.sync.testing.dont.change", true); + Svc.Prefs.set("prefs.sync.testing.turned.off", false); + Svc.Prefs.set("prefs.sync.testing.nonexistent", true); + + prefs.set("testing.int", 123); + prefs.set("testing.string", "ohai"); + prefs.set("testing.bool", true); + prefs.set("testing.dont.change", "Please don't change me."); + prefs.set("testing.turned.off", "I won't get synced."); + prefs.set("testing.not.turned.on", "I won't get synced either!"); + _("The GUID corresponds to XUL App ID."); let allIDs = store.getAllIDs(); let ids = Object.keys(allIDs); @@ -58,22 +61,17 @@ function run_test() { do_check_eq(record.value["testing.int"], 123); do_check_eq(record.value["testing.string"], "ohai"); do_check_eq(record.value["testing.bool"], true); - // non-existing prefs get null as the value do_check_eq(record.value["testing.nonexistent"], null); - // as do prefs that have a default value. - do_check_eq(record.value["testing.default"], null); do_check_false("testing.turned.off" in record.value); do_check_false("testing.not.turned.on" in record.value); - _("Prefs record contains non-default pref sync prefs too."); - do_check_eq(record.value["services.sync.prefs.sync.testing.int"], null); - do_check_eq(record.value["services.sync.prefs.sync.testing.string"], null); - do_check_eq(record.value["services.sync.prefs.sync.testing.bool"], null); - do_check_eq(record.value["services.sync.prefs.sync.testing.dont.change"], null); - // but this one is a user_pref so *will* be synced. 
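
The fixture rewrite in this hunk leans on the prefs engine's naming convention: a plain preference only syncs when a companion services.sync.prefs.sync.<name> preference is true. Since Svc.Prefs is rooted at the services.sync. branch, the test writes the flag and the value through two different objects; a condensed sketch:

  // Creates services.sync.prefs.sync.testing.int = true,
  // marking "testing.int" as syncable...
  Svc.Prefs.set("prefs.sync.testing.int", true);
  // ...so this value is picked up into the prefs record:
  prefs.set("testing.int", 123);

  // A false flag keeps an existing pref out of the record entirely:
  Svc.Prefs.set("prefs.sync.testing.turned.off", false);
  prefs.set("testing.turned.off", "I won't get synced.");
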
+ _("Prefs record contains pref sync prefs too."); + do_check_eq(record.value["services.sync.prefs.sync.testing.int"], true); + do_check_eq(record.value["services.sync.prefs.sync.testing.string"], true); + do_check_eq(record.value["services.sync.prefs.sync.testing.bool"], true); + do_check_eq(record.value["services.sync.prefs.sync.testing.dont.change"], true); do_check_eq(record.value["services.sync.prefs.sync.testing.turned.off"], false); - do_check_eq(record.value["services.sync.prefs.sync.testing.nonexistent"], null); - do_check_eq(record.value["services.sync.prefs.sync.testing.default"], null); + do_check_eq(record.value["services.sync.prefs.sync.testing.nonexistent"], true); _("Update some prefs, including one that's to be reset/deleted."); Svc.Prefs.set("testing.deleteme", "I'm going to be deleted!"); @@ -99,28 +97,28 @@ function run_test() { // Ensure we don't go to the network to fetch personas and end up leaking // stuff. Services.io.offline = true; - do_check_false(!!prefs.get("lightweightThemes.selectedThemeID")); + do_check_false(!!prefs.get("lightweightThemes.isThemeSelected")); do_check_eq(LightweightThemeManager.currentTheme, null); let persona1 = makePersona(); let persona2 = makePersona(); let usedThemes = JSON.stringify([persona1, persona2]); record.value = { - "lightweightThemes.selectedThemeID": persona1.id, + "lightweightThemes.isThemeSelected": true, "lightweightThemes.usedThemes": usedThemes }; store.update(record); - do_check_eq(prefs.get("lightweightThemes.selectedThemeID"), persona1.id); + do_check_true(prefs.get("lightweightThemes.isThemeSelected")); do_check_true(Utils.deepEquals(LightweightThemeManager.currentTheme, persona1)); _("Disable persona"); record.value = { - "lightweightThemes.selectedThemeID": null, + "lightweightThemes.isThemeSelected": false, "lightweightThemes.usedThemes": usedThemes }; store.update(record); - do_check_false(!!prefs.get("lightweightThemes.selectedThemeID")); + do_check_false(prefs.get("lightweightThemes.isThemeSelected")); do_check_eq(LightweightThemeManager.currentTheme, null); _("Only the current app's preferences are applied."); @@ -131,37 +129,6 @@ function run_test() { store.update(record); do_check_eq(prefs.get("testing.int"), 42); - _("The light-weight theme preference is handled correctly."); - let lastThemeID = undefined; - let orig_updateLightWeightTheme = store._updateLightWeightTheme; - store._updateLightWeightTheme = function(themeID) { - lastThemeID = themeID; - } - try { - record = new PrefRec("prefs", PREFS_GUID); - record.value = { - "testing.int": 42, - }; - store.update(record); - do_check_true(lastThemeID === undefined, - "should not have tried to change the theme with an unrelated pref."); - Services.prefs.setCharPref("lightweightThemes.selectedThemeID", "foo"); - record.value = { - "lightweightThemes.selectedThemeID": "foo", - }; - store.update(record); - do_check_true(lastThemeID === undefined, - "should not have tried to change the theme when the incoming pref matches current value."); - - record.value = { - "lightweightThemes.selectedThemeID": "bar", - }; - store.update(record); - do_check_eq(lastThemeID, "bar", - "should have tried to change the theme when the incoming pref was different."); - } finally { - store._updateLightWeightTheme = orig_updateLightWeightTheme; - } } finally { prefs.resetBranch(""); } diff --git a/services/sync/tests/unit/test_records_crypto.js b/services/sync/tests/unit/test_records_crypto.js index 392a746ef..4d623c917 100644 --- a/services/sync/tests/unit/test_records_crypto.js 
+++ b/services/sync/tests/unit/test_records_crypto.js @@ -10,7 +10,7 @@ Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); Cu.import("resource://testing-common/services/sync/utils.js"); -var cryptoWrap; +let cryptoWrap; function crypted_resource_handler(metadata, response) { let obj = {id: "resource", @@ -148,32 +148,6 @@ function run_test() { do_check_eq(bookmarkItem.decrypt(Service.collectionKeys.keyForCollection("bookmarks")).stuff, "my payload here"); - do_check_true(Service.collectionKeys.hasKeysFor(["bookmarks"])); - - // Add a key for some new collection and verify that it isn't the - // default key. - do_check_false(Service.collectionKeys.hasKeysFor(["forms"])); - do_check_false(Service.collectionKeys.hasKeysFor(["bookmarks", "forms"])); - let oldFormsKey = Service.collectionKeys.keyForCollection("forms"); - do_check_eq(oldFormsKey, Service.collectionKeys._default); - let newKeys = Service.collectionKeys.ensureKeysFor(["forms"]); - do_check_true(newKeys.hasKeysFor(["forms"])); - do_check_true(newKeys.hasKeysFor(["bookmarks", "forms"])); - let newFormsKey = newKeys.keyForCollection("forms"); - do_check_neq(newFormsKey, oldFormsKey); - - // Verify that this doesn't overwrite keys - let regetKeys = newKeys.ensureKeysFor(["forms"]); - do_check_eq(regetKeys.keyForCollection("forms"), newFormsKey); - - const emptyKeys = new CollectionKeyManager(); - payload = { - default: Service.collectionKeys._default.keyPairB64, - collections: {} - }; - // Verify that not passing `modified` doesn't throw - emptyKeys.setContents(payload, null); - log.info("Done!"); } finally { diff --git a/services/sync/tests/unit/test_resource.js b/services/sync/tests/unit/test_resource.js index 8f5534c92..027d662b4 100644 --- a/services/sync/tests/unit/test_resource.js +++ b/services/sync/tests/unit/test_resource.js @@ -7,9 +7,9 @@ Cu.import("resource://services-sync/identity.js"); Cu.import("resource://services-sync/resource.js"); Cu.import("resource://services-sync/util.js"); -var logger; +let logger; -var fetched = false; +let fetched = false; function server_open(metadata, response) { let body; if (metadata.method == "GET") { @@ -45,7 +45,7 @@ function server_404(metadata, response) { response.bodyOutputStream.write(body, body.length); } -var pacFetched = false; +let pacFetched = false; function server_pac(metadata, response) { pacFetched = true; let body = 'function FindProxyForURL(url, host) { return "DIRECT"; }'; @@ -55,7 +55,7 @@ function server_pac(metadata, response) { } -var sample_data = { +let sample_data = { some: "sample_data", injson: "format", number: 42 @@ -140,7 +140,7 @@ function server_headers(metadata, response) { header_names = header_names.sort(); headers = {}; - for (let header of header_names) { + for each (let header in header_names) { headers[header] = metadata.getHeader(header); } let body = JSON.stringify(headers); @@ -442,8 +442,6 @@ function run_test() { // It throws and logs. do_check_eq(error.result, Cr.NS_ERROR_MALFORMED_URI); do_check_eq(error, "Error: NS_ERROR_MALFORMED_URI"); - // Note the strings haven't been formatted yet, but that's OK for this test. - do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}"); do_check_eq(warnings.pop(), "Got exception calling onProgress handler during fetch of " + server.baseURI + "/json"); @@ -467,7 +465,6 @@ function run_test() { // It throws and logs. 
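
Stepping back to the records_crypto assertions removed above: they pinned down how CollectionKeyManager hands out keys, namely that an unknown collection falls back to the default key pair and that ensureKeysFor() mints a fresh key without rotating ones that already exist. In brief:

  // Unknown collections use the default key...
  let oldFormsKey = Service.collectionKeys.keyForCollection("forms");
  do_check_eq(oldFormsKey, Service.collectionKeys._default);
  // ...until ensureKeysFor() creates a dedicated one...
  let newKeys = Service.collectionKeys.ensureKeysFor(["forms"]);
  do_check_neq(newKeys.keyForCollection("forms"), oldFormsKey);
  // ...and asking again must not overwrite it:
  do_check_eq(newKeys.ensureKeysFor(["forms"]).keyForCollection("forms"),
              newKeys.keyForCollection("forms"));
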
do_check_eq(error.result, Cr.NS_ERROR_XPC_JS_THREW_STRING); do_check_eq(error, "Error: NS_ERROR_XPC_JS_THREW_STRING"); - do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}"); do_check_eq(warnings.pop(), "Got exception calling onProgress handler during fetch of " + server.baseURI + "/json"); diff --git a/services/sync/tests/unit/test_resource_async.js b/services/sync/tests/unit/test_resource_async.js index 0db91a1b5..c4b9a3804 100644 --- a/services/sync/tests/unit/test_resource_async.js +++ b/services/sync/tests/unit/test_resource_async.js @@ -7,9 +7,9 @@ Cu.import("resource://services-sync/identity.js"); Cu.import("resource://services-sync/resource.js"); Cu.import("resource://services-sync/util.js"); -var logger; +let logger; -var fetched = false; +let fetched = false; function server_open(metadata, response) { let body; if (metadata.method == "GET") { @@ -45,7 +45,7 @@ function server_404(metadata, response) { response.bodyOutputStream.write(body, body.length); } -var pacFetched = false; +let pacFetched = false; function server_pac(metadata, response) { _("Invoked PAC handler."); pacFetched = true; @@ -55,7 +55,7 @@ function server_pac(metadata, response) { response.bodyOutputStream.write(body, body.length); } -var sample_data = { +let sample_data = { some: "sample_data", injson: "format", number: 42 @@ -140,7 +140,7 @@ function server_headers(metadata, response) { header_names = header_names.sort(); headers = {}; - for (let header of header_names) { + for each (let header in header_names) { headers[header] = metadata.getHeader(header); } let body = JSON.stringify(headers); @@ -148,7 +148,7 @@ function server_headers(metadata, response) { response.bodyOutputStream.write(body, body.length); } -var quotaValue; +let quotaValue; Observers.add("weave:service:quota:remaining", function (subject) { quotaValue = subject; }); @@ -221,7 +221,7 @@ add_test(function test_new_channel() { }); -var server; +let server; add_test(function setup() { server = httpd_setup({ diff --git a/services/sync/tests/unit/test_resource_header.js b/services/sync/tests/unit/test_resource_header.js index 4f28e01da..1835cc0e0 100644 --- a/services/sync/tests/unit/test_resource_header.js +++ b/services/sync/tests/unit/test_resource_header.js @@ -11,7 +11,7 @@ function run_test() { run_next_test(); } -var httpServer = new HttpServer(); +let httpServer = new HttpServer(); httpServer.registerPathHandler("/content", contentHandler); httpServer.start(-1); @@ -20,8 +20,8 @@ const TEST_URL = "http://localhost:" + HTTP_PORT + "/content"; const BODY = "response body"; // Keep headers for later inspection. -var auth = null; -var foo = null; +let auth = null; +let foo = null; function contentHandler(metadata, response) { _("Handling request."); auth = metadata.getHeader("Authorization"); diff --git a/services/sync/tests/unit/test_resource_ua.js b/services/sync/tests/unit/test_resource_ua.js index 31c2cd379..279a2b3e6 100644 --- a/services/sync/tests/unit/test_resource_ua.js +++ b/services/sync/tests/unit/test_resource_ua.js @@ -7,18 +7,15 @@ Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); Cu.import("resource://testing-common/services/sync/utils.js"); -var httpProtocolHandler = Cc["@mozilla.org/network/protocol;1?name=http"] - .getService(Ci.nsIHttpProtocolHandler); - // Tracking info/collections. 
-var collectionsHelper = track_collections_helper(); -var collections = collectionsHelper.collections; +let collectionsHelper = track_collections_helper(); +let collections = collectionsHelper.collections; -var meta_global; -var server; +let meta_global; +let server; -var expectedUA; -var ua; +let expectedUA; +let ua; function uaHandler(f) { return function(request, response) { ua = request.getHeader("User-Agent"); @@ -40,10 +37,7 @@ function run_test() { Service.clusterURL = server.baseURI + "/"; _("Server URL: " + server.baseURI); - // Note this string is missing the trailing ".destkop" as the test - // adjusts the "client.type" pref where that portion comes from. expectedUA = Services.appinfo.name + "/" + Services.appinfo.version + - " (" + httpProtocolHandler.oscpu + ")" + " FxSync/" + WEAVE_VERSION + "." + Services.appinfo.appBuildID; diff --git a/services/sync/tests/unit/test_score_triggers.js b/services/sync/tests/unit/test_score_triggers.js index 513be685a..98d3e094a 100644 --- a/services/sync/tests/unit/test_score_triggers.js +++ b/services/sync/tests/unit/test_score_triggers.js @@ -12,13 +12,13 @@ Cu.import("resource://testing-common/services/sync/utils.js"); Service.engineManager.clear(); Service.engineManager.register(RotaryEngine); -var engine = Service.engineManager.get("rotary"); -var tracker = engine._tracker; +let engine = Service.engineManager.get("rotary"); +let tracker = engine._tracker; engine.enabled = true; // Tracking info/collections. -var collectionsHelper = track_collections_helper(); -var upd = collectionsHelper.with_updated_collection; +let collectionsHelper = track_collections_helper(); +let upd = collectionsHelper.with_updated_collection; function sync_httpd_setup() { let handlers = {}; diff --git a/services/sync/tests/unit/test_service_attributes.js b/services/sync/tests/unit/test_service_attributes.js index 931c7741a..dc82f5edb 100644 --- a/services/sync/tests/unit/test_service_attributes.js +++ b/services/sync/tests/unit/test_service_attributes.js @@ -29,6 +29,7 @@ function test_urls() { Service.serverURL = "http://weave.server/"; Service.clusterURL = "http://weave.cluster/"; + do_check_eq(Svc.Prefs.get("clusterURL"), "http://weave.cluster/"); do_check_eq(Service.userBaseURL, "http://weave.cluster/1.1/johndoe/"); do_check_eq(Service.infoURL, @@ -62,11 +63,11 @@ function test_urls() { _("The 'serverURL' attributes updates/resets preferences."); // Identical value doesn't do anything Service.serverURL = Service.serverURL; - do_check_eq(Service.clusterURL, "http://weave.cluster/"); + do_check_eq(Svc.Prefs.get("clusterURL"), "http://weave.cluster/"); Service.serverURL = "http://different.auth.node/"; do_check_eq(Svc.Prefs.get("serverURL"), "http://different.auth.node/"); - do_check_eq(Service.clusterURL, ""); + do_check_eq(Svc.Prefs.get("clusterURL"), undefined); } finally { Svc.Prefs.resetBranch(""); @@ -83,12 +84,12 @@ function test_syncID() { do_check_eq(Svc.Prefs.get("client.syncID"), undefined); // Performing the first get on the attribute will generate a new GUID. 
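
The User-Agent change in test_resource_ua above (repeated in test_service_getStorageInfo below) is a single edit: the Tycho client drops the oscpu segment, so the expected UA reduces to the pieces shown here. The example values in the comment are illustrative, not taken from the tests:

  let expectedUA = Services.appinfo.name + "/" + Services.appinfo.version +
                   " FxSync/" + WEAVE_VERSION + "." + Services.appinfo.appBuildID;
  // e.g. "Basilisk/52.9.0 FxSync/1.40.0.<appBuildID>", to which a
  // ".desktop" or ".mobile" suffix derived from the client.type pref
  // is appended.
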
- do_check_eq(Service.syncID, "fake-guid-00"); - do_check_eq(Svc.Prefs.get("client.syncID"), "fake-guid-00"); + do_check_eq(Service.syncID, "fake-guid-0"); + do_check_eq(Svc.Prefs.get("client.syncID"), "fake-guid-0"); Svc.Prefs.set("client.syncID", Utils.makeGUID()); - do_check_eq(Svc.Prefs.get("client.syncID"), "fake-guid-01"); - do_check_eq(Service.syncID, "fake-guid-01"); + do_check_eq(Svc.Prefs.get("client.syncID"), "fake-guid-1"); + do_check_eq(Service.syncID, "fake-guid-1"); } finally { Svc.Prefs.resetBranch(""); new FakeGUIDService(); diff --git a/services/sync/tests/unit/test_service_detect_upgrade.js b/services/sync/tests/unit/test_service_detect_upgrade.js index 0f46832d9..528bd751b 100644 --- a/services/sync/tests/unit/test_service_detect_upgrade.js +++ b/services/sync/tests/unit/test_service_detect_upgrade.js @@ -60,7 +60,7 @@ add_test(function v4_upgrade() { }]}]}; delete Svc.Session; Svc.Session = { - getBrowserState: () => JSON.stringify(myTabs) + getBrowserState: function () JSON.stringify(myTabs) }; Service.status.resetSync(); @@ -229,7 +229,7 @@ add_test(function v5_upgrade() { }]}]}; delete Svc.Session; Svc.Session = { - getBrowserState: () => JSON.stringify(myTabs) + getBrowserState: function () JSON.stringify(myTabs) }; Service.status.resetSync(); diff --git a/services/sync/tests/unit/test_service_getStorageInfo.js b/services/sync/tests/unit/test_service_getStorageInfo.js index 841dceb78..4d463044b 100644 --- a/services/sync/tests/unit/test_service_getStorageInfo.js +++ b/services/sync/tests/unit/test_service_getStorageInfo.js @@ -7,10 +7,7 @@ Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); Cu.import("resource://testing-common/services/sync/utils.js"); -var httpProtocolHandler = Cc["@mozilla.org/network/protocol;1?name=http"] - .getService(Ci.nsIHttpProtocolHandler); - -var collections = {steam: 65.11328, +let collections = {steam: 65.11328, petrol: 82.488281, diesel: 2.25488281}; @@ -40,7 +37,6 @@ add_test(function test_success() { Service.identity.username, Service.identity.basicPassword)); let expectedUA = Services.appinfo.name + "/" + Services.appinfo.version + - " (" + httpProtocolHandler.oscpu + ")" + " FxSync/" + WEAVE_VERSION + "." + Services.appinfo.appBuildID + ".desktop"; do_check_eq(handler.request.getHeader("User-Agent"), expectedUA); diff --git a/services/sync/tests/unit/test_service_login.js b/services/sync/tests/unit/test_service_login.js index 2ecb0a377..52ee5e63a 100644 --- a/services/sync/tests/unit/test_service_login.js +++ b/services/sync/tests/unit/test_service_login.js @@ -183,7 +183,7 @@ add_test(function test_login_on_sync() { // This test exercises these two branches. _("We're ready to sync if locked."); - Service.enabled = true; + Svc.Prefs.set("enabled", true); Services.io.offline = false; Service.scheduler.checkSyncStatus(); do_check_true(scheduleCalled); diff --git a/services/sync/tests/unit/test_service_passwordUTF8.js b/services/sync/tests/unit/test_service_passwordUTF8.js index e781050b3..733911291 100644 --- a/services/sync/tests/unit/test_service_passwordUTF8.js +++ b/services/sync/tests/unit/test_service_passwordUTF8.js @@ -11,13 +11,13 @@ const APPLES = "\uf8ff\uf8ff\uf8ff\uf8ff"; const LOWBYTES = "\xff\xff\xff\xff"; // Poor man's /etc/passwd. Static since there's no btoa()/atob() in xpcshell. 
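
The static table that follows plays back canned Basic auth headers precisely because xpcshell has no btoa()/atob(). Purely for reference, a sketch of how such an entry would be derived in an environment where btoa() exists (the helper name is ours):

  function basicAuthFor(user, password) {
    // Sync 1.1 basic auth is base64("user:password") over the
    // UTF-8 byte string of the credentials:
    return "Basic " + btoa(user + ":" + Utils.encodeUTF8(password));
  }
  // basicAuthFor("johndoe", JAPANESE) yields the JAPANESE table entry below.
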
-var basicauth = {}; +let basicauth = {}; basicauth[LOWBYTES] = "Basic am9obmRvZTr/////"; basicauth[Utils.encodeUTF8(JAPANESE)] = "Basic am9obmRvZTrjk7/jl7/jm7/jn78="; // Global var for the server password, read by info_collections(), // modified by change_password(). -var server_password; +let server_password; function login_handling(handler) { return function (request, response) { diff --git a/services/sync/tests/unit/test_service_startOver.js b/services/sync/tests/unit/test_service_startOver.js index 899420548..6fb0a66d7 100644 --- a/services/sync/tests/unit/test_service_startOver.js +++ b/services/sync/tests/unit/test_service_startOver.js @@ -28,7 +28,7 @@ function run_test() { run_next_test(); } -add_identity_test(this, function* test_resetLocalData() { +add_identity_test(this, function test_resetLocalData() { yield configureIdentity(); Service.status.enforceBackoff = true; Service.status.backoffInterval = 42; diff --git a/services/sync/tests/unit/test_service_startup.js b/services/sync/tests/unit/test_service_startup.js index 5148f6d13..6ced39da9 100644 --- a/services/sync/tests/unit/test_service_startup.js +++ b/services/sync/tests/unit/test_service_startup.js @@ -10,7 +10,6 @@ Svc.Prefs.set("registerEngines", "Tab,Bookmarks,Form,History"); Cu.import("resource://services-sync/service.js"); function run_test() { - validate_all_future_pings(); _("When imported, Service.onStartup is called"); initTestLogging("Trace"); @@ -21,7 +20,7 @@ function run_test() { // Test fixtures Service.identity.username = "johndoe"; - do_check_true(xps.enabled); + do_check_false(xps.enabled); Cu.import("resource://services-sync/service.js"); @@ -30,7 +29,7 @@ function run_test() { _("Engines are registered."); let engines = Service.engineManager.getAll(); - do_check_true(Utils.deepEquals(engines.map(engine => engine.name), + do_check_true(Utils.deepEquals([engine.name for each (engine in engines)], ['tabs', 'bookmarks', 'forms', 'history'])); _("Observers are notified of startup"); @@ -46,4 +45,10 @@ function run_test() { Svc.Prefs.resetBranch(""); do_test_finished(); }); + + do_check_false(xps.enabled); + + Service.identity.account = "johndoe"; + Service.clusterURL = "http://localhost/"; + do_check_true(xps.enabled); } diff --git a/services/sync/tests/unit/test_service_sync_locked.js b/services/sync/tests/unit/test_service_sync_locked.js index ee952c7ee..e2cbbfa92 100644 --- a/services/sync/tests/unit/test_service_sync_locked.js +++ b/services/sync/tests/unit/test_service_sync_locked.js @@ -5,17 +5,14 @@ Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); function run_test() { - validate_all_future_pings(); let debug = []; let info = []; function augmentLogger(old) { let d = old.debug; let i = old.info; - // For the purposes of this test we don't need to do full formatting - // of the 2nd param, as the ones we care about are always strings. - old.debug = function(m, p) { debug.push(p ? m + ": " + p : m); d.call(old, m, p); } - old.info = function(m, p) { info.push(p ? m + ": " + p : m); i.call(old, m, p); } + old.debug = function(m) { debug.push(m); d.call(old, m); } + old.info = function(m) { info.push(m); i.call(old, m); } return old; } @@ -31,7 +28,9 @@ function run_test() { Service.sync(); Service._locked = false; - do_check_true(debug[debug.length - 2].startsWith("Exception calling WrappedLock: Could not acquire lock. 
Label: \"service.js: login\".")); - do_check_eq(info[info.length - 1], "Cannot start sync: already syncing?"); + do_check_eq(debug[debug.length - 2], + "Exception: Could not acquire lock. Label: \"service.js: login\". No traceback available"); + do_check_eq(info[info.length - 1], + "Cannot start sync: already syncing?"); } diff --git a/services/sync/tests/unit/test_service_sync_remoteSetup.js b/services/sync/tests/unit/test_service_sync_remoteSetup.js index 83dbf3cd7..852ba64d5 100644 --- a/services/sync/tests/unit/test_service_sync_remoteSetup.js +++ b/services/sync/tests/unit/test_service_sync_remoteSetup.js @@ -10,7 +10,6 @@ Cu.import("resource://testing-common/services/sync/fakeservices.js"); Cu.import("resource://testing-common/services/sync/utils.js"); function run_test() { - validate_all_future_pings(); let logger = Log.repository.rootLogger; Log.repository.rootLogger.addAppender(new Log.DumpAppender()); @@ -54,27 +53,15 @@ function run_test() { return_timestamp(request, response, ts); } - const GLOBAL_PATH = "/1.1/johndoe/storage/meta/global"; - const INFO_PATH = "/1.1/johndoe/info/collections"; - - let handlers = { + let server = httpd_setup({ "/1.1/johndoe/storage": storageHandler, "/1.1/johndoe/storage/crypto/keys": upd("crypto", keysWBO.handler()), "/1.1/johndoe/storage/crypto": upd("crypto", cryptoColl.handler()), "/1.1/johndoe/storage/clients": upd("clients", clients.handler()), - "/1.1/johndoe/storage/meta": upd("meta", wasCalledHandler(metaColl)), "/1.1/johndoe/storage/meta/global": upd("meta", wasCalledHandler(meta_global)), + "/1.1/johndoe/storage/meta": upd("meta", wasCalledHandler(metaColl)), "/1.1/johndoe/info/collections": collectionsHelper.handler - }; - - function mockHandler(path, mock) { - server.registerPathHandler(path, mock(handlers[path])); - return { - restore() { server.registerPathHandler(path, handlers[path]); } - } - } - - let server = httpd_setup(handlers); + }); try { _("Log in."); @@ -102,63 +89,6 @@ function run_test() { Service.recordManager.get(Service.metaURL).payload.syncID = "foobar"; do_check_true(Service._remoteSetup()); - let returnStatusCode = (method, code) => (oldMethod) => (req, res) => { - if (req.method === method) { - res.setStatusLine(req.httpVersion, code, ""); - } else { - oldMethod(req, res); - } - }; - - let mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 401)); - Service.recordManager.del(Service.metaURL); - _("Checking that remoteSetup returns false on 401 on first get /meta/global."); - do_check_false(Service._remoteSetup()); - mock.restore(); - - Service.login("johndoe", "ilovejane", syncKey); - mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 503)); - Service.recordManager.del(Service.metaURL); - _("Checking that remoteSetup returns false on 503 on first get /meta/global."); - do_check_false(Service._remoteSetup()); - do_check_eq(Service.status.sync, METARECORD_DOWNLOAD_FAIL); - mock.restore(); - - mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 404)); - Service.recordManager.del(Service.metaURL); - _("Checking that remoteSetup recovers on 404 on first get /meta/global."); - do_check_true(Service._remoteSetup()); - mock.restore(); - - let makeOutdatedMeta = () => { - Service.metaModified = 0; - let infoResponse = Service._fetchInfo(); - return { - status: infoResponse.status, - obj: { - crypto: infoResponse.obj.crypto, - clients: infoResponse.obj.clients, - meta: 1 - } - }; - } - - _("Checking that remoteSetup recovers on 404 on get /meta/global after clear cached one."); - mock = 
mockHandler(GLOBAL_PATH, returnStatusCode("GET", 404)); - Service.recordManager.set(Service.metaURL, { isNew: false }); - do_check_true(Service._remoteSetup(makeOutdatedMeta())); - mock.restore(); - - _("Checking that remoteSetup returns false on 503 on get /meta/global after clear cached one."); - mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 503)); - Service.status.sync = ""; - Service.recordManager.set(Service.metaURL, { isNew: false }); - do_check_false(Service._remoteSetup(makeOutdatedMeta())); - do_check_eq(Service.status.sync, ""); - mock.restore(); - - metaColl.delete({}); - _("Do an initial sync."); let beforeSync = Date.now()/1000; Service.sync(); @@ -230,6 +160,7 @@ function run_test() { do_check_false(Service.verifyAndFetchSymmetricKeys()); do_check_eq(Service.status.login, LOGIN_FAILED_INVALID_PASSPHRASE); + } finally { Svc.Prefs.resetBranch(""); server.stop(do_test_finished); diff --git a/services/sync/tests/unit/test_service_sync_specified.js b/services/sync/tests/unit/test_service_sync_specified.js deleted file mode 100644 index 7cb0f9d9c..000000000 --- a/services/sync/tests/unit/test_service_sync_specified.js +++ /dev/null @@ -1,160 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. - http://creativecommons.org/publicdomain/zero/1.0/ */ - -Cu.import("resource://services-sync/constants.js"); -Cu.import("resource://services-sync/engines.js"); -Cu.import("resource://services-sync/engines/clients.js"); -Cu.import("resource://services-sync/record.js"); -Cu.import("resource://services-sync/service.js"); -Cu.import("resource://services-sync/util.js"); -Cu.import("resource://testing-common/services/sync/utils.js"); - -initTestLogging(); -Service.engineManager.clear(); - -let syncedEngines = [] - -function SteamEngine() { - SyncEngine.call(this, "Steam", Service); -} -SteamEngine.prototype = { - __proto__: SyncEngine.prototype, - _sync: function _sync() { - syncedEngines.push(this.name); - } -}; -Service.engineManager.register(SteamEngine); - -function StirlingEngine() { - SyncEngine.call(this, "Stirling", Service); -} -StirlingEngine.prototype = { - __proto__: SteamEngine.prototype, - _sync: function _sync() { - syncedEngines.push(this.name); - } -}; -Service.engineManager.register(StirlingEngine); - -// Tracking info/collections. 
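
The file being deleted here, test_service_sync_specified.js, documented Service.sync()'s engine-list contract; condensed (steam is registered with syncPriority 1, stirling with 2):

  Service.sync([]);                    // explicit empty list: no engines sync
  Service.sync(["steam"]);             // only the named engine syncs
  Service.sync(["stirling", "steam"]); // the given order is respected
  Service.sync();                      // default: all enabled engines,
                                       // ordered by ascending syncPriority
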
-var collectionsHelper = track_collections_helper(); -var upd = collectionsHelper.with_updated_collection; - -function sync_httpd_setup(handlers) { - - handlers["/1.1/johndoe/info/collections"] = collectionsHelper.handler; - delete collectionsHelper.collections.crypto; - delete collectionsHelper.collections.meta; - - let cr = new ServerWBO("keys"); - handlers["/1.1/johndoe/storage/crypto/keys"] = - upd("crypto", cr.handler()); - - let cl = new ServerCollection(); - handlers["/1.1/johndoe/storage/clients"] = - upd("clients", cl.handler()); - - return httpd_setup(handlers); -} - -function setUp() { - syncedEngines = []; - let engine = Service.engineManager.get("steam"); - engine.enabled = true; - engine.syncPriority = 1; - - engine = Service.engineManager.get("stirling"); - engine.enabled = true; - engine.syncPriority = 2; - - let server = sync_httpd_setup({ - "/1.1/johndoe/storage/meta/global": new ServerWBO("global", {}).handler(), - }); - new SyncTestingInfrastructure(server, "johndoe", "ilovejane", - "abcdeabcdeabcdeabcdeabcdea"); - return server; -} - -function run_test() { - initTestLogging("Trace"); - validate_all_future_pings(); - Log.repository.getLogger("Sync.Service").level = Log.Level.Trace; - Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace; - - run_next_test(); -} - -add_test(function test_noEngines() { - _("Test: An empty array of engines to sync does nothing."); - let server = setUp(); - - try { - _("Sync with no engines specified."); - Service.sync([]); - deepEqual(syncedEngines, [], "no engines were synced"); - - } finally { - Service.startOver(); - server.stop(run_next_test); - } -}); - -add_test(function test_oneEngine() { - _("Test: Only one engine is synced."); - let server = setUp(); - - try { - - _("Sync with 1 engine specified."); - Service.sync(["steam"]); - deepEqual(syncedEngines, ["steam"]) - - } finally { - Service.startOver(); - server.stop(run_next_test); - } -}); - -add_test(function test_bothEnginesSpecified() { - _("Test: All engines are synced when specified in the correct order (1)."); - let server = setUp(); - - try { - _("Sync with both engines specified."); - Service.sync(["steam", "stirling"]); - deepEqual(syncedEngines, ["steam", "stirling"]) - - } finally { - Service.startOver(); - server.stop(run_next_test); - } -}); - -add_test(function test_bothEnginesSpecified() { - _("Test: All engines are synced when specified in the correct order (2)."); - let server = setUp(); - - try { - _("Sync with both engines specified."); - Service.sync(["stirling", "steam"]); - deepEqual(syncedEngines, ["stirling", "steam"]) - - } finally { - Service.startOver(); - server.stop(run_next_test); - } -}); - -add_test(function test_bothEnginesDefault() { - _("Test: All engines are synced when nothing is specified."); - let server = setUp(); - - try { - Service.sync(); - deepEqual(syncedEngines, ["steam", "stirling"]) - - } finally { - Service.startOver(); - server.stop(run_next_test); - } -}); diff --git a/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js b/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js index ee1800fd3..c945cb6c2 100644 --- a/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js +++ b/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js @@ -41,15 +41,13 @@ function StirlingEngine() { StirlingEngine.prototype = { __proto__: SteamEngine.prototype, // This engine's enabled state is the same as the SteamEngine's. 
- get prefName() { - return "steam"; - } + get prefName() "steam" }; Service.engineManager.register(StirlingEngine); // Tracking info/collections. -var collectionsHelper = track_collections_helper(); -var upd = collectionsHelper.with_updated_collection; +let collectionsHelper = track_collections_helper(); +let upd = collectionsHelper.with_updated_collection; function sync_httpd_setup(handlers) { @@ -86,7 +84,6 @@ function run_test() { initTestLogging("Trace"); Log.repository.getLogger("Sync.Service").level = Log.Level.Trace; Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace; - validate_all_future_pings(); run_next_test(); } diff --git a/services/sync/tests/unit/test_service_wipeServer.js b/services/sync/tests/unit/test_service_wipeServer.js index 9320f4b88..3fc45cf86 100644 --- a/services/sync/tests/unit/test_service_wipeServer.js +++ b/services/sync/tests/unit/test_service_wipeServer.js @@ -31,7 +31,7 @@ FakeCollection.prototype = { } }; -function* setUpTestFixtures(server) { +function setUpTestFixtures(server) { let cryptoService = new FakeCryptoService(); Service.serverURL = server.baseURI + "/"; @@ -52,7 +52,7 @@ function promiseStopServer(server) { return deferred.promise; } -add_identity_test(this, function* test_wipeServer_list_success() { +add_identity_test(this, function test_wipeServer_list_success() { _("Service.wipeServer() deletes collections given as argument."); let steam_coll = new FakeCollection(); @@ -86,7 +86,7 @@ add_identity_test(this, function* test_wipeServer_list_success() { } }); -add_identity_test(this, function* test_wipeServer_list_503() { +add_identity_test(this, function test_wipeServer_list_503() { _("Service.wipeServer() deletes collections given as argument."); let steam_coll = new FakeCollection(); @@ -127,7 +127,7 @@ add_identity_test(this, function* test_wipeServer_list_503() { } }); -add_identity_test(this, function* test_wipeServer_all_success() { +add_identity_test(this, function test_wipeServer_all_success() { _("Service.wipeServer() deletes all the things."); /** @@ -157,7 +157,7 @@ add_identity_test(this, function* test_wipeServer_all_success() { Svc.Prefs.resetBranch(""); }); -add_identity_test(this, function* test_wipeServer_all_404() { +add_identity_test(this, function test_wipeServer_all_404() { _("Service.wipeServer() accepts a 404."); /** @@ -189,7 +189,7 @@ add_identity_test(this, function* test_wipeServer_all_404() { Svc.Prefs.resetBranch(""); }); -add_identity_test(this, function* test_wipeServer_all_503() { +add_identity_test(this, function test_wipeServer_all_503() { _("Service.wipeServer() throws if it encounters a non-200/404 response."); /** @@ -221,7 +221,7 @@ add_identity_test(this, function* test_wipeServer_all_503() { Svc.Prefs.resetBranch(""); }); -add_identity_test(this, function* test_wipeServer_all_connectionRefused() { +add_identity_test(this, function test_wipeServer_all_connectionRefused() { _("Service.wipeServer() throws if it encounters a network problem."); let server = httpd_setup({}); yield setUpTestFixtures(server); diff --git a/services/sync/tests/unit/test_status.js b/services/sync/tests/unit/test_status.js index 378aafe90..bc2d67f42 100644 --- a/services/sync/tests/unit/test_status.js +++ b/services/sync/tests/unit/test_status.js @@ -18,9 +18,9 @@ function run_test() { // Check login status - for (let code of [LOGIN_FAILED_NO_USERNAME, - LOGIN_FAILED_NO_PASSWORD, - LOGIN_FAILED_NO_PASSPHRASE]) { + for each (let code in [LOGIN_FAILED_NO_USERNAME, + LOGIN_FAILED_NO_PASSWORD, + 
LOGIN_FAILED_NO_PASSPHRASE]) { Status.login = code; do_check_eq(Status.login, code); do_check_eq(Status.service, CLIENT_NOT_CONFIGURED); diff --git a/services/sync/tests/unit/test_syncedtabs.js b/services/sync/tests/unit/test_syncedtabs.js deleted file mode 100644 index fe2cb6d1b..000000000 --- a/services/sync/tests/unit/test_syncedtabs.js +++ /dev/null @@ -1,221 +0,0 @@ -/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- - * vim:set ts=2 sw=2 sts=2 et: -*/ -"use strict"; - -Cu.import("resource://services-sync/main.js"); -Cu.import("resource://services-sync/SyncedTabs.jsm"); -Cu.import("resource://gre/modules/Log.jsm"); - -const faviconService = Cc["@mozilla.org/browser/favicon-service;1"] - .getService(Ci.nsIFaviconService); - -Log.repository.getLogger("Sync.RemoteTabs").addAppender(new Log.DumpAppender()); - -// A mock "Tabs" engine which the SyncedTabs module will use instead of the real -// engine. We pass a constructor that Sync creates. -function MockTabsEngine() { - this.clients = {}; // We'll set this dynamically -} - -MockTabsEngine.prototype = { - name: "tabs", - enabled: true, - - getAllClients() { - return this.clients; - }, - - getOpenURLs() { - return new Set(); - }, -} - -// A clients engine that doesn't need to be a constructor. -let MockClientsEngine = { - clientSettings: null, // Set in `configureClients`. - - isMobile(guid) { - if (!guid.endsWith("desktop") && !guid.endsWith("mobile")) { - throw new Error("this module expected guids to end with 'desktop' or 'mobile'"); - } - return guid.endsWith("mobile"); - }, - remoteClientExists(id) { - return this.clientSettings[id] !== false; - }, - getClientName(id) { - if (this.clientSettings[id]) { - return this.clientSettings[id]; - } - let engine = Weave.Service.engineManager.get("tabs"); - return engine.clients[id].clientName; - }, -} - -// Configure Sync with our mock tabs engine and force it to become initialized. -Services.prefs.setCharPref("services.sync.username", "someone@somewhere.com"); - -Weave.Service.engineManager.unregister("tabs"); -Weave.Service.engineManager.register(MockTabsEngine); -Weave.Service.clientsEngine = MockClientsEngine; - -// Tell the Sync XPCOM service it is initialized. -let weaveXPCService = Cc["@mozilla.org/weave/service;1"] - .getService(Ci.nsISupports) - .wrappedJSObject; -weaveXPCService.ready = true; - -function configureClients(clients, clientSettings = {}) { - // Configure the instance Sync created. - let engine = Weave.Service.engineManager.get("tabs"); - // each client record is expected to have an id. - for (let [guid, client] of Object.entries(clients)) { - client.id = guid; - } - engine.clients = clients; - // Apply clients collection overrides. - MockClientsEngine.clientSettings = clientSettings; - // Send an observer that pretends the engine just finished a sync. - Services.obs.notifyObservers(null, "weave:engine:sync:finish", "tabs"); -} - -// The tests. -add_task(function* test_noClients() { - // no clients, can't be tabs. 
- yield configureClients({}); - - let tabs = yield SyncedTabs.getTabClients(); - equal(Object.keys(tabs).length, 0); -}); - -add_task(function* test_clientWithTabs() { - yield configureClients({ - guid_desktop: { - clientName: "My Desktop", - tabs: [ - { - urlHistory: ["http://foo.com/"], - icon: "http://foo.com/favicon", - }], - }, - guid_mobile: { - clientName: "My Phone", - tabs: [], - } - }); - - let clients = yield SyncedTabs.getTabClients(); - equal(clients.length, 2); - clients.sort((a, b) => { return a.name.localeCompare(b.name);}); - equal(clients[0].tabs.length, 1); - equal(clients[0].tabs[0].url, "http://foo.com/"); - equal(clients[0].tabs[0].icon, "http://foo.com/favicon"); - // second client has no tabs. - equal(clients[1].tabs.length, 0); -}); - -add_task(function* test_staleClientWithTabs() { - yield configureClients({ - guid_desktop: { - clientName: "My Desktop", - tabs: [ - { - urlHistory: ["http://foo.com/"], - icon: "http://foo.com/favicon", - }], - }, - guid_mobile: { - clientName: "My Phone", - tabs: [], - }, - guid_stale_mobile: { - clientName: "My Deleted Phone", - tabs: [], - }, - guid_stale_desktop: { - clientName: "My Deleted Laptop", - tabs: [ - { - urlHistory: ["https://bar.com/"], - icon: "https://bar.com/favicon", - }], - }, - guid_stale_name_desktop: { - clientName: "My Generic Device", - tabs: [ - { - urlHistory: ["https://example.edu/"], - icon: "https://example.edu/favicon", - }], - }, - }, { - guid_stale_mobile: false, - guid_stale_desktop: false, - // We should always use the device name from the clients collection, instead - // of the possibly stale tabs collection. - guid_stale_name_desktop: "My Laptop", - }); - let clients = yield SyncedTabs.getTabClients(); - clients.sort((a, b) => { return a.name.localeCompare(b.name);}); - equal(clients.length, 3); - equal(clients[0].name, "My Desktop"); - equal(clients[0].tabs.length, 1); - equal(clients[0].tabs[0].url, "http://foo.com/"); - equal(clients[1].name, "My Laptop"); - equal(clients[1].tabs.length, 1); - equal(clients[1].tabs[0].url, "https://example.edu/"); - equal(clients[2].name, "My Phone"); - equal(clients[2].tabs.length, 0); -}); - -add_task(function* test_clientWithTabsIconsDisabled() { - Services.prefs.setBoolPref("services.sync.syncedTabs.showRemoteIcons", false); - yield configureClients({ - guid_desktop: { - clientName: "My Desktop", - tabs: [ - { - urlHistory: ["http://foo.com/"], - icon: "http://foo.com/favicon", - }], - }, - }); - - let clients = yield SyncedTabs.getTabClients(); - equal(clients.length, 1); - clients.sort((a, b) => { return a.name.localeCompare(b.name);}); - equal(clients[0].tabs.length, 1); - equal(clients[0].tabs[0].url, "http://foo.com/"); - // expect the default favicon (empty string) due to the pref being false. - equal(clients[0].tabs[0].icon, ""); - Services.prefs.clearUserPref("services.sync.syncedTabs.showRemoteIcons"); -}); - -add_task(function* test_filter() { - // Nothing matches. - yield configureClients({ - guid_desktop: { - clientName: "My Desktop", - tabs: [ - { - urlHistory: ["http://foo.com/"], - title: "A test page.", - }, - { - urlHistory: ["http://bar.com/"], - title: "Another page.", - }], - }, - }); - - let clients = yield SyncedTabs.getTabClients("foo"); - equal(clients.length, 1); - equal(clients[0].tabs.length, 1); - equal(clients[0].tabs[0].url, "http://foo.com/"); - // check it matches the title. 
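
test_filter expects getTabClients(query) to keep a tab when the query matches either its URL or its title. One plausible way to write that filtering step; the record shapes and the rule for dropping clients with no matching tabs are assumptions here, not the real SyncedTabs internals:

// clients: [{ name, tabs: [{ url, title }] }]; keep only tabs matching
// the query, then drop clients left with no tabs at all.
function filterTabClients(clients, query) {
  if (!query) return clients;
  const q = query.toLowerCase();
  return clients
    .map(client => ({
      name: client.name,
      tabs: client.tabs.filter(t =>
        (t.url && t.url.toLowerCase().includes(q)) ||
        (t.title && t.title.toLowerCase().includes(q))),
    }))
    .filter(client => client.tabs.length > 0);
}

const clients = [{ name: "My Desktop", tabs: [
  { url: "http://foo.com/", title: "A test page." },
  { url: "http://bar.com/", title: "Another page." },
]}];
console.log(filterTabClients(clients, "test")[0].tabs.length); // 1
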
- clients = yield SyncedTabs.getTabClients("test"); - equal(clients.length, 1); - equal(clients[0].tabs.length, 1); - equal(clients[0].tabs[0].url, "http://foo.com/"); -}); diff --git a/services/sync/tests/unit/test_syncengine.js b/services/sync/tests/unit/test_syncengine.js index 8c01ca048..393e49607 100644 --- a/services/sync/tests/unit/test_syncengine.js +++ b/services/sync/tests/unit/test_syncengine.js @@ -10,7 +10,7 @@ function makeSteamEngine() { return new SyncEngine('Steam', Service); } -var server; +let server; function test_url_attributes() { _("SyncEngine url attributes"); @@ -35,12 +35,12 @@ function test_syncID() { do_check_eq(Svc.Prefs.get("steam.syncID"), undefined); // Performing the first get on the attribute will generate a new GUID. - do_check_eq(engine.syncID, "fake-guid-00"); - do_check_eq(Svc.Prefs.get("steam.syncID"), "fake-guid-00"); + do_check_eq(engine.syncID, "fake-guid-0"); + do_check_eq(Svc.Prefs.get("steam.syncID"), "fake-guid-0"); Svc.Prefs.set("steam.syncID", Utils.makeGUID()); - do_check_eq(Svc.Prefs.get("steam.syncID"), "fake-guid-01"); - do_check_eq(engine.syncID, "fake-guid-01"); + do_check_eq(Svc.Prefs.get("steam.syncID"), "fake-guid-1"); + do_check_eq(engine.syncID, "fake-guid-1"); } finally { Svc.Prefs.resetBranch(""); } diff --git a/services/sync/tests/unit/test_syncengine_sync.js b/services/sync/tests/unit/test_syncengine_sync.js index 97289962f..6a6d047bf 100644 --- a/services/sync/tests/unit/test_syncengine_sync.js +++ b/services/sync/tests/unit/test_syncengine_sync.js @@ -15,22 +15,13 @@ function makeRotaryEngine() { return new RotaryEngine(Service); } -function clean() { +function cleanAndGo(server) { Svc.Prefs.resetBranch(""); Svc.Prefs.set("log.logger.engine.rotary", "Trace"); Service.recordManager.clearCache(); -} - -function cleanAndGo(server) { - clean(); server.stop(run_next_test); } -function promiseClean(server) { - clean(); - return new Promise(resolve => server.stop(resolve)); -} - function configureService(server, username, password) { Service.clusterURL = server.baseURI; @@ -181,7 +172,7 @@ add_test(function test_syncStartup_syncIDMismatchResetsClient() { try { // Confirm initial environment - do_check_eq(engine.syncID, 'fake-guid-00'); + do_check_eq(engine.syncID, 'fake-guid-0'); do_check_eq(engine._tracker.changedIDs["rekolok"], undefined); engine.lastSync = Date.now() / 1000; @@ -676,7 +667,7 @@ add_test(function test_processIncoming_mobile_batchSize() { }); -add_task(function *test_processIncoming_store_toFetch() { +add_test(function test_processIncoming_store_toFetch() { _("If processIncoming fails in the middle of a batch on mobile, state is saved in toFetch and lastSync."); Service.identity.username = "foo"; Svc.Prefs.set("client.type", "mobile"); @@ -723,10 +714,11 @@ add_task(function *test_processIncoming_store_toFetch() { let error; try { - yield sync_engine_and_validate_telem(engine, true); + engine.sync(); } catch (ex) { error = ex; } + do_check_true(!!error); // Only the first two batches have been applied. 
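
The promiseClean helper deleted above is nothing more than the callback-style server.stop wrapped in a Promise so a generator test can yield it; the surviving cleanAndGo keeps the plain callback form. The wrapper pattern in isolation:

// Promisify a callback-style stop(): resolve once the server is down.
function promiseStop(server) {
  return new Promise(resolve => server.stop(resolve));
}

// Usage inside a generator-style test:
//   yield promiseStop(server);   // instead of server.stop(run_next_test);
const fakeServer = { stop(cb) { setTimeout(cb, 0); } };
promiseStop(fakeServer).then(() => console.log("stopped"));
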
do_check_eq(Object.keys(engine._store.items).length, @@ -738,7 +730,7 @@ add_task(function *test_processIncoming_store_toFetch() { do_check_eq(engine.lastSync, collection.wbo("record-no-99").modified); } finally { - yield promiseClean(server); + cleanAndGo(server); } }); @@ -1229,7 +1221,7 @@ add_test(function test_processIncoming_failed_records() { }); -add_task(function *test_processIncoming_decrypt_failed() { +add_test(function test_processIncoming_decrypt_failed() { _("Ensure that records failing to decrypt are either replaced or refetched."); Service.identity.username = "foo"; @@ -1288,10 +1280,7 @@ add_task(function *test_processIncoming_decrypt_failed() { }); engine.lastSync = collection.wbo("nojson").modified - 1; - let ping = yield sync_engine_and_validate_telem(engine, true); - do_check_eq(ping.engines[0].incoming.applied, 2); - do_check_eq(ping.engines[0].incoming.failed, 4); - do_check_eq(ping.engines[0].incoming.newFailed, 4); + engine.sync(); do_check_eq(engine.previousFailed.length, 4); do_check_eq(engine.previousFailed[0], "nojson"); @@ -1305,7 +1294,7 @@ add_task(function *test_processIncoming_decrypt_failed() { do_check_eq(observerSubject.failed, 4); } finally { - yield promiseClean(server); + cleanAndGo(server); } }); @@ -1369,7 +1358,7 @@ add_test(function test_uploadOutgoing_toEmptyServer() { }); -add_task(function *test_uploadOutgoing_failed() { +add_test(function test_uploadOutgoing_failed() { _("SyncEngine._uploadOutgoing doesn't clear the tracker of objects that failed to upload."); Service.identity.username = "foo"; @@ -1412,7 +1401,7 @@ add_task(function *test_uploadOutgoing_failed() { do_check_eq(engine._tracker.changedIDs['peppercorn'], PEPPERCORN_CHANGED); engine.enabled = true; - yield sync_engine_and_validate_telem(engine, true); + engine.sync(); // Local timestamp has been set. do_check_true(engine.lastSyncLocal > 0); @@ -1427,14 +1416,11 @@ add_task(function *test_uploadOutgoing_failed() { do_check_eq(engine._tracker.changedIDs['peppercorn'], PEPPERCORN_CHANGED); } finally { - yield promiseClean(server); + cleanAndGo(server); } }); -/* A couple of "functional" tests to ensure we split records into appropriate - POST requests. More comprehensive unit-tests for this "batching" are in - test_postqueue.js. 
-*/ + add_test(function test_uploadOutgoing_MAX_UPLOAD_RECORDS() { _("SyncEngine._uploadOutgoing uploads in batches of MAX_UPLOAD_RECORDS"); @@ -1444,18 +1430,9 @@ add_test(function test_uploadOutgoing_MAX_UPLOAD_RECORDS() { // Let's count how many times the client posts to the server var noOfUploads = 0; collection.post = (function(orig) { - return function(data, request) { - // This test doesn't arrange for batch semantics - so we expect the - // first request to come in with batch=true and the others to have no - // batch related headers at all (as the first response did not provide - // a batch ID) - if (noOfUploads == 0) { - do_check_eq(request.queryString, "batch=true"); - } else { - do_check_eq(request.queryString, ""); - } + return function() { noOfUploads++; - return orig.call(this, data, request); + return orig.apply(this, arguments); }; }(collection.post)); @@ -1500,44 +1477,6 @@ add_test(function test_uploadOutgoing_MAX_UPLOAD_RECORDS() { } }); -add_test(function test_uploadOutgoing_largeRecords() { - _("SyncEngine._uploadOutgoing throws on records larger than MAX_UPLOAD_BYTES"); - - Service.identity.username = "foo"; - let collection = new ServerCollection(); - - let engine = makeRotaryEngine(); - engine.allowSkippedRecord = false; - engine._store.items["large-item"] = "Y".repeat(MAX_UPLOAD_BYTES*2); - engine._tracker.addChangedID("large-item", 0); - collection.insert("large-item"); - - - let meta_global = Service.recordManager.set(engine.metaURL, - new WBORecord(engine.metaURL)); - meta_global.payload.engines = {rotary: {version: engine.version, - syncID: engine.syncID}}; - - let server = sync_httpd_setup({ - "/1.1/foo/storage/rotary": collection.handler() - }); - - let syncTesting = new SyncTestingInfrastructure(server); - - try { - engine._syncStartup(); - let error = null; - try { - engine._uploadOutgoing(); - } catch (e) { - error = e; - } - ok(!!error); - } finally { - cleanAndGo(server); - } -}); - add_test(function test_syncFinish_noDelete() { _("SyncEngine._syncFinish resets tracker's score"); @@ -1667,7 +1606,7 @@ add_test(function test_syncFinish_deleteLotsInBatches() { }); -add_task(function *test_sync_partialUpload() { +add_test(function test_sync_partialUpload() { _("SyncEngine.sync() keeps changedIDs that couldn't be uploaded."); Service.identity.username = "foo"; @@ -1715,12 +1654,11 @@ add_task(function *test_sync_partialUpload() { engine.enabled = true; let error; try { - yield sync_engine_and_validate_telem(engine, true); + engine.sync(); } catch (ex) { error = ex; } - - ok(!!error); + do_check_true(!!error); // The timestamp has been updated. do_check_true(engine.lastSyncLocal > 456); @@ -1738,7 +1676,7 @@ add_task(function *test_sync_partialUpload() { } } finally { - yield promiseClean(server); + cleanAndGo(server); } }); diff --git a/services/sync/tests/unit/test_syncscheduler.js b/services/sync/tests/unit/test_syncscheduler.js index 730a3f996..d496b8838 100644 --- a/services/sync/tests/unit/test_syncscheduler.js +++ b/services/sync/tests/unit/test_syncscheduler.js @@ -26,13 +26,8 @@ CatapultEngine.prototype = { Service.engineManager.register(CatapultEngine); -var scheduler = new SyncScheduler(Service); -var clientsEngine = Service.clientsEngine; - -// Don't remove stale clients when syncing. This is a test-only workaround -// that lets us add clients directly to the store, without losing them on -// the next sync. 
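
The stub deleted just below, clientsEngine._removeRemoteClient = id => {}, is a classic test-only monkey-patch: overwrite one method so a sync cannot delete the clients the test planted. When such a patch has to be undone afterwards, the usual shape saves the original and returns a restore function (names illustrative):

// Temporarily replace obj[name] with fn; returns a restore function.
function stubMethod(obj, name, fn) {
  const original = obj[name];
  obj[name] = fn;
  return () => { obj[name] = original; };
}

const engine = { removeClient(id) { throw new Error("should not run"); } };
const restore = stubMethod(engine, "removeClient", id => { /* no-op */ });
engine.removeClient("guid_desktop"); // safely does nothing
restore();                           // original behavior is back
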
-clientsEngine._removeRemoteClient = id => {}; +let scheduler = new SyncScheduler(Service); +let clientsEngine = Service.clientsEngine; function sync_httpd_setup() { let global = new ServerWBO("global", { @@ -74,7 +69,6 @@ function setUp(server) { function cleanUpAndGo(server) { let deferred = Promise.defer(); Utils.nextTick(function () { - clientsEngine._store.wipe(); Service.startOver(); if (server) { server.stop(deferred.resolve); @@ -90,7 +84,6 @@ function run_test() { Log.repository.getLogger("Sync.Service").level = Log.Level.Trace; Log.repository.getLogger("Sync.scheduler").level = Log.Level.Trace; - validate_all_future_pings(); // The scheduler checks Weave.fxaEnabled to determine whether to use // FxA defaults or legacy defaults. As .fxaEnabled checks the username, we @@ -148,33 +141,22 @@ add_test(function test_prefAttributes() { Svc.Prefs.get("scheduler.immediateInterval") * 1000); _("Custom values for prefs will take effect after a restart."); - Svc.Prefs.set("scheduler.sync11.singleDeviceInterval", 420); - Svc.Prefs.set("scheduler.idleInterval", 230); - Svc.Prefs.set("scheduler.activeInterval", 180); + Svc.Prefs.set("scheduler.sync11.singleDeviceInterval", 42); + Svc.Prefs.set("scheduler.idleInterval", 23); + Svc.Prefs.set("scheduler.activeInterval", 18); Svc.Prefs.set("scheduler.immediateInterval", 31415); scheduler.setDefaults(); - do_check_eq(scheduler.idleInterval, 230000); - do_check_eq(scheduler.singleDeviceInterval, 420000); - do_check_eq(scheduler.activeInterval, 180000); + do_check_eq(scheduler.idleInterval, 23000); + do_check_eq(scheduler.singleDeviceInterval, 42000); + do_check_eq(scheduler.activeInterval, 18000); do_check_eq(scheduler.immediateInterval, 31415000); - _("Custom values for interval prefs can't be less than 60 seconds."); - Svc.Prefs.set("scheduler.sync11.singleDeviceInterval", 42); - Svc.Prefs.set("scheduler.idleInterval", 50); - Svc.Prefs.set("scheduler.activeInterval", 50); - Svc.Prefs.set("scheduler.immediateInterval", 10); - scheduler.setDefaults(); - do_check_eq(scheduler.idleInterval, 60000); - do_check_eq(scheduler.singleDeviceInterval, 60000); - do_check_eq(scheduler.activeInterval, 60000); - do_check_eq(scheduler.immediateInterval, 60000); - Svc.Prefs.resetBranch(""); scheduler.setDefaults(); run_next_test(); }); -add_identity_test(this, function* test_updateClientMode() { +add_identity_test(this, function test_updateClientMode() { _("Test updateClientMode adjusts scheduling attributes based on # of clients appropriately"); do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD); do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval); @@ -204,7 +186,7 @@ add_identity_test(this, function* test_updateClientMode() { yield cleanUpAndGo(); }); -add_identity_test(this, function* test_masterpassword_locked_retry_interval() { +add_identity_test(this, function test_masterpassword_locked_retry_interval() { _("Test Status.login = MASTER_PASSWORD_LOCKED results in reschedule at MASTER_PASSWORD interval"); let loginFailed = false; Svc.Obs.add("weave:service:login:error", function onLoginError() { @@ -241,7 +223,7 @@ add_identity_test(this, function* test_masterpassword_locked_retry_interval() { yield cleanUpAndGo(server); }); -add_identity_test(this, function* test_calculateBackoff() { +add_identity_test(this, function test_calculateBackoff() { do_check_eq(Status.backoffInterval, 0); // Test no interval larger than the maximum backoff is used if @@ -263,7 +245,7 @@ add_identity_test(this, function* test_calculateBackoff() { yield 
cleanUpAndGo(); }); -add_identity_test(this, function* test_scheduleNextSync_nowOrPast() { +add_identity_test(this, function test_scheduleNextSync_nowOrPast() { let deferred = Promise.defer(); Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() { Svc.Obs.remove("weave:service:sync:finish", onSyncFinish); @@ -278,7 +260,7 @@ add_identity_test(this, function* test_scheduleNextSync_nowOrPast() { yield deferred.promise; }); -add_identity_test(this, function* test_scheduleNextSync_future_noBackoff() { +add_identity_test(this, function test_scheduleNextSync_future_noBackoff() { _("scheduleNextSync() uses the current syncInterval if no interval is provided."); // Test backoffInterval is 0 as expected. do_check_eq(Status.backoffInterval, 0); @@ -327,7 +309,7 @@ add_identity_test(this, function* test_scheduleNextSync_future_noBackoff() { yield cleanUpAndGo(); }); -add_identity_test(this, function* test_scheduleNextSync_future_backoff() { +add_identity_test(this, function test_scheduleNextSync_future_backoff() { _("scheduleNextSync() will honour backoff in all scheduling requests."); // Let's take a backoff interval that's bigger than the default sync interval. const BACKOFF = 7337; @@ -377,7 +359,7 @@ add_identity_test(this, function* test_scheduleNextSync_future_backoff() { yield cleanUpAndGo(); }); -add_identity_test(this, function* test_handleSyncError() { +add_identity_test(this, function test_handleSyncError() { let server = sync_httpd_setup(); yield setUp(server); @@ -443,7 +425,7 @@ add_identity_test(this, function* test_handleSyncError() { yield deferred.promise; }); -add_identity_test(this, function* test_client_sync_finish_updateClientMode() { +add_identity_test(this, function test_client_sync_finish_updateClientMode() { let server = sync_httpd_setup(); yield setUp(server); @@ -477,7 +459,7 @@ add_identity_test(this, function* test_client_sync_finish_updateClientMode() { yield cleanUpAndGo(server); }); -add_identity_test(this, function* test_autoconnect_nextSync_past() { +add_identity_test(this, function test_autoconnect_nextSync_past() { let deferred = Promise.defer(); // nextSync will be 0 by default, so it's way in the past. @@ -493,7 +475,7 @@ add_identity_test(this, function* test_autoconnect_nextSync_past() { yield deferred.promise; }); -add_identity_test(this, function* test_autoconnect_nextSync_future() { +add_identity_test(this, function test_autoconnect_nextSync_future() { let deferred = Promise.defer(); let previousSync = Date.now() + scheduler.syncInterval / 2; scheduler.nextSync = previousSync; @@ -522,7 +504,7 @@ add_identity_test(this, function* test_autoconnect_nextSync_future() { // XXX - this test can't be run with the browserid identity as it relies // on the syncKey getter behaving in a certain way... 
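
Throughout this file the commit turns function* tests back into plain functions while keeping their yields; under the legacy SpiderMonkey semantics this tree targets, any function body containing yield is still a generator, so the add_task/add_identity_test drivers keep working. How such a driver steps a generator and settles each yielded promise, sketched with standard function* syntax (runTask is an illustrative stand-in, not the real Task.jsm):

// Drive a generator that yields promises to completion.
function runTask(genFn) {
  const it = genFn();
  return new Promise((resolve, reject) => {
    function step(value) {
      let r;
      try { r = it.next(value); }
      catch (e) { return reject(e); }
      if (r.done) return resolve(r.value);
      Promise.resolve(r.value).then(step, reject);
    }
    step(undefined);
  });
}

runTask(function* () {
  const n = yield Promise.resolve(42); // waits for the promise
  console.log(n);                      // 42
});
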
-add_task(function* test_autoconnect_mp_locked() { +add_task(function test_autoconnect_mp_locked() { let server = sync_httpd_setup(); yield setUp(server); @@ -559,7 +541,7 @@ add_task(function* test_autoconnect_mp_locked() { yield deferred.promise; }); -add_identity_test(this, function* test_no_autoconnect_during_wizard() { +add_identity_test(this, function test_no_autoconnect_during_wizard() { let server = sync_httpd_setup(); yield setUp(server); @@ -582,7 +564,7 @@ add_identity_test(this, function* test_no_autoconnect_during_wizard() { yield deferred.promise; }); -add_identity_test(this, function* test_no_autoconnect_status_not_ok() { +add_identity_test(this, function test_no_autoconnect_status_not_ok() { let server = sync_httpd_setup(); // Ensure we don't actually try to sync (or log in for that matter). @@ -605,7 +587,7 @@ add_identity_test(this, function* test_no_autoconnect_status_not_ok() { yield deferred.promise; }); -add_identity_test(this, function* test_autoconnectDelay_pref() { +add_identity_test(this, function test_autoconnectDelay_pref() { let deferred = Promise.defer(); Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() { Svc.Obs.remove("weave:service:sync:finish", onSyncFinish); @@ -625,7 +607,7 @@ add_identity_test(this, function* test_autoconnectDelay_pref() { yield deferred.promise; }); -add_identity_test(this, function* test_idle_adjustSyncInterval() { +add_identity_test(this, function test_idle_adjustSyncInterval() { // Confirm defaults. do_check_eq(scheduler.idle, false); @@ -645,7 +627,7 @@ add_identity_test(this, function* test_idle_adjustSyncInterval() { yield cleanUpAndGo(); }); -add_identity_test(this, function* test_back_triggersSync() { +add_identity_test(this, function test_back_triggersSync() { // Confirm defaults. do_check_false(scheduler.idle); do_check_eq(Status.backoffInterval, 0); @@ -668,7 +650,7 @@ add_identity_test(this, function* test_back_triggersSync() { yield deferred.promise; }); -add_identity_test(this, function* test_active_triggersSync_observesBackoff() { +add_identity_test(this, function test_active_triggersSync_observesBackoff() { // Confirm defaults. do_check_false(scheduler.idle); @@ -699,7 +681,7 @@ add_identity_test(this, function* test_active_triggersSync_observesBackoff() { yield deferred.promise; }); -add_identity_test(this, function* test_back_debouncing() { +add_identity_test(this, function test_back_debouncing() { _("Ensure spurious back-then-idle events, as observed on OS X, don't trigger a sync."); // Confirm defaults. 
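
The scheduleNextSync tests above pin down one contract: whatever delay a caller requests, the next sync must not fire before any server-imposed backoff expires. Reduced to arithmetic (an illustration, not the SyncScheduler source):

// Pick the delay for the next sync, honouring a server backoff.
// requested: caller's desired delay (ms); backoffUntil: epoch ms, 0 if none.
function nextSyncDelay(requested, backoffUntil, now = Date.now()) {
  const untilBackoffEnds = Math.max(0, backoffUntil - now);
  return Math.max(requested, untilBackoffEnds);
}

const now = Date.now();
console.log(nextSyncDelay(60000, 0, now));             // 60000: no backoff
console.log(nextSyncDelay(60000, now + 7337000, now)); // 7337000: backoff wins
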
@@ -727,7 +709,7 @@ add_identity_test(this, function* test_back_debouncing() { yield deferred.promise; }); -add_identity_test(this, function* test_no_sync_node() { +add_identity_test(this, function test_no_sync_node() { // Test when Status.sync == NO_SYNC_NODE_FOUND // it is not overwritten on sync:finish let server = sync_httpd_setup(); @@ -742,7 +724,7 @@ add_identity_test(this, function* test_no_sync_node() { yield cleanUpAndGo(server); }); -add_identity_test(this, function* test_sync_failed_partial_500s() { +add_identity_test(this, function test_sync_failed_partial_500s() { _("Test a 5xx status calls handleSyncError."); scheduler._syncErrors = MAX_ERROR_COUNT_BEFORE_BACKOFF; let server = sync_httpd_setup(); @@ -769,7 +751,7 @@ add_identity_test(this, function* test_sync_failed_partial_500s() { yield cleanUpAndGo(server); }); -add_identity_test(this, function* test_sync_failed_partial_400s() { +add_identity_test(this, function test_sync_failed_partial_400s() { _("Test a non-5xx status doesn't call handleSyncError."); scheduler._syncErrors = MAX_ERROR_COUNT_BEFORE_BACKOFF; let server = sync_httpd_setup(); @@ -799,7 +781,7 @@ add_identity_test(this, function* test_sync_failed_partial_400s() { yield cleanUpAndGo(server); }); -add_identity_test(this, function* test_sync_X_Weave_Backoff() { +add_identity_test(this, function test_sync_X_Weave_Backoff() { let server = sync_httpd_setup(); yield setUp(server); @@ -842,9 +824,9 @@ add_identity_test(this, function* test_sync_X_Weave_Backoff() { Service.sync(); do_check_true(Status.backoffInterval >= BACKOFF * 1000); - // Allowing 3 seconds worth of leeway between when Status.minimumNextSync + // Allowing 1 second worth of leeway between when Status.minimumNextSync // was set and when this line gets executed. - let minimumExpectedDelay = (BACKOFF - 3) * 1000; + let minimumExpectedDelay = (BACKOFF - 1) * 1000; do_check_true(Status.minimumNextSync >= Date.now() + minimumExpectedDelay); // Verify that the next sync is actually going to wait that long. @@ -854,7 +836,7 @@ add_identity_test(this, function* test_sync_X_Weave_Backoff() { yield cleanUpAndGo(server); }); -add_identity_test(this, function* test_sync_503_Retry_After() { +add_identity_test(this, function test_sync_503_Retry_After() { let server = sync_httpd_setup(); yield setUp(server); @@ -901,9 +883,9 @@ add_identity_test(this, function* test_sync_503_Retry_After() { do_check_true(Status.enforceBackoff); do_check_true(Status.backoffInterval >= BACKOFF * 1000); - // Allowing 3 seconds worth of leeway between when Status.minimumNextSync + // Allowing 1 second worth of leeway between when Status.minimumNextSync // was set and when this line gets executed. - let minimumExpectedDelay = (BACKOFF - 3) * 1000; + let minimumExpectedDelay = (BACKOFF - 1) * 1000; do_check_true(Status.minimumNextSync >= Date.now() + minimumExpectedDelay); // Verify that the next sync is actually going to wait that long.
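
Both backoff tests assert the same chain: a Retry-After or X-Weave-Backoff header carries seconds, Status.backoffInterval stores milliseconds, and Status.minimumNextSync is anchored to the moment the header was seen, which is why the assertions build in a little leeway. A sketch of that handling, with hypothetical names:

// Apply a server backoff from a response header value in seconds.
const Status = { backoffInterval: 0, minimumNextSync: 0, enforceBackoff: false };

function observeBackoff(headerSeconds) {
  const ms = parseInt(headerSeconds, 10) * 1000;
  if (!Number.isFinite(ms) || ms <= 0) return;
  Status.backoffInterval = ms;
  Status.minimumNextSync = Date.now() + ms; // no sync before this moment
  Status.enforceBackoff = true;
}

observeBackoff("42"); // e.g. from "Retry-After: 42" on a 503
console.log(Status.minimumNextSync > Date.now()); // true
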
@@ -913,7 +895,7 @@ add_identity_test(this, function* test_sync_503_Retry_After() { yield cleanUpAndGo(server); }); -add_identity_test(this, function* test_loginError_recoverable_reschedules() { +add_identity_test(this, function test_loginError_recoverable_reschedules() { _("Verify that a recoverable login error schedules a new sync."); yield configureIdentity({username: "johndoe"}); Service.serverURL = "http://localhost:1234/"; @@ -957,7 +939,7 @@ add_identity_test(this, function* test_loginError_recoverable_reschedules() { yield deferred.promise; }); -add_identity_test(this, function* test_loginError_fatal_clearsTriggers() { +add_identity_test(this, function test_loginError_fatal_clearsTriggers() { _("Verify that a fatal login error clears sync triggers."); yield configureIdentity({username: "johndoe"}); @@ -974,22 +956,11 @@ add_identity_test(this, function* test_loginError_fatal_clearsTriggers() { Svc.Obs.add("weave:service:login:error", function onLoginError() { Svc.Obs.remove("weave:service:login:error", onLoginError); Utils.nextTick(function aLittleBitAfterLoginError() { + do_check_eq(Status.login, LOGIN_FAILED_LOGIN_REJECTED); + + do_check_eq(scheduler.nextSync, 0); + do_check_eq(scheduler.syncTimer, null); - if (isConfiguredWithLegacyIdentity()) { - // for the "legacy" identity, a 401 on info/collections means the - // password is wrong, so we enter a "login rejected" state. - do_check_eq(Status.login, LOGIN_FAILED_LOGIN_REJECTED); - - do_check_eq(scheduler.nextSync, 0); - do_check_eq(scheduler.syncTimer, null); - } else { - // For the FxA identity, a 401 on info/collections means a transient - // error, probably due to an inability to fetch a token. - do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR); - // syncs should still be scheduled. - do_check_true(scheduler.nextSync > Date.now()); - do_check_true(scheduler.syncTimer.delay > 0); - } cleanUpAndGo(server).then(deferred.resolve); }); }); @@ -1004,7 +975,7 @@ add_identity_test(this, function* test_loginError_fatal_clearsTriggers() { yield deferred.promise; }); -add_identity_test(this, function* test_proper_interval_on_only_failing() { +add_identity_test(this, function test_proper_interval_on_only_failing() { _("Ensure proper behavior when only failed records are applied."); // If an engine reports that no records succeeded, we shouldn't decrease the diff --git a/services/sync/tests/unit/test_syncstoragerequest.js b/services/sync/tests/unit/test_syncstoragerequest.js index 14e5daade..7c5246bab 100644 --- a/services/sync/tests/unit/test_syncstoragerequest.js +++ b/services/sync/tests/unit/test_syncstoragerequest.js @@ -8,9 +8,6 @@ Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); Cu.import("resource://testing-common/services/sync/utils.js"); -var httpProtocolHandler = Cc["@mozilla.org/network/protocol;1?name=http"] - .getService(Ci.nsIHttpProtocolHandler); - function run_test() { Log.repository.getLogger("Sync.RESTRequest").level = Log.Level.Trace; initTestLogging(); @@ -25,7 +22,6 @@ add_test(function test_user_agent_desktop() { let server = httpd_setup({"/resource": handler}); let expectedUA = Services.appinfo.name + "/" + Services.appinfo.version + - " (" + httpProtocolHandler.oscpu + ")" + " FxSync/" + WEAVE_VERSION + "." 
+ Services.appinfo.appBuildID + ".desktop"; @@ -45,7 +41,6 @@ add_test(function test_user_agent_mobile() { Svc.Prefs.set("client.type", "mobile"); let expectedUA = Services.appinfo.name + "/" + Services.appinfo.version + - " (" + httpProtocolHandler.oscpu + ")" + " FxSync/" + WEAVE_VERSION + "." + Services.appinfo.appBuildID + ".mobile"; diff --git a/services/sync/tests/unit/test_tab_engine.js b/services/sync/tests/unit/test_tab_engine.js index 049250230..db4b20a70 100644 --- a/services/sync/tests/unit/test_tab_engine.js +++ b/services/sync/tests/unit/test_tab_engine.js @@ -1,7 +1,6 @@ /* Any copyright is dedicated to the Public Domain. http://creativecommons.org/publicdomain/zero/1.0/ */ -Cu.import("resource://services-sync/constants.js"); Cu.import("resource://services-sync/engines/tabs.js"); Cu.import("resource://services-sync/record.js"); Cu.import("resource://services-sync/service.js"); @@ -24,12 +23,11 @@ add_test(function test_getOpenURLs() { _("Test getOpenURLs."); let [engine, store] = getMocks(); - let superLongURL = "http://" + (new Array(MAX_UPLOAD_BYTES).join("w")) + ".com/"; - let urls = ["http://bar.com", "http://foo.com", "http://foobar.com", superLongURL]; - function fourURLs() { + let urls = ["http://bar.com", "http://foo.com", "http://foobar.com"]; + function threeURLs() { return urls.pop(); } - store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, fourURLs, 1, 4); + store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, threeURLs, 1, 3); let matches; @@ -42,10 +40,6 @@ add_test(function test_getOpenURLs() { matches = openurlsset.has("http://barfoo.com"); ok(!matches); - _(" test matching works (too long)"); - matches = openurlsset.has(superLongURL); - ok(!matches); - run_next_test(); }); diff --git a/services/sync/tests/unit/test_tab_store.js b/services/sync/tests/unit/test_tab_store.js index 93b60f0c7..f8265492f 100644 --- a/services/sync/tests/unit/test_tab_store.js +++ b/services/sync/tests/unit/test_tab_store.js @@ -20,26 +20,32 @@ function test_create() { _("Create a first record"); let rec = {id: "id1", clientName: "clientName1", - cleartext: { "foo": "bar" }, + cleartext: "cleartext1", modified: 1000}; store.applyIncoming(rec); - deepEqual(store._remoteClients["id1"], { lastModified: 1000, foo: "bar" }); + do_check_eq(store._remoteClients["id1"], "cleartext1"); + do_check_eq(Svc.Prefs.get("notifyTabState"), 1); _("Create a second record"); rec = {id: "id2", clientName: "clientName2", - cleartext: { "foo2": "bar2" }, + cleartext: "cleartext2", modified: 2000}; store.applyIncoming(rec); - deepEqual(store._remoteClients["id2"], { lastModified: 2000, foo2: "bar2" }); + do_check_eq(store._remoteClients["id2"], "cleartext2"); + do_check_eq(Svc.Prefs.get("notifyTabState"), 0); _("Create a third record"); rec = {id: "id3", clientName: "clientName3", - cleartext: { "foo3": "bar3" }, + cleartext: "cleartext3", modified: 3000}; store.applyIncoming(rec); - deepEqual(store._remoteClients["id3"], { lastModified: 3000, foo3: "bar3" }); + do_check_eq(store._remoteClients["id3"], "cleartext3"); + do_check_eq(Svc.Prefs.get("notifyTabState"), 0); + + // reset the notifyTabState + Svc.Prefs.reset("notifyTabState"); } function test_getAllTabs() { @@ -53,20 +59,20 @@ function test_getAllTabs() { _("Get all tabs."); tabs = store.getAllTabs(); _("Tabs: " + JSON.stringify(tabs)); - equal(tabs.length, 1); - equal(tabs[0].title, "title"); - equal(tabs[0].urlHistory.length, 2); - equal(tabs[0].urlHistory[0], "http://foo.com"); - equal(tabs[0].urlHistory[1], 
"http://bar.com"); - equal(tabs[0].icon, "image"); - equal(tabs[0].lastUsed, 1); + do_check_eq(tabs.length, 1); + do_check_eq(tabs[0].title, "title"); + do_check_eq(tabs[0].urlHistory.length, 2); + do_check_eq(tabs[0].urlHistory[0], "http://foo.com"); + do_check_eq(tabs[0].urlHistory[1], "http://bar.com"); + do_check_eq(tabs[0].icon, "image"); + do_check_eq(tabs[0].lastUsed, 1); _("Get all tabs, and check that filtering works."); let twoUrls = ["about:foo", "http://fuubar.com"]; store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1, () => 2, () => twoUrls); tabs = store.getAllTabs(true); _("Filtered: " + JSON.stringify(tabs)); - equal(tabs.length, 0); + do_check_eq(tabs.length, 0); _("Get all tabs, and check that the entries safety limit works."); let allURLs = []; @@ -79,10 +85,10 @@ function test_getAllTabs() { tabs = store.getAllTabs((url) => url.startsWith("about")); _("Sliced: " + JSON.stringify(tabs)); - equal(tabs.length, 1); - equal(tabs[0].urlHistory.length, 25); - equal(tabs[0].urlHistory[0], "http://foo40.bar"); - equal(tabs[0].urlHistory[24], "http://foo16.bar"); + do_check_eq(tabs.length, 1); + do_check_eq(tabs[0].urlHistory.length, 25); + do_check_eq(tabs[0].urlHistory[0], "http://foo40.bar"); + do_check_eq(tabs[0].urlHistory[24], "http://foo16.bar"); } function test_createRecord() { @@ -99,14 +105,14 @@ function test_createRecord() { store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1); record = store.createRecord("fake-guid"); - ok(record instanceof TabSetRecord); - equal(record.tabs.length, 1); + do_check_true(record instanceof TabSetRecord); + do_check_eq(record.tabs.length, 1); _("create a big record"); store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, numtabs); record = store.createRecord("fake-guid"); - ok(record instanceof TabSetRecord); - equal(record.tabs.length, 256); + do_check_true(record instanceof TabSetRecord); + do_check_eq(record.tabs.length, 256); } function run_test() { diff --git a/services/sync/tests/unit/test_tab_tracker.js b/services/sync/tests/unit/test_tab_tracker.js index f98920a44..e7dd48829 100644 --- a/services/sync/tests/unit/test_tab_tracker.js +++ b/services/sync/tests/unit/test_tab_tracker.js @@ -5,7 +5,7 @@ Cu.import("resource://services-sync/engines/tabs.js"); Cu.import("resource://services-sync/service.js"); Cu.import("resource://services-sync/util.js"); -var clientsEngine = Service.clientsEngine; +let clientsEngine = Service.clientsEngine; function fakeSvcWinMediator() { // actions on windows are captured in logs @@ -15,11 +15,9 @@ function fakeSvcWinMediator() { getEnumerator: function() { return { cnt: 2, - hasMoreElements: function() { - return this.cnt-- > 0; - }, + hasMoreElements: function() this.cnt-- > 0, getNext: function() { - let elt = {addTopics: [], remTopics: [], numAPL: 0, numRPL: 0}; + let elt = {addTopics: [], remTopics: []}; logs.push(elt); return { addEventListener: function(topic) { @@ -27,15 +25,7 @@ function fakeSvcWinMediator() { }, removeEventListener: function(topic) { elt.remTopics.push(topic); - }, - gBrowser: { - addProgressListener() { - elt.numAPL++; - }, - removeProgressListener() { - elt.numRPL++; - }, - }, + } }; } }; @@ -61,7 +51,7 @@ function run_test() { logs = fakeSvcWinMediator(); Svc.Obs.notify("weave:engine:start-tracking"); do_check_eq(logs.length, 2); - for (let log of logs) { + for each (let log in logs) { do_check_eq(log.addTopics.length, 5); do_check_true(log.addTopics.indexOf("pageshow") >= 0); 
do_check_true(log.addTopics.indexOf("TabOpen") >= 0); @@ -69,15 +59,13 @@ function run_test() { do_check_true(log.addTopics.indexOf("TabSelect") >= 0); do_check_true(log.addTopics.indexOf("unload") >= 0); do_check_eq(log.remTopics.length, 0); - do_check_eq(log.numAPL, 1, "Added 1 progress listener"); - do_check_eq(log.numRPL, 0, "Didn't remove a progress listener"); } _("Test listeners are unregistered on windows"); logs = fakeSvcWinMediator(); Svc.Obs.notify("weave:engine:stop-tracking"); do_check_eq(logs.length, 2); - for (let log of logs) { + for each (let log in logs) { do_check_eq(log.addTopics.length, 0); do_check_eq(log.remTopics.length, 5); do_check_true(log.remTopics.indexOf("pageshow") >= 0); @@ -85,12 +73,10 @@ function run_test() { do_check_true(log.remTopics.indexOf("TabClose") >= 0); do_check_true(log.remTopics.indexOf("TabSelect") >= 0); do_check_true(log.remTopics.indexOf("unload") >= 0); - do_check_eq(log.numAPL, 0, "Didn't add a progress listener"); - do_check_eq(log.numRPL, 1, "Removed 1 progress listener"); } _("Test tab listener"); - for (let evttype of ["TabOpen", "TabClose", "TabSelect"]) { + for each (let evttype in ["TabOpen", "TabClose", "TabSelect"]) { // Pretend we just synced. tracker.clearChangedIDs(); do_check_false(tracker.modified); @@ -109,19 +95,4 @@ function run_test() { tracker.onTab({type: "pageshow", originalTarget: "pageshow"}); do_check_true(Utils.deepEquals(Object.keys(engine.getChangedIDs()), [clientsEngine.localID])); - - // Pretend we just synced and saw some progress listeners. - tracker.clearChangedIDs(); - do_check_false(tracker.modified); - tracker.onLocationChange({ isTopLevel: false }, undefined, undefined, 0); - do_check_false(tracker.modified, "non-toplevel request didn't flag as modified"); - - tracker.onLocationChange({ isTopLevel: true }, undefined, undefined, - Ci.nsIWebProgressListener.LOCATION_CHANGE_SAME_DOCUMENT); - do_check_false(tracker.modified, "location change within the same document request didn't flag as modified"); - - tracker.onLocationChange({ isTopLevel: true }, undefined, undefined, 0); - do_check_true(tracker.modified, "location change for a new top-level document flagged as modified"); - do_check_true(Utils.deepEquals(Object.keys(engine.getChangedIDs()), - [clientsEngine.localID])); } diff --git a/services/sync/tests/unit/test_telemetry.js b/services/sync/tests/unit/test_telemetry.js deleted file mode 100644 index 50a3d136b..000000000 --- a/services/sync/tests/unit/test_telemetry.js +++ /dev/null @@ -1,564 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. 
- http://creativecommons.org/publicdomain/zero/1.0/ */ - -Cu.import("resource://services-common/observers.js"); -Cu.import("resource://services-sync/telemetry.js"); -Cu.import("resource://services-sync/service.js"); -Cu.import("resource://services-sync/record.js"); -Cu.import("resource://services-sync/resource.js"); -Cu.import("resource://services-sync/constants.js"); -Cu.import("resource://services-sync/engines.js"); -Cu.import("resource://services-sync/engines/bookmarks.js"); -Cu.import("resource://services-sync/engines/clients.js"); -Cu.import("resource://testing-common/services/sync/utils.js"); -Cu.import("resource://testing-common/services/sync/fxa_utils.js"); -Cu.import("resource://testing-common/services/sync/rotaryengine.js"); -Cu.import("resource://gre/modules/osfile.jsm", this); - -Cu.import("resource://gre/modules/PlacesUtils.jsm"); -Cu.import("resource://services-sync/util.js"); - -initTestLogging("Trace"); - -function SteamStore(engine) { - Store.call(this, "Steam", engine); -} - -SteamStore.prototype = { - __proto__: Store.prototype, -}; - -function SteamTracker(name, engine) { - Tracker.call(this, name || "Steam", engine); -} - -SteamTracker.prototype = { - __proto__: Tracker.prototype -}; - -function SteamEngine(service) { - Engine.call(this, "steam", service); -} - -SteamEngine.prototype = { - __proto__: Engine.prototype, - _storeObj: SteamStore, - _trackerObj: SteamTracker, - _errToThrow: null, - _sync() { - if (this._errToThrow) { - throw this._errToThrow; - } - } -}; - -function BogusEngine(service) { - Engine.call(this, "bogus", service); -} - -BogusEngine.prototype = Object.create(SteamEngine.prototype); - -function cleanAndGo(server) { - Svc.Prefs.resetBranch(""); - Svc.Prefs.set("log.logger.engine.rotary", "Trace"); - Service.recordManager.clearCache(); - return new Promise(resolve => server.stop(resolve)); -} - -// Avoid addon manager complaining about not being initialized -Service.engineManager.unregister("addons"); - -add_identity_test(this, function *test_basic() { - let helper = track_collections_helper(); - let upd = helper.with_updated_collection; - - yield configureIdentity({ username: "johndoe" }); - let handlers = { - "/1.1/johndoe/info/collections": helper.handler, - "/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()), - "/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler()) - }; - - let collections = ["clients", "bookmarks", "forms", "history", "passwords", "prefs", "tabs"]; - - for (let coll of collections) { - handlers["/1.1/johndoe/storage/" + coll] = upd(coll, new ServerCollection({}, true).handler()); - } - - let server = httpd_setup(handlers); - Service.serverURL = server.baseURI; - - yield sync_and_validate_telem(true); - - yield new Promise(resolve => server.stop(resolve)); -}); - -add_task(function* test_processIncoming_error() { - let engine = new BookmarksEngine(Service); - let store = engine._store; - let server = serverForUsers({"foo": "password"}, { - meta: {global: {engines: {bookmarks: {version: engine.version, - syncID: engine.syncID}}}}, - bookmarks: {} - }); - new SyncTestingInfrastructure(server.server); - let collection = server.user("foo").collection("bookmarks"); - try { - // Create a bogus record that when synced down will provoke a - // network error which in turn provokes an exception in _processIncoming. 
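
The deleted telemetry test arranges for a record download to throw, then checks the ping reports a structured failureReason instead of a raw exception. A loose sketch of the classification those assertions imply; the branch conditions are guesses at the real mapping in telemetry.js, only the output shapes are taken from the test:

// Turn a caught sync error into the failureReason shape the ping carries.
function toFailureReason(ex) {
  if (ex && typeof ex.code === "number") {
    return { name: "nserror", code: ex.code };         // XPCOM-style errors
  }
  if (typeof ex === "string" && ex.startsWith("error.")) {
    return { name: "othererror", error: ex };          // internal error codes
  }
  return { name: "unexpectederror", error: String(ex) };
}

console.log(toFailureReason("error.engine.reason.record_download_fail"));
// { name: "othererror", error: "error.engine.reason.record_download_fail" }
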
- const BOGUS_GUID = "zzzzzzzzzzzz"; - let bogus_record = collection.insert(BOGUS_GUID, "I'm a bogus record!"); - bogus_record.get = function get() { - throw "Sync this!"; - }; - // Make the 10 minutes old so it will only be synced in the toFetch phase. - bogus_record.modified = Date.now() / 1000 - 60 * 10; - engine.lastSync = Date.now() / 1000 - 60; - engine.toFetch = [BOGUS_GUID]; - - let error, ping; - try { - yield sync_engine_and_validate_telem(engine, true, errPing => ping = errPing); - } catch(ex) { - error = ex; - } - ok(!!error); - ok(!!ping); - equal(ping.uid, "0".repeat(32)); - deepEqual(ping.failureReason, { - name: "othererror", - error: "error.engine.reason.record_download_fail" - }); - - equal(ping.engines.length, 1); - equal(ping.engines[0].name, "bookmarks"); - deepEqual(ping.engines[0].failureReason, { - name: "othererror", - error: "error.engine.reason.record_download_fail" - }); - - } finally { - store.wipe(); - yield cleanAndGo(server); - } -}); - -add_task(function *test_uploading() { - let engine = new BookmarksEngine(Service); - let store = engine._store; - let server = serverForUsers({"foo": "password"}, { - meta: {global: {engines: {bookmarks: {version: engine.version, - syncID: engine.syncID}}}}, - bookmarks: {} - }); - new SyncTestingInfrastructure(server.server); - - let parent = PlacesUtils.toolbarFolderId; - let uri = Utils.makeURI("http://getfirefox.com/"); - let title = "Get Firefox"; - - let bmk_id = PlacesUtils.bookmarks.insertBookmark(parent, uri, - PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!"); - - let guid = store.GUIDForId(bmk_id); - let record = store.createRecord(guid); - - let collection = server.user("foo").collection("bookmarks"); - try { - let ping = yield sync_engine_and_validate_telem(engine, false); - ok(!!ping); - equal(ping.engines.length, 1); - equal(ping.engines[0].name, "bookmarks"); - ok(!!ping.engines[0].outgoing); - greater(ping.engines[0].outgoing[0].sent, 0) - ok(!ping.engines[0].incoming); - - PlacesUtils.bookmarks.setItemTitle(bmk_id, "New Title"); - - store.wipe(); - engine.resetClient(); - - ping = yield sync_engine_and_validate_telem(engine, false); - equal(ping.engines.length, 1); - equal(ping.engines[0].name, "bookmarks"); - equal(ping.engines[0].outgoing.length, 1); - ok(!!ping.engines[0].incoming); - - } finally { - // Clean up. 
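
test_upload_failed below expects a sync's outgoing summary to read [{ sent: 3, failed: 2 }]: three records attempted, two rejected by the server. The bookkeeping is a simple tally (illustrative helper):

// Tally an upload attempt: which ids went up, which the server rejected.
function tallyOutgoing(attemptedIds, acceptedIds) {
  const accepted = new Set(acceptedIds);
  const failed = attemptedIds.filter(id => !accepted.has(id)).length;
  return { sent: attemptedIds.length, failed };
}

console.log(tallyOutgoing(
  ["flying", "scotsman", "peppercorn"],
  ["flying"]));
// { sent: 3, failed: 2 }
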
- store.wipe(); - yield cleanAndGo(server); - } -}); - -add_task(function *test_upload_failed() { - Service.identity.username = "foo"; - let collection = new ServerCollection(); - collection._wbos.flying = new ServerWBO('flying'); - - let server = sync_httpd_setup({ - "/1.1/foo/storage/rotary": collection.handler() - }); - - let syncTesting = new SyncTestingInfrastructure(server); - - let engine = new RotaryEngine(Service); - engine.lastSync = 123; // needs to be non-zero so that tracker is queried - engine.lastSyncLocal = 456; - engine._store.items = { - flying: "LNER Class A3 4472", - scotsman: "Flying Scotsman", - peppercorn: "Peppercorn Class" - }; - const FLYING_CHANGED = 12345; - const SCOTSMAN_CHANGED = 23456; - const PEPPERCORN_CHANGED = 34567; - engine._tracker.addChangedID("flying", FLYING_CHANGED); - engine._tracker.addChangedID("scotsman", SCOTSMAN_CHANGED); - engine._tracker.addChangedID("peppercorn", PEPPERCORN_CHANGED); - - let meta_global = Service.recordManager.set(engine.metaURL, new WBORecord(engine.metaURL)); - meta_global.payload.engines = { rotary: { version: engine.version, syncID: engine.syncID } }; - - try { - engine.enabled = true; - let ping = yield sync_engine_and_validate_telem(engine, true); - ok(!!ping); - equal(ping.engines.length, 1); - equal(ping.engines[0].incoming, null); - deepEqual(ping.engines[0].outgoing, [{ sent: 3, failed: 2 }]); - engine.lastSync = 123; - engine.lastSyncLocal = 456; - - ping = yield sync_engine_and_validate_telem(engine, true); - ok(!!ping); - equal(ping.engines.length, 1); - equal(ping.engines[0].incoming.reconciled, 1); - deepEqual(ping.engines[0].outgoing, [{ sent: 2, failed: 2 }]); - - } finally { - yield cleanAndGo(server); - } -}); - -add_task(function *test_sync_partialUpload() { - Service.identity.username = "foo"; - - let collection = new ServerCollection(); - let server = sync_httpd_setup({ - "/1.1/foo/storage/rotary": collection.handler() - }); - let syncTesting = new SyncTestingInfrastructure(server); - generateNewKeys(Service.collectionKeys); - - let engine = new RotaryEngine(Service); - engine.lastSync = 123; - engine.lastSyncLocal = 456; - - - // Create a bunch of records (and server side handlers) - for (let i = 0; i < 234; i++) { - let id = 'record-no-' + i; - engine._store.items[id] = "Record No. " + i; - engine._tracker.addChangedID(id, i); - // Let two items in the first upload batch fail. - if (i != 23 && i != 42) { - collection.insert(id); - } - } - - let meta_global = Service.recordManager.set(engine.metaURL, - new WBORecord(engine.metaURL)); - meta_global.payload.engines = {rotary: {version: engine.version, - syncID: engine.syncID}}; - - try { - engine.enabled = true; - let ping = yield sync_engine_and_validate_telem(engine, true); - - ok(!!ping); - ok(!ping.failureReason); - equal(ping.engines.length, 1); - equal(ping.engines[0].name, "rotary"); - ok(!ping.engines[0].incoming); - ok(!ping.engines[0].failureReason); - deepEqual(ping.engines[0].outgoing, [{ sent: 234, failed: 2 }]); - - collection.post = function() { throw "Failure"; } - - engine._store.items["record-no-1000"] = "Record No. 1000"; - engine._tracker.addChangedID("record-no-1000", 1000); - collection.insert("record-no-1000", 1000); - - engine.lastSync = 123; - engine.lastSyncLocal = 456; - ping = null; - - try { - // should throw - yield sync_engine_and_validate_telem(engine, true, errPing => ping = errPing); - } catch (e) {} - // It would be nice if we had a more descriptive error for this... 
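
The partial-upload test below and test_uploadOutgoing_MAX_UPLOAD_RECORDS earlier both rely on the engine splitting its outgoing queue into fixed-size POSTs; assuming the customary MAX_UPLOAD_RECORDS of 100, its 234 queued records mean three requests. The chunking itself:

// Split outgoing records into POST-sized batches.
function batches(records, max = 100 /* assumed MAX_UPLOAD_RECORDS */) {
  const out = [];
  for (let i = 0; i < records.length; i += max) {
    out.push(records.slice(i, i + max));
  }
  return out;
}

const queue = Array.from({ length: 234 }, (_, i) => "record-no-" + i);
console.log(batches(queue).map(b => b.length)); // [100, 100, 34]
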
- let uploadFailureError = { - name: "othererror", - error: "error.engine.reason.record_upload_fail" - }; - - ok(!!ping); - deepEqual(ping.failureReason, uploadFailureError); - equal(ping.engines.length, 1); - equal(ping.engines[0].name, "rotary"); - deepEqual(ping.engines[0].incoming, { - failed: 1, - newFailed: 1, - reconciled: 232 - }); - ok(!ping.engines[0].outgoing); - deepEqual(ping.engines[0].failureReason, uploadFailureError); - - } finally { - yield cleanAndGo(server); - } -}); - -add_task(function* test_generic_engine_fail() { - Service.engineManager.register(SteamEngine); - let engine = Service.engineManager.get("steam"); - engine.enabled = true; - let store = engine._store; - let server = serverForUsers({"foo": "password"}, { - meta: {global: {engines: {steam: {version: engine.version, - syncID: engine.syncID}}}}, - steam: {} - }); - new SyncTestingInfrastructure(server.server); - let e = new Error("generic failure message") - engine._errToThrow = e; - - try { - let ping = yield sync_and_validate_telem(true); - equal(ping.status.service, SYNC_FAILED_PARTIAL); - deepEqual(ping.engines.find(e => e.name === "steam").failureReason, { - name: "unexpectederror", - error: String(e) - }); - } finally { - Service.engineManager.unregister(engine); - yield cleanAndGo(server); - } -}); - -add_task(function* test_engine_fail_ioerror() { - Service.engineManager.register(SteamEngine); - let engine = Service.engineManager.get("steam"); - engine.enabled = true; - let store = engine._store; - let server = serverForUsers({"foo": "password"}, { - meta: {global: {engines: {steam: {version: engine.version, - syncID: engine.syncID}}}}, - steam: {} - }); - new SyncTestingInfrastructure(server.server); - // create an IOError to re-throw as part of Sync. - try { - // (Note that fakeservices.js has replaced Utils.jsonMove etc, but for - // this test we need the real one so we get real exceptions from the - // filesystem.) - yield Utils._real_jsonMove("file-does-not-exist", "anything", {}); - } catch (ex) { - engine._errToThrow = ex; - } - ok(engine._errToThrow, "expecting exception"); - - try { - let ping = yield sync_and_validate_telem(true); - equal(ping.status.service, SYNC_FAILED_PARTIAL); - let failureReason = ping.engines.find(e => e.name === "steam").failureReason; - equal(failureReason.name, "unexpectederror"); - // ensure the profile dir in the exception message has been stripped. - ok(!failureReason.error.includes(OS.Constants.Path.profileDir), failureReason.error); - ok(failureReason.error.includes("[profileDir]"), failureReason.error); - } finally { - Service.engineManager.unregister(engine); - yield cleanAndGo(server); - } -}); - -add_task(function* test_initial_sync_engines() { - Service.engineManager.register(SteamEngine); - let engine = Service.engineManager.get("steam"); - engine.enabled = true; - let store = engine._store; - let engines = {}; - // These are the only ones who actually have things to sync at startup. 
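
test_engine_fail_ioerror above checks that filesystem paths never leak into telemetry: the profile directory in failureReason.error must appear as the token [profileDir]. A sketch of that scrub (the helper name is hypothetical):

// Replace every occurrence of the profile path in an error message.
function scrubProfileDir(message, profileDir) {
  if (!profileDir) return message;
  return message.split(profileDir).join("[profileDir]");
}

console.log(scrubProfileDir(
  "Could not move /home/user/.profile/weave/tmp.json",
  "/home/user/.profile"));
// "Could not move [profileDir]/weave/tmp.json"
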
- let engineNames = ["clients", "bookmarks", "prefs", "tabs"]; - let conf = { meta: { global: { engines } } }; - for (let e of engineNames) { - engines[e] = { version: engine.version, syncID: engine.syncID }; - conf[e] = {}; - } - let server = serverForUsers({"foo": "password"}, conf); - new SyncTestingInfrastructure(server.server); - try { - let ping = yield wait_for_ping(() => Service.sync(), true); - - equal(ping.engines.find(e => e.name === "clients").outgoing[0].sent, 1); - equal(ping.engines.find(e => e.name === "tabs").outgoing[0].sent, 1); - - // for the rest we don't care about specifics - for (let e of ping.engines) { - if (!engineNames.includes(engine.name)) { - continue; - } - greaterOrEqual(e.took, 1); - ok(!!e.outgoing) - equal(e.outgoing.length, 1); - notEqual(e.outgoing[0].sent, undefined); - equal(e.outgoing[0].failed, undefined); - } - } finally { - yield cleanAndGo(server); - } -}); - -add_task(function* test_nserror() { - Service.engineManager.register(SteamEngine); - let engine = Service.engineManager.get("steam"); - engine.enabled = true; - let store = engine._store; - let server = serverForUsers({"foo": "password"}, { - meta: {global: {engines: {steam: {version: engine.version, - syncID: engine.syncID}}}}, - steam: {} - }); - new SyncTestingInfrastructure(server.server); - engine._errToThrow = Components.Exception("NS_ERROR_UNKNOWN_HOST", Cr.NS_ERROR_UNKNOWN_HOST); - try { - let ping = yield sync_and_validate_telem(true); - deepEqual(ping.status, { - service: SYNC_FAILED_PARTIAL, - sync: LOGIN_FAILED_NETWORK_ERROR - }); - let enginePing = ping.engines.find(e => e.name === "steam"); - deepEqual(enginePing.failureReason, { - name: "nserror", - code: Cr.NS_ERROR_UNKNOWN_HOST - }); - } finally { - Service.engineManager.unregister(engine); - yield cleanAndGo(server); - } -}); - -add_identity_test(this, function *test_discarding() { - let helper = track_collections_helper(); - let upd = helper.with_updated_collection; - let telem = get_sync_test_telemetry(); - telem.maxPayloadCount = 2; - telem.submissionInterval = Infinity; - let oldSubmit = telem.submit; - - let server; - try { - - yield configureIdentity({ username: "johndoe" }); - let handlers = { - "/1.1/johndoe/info/collections": helper.handler, - "/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()), - "/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler()) - }; - - let collections = ["clients", "bookmarks", "forms", "history", "passwords", "prefs", "tabs"]; - - for (let coll of collections) { - handlers["/1.1/johndoe/storage/" + coll] = upd(coll, new ServerCollection({}, true).handler()); - } - - server = httpd_setup(handlers); - Service.serverURL = server.baseURI; - telem.submit = () => ok(false, "Submitted telemetry ping when we should not have"); - - for (let i = 0; i < 5; ++i) { - Service.sync(); - } - telem.submit = oldSubmit; - telem.submissionInterval = -1; - let ping = yield sync_and_validate_telem(true, true); // with this we've synced 6 times - equal(ping.syncs.length, 2); - equal(ping.discarded, 4); - } finally { - telem.maxPayloadCount = 500; - telem.submissionInterval = -1; - telem.submit = oldSubmit; - if (server) { - yield new Promise(resolve => server.stop(resolve)); - } - } -}) - -add_task(function* test_no_foreign_engines_in_error_ping() { - Service.engineManager.register(BogusEngine); - let engine = Service.engineManager.get("bogus"); - engine.enabled = true; - let store = engine._store; - let server = serverForUsers({"foo": "password"}, { - 
meta: {global: {engines: {bogus: {version: engine.version, syncID: engine.syncID}}}}, - steam: {} - }); - engine._errToThrow = new Error("Oh no!"); - new SyncTestingInfrastructure(server.server); - try { - let ping = yield sync_and_validate_telem(true); - equal(ping.status.service, SYNC_FAILED_PARTIAL); - ok(ping.engines.every(e => e.name !== "bogus")); - } finally { - Service.engineManager.unregister(engine); - yield cleanAndGo(server); - } -}); - -add_task(function* test_sql_error() { - Service.engineManager.register(SteamEngine); - let engine = Service.engineManager.get("steam"); - engine.enabled = true; - let store = engine._store; - let server = serverForUsers({"foo": "password"}, { - meta: {global: {engines: {steam: {version: engine.version, - syncID: engine.syncID}}}}, - steam: {} - }); - new SyncTestingInfrastructure(server.server); - engine._sync = function() { - // Just grab a DB connection and issue a bogus SQL statement synchronously. - let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase).DBConnection; - Async.querySpinningly(db.createAsyncStatement("select bar from foo")); - }; - try { - let ping = yield sync_and_validate_telem(true); - let enginePing = ping.engines.find(e => e.name === "steam"); - deepEqual(enginePing.failureReason, { name: "sqlerror", code: 1 }); - } finally { - Service.engineManager.unregister(engine); - yield cleanAndGo(server); - } -}); - -add_task(function* test_no_foreign_engines_in_success_ping() { - Service.engineManager.register(BogusEngine); - let engine = Service.engineManager.get("bogus"); - engine.enabled = true; - let store = engine._store; - let server = serverForUsers({"foo": "password"}, { - meta: {global: {engines: {bogus: {version: engine.version, syncID: engine.syncID}}}}, - steam: {} - }); - - new SyncTestingInfrastructure(server.server); - try { - let ping = yield sync_and_validate_telem(); - ok(ping.engines.every(e => e.name !== "bogus")); - } finally { - Service.engineManager.unregister(engine); - yield cleanAndGo(server); - } -});
\ No newline at end of file diff --git a/services/sync/tests/unit/test_utils_catch.js b/services/sync/tests/unit/test_utils_catch.js index 5f50bf7e4..a10e5eb0d 100644 --- a/services/sync/tests/unit/test_utils_catch.js +++ b/services/sync/tests/unit/test_utils_catch.js @@ -8,46 +8,38 @@ function run_test() { catch: Utils.catch, _log: { debug: function(str) { - didThrow = str.search(/^Exception/) == 0; + didThrow = str.search(/^Exception: /) == 0; }, info: function(str) { wasLocked = str.indexOf("Cannot start sync: already syncing?") == 0; } }, - func: function() { - return this.catch(function() { - rightThis = this == obj; - didCall = true; - return 5; - })(); - }, + func: function() this.catch(function() { + rightThis = this == obj; + didCall = true; + return 5; + })(), - throwy: function() { - return this.catch(function() { - rightThis = this == obj; - didCall = true; - throw 10; - })(); - }, + throwy: function() this.catch(function() { + rightThis = this == obj; + didCall = true; + throw 10; + })(), - callbacky: function() { - return this.catch(function() { - rightThis = this == obj; - didCall = true; - throw 10; - }, function(ex) { - wasTen = (ex == 10) - })(); - }, + callbacky: function() this.catch(function() { + rightThis = this == obj; + didCall = true; + throw 10; + }, function(ex) { + wasTen = (ex == 10) + })(), - lockedy: function() { - return this.catch(function() { - rightThis = this == obj; - didCall = true; - throw("Could not acquire lock."); - })(); - } + lockedy: function() this.catch(function() { + rightThis = this == obj; + didCall = true; + throw("Could not acquire lock."); + })() }; _("Make sure a normal call will call and return"); diff --git a/services/sync/tests/unit/test_utils_deferGetSet.js b/services/sync/tests/unit/test_utils_deferGetSet.js index 9d58a9873..55c0fcb0e 100644 --- a/services/sync/tests/unit/test_utils_deferGetSet.js +++ b/services/sync/tests/unit/test_utils_deferGetSet.js @@ -6,12 +6,8 @@ function run_test() { base.prototype = { dst: {}, - get a() { - return "a"; - }, - set b(val) { - this.dst.b = val + "!!!"; - } + get a() "a", + set b(val) this.dst.b = val + "!!!" 
}; let src = new base(); diff --git a/services/sync/tests/unit/test_utils_deriveKey.js b/services/sync/tests/unit/test_utils_deriveKey.js index 17dd889c7..e205fa9f8 100644 --- a/services/sync/tests/unit/test_utils_deriveKey.js +++ b/services/sync/tests/unit/test_utils_deriveKey.js @@ -1,7 +1,7 @@ Cu.import("resource://services-crypto/WeaveCrypto.js"); Cu.import("resource://services-sync/util.js"); -var cryptoSvc = new WeaveCrypto(); +let cryptoSvc = new WeaveCrypto(); function run_test() { if (this.gczeal) { diff --git a/services/sync/tests/unit/test_utils_lock.js b/services/sync/tests/unit/test_utils_lock.js index d1830787e..fd8a4b1f5 100644 --- a/services/sync/tests/unit/test_utils_lock.js +++ b/services/sync/tests/unit/test_utils_lock.js @@ -27,23 +27,19 @@ function run_test() { this._locked = false; }, - func: function() { - return this._lock("Test utils lock", - function() { - rightThis = this == obj; - didCall = true; - return 5; - })(); - }, + func: function() this._lock("Test utils lock", + function() { + rightThis = this == obj; + didCall = true; + return 5; + })(), - throwy: function() { - return this._lock("Test utils lock throwy", - function() { - rightThis = this == obj; - didCall = true; - this.throwy(); - })(); - } + throwy: function() this._lock("Test utils lock throwy", + function() { + rightThis = this == obj; + didCall = true; + this.throwy(); + })() }; _("Make sure a normal call will call and return"); diff --git a/services/sync/tests/unit/test_utils_notify.js b/services/sync/tests/unit/test_utils_notify.js index 5bd38da5f..c191bbfef 100644 --- a/services/sync/tests/unit/test_utils_notify.js +++ b/services/sync/tests/unit/test_utils_notify.js @@ -9,21 +9,17 @@ function run_test() { trace: function() {} }, - func: function() { - return this.notify("bar", "baz", function() { - rightThis = this == obj; - didCall = true; - return 5; - })(); - }, + func: function() this.notify("bar", "baz", function() { + rightThis = this == obj; + didCall = true; + return 5; + })(), - throwy: function() { - return this.notify("bad", "one", function() { - rightThis = this == obj; - didCall = true; - throw 10; - })(); - } + throwy: function() this.notify("bad", "one", function() { + rightThis = this == obj; + didCall = true; + throw 10; + })() }; let state = 0; diff --git a/services/sync/tests/unit/test_warn_on_truncated_response.js b/services/sync/tests/unit/test_warn_on_truncated_response.js index 1f0d87ba9..a9f070ee4 100644 --- a/services/sync/tests/unit/test_warn_on_truncated_response.js +++ b/services/sync/tests/unit/test_warn_on_truncated_response.js @@ -12,11 +12,11 @@ function run_test() { run_next_test(); } -var BODY = "response body"; +let BODY = "response body"; // contentLength needs to be longer than the response body // length in order to get a mismatch between what is sent in // the response and the content-length header value. 
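
test_warn_on_truncated_response below declares a Content-Length one byte longer than the body it actually writes and expects the client to notice the shortfall. After the stream ends, the check amounts to a length comparison (illustrative):

// Warn when fewer bytes arrived than the server declared.
function checkTruncation(declaredLength, receivedBytes, log = console) {
  if (declaredLength != null && receivedBytes < declaredLength) {
    log.warn(`Truncated response: got ${receivedBytes} of ` +
             `${declaredLength} declared bytes.`);
    return true;
  }
  return false;
}

const BODY = "response body";
console.log(checkTruncation(BODY.length + 1, BODY.length)); // warns, true
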
-var contentLength = BODY.length + 1; +let contentLength = BODY.length + 1; function contentHandler(request, response) { _("Handling request."); diff --git a/services/sync/tests/unit/xpcshell.ini b/services/sync/tests/unit/xpcshell.ini index 4c0f0e7b7..dc33c0eb2 100644 --- a/services/sync/tests/unit/xpcshell.ini +++ b/services/sync/tests/unit/xpcshell.ini @@ -1,7 +1,8 @@ [DEFAULT] -head = head_appinfo.js ../../../common/tests/unit/head_helpers.js head_helpers.js head_http_server.js head_errorhandler_common.js +head = head_appinfo.js ../../../common/tests/unit/head_helpers.js head_helpers.js head_http_server.js tail = firefox-appdir = browser +skip-if = toolkit == 'gonk' support-files = addon1-search.xml bootstrap1-search.xml @@ -10,10 +11,6 @@ support-files = missing-xpi-search.xml places_v10_from_v11.sqlite rewrite-search.xml - sync_ping_schema.json - systemaddon-search.xml - !/services/common/tests/unit/head_helpers.js - !/toolkit/components/webextensions/test/xpcshell/head_sync.js # The manifest is roughly ordered from low-level to high-level. When making # systemic sweeping changes, this makes it easier to identify errors closer to @@ -39,7 +36,6 @@ support-files = # We have a number of other libraries that are pretty much standalone. [test_addon_utils.js] run-sequentially = Restarts server, can't change pref. -tags = addons [test_httpd_sync_server.js] [test_jpakeclient.js] # Bug 618233: this test produces random failures on Windows 7. @@ -56,7 +52,6 @@ skip-if = os == "win" || os == "android" # Generic Sync types. [test_browserid_identity.js] [test_collection_inc_get.js] -[test_collection_getBatched.js] [test_collections_recovery.js] [test_identity_manager.js] [test_keys.js] @@ -97,7 +92,6 @@ skip-if = os == "mac" || os == "linux" [test_service_sync_remoteSetup.js] # Bug 676978: test hangs on Android (see also testing/xpcshell/xpcshell.ini) skip-if = os == "android" -[test_service_sync_specified.js] [test_service_sync_updateEnabledEngines.js] # Bug 676978: test hangs on Android (see also testing/xpcshell/xpcshell.ini) skip-if = os == "android" @@ -109,8 +103,7 @@ skip-if = os == "mac" || os == "linux" [test_corrupt_keys.js] [test_declined.js] -[test_errorhandler_1.js] -[test_errorhandler_2.js] +[test_errorhandler.js] [test_errorhandler_filelog.js] # Bug 676978: test hangs on Android (see also testing/xpcshell/xpcshell.ini) skip-if = os == "android" @@ -121,6 +114,7 @@ skip-if = os == "android" [test_hmac_error.js] [test_interval_triggers.js] [test_node_reassignment.js] +[test_notifications.js] [test_score_triggers.js] [test_sendcredentials_controller.js] [test_status.js] @@ -136,18 +130,12 @@ skip-if = os == "android" # Finally, we test each engine. [test_addons_engine.js] run-sequentially = Hardcoded port in static files. -tags = addons [test_addons_reconciler.js] -tags = addons [test_addons_store.js] run-sequentially = Hardcoded port in static files. 
-tags = addons [test_addons_tracker.js] -tags = addons [test_bookmark_batch_fail.js] -[test_bookmark_duping.js] [test_bookmark_engine.js] -[test_bookmark_invalid.js] [test_bookmark_legacy_microsummaries_support.js] [test_bookmark_livemarks.js] [test_bookmark_order.js] @@ -158,13 +146,8 @@ tags = addons # Too many intermittent "ASSERTION: thread pool wasn't shutdown: '!mPool'" (bug 804479) skip-if = debug [test_bookmark_tracker.js] -requesttimeoutfactor = 4 -[test_bookmark_validator.js] [test_clients_engine.js] [test_clients_escape.js] -[test_extension_storage_crypto.js] -[test_extension_storage_engine.js] -[test_extension_storage_tracker.js] [test_forms_store.js] [test_forms_tracker.js] # Too many intermittent "ASSERTION: thread pool wasn't shutdown: '!mPool'" (bug 804479) @@ -176,21 +159,24 @@ skip-if = debug skip-if = debug [test_places_guid_downgrade.js] [test_password_store.js] -[test_password_validator.js] [test_password_tracker.js] # Too many intermittent "ASSERTION: thread pool wasn't shutdown: '!mPool'" (bug 804479) skip-if = debug [test_prefs_store.js] -support-files = prefs_test_prefs_store.js [test_prefs_tracker.js] [test_tab_engine.js] [test_tab_store.js] [test_tab_tracker.js] -[test_warn_on_truncated_response.js] -[test_postqueue.js] +[test_healthreport.js] +skip-if = ! healthreport + +[test_healthreport_migration.js] +skip-if = ! healthreport -# Synced tabs. -[test_syncedtabs.js] +[test_warn_on_truncated_response.js] -[test_telemetry.js] +# FxA migration +[test_block_sync.js] +[test_fxa_migration.js] +[test_fxa_migration_sentinel.js] diff --git a/services/sync/tps/extensions/mozmill/resource/driver/controller.js b/services/sync/tps/extensions/mozmill/resource/driver/controller.js index 8d66a41ae..d5948598e 100644 --- a/services/sync/tps/extensions/mozmill/resource/driver/controller.js +++ b/services/sync/tps/extensions/mozmill/resource/driver/controller.js @@ -5,9 +5,9 @@ var EXPORTED_SYMBOLS = ["MozMillController", "globalEventRegistry", "sleep", "windowMap"]; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cu = Components.utils; var EventUtils = {}; Cu.import('resource://mozmill/stdlib/EventUtils.js', EventUtils); @@ -44,11 +44,7 @@ waitForEvents.prototype = { node.firedEvents = {}; this.registry = {}; - if (!events) { - return; - } - for (var key in events) { - var e = events[key]; + for each (var e in events) { var listener = function (event) { this.firedEvents[event.type] = true; } @@ -870,7 +866,7 @@ MozMillController.prototype.mouseMove = function (doc, start, dest) { /** * Drag an element to the specified offset on another element, firing mouse and - * drag events. Adapted from EventUtils.js synthesizeDrop() + * drag events. 
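
The waitForEvents hunk above reintroduces "for each...in", and the elementslib.js and browserAdditions hunks below do the same for array comprehensions and guarded catch clauses. None of the three forms is standard JavaScript, so this sketch shows each legacy spelling in a comment next to a portable equivalent, using made-up data:

function TimeoutError(message) {
  this.message = message;
}

var events = { first: "focus", second: "click" };
var nodes = ["a", null, "b"];

// for each (var e in events) { ... }  -- iterates the *values*:
for (var key in events) {
  var e = events[key];
  // ... use e ("focus", then "click")
}

// [e for each (e in nodes) if (e)]    -- a filtering comprehension:
var kept = nodes.filter(function (e) { return e; }); // ["a", "b"]

// catch (ex if ex instanceof TimeoutError) { ... }  -- a guarded catch:
try {
  throw new TimeoutError("timed out");
} catch (ex) {
  if (!(ex instanceof TimeoutError)) {
    throw ex; // anything the guard would not have matched keeps propagating
  }
  // handle the timeout here, as the guarded branch did
}
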
Adapted from ChromeUtils.js synthesizeDrop() * * @deprecated Use the MozMillElement object * @@ -977,10 +973,7 @@ function browserAdditions (controller) { return windows.map.hasPageLoaded(utils.getWindowId(win)); }, "Timeout", timeout, aInterval); } - catch (ex) { - if (!(ex instanceof errors.TimeoutError)) { - throw ex; - } + catch (ex if ex instanceof errors.TimeoutError) { timed_out = true; } finally { diff --git a/services/sync/tps/extensions/mozmill/resource/driver/elementslib.js b/services/sync/tps/extensions/mozmill/resource/driver/elementslib.js index 4bf35a384..f08cf42f3 100644 --- a/services/sync/tps/extensions/mozmill/resource/driver/elementslib.js +++ b/services/sync/tps/extensions/mozmill/resource/driver/elementslib.js @@ -6,9 +6,9 @@ var EXPORTED_SYMBOLS = ["ID", "Link", "XPath", "Selector", "Name", "Anon", "Anon "Lookup", "_byID", "_byName", "_byAttrib", "_byAnonAttrib", ]; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cu = Components.utils; Cu.import("resource://gre/modules/Services.jsm"); @@ -304,7 +304,7 @@ var _returnResult = function (results) { var _forChildren = function (element, name, value) { var results = []; - var nodes = Array.from(element.childNodes).filter(e => e); + var nodes = [e for each (e in element.childNodes) if (e)] for (var i in nodes) { var n = nodes[i]; @@ -318,7 +318,7 @@ var _forChildren = function (element, name, value) { var _forAnonChildren = function (_document, element, name, value) { var results = []; - var nodes = Array.from(_document.getAnoymousNodes(element)).filter(e => e); + var nodes = [e for each (e in _document.getAnoymousNodes(element)) if (e)]; for (var i in nodes ) { var n = nodes[i]; @@ -381,7 +381,7 @@ var _byAnonAttrib = function (_document, parent, attributes) { } } - var nodes = Array.from(_document.getAnonymousNodes(parent)).filter(n => n.getAttribute); + var nodes = [n for each (n in _document.getAnonymousNodes(parent)) if (n.getAttribute)]; function resultsForNodes (nodes) { for (var i in nodes) { @@ -404,7 +404,7 @@ var _byAnonAttrib = function (_document, parent, attributes) { resultsForNodes(nodes); if (results.length == 0) { - resultsForNodes(Array.from(parent.childNodes).filter(n => n != undefined && n.getAttribute)); + resultsForNodes([n for each (n in parent.childNodes) if (n != undefined && n.getAttribute)]) } return _returnResult(results) @@ -440,7 +440,7 @@ function Lookup(_document, expression) { throw new Error('Lookup constructor did not recieve enough arguments.'); } - var expSplit = smartSplit(expression).filter(e => e != ''); + var expSplit = [e for each (e in smartSplit(expression) ) if (e != '')]; expSplit.unshift(_document); var nCases = {'id':_byID, 'name':_byName, 'attrib':_byAttrib, 'index':_byIndex}; diff --git a/services/sync/tps/extensions/mozmill/resource/driver/mozelement.js b/services/sync/tps/extensions/mozmill/resource/driver/mozelement.js index 850c86523..0af204794 100644 --- a/services/sync/tps/extensions/mozmill/resource/driver/mozelement.js +++ b/services/sync/tps/extensions/mozmill/resource/driver/mozelement.js @@ -9,9 +9,9 @@ var EXPORTED_SYMBOLS = ["Elem", "Selector", "ID", "Link", "XPath", "Name", "Look const NAMESPACE_XUL = "http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul"; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cu = 
Components.utils; var EventUtils = {}; Cu.import('resource://mozmill/stdlib/EventUtils.js', EventUtils); @@ -131,7 +131,7 @@ MozMillElement.prototype.__defineGetter__("element", function () { /** * Drag an element to the specified offset on another element, firing mouse and - * drag events. Adapted from EventUtils.js synthesizeDrop() + * drag events. Adapted from ChromeUtils.js synthesizeDrop() * * By default it will drag the source element over the destination's element * center with a "move" dropEffect. @@ -218,7 +218,7 @@ MozMillElement.prototype.dragToElement = function(aElement, aOffsetX, aOffsetY, EventUtils.synthesizeMouse(destNode, destCoords.x, destCoords.y, { type: "mousemove" }, destWindow); - var event = destWindow.document.createEvent("DragEvent"); + var event = destWindow.document.createEvent("DragEvents"); event.initDragEvent("dragenter", true, true, destWindow, 0, 0, 0, 0, 0, false, false, false, false, 0, null, dataTransfer); event.initDragEvent("dragover", true, true, destWindow, 0, 0, 0, 0, 0, diff --git a/services/sync/tps/extensions/mozmill/resource/driver/mozmill.js b/services/sync/tps/extensions/mozmill/resource/driver/mozmill.js index 1e422591f..283c9bfb4 100644 --- a/services/sync/tps/extensions/mozmill/resource/driver/mozmill.js +++ b/services/sync/tps/extensions/mozmill/resource/driver/mozmill.js @@ -13,9 +13,9 @@ var EXPORTED_SYMBOLS = ["controller", "utils", "elementslib", "os", "firePythonCallback", "getAddons" ]; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cu = Components.utils; Cu.import("resource://gre/modules/AddonManager.jsm"); diff --git a/services/sync/tps/extensions/mozmill/resource/modules/assertions.js b/services/sync/tps/extensions/mozmill/resource/modules/assertions.js index c76f95747..b49502057 100644 --- a/services/sync/tps/extensions/mozmill/resource/modules/assertions.js +++ b/services/sync/tps/extensions/mozmill/resource/modules/assertions.js @@ -4,7 +4,7 @@ var EXPORTED_SYMBOLS = ['Assert', 'Expect']; -var Cu = Components.utils; +const Cu = Components.utils; Cu.import("resource://gre/modules/Services.jsm"); @@ -658,10 +658,7 @@ Expect.prototype.waitFor = function Expect_waitFor(aCallback, aMessage, aTimeout try { Assert.prototype.waitFor.apply(this, arguments); } - catch (ex) { - if (!(ex instanceof errors.AssertionError)) { - throw ex; - } + catch (ex if ex instanceof errors.AssertionError) { message = ex.message; condition = false; } diff --git a/services/sync/tps/extensions/mozmill/resource/modules/frame.js b/services/sync/tps/extensions/mozmill/resource/modules/frame.js index dae8276b6..799e81d55 100644 --- a/services/sync/tps/extensions/mozmill/resource/modules/frame.js +++ b/services/sync/tps/extensions/mozmill/resource/modules/frame.js @@ -5,9 +5,9 @@ var EXPORTED_SYMBOLS = ['Collector','Runner','events', 'runTestFile', 'log', 'timers', 'persisted', 'shutdownApplication']; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cu = Components.utils; const TIMEOUT_SHUTDOWN_HTTPD = 15000; @@ -256,7 +256,7 @@ events.pass = function (obj) { events.currentTest.__passes__.push(obj); } - for (var timer of timers) { + for each (var timer in timers) { timer.actions.push( {"currentTest": events.currentModule.__file__ + "::" + events.currentTest.__name__, "obj": obj, @@ -286,7 +286,7 @@ events.fail = function (obj) { 
events.currentTest.__fails__.push(obj); } - for (var time of timers) { + for each (var time in timers) { timer.actions.push( {"currentTest": events.currentModule.__file__ + "::" + events.currentTest.__name__, "obj": obj, @@ -325,7 +325,7 @@ events.fireEvent = function (name, obj) { } } - for (var listener of this.globalListeners) { + for each(var listener in this.globalListeners) { listener(name, obj); } } diff --git a/services/sync/tps/extensions/mozmill/resource/modules/windows.js b/services/sync/tps/extensions/mozmill/resource/modules/windows.js index 1c75a2d3d..fe9cfaa01 100644 --- a/services/sync/tps/extensions/mozmill/resource/modules/windows.js +++ b/services/sync/tps/extensions/mozmill/resource/modules/windows.js @@ -4,9 +4,9 @@ var EXPORTED_SYMBOLS = ["init", "map"]; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cu = Components.utils; // imports var utils = {}; Cu.import('resource://mozmill/stdlib/utils.js', utils); diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/EventUtils.js b/services/sync/tps/extensions/mozmill/resource/stdlib/EventUtils.js index 7f08469f0..a821ab2e0 100644 --- a/services/sync/tps/extensions/mozmill/resource/stdlib/EventUtils.js +++ b/services/sync/tps/extensions/mozmill/resource/stdlib/EventUtils.js @@ -8,8 +8,8 @@ var EXPORTED_SYMBOLS = ["disableNonTestMouseEvents","sendMouseEvent", "sendChar" "synthesizeText", "synthesizeComposition", "synthesizeQuerySelectedText"]; -var Ci = Components.interfaces; -var Cc = Components.classes; +const Ci = Components.interfaces; +const Cc = Components.classes; var window = Cc["@mozilla.org/appshell/appShellService;1"] .getService(Ci.nsIAppShellService).hiddenDOMWindow; diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/os.js b/services/sync/tps/extensions/mozmill/resource/stdlib/os.js index ce88bea8a..fcda30572 100644 --- a/services/sync/tps/extensions/mozmill/resource/stdlib/os.js +++ b/services/sync/tps/extensions/mozmill/resource/stdlib/os.js @@ -4,9 +4,9 @@ var EXPORTED_SYMBOLS = ['listDirectory', 'getFileForPath', 'abspath', 'getPlatform']; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cu = Components.utils; Cu.import("resource://gre/modules/Services.jsm"); @@ -37,7 +37,7 @@ function abspath(rel, file) { file = file.parent; } - for (var p of relSplit) { + for each(var p in relSplit) { if (p == '..') { file = file.parent; } else if (p == '.') { diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/securable-module.js b/services/sync/tps/extensions/mozmill/resource/stdlib/securable-module.js index 2648afd27..794c3e2c2 100644 --- a/services/sync/tps/extensions/mozmill/resource/stdlib/securable-module.js +++ b/services/sync/tps/extensions/mozmill/resource/stdlib/securable-module.js @@ -40,8 +40,6 @@ const Cu = Components.utils; const Cr = Components.results; - Cu.import("resource://gre/modules/NetUtil.jsm"); - var exports = {}; var ios = Cc['@mozilla.org/network/io-service;1'] @@ -170,7 +168,8 @@ if (rootPaths) { if (rootPaths.constructor.name != "Array") rootPaths = [rootPaths]; - var fses = rootPaths.map(path => new exports.LocalFileSystem(path)); + var fses = [new exports.LocalFileSystem(path) + for each (path in rootPaths)]; options.fs = new exports.CompositeFileSystem(fses); } else options.fs = new exports.LocalFileSystem(); @@ -315,26 
+314,17 @@ else baseURI = ios.newURI(base, null, null); var newURI = ios.newURI(path, null, baseURI); - var channel = NetUtil.newChannel({ - uri: newURI, - loadUsingSystemPrincipal: true - }); + var channel = ios.newChannelFromURI(newURI); try { - channel.open2().close(); - } catch (e) { - if (e.result != Cr.NS_ERROR_FILE_NOT_FOUND) { - throw e; - } + channel.open().close(); + } catch (e if e.result == Cr.NS_ERROR_FILE_NOT_FOUND) { return null; } return newURI.spec; }, getFile: function getFile(path) { - var channel = NetUtil.newChannel({ - uri: path, - loadUsingSystemPrincipal: true - }); - var iStream = channel.open2(); + var channel = ios.newChannel(path, null, null); + var iStream = channel.open(); var ciStream = Cc["@mozilla.org/intl/converter-input-stream;1"]. createInstance(Ci.nsIConverterInputStream); var bufLen = 0x8000; diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/utils.js b/services/sync/tps/extensions/mozmill/resource/stdlib/utils.js index 73e13e11f..3dcca76e0 100644 --- a/services/sync/tps/extensions/mozmill/resource/stdlib/utils.js +++ b/services/sync/tps/extensions/mozmill/resource/stdlib/utils.js @@ -10,16 +10,16 @@ var EXPORTED_SYMBOLS = ["applicationName", "assert", "Copy", "getBrowserObject", "unwrapNode", "waitFor" ]; -var Cc = Components.classes; -var Ci = Components.interfaces; -var Cu = Components.utils; +const Cc = Components.classes; +const Ci = Components.interfaces; +const Cu = Components.utils; Cu.import("resource://gre/modules/NetUtil.jsm"); Cu.import("resource://gre/modules/Services.jsm"); const applicationIdMap = { - '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}': 'Firefox' + '{8de7fcbb-c55c-4fbe-bfc5-fc555c87dbc4}': 'Firefox' } const applicationName = applicationIdMap[Services.appinfo.ID] || Services.appinfo.name; @@ -83,7 +83,7 @@ function getWindows(type) { } function getMethodInWindows(methodName) { - for (var w of getWindows()) { + for each (var w in getWindows()) { if (w[methodName] != undefined) { return w[methodName]; } @@ -93,7 +93,7 @@ function getMethodInWindows(methodName) { } function getWindowByTitle(title) { - for (var w of getWindows()) { + for each (var w in getWindows()) { if (w.document.title && w.document.title == title) { return w; } diff --git a/services/sync/tps/extensions/tps/install.rdf b/services/sync/tps/extensions/tps/install.rdf index 3dcdc5e44..cc9491b07 100644 --- a/services/sync/tps/extensions/tps/install.rdf +++ b/services/sync/tps/extensions/tps/install.rdf @@ -12,7 +12,7 @@ <em:targetApplication> <!-- Firefox --> <Description> - <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id> + <em:id>{8de7fcbb-c55c-4fbe-bfc5-fc555c87dbc4}</em:id> <em:minVersion>24.0.*</em:minVersion> <em:maxVersion>31.0.*</em:maxVersion> </Description> diff --git a/services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm b/services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm index 86d0ed113..f5daa14be 100644 --- a/services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm +++ b/services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm @@ -12,7 +12,6 @@ const {classes: Cc, interfaces: Ci, utils: Cu} = Components; Cu.import("resource://gre/modules/FxAccounts.jsm"); Cu.import("resource://gre/modules/FxAccountsClient.jsm"); -Cu.import("resource://gre/modules/FxAccountsConfig.jsm"); Cu.import("resource://services-common/async.js"); Cu.import("resource://services-sync/main.js"); Cu.import("resource://tps/logger.jsm"); @@ -68,10 +67,7 @@ var Authentication = { Logger.AssertTrue(account["username"], "Username has been 
found"); Logger.AssertTrue(account["password"], "Password has been found"); - Logger.logInfo("Login user: " + account["username"]); - - // Required here since we don't go through the real login page - Async.promiseSpinningly(FxAccountsConfig.ensureConfigured()); + Logger.logInfo("Login user: " + account["username"] + '\n'); let client = new FxAccountsClient(); client.signIn(account["username"], account["password"], true).then(credentials => { @@ -96,26 +92,5 @@ var Authentication = { } catch (error) { throw new Error("signIn() failed with: " + error.message); } - }, - - /** - * Sign out of Firefox Accounts. It also clears out the device ID, if we find one. - */ - signOut() { - if (Authentication.isLoggedIn) { - let user = Authentication.getSignedInUser(); - if (!user) { - throw new Error("Failed to get signed in user!"); - } - let fxc = new FxAccountsClient(); - let { sessionToken, deviceId } = user; - if (deviceId) { - Logger.logInfo("Destroying device " + deviceId); - Async.promiseSpinningly(fxc.signOutAndDestroyDevice(sessionToken, deviceId, { service: "sync" })); - } else { - Logger.logError("No device found."); - Async.promiseSpinningly(fxc.signOut(sessionToken, { service: "sync" })); - } - } } }; diff --git a/services/sync/tps/extensions/tps/resource/auth/sync.jsm b/services/sync/tps/extensions/tps/resource/auth/sync.jsm index 35ffeb269..676b17a91 100644 --- a/services/sync/tps/extensions/tps/resource/auth/sync.jsm +++ b/services/sync/tps/extensions/tps/resource/auth/sync.jsm @@ -80,9 +80,5 @@ var Authentication = { } return true; - }, - - signOut() { - Weave.Service.logout(); } }; diff --git a/services/sync/tps/extensions/tps/resource/modules/addons.jsm b/services/sync/tps/extensions/tps/resource/modules/addons.jsm index 1570b42b1..5c308b5c2 100644 --- a/services/sync/tps/extensions/tps/resource/modules/addons.jsm +++ b/services/sync/tps/extensions/tps/resource/modules/addons.jsm @@ -3,13 +3,13 @@ * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ "use strict"; -var EXPORTED_SYMBOLS = ["Addon", "STATE_ENABLED", "STATE_DISABLED"]; +let EXPORTED_SYMBOLS = ["Addon", "STATE_ENABLED", "STATE_DISABLED"]; const {classes: Cc, interfaces: Ci, utils: Cu} = Components; Cu.import("resource://gre/modules/AddonManager.jsm"); Cu.import("resource://gre/modules/addons/AddonRepository.jsm"); -Cu.import("resource://gre/modules/NetUtil.jsm"); +Cu.import("resource://gre/modules/Services.jsm"); Cu.import("resource://services-common/async.js"); Cu.import("resource://services-sync/addonutils.js"); Cu.import("resource://services-sync/util.js"); @@ -20,11 +20,15 @@ const STATE_ENABLED = 1; const STATE_DISABLED = 2; function GetFileAsText(file) { - let channel = NetUtil.newChannel({ - uri: file, - loadUsingSystemPrincipal: true - }); - let inputStream = channel.open2(); + let channel = Services.io.newChannel2(file, + null, + null, + null, // aLoadingNode + Services.scriptSecurityManager.getSystemPrincipal(), + null, // aTriggeringPrincipal + Ci.nsILoadInfo.SEC_NORMAL, + Ci.nsIContentPolicy.TYPE_OTHER); + let inputStream = channel.open(); if (channel instanceof Ci.nsIHttpChannel && channel.responseStatus != 200) { return ""; diff --git a/services/sync/tps/extensions/tps/resource/modules/bookmarks.jsm b/services/sync/tps/extensions/tps/resource/modules/bookmarks.jsm index 857c0c1e8..6a288bbec 100644 --- a/services/sync/tps/extensions/tps/resource/modules/bookmarks.jsm +++ b/services/sync/tps/extensions/tps/resource/modules/bookmarks.jsm @@ -13,7 +13,6 @@ var EXPORTED_SYMBOLS = ["PlacesItem", "Bookmark", "Separator", "Livemark", const {classes: Cc, interfaces: Ci, utils: Cu} = Components; Cu.import("resource://gre/modules/PlacesBackups.jsm"); -Cu.import("resource://gre/modules/PlacesSyncUtils.jsm"); Cu.import("resource://gre/modules/PlacesUtils.jsm"); Cu.import("resource://gre/modules/Services.jsm"); Cu.import("resource://services-common/async.js"); @@ -110,11 +109,6 @@ PlacesItem.prototype = { return string; }, - GetSyncId() { - let guid = Async.promiseSpinningly(PlacesUtils.promiseItemGuid(this.props.item_id)); - return PlacesSyncUtils.bookmarks.guidToSyncId(guid); - }, - /** * GetPlacesNodeId * @@ -438,19 +432,8 @@ Bookmark.prototype = { * @return nothing */ SetKeyword: function(keyword) { - if (keyword != null) { - // Mirror logic from PlacesSyncUtils's updateBookmarkMetadata - let entry = Async.promiseSpinningly(PlacesUtils.keywords.fetch({ - url: this.props.uri, - })); - if (entry) { - Async.promiseSpinningly(PlacesUtils.keywords.remove(entry)); - } - Async.promiseSpinningly(PlacesUtils.keywords.insert({ - keyword: keyword, - url: this.props.uri - })); - } + if (keyword != null) + PlacesUtils.bookmarks.setKeywordForBookmark(this.props.item_id, keyword); }, /** @@ -559,11 +542,11 @@ Bookmark.prototype = { Update: function() { Logger.AssertTrue(this.props.item_id != -1 && this.props.item_id != null, "Invalid item_id during Remove"); + this.SetKeyword(this.updateProps.keyword); this.SetDescription(this.updateProps.description); this.SetLoadInSidebar(this.updateProps.loadInSidebar); this.SetTitle(this.updateProps.title); this.SetUri(this.updateProps.uri); - this.SetKeyword(this.updateProps.keyword); this.SetTags(this.updateProps.tags); this.SetLocation(this.updateProps.location); this.SetPosition(this.updateProps.position); @@ -595,8 +578,7 @@ Bookmark.prototype = { if (!this.CheckDescription(this.props.description)) return -1; if (this.props.keyword != null) { - let { keyword } = Async.promiseSpinningly( - 
PlacesSyncUtils.bookmarks.fetch(this.GetSyncId())); + let keyword = PlacesUtils.bookmarks.getKeywordForBookmark(this.props.item_id); if (keyword != this.props.keyword) { Logger.logPotentialError("Incorrect keyword - expected: " + this.props.keyword + ", actual: " + keyword + diff --git a/services/sync/tps/extensions/tps/resource/modules/forms.jsm b/services/sync/tps/extensions/tps/resource/modules/forms.jsm index deb1a28a5..ece2e14f7 100644 --- a/services/sync/tps/extensions/tps/resource/modules/forms.jsm +++ b/services/sync/tps/extensions/tps/resource/modules/forms.jsm @@ -13,45 +13,74 @@ const {classes: Cc, interfaces: Ci, utils: Cu} = Components; Cu.import("resource://tps/logger.jsm"); -Cu.import("resource://gre/modules/FormHistory.jsm"); -Cu.import("resource://gre/modules/Log.jsm"); +let formService = Cc["@mozilla.org/satchel/form-history;1"] + .getService(Ci.nsIFormHistory2); /** * FormDB * - * Helper object containing methods to interact with the FormHistory module. + * Helper object containing methods to interact with the moz_formhistory + * SQLite table. */ -var FormDB = { - _update(data) { - return new Promise((resolve, reject) => { - let handlers = { - handleError(error) { - Logger.logError("Error occurred updating form history: " + Log.exceptionStr(error)); - reject(error); - }, - handleCompletion(reason) { - resolve(); - } - } - FormHistory.update(data, handlers); - }); +let FormDB = { + /** + * makeGUID + * + * Generates a brand-new globally unique identifier (GUID). Borrowed + * from Weave's utils.js. + * + * @return the new guid + */ + makeGUID: function makeGUID() { + // 70 characters that are not-escaped URL-friendly + const code = + "!()*-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz~"; + + let guid = ""; + let num = 0; + let val; + + // Generate ten 70-value characters for a 70^10 (~61.29-bit) GUID + for (let i = 0; i < 10; i++) { + // Refresh the number source after using it a few times + if (i == 0 || i == 5) + num = Math.random(); + + // Figure out which code to use for the next GUID character + num *= 70; + val = Math.floor(num); + guid += code[val]; + num -= val; + } + + return guid; }, /** * insertValue * - * Adds the specified value for the specified fieldname into form history. + * Inserts the specified value for the specified fieldname into the + * moz_formhistory table. 
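
The makeGUID helper above builds a ten-character identifier over a 70-symbol alphabet, stretching each Math.random() double across five characters before refreshing it. Ten draws from 70 symbols give 10 * log2(70), roughly 61.29 bits, which is where the comment's figure comes from. A standalone sketch that checks the arithmetic and reuses the same loop:

const code =
  "!()*-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz~";

// 10 draws from a 70-symbol alphabet:
//   log2(70^10) = 10 * log2(70) ~= 10 * 6.129 ~= 61.29 bits
console.log(code.length);        // 70
console.log(10 * Math.log2(70)); // ~61.29

function makeGUID() { // same algorithm as the hunk above
  let guid = "", num = 0, val;
  for (let i = 0; i < 10; i++) {
    if (i == 0 || i == 5)
      num = Math.random(); // refresh the entropy source twice per GUID
    num *= 70;
    val = Math.floor(num);
    guid += code[val];
    num -= val;
  }
  return guid;
}

console.log(makeGUID()); // e.g. "ahF0(kT9~q": ten URL-friendly characters
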
* * @param fieldname The form fieldname to insert * @param value The form value to insert * @param us The time, in microseconds, to use for the lastUsed * and firstUsed columns - * @return Promise<undefined> + * @return nothing */ - insertValue(fieldname, value, us) { - let data = { op: "add", fieldname, value, timesUsed: 1, - firstUsed: us, lastUsed: us } - return this._update(data); + insertValue: function (fieldname, value, us) { + let query = this.createStatement( + "INSERT INTO moz_formhistory " + + "(fieldname, value, timesUsed, firstUsed, lastUsed, guid) VALUES " + + "(:fieldname, :value, :timesUsed, :firstUsed, :lastUsed, :guid)"); + query.params.fieldname = fieldname; + query.params.value = value; + query.params.timesUsed = 1; + query.params.firstUsed = us; + query.params.lastUsed = us; + query.params.guid = this.makeGUID(); + query.execute(); + query.reset(); }, /** @@ -61,10 +90,15 @@ var FormDB = { * * @param id The id of the row to update * @param newvalue The new value to set - * @return Promise<undefined> + * @return nothing */ - updateValue(id, newvalue) { - return this._update({ op: "update", guid: id, value: newvalue }); + updateValue: function (id, newvalue) { + let query = this.createStatement( + "UPDATE moz_formhistory SET value = :value WHERE id = :id"); + query.params.id = id; + query.params.value = newvalue; + query.execute(); + query.reset(); }, /** @@ -75,44 +109,52 @@ var FormDB = { * * @param fieldname The fieldname of the row to query * @param value The value of the row to query - * @return Promise<null if no row is found with the specified fieldname and value, - * or an object containing the row's guid, lastUsed, and firstUsed - * values> + * @return null if no row is found with the specified fieldname and value, + * or an object containing the row's id, lastUsed, and firstUsed + * values */ - getDataForValue(fieldname, value) { - return new Promise((resolve, reject) => { - let result = null; - let handlers = { - handleResult(oneResult) { - if (result != null) { - reject("more than 1 result for this query"); - return; - } - result = oneResult; - }, - handleError(error) { - Logger.logError("Error occurred updating form history: " + Log.exceptionStr(error)); - reject(error); - }, - handleCompletion(reason) { - resolve(result); - } - } - FormHistory.search(["guid", "lastUsed", "firstUsed"], { fieldname }, handlers); - }); + getDataForValue: function (fieldname, value) { + let query = this.createStatement( + "SELECT id, lastUsed, firstUsed FROM moz_formhistory WHERE " + + "fieldname = :fieldname AND value = :value"); + query.params.fieldname = fieldname; + query.params.value = value; + if (!query.executeStep()) + return null; + + return { + id: query.row.id, + lastUsed: query.row.lastUsed, + firstUsed: query.row.firstUsed + }; }, /** - * remove + * createStatement * - * Removes the specified GUID from the database. + * Creates a statement from a SQL string. This function is borrowed + * from Weave's forms.js. 
* - * @param guid The guid of the item to delete - * @return Promise<> + * @param query The SQL query string + * @return the mozIStorageStatement created from the specified SQL */ - remove(guid) { - return this._update({ op: "remove", guid }); - }, + createStatement: function createStatement(query) { + try { + // Just return the statement right away if it's okay + return formService.DBConnection.createStatement(query); + } + catch(ex) { + // Assume guid column must not exist yet, so add it with an index + formService.DBConnection.executeSimpleSQL( + "ALTER TABLE moz_formhistory ADD COLUMN guid TEXT"); + formService.DBConnection.executeSimpleSQL( + "CREATE INDEX IF NOT EXISTS moz_formhistory_guid_index " + + "ON moz_formhistory (guid)"); + } + + // Try creating the query now that the column exists + return formService.DBConnection.createStatement(query); + } }; /** @@ -162,18 +204,18 @@ FormData.prototype = { Logger.AssertTrue(this.fieldname != null && this.value != null, "Must specify both fieldname and value"); - return FormDB.getDataForValue(this.fieldname, this.value).then(formdata => { - if (!formdata) { - // this item doesn't exist yet in the db, so we need to insert it - return FormDB.insertValue(this.fieldname, this.value, - this.hours_to_us(this.date)); - } else { - /* Right now, we ignore this case. If bug 552531 is ever fixed, - we might need to add code here to update the firstUsed or - lastUsed fields, as appropriate. - */ - } - }); + let formdata = FormDB.getDataForValue(this.fieldname, this.value); + if (!formdata) { + // this item doesn't exist yet in the db, so we need to insert it + FormDB.insertValue(this.fieldname, this.value, + this.hours_to_us(this.date)); + } + else { + /* Right now, we ignore this case. If bug 552531 is ever fixed, + we might need to add code here to update the firstUsed or + lastUsed fields, as appropriate. + */ + } }, /** @@ -185,22 +227,21 @@ FormData.prototype = { * @return true if this entry exists in the database, otherwise false */ Find: function() { - return FormDB.getDataForValue(this.fieldname, this.value).then(formdata => { - let status = formdata != null; - if (status) { - /* - //form history dates currently not synced! bug 552531 - let us = this.hours_to_us(this.date); - status = Logger.AssertTrue( - us >= formdata.firstUsed && us <= formdata.lastUsed, - "No match for with that date value"); - - if (status) - */ - this.id = formdata.guid; - } - return status; - }); + let formdata = FormDB.getDataForValue(this.fieldname, this.value); + let status = formdata != null; + if (status) { + /* + //form history dates currently not synced! bug 552531 + let us = this.hours_to_us(this.date); + status = Logger.AssertTrue( + us >= formdata.firstUsed && us <= formdata.lastUsed, + "No match for with that date value"); + + if (status) + */ + this.id = formdata.id; + } + return status; }, /** @@ -214,6 +255,7 @@ FormData.prototype = { Remove: function() { /* Right now Weave doesn't handle this correctly, see bug 568363. 
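
Taken together, the replacement FormDB above is a small synchronous data-access helper over moz_formhistory. A sketch of how the rest of forms.jsm is expected to drive it, with hypothetical field names; timestamps are in microseconds, per the parameter docs above:

// Insert, look up, then update one form-history entry.
let nowUs = Date.now() * 1000; // microseconds, as insertValue expects

FormDB.insertValue("search-field", "weave sync", nowUs);

let row = FormDB.getDataForValue("search-field", "weave sync");
if (row) {
  // row carries { id, lastUsed, firstUsed } straight from the SELECT
  FormDB.updateValue(row.id, "weave sync updated");
}
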
*/ - return FormDB.remove(this.id); + formService.removeEntry(this.fieldname, this.value); + return true; }, }; diff --git a/services/sync/tps/extensions/tps/resource/modules/history.jsm b/services/sync/tps/extensions/tps/resource/modules/history.jsm index 78deb42ab..ab0514bcc 100644 --- a/services/sync/tps/extensions/tps/resource/modules/history.jsm +++ b/services/sync/tps/extensions/tps/resource/modules/history.jsm @@ -33,7 +33,7 @@ var DumpHistory = function TPS_History__DumpHistory() { let node = root.getChild(i); let uri = node.uri; let curvisits = HistoryEntry._getVisits(uri); - for (var visit of curvisits) { + for each (var visit in curvisits) { Logger.logInfo("URI: " + uri + ", type=" + visit.type + ", date=" + visit.date, true); } } @@ -70,8 +70,8 @@ var HistoryEntry = { "WHERE place_id = (" + "SELECT id " + "FROM moz_places " + - "WHERE url_hash = hash(:url) AND url = :url) " + - "ORDER BY date DESC LIMIT 20"); + "WHERE url = :url) " + + "ORDER BY date DESC LIMIT 10"); this.__defineGetter__("_visitStm", () => stm); return stm; }, @@ -110,7 +110,7 @@ var HistoryEntry = { uri: uri, visits: [] }; - for (let visit of item.visits) { + for each (visit in item.visits) { place.visits.push({ visitDate: usSinceEpoch + (visit.date * 60 * 60 * 1000 * 1000), transitionType: visit.type @@ -150,8 +150,8 @@ var HistoryEntry = { "History entry in test file must have both 'visits' " + "and 'uri' properties"); let curvisits = this._getVisits(item.uri); - for (let visit of curvisits) { - for (let itemvisit of item.visits) { + for each (visit in curvisits) { + for each (itemvisit in item.visits) { let expectedDate = itemvisit.date * 60 * 60 * 1000 * 1000 + usSinceEpoch; if (visit.type == itemvisit.type && visit.date == expectedDate) { @@ -161,7 +161,7 @@ var HistoryEntry = { } let all_items_found = true; - for (let itemvisit of item.visits) { + for each (itemvisit in item.visits) { all_items_found = all_items_found && "found" in itemvisit; Logger.logInfo("History entry for " + item.uri + ", type:" + itemvisit.type + ", date:" + itemvisit.date + @@ -189,16 +189,9 @@ var HistoryEntry = { PlacesUtils.history.removePagesFromHost(item.host, false); } else if ("begin" in item && "end" in item) { - let cb = Async.makeSpinningCallback(); - let msSinceEpoch = parseInt(usSinceEpoch / 1000); - let filter = { - beginDate: new Date(msSinceEpoch + (item.begin * 60 * 60 * 1000)), - endDate: new Date(msSinceEpoch + (item.end * 60 * 60 * 1000)) - }; - PlacesUtils.history.removeVisitsByFilter(filter) - .catch(ex => Logger.AssertTrue(false, "An error occurred while deleting history: " + ex)) - .then(result => {cb(null, result)}, err => {cb(err)}); - Async.waitForSyncCallback(cb); + PlacesUtils.history.removeVisitsByTimeframe( + usSinceEpoch + (item.begin * 60 * 60 * 1000 * 1000), + usSinceEpoch + (item.end * 60 * 60 * 1000 * 1000)); } else { Logger.AssertTrue(false, "invalid entry in delete history"); diff --git a/services/sync/tps/extensions/tps/resource/modules/passwords.jsm b/services/sync/tps/extensions/tps/resource/modules/passwords.jsm index a84800bab..f7221224a 100644 --- a/services/sync/tps/extensions/tps/resource/modules/passwords.jsm +++ b/services/sync/tps/extensions/tps/resource/modules/passwords.jsm @@ -14,7 +14,7 @@ const {classes: Cc, interfaces: Ci, utils: Cu} = Components; Cu.import("resource://gre/modules/Services.jsm"); Cu.import("resource://tps/logger.jsm"); -var nsLoginInfo = new Components.Constructor( +let nsLoginInfo = new Components.Constructor( "@mozilla.org/login-manager/loginInfo;1", 
Ci.nsILoginInfo, "init"); diff --git a/services/sync/tps/extensions/tps/resource/modules/prefs.jsm b/services/sync/tps/extensions/tps/resource/modules/prefs.jsm index 286c5a6b5..18a6e32ee 100644 --- a/services/sync/tps/extensions/tps/resource/modules/prefs.jsm +++ b/services/sync/tps/extensions/tps/resource/modules/prefs.jsm @@ -13,7 +13,7 @@ const {classes: Cc, interfaces: Ci, utils: Cu} = Components; const WEAVE_PREF_PREFIX = "services.sync.prefs.sync."; -var prefs = Cc["@mozilla.org/preferences-service;1"] +let prefs = Cc["@mozilla.org/preferences-service;1"] .getService(Ci.nsIPrefBranch); Cu.import("resource://tps/logger.jsm"); diff --git a/services/sync/tps/extensions/tps/resource/modules/tabs.jsm b/services/sync/tps/extensions/tps/resource/modules/tabs.jsm index af983573f..a2ce1afc1 100644 --- a/services/sync/tps/extensions/tps/resource/modules/tabs.jsm +++ b/services/sync/tps/extensions/tps/resource/modules/tabs.jsm @@ -13,7 +13,7 @@ const {classes: Cc, interfaces: Ci, utils: Cu} = Components; Cu.import("resource://services-sync/main.js"); -var BrowserTabs = { +let BrowserTabs = { /** * Add * @@ -49,12 +49,8 @@ var BrowserTabs = { Find: function(uri, title, profile) { // Find the uri in Weave's list of tabs for the given profile. let engine = Weave.Service.engineManager.get("tabs"); - for (let [guid, client] of Object.entries(engine.getAllClients())) { - if (!client.tabs) { - continue; - } - for (let key in client.tabs) { - let tab = client.tabs[key]; + for (let [guid, client] in Iterator(engine.getAllClients())) { + for each (tab in client.tabs) { let weaveTabUrl = tab.urlHistory[0]; if (uri == weaveTabUrl && profile == client.clientName) if (title == undefined || title == tab.title) diff --git a/services/sync/tps/extensions/tps/resource/modules/windows.jsm b/services/sync/tps/extensions/tps/resource/modules/windows.jsm index d892aea56..62cc80d2c 100644 --- a/services/sync/tps/extensions/tps/resource/modules/windows.jsm +++ b/services/sync/tps/extensions/tps/resource/modules/windows.jsm @@ -14,7 +14,7 @@ const {classes: Cc, interfaces: Ci, utils: Cu} = Components; Cu.import("resource://services-sync/main.js"); -var BrowserWindows = { +let BrowserWindows = { /** * Add * diff --git a/services/sync/tps/extensions/tps/resource/tps.jsm b/services/sync/tps/extensions/tps/resource/tps.jsm index f4cc0214a..d3a8b0b7d 100644 --- a/services/sync/tps/extensions/tps/resource/tps.jsm +++ b/services/sync/tps/extensions/tps/resource/tps.jsm @@ -7,28 +7,20 @@ * listed symbols will exposed on import, and only when and where imported. 
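
The tabs.jsm Find hunk above leans on one more Gecko-only construct: Iterator(obj), which yields [key, value] pairs from a plain object, combined with "for each" over each client's tabs. A short sketch of the legacy spelling (in a comment) against the portable loop it replaced, using made-up client data:

let client = { clientName: "profile1",
               tabs: { t1: { title: "a" }, t2: { title: "b" } } };
let clients = { "guid-1": client };

// Legacy: for (let [guid, c] in Iterator(clients)) { for each (tab in c.tabs) ... }
// Portable equivalent:
for (let guid in clients) {
  let c = clients[guid];
  for (let key in c.tabs) {
    let tab = c.tabs[key];
    console.log(guid, c.clientName, tab.title);
  }
}
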
*/ -var EXPORTED_SYMBOLS = ["ACTIONS", "TPS"]; +let EXPORTED_SYMBOLS = ["ACTIONS", "TPS"]; const {classes: Cc, interfaces: Ci, utils: Cu} = Components; -var module = this; +let module = this; // Global modules -Cu.import("resource://gre/modules/Log.jsm"); Cu.import("resource://gre/modules/XPCOMUtils.jsm"); Cu.import("resource://gre/modules/Services.jsm"); -Cu.import("resource://gre/modules/AppConstants.jsm"); -Cu.import("resource://gre/modules/PlacesUtils.jsm"); -Cu.import("resource://gre/modules/FileUtils.jsm"); Cu.import("resource://services-common/async.js"); Cu.import("resource://services-sync/constants.js"); Cu.import("resource://services-sync/main.js"); Cu.import("resource://services-sync/util.js"); -Cu.import("resource://services-sync/telemetry.js"); -Cu.import("resource://services-sync/bookmark_validator.js"); -Cu.import("resource://services-sync/engines/passwords.js"); -Cu.import("resource://services-sync/engines/forms.js"); -Cu.import("resource://services-sync/engines/addons.js"); + // TPS modules Cu.import("resource://tps/logger.jsm"); @@ -50,11 +42,6 @@ var prefs = Cc["@mozilla.org/preferences-service;1"] var mozmillInit = {}; Cu.import('resource://mozmill/driver/mozmill.js', mozmillInit); -XPCOMUtils.defineLazyGetter(this, "fileProtocolHandler", () => { - let fileHandler = Services.io.getProtocolHandler("file"); - return fileHandler.QueryInterface(Ci.nsIFileProtocolHandler); -}); - // Options for wiping data during a sync const SYNC_RESET_CLIENT = "resetClient"; const SYNC_WIPE_CLIENT = "wipeClient"; @@ -90,7 +77,7 @@ const ACTIONS = [ const OBSERVER_TOPICS = ["fxaccounts:onlogin", "fxaccounts:onlogout", "private-browsing", - "profile-before-change", + "quit-application-requested", "sessionstore-windows-restored", "weave:engine:start-tracking", "weave:engine:stop-tracking", @@ -102,19 +89,18 @@ const OBSERVER_TOPICS = ["fxaccounts:onlogin", "weave:service:sync:start" ]; -var TPS = { +let TPS = { _currentAction: -1, _currentPhase: -1, _enabledEngines: null, _errors: 0, + _finalPhase: false, _isTracking: false, _operations_pending: 0, _phaseFinished: false, _phaselist: {}, _setupComplete: false, _syncActive: false, - _syncCount: 0, - _syncsReportedViaTelemetry: 0, _syncErrors: 0, _syncWipeAction: null, _tabsAdded: 0, @@ -122,11 +108,6 @@ var TPS = { _test: null, _triggeredSync: false, _usSinceEpoch: 0, - _requestedQuit: false, - shouldValidateAddons: false, - shouldValidateBookmarks: false, - shouldValidatePasswords: false, - shouldValidateForms: false, _init: function TPS__init() { // Check if Firefox Accounts is enabled @@ -141,8 +122,6 @@ var TPS = { Services.obs.addObserver(this, aTopic, true); }, this); - // Configure some logging prefs for Sync itself. - Weave.Svc.Prefs.set("log.appender.dump", "Debug"); // Import the appropriate authentication module if (this.fxaccounts_enabled) { Cu.import("resource://tps/auth/fxaccounts.jsm", module); @@ -152,16 +131,9 @@ var TPS = { } }, - DumpError(msg, exc = null) { + DumpError: function TPS__DumpError(msg) { this._errors++; - let errInfo; - if (exc) { - errInfo = Log.exceptionStr(exc); // includes details and stack-trace. - } else { - // always write a stack even if no error passed. 
- errInfo = Log.stackTrace(new Error()); - } - Logger.logError(`[phase ${this._currentPhase}] ${msg} - ${errInfo}`); + Logger.logError("[phase" + this._currentPhase + "] " + msg); this.quit(); }, @@ -177,7 +149,14 @@ var TPS = { Logger.logInfo("private browsing " + data); break; - case "profile-before-change": + case "quit-application-requested": + // Ensure that we eventually wipe the data on the server + if (this._errors || !this._phaseFinished || this._finalPhase) { + try { + this.WipeServer(); + } catch (ex) {} + } + OBSERVER_TOPICS.forEach(function(topic) { Services.obs.removeObserver(this, topic); }, this); @@ -253,7 +232,7 @@ var TPS = { } } catch (e) { - this.DumpError("Observer failed", e); + this.DumpError("Exception caught: " + Utils.exceptionStr(e)); return; } }, @@ -286,7 +265,6 @@ var TPS = { }, quit: function TPS__quit() { - this._requestedQuit = true; this.goQuitApplication(); }, @@ -307,7 +285,7 @@ var TPS = { HandleTabs: function (tabs, action) { this._tabsAdded = tabs.length; this._tabsFinished = 0; - for (let tab of tabs) { + for each (let tab in tabs) { Logger.logInfo("executing action " + action.toUpperCase() + " on tab " + JSON.stringify(tab)); switch(action) { @@ -352,7 +330,7 @@ var TPS = { }, HandlePrefs: function (prefs, action) { - for (let pref of prefs) { + for each (pref in prefs) { Logger.logInfo("executing action " + action.toUpperCase() + " on pref " + JSON.stringify(pref)); let preference = new Preference(pref); @@ -371,25 +349,23 @@ var TPS = { }, HandleForms: function (data, action) { - this.shouldValidateForms = true; - for (let datum of data) { + for each (datum in data) { Logger.logInfo("executing action " + action.toUpperCase() + " on form entry " + JSON.stringify(datum)); let formdata = new FormData(datum, this._usSinceEpoch); switch(action) { case ACTION_ADD: - Async.promiseSpinningly(formdata.Create()); + formdata.Create(); break; case ACTION_DELETE: - Async.promiseSpinningly(formdata.Remove()); + formdata.Remove(); break; case ACTION_VERIFY: - Logger.AssertTrue(Async.promiseSpinningly(formdata.Find()), - "form data not found"); + Logger.AssertTrue(formdata.Find(), "form data not found"); break; case ACTION_VERIFY_NOT: - Logger.AssertTrue(!Async.promiseSpinningly(formdata.Find()), - "form data found, but it shouldn't be present"); + Logger.AssertTrue(!formdata.Find(), + "form data found, but it shouldn't be present"); break; default: Logger.AssertTrue(false, "invalid action: " + action); @@ -401,7 +377,7 @@ var TPS = { HandleHistory: function (entries, action) { try { - for (let entry of entries) { + for each (entry in entries) { Logger.logInfo("executing action " + action.toUpperCase() + " on history entry " + JSON.stringify(entry)); switch(action) { @@ -433,32 +409,31 @@ var TPS = { }, HandlePasswords: function (passwords, action) { - this.shouldValidatePasswords = true; try { - for (let password of passwords) { + for each (password in passwords) { let password_id = -1; Logger.logInfo("executing action " + action.toUpperCase() + " on password " + JSON.stringify(password)); - let passwordOb = new Password(password); + var password = new Password(password); switch (action) { case ACTION_ADD: - Logger.AssertTrue(passwordOb.Create() > -1, "error adding password"); + Logger.AssertTrue(password.Create() > -1, "error adding password"); break; case ACTION_VERIFY: - Logger.AssertTrue(passwordOb.Find() != -1, "password not found"); + Logger.AssertTrue(password.Find() != -1, "password not found"); break; case ACTION_VERIFY_NOT: - 
Logger.AssertTrue(passwordOb.Find() == -1, + Logger.AssertTrue(password.Find() == -1, "password found, but it shouldn't exist"); break; case ACTION_DELETE: - Logger.AssertTrue(passwordOb.Find() != -1, "password not found"); - passwordOb.Remove(); + Logger.AssertTrue(password.Find() != -1, "password not found"); + password.Remove(); break; case ACTION_MODIFY: - if (passwordOb.updateProps != null) { - Logger.AssertTrue(passwordOb.Find() != -1, "password not found"); - passwordOb.Update(); + if (password.updateProps != null) { + Logger.AssertTrue(password.Find() != -1, "password not found"); + password.Update(); } break; default: @@ -475,8 +450,7 @@ var TPS = { }, HandleAddons: function (addons, action, state) { - this.shouldValidateAddons = true; - for (let entry of addons) { + for each (let entry in addons) { Logger.logInfo("executing action " + action.toUpperCase() + " on addon " + JSON.stringify(entry)); let addon = new Addon(this, entry); @@ -505,12 +479,11 @@ var TPS = { }, HandleBookmarks: function (bookmarks, action) { - this.shouldValidateBookmarks = true; try { let items = []; - for (let folder in bookmarks) { + for (folder in bookmarks) { let last_item_pos = -1; - for (let bookmark of bookmarks[folder]) { + for each (bookmark in bookmarks[folder]) { Logger.clearPotentialError(); let placesItem; bookmark['location'] = folder; @@ -552,7 +525,7 @@ var TPS = { } if (action == ACTION_DELETE || action == ACTION_MODIFY) { - for (let item of items) { + for each (item in items) { Logger.logInfo("executing action " + action.toUpperCase() + " on bookmark " + JSON.stringify(item)); switch(action) { @@ -597,163 +570,10 @@ var TPS = { Logger.logInfo("mozmill setTest: " + obj.name); }, - Cleanup() { - try { - this.WipeServer(); - } catch (ex) { - Logger.logError("Failed to wipe server: " + Log.exceptionStr(ex)); - } - try { - if (Authentication.isLoggedIn) { - // signout and wait for Sync to completely reset itself. - Logger.logInfo("signing out"); - let waiter = this.createEventWaiter("weave:service:start-over:finish"); - Authentication.signOut(); - waiter(); - Logger.logInfo("signout complete"); - } - } catch (e) { - Logger.logError("Failed to sign out: " + Log.exceptionStr(e)); - } - }, - - /** - * Use Sync's bookmark validation code to see if we've corrupted the tree. - */ - ValidateBookmarks() { - - let getServerBookmarkState = () => { - let bookmarkEngine = Weave.Service.engineManager.get('bookmarks'); - let collection = bookmarkEngine.itemSource(); - let collectionKey = bookmarkEngine.service.collectionKeys.keyForCollection(bookmarkEngine.name); - collection.full = true; - let items = []; - collection.recordHandler = function(item) { - item.decrypt(collectionKey); - items.push(item.cleartext); - }; - collection.get(); - return items; - }; - let serverRecordDumpStr; - try { - Logger.logInfo("About to perform bookmark validation"); - let clientTree = Async.promiseSpinningly(PlacesUtils.promiseBookmarksTree("", { - includeItemIds: true - })); - let serverRecords = getServerBookmarkState(); - // We can't wait until catch to stringify this, since at that point it will have cycles. - serverRecordDumpStr = JSON.stringify(serverRecords); - - let validator = new BookmarkValidator(); - let {problemData} = validator.compareServerWithClient(serverRecords, clientTree); - - for (let {name, count} of problemData.getSummary()) { - // Exclude mobile showing up on the server hackily so that we don't - // report it every time, see bug 1273234 and 1274394 for more information. 
- if (name === "serverUnexpected" && problemData.serverUnexpected.indexOf("mobile") >= 0) { - --count; - } - if (count) { - // Log this out before we assert. This is useful in the context of TPS logs, since we - // can see the IDs in the test files. - Logger.logInfo(`Validation problem: "${name}": ${JSON.stringify(problemData[name])}`); - } - Logger.AssertEqual(count, 0, `Bookmark validation error of type ${name}`); - } - } catch (e) { - // Dump the client records (should always be doable) - DumpBookmarks(); - // Dump the server records if gotten them already. - if (serverRecordDumpStr) { - Logger.logInfo("Server bookmark records:\n" + serverRecordDumpStr + "\n"); - } - this.DumpError("Bookmark validation failed", e); - } - Logger.logInfo("Bookmark validation finished"); - }, - - ValidateCollection(engineName, ValidatorType) { - let serverRecordDumpStr; - let clientRecordDumpStr; - try { - Logger.logInfo(`About to perform validation for "${engineName}"`); - let engine = Weave.Service.engineManager.get(engineName); - let validator = new ValidatorType(engine); - let serverRecords = validator.getServerItems(engine); - let clientRecords = Async.promiseSpinningly(validator.getClientItems()); - try { - // This substantially improves the logs for addons while not making a - // substantial difference for the other two - clientRecordDumpStr = JSON.stringify(clientRecords.map(r => { - let res = validator.normalizeClientItem(r); - delete res.original; // Try and prevent cyclic references - return res; - })); - } catch (e) { - // ignore the error, the dump string is just here to make debugging easier. - clientRecordDumpStr = "<Cyclic value>"; - } - try { - serverRecordDumpStr = JSON.stringify(serverRecords); - } catch (e) { - // as above - serverRecordDumpStr = "<Cyclic value>"; - } - let { problemData } = validator.compareClientWithServer(clientRecords, serverRecords); - for (let { name, count } of problemData.getSummary()) { - if (count) { - Logger.logInfo(`Validation problem: "${name}": ${JSON.stringify(problemData[name])}`); - } - Logger.AssertEqual(count, 0, `Validation error for "${engineName}" of type "${name}"`); - } - } catch (e) { - // Dump the client records if possible - if (clientRecordDumpStr) { - Logger.logInfo(`Client state for ${engineName}:\n${clientRecordDumpStr}\n`); - } - // Dump the server records if gotten them already. - if (serverRecordDumpStr) { - Logger.logInfo(`Server state for ${engineName}:\n${serverRecordDumpStr}\n`); - } - this.DumpError(`Validation failed for ${engineName}`, e); - } - Logger.logInfo(`Validation finished for ${engineName}`); - }, - - ValidatePasswords() { - return this.ValidateCollection("passwords", PasswordValidator); - }, - - ValidateForms() { - return this.ValidateCollection("forms", FormValidator); - }, - - ValidateAddons() { - return this.ValidateCollection("addons", AddonValidator); - }, - RunNextTestAction: function() { try { if (this._currentAction >= - this._phaselist[this._currentPhase].length) { - // Run necessary validations and then finish up - if (this.shouldValidateBookmarks) { - this.ValidateBookmarks(); - } - if (this.shouldValidatePasswords) { - this.ValidatePasswords(); - } - if (this.shouldValidateForms) { - this.ValidateForms(); - } - if (this.shouldValidateAddons) { - this.ValidateAddons(); - } - // Force this early so that we run the validation and detect missing pings - // *before* we start shutting down, since if we do it after, the python - // code won't notice the failure. 
- SyncTelemetry.shutdown(); + this._phaselist["phase" + this._currentPhase].length) { // we're all done Logger.logInfo("test phase " + this._currentPhase + ": " + (this._errors ? "FAIL" : "PASS")); @@ -761,7 +581,7 @@ var TPS = { this.quit(); return; } - this.seconds_since_epoch = prefs.getIntPref("tps.seconds_since_epoch", 0); + if (this.seconds_since_epoch) this._usSinceEpoch = this.seconds_since_epoch * 1000 * 1000; else { @@ -769,7 +589,7 @@ var TPS = { return; } - let phase = this._phaselist[this._currentPhase]; + let phase = this._phaselist["phase" + this._currentPhase]; let action = phase[this._currentAction]; Logger.logInfo("starting action: " + action[0].name); action[0].apply(this, action.slice(1)); @@ -781,64 +601,12 @@ var TPS = { this._currentAction++; } catch(e) { - if (Async.isShutdownException(e)) { - if (this._requestedQuit) { - Logger.logInfo("Sync aborted due to requested shutdown"); - } else { - this.DumpError("Sync aborted due to shutdown, but we didn't request it"); - } - } else { - this.DumpError("RunNextTestAction failed", e); - } + this.DumpError("Exception caught: " + Utils.exceptionStr(e)); return; } this.RunNextTestAction(); }, - _getFileRelativeToSourceRoot(testFileURL, relativePath) { - let file = fileProtocolHandler.getFileFromURLSpec(testFileURL); - let root = file // <root>/services/sync/tests/tps/test_foo.js - .parent // <root>/services/sync/tests/tps - .parent // <root>/services/sync/tests - .parent // <root>/services/sync - .parent // <root>/services - .parent // <root> - ; - root.appendRelativePath(relativePath); - return root; - }, - - // Attempt to load the sync_ping_schema.json and initialize `this.pingValidator` - // based on the source of the tps file. Assumes that it's at "../unit/sync_ping_schema.json" - // relative to the directory the tps test file (testFile) is contained in. - _tryLoadPingSchema(testFile) { - try { - let schemaFile = this._getFileRelativeToSourceRoot(testFile, - "services/sync/tests/unit/sync_ping_schema.json"); - - let stream = Cc["@mozilla.org/network/file-input-stream;1"] - .createInstance(Ci.nsIFileInputStream); - - let jsonReader = Cc["@mozilla.org/dom/json;1"] - .createInstance(Components.interfaces.nsIJSON); - - stream.init(schemaFile, FileUtils.MODE_RDONLY, FileUtils.PERMS_FILE, 0); - let schema = jsonReader.decodeFromStream(stream, stream.available()); - Logger.logInfo("Successfully loaded schema") - - // Importing resource://testing-common/* isn't possible from within TPS, - // so we load Ajv manually. - let ajvFile = this._getFileRelativeToSourceRoot(testFile, "testing/modules/ajv-4.1.1.js"); - let ajvURL = fileProtocolHandler.getURLSpecFromFile(ajvFile); - let ns = {}; - Cu.import(ajvURL, ns); - let ajv = new ns.Ajv({ async: "co*" }); - this.pingValidator = ajv.compile(schema); - } catch (e) { - this.DumpError(`Failed to load ping schema and AJV relative to "${testFile}".`, e); - } - }, - /** * Runs a single test phase. 
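
With this import, RunNextTestAction and _executeTestPhase look phases up under the key "phase" + N again, and the Login step is prepended inside _executeTestPhase when the phase number is 1 or lower, rather than when the first phase is registered. A sketch of the phase declarations a TPS test file feeds into Phase() under that scheme; the profile names and the bookmarks fixture are illustrative:

// A TPS test file names the profile that runs each phase...
var phases = { "phase1": "profile1",
               "phase2": "profile2" };

// ...and registers that phase's actions; the runner finds them again as
// _phaselist["phase" + N] and prepends [Login] itself when N <= 1.
Phase("phase1", [
  [Bookmarks.add, bookmarks_initial], // bookmarks_initial: hypothetical fixture
  [Sync]
]);

Phase("phase2", [
  [Sync],
  [Bookmarks.verify, bookmarks_initial]
]);
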
* @@ -872,8 +640,6 @@ var TPS = { Logger.logInfo("Sync version: " + WEAVE_VERSION); Logger.logInfo("Firefox buildid: " + Services.appinfo.appBuildID); Logger.logInfo("Firefox version: " + Services.appinfo.version); - Logger.logInfo("Firefox source revision: " + (AppConstants.SOURCE_REVISION_URL || "unknown")); - Logger.logInfo("Firefox platform: " + AppConstants.platform); Logger.logInfo('Firefox Accounts enabled: ' + this.fxaccounts_enabled); // do some sync housekeeping @@ -887,15 +653,12 @@ var TPS = { this.waitForEvent("weave:service:ready"); } - // We only want to do this if we modified the bookmarks this phase. - this.shouldValidateBookmarks = false; - // Always give Sync an extra tick to initialize. If we waited for the // service:ready event, this is required to ensure all handlers have // executed. Utils.nextTick(this._executeTestPhase.bind(this, file, phase, settings)); } catch(e) { - this.DumpError("RunTestPhase failed", e); + this.DumpError("Exception caught: " + Utils.exceptionStr(e)); return; } }, @@ -907,28 +670,17 @@ var TPS = { */ _executeTestPhase: function _executeTestPhase(file, phase, settings) { try { - this.config = JSON.parse(prefs.getCharPref('tps.config')); // parse the test file Services.scriptloader.loadSubScript(file, this); this._currentPhase = phase; - if (this._currentPhase.startsWith("cleanup-")) { - let profileToClean = Cc["@mozilla.org/toolkit/profile-service;1"] - .getService(Ci.nsIToolkitProfileService) - .selectedProfile.name; - this.phases[this._currentPhase] = profileToClean; - this.Phase(this._currentPhase, [[this.Cleanup]]); - } else { - // Don't bother doing this for cleanup phases. - this._tryLoadPingSchema(file); - } - let this_phase = this._phaselist[this._currentPhase]; + let this_phase = this._phaselist["phase" + this._currentPhase]; if (this_phase == undefined) { this.DumpError("invalid phase " + this._currentPhase); return; } - if (this.phases[this._currentPhase] == undefined) { + if (this.phases["phase" + this._currentPhase] == undefined) { this.DumpError("no profile defined for phase " + this._currentPhase); return; } @@ -937,7 +689,7 @@ var TPS = { // care about. if (settings.ignoreUnusedEngines && Array.isArray(this._enabledEngines)) { let names = {}; - for (let name of this._enabledEngines) { + for each (let name in this._enabledEngines) { names[name] = true; } @@ -948,63 +700,52 @@ var TPS = { } } } - Logger.logInfo("Starting phase " + this._currentPhase); - Logger.logInfo("setting client.name to " + this.phases[this._currentPhase]); - Weave.Svc.Prefs.set("client.name", this.phases[this._currentPhase]); + Logger.logInfo("Starting phase " + parseInt(phase, 10) + "/" + + Object.keys(this._phaselist).length); - this._interceptSyncTelemetry(); + Logger.logInfo("setting client.name to " + this.phases["phase" + this._currentPhase]); + Weave.Svc.Prefs.set("client.name", this.phases["phase" + this._currentPhase]); - // start processing the test actions - this._currentAction = 0; - } - catch(e) { - this.DumpError("_executeTestPhase failed", e); - return; - } - }, + // TODO Phases should be defined in a data type that has strong + // ordering, not by lexical sorting. + let currentPhase = parseInt(this._currentPhase, 10); - /** - * Override sync telemetry functions so that we can detect errors generating - * the sync ping, and count how many pings we report. 
-   */
-  _interceptSyncTelemetry() {
-    let originalObserve = SyncTelemetry.observe;
-    let self = this;
-    SyncTelemetry.observe = function() {
-      try {
-        originalObserve.apply(this, arguments);
-      } catch (e) {
-        self.DumpError("Error when generating sync telemetry", e);
+      // Login at the beginning of the test.
+      if (currentPhase <= 1) {
+        this_phase.unshift([this.Login]);
       }
-    };
-    SyncTelemetry.submit = record => {
-      Logger.logInfo("Intercepted sync telemetry submission: " + JSON.stringify(record));
-      this._syncsReportedViaTelemetry += record.syncs.length + (record.discarded || 0);
-      if (record.discarded) {
-        if (record.syncs.length != SyncTelemetry.maxPayloadCount) {
-          this.DumpError("Syncs discarded from ping before maximum payload count reached");
-        }
+
+      // Wipe the server at the end of the final test phase.
+      if (currentPhase >= Object.keys(this.phases).length) {
+        this._finalPhase = true;
       }
-      // If this is the shutdown ping, check and see that the telemetry saw all the syncs.
-      if (record.why === "shutdown") {
-        // If we happen to sync outside of tps manually causing it, its not an
-        // error in the telemetry, so we only complain if we didn't see all of them.
-        if (this._syncsReportedViaTelemetry < this._syncCount) {
-          this.DumpError(`Telemetry missed syncs: Saw ${this._syncsReportedViaTelemetry}, should have >= ${this._syncCount}.`);
-        }
+
+      // If a custom server was specified, set it now
+      if (this.config["serverURL"]) {
+        Weave.Service.serverURL = this.config.serverURL;
+        prefs.setCharPref('tps.serverURL', this.config.serverURL);
       }
-      if (!record.syncs.length) {
-        // Note: we're overwriting submit, so this is called even for pings that
-        // may have no data (which wouldn't be submitted to telemetry and would
-        // fail validation).
-        return;
+
+      // Store account details as prefs so they're accessible to the Mozmill
+      // framework.
+      if (this.fxaccounts_enabled) {
+        prefs.setCharPref('tps.account.username', this.config.fx_account.username);
+        prefs.setCharPref('tps.account.password', this.config.fx_account.password);
       }
-      if (!this.pingValidator(record)) {
-        // Note that we already logged the record.
-        this.DumpError("Sync ping validation failed with errors: " + JSON.stringify(this.pingValidator.errors));
+      else {
+        prefs.setCharPref('tps.account.username', this.config.sync_account.username);
+        prefs.setCharPref('tps.account.password', this.config.sync_account.password);
+        prefs.setCharPref('tps.account.passphrase', this.config.sync_account.passphrase);
       }
-    };
+
+      // start processing the test actions
+      this._currentAction = 0;
+    }
+    catch(e) {
+      this.DumpError("Exception caught: " + Utils.exceptionStr(e));
+      return;
+    }
   },
 
   /**
@@ -1018,10 +759,6 @@ var TPS = {
    * Array of functions/actions to perform.
    */
   Phase: function Test__Phase(phasename, fnlist) {
-    if (Object.keys(this._phaselist).length === 0) {
-      // This is the first phase, add that we need to login.
-      fnlist.unshift([this.Login]);
-    }
     this._phaselist[phasename] = fnlist;
   },
 
@@ -1067,56 +804,28 @@ var TPS = {
   },
 
   /**
-   * Return an object that when called, will block until the named event
-   * is observed. This is similar to waitForEvent, although is typically safer
-   * if you need to do some other work that may make the event fire.
-   *
-   * eg:
-   *    doSomething(); // causes the event to be fired.
-   *    waitForEvent("something");
-   * is risky as the call to doSomething may trigger the event before the
-   * waitForEvent call is made. Contrast with:
-   *
-   *    let waiter = createEventWaiter("something"); // does *not* block.
-   *    doSomething(); // causes the event to be fired.
-   *    waiter(); // will return as soon as the event fires, even if it fires
-   *              // before this function is called.
-   *
-   * @param aEventName
-   *        String event to wait for.
-   */
-  createEventWaiter(aEventName) {
-    Logger.logInfo("Setting up wait for " + aEventName + "...");
-    let cb = Async.makeSpinningCallback();
-    Svc.Obs.add(aEventName, cb);
-    return function() {
-      try {
-        cb.wait();
-      } finally {
-        Svc.Obs.remove(aEventName, cb);
-        Logger.logInfo(aEventName + " observed!");
-      }
-    }
-  },
-
-
-  /**
+  /**
    * Synchronously wait for the named event to be observed.
    *
    * When the event is observed, the function will wait an extra tick before
    * returning.
    *
-   * Note that in general, you should probably use createEventWaiter unless you
-   * are 100% sure that the event being waited on can only be sent after this
-   * call adds the listener.
-   *
   * @param aEventName
   *        String event to wait for.
   */
   waitForEvent: function waitForEvent(aEventName) {
-    this.createEventWaiter(aEventName)();
+    Logger.logInfo("Waiting for " + aEventName + "...");
+    let cb = Async.makeSpinningCallback();
+    Svc.Obs.add(aEventName, cb);
+    cb.wait();
+    Svc.Obs.remove(aEventName, cb);
+    Logger.logInfo(aEventName + " observed!");
+
+    cb = Async.makeSpinningCallback();
+    Utils.nextTick(cb);
+    cb.wait();
   },
 
+
   /**
    * Waits for Sync to logged in before returning
   */
@@ -1159,12 +868,6 @@ var TPS = {
     this.waitForSetupComplete();
     Logger.AssertEqual(Weave.Status.service, Weave.STATUS_OK, "Weave status OK");
     this.waitForTracking();
-    // If fxaccounts is enabled we get an initial sync at login time - let
-    // that complete.
-    if (this.fxaccounts_enabled) {
-      this._triggeredSync = true;
-      this.waitForSyncFinished();
-    }
   },
 
   /**
@@ -1189,12 +892,10 @@ var TPS = {
     }
 
     this.Login(false);
-    ++this._syncCount;
 
     this._triggeredSync = true;
     this.StartAsyncOperation();
     Weave.Service.sync();
-    Logger.logInfo("Sync is complete");
   },
 
   WipeServer: function TPS__WipeServer() {
@@ -1230,9 +931,6 @@ var Addons = {
   verifyNot: function Addons__verifyNot(addons) {
     TPS.HandleAddons(addons, ACTION_VERIFY_NOT);
   },
-  skipValidation() {
-    TPS.shouldValidateAddons = false;
-  }
 };
 
 var Bookmarks = {
@@ -1250,9 +948,6 @@ var Bookmarks = {
   },
   verifyNot: function Bookmarks__verifyNot(bookmarks) {
     TPS.HandleBookmarks(bookmarks, ACTION_VERIFY_NOT);
-  },
-  skipValidation() {
-    TPS.shouldValidateBookmarks = false;
   }
 };
 
@@ -1301,9 +996,6 @@ var Passwords = {
   },
   verifyNot: function Passwords__verifyNot(passwords) {
     this.HandlePasswords(passwords, ACTION_VERIFY_NOT);
-  },
-  skipValidation() {
-    TPS.shouldValidatePasswords = false;
   }
 };
 
@@ -1337,4 +1029,4 @@ var Windows = {
 };
 
 // Initialize TPS
-TPS._init();
+TPS._init();
\ No newline at end of file
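
The `Phase`/`_phaselist` machinery restored above is driven by TPS test files that map each phase to a profile and register one action list per phase, keyed by the `"phase" + N` convention the Tycho code looks up. The following is a minimal sketch of such a test file; the fixture data and profile names are hypothetical, but the `phases` map and `Phase()` calls follow the API shown in the diff:

    // Hypothetical TPS test file (e.g. test_example.js); fixture data is invented.
    var phases = { "phase1": "profile1",   // phase -> profile that runs it
                   "phase2": "profile2" };

    // Assumed fixture shape for the Bookmarks helper defined above.
    var bookmarks_initial = {
      toolbar: [{ uri: "http://www.example.com/", title: "Example" }]
    };

    // With the code above, Login is unshifted automatically when
    // currentPhase <= 1, and the server is wiped after the final phase.
    Phase("phase1", [
      [Bookmarks.add, bookmarks_initial],
      [Sync],
    ]);

    Phase("phase2", [
      [Sync],
      [Bookmarks.verify, bookmarks_initial],
    ]);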
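The restored `waitForEvent` spin-waits on an observer notification: `Async.makeSpinningCallback()` returns a callback whose `wait()` pumps the event loop until the callback fires. The removed `createEventWaiter` comment block describes the race this pattern can hit: if the event fires before the observer is registered, `wait()` never returns. A sketch of the safer register-first ordering, using the same Sync utilities named in the diff (the helper itself is hypothetical, not part of TPS):

    // Hypothetical helper: register the observer *before* doing the work that
    // fires the event, then block until it is observed.
    function runAndWaitFor(eventName, triggerFn) {
      let cb = Async.makeSpinningCallback();
      Svc.Obs.add(eventName, cb);   // listen first, so the event can't be missed
      try {
        triggerFn();                // now cause the event
        cb.wait();                  // spin the event loop until the observer fires
      } finally {
        Svc.Obs.remove(eventName, cb);
      }
    }

    // Usage: runAndWaitFor("weave:service:sync:finish", () => Weave.Service.sync());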
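The `tps.account.*` prefs written in `_executeTestPhase` imply a JSON blob handed to TPS through the `tps.config` pref. Its exact schema is not part of this diff; the shape below is only inferred from the fields the added code reads, with placeholder values:

    // Inferred config shape; all values are placeholders.
    var exampleConfig = {
      sync_account: {
        username:   "tps@example.com",
        password:   "password",
        passphrase: "abcdefghijkl"        // Sync 1.1 recovery key (format assumed)
      },
      fx_account: {                       // read only when fxaccounts_enabled
        username:   "tps@example.com",
        password:   "password"
      },
      serverURL: "http://localhost:8080/" // optional custom Sync server
    };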